vdpau: Port to 1.0

New base class for vdpau decoders
* mpeg2 ported and activated
* h264 ported but deactivated
* Uses GstMpegVideo meta
This commit is contained in:
Edward Hervey 2012-09-20 19:03:11 +02:00 committed by Edward Hervey
parent 2296296a51
commit e3c71c65fe
63 changed files with 1688 additions and 8704 deletions

View file

@ -2401,8 +2401,6 @@ sys/shm/Makefile
sys/uvch264/Makefile
sys/vcd/Makefile
sys/vdpau/Makefile
sys/vdpau/gstvdp/Makefile
sys/vdpau/basevideodecoder/Makefile
sys/pvr2d/Makefile
sys/wasapi/Makefile
sys/wininet/Makefile

View file

@ -142,11 +142,11 @@ else
VCD_DIR=
endif
#if USE_VDPAU
#VDPAU_DIR=vdpau
#else
#VDPAU_DIR=
#endif
if USE_VDPAU
VDPAU_DIR=vdpau
else
VDPAU_DIR=
endif
if USE_DIRECT3D9
WINSCREENCAP_DIR=winscreencap

View file

@ -1,45 +1,39 @@
SUBDIRS = basevideodecoder gstvdp
plugin_LTLIBRARIES = libgstvdpau.la
libgstvdpau_la_SOURCES = \
gstvdpau.c \
gstvdpvideopostprocess.c \
gstvdpsink.c \
mpeg/gstvdpmpegframe.c \
mpeg/mpegutil.c \
mpeg/gstvdpmpegdec.c \
h264/gstnalreader.c \
h264/gsth264parser.c \
h264/gsth264frame.c \
h264/gsth264dpb.c \
h264/gstvdph264dec.c \
mpeg4/mpeg4util.c \
mpeg4/gstmpeg4frame.c \
mpeg4/gstvdpmpeg4dec.c
gstvdputils.c \
gstvdpvideomemory.c \
gstvdpvideobufferpool.c \
gstvdpdevice.c \
gstvdpdecoder.c \
mpeg/gstvdpmpegdec.c
# \
# h264/gsth264dpb.c \
# h264/gstvdph264dec.c
libgstvdpau_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS)
libgstvdpau_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \
libgstvdpau_la_CFLAGS = $(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) \
$(GST_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS)
libgstvdpau_la_LIBADD = \
$(top_builddir)/gst-libs/gst/codecparsers/libgstcodecparsers-$(GST_API_VERSION).la \
$(GST_LIBS) $(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE_LIBS) $(X11_LIBS) -lgstvideo-$(GST_API_VERSION) \
-lgstinterfaces-$(GST_API_VERSION) $(VDPAU_LIBS) \
gstvdp/libgstvdp-@GST_API_VERSION@.la \
$(LIBM)
$(VDPAU_LIBS) $(LIBM)
libgstvdpau_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvdpau_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = \
gstvdputils.h \
gstvdpvideomemory.h \
gstvdpvideobufferpool.h \
gstvdpvideopostprocess.h \
gstvdpsink.h \
mpeg/gstvdpmpegframe.h \
mpeg/mpegutil.h \
mpeg/gstvdpmpegdec.h \
h264/gstnalreader.h \
h264/gsth264parser.h \
h264/gsth264frame.h \
h264/gsth264dpb.h \
h264/gstvdph264dec.h \
# h264/gsth264dpb.h \
# h264/gstvdph264dec.h \
mpeg4/mpeg4util.h \
mpeg4/gstmpeg4frame.h \
mpeg4/gstvdpmpeg4dec.h

View file

@ -1,19 +0,0 @@
noinst_LTLIBRARIES = libgstbasevideodecoder.la
libgstbasevideodecoder_la_SOURCES = \
gstvideoframe.c \
gstbasevideodecoder.c
libgstbasevideodecoder_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) \
-DGstBaseVideoDecoder=SatBaseVideoDecoder \
-DGstBaseVideoDecoderClass=SatBaseVideoDecoderClass
libgstbasevideodecoder_la_LIBADD = \
$(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_API_VERSION) \
$(GST_BASE_LIBS) $(GST_LIBS)
libgstbasevideodecoder_la_LDFLAGS = $(GST_ALL_LDFLAGS) -module -avoid-version
noinst_HEADERS = \
gstvideoframe.h \
gstbasevideodecoder.h \
gstbasevideoutils.h

File diff suppressed because it is too large Load diff

View file

@ -1,179 +0,0 @@
/* GStreamer
 * Copyright (C) 2008 David Schleef <ds@schleef.org>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef _GST_BASE_VIDEO_DECODER_H_
#define _GST_BASE_VIDEO_DECODER_H_

#include "gstbasevideoutils.h"
#include "gstvideoframe.h"

G_BEGIN_DECLS

#define GST_TYPE_BASE_VIDEO_DECODER (gst_base_video_decoder_get_type())
#define GST_BASE_VIDEO_DECODER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_BASE_VIDEO_DECODER, GstBaseVideoDecoder))
#define GST_BASE_VIDEO_DECODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_BASE_VIDEO_DECODER, GstBaseVideoDecoderClass))
#define GST_BASE_VIDEO_DECODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_BASE_VIDEO_DECODER, GstBaseVideoDecoderClass))
#define GST_IS_BASE_VIDEO_DECODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_BASE_VIDEO_DECODER))
/* FIX: the parameter was declared as "obj" while the expansion referenced
 * "klass", so every use of this macro failed to compile. */
#define GST_IS_BASE_VIDEO_DECODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_BASE_VIDEO_DECODER))

/**
 * GST_BASE_VIDEO_DECODER_SINK_NAME:
 *
 * The name of the templates for the sink pad.
 */
#define GST_BASE_VIDEO_DECODER_SINK_NAME "sink"
/**
 * GST_BASE_VIDEO_DECODER_SRC_NAME:
 *
 * The name of the templates for the source pad.
 */
#define GST_BASE_VIDEO_DECODER_SRC_NAME "src"

/**
 * GST_BASE_VIDEO_DECODER_SRC_PAD:
 * @obj: base video decoder instance
 *
 * Gives the pointer to the source #GstPad object of the element.
 */
#define GST_BASE_VIDEO_DECODER_SRC_PAD(obj) (((GstBaseVideoDecoder *) (obj))->srcpad)

/**
 * GST_BASE_VIDEO_DECODER_SINK_PAD:
 * @obj: base video decoder instance
 *
 * Gives the pointer to the sink #GstPad object of the element.
 */
#define GST_BASE_VIDEO_DECODER_SINK_PAD(obj) (((GstBaseVideoDecoder *) (obj))->sinkpad)

/**
 * GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA:
 *
 * Custom flow return value signalling that more input data is needed.
 */
#define GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS

typedef enum _GstBaseVideoDecoderScanResult GstBaseVideoDecoderScanResult;

/* Result of a subclass scan over the input adapter. */
enum _GstBaseVideoDecoderScanResult
{
  GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK,
  GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC,
  GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA
};

typedef struct _GstBaseVideoDecoder GstBaseVideoDecoder;
typedef struct _GstBaseVideoDecoderClass GstBaseVideoDecoderClass;

struct _GstBaseVideoDecoder
{
  GstElement element;

  /*< private >*/
  GstPad *sinkpad;
  GstPad *srcpad;
  GstAdapter *input_adapter;

  gboolean have_sync;
  gboolean discont;

  GstVideoState state;
  GstSegment segment;

  GstCaps *caps;
  gboolean have_src_caps;

  /* frame currently being assembled from parsed input */
  GstVideoFrame *current_frame;

  GList *timestamps;
  guint64 field_index;

  GstClockTime timestamp_offset;
  GstClockTime last_timestamp;

  gdouble proportion;
  GstClockTime earliest_time;

  guint64 input_offset;
  guint64 current_buf_offset;
  guint64 prev_buf_offset;

  gboolean have_segment;

  /* properties */
  gboolean sink_clipping;
  gboolean packetized;
};

struct _GstBaseVideoDecoderClass
{
  GstElementClass element_class;

  gboolean (*start) (GstBaseVideoDecoder *coder);
  gboolean (*stop) (GstBaseVideoDecoder *coder);
  gboolean (*flush) (GstBaseVideoDecoder *coder);

  gboolean (*set_sink_caps) (GstBaseVideoDecoder *base_video_decoder,
      GstCaps *caps);
  GstPad *(*create_srcpad) (GstBaseVideoDecoder * base_video_decoder,
      GstBaseVideoDecoderClass *base_video_decoder_class);

  gint (*scan_for_sync) (GstBaseVideoDecoder *coder, GstAdapter *adapter);
  GstBaseVideoDecoderScanResult (*scan_for_packet_end)
      (GstBaseVideoDecoder *coder, GstAdapter *adapter, guint *size, gboolean at_eos);
  GstFlowReturn (*parse_data) (GstBaseVideoDecoder *decoder,
      GstBuffer *buf, gboolean at_eos, GstVideoFrame *frame);

  GstVideoFrame *(*create_frame) (GstBaseVideoDecoder *coder);
  GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrame *frame,
      GstClockTimeDiff deadline);
  GstFlowReturn (*shape_output) (GstBaseVideoDecoder *coder,
      GstBuffer *buf);
};

GType gst_base_video_decoder_get_type (void);

GstVideoFrame *gst_base_video_decoder_get_frame (GstBaseVideoDecoder *coder,
    gint frame_number);
GstVideoFrame *gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder *coder);

GstFlowReturn gst_base_video_decoder_finish_frame (GstBaseVideoDecoder *base_video_decoder,
    GstVideoFrame *frame);
void gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame);

GstFlowReturn
gst_base_video_decoder_have_frame (GstBaseVideoDecoder *base_video_decoder,
    gboolean include_current_buf, GstVideoFrame **new_frame);

/* NOTE(review): GstVideoState is passed and returned by value here —
 * confirm callers expect a struct copy rather than a pointer. */
GstVideoState gst_base_video_decoder_get_state (GstBaseVideoDecoder *base_video_decoder);
void gst_base_video_decoder_set_state (GstBaseVideoDecoder *base_video_decoder,
    GstVideoState state);

gboolean gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder);

void gst_base_video_decoder_lost_sync (GstBaseVideoDecoder *base_video_decoder);

G_END_DECLS

#endif

View file

@ -1,55 +0,0 @@
/* GStreamer
 * Copyright (C) 2008 David Schleef <ds@schleef.org>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef _GST_BASE_VIDEO_UTILS_H_
#define _GST_BASE_VIDEO_UTILS_H_

/* FIX: the header previously did "#define GST_USE_UNSTABLE_API 1" right
 * before this check, which made the warning unreachable and silently
 * opted every includer into the unstable API. Includers must define the
 * macro themselves, as the message says. */
#ifndef GST_USE_UNSTABLE_API
#warning "The base video utils API is unstable and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif

#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/base/gstadapter.h>

G_BEGIN_DECLS

typedef struct _GstVideoState GstVideoState;

/* Negotiated/parsed stream parameters shared between the base decoder
 * and its subclasses. */
struct _GstVideoState
{
  gint width, height;
  gint fps_n, fps_d;            /* framerate fraction */
  gint par_n, par_d;            /* pixel aspect ratio fraction */
  gboolean interlaced;

  gint clean_width, clean_height;
  gint clean_offset_left, clean_offset_top;

  gint bytes_per_picture;

  GstBuffer *codec_data;
};

/* FIX: G_BEGIN_DECLS was never closed; without this, C++ translation
 * units including the header had an unterminated extern "C" block. */
G_END_DECLS

#endif /* _GST_BASE_VIDEO_UTILS_H_ */

View file

@ -1,107 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvideoframe.h"
GST_DEBUG_CATEGORY_STATIC (gst_video_frame_debug);
#define GST_CAT_DEFAULT gst_video_frame_debug
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_video_frame_debug, "gstvideoframe", 0, "Video Frame");
/* Allocates a new GstVideoFrame through the GstMiniObject machinery so
 * that refcounting and finalization are handled by the type system. */
GstVideoFrame *
gst_video_frame_new (void)
{
  return (GstVideoFrame *) gst_mini_object_new (GST_TYPE_VIDEO_FRAME);
}
/* Parent class pointer, saved in class_init and used to chain up finalize. */
static GObjectClass *gst_video_frame_parent_class;

/* Releases the input/output buffers owned by the frame, then chains up so
 * the mini-object machinery can free the frame itself. */
static void
gst_video_frame_finalize (GstVideoFrame * frame)
{
  if (frame->sink_buffer)
    gst_buffer_unref (frame->sink_buffer);
  if (frame->src_buffer)
    gst_buffer_unref (frame->src_buffer);

  GST_MINI_OBJECT_CLASS (gst_video_frame_parent_class)->finalize
      (GST_MINI_OBJECT (frame));
}
/* Instance initializer: no buffers attached yet, no timestamps known,
 * and a frame spans two fields by default. */
static void
gst_video_frame_init (GstVideoFrame * frame, gpointer g_class)
{
  frame->sink_buffer = NULL;
  frame->src_buffer = NULL;

  frame->upstream_timestamp = GST_CLOCK_TIME_NONE;
  frame->upstream_duration = GST_CLOCK_TIME_NONE;
  frame->parsed_timestamp = GST_CLOCK_TIME_NONE;

  frame->n_fields = 2;
}
/* Class initializer: remembers the parent class and installs our
 * finalize implementation on the mini-object class. */
static void
gst_video_frame_class_init (gpointer g_class, gpointer class_data)
{
  GstMiniObjectClass *mo_class;

  gst_video_frame_parent_class = g_type_class_peek_parent (g_class);

  mo_class = GST_MINI_OBJECT_CLASS (g_class);
  mo_class->finalize =
      (GstMiniObjectFinalizeFunction) gst_video_frame_finalize;
}
/* Lazily registers and returns the GstVideoFrame GType (a GstMiniObject
 * subclass). Also initializes the debug category on first call. */
GType
gst_video_frame_get_type (void)
{
  static GType _gst_video_frame_type = 0;

  if (G_UNLIKELY (_gst_video_frame_type == 0)) {
    static const GTypeInfo info = {
      sizeof (GstVideoFrameClass),
      NULL,                     /* base_init */
      NULL,                     /* base_finalize */
      gst_video_frame_class_init,
      NULL,                     /* class_finalize */
      NULL,                     /* class_data */
      sizeof (GstVideoFrame),
      0,                        /* n_preallocs */
      (GInstanceInitFunc) gst_video_frame_init,
      NULL
    };

    _gst_video_frame_type = g_type_register_static (GST_TYPE_MINI_OBJECT,
        "GstVideoFrame", &info, 0);

    /* NOTE(review): DEBUG_INIT is declared with one parameter ("bla") but
     * invoked with none — relies on an empty macro argument being valid. */
    DEBUG_INIT ();
  }
  return _gst_video_frame_type;
}

View file

@ -1,149 +0,0 @@
/*
 * GStreamer
 * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef _GST_VIDEO_FRAME_H_
#define _GST_VIDEO_FRAME_H_

#include <gst/gst.h>

G_BEGIN_DECLS

#define GST_TYPE_VIDEO_FRAME      (gst_video_frame_get_type())
#define GST_IS_VIDEO_FRAME(obj)   (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VIDEO_FRAME))
#define GST_VIDEO_FRAME(obj)      (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VIDEO_FRAME, GstVideoFrame))
/* FIX: parenthesize the macro argument so casting an expression
 * (e.g. GST_VIDEO_FRAME_CAST (a ? b : c)) binds correctly. */
#define GST_VIDEO_FRAME_CAST(obj) ((GstVideoFrame *)(obj))

/**
 * GstVideoFrameFlag:
 * @GST_VIDEO_FRAME_FLAG_PREROLL: the frame is part of a preroll and should not be
 * displayed.
 * @GST_VIDEO_FRAME_FLAG_DISCONT: the frame marks a discontinuity in the stream.
 * This typically occurs after a seek or a dropped buffer from a live or
 * network source.
 * @GST_VIDEO_FRAME_FLAG_GAP: the frame has been created to fill a gap in the
 * stream and contains media neutral data (elements can switch to optimized code
 * path that ignores the buffer content).
 * @GST_VIDEO_FRAME_FLAG_KEYFRAME: the frame is a keyframe.
 * @GST_VIDEO_FRAME_FLAG_SYNC_POINT: the frame marks a sync point.
 * @GST_VIDEO_FRAME_FLAG_EOS: the frame is the last in the stream.
 * @GST_VIDEO_FRAME_FLAG_TFF: If the frame is interlaced, then the first
 * field in the video frame is the top field. If unset, the bottom field is first.
 * @GST_VIDEO_FRAME_FLAG_LAST: additional flags can be added starting from this flag.
 *
 * A set of frame flags used to describe properties of a #GstVideoFrame.
 */
typedef enum
{
  GST_VIDEO_FRAME_FLAG_PREROLL = (GST_MINI_OBJECT_FLAG_LAST << 0),
  GST_VIDEO_FRAME_FLAG_DISCONT = (GST_MINI_OBJECT_FLAG_LAST << 1),
  GST_VIDEO_FRAME_FLAG_GAP = (GST_MINI_OBJECT_FLAG_LAST << 2),
  GST_VIDEO_FRAME_FLAG_KEYFRAME = (GST_MINI_OBJECT_FLAG_LAST << 3),
  GST_VIDEO_FRAME_FLAG_SYNC_POINT = (GST_MINI_OBJECT_FLAG_LAST << 4),
  GST_VIDEO_FRAME_FLAG_EOS = (GST_MINI_OBJECT_FLAG_LAST << 5),
  GST_VIDEO_FRAME_FLAG_TFF = (GST_MINI_OBJECT_FLAG_LAST << 6),
  GST_VIDEO_FRAME_FLAG_LAST = (GST_MINI_OBJECT_FLAG_LAST << 7)
} GstVideoFrameFlag;

typedef struct _GstVideoFrame GstVideoFrame;
typedef struct _GstVideoFrameClass GstVideoFrameClass;

/* A refcounted mini-object pairing an encoded input buffer with its
 * decoded output buffer, plus the timing data parsed for it. */
struct _GstVideoFrame
{
  GstMiniObject mini_object;

  GstClockTime upstream_timestamp;
  GstClockTime upstream_duration;
  GstClockTime parsed_timestamp;

  guint n_fields;

  GstBuffer *sink_buffer;       /* encoded input */
  GstBuffer *src_buffer;        /* decoded output */
};

struct _GstVideoFrameClass
{
  GstMiniObjectClass mini_object_class;
};

/* refcounting */
/**
 * gst_video_frame_ref:
 * @frame: a #GstVideoFrame.
 *
 * Increases the refcount of the given frame by one.
 *
 * Returns: @frame
 */
#ifdef _FOOL_GTK_DOC_
/* FIX: this declaration previously (and wrongly) named gst_buffer_ref. */
G_INLINE_FUNC GstVideoFrame * gst_video_frame_ref (GstVideoFrame * frame);
#endif

static inline GstVideoFrame *
gst_video_frame_ref (GstVideoFrame *frame)
{
  return (GstVideoFrame *) gst_mini_object_ref (GST_MINI_OBJECT_CAST (frame));
}

/**
 * gst_video_frame_unref:
 * @frame: a #GstVideoFrame.
 *
 * Decreases the refcount of the frame. If the refcount reaches 0, the frame
 * will be freed.
 */
#ifdef _FOOL_GTK_DOC_
G_INLINE_FUNC void gst_video_frame_unref (GstVideoFrame * frame);
#endif

static inline void
gst_video_frame_unref (GstVideoFrame * frame)
{
  gst_mini_object_unref (GST_MINI_OBJECT_CAST (frame));
}

/**
 * GST_VIDEO_FRAME_FLAG_IS_SET:
 * @frame: a #GstVideoFrame.
 * @flag: the #GstVideoFrameFlag to check.
 *
 * Gives the status of a specific flag on a video frame.
 */
#define GST_VIDEO_FRAME_FLAG_IS_SET(frame,flag) GST_MINI_OBJECT_FLAG_IS_SET (frame, flag)
/**
 * GST_VIDEO_FRAME_FLAG_SET:
 * @frame: a #GstVideoFrame.
 * @flag: the #GstVideoFrameFlag to set.
 *
 * Sets a frame flag on a video frame.
 */
#define GST_VIDEO_FRAME_FLAG_SET(frame,flag) GST_MINI_OBJECT_FLAG_SET (frame, flag)
/**
 * GST_VIDEO_FRAME_FLAG_UNSET:
 * @frame: a #GstVideoFrame.
 * @flag: the #GstVideoFrameFlag to clear.
 *
 * Clears a frame flag.
 */
#define GST_VIDEO_FRAME_FLAG_UNSET(frame,flag) GST_MINI_OBJECT_FLAG_UNSET (frame, flag)

GstVideoFrame *gst_video_frame_new (void);

GType gst_video_frame_get_type (void);

G_END_DECLS

#endif

View file

@ -1,40 +0,0 @@
lib_LTLIBRARIES = libgstvdp-@GST_API_VERSION@.la
libgstvdp_@GST_API_VERSION@_la_SOURCES = \
gstvdpdevice.c \
gstvdputils.c \
gstvdpbuffer.c \
gstvdpbufferpool.c \
gstvdpvideobuffer.c \
gstvdpvideobufferpool.c \
gstvdpoutputbuffer.c \
gstvdpoutputbufferpool.c \
gstvdpvideosrcpad.c \
gstvdpoutputsrcpad.c \
gstvdpdecoder.c \
gstvdp.c
libgstvdp_@GST_API_VERSION@includedir = $(includedir)/gstreamer-@GST_API_VERSION@/gst/vdpau
libgstvdp_@GST_API_VERSION@include_HEADERS = \
gstvdpdevice.h \
gstvdputils.h \
gstvdpbuffer.h \
gstvdpbufferpool.h \
gstvdpvideobuffer.h \
gstvdpvideobufferpool.h \
gstvdpoutputbuffer.h \
gstvdpoutputbufferpool.h \
gstvdpvideosrcpad.h \
gstvdpoutputsrcpad.h \
gstvdpdecoder.h \
gstvdp.h
libgstvdp_@GST_API_VERSION@_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS)
libgstvdp_@GST_API_VERSION@_la_LIBADD = $(GST_LIBS) $(X11_LIBS) $(VDPAU_LIBS) \
-lgstvideo-$(GST_API_VERSION) \
../basevideodecoder/libgstbasevideodecoder.la
libgstvdp_@GST_API_VERSION@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_LT_LDFLAGS) $(GST_ALL_LDFLAGS)
libgstvdp_@GST_API_VERSION@_la_LIBTOOLFLAGS = --tag=disable-static

View file

@ -1,46 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include <gst/gst.h>
#include "gstvdpdevice.h"
#include "gstvdpvideobuffer.h"
#include "gstvdpvideosrcpad.h"
#include "gstvdpoutputbuffer.h"
#include "gstvdpoutputsrcpad.h"
#include "gstvdpdecoder.h"
#include "gstvdp.h"
GST_DEBUG_CATEGORY (gst_vdp_debug);

/* Registers all GstVdp GTypes up front (which creates their debug
 * categories) and then creates the library-wide "vdp" debug category. */
void
gst_vdp_init (void)
{
  /* do this so debug categories get created */
  gst_vdp_device_get_type ();
  gst_vdp_output_buffer_get_type ();
  gst_vdp_video_buffer_get_type ();
  gst_vdp_video_src_pad_get_type ();
  gst_vdp_output_src_pad_get_type ();
  gst_vdp_decoder_get_type ();

  GST_DEBUG_CATEGORY_INIT (gst_vdp_debug, "vdp", 0, "GstVdp debug category");
}

View file

@ -1,26 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_VDP_H_
#define _GST_VDP_H_
void gst_vdp_init(void);
#endif /* _GST_VDP_H_ */

View file

@ -1,86 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvdpbuffer.h"
/* Parent class pointer saved in class_init (currently unused for chaining). */
static GObjectClass *gst_vdp_buffer_parent_class;

/* Attaches @buffer to @bpool (or detaches it when @bpool is NULL). A weak
 * pointer keeps buffer->bpool from dangling if the pool is destroyed. */
void
gst_vdp_buffer_set_buffer_pool (GstVdpBuffer * buffer, GstVdpBufferPool * bpool)
{
  g_return_if_fail (GST_IS_VDP_BUFFER (buffer));

  /* FIX: remove the weak pointer registered on any previously attached
   * pool; the old code left it in place, so a dying old pool could later
   * clear buffer->bpool even though it pointed at a different pool. */
  if (buffer->bpool)
    g_object_remove_weak_pointer (G_OBJECT (buffer->bpool),
        (void **) &buffer->bpool);

  if (bpool) {
    g_return_if_fail (GST_IS_VDP_BUFFER_POOL (bpool));
    g_object_add_weak_pointer (G_OBJECT (bpool), (void **) &buffer->bpool);
  }

  buffer->bpool = bpool;
}
/* Tries to return the buffer to the pool it came from. Returns TRUE when
 * the pool accepted it, FALSE when there is no pool or it refused. */
gboolean
gst_vdp_buffer_revive (GstVdpBuffer * buffer)
{
  GstVdpBufferPool *pool = buffer->bpool;

  return pool ? gst_vdp_buffer_pool_put_buffer (pool, buffer) : FALSE;
}
/* Instance initializer: a fresh buffer starts without a pool. */
static void
gst_vdp_buffer_init (GstVdpBuffer * buffer, gpointer g_class)
{
  buffer->bpool = NULL;
}
/* Class initializer: only saves the parent class pointer; no vfuncs of
 * GstBuffer are overridden. */
static void
gst_vdp_buffer_class_init (gpointer g_class, gpointer class_data)
{
  gst_vdp_buffer_parent_class = g_type_class_peek_parent (g_class);
}
/* Lazily registers and returns the GstVdpBuffer GType (a GstBuffer
 * subclass; note the class struct is plain GstBufferClass). */
GType
gst_vdp_buffer_get_type (void)
{
  static GType _gst_vdp_buffer_type;

  if (G_UNLIKELY (_gst_vdp_buffer_type == 0)) {
    static const GTypeInfo info = {
      sizeof (GstBufferClass),
      NULL,                     /* base_init */
      NULL,                     /* base_finalize */
      gst_vdp_buffer_class_init,
      NULL,                     /* class_finalize */
      NULL,                     /* class_data */
      sizeof (GstVdpBuffer),
      0,                        /* n_preallocs */
      (GInstanceInitFunc) gst_vdp_buffer_init,
      NULL
    };
    _gst_vdp_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
        "GstVdpBuffer", &info, 0);
  }
  return _gst_vdp_buffer_type;
}

View file

@ -1,59 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_VDP_BUFFER_H_
#define _GST_VDP_BUFFER_H_

#include <gst/gst.h>

typedef struct _GstVdpBuffer GstVdpBuffer;

#include "gstvdpbufferpool.h"

#define GST_TYPE_VDP_BUFFER      (gst_vdp_buffer_get_type())
#define GST_IS_VDP_BUFFER(obj)   (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_BUFFER))
#define GST_VDP_BUFFER(obj)      (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_BUFFER, GstVdpBuffer))
/* FIX: parenthesize the macro argument so casts of expressions bind
 * correctly (e.g. GST_VDP_BUFFER_CAST (a ? b : c)). */
#define GST_VDP_BUFFER_CAST(obj) ((GstVdpBuffer *)(obj))

/* A GstBuffer subclass that remembers (via a weak pointer) the pool it
 * came from so it can be revived into the pool instead of destroyed. */
struct _GstVdpBuffer {
  GstBuffer buffer;

  GstVdpBufferPool *bpool;
};

void gst_vdp_buffer_set_buffer_pool (GstVdpBuffer *buffer, GstVdpBufferPool *bpool);
gboolean gst_vdp_buffer_revive (GstVdpBuffer * buffer);

/* Increases the refcount of @buffer and returns it. */
static inline GstVdpBuffer *
gst_vdp_buffer_ref (GstVdpBuffer *buffer)
{
  return (GstVdpBuffer *) gst_mini_object_ref (GST_MINI_OBJECT_CAST (buffer));
}

/* Decreases the refcount of @buffer, freeing it when it reaches zero. */
static inline void
gst_vdp_buffer_unref (GstVdpBuffer *buffer)
{
  gst_mini_object_unref (GST_MINI_OBJECT_CAST (buffer));
}

GType gst_vdp_buffer_get_type (void);

#endif

View file

@ -1,361 +0,0 @@
/*
* GStreamer
* Copyright (C) 2010 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
* with newer GLib versions (>= 2.31.0) */
#define GLIB_DISABLE_DEPRECATION_WARNINGS
#include "gstvdpbufferpool.h"
/* Instance-private state of the pool. */
struct _GstVdpBufferPoolPrivate
{
  GQueue *buffers;              /* idle buffers, guarded by mutex */
  GMutex *mutex;                /* guards buffers, caps and max_buffers */

  /* properties */
  guint max_buffers;            /* (guint) -1 means "unlimited" */
  GstCaps *caps;
  GstVdpDevice *device;
};

enum
{
  PROP_0,
  PROP_DEVICE,
  PROP_CAPS,
  PROP_MAX_BUFFERS
};

G_DEFINE_TYPE (GstVdpBufferPool, gst_vdp_buffer_pool, G_TYPE_OBJECT);

#define DEFAULT_MAX_BUFFERS 20
/* Detaches a pooled buffer from its pool and drops the pool's reference
 * to it; used when emptying the pool. */
static void
gst_vdp_buffer_free (GstVdpBuffer * buf)
{
  gst_vdp_buffer_set_buffer_pool (buf, NULL);
  gst_vdp_buffer_unref (buf);
}
/* Empties the pool, releasing every queued buffer. */
static void
gst_vdp_buffer_pool_clear (GstVdpBufferPool * bpool)
{
  GQueue *queue = bpool->priv->buffers;

  g_queue_foreach (queue, (GFunc) gst_vdp_buffer_free, NULL);
  g_queue_clear (queue);
}
/* Offers @buf back to the pool. Returns TRUE (taking a ref on @buf) when
 * the pool stored it, FALSE when the pool is full, the buffer has no
 * caps, or the subclass rejects its caps. */
gboolean
gst_vdp_buffer_pool_put_buffer (GstVdpBufferPool * bpool, GstVdpBuffer * buf)
{
  GstVdpBufferPoolPrivate *priv;
  gboolean res;
  GstVdpBufferPoolClass *bpool_class;
  GstCaps *caps;

  g_return_val_if_fail (GST_IS_VDP_BUFFER_POOL (bpool), FALSE);
  g_return_val_if_fail (GST_IS_VDP_BUFFER (buf), FALSE);

  priv = bpool->priv;
  g_return_val_if_fail (priv->caps, FALSE);

  g_mutex_lock (priv->mutex);

  /* pool full: refuse the buffer */
  if (priv->buffers->length == priv->max_buffers) {
    res = FALSE;
    goto done;
  }

  bpool_class = GST_VDP_BUFFER_POOL_GET_CLASS (bpool);
  caps = GST_BUFFER_CAPS (buf);
  if (!caps)
    goto no_caps;

  /* let the subclass decide whether this buffer still matches the pool */
  if (!bpool_class->check_caps (bpool, caps)) {
    res = FALSE;
    goto done;
  }

  gst_vdp_buffer_ref (buf);
  g_queue_push_tail (priv->buffers, buf);
  res = TRUE;

done:
  g_mutex_unlock (priv->mutex);
  return res;

no_caps:
  GST_WARNING ("Buffer doesn't have any caps");
  res = FALSE;
  goto done;
}
/* Pops an idle buffer from the pool, or asks the subclass to allocate a
 * new one when the pool is empty. Returns NULL (with @error set by the
 * subclass) on allocation failure. */
GstVdpBuffer *
gst_vdp_buffer_pool_get_buffer (GstVdpBufferPool * bpool, GError ** error)
{
  GstVdpBufferPoolPrivate *priv;
  GstVdpBuffer *buf;

  g_return_val_if_fail (GST_IS_VDP_BUFFER_POOL (bpool), NULL);

  priv = bpool->priv;
  g_return_val_if_fail (priv->caps, NULL);

  g_mutex_lock (priv->mutex);

  buf = g_queue_pop_head (priv->buffers);
  if (!buf) {
    GstVdpBufferPoolClass *bpool_class = GST_VDP_BUFFER_POOL_GET_CLASS (bpool);

    buf = bpool_class->alloc_buffer (bpool, error);
    if (!buf)
      goto done;

    gst_buffer_set_caps (GST_BUFFER_CAST (buf), priv->caps);
    gst_vdp_buffer_set_buffer_pool (buf, bpool);
  }

done:
  g_mutex_unlock (priv->mutex);
  return buf;
}
/* Sets the maximum number of idle buffers the pool may hold; (guint) -1
 * means unlimited. Shrinks the pool immediately if it is over the new
 * limit. */
void
gst_vdp_buffer_pool_set_max_buffers (GstVdpBufferPool * bpool,
    guint max_buffers)
{
  GstVdpBufferPoolPrivate *priv;

  g_return_if_fail (GST_IS_VDP_BUFFER_POOL (bpool));
  /* FIX: the old `g_return_if_fail (max_buffers >= -1)` compared a guint
   * against -1 promoted to G_MAXUINT, so the check failed (and the whole
   * function bailed out) for every value except G_MAXUINT. Every guint is
   * a valid argument, so no range check is needed. */

  priv = bpool->priv;

  g_mutex_lock (priv->mutex);

  if (max_buffers != (guint) -1) {
    /* trim the pool down to the new limit */
    while (max_buffers < priv->buffers->length) {
      GstVdpBuffer *buf;

      buf = g_queue_pop_tail (priv->buffers);
      gst_vdp_buffer_unref (buf);
    }
  }

  priv->max_buffers = max_buffers;

  g_mutex_unlock (priv->mutex);
}
/* Returns the pool's buffer limit ((guint) -1 meaning unlimited). */
guint
gst_vdp_buffer_pool_get_max_buffers (GstVdpBufferPool * bpool)
{
  GstVdpBufferPoolPrivate *priv;

  g_return_val_if_fail (GST_IS_VDP_BUFFER_POOL (bpool), 0);

  priv = bpool->priv;
  return priv->max_buffers;
}
/* Sets the caps pooled buffers must match. The subclass validates the
 * caps and decides (via @clear_bufs) whether existing buffers have to be
 * dropped. Invalid caps are rejected with a warning and leave the pool
 * unchanged. */
void
gst_vdp_buffer_pool_set_caps (GstVdpBufferPool * bpool, const GstCaps * caps)
{
  GstVdpBufferPoolPrivate *priv;
  GstVdpBufferPoolClass *bpool_class;
  gboolean clear_bufs;

  g_return_if_fail (GST_IS_VDP_BUFFER_POOL (bpool));
  g_return_if_fail (GST_IS_CAPS (caps));

  priv = bpool->priv;
  bpool_class = GST_VDP_BUFFER_POOL_GET_CLASS (bpool);

  g_mutex_lock (priv->mutex);

  if (!bpool_class->set_caps (bpool, caps, &clear_bufs))
    goto invalid_caps;

  if (clear_bufs)
    gst_vdp_buffer_pool_clear (bpool);

  if (priv->caps)
    gst_caps_unref (priv->caps);
  priv->caps = gst_caps_copy (caps);

done:
  g_mutex_unlock (priv->mutex);
  return;

invalid_caps:
  GST_WARNING ("Subclass didn't accept caps: %" GST_PTR_FORMAT, caps);
  goto done;
}
/* Returns the caps the pool is configured with (no new reference). */
const GstCaps *
gst_vdp_buffer_pool_get_caps (GstVdpBufferPool * bpool)
{
  GstVdpBufferPoolPrivate *priv;

  g_return_val_if_fail (GST_IS_VDP_BUFFER_POOL (bpool), NULL);

  priv = bpool->priv;
  return priv->caps;
}
/* Returns the GstVdpDevice the pool was constructed with (no new
 * reference). */
GstVdpDevice *
gst_vdp_buffer_pool_get_device (GstVdpBufferPool * bpool)
{
  GstVdpBufferPoolPrivate *priv;

  g_return_val_if_fail (GST_IS_VDP_BUFFER_POOL (bpool), NULL);

  priv = bpool->priv;
  return priv->device;
}
/* GObject property getter. */
static void
gst_vdp_buffer_pool_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstVdpBufferPool *bpool = (GstVdpBufferPool *) object;
  GstVdpBufferPoolPrivate *priv = bpool->priv;

  switch (prop_id) {
    case PROP_DEVICE:
      g_value_set_object (value, priv->device);
      break;
    case PROP_CAPS:
      g_value_set_pointer (value, priv->caps);
      break;
    case PROP_MAX_BUFFERS:
      /* FIX: the property is registered with g_param_spec_int, so the
       * GValue holds a gint; the old g_value_set_uint was a GValue type
       * mismatch. */
      g_value_set_int (value, (gint) priv->max_buffers);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject property setter. */
static void
gst_vdp_buffer_pool_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVdpBufferPool *bpool = (GstVdpBufferPool *) object;
  GstVdpBufferPoolPrivate *priv = bpool->priv;

  switch (prop_id) {
    case PROP_DEVICE:
      /* NOTE(review): no reference is taken on the device here and
       * finalize does not unref it — confirm the intended ownership with
       * the callers before changing this. */
      priv->device = g_value_get_object (value);
      break;
    case PROP_CAPS:
      gst_vdp_buffer_pool_set_caps (bpool, g_value_get_pointer (value));
      break;
    case PROP_MAX_BUFFERS:
      /* FIX: the property is registered with g_param_spec_int, so the
       * GValue holds a gint; the old g_value_get_uint was a GValue type
       * mismatch. -1 maps onto the (guint) -1 "unlimited" sentinel. */
      gst_vdp_buffer_pool_set_max_buffers (bpool,
          (guint) g_value_get_int (value));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject finalize: releases everything allocated in init. */
static void
gst_vdp_buffer_pool_finalize (GObject * object)
{
  GstVdpBufferPool *bpool = GST_VDP_BUFFER_POOL (object);
  GstVdpBufferPoolPrivate *priv = bpool->priv;

  /* FIX: the pooled buffers and the queue itself were previously leaked;
   * drop the buffers and free the queue before tearing down the rest. */
  gst_vdp_buffer_pool_clear (bpool);
  g_queue_free (priv->buffers);

  g_mutex_free (priv->mutex);

  if (priv->caps)
    gst_caps_unref (priv->caps);

  /* NOTE(review): priv->device is stored without a reference in
   * set_property, so it is deliberately not unreffed here. */

  G_OBJECT_CLASS (gst_vdp_buffer_pool_parent_class)->finalize (object);
}
/* Instance initializer: sets up the private struct, the empty buffer
 * queue and its lock, and the property defaults. */
static void
gst_vdp_buffer_pool_init (GstVdpBufferPool * bpool)
{
  GstVdpBufferPoolPrivate *priv;

  bpool->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE (bpool,
      GST_TYPE_VDP_BUFFER_POOL, GstVdpBufferPoolPrivate);

  priv->buffers = g_queue_new ();
  priv->mutex = g_mutex_new ();

  /* properties */
  priv->caps = NULL;
  priv->max_buffers = DEFAULT_MAX_BUFFERS;
}
/* Class init: register private data, GObject vfuncs and properties. */
static void
gst_vdp_buffer_pool_class_init (GstVdpBufferPoolClass * bpool_klass)
{
  GObjectClass *object_class = G_OBJECT_CLASS (bpool_klass);

  g_type_class_add_private (bpool_klass, sizeof (GstVdpBufferPoolPrivate));

  object_class->get_property = gst_vdp_buffer_pool_get_property;
  object_class->set_property = gst_vdp_buffer_pool_set_property;
  object_class->finalize = gst_vdp_buffer_pool_finalize;

  /**
   * GstVdpBufferPool:device:
   *
   * The #GstVdpDevice this pool is bound to.  Construct-only; the pool
   * does not take its own reference (see set_property/finalize).
   */
  g_object_class_install_property
      (object_class,
      PROP_DEVICE,
      g_param_spec_object ("device",
          "Device",
          "The GstVdpDevice this pool is bound to",
          GST_TYPE_VDP_DEVICE, G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY));

  /**
   * GstVdpBufferPool:caps:
   *
   * The video object capabilities represented as a #GstCaps. This
   * shall hold at least the "width" and "height" properties.
   */
  g_object_class_install_property
      (object_class,
      PROP_CAPS,
      g_param_spec_pointer ("caps",
          "Caps", "The buffer capabilities", G_PARAM_READWRITE));

  /**
   * GstVdpBufferPool:max-buffers:
   *
   * The maximum number of buffer in the pool. Or -1, the pool
   * will hold as many objects as possible.
   */
  /* registered as a *signed* int so that -1 can express "unlimited" */
  g_object_class_install_property
      (object_class,
      PROP_MAX_BUFFERS,
      g_param_spec_int ("max-buffers",
          "Max Buffers",
          "The maximum number of buffers in the pool, or -1 for unlimited",
          -1, G_MAXINT32, DEFAULT_MAX_BUFFERS, G_PARAM_READWRITE));
}

View file

@ -1,74 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_VDP_BUFFER_POOL_H_
#define _GST_VDP_BUFFER_POOL_H_

#include <gst/gst.h>

typedef struct _GstVdpBufferPool GstVdpBufferPool;

#include "gstvdpdevice.h"
#include "gstvdpbuffer.h"

G_BEGIN_DECLS

#define GST_TYPE_VDP_BUFFER_POOL             (gst_vdp_buffer_pool_get_type ())
#define GST_VDP_BUFFER_POOL(obj)             (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_BUFFER_POOL, GstVdpBufferPool))
#define GST_VDP_BUFFER_POOL_CLASS(klass)     (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VDP_BUFFER_POOL, GstVdpBufferPoolClass))
#define GST_IS_VDP_BUFFER_POOL(obj)          (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_BUFFER_POOL))
#define GST_IS_VDP_BUFFER_POOL_CLASS(klass)  (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VDP_BUFFER_POOL))
#define GST_VDP_BUFFER_POOL_GET_CLASS(obj)   (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_BUFFER_POOL, GstVdpBufferPoolClass))

typedef struct _GstVdpBufferPoolClass GstVdpBufferPoolClass;
typedef struct _GstVdpBufferPoolPrivate GstVdpBufferPoolPrivate;

/* Abstract pool of GstVdpBuffer objects; concrete pools (video/output)
 * subclass this and implement the vfuncs below. */
struct _GstVdpBufferPool
{
  GObject object;

  GstVdpBufferPoolPrivate *priv;
};

struct _GstVdpBufferPoolClass
{
  GObjectClass object_class;

  /* allocate a brand new buffer for the pool's current caps */
  GstVdpBuffer *(*alloc_buffer) (GstVdpBufferPool *bpool, GError **error);
  /* apply new caps; *clear_bufs says whether cached buffers must be dropped */
  gboolean (*set_caps) (GstVdpBufferPool *bpool, const GstCaps *caps, gboolean *clear_bufs);
  /* validate caps without applying them */
  gboolean (*check_caps) (GstVdpBufferPool *bpool, const GstCaps *caps);
};

/* return a buffer to the pool (called from buffer finalization) */
gboolean gst_vdp_buffer_pool_put_buffer (GstVdpBufferPool *bpool, GstVdpBuffer *buf);
/* take (or allocate) a buffer from the pool; caller owns the reference */
GstVdpBuffer *gst_vdp_buffer_pool_get_buffer (GstVdpBufferPool * bpool, GError **error);

void gst_vdp_buffer_pool_set_max_buffers (GstVdpBufferPool *bpool, guint max_buffers);
guint gst_vdp_buffer_pool_get_max_buffers (GstVdpBufferPool *bpool);

void gst_vdp_buffer_pool_set_caps (GstVdpBufferPool *bpool, const GstCaps *caps);
const GstCaps *gst_vdp_buffer_pool_get_caps (GstVdpBufferPool * bpool);

GstVdpDevice *gst_vdp_buffer_pool_get_device (GstVdpBufferPool * bpool);

GType gst_vdp_buffer_pool_get_type (void) G_GNUC_CONST;

G_END_DECLS

#endif /* _GST_VDP_BUFFER_POOL_H_ */

View file

@ -1,427 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "gstvdputils.h"
#include "gstvdpvideobuffer.h"
#include "gstvdpoutputbufferpool.h"

#include "gstvdpoutputsrcpad.h"

GST_DEBUG_CATEGORY_STATIC (gst_vdp_output_src_pad_debug);
#define GST_CAT_DEFAULT gst_vdp_output_src_pad_debug

enum
{
  PROP_0,
  PROP_DEVICE
};

typedef enum _GstVdpOutputSrcPadFormat GstVdpOutputSrcPadFormat;

/* Which representation the pad is currently pushing downstream. */
enum _GstVdpOutputSrcPadFormat
{
  GST_VDP_OUTPUT_SRC_PAD_FORMAT_RGB,
  GST_VDP_OUTPUT_SRC_PAD_FORMAT_VDPAU
};

/* Source pad that can push VDPAU output buffers either natively or
 * downloaded into downstream-allocated RGB buffers. */
struct _GstVdpOutputSrcPad
{
  GstPad pad;

  GstCaps *caps;                /* caps the device supports, cached */
  GstCaps *output_caps;         /* caps used for the internal pool (RGB path) */

  GstVdpOutputSrcPadFormat output_format;
  VdpRGBAFormat rgba_format;
  gint width, height;

  GstVdpBufferPool *bpool;      /* only used in the RGB path */
  gboolean lock_caps;           /* while TRUE, acceptcaps only accepts current caps */

  /* properties */
  GstVdpDevice *device;
};

struct _GstVdpOutputSrcPadClass
{
  GstPadClass pad_class;
};

#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdp_output_src_pad_debug, "vdpoutputsrcpad", 0, "GstVdpOutputSrcPad");

G_DEFINE_TYPE_WITH_CODE (GstVdpOutputSrcPad, gst_vdp_output_src_pad,
    GST_TYPE_PAD, DEBUG_INIT ());
/* gst_vdp_output_src_pad_push:
 * @vdp_pad: the pad
 * @output_buf: (transfer full): the VDPAU output buffer to push
 * @error: return location for a #GError (RGB download failures)
 *
 * Pushes @output_buf downstream.  In the RGB path the surface content is
 * first downloaded into a downstream-allocated system-memory buffer and
 * @output_buf is released; in the VDPAU path the buffer is pushed as-is.
 * Takes ownership of @output_buf in all paths.
 */
GstFlowReturn
gst_vdp_output_src_pad_push (GstVdpOutputSrcPad * vdp_pad,
    GstVdpOutputBuffer * output_buf, GError ** error)
{
  GstPad *pad;
  GstBuffer *outbuf;

  g_return_val_if_fail (GST_IS_VDP_OUTPUT_SRC_PAD (vdp_pad), GST_FLOW_ERROR);
  g_return_val_if_fail (GST_IS_VDP_OUTPUT_BUFFER (output_buf), GST_FLOW_ERROR);

  pad = (GstPad *) vdp_pad;

  if (G_UNLIKELY (!GST_PAD_CAPS (pad)))
    return GST_FLOW_NOT_NEGOTIATED;

  switch (vdp_pad->output_format) {
    case GST_VDP_OUTPUT_SRC_PAD_FORMAT_RGB:
    {
      GstFlowReturn ret;
      guint size;

      gst_vdp_output_buffer_calculate_size (output_buf, &size);

      /* lock_caps prevents acceptcaps from renegotiating while the
       * downstream allocation for the current caps is in flight */
      vdp_pad->lock_caps = TRUE;
      ret = gst_pad_alloc_buffer (pad, 0, size, GST_PAD_CAPS (vdp_pad),
          &outbuf);
      vdp_pad->lock_caps = FALSE;

      if (ret != GST_FLOW_OK) {
        gst_buffer_unref (GST_BUFFER_CAST (output_buf));
        return ret;
      }

      /* copy the VDPAU surface contents into the sysmem buffer */
      if (!gst_vdp_output_buffer_download (output_buf, outbuf, error)) {
        gst_buffer_unref (GST_BUFFER_CAST (output_buf));
        gst_buffer_unref (outbuf);
        return GST_FLOW_ERROR;
      }

      /* keep flags and timestamps from the source buffer */
      gst_buffer_copy_metadata (outbuf, (const GstBuffer *) output_buf,
          GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);
      gst_buffer_unref (GST_BUFFER_CAST (output_buf));
      break;
    }
    case GST_VDP_OUTPUT_SRC_PAD_FORMAT_VDPAU:
    {
      /* native path: push the VDPAU buffer itself */
      outbuf = GST_BUFFER_CAST (output_buf);
      break;
    }
    default:
      g_assert_not_reached ();
      break;
  }

  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (vdp_pad));

  return gst_pad_push (pad, outbuf);
}
/* Take a fresh output buffer from the pad's own pool (RGB path).
 * A zero-sized downstream allocation is done first purely to trigger
 * caps (re)negotiation; its failure is deliberately ignored and the
 * buffer still comes from the pool.  NOTE(review): confirm ignoring the
 * negotiation result here is intentional. */
static GstFlowReturn
gst_vdp_output_src_pad_create_buffer (GstVdpOutputSrcPad * vdp_pad,
    GstVdpOutputBuffer ** output_buf, GError ** error)
{
  GstFlowReturn ret;
  GstBuffer *neg_buf;

  /* negotiate */
  ret = gst_pad_alloc_buffer_and_set_caps (GST_PAD_CAST (vdp_pad),
      GST_BUFFER_OFFSET_NONE, 0, GST_PAD_CAPS (vdp_pad), &neg_buf);
  if (ret == GST_FLOW_OK)
    gst_buffer_unref (neg_buf);

  *output_buf =
      (GstVdpOutputBuffer *) gst_vdp_buffer_pool_get_buffer (vdp_pad->bpool,
      error);
  if (!*output_buf)
    return GST_FLOW_ERROR;

  return GST_FLOW_OK;
}
/* Ask downstream for a buffer with @caps and verify the peer actually
 * handed back a GstVdpOutputBuffer.  On a wrong buffer type the buffer
 * is dropped, @error is set and GST_FLOW_ERROR is returned. */
static GstFlowReturn
gst_vdp_output_src_pad_alloc_with_caps (GstVdpOutputSrcPad * vdp_pad,
    GstCaps * caps, GstVdpOutputBuffer ** output_buf, GError ** error)
{
  GstFlowReturn ret;

  ret = gst_pad_alloc_buffer_and_set_caps ((GstPad *) vdp_pad, 0, 0, caps,
      (GstBuffer **) output_buf);
  if (ret != GST_FLOW_OK)
    return ret;

  if (GST_IS_VDP_OUTPUT_BUFFER (*output_buf))
    return GST_FLOW_OK;

  /* peer returned a plain buffer: drop it and report the error */
  gst_buffer_unref (GST_BUFFER (*output_buf));
  g_set_error (error, GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED,
      "Sink element returned buffer of wrong type");
  return GST_FLOW_ERROR;
}
/* Allocate an output buffer suitable for the negotiated format.
 * Returns GST_FLOW_NOT_NEGOTIATED if the pad has no caps yet. */
GstFlowReturn
gst_vdp_output_src_pad_alloc_buffer (GstVdpOutputSrcPad * vdp_pad,
    GstVdpOutputBuffer ** output_buf, GError ** error)
{
  GstCaps *caps;

  g_return_val_if_fail (GST_IS_VDP_OUTPUT_SRC_PAD (vdp_pad), GST_FLOW_ERROR);

  caps = GST_PAD_CAPS (vdp_pad);
  if (caps == NULL)
    return GST_FLOW_NOT_NEGOTIATED;

  switch (vdp_pad->output_format) {
    case GST_VDP_OUTPUT_SRC_PAD_FORMAT_RGB:
      /* RGB path: buffer comes from our own pool */
      return gst_vdp_output_src_pad_create_buffer (vdp_pad, output_buf, error);

    case GST_VDP_OUTPUT_SRC_PAD_FORMAT_VDPAU:
      /* native path: downstream allocates the buffer */
      return gst_vdp_output_src_pad_alloc_with_caps (vdp_pad, caps,
          output_buf, error);

    default:
      g_assert_not_reached ();
      break;
  }

  return GST_FLOW_OK;
}
/* GstPad acceptcaps: while a fixed-caps downstream allocation is in
 * flight (lock_caps set), only the currently negotiated caps are
 * acceptable; otherwise everything is. */
static gboolean
gst_vdp_output_src_pad_acceptcaps (GstPad * pad, GstCaps * caps)
{
  GstVdpOutputSrcPad *vdp_pad = GST_VDP_OUTPUT_SRC_PAD (pad);

  return vdp_pad->lock_caps ?
      gst_caps_is_equal_fixed (caps, GST_PAD_CAPS (pad)) : TRUE;
}
/* GstPad setcaps: configure the pad for either RGB output (downloaded
 * through an internal buffer pool) or native VDPAU output.  Returns
 * FALSE on unparsable or unsupported caps. */
static gboolean
gst_vdp_output_src_pad_setcaps (GstPad * pad, GstCaps * caps)
{
  GstVdpOutputSrcPad *vdp_pad = GST_VDP_OUTPUT_SRC_PAD (pad);

  const GstStructure *structure;

  structure = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_int (structure, "width", &vdp_pad->width))
    return FALSE;
  if (!gst_structure_get_int (structure, "height", &vdp_pad->height))
    return FALSE;

  if (gst_structure_has_name (structure, "video/x-raw-rgb")) {
    if (!gst_vdp_caps_to_rgba_format (caps, &vdp_pad->rgba_format))
      return FALSE;

    /* create buffer pool if we don't have one */
    if (!vdp_pad->bpool)
      vdp_pad->bpool = gst_vdp_output_buffer_pool_new (vdp_pad->device);

    if (vdp_pad->output_caps)
      gst_caps_unref (vdp_pad->output_caps);

    /* the pool allocates VDPAU-side buffers matching the RGB caps */
    vdp_pad->output_caps = gst_caps_new_simple ("video/x-vdpau-output",
        "rgba-format", G_TYPE_INT, vdp_pad->rgba_format,
        "width", G_TYPE_INT, vdp_pad->width, "height", G_TYPE_INT,
        vdp_pad->height, NULL);
    gst_vdp_buffer_pool_set_caps (vdp_pad->bpool, vdp_pad->output_caps);

    vdp_pad->output_format = GST_VDP_OUTPUT_SRC_PAD_FORMAT_RGB;
  } else if (gst_structure_has_name (structure, "video/x-vdpau-output")) {
    if (!gst_structure_get_int (structure, "rgba-format",
            (gint *) & vdp_pad->rgba_format))
      return FALSE;

    /* don't need the buffer pool */
    /* NOTE(review): GstVdpBufferPool derives from GObject, not GstObject;
     * gst_object_unref here vs g_object_unref in activate_push — verify. */
    if (vdp_pad->bpool) {
      gst_object_unref (vdp_pad->bpool);
      vdp_pad->bpool = NULL;
    }

    vdp_pad->output_format = GST_VDP_OUTPUT_SRC_PAD_FORMAT_VDPAU;
  } else
    return FALSE;

  return TRUE;
}
/* GstPad getcaps: report the cached device-derived caps when available,
 * otherwise fall back to a copy of the template caps. */
static GstCaps *
gst_vdp_output_src_pad_getcaps (GstPad * pad)
{
  GstVdpOutputSrcPad *vdp_pad = (GstVdpOutputSrcPad *) pad;
  const GstCaps *templ_caps;

  if (vdp_pad->caps != NULL)
    return gst_caps_ref (vdp_pad->caps);

  templ_caps = gst_pad_get_pad_template_caps (pad);
  if (templ_caps != NULL)
    return gst_caps_copy (templ_caps);

  return NULL;
}
/* GstPad activatepush: on deactivation drop all cached state so the pad
 * can be renegotiated from scratch on the next activation. */
static gboolean
gst_vdp_output_src_pad_activate_push (GstPad * pad, gboolean active)
{
  GstVdpOutputSrcPad *vdp_pad = GST_VDP_OUTPUT_SRC_PAD (pad);

  if (active)
    return TRUE;

  if (vdp_pad->caps != NULL)
    gst_caps_unref (vdp_pad->caps);
  vdp_pad->caps = NULL;

  if (vdp_pad->output_caps != NULL)
    gst_caps_unref (vdp_pad->output_caps);
  vdp_pad->output_caps = NULL;

  if (vdp_pad->bpool != NULL)
    g_object_unref (vdp_pad->bpool);
  vdp_pad->bpool = NULL;

  if (vdp_pad->device != NULL)
    g_object_unref (vdp_pad->device);
  vdp_pad->device = NULL;

  return TRUE;
}
/* Create a new source pad named @name from @templ. */
GstVdpOutputSrcPad *
gst_vdp_output_src_pad_new (GstPadTemplate * templ, const gchar * name)
{
  return g_object_new (GST_TYPE_VDP_OUTPUT_SRC_PAD,
      "name", name, "template", templ, "direction", GST_PAD_SRC, NULL);
}
/* Recompute the cached pad caps from the device's supported output
 * formats, intersected with the template caps when the pad has one. */
static void
gst_vdp_output_src_pad_update_caps (GstVdpOutputSrcPad * vdp_pad)
{
  GstCaps *allowed;
  const GstCaps *templ_caps;

  if (vdp_pad->caps != NULL)
    gst_caps_unref (vdp_pad->caps);

  allowed = gst_vdp_output_buffer_get_allowed_caps (vdp_pad->device);

  templ_caps = gst_pad_get_pad_template_caps (GST_PAD (vdp_pad));
  if (templ_caps != NULL) {
    vdp_pad->caps = gst_caps_intersect (allowed, templ_caps);
    gst_caps_unref (allowed);
  } else {
    vdp_pad->caps = allowed;
  }
}
/* GObject::get_property implementation. */
static void
gst_vdp_output_src_pad_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstVdpOutputSrcPad *self = (GstVdpOutputSrcPad *) object;

  if (prop_id == PROP_DEVICE)
    g_value_set_object (value, self->device);
  else
    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
}
/* GObject::set_property implementation.  Setting "device" replaces any
 * previous device reference and refreshes the cached pad caps. */
static void
gst_vdp_output_src_pad_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVdpOutputSrcPad *self = (GstVdpOutputSrcPad *) object;

  switch (prop_id) {
    case PROP_DEVICE:{
      GstVdpDevice *prev = self->device;

      self->device = g_value_dup_object (value);
      if (prev)
        g_object_unref (prev);

      gst_vdp_output_src_pad_update_caps (self);
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Instance init: clear all state and install the pad functions. */
static void
gst_vdp_output_src_pad_init (GstVdpOutputSrcPad * vdp_pad)
{
  GstPad *pad = GST_PAD (vdp_pad);

  vdp_pad->device = NULL;
  vdp_pad->caps = NULL;
  vdp_pad->output_caps = NULL;
  vdp_pad->bpool = NULL;
  vdp_pad->lock_caps = FALSE;

  gst_pad_set_getcaps_function (pad,
      GST_DEBUG_FUNCPTR (gst_vdp_output_src_pad_getcaps));
  gst_pad_set_setcaps_function (pad,
      GST_DEBUG_FUNCPTR (gst_vdp_output_src_pad_setcaps));
  gst_pad_set_acceptcaps_function (pad,
      GST_DEBUG_FUNCPTR (gst_vdp_output_src_pad_acceptcaps));
  gst_pad_set_activatepush_function (pad,
      GST_DEBUG_FUNCPTR (gst_vdp_output_src_pad_activate_push));
}
/* Class init: install GObject property vfuncs and the "device" property. */
static void
gst_vdp_output_src_pad_class_init (GstVdpOutputSrcPadClass * klass)
{
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  object_class->get_property = gst_vdp_output_src_pad_get_property;
  object_class->set_property = gst_vdp_output_src_pad_set_property;

  /**
   * GstVdpOutputSrcPad:device:
   *
   * The #GstVdpDevice the pad should use.
   */
  g_object_class_install_property
      (object_class,
      PROP_DEVICE,
      g_param_spec_object ("device",
          "Device",
          "The GstVdpDevice the pad should use",
          GST_TYPE_VDP_DEVICE, G_PARAM_READWRITE));
}

View file

@ -1,51 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_VDP_OUTPUT_SRC_PAD_H_
#define _GST_VDP_OUTPUT_SRC_PAD_H_

#include <gst/gst.h>

#include "gstvdpdevice.h"
#include "gstvdpoutputbuffer.h"

G_BEGIN_DECLS

#define GST_TYPE_VDP_OUTPUT_SRC_PAD             (gst_vdp_output_src_pad_get_type ())
#define GST_VDP_OUTPUT_SRC_PAD(obj)             (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_OUTPUT_SRC_PAD, GstVdpOutputSrcPad))
#define GST_VDP_OUTPUT_SRC_PAD_CLASS(klass)     (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VDP_OUTPUT_SRC_PAD, GstVdpOutputSrcPadClass))
#define GST_IS_VDP_OUTPUT_SRC_PAD(obj)          (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_OUTPUT_SRC_PAD))
#define GST_IS_VDP_OUTPUT_SRC_PAD_CLASS(klass)  (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VDP_OUTPUT_SRC_PAD))
#define GST_VDP_OUTPUT_SRC_PAD_GET_CLASS(obj)   (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_OUTPUT_SRC_PAD, GstVdpOutputSrcPadClass))

typedef struct _GstVdpOutputSrcPad GstVdpOutputSrcPad;
typedef struct _GstVdpOutputSrcPadClass GstVdpOutputSrcPadClass;

/* push takes ownership of @output_buf in all cases */
GstFlowReturn gst_vdp_output_src_pad_push (GstVdpOutputSrcPad *vdp_pad, GstVdpOutputBuffer *output_buf, GError **error);
GstFlowReturn gst_vdp_output_src_pad_alloc_buffer (GstVdpOutputSrcPad *vdp_pad, GstVdpOutputBuffer **output_buf, GError **error);
/* NOTE(review): declared here but no definition exists in
 * gstvdpoutputsrcpad.c — looks like a dead prototype; confirm. */
GstFlowReturn gst_vdp_output_src_pad_get_device (GstVdpOutputSrcPad *vdp_pad, GstVdpDevice **device, GError **error);

GstVdpOutputSrcPad *gst_vdp_output_src_pad_new (GstPadTemplate *templ, const gchar *name);

GType gst_vdp_output_src_pad_get_type (void);

G_END_DECLS

#endif /* _GST_VDP_OUTPUT_SRC_PAD_H_ */

View file

@ -1,138 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "gstvdpvideobuffer.h"
#include "gstvdputils.h"
/* Fold the pixel-aspect-ratio into the "width" field and remove the
 * "pixel-aspect-ratio" field from @structure.
 *
 * Fix: the return value of gst_structure_get_int() was ignored, so a
 * structure carrying a PAR but no "width" field scaled an uninitialized
 * integer (undefined behavior).  Only scale when "width" is present;
 * the PAR field is removed either way. */
static void
gst_vdp_video_remove_pixel_aspect_ratio (GstStructure * structure)
{
  gint par_n, par_d;

  if (gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_n,
          &par_d)) {
    gint width;

    if (gst_structure_get_int (structure, "width", &width)) {
      width = gst_util_uint64_scale_int (width, par_n, par_d);
      gst_structure_set (structure, "width", G_TYPE_INT, width, NULL);
    }
    gst_structure_remove_field (structure, "pixel-aspect-ratio");
  }
}
/* gst_vdp_video_to_output_caps:
 * @video_caps: caps whose structures are all "video/x-vdpau-video"
 *
 * Converts each video structure into a "video/x-vdpau-output" structure
 * and additionally appends a "video/x-raw-rgb" variant of it.  The loop
 * bound is the size of the *input* caps, so the structures appended
 * during iteration are not revisited.
 *
 * Returns: (transfer full): the converted caps, or %NULL if any
 * structure was not of type video/x-vdpau-video.
 */
GstCaps *
gst_vdp_video_to_output_caps (GstCaps * video_caps)
{
  GstCaps *output_caps;
  gint i;

  g_return_val_if_fail (GST_IS_CAPS (video_caps), NULL);

  output_caps = gst_caps_copy (video_caps);
  for (i = 0; i < gst_caps_get_size (video_caps); i++) {
    GstStructure *structure, *rgb_structure;

    structure = gst_caps_get_structure (output_caps, i);
    if (!gst_structure_has_name (structure, "video/x-vdpau-video"))
      goto not_video_error;

    rgb_structure = gst_structure_copy (structure);

    /* in-place conversion to the VDPAU output type */
    gst_structure_set_name (structure, "video/x-vdpau-output");
    gst_structure_remove_field (structure, "chroma-type");
    gst_vdp_video_remove_pixel_aspect_ratio (structure);

    /* the copy becomes the system-memory RGB alternative */
    gst_structure_set_name (rgb_structure, "video/x-raw-rgb");
    gst_structure_remove_field (rgb_structure, "chroma-type");
    gst_vdp_video_remove_pixel_aspect_ratio (rgb_structure);
    gst_caps_append_structure (output_caps, rgb_structure);
  }

  return output_caps;

error:
  gst_caps_unref (output_caps);
  return NULL;

not_video_error:
  GST_WARNING ("The caps weren't of type \"video/x-vdpau-video\"");
  goto error;
}
/* gst_vdp_yuv_to_video_caps:
 * @yuv_caps: caps whose structures are all "video/x-raw-yuv"
 *
 * Rewrites each YUV structure into the equivalent "video/x-vdpau-video"
 * structure by mapping its fourcc to a VdpChromaType via the formats[]
 * table.
 *
 * Fix: the inner format-lookup loop reused the outer index variable 'i',
 * clobbering the caps iteration so only the first structure (and possibly
 * out-of-range indices) would be processed.  Use a separate index.
 *
 * Returns: (transfer full): the converted caps, or %NULL on failure.
 */
GstCaps *
gst_vdp_yuv_to_video_caps (GstCaps * yuv_caps)
{
  GstCaps *video_caps;
  gint i, j;

  g_return_val_if_fail (GST_IS_CAPS (yuv_caps), NULL);

  video_caps = gst_caps_copy (yuv_caps);
  for (i = 0; i < gst_caps_get_size (video_caps); i++) {
    GstStructure *structure;
    guint32 fourcc;
    VdpChromaType chroma_type;

    structure = gst_caps_get_structure (video_caps, i);
    if (!gst_structure_has_name (structure, "video/x-raw-yuv"))
      goto not_yuv_error;

    if (!gst_structure_get_fourcc (structure, "format", &fourcc))
      goto no_format_error;

    chroma_type = -1;
    for (j = 0; j < G_N_ELEMENTS (formats); j++) {
      if (formats[j].fourcc == fourcc) {
        chroma_type = formats[j].chroma_type;
        break;
      }
    }
    if (chroma_type == -1)
      goto no_chroma_error;

    /* now we transform the caps */
    gst_structure_set_name (structure, "video/x-vdpau-video");
    gst_structure_remove_field (structure, "format");
    gst_structure_set (structure, "chroma-type", G_TYPE_INT, chroma_type, NULL);
  }

  return video_caps;

error:
  gst_caps_unref (video_caps);
  return NULL;

not_yuv_error:
  GST_WARNING ("The caps weren't of type \"video/x-raw-yuv\"");
  goto error;

no_format_error:
  GST_WARNING ("The caps didn't have a \"fourcc\" field");
  goto error;

no_chroma_error:
  GST_WARNING ("The caps had an invalid \"fourcc\" field");
  goto error;
}

View file

@ -1,502 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvdpvideobuffer.h"
/* Debug category for this file; DEBUG_INIT is invoked once from
 * gst_vdp_video_buffer_get_type() when the type is registered. */
GST_DEBUG_CATEGORY_STATIC (gst_vdp_video_buffer_debug);
#define GST_CAT_DEFAULT gst_vdp_video_buffer_debug

#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdp_video_buffer_debug, "vdpvideobuffer", 0, "VDPAU video buffer");
/* gst_vdp_video_buffer_new:
 * @device: the #GstVdpDevice to allocate the surface on
 * @chroma_type: the VdpChromaType of the surface
 * @width: surface width in pixels
 * @height: surface height in pixels
 * @error: return location for a #GError
 *
 * Allocates a VdpVideoSurface on @device and wraps it in a new
 * #GstVdpVideoBuffer that holds a reference to @device.
 *
 * Returns: (transfer full): the new buffer, or %NULL with @error set if
 * surface creation failed.
 */
GstVdpVideoBuffer *
gst_vdp_video_buffer_new (GstVdpDevice * device, VdpChromaType chroma_type,
    gint width, gint height, GError ** error)
{
  GstVdpVideoBuffer *buffer;
  VdpStatus status;
  VdpVideoSurface surface;

  g_return_val_if_fail (GST_IS_VDP_DEVICE (device), NULL);

  status = device->vdp_video_surface_create (device->device, chroma_type, width,
      height, &surface);
  if (status != VDP_STATUS_OK)
    goto create_error;

  buffer =
      (GstVdpVideoBuffer *) gst_mini_object_new (GST_TYPE_VDP_VIDEO_BUFFER);

  buffer->device = g_object_ref (device);
  buffer->surface = surface;

  return buffer;

create_error:
  g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_READ,
      "Couldn't create a VdpVideoSurface, error returned from vdpau was: %s",
      device->vdp_get_error_string (status));
  return NULL;
}
/* parent class pointer, set in gst_vdp_video_buffer_class_init */
static GObjectClass *gst_vdp_video_buffer_parent_class;

/* Mini-object finalize: if gst_vdp_buffer_revive() returns TRUE the
 * buffer was reclaimed (presumably recycled into its pool — confirm in
 * gstvdpbuffer.c) and must not be destroyed.  Otherwise the VDPAU
 * surface is destroyed, the device reference dropped, and finalization
 * chained up. */
static void
gst_vdp_video_buffer_finalize (GstVdpVideoBuffer * buffer)
{
  GstVdpDevice *device;
  VdpStatus status;

  if (gst_vdp_buffer_revive (GST_VDP_BUFFER_CAST (buffer)))
    return;

  device = buffer->device;

  status = device->vdp_video_surface_destroy (buffer->surface);
  if (status != VDP_STATUS_OK)
    GST_ERROR
        ("Couldn't destroy the buffers VdpVideoSurface, error returned was: %s",
        device->vdp_get_error_string (status));

  g_object_unref (buffer->device);

  GST_MINI_OBJECT_CLASS (gst_vdp_video_buffer_parent_class)->finalize
      (GST_MINI_OBJECT (buffer));
}
/* Instance init: no device bound yet, no surface allocated. */
static void
gst_vdp_video_buffer_init (GstVdpVideoBuffer * buffer, gpointer g_class)
{
  buffer->surface = VDP_INVALID_HANDLE;
  buffer->device = NULL;
}
/* Class init: remember the parent class and install our finalize. */
static void
gst_vdp_video_buffer_class_init (gpointer g_class, gpointer class_data)
{
  GstMiniObjectClass *mo_class = GST_MINI_OBJECT_CLASS (g_class);

  gst_vdp_video_buffer_parent_class = g_type_class_peek_parent (g_class);

  mo_class->finalize =
      (GstMiniObjectFinalizeFunction) gst_vdp_video_buffer_finalize;
}
/* Lazily register and return the GstVdpVideoBuffer GType.
 * NOTE(review): class_size is sizeof (GstBufferClass) — presumably the
 * GstVdpBuffer class adds no members; confirm against gstvdpbuffer.h.
 * The check-then-register here is not thread-safe; it relies on first
 * use happening from a single thread (e.g. plugin init). */
GType
gst_vdp_video_buffer_get_type (void)
{
  static GType _gst_vdp_video_buffer_type;

  if (G_UNLIKELY (_gst_vdp_video_buffer_type == 0)) {
    static const GTypeInfo info = {
      sizeof (GstBufferClass),
      NULL,
      NULL,
      gst_vdp_video_buffer_class_init,
      NULL,
      NULL,
      sizeof (GstVdpVideoBuffer),
      0,
      (GInstanceInitFunc) gst_vdp_video_buffer_init,
      NULL
    };
    _gst_vdp_video_buffer_type = g_type_register_static (GST_TYPE_VDP_BUFFER,
        "GstVdpVideoBuffer", &info, 0);

    DEBUG_INIT ();
  }
  return _gst_vdp_video_buffer_type;
}
/* Build template-style caps listing every supported chroma type and YUV
 * format with a generic 1-4096 size range.  When @filter is TRUE, only
 * entries matching @chroma_type are included. */
GstCaps *
gst_vdp_video_buffer_get_caps (gboolean filter, VdpChromaType chroma_type)
{
  GstCaps *caps, *yuv_caps;
  gint i;

  /* one video/x-vdpau-video structure per chroma type */
  caps = gst_caps_new_empty ();
  for (i = 0; i < G_N_ELEMENTS (chroma_types); i++) {
    if (filter && chroma_types[i] != chroma_type)
      continue;

    gst_caps_append_structure (caps,
        gst_structure_new ("video/x-vdpau-video",
            "chroma-type", G_TYPE_INT, chroma_types[i],
            "width", GST_TYPE_INT_RANGE, 1, 4096,
            "height", GST_TYPE_INT_RANGE, 1, 4096, NULL));
  }

  /* one video/x-raw-yuv structure per fourcc */
  yuv_caps = gst_caps_new_empty ();
  for (i = 0; i < G_N_ELEMENTS (formats); i++) {
    if (filter && formats[i].chroma_type != chroma_type)
      continue;

    gst_caps_append_structure (yuv_caps,
        gst_structure_new ("video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, formats[i].fourcc,
            "width", GST_TYPE_INT_RANGE, 1, 4096,
            "height", GST_TYPE_INT_RANGE, 1, 4096, NULL));
  }

  gst_caps_append (caps, yuv_caps);

  return caps;
}
/* gst_vdp_video_buffer_get_allowed_caps:
 * @device: the #GstVdpDevice to query
 *
 * Queries the VDPAU implementation for the chroma types and YCbCr
 * formats it actually supports (including maximum surface dimensions)
 * and builds the corresponding caps.  Query errors are logged and the
 * caps built so far are returned.
 *
 * Returns: (transfer full): video/x-vdpau-video caps followed by the
 * matching video/x-raw-yuv caps.
 */
GstCaps *
gst_vdp_video_buffer_get_allowed_caps (GstVdpDevice * device)
{
  GstCaps *video_caps, *yuv_caps;
  gint i;

  VdpStatus status;

  video_caps = gst_caps_new_empty ();
  yuv_caps = gst_caps_new_empty ();

  for (i = 0; i < G_N_ELEMENTS (chroma_types); i++) {
    VdpBool is_supported;
    guint32 max_w, max_h;

    status =
        device->vdp_video_surface_query_capabilities (device->device,
        chroma_types[i], &is_supported, &max_w, &max_h);
    /* INVALID_CHROMA_TYPE just means "unsupported", not a hard failure */
    if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE)
      goto surface_query_caps_error;

    if (is_supported) {
      GstCaps *format_caps;
      gint j;

      format_caps = gst_caps_new_simple ("video/x-vdpau-video",
          "chroma-type", G_TYPE_INT, chroma_types[i],
          "width", GST_TYPE_INT_RANGE, 1, max_w,
          "height", GST_TYPE_INT_RANGE, 1, max_h, NULL);
      gst_caps_append (video_caps, format_caps);

      /* check which fourccs of this chroma type can be up/downloaded */
      for (j = 0; j < G_N_ELEMENTS (formats); j++) {
        if (formats[j].chroma_type != chroma_types[i])
          continue;

        status =
            device->vdp_video_surface_query_ycbcr_capabilities (device->device,
            formats[j].chroma_type, formats[j].format, &is_supported);
        if (status != VDP_STATUS_OK
            && status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT)
          goto surface_query_ycbcr_error;

        if (is_supported) {
          format_caps = gst_caps_new_simple ("video/x-raw-yuv",
              "format", GST_TYPE_FOURCC, formats[j].fourcc,
              "width", GST_TYPE_INT_RANGE, 1, max_w,
              "height", GST_TYPE_INT_RANGE, 1, max_h, NULL);
          gst_caps_append (yuv_caps, format_caps);
        }
      }
    }
  }

done:
  gst_caps_append (video_caps, yuv_caps);

  return video_caps;

surface_query_caps_error:
  GST_ERROR_OBJECT (device,
      "Could not get query VDPAU video surface capabilites, "
      "Error returned from vdpau was: %s",
      device->vdp_get_error_string (status));
  goto done;

surface_query_ycbcr_error:
  GST_ERROR_OBJECT (device, "Could not query VDPAU YCbCr capabilites, "
      "Error returned from vdpau was: %s",
      device->vdp_get_error_string (status));
  goto done;
}
/* Compute the byte size of a raw video frame of @fourcc format with the
 * given dimensions.  Returns FALSE for unsupported fourccs. */
gboolean
gst_vdp_video_buffer_calculate_size (guint32 fourcc, gint width, gint height,
    guint * size)
{
  switch (fourcc) {
      /* YV12 and I420 only differ in chroma plane order, so their total
       * size is identical */
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
      *size = gst_video_format_get_size (GST_VIDEO_FORMAT_YV12, width, height);
      break;

    case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
      /* full-size luma plane plus interleaved half-size chroma plane */
      *size = width * height + width * height / 2;
      break;

    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
      *size = gst_video_format_get_size (GST_VIDEO_FORMAT_UYVY, width, height);
      break;

    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
      *size = gst_video_format_get_size (GST_VIDEO_FORMAT_YUY2, width, height);
      break;

    default:
      return FALSE;
  }

  return TRUE;
}
/* gst_vdp_video_buffer_download:
 * @video_buf: the source #GstVdpVideoBuffer
 * @outbuf: destination system-memory buffer (must be large enough, see
 * gst_vdp_video_buffer_calculate_size())
 * @fourcc: destination pixel format fourcc
 * @width: frame width in pixels
 * @height: frame height in pixels
 *
 * Reads the VDPAU surface contents into @outbuf.  Plane pointers and
 * strides are computed per fourcc; note that for both YV12 and I420 the
 * data is fetched as VDP_YCBCR_FORMAT_YV12, with the U/V plane offsets
 * (components 1 and 2) swapped to produce the requested layout.
 *
 * Returns: TRUE on success, FALSE on unsupported fourcc or VDPAU error.
 */
gboolean
gst_vdp_video_buffer_download (GstVdpVideoBuffer * video_buf,
    GstBuffer * outbuf, guint32 fourcc, gint width, gint height)
{
  guint8 *data[3];
  guint32 stride[3];
  VdpYCbCrFormat format;

  GstVdpDevice *device;
  VdpVideoSurface surface;
  VdpStatus status;

  g_return_val_if_fail (GST_IS_VDP_VIDEO_BUFFER (video_buf), FALSE);
  g_return_val_if_fail (GST_IS_BUFFER (outbuf), FALSE);

  switch (fourcc) {
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
    {
      /* planes: Y, then V (component 2), then U (component 1) */
      data[0] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          0, width, height);
      data[1] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          2, width, height);
      data[2] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          1, width, height);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          0, width);
      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          2, width);
      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          1, width);

      format = VDP_YCBCR_FORMAT_YV12;
      break;
    }
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
    {
      /* same VDP format as YV12; I420 offsets swap the chroma planes */
      data[0] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          0, width, height);
      data[1] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          2, width, height);
      data[2] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          1, width, height);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          0, width);
      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          2, width);
      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          1, width);

      format = VDP_YCBCR_FORMAT_YV12;
      break;
    }
    case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
    {
      /* luma plane followed by interleaved chroma plane */
      data[0] = GST_BUFFER_DATA (outbuf);
      data[1] = GST_BUFFER_DATA (outbuf) + width * height;

      stride[0] = width;
      stride[1] = width;

      format = VDP_YCBCR_FORMAT_NV12;
      break;
    }
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
    {
      /* single packed plane */
      data[0] = GST_BUFFER_DATA (outbuf);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_UYVY,
          0, width);

      format = VDP_YCBCR_FORMAT_UYVY;
      break;
    }
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
    {
      /* single packed plane */
      data[0] = GST_BUFFER_DATA (outbuf);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YUY2,
          0, width);

      format = VDP_YCBCR_FORMAT_YUYV;
      break;
    }
    default:
      return FALSE;
  }

  device = video_buf->device;
  surface = video_buf->surface;

  GST_LOG_OBJECT (video_buf, "Entering vdp_video_surface_get_bits_ycbcr");
  status =
      device->vdp_video_surface_get_bits_ycbcr (surface,
      format, (void *) data, stride);
  GST_LOG_OBJECT (video_buf,
      "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
  if (G_UNLIKELY (status != VDP_STATUS_OK)) {
    GST_ERROR_OBJECT (video_buf,
        "Couldn't get data from vdpau, Error returned from vdpau was: %s",
        device->vdp_get_error_string (status));
    return FALSE;
  }

  return TRUE;
}
/* gst_vdp_video_buffer_upload:
 * @video_buf: the destination #GstVdpVideoBuffer
 * @src_buf: source system-memory buffer holding raw video data
 * @fourcc: source pixel format fourcc
 * @width: frame width in pixels
 * @height: frame height in pixels
 *
 * Writes the contents of @src_buf into the VDPAU surface.  Plane
 * pointers and strides mirror gst_vdp_video_buffer_download(): YV12 and
 * I420 are both uploaded as VDP_YCBCR_FORMAT_YV12 with the U/V plane
 * offsets (components 1 and 2) swapped.
 *
 * Returns: TRUE on success, FALSE on unsupported fourcc or VDPAU error.
 */
gboolean
gst_vdp_video_buffer_upload (GstVdpVideoBuffer * video_buf, GstBuffer * src_buf,
    guint fourcc, gint width, gint height)
{
  guint8 *data[3];
  guint32 stride[3];
  VdpYCbCrFormat format;

  GstVdpDevice *device;
  VdpStatus status;

  g_return_val_if_fail (GST_IS_VDP_VIDEO_BUFFER (video_buf), FALSE);
  g_return_val_if_fail (GST_IS_BUFFER (src_buf), FALSE);

  switch (fourcc) {
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
    {
      /* planes: Y, then V (component 2), then U (component 1) */
      data[0] = GST_BUFFER_DATA (src_buf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          0, width, height);
      data[1] = GST_BUFFER_DATA (src_buf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          2, width, height);
      data[2] = GST_BUFFER_DATA (src_buf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          1, width, height);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          0, width);
      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          2, width);
      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          1, width);

      format = VDP_YCBCR_FORMAT_YV12;
      break;
    }
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
    {
      /* same VDP format as YV12; I420 offsets swap the chroma planes */
      data[0] = GST_BUFFER_DATA (src_buf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          0, width, height);
      data[1] = GST_BUFFER_DATA (src_buf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          2, width, height);
      data[2] = GST_BUFFER_DATA (src_buf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          1, width, height);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          0, width);
      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          2, width);
      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          1, width);

      format = VDP_YCBCR_FORMAT_YV12;
      break;
    }
    case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
    {
      /* luma plane followed by interleaved chroma plane */
      data[0] = GST_BUFFER_DATA (src_buf);
      data[1] = GST_BUFFER_DATA (src_buf) + width * height;

      stride[0] = width;
      stride[1] = width;

      format = VDP_YCBCR_FORMAT_NV12;
      break;
    }
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
    {
      /* single packed plane */
      data[0] = GST_BUFFER_DATA (src_buf);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_UYVY,
          0, width);

      format = VDP_YCBCR_FORMAT_UYVY;
      break;
    }
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
    {
      /* single packed plane */
      data[0] = GST_BUFFER_DATA (src_buf);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YUY2,
          0, width);

      format = VDP_YCBCR_FORMAT_YUYV;
      break;
    }
    default:
      return FALSE;
  }

  device = video_buf->device;

  status = device->vdp_video_surface_put_bits_ycbcr (video_buf->surface, format,
      (void *) data, stride);
  if (G_UNLIKELY (status != VDP_STATUS_OK)) {
    GST_ERROR_OBJECT (video_buf, "Couldn't push YUV data to VDPAU, "
        "Error returned from vdpau was: %s",
        device->vdp_get_error_string (status));
    return FALSE;
  }

  return TRUE;
}

View file

@ -1,109 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_VDP_VIDEO_BUFFER_H_
#define _GST_VDP_VIDEO_BUFFER_H_
#include <gst/gst.h>
#include <gst/video/video.h>
#include "gstvdpbuffer.h"
#include "gstvdpdevice.h"
typedef struct _GstVdpVideoBuffer GstVdpVideoBuffer;
#define GST_TYPE_VDP_VIDEO_BUFFER (gst_vdp_video_buffer_get_type())
#define GST_IS_VDP_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_VIDEO_BUFFER))
#define GST_VDP_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_VIDEO_BUFFER, GstVdpVideoBuffer))
struct _GstVdpVideoBuffer {
GstVdpBuffer vdp_buffer;
GstVdpDevice *device;
VdpVideoSurface surface;
};
typedef struct
{
VdpChromaType chroma_type;
VdpYCbCrFormat format;
guint32 fourcc;
} GstVdpVideoBufferFormats;
static const VdpChromaType chroma_types[] =
{ VDP_CHROMA_TYPE_420, VDP_CHROMA_TYPE_422, VDP_CHROMA_TYPE_444 };
static const GstVdpVideoBufferFormats formats[] = {
{
VDP_CHROMA_TYPE_420,
VDP_YCBCR_FORMAT_YV12,
GST_MAKE_FOURCC ('I', '4', '2', '0')
},
{
VDP_CHROMA_TYPE_420,
VDP_YCBCR_FORMAT_YV12,
GST_MAKE_FOURCC ('Y', 'V', '1', '2')
},
{
VDP_CHROMA_TYPE_420,
VDP_YCBCR_FORMAT_NV12,
GST_MAKE_FOURCC ('N', 'V', '1', '2')
},
{
VDP_CHROMA_TYPE_422,
VDP_YCBCR_FORMAT_UYVY,
GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
},
{
VDP_CHROMA_TYPE_444,
VDP_YCBCR_FORMAT_V8U8Y8A8,
GST_MAKE_FOURCC ('A', 'Y', 'U', 'V')
},
{
VDP_CHROMA_TYPE_444,
VDP_YCBCR_FORMAT_Y8U8V8A8,
GST_MAKE_FOURCC ('A', 'V', 'U', 'Y')
},
{
VDP_CHROMA_TYPE_422,
VDP_YCBCR_FORMAT_YUYV,
GST_MAKE_FOURCC ('Y', 'U', 'Y', '2')
},
};
GType gst_vdp_video_buffer_get_type (void);
GstVdpVideoBuffer *gst_vdp_video_buffer_new (GstVdpDevice * device, VdpChromaType chroma_type, gint width, gint height, GError **error);
GstCaps *gst_vdp_video_buffer_get_caps (gboolean filter, VdpChromaType chroma_type);
GstCaps *gst_vdp_video_buffer_get_allowed_caps (GstVdpDevice * device);
gboolean gst_vdp_video_buffer_calculate_size (guint32 fourcc, gint width, gint height, guint *size);
gboolean gst_vdp_video_buffer_download (GstVdpVideoBuffer *inbuf, GstBuffer *outbuf, guint32 fourcc, gint width, gint height);
gboolean gst_vdp_video_buffer_upload (GstVdpVideoBuffer *video_buf, GstBuffer *src_buf, guint fourcc, gint width, gint height);
#define GST_VDP_VIDEO_CAPS \
"video/x-vdpau-video, " \
"chroma-type = (int)[0,2], " \
"width = (int)[1,4096], " \
"height = (int)[1,4096]"
#endif

View file

@ -1,148 +0,0 @@
/* -*- Mode: C; indent-tabs-mode: t; c-basic-offset: 4; tab-width: 4 -*- */
/*
* gst-plugins-bad
* Copyright (C) Carl-Anton Ingmarsson 2010 <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "gstvdpdevice.h"
#include "gstvdpvideobuffer.h"
#include "gstvdpvideobufferpool.h"
struct _GstVdpVideoBufferPool
{
GstVdpBufferPool buffer_pool;
VdpChromaType chroma_type;
guint width, height;
};
G_DEFINE_TYPE (GstVdpVideoBufferPool, gst_vdp_video_buffer_pool,
GST_TYPE_VDP_BUFFER_POOL);
GstVdpBufferPool *
gst_vdp_video_buffer_pool_new (GstVdpDevice * device)
{
g_return_val_if_fail (GST_IS_VDP_DEVICE (device), NULL);
return g_object_new (GST_TYPE_VDP_VIDEO_BUFFER_POOL, "device", device, NULL);
}
static gboolean
parse_caps (const GstCaps * caps, VdpChromaType * chroma_type, gint * width,
gint * height)
{
GstStructure *structure;
structure = gst_caps_get_structure (caps, 0);
if (!gst_structure_get_int (structure, "chroma-type", (gint *) chroma_type))
return FALSE;
if (!gst_structure_get_int (structure, "width", width))
return FALSE;
if (!gst_structure_get_int (structure, "height", height))
return FALSE;
return TRUE;
}
static gboolean
gst_vdp_video_buffer_pool_check_caps (GstVdpBufferPool * bpool,
const GstCaps * caps)
{
GstVdpVideoBufferPool *vpool = GST_VDP_VIDEO_BUFFER_POOL (bpool);
VdpChromaType chroma_type;
gint width, height;
if (!parse_caps (caps, &chroma_type, &width, &height))
return FALSE;
if (chroma_type != vpool->chroma_type || width != vpool->width ||
height != vpool->height)
return FALSE;
return TRUE;
}
static gboolean
gst_vdp_video_buffer_pool_set_caps (GstVdpBufferPool * bpool,
const GstCaps * caps, gboolean * clear_bufs)
{
GstVdpVideoBufferPool *vpool = GST_VDP_VIDEO_BUFFER_POOL (bpool);
VdpChromaType chroma_type;
gint width, height;
if (!parse_caps (caps, &chroma_type, &width, &height))
return FALSE;
if (chroma_type != vpool->chroma_type || width != vpool->width ||
height != vpool->height)
*clear_bufs = TRUE;
else
*clear_bufs = FALSE;
vpool->chroma_type = chroma_type;
vpool->width = width;
vpool->height = height;
return TRUE;
}
static GstVdpBuffer *
gst_vdp_video_buffer_pool_alloc_buffer (GstVdpBufferPool * bpool,
GError ** error)
{
GstVdpVideoBufferPool *vpool = GST_VDP_VIDEO_BUFFER_POOL (bpool);
GstVdpDevice *device;
device = gst_vdp_buffer_pool_get_device (bpool);
return GST_VDP_BUFFER_CAST (gst_vdp_video_buffer_new (device,
vpool->chroma_type, vpool->width, vpool->height, error));
}
static void
gst_vdp_video_buffer_pool_finalize (GObject * object)
{
/* TODO: Add deinitalization code here */
G_OBJECT_CLASS (gst_vdp_video_buffer_pool_parent_class)->finalize (object);
}
static void
gst_vdp_video_buffer_pool_init (GstVdpVideoBufferPool * vpool)
{
vpool->chroma_type = -1;
vpool->width = 0;
vpool->height = 0;
}
static void
gst_vdp_video_buffer_pool_class_init (GstVdpVideoBufferPoolClass * klass)
{
GObjectClass *object_class = G_OBJECT_CLASS (klass);
GstVdpBufferPoolClass *buffer_pool_class = GST_VDP_BUFFER_POOL_CLASS (klass);
buffer_pool_class->alloc_buffer = gst_vdp_video_buffer_pool_alloc_buffer;
buffer_pool_class->set_caps = gst_vdp_video_buffer_pool_set_caps;
buffer_pool_class->check_caps = gst_vdp_video_buffer_pool_check_caps;
object_class->finalize = gst_vdp_video_buffer_pool_finalize;
}

View file

@ -1,52 +0,0 @@
/* -*- Mode: C; indent-tabs-mode: t; c-basic-offset: 4; tab-width: 4 -*- */
/*
* gst-plugins-bad
* Copyright (C) Carl-Anton Ingmarsson 2010 <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_VDP_VIDEO_BUFFERPOOL_H_
#define _GST_VDP_VIDEO_BUFFERPOOL_H_
#include <gst/gst.h>
#include "gstvdpbufferpool.h"
G_BEGIN_DECLS
#define GST_TYPE_VDP_VIDEO_BUFFER_POOL (gst_vdp_video_buffer_pool_get_type ())
#define GST_VDP_VIDEO_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_VIDEO_BUFFER_POOL, GstVdpVideoBufferPool))
#define GST_VDP_VIDEO_BUFFER_POOL_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VDP_VIDEO_BUFFER_POOL, GstVdpVideoBufferPoolClass))
#define GST_IS_VDP_VIDEO_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_VIDEO_BUFFER_POOL))
#define GST_IS_VDP_VIDEO_BUFFER_POOL_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VDP_VIDEO_BUFFER_POOL))
#define GST_VDP_VIDEO_BUFFER_POOL_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_VIDEO_BUFFER_POOL, GstVdpVideoBufferPoolClass))
typedef struct _GstVdpVideoBufferPool GstVdpVideoBufferPool;
typedef struct _GstVdpVideoBufferPoolClass GstVdpVideoBufferPoolClass;
struct _GstVdpVideoBufferPoolClass
{
GstVdpBufferPoolClass buffer_pool_class;
};
GstVdpBufferPool *gst_vdp_video_buffer_pool_new (GstVdpDevice *device);
GType gst_vdp_video_buffer_pool_get_type (void) G_GNUC_CONST;
G_END_DECLS
#endif /* _GST_VDP_VIDEO_BUFFER_POOL_H_ */

View file

@ -1,328 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "gstvdpvideobuffer.h"
#include "gstvdpvideobufferpool.h"
#include "gstvdputils.h"
#include "gstvdpvideosrcpad.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_video_src_pad_debug);
#define GST_CAT_DEFAULT gst_vdp_video_src_pad_debug
enum
{
PROP_0,
PROP_DEVICE
};
struct _GstVdpVideoSrcPad
{
GstPad pad;
GstVdpBufferPool *bpool;
GstCaps *caps;
gboolean yuv_output;
gint width, height;
guint32 fourcc;
/* properties */
GstVdpDevice *device;
};
struct _GstVdpVideoSrcPadClass
{
GstPadClass pad_class;
};
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdp_video_src_pad_debug, "vdpvideosrcpad", 0, "GstVdpVideoSrcPad");
G_DEFINE_TYPE_WITH_CODE (GstVdpVideoSrcPad, gst_vdp_video_src_pad, GST_TYPE_PAD,
DEBUG_INIT ());
GstVdpVideoSrcPad *
gst_vdp_video_src_pad_new (GstPadTemplate * templ, const gchar * name)
{
g_return_val_if_fail (GST_IS_PAD_TEMPLATE (templ), NULL);
g_return_val_if_fail ((templ->direction == GST_PAD_SRC), NULL);
return g_object_new (GST_TYPE_VDP_VIDEO_SRC_PAD,
"name", name, "direction", templ->direction, "template", templ, NULL);
}
GstFlowReturn
gst_vdp_video_src_pad_push (GstVdpVideoSrcPad * vdp_pad,
GstVdpVideoBuffer * video_buf)
{
GstPad *pad;
GstBuffer *out_buf;
g_return_val_if_fail (GST_IS_VDP_VIDEO_SRC_PAD (vdp_pad), GST_FLOW_ERROR);
g_return_val_if_fail (GST_IS_VDP_VIDEO_BUFFER (video_buf), GST_FLOW_ERROR);
pad = (GstPad *) vdp_pad;
if (G_UNLIKELY (!GST_PAD_CAPS (pad)))
return GST_FLOW_NOT_NEGOTIATED;
if (vdp_pad->yuv_output) {
guint size;
GstFlowReturn ret;
GstCaps *caps;
if (!gst_vdp_video_buffer_calculate_size (vdp_pad->fourcc, vdp_pad->width,
vdp_pad->height, &size)) {
GST_ERROR_OBJECT (vdp_pad, "Couldn't calculate buffer size for caps");
gst_buffer_unref (GST_BUFFER_CAST (video_buf));
return GST_FLOW_ERROR;
}
caps = GST_PAD_CAPS (pad);
ret = gst_pad_alloc_buffer (pad,
GST_BUFFER_OFFSET_NONE, size, caps, &out_buf);
if (ret != GST_FLOW_OK) {
gst_buffer_unref (GST_BUFFER_CAST (video_buf));
return ret;
}
if (!gst_caps_is_equal_fixed (caps, GST_BUFFER_CAPS (out_buf))) {
GST_ERROR_OBJECT (vdp_pad,
"Sink element allocated buffer with different caps");
gst_buffer_unref (GST_BUFFER_CAST (video_buf));
gst_buffer_unref (out_buf);
return GST_FLOW_ERROR;
}
if (!gst_vdp_video_buffer_download (video_buf, out_buf, vdp_pad->fourcc,
vdp_pad->width, vdp_pad->height)) {
GST_ERROR_OBJECT (vdp_pad,
"Couldn't convert from GstVdpVideoBuffer to the requested format");
gst_buffer_unref (GST_BUFFER_CAST (video_buf));
gst_buffer_unref (out_buf);
return GST_FLOW_ERROR;
}
gst_buffer_copy_metadata (out_buf, (const GstBuffer *) video_buf,
GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);
gst_buffer_unref (GST_BUFFER_CAST (video_buf));
} else
out_buf = GST_BUFFER_CAST (video_buf);
/* FIXME: can't use gst_buffer_set_caps since we may have additional
* references to the bufffer. We can't either use
* gst_buffer_make_metadata_writable since that creates a regular buffer and
* not a GstVdpVideoBuffer */
gst_caps_replace (&(GST_BUFFER_CAPS (out_buf)), GST_PAD_CAPS (vdp_pad));
return gst_pad_push (pad, out_buf);
}
GstFlowReturn
gst_vdp_video_src_pad_alloc_buffer (GstVdpVideoSrcPad * vdp_pad,
GstVdpVideoBuffer ** video_buf, GError ** error)
{
GstCaps *caps;
g_return_val_if_fail (GST_IS_VDP_VIDEO_SRC_PAD (vdp_pad), GST_FLOW_ERROR);
caps = GST_PAD_CAPS (vdp_pad);
if (!caps)
return GST_FLOW_NOT_NEGOTIATED;
*video_buf =
(GstVdpVideoBuffer *) gst_vdp_buffer_pool_get_buffer (vdp_pad->bpool,
error);
if (!*video_buf)
return GST_FLOW_ERROR;
return GST_FLOW_OK;
}
static gboolean
gst_vdp_video_src_pad_setcaps (GstPad * pad, GstCaps * caps)
{
GstVdpVideoSrcPad *vdp_pad = GST_VDP_VIDEO_SRC_PAD (pad);
const GstStructure *structure;
GstCaps *video_caps;
structure = gst_caps_get_structure (caps, 0);
if (gst_structure_has_name (structure, "video/x-raw-yuv")) {
if (!gst_structure_get_int (structure, "width", &vdp_pad->width))
return FALSE;
if (!gst_structure_get_int (structure, "height", &vdp_pad->height))
return FALSE;
if (!gst_structure_get_fourcc (structure, "format", &vdp_pad->fourcc))
return FALSE;
video_caps = gst_vdp_yuv_to_video_caps (caps);
vdp_pad->yuv_output = TRUE;
} else if (gst_structure_has_name (structure, "video/x-vdpau-video")) {
if (!gst_structure_get_int (structure, "width", &vdp_pad->width))
return FALSE;
if (!gst_structure_get_int (structure, "height", &vdp_pad->height))
return FALSE;
video_caps = gst_caps_ref (caps);
vdp_pad->yuv_output = FALSE;
} else
return FALSE;
gst_vdp_buffer_pool_set_caps (vdp_pad->bpool, video_caps);
gst_caps_unref (video_caps);
return TRUE;
}
static GstCaps *
gst_vdp_video_src_pad_getcaps (GstPad * pad)
{
GstVdpVideoSrcPad *vdp_pad = (GstVdpVideoSrcPad *) pad;
const GstCaps *templ_caps;
if (vdp_pad->caps)
return gst_caps_ref (vdp_pad->caps);
else if ((templ_caps = gst_pad_get_pad_template_caps (pad)))
return gst_caps_copy (templ_caps);
return NULL;
}
static gboolean
gst_vdp_video_src_pad_activate_push (GstPad * pad, gboolean active)
{
GstVdpVideoSrcPad *vdp_pad = (GstVdpVideoSrcPad *) pad;
if (!active) {
if (vdp_pad->caps)
gst_caps_unref (vdp_pad->caps);
vdp_pad->caps = NULL;
if (vdp_pad->device)
gst_object_unref (vdp_pad->device);
vdp_pad->device = NULL;
}
return TRUE;
}
static void
gst_vdp_video_src_pad_set_device (GstVdpVideoSrcPad * vdp_pad,
GstVdpDevice * device)
{
GstCaps *caps;
const GstCaps *templ_caps;
if (vdp_pad->bpool)
g_object_unref (vdp_pad->bpool);
if (vdp_pad->device)
g_object_unref (vdp_pad->device);
vdp_pad->device = device;
vdp_pad->bpool = gst_vdp_video_buffer_pool_new (device);
/* update caps */
if (vdp_pad->caps)
gst_caps_unref (vdp_pad->caps);
caps = gst_vdp_video_buffer_get_allowed_caps (device);
if ((templ_caps = gst_pad_get_pad_template_caps (GST_PAD (vdp_pad)))) {
vdp_pad->caps = gst_caps_intersect (caps, templ_caps);
gst_caps_unref (caps);
} else
vdp_pad->caps = caps;
}
static void
gst_vdp_video_src_pad_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstVdpVideoSrcPad *vdp_pad = (GstVdpVideoSrcPad *) object;
switch (prop_id) {
case PROP_DEVICE:
g_value_set_object (value, vdp_pad->device);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_vdp_video_src_pad_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstVdpVideoSrcPad *vdp_pad = (GstVdpVideoSrcPad *) object;
switch (prop_id) {
case PROP_DEVICE:
gst_vdp_video_src_pad_set_device (vdp_pad, g_value_dup_object (value));
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_vdp_video_src_pad_init (GstVdpVideoSrcPad * vdp_pad)
{
GstPad *pad = GST_PAD (vdp_pad);
vdp_pad->device = NULL;
vdp_pad->caps = NULL;
gst_pad_set_getcaps_function (pad,
GST_DEBUG_FUNCPTR (gst_vdp_video_src_pad_getcaps));
gst_pad_set_setcaps_function (pad,
GST_DEBUG_FUNCPTR (gst_vdp_video_src_pad_setcaps));
gst_pad_set_activatepush_function (pad,
GST_DEBUG_FUNCPTR (gst_vdp_video_src_pad_activate_push));
}
static void
gst_vdp_video_src_pad_class_init (GstVdpVideoSrcPadClass * klass)
{
GObjectClass *object_class = G_OBJECT_CLASS (klass);
object_class->get_property = gst_vdp_video_src_pad_get_property;
object_class->set_property = gst_vdp_video_src_pad_set_property;
/**
* GstVdpVideoSrcPad:device:
*
* The #GstVdpDevice this pool is bound to.
*/
g_object_class_install_property
(object_class,
PROP_DEVICE,
g_param_spec_object ("device",
"Device",
"The GstVdpDevice the pad should use",
GST_TYPE_VDP_DEVICE, G_PARAM_READWRITE));
}

View file

@ -1,52 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_VDP_VIDEO_SRC_PAD_H_
#define _GST_VDP_VIDEO_SRC_PAD_H_
#include <gst/gst.h>
#include "gstvdpdevice.h"
#include "gstvdpvideobuffer.h"
G_BEGIN_DECLS
#define GST_TYPE_VDP_VIDEO_SRC_PAD (gst_vdp_video_src_pad_get_type ())
#define GST_VDP_VIDEO_SRC_PAD(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_VIDEO_SRC_PAD, GstVdpVideoSrcPad))
#define GST_VDP_VIDEO_SRC_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VDP_VIDEO_SRC_PAD, GstVdpVideoSrcPadClass))
#define GST_IS_VDP_VIDEO_SRC_PAD(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_VIDEO_SRC_PAD))
#define GST_IS_VDP_VIDEO_SRC_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VDP_VIDEO_SRC_PAD))
#define GST_VDP_VIDEO_SRC_PAD_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_VIDEO_SRC_PAD, GstVdpVideoSrcPadClass))
typedef struct _GstVdpVideoSrcPad GstVdpVideoSrcPad;
typedef struct _GstVdpVideoSrcPadClass GstVdpVideoSrcPadClass;
GstFlowReturn gst_vdp_video_src_pad_push (GstVdpVideoSrcPad *vdp_pad, GstVdpVideoBuffer *video_buf);
GstFlowReturn gst_vdp_video_src_pad_alloc_buffer (GstVdpVideoSrcPad *vdp_pad, GstVdpVideoBuffer **video_buf, GError ** error);
GstCaps *gst_vdp_video_src_pad_get_template_caps ();
GstVdpVideoSrcPad * gst_vdp_video_src_pad_new (GstPadTemplate * templ, const gchar * name);
GType gst_vdp_video_src_pad_get_type (void);
G_END_DECLS
#endif /* _GST_VDP_VIDEO_SRC_PAD_H_ */

View file

@ -5,10 +5,13 @@
#include <gst/gst.h>
#include "gstvdp/gstvdp.h"
#include "gstvdpdevice.h"
#include "gstvdpvideomemory.h"
#include "gstvdpoutputbuffer.h"
#include "gstvdpdecoder.h"
#include "mpeg/gstvdpmpegdec.h"
#include "h264/gstvdph264dec.h"
/* #include "h264/gstvdph264dec.h" */
#include "mpeg4/gstvdpmpeg4dec.h"
#include "gstvdpvideopostprocess.h"
#include "gstvdpsink.h"
@ -16,22 +19,27 @@
static gboolean
vdpau_init (GstPlugin * vdpau_plugin)
{
gst_vdp_init ();
gboolean ret;
/* do this so debug categories get created */
gst_vdp_device_get_type ();
gst_vdp_decoder_get_type ();
gst_vdp_video_memory_init ();
/* Before giving these elements a rank again, make sure they pass at
* least the generic/states test when there's no device available */
gst_element_register (vdpau_plugin, "vdpaumpegdec",
ret = gst_element_register (vdpau_plugin, "vdpaumpegdec",
GST_RANK_NONE, GST_TYPE_VDP_MPEG_DEC);
gst_element_register (vdpau_plugin, "vdpauh264dec",
GST_RANK_NONE, GST_TYPE_VDP_H264_DEC);
gst_element_register (vdpau_plugin, "vdpaumpeg4dec",
GST_RANK_NONE, GST_TYPE_VDP_MPEG4_DEC);
gst_element_register (vdpau_plugin, "vdpauvideopostprocess",
GST_RANK_NONE, GST_TYPE_VDP_VIDEO_POST_PROCESS);
gst_element_register (vdpau_plugin, "vdpausink",
GST_RANK_NONE, GST_TYPE_VDP_SINK);
/* ret &= gst_element_register (vdpau_plugin, "vdpauh264dec", */
/* GST_RANK_NONE, GST_TYPE_VDP_H264_DEC); */
/* gst_element_register (vdpau_plugin, "vdpaumpeg4dec", */
/* GST_RANK_NONE, GST_TYPE_VDP_MPEG4_DEC); */
/* gst_element_register (vdpau_plugin, "vdpauvideopostprocess", */
/* GST_RANK_NONE, GST_TYPE_VDP_VIDEO_POST_PROCESS); */
/* gst_element_register (vdpau_plugin, "vdpausink", */
/* GST_RANK_NONE, GST_TYPE_VDP_SINK); */
return TRUE;
return ret;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,

View file

@ -22,19 +22,19 @@
#include "config.h"
#endif
#include "gstvdpvideosrcpad.h"
#include "gstvdpdecoder.h"
#include "gstvdpvideomemory.h"
#include "gstvdpvideobufferpool.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_decoder_debug);
#define GST_CAT_DEFAULT gst_vdp_decoder_debug
#define DEBUG_INIT(bla) \
#define DEBUG_INIT \
GST_DEBUG_CATEGORY_INIT (gst_vdp_decoder_debug, "vdpdecoder", 0, \
"VDPAU decoder base class");
GST_BOILERPLATE_FULL (GstVdpDecoder, gst_vdp_decoder, GstBaseVideoDecoder,
GST_TYPE_BASE_VIDEO_DECODER, DEBUG_INIT);
#define gst_vdp_decoder_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstVdpDecoder, gst_vdp_decoder, GST_TYPE_VIDEO_DECODER,
DEBUG_INIT);
enum
{
@ -42,35 +42,6 @@ enum
PROP_DISPLAY
};
static GstFlowReturn
gst_vdp_decoder_shape_output (GstBaseVideoDecoder * base_video_decoder,
GstBuffer * buf)
{
GstVdpVideoSrcPad *vdp_pad;
vdp_pad =
(GstVdpVideoSrcPad *) GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder);
return gst_vdp_video_src_pad_push (vdp_pad, GST_VDP_VIDEO_BUFFER (buf));
}
static GstPad *
gst_vdp_decoder_create_srcpad (GstBaseVideoDecoder * base_video_decoder,
GstBaseVideoDecoderClass * base_video_decoder_class)
{
GstPadTemplate *pad_template;
GstVdpVideoSrcPad *vdp_pad;
pad_template = gst_element_class_get_pad_template
(GST_ELEMENT_CLASS (base_video_decoder_class),
GST_BASE_VIDEO_DECODER_SRC_NAME);
vdp_pad = gst_vdp_video_src_pad_new (pad_template,
GST_BASE_VIDEO_DECODER_SRC_NAME);
return GST_PAD (vdp_pad);
}
void
gst_vdp_decoder_post_error (GstVdpDecoder * decoder, GError * error)
{
@ -84,67 +55,78 @@ gst_vdp_decoder_post_error (GstVdpDecoder * decoder, GError * error)
g_error_free (error);
}
static GstFlowReturn
gst_vdp_decoder_alloc_buffer (GstVdpDecoder * vdp_decoder,
GstVdpVideoBuffer ** video_buf)
{
GstVdpVideoSrcPad *vdp_pad;
GstFlowReturn ret;
GError *err = NULL;
vdp_pad = (GstVdpVideoSrcPad *) GST_BASE_VIDEO_DECODER_SRC_PAD (vdp_decoder);
ret = gst_vdp_video_src_pad_alloc_buffer (vdp_pad, video_buf, &err);
if (ret == GST_FLOW_ERROR)
gst_vdp_decoder_post_error (vdp_decoder, err);
return ret;
}
GstFlowReturn
gst_vdp_decoder_render (GstVdpDecoder * vdp_decoder, VdpPictureInfo * info,
guint n_bufs, VdpBitstreamBuffer * bufs, GstVdpVideoBuffer ** video_buf)
guint n_bufs, VdpBitstreamBuffer * bufs, GstVideoCodecFrame * frame)
{
GstFlowReturn ret;
GstVdpDevice *device;
VdpVideoSurface surface;
VdpStatus status;
ret = gst_vdp_decoder_alloc_buffer (vdp_decoder, video_buf);
GstVdpVideoMemory *vmem;
GstClockTime before, after;
GST_DEBUG_OBJECT (vdp_decoder, "n_bufs:%d, frame:%d", n_bufs,
frame->system_frame_number);
ret =
gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (vdp_decoder),
frame);
if (ret != GST_FLOW_OK)
return ret;
goto fail_alloc;
device = (*video_buf)->device;
surface = (*video_buf)->surface;
vmem = (GstVdpVideoMemory *) gst_buffer_get_memory (frame->output_buffer, 0);
if (!vmem
|| !gst_memory_is_type ((GstMemory *) vmem,
GST_VDP_VIDEO_MEMORY_ALLOCATOR))
goto no_mem;
status = device->vdp_decoder_render (vdp_decoder->decoder, surface,
info, n_bufs, bufs);
GST_DEBUG_OBJECT (vdp_decoder, "Calling VdpDecoderRender()");
before = gst_util_get_timestamp ();
status =
vdp_decoder->device->vdp_decoder_render (vdp_decoder->decoder,
vmem->surface, info, n_bufs, bufs);
after = gst_util_get_timestamp ();
if (status != VDP_STATUS_OK)
goto decode_error;
GST_DEBUG_OBJECT (vdp_decoder, "VdpDecoderRender() took %" GST_TIME_FORMAT,
GST_TIME_ARGS (after - before));
return GST_FLOW_OK;
decode_error:
GST_ELEMENT_ERROR (vdp_decoder, RESOURCE, READ,
("Could not decode"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
vdp_decoder->device->vdp_get_error_string (status)));
gst_buffer_unref (GST_BUFFER_CAST (*video_buf));
gst_video_decoder_drop_frame (GST_VIDEO_DECODER (vdp_decoder), frame);
return GST_FLOW_ERROR;
fail_alloc:
{
GST_WARNING_OBJECT (vdp_decoder, "Failed to get an output frame");
return ret;
}
no_mem:
{
GST_ERROR_OBJECT (vdp_decoder, "Didn't get VdpVideoSurface backed buffer");
return GST_FLOW_ERROR;
}
}
GstFlowReturn
gst_vdp_decoder_init_decoder (GstVdpDecoder * vdp_decoder,
VdpDecoderProfile profile, guint32 max_references)
VdpDecoderProfile profile, guint32 max_references,
GstVideoCodecState * output_state)
{
GstVdpDevice *device;
VdpStatus status;
GstVideoState state;
device = vdp_decoder->device;
@ -154,15 +136,14 @@ gst_vdp_decoder_init_decoder (GstVdpDecoder * vdp_decoder,
goto destroy_decoder_error;
}
if (!gst_base_video_decoder_set_src_caps (GST_BASE_VIDEO_DECODER
(vdp_decoder)))
return GST_FLOW_NOT_NEGOTIATED;
state =
gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (vdp_decoder));
GST_DEBUG_OBJECT (vdp_decoder,
"device:%p, profile:%d, width:%d, height:%d, max_references:%d",
device->device, profile, output_state->info.width,
output_state->info.height, max_references);
status = device->vdp_decoder_create (device->device, profile,
state.width, state.height, max_references, &vdp_decoder->decoder);
output_state->info.width, output_state->info.height, max_references,
&vdp_decoder->decoder);
if (status != VDP_STATUS_OK)
goto create_decoder_error;
@ -186,22 +167,74 @@ create_decoder_error:
}
static gboolean
gst_vdp_decoder_start (GstBaseVideoDecoder * base_video_decoder)
gst_vdp_decoder_decide_allocation (GstVideoDecoder * video_decoder,
GstQuery * query)
{
GstVdpDecoder *vdp_decoder = GST_VDP_DECODER (base_video_decoder);
GstVdpDecoder *vdp_decoder = GST_VDP_DECODER (video_decoder);
GstCaps *outcaps;
GstBufferPool *pool = NULL;
guint size, min = 0, max = 0;
GstStructure *config;
GstVideoInfo vinfo;
gboolean update_pool;
GError *err;
GstVdpVideoSrcPad *vdp_pad;
gst_query_parse_allocation (query, &outcaps, NULL);
gst_video_info_init (&vinfo);
gst_video_info_from_caps (&vinfo, outcaps);
if (gst_query_get_n_allocation_pools (query) > 0) {
gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
size = MAX (size, vinfo.size);
update_pool = TRUE;
} else {
pool = NULL;
size = vinfo.size;
min = max = 0;
update_pool = FALSE;
}
if (pool == NULL
|| !gst_buffer_pool_has_option (pool,
GST_BUFFER_POOL_OPTION_VDP_VIDEO_META)) {
/* no pool, we can make our own */
GST_DEBUG_OBJECT (video_decoder,
"no pool or doesn't support GstVdpVideoMeta, making new pool");
pool = gst_vdp_video_buffer_pool_new (vdp_decoder->device);
}
/* now configure */
config = gst_buffer_pool_get_config (pool);
gst_buffer_pool_config_set_params (config, outcaps, size, min, max);
gst_buffer_pool_config_add_option (config,
GST_BUFFER_POOL_OPTION_VDP_VIDEO_META);
gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
gst_buffer_pool_set_config (pool, config);
if (update_pool)
gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
else
gst_query_add_allocation_pool (query, pool, size, min, max);
if (pool)
gst_object_unref (pool);
return TRUE;
}
static gboolean
gst_vdp_decoder_start (GstVideoDecoder * video_decoder)
{
GstVdpDecoder *vdp_decoder = GST_VDP_DECODER (video_decoder);
GError *err = NULL;
GST_DEBUG_OBJECT (video_decoder, "Starting");
err = NULL;
vdp_decoder->device = gst_vdp_get_device (vdp_decoder->display, &err);
if (G_UNLIKELY (!vdp_decoder->device))
goto device_error;
vdp_pad =
(GstVdpVideoSrcPad *) GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder);
g_object_set (G_OBJECT (vdp_pad), "device", vdp_decoder->device, NULL);
vdp_decoder->decoder = VDP_INVALID_HANDLE;
return TRUE;
@ -212,9 +245,9 @@ device_error:
}
static gboolean
gst_vdp_decoder_stop (GstBaseVideoDecoder * base_video_decoder)
gst_vdp_decoder_stop (GstVideoDecoder * video_decoder)
{
GstVdpDecoder *vdp_decoder = GST_VDP_DECODER (base_video_decoder);
GstVdpDecoder *vdp_decoder = GST_VDP_DECODER (video_decoder);
if (vdp_decoder->decoder != VDP_INVALID_HANDLE) {
GstVdpDevice *device = vdp_decoder->device;
@ -281,44 +314,39 @@ gst_vdp_decoder_finalize (GObject * object)
}
static void
gst_vdp_decoder_base_init (gpointer g_class)
gst_vdp_decoder_init (GstVdpDecoder * vdp_decoder)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstCaps *src_caps;
GstPadTemplate *src_template;
src_caps = gst_vdp_video_buffer_get_caps (TRUE, VDP_CHROMA_TYPE_420);
src_template = gst_pad_template_new (GST_BASE_VIDEO_DECODER_SRC_NAME,
GST_PAD_SRC, GST_PAD_ALWAYS, src_caps);
gst_element_class_add_pad_template (element_class, src_template);
}
static void
gst_vdp_decoder_init (GstVdpDecoder * vdp_decoder, GstVdpDecoderClass * klass)
{
vdp_decoder->display = NULL;
}
static void
gst_vdp_decoder_class_init (GstVdpDecoderClass * klass)
{
GObjectClass *object_class;
GstBaseVideoDecoderClass *base_video_decoder_class;
GstVideoDecoderClass *video_decoder_class;
GstElementClass *element_class;
GstCaps *src_caps;
GstPadTemplate *src_template;
object_class = G_OBJECT_CLASS (klass);
base_video_decoder_class = GST_BASE_VIDEO_DECODER_CLASS (klass);
element_class = GST_ELEMENT_CLASS (klass);
video_decoder_class = GST_VIDEO_DECODER_CLASS (klass);
object_class->get_property = gst_vdp_decoder_get_property;
object_class->set_property = gst_vdp_decoder_set_property;
object_class->finalize = gst_vdp_decoder_finalize;
base_video_decoder_class->start = gst_vdp_decoder_start;
base_video_decoder_class->stop = gst_vdp_decoder_stop;
video_decoder_class->start = gst_vdp_decoder_start;
video_decoder_class->stop = gst_vdp_decoder_stop;
video_decoder_class->decide_allocation = gst_vdp_decoder_decide_allocation;
base_video_decoder_class->create_srcpad = gst_vdp_decoder_create_srcpad;
base_video_decoder_class->shape_output = gst_vdp_decoder_shape_output;
GST_FIXME ("Actually create srcpad template from hw capabilities");
src_caps = gst_caps_from_string ("video/x-raw,format={ YV12 }");
src_template = gst_pad_template_new (GST_VIDEO_DECODER_SRC_NAME,
GST_PAD_SRC, GST_PAD_ALWAYS, src_caps);
gst_element_class_add_pad_template (element_class, src_template);
g_object_class_install_property (object_class,
PROP_DISPLAY, g_param_spec_string ("display", "Display", "X Display name",

View file

@ -24,8 +24,9 @@
#include <gst/gst.h>
#include <vdpau/vdpau.h>
#include "../basevideodecoder/gstbasevideodecoder.h"
#include "../gstvdp/gstvdpvideobuffer.h"
#include <gst/video/gstvideodecoder.h>
#include "gstvdpdevice.h"
G_BEGIN_DECLS
@ -41,17 +42,19 @@ typedef struct _GstVdpDecoderClass GstVdpDecoderClass;
struct _GstVdpDecoder {
GstBaseVideoDecoder base_video_decoder;
GstVideoDecoder video_decoder;
GstVdpDevice *device;
VdpDecoder decoder;
GstVideoInfo info;
/* properties */
gchar *display;
};
struct _GstVdpDecoderClass {
GstBaseVideoDecoderClass base_video_decoder_class;
GstVideoDecoderClass video_decoder_class;
};
void
@ -59,14 +62,15 @@ gst_vdp_decoder_post_error (GstVdpDecoder * decoder, GError * error);
GstFlowReturn
gst_vdp_decoder_render (GstVdpDecoder * vdp_decoder, VdpPictureInfo *info,
guint n_bufs, VdpBitstreamBuffer *bufs, GstVdpVideoBuffer **video_buf);
guint n_bufs, VdpBitstreamBuffer *bufs, GstVideoCodecFrame *frame);
GstFlowReturn
gst_vdp_decoder_init_decoder (GstVdpDecoder * vdp_decoder,
VdpDecoderProfile profile, guint32 max_references);
VdpDecoderProfile profile, guint32 max_references,
GstVideoCodecState *output_state);
GType gst_vdp_decoder_get_type (void);
G_END_DECLS
#endif /* __GST_VDP_DECODER_H__ */
#endif /* __GST_VDP_DECODER_H__ */

View file

@ -36,8 +36,6 @@ enum
PROP_DISPLAY
};
G_DEFINE_TYPE_WITH_CODE (GstVdpDevice, gst_vdp_device, G_TYPE_OBJECT,
DEBUG_INIT ());
@ -108,6 +106,9 @@ gst_vdp_device_open (GstVdpDevice * device, GError ** error)
&device->vdp_presentation_queue_query_surface_status}
};
GST_DEBUG_OBJECT (device, "Opening the device for display '%s'",
device->display_name);
device->display = XOpenDisplay (device->display_name);
if (!device->display)
goto create_display_error;
@ -132,6 +133,8 @@ gst_vdp_device_open (GstVdpDevice * device, GError ** error)
goto function_error;
}
GST_DEBUG_OBJECT (device, "Succesfully opened the device");
return TRUE;
create_display_error:
@ -293,6 +296,8 @@ gst_vdp_get_device (const gchar * display_name, GError ** error)
static GstVdpDeviceCache device_cache;
GstVdpDevice *device;
GST_DEBUG ("display_name '%s'", display_name);
if (g_once_init_enter (&once)) {
device_cache.hash_table =
g_hash_table_new_full (g_str_hash, g_str_equal, g_free, NULL);
@ -309,6 +314,7 @@ gst_vdp_get_device (const gchar * display_name, GError ** error)
device = g_hash_table_lookup (device_cache.hash_table, "");
if (!device) {
GST_DEBUG ("No cached device, creating a new one");
device = gst_vdp_device_new (display_name, error);
if (device) {
g_object_weak_ref (G_OBJECT (device), device_destroyed_cb, &device_cache);
@ -317,7 +323,8 @@ gst_vdp_get_device (const gchar * display_name, GError ** error)
device);
else
g_hash_table_insert (device_cache.hash_table, g_strdup (""), device);
}
} else
GST_ERROR ("Could not create GstVdpDevice !");
} else
g_object_ref (device);

View file

@ -23,18 +23,19 @@
#include <gst/gst.h>
#include "gstvdpbuffer.h"
#include "gstvdpdevice.h"
typedef struct _GstVdpOutputBuffer GstVdpOutputBuffer;
GType gst_vdpau_output_meta_api_get_type (void);
#define GST_TYPE_VDP_OUTPUT_BUFFER (gst_vdp_output_buffer_get_type())
#define GST_IS_VDP_OUTPUT_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_OUTPUT_BUFFER))
#define GST_VDP_OUTPUT_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_OUTPUT_BUFFER, GstVdpOutputBuffer))
const GstMetaInfo * gst_vdpau_output_meta_get_info (void);
struct _GstVdpOutputBuffer {
GstVdpBuffer vdp_buffer;
#define GST_VDPAU_OUTPUT_META_GET(buf) ((GstVdpauMeta *)gst_buffer_get_meta(buf,gst_vdpau_output_meta_api_get_type()))
#define GST_VDPAU_OUTPUT_META_ADD(buf) ((GstVdpauMeta *)gst_buffer_add_meta(buf,gst_vdpau_output_meta_get_info(),NULL))
struct _GstVdpauOutputMeta {
GstMeta meta;
/* FIXME : Check we actually need all of this */
GstVdpDevice *device;
VdpRGBAFormat rgba_format;
gint width, height;
@ -42,15 +43,14 @@ struct _GstVdpOutputBuffer {
VdpOutputSurface surface;
};
GType gst_vdp_output_buffer_get_type (void);
GstVdpOutputBuffer* gst_vdp_output_buffer_new (GstVdpDevice * device, VdpRGBAFormat rgba_format, gint width, gint height, GError **error);
#if 0
/* FIXME : Replace with GST_VIDEO_FORMAT... and GST_VIDEO_CHROMA_... */
GstCaps *gst_vdp_output_buffer_get_template_caps (void);
GstCaps *gst_vdp_output_buffer_get_allowed_caps (GstVdpDevice *device);
gboolean gst_vdp_caps_to_rgba_format (GstCaps *caps, VdpRGBAFormat *rgba_format);
gboolean gst_vdp_output_buffer_calculate_size (GstVdpOutputBuffer *output_buf, guint *size);
/* FIXME : Replace with map/unmap */
gboolean gst_vdp_output_buffer_download (GstVdpOutputBuffer *output_buf, GstBuffer *outbuf, GError **error);
#define GST_VDP_OUTPUT_CAPS \
@ -58,5 +58,5 @@ gboolean gst_vdp_output_buffer_download (GstVdpOutputBuffer *output_buf, GstBuff
"rgba-format = (int)[0,4], " \
"width = (int)[1,8192], " \
"height = (int)[1,8192]"
#endif
#endif

View file

@ -24,8 +24,6 @@
#include <gst/gst.h>
#include "gstvdpbufferpool.h"
G_BEGIN_DECLS
#define GST_TYPE_VDP_OUTPUT_BUFFER_POOL (gst_vdp_output_buffer_pool_get_type ())

View file

@ -27,16 +27,16 @@
#endif
/* Our interfaces */
#include <gst/interfaces/navigation.h>
#include <gst/interfaces/xoverlay.h>
#include <gst/video/navigation.h>
#include <gst/video/videooverlay.h>
#include <X11/XKBlib.h>
/* Debugging category */
#include <gst/gstinfo.h>
#include "gstvdp/gstvdpoutputbuffer.h"
#include "gstvdp/gstvdpoutputbufferpool.h"
#include "gstvdpoutputbuffer.h"
#include "gstvdpoutputbufferpool.h"
/* Object header */
#include "gstvdpsink.h"
@ -852,8 +852,8 @@ gst_vdp_sink_show_frame (GstBaseSink * bsink, GstBuffer * outbuf)
g_mutex_lock (vdp_sink->x_lock);
status =
device->vdp_presentation_queue_query_surface_status (vdp_sink->window->
queue, surface, &queue_status, &pres_time);
device->vdp_presentation_queue_query_surface_status (vdp_sink->
window->queue, surface, &queue_status, &pres_time);
g_mutex_unlock (vdp_sink->x_lock);
if (queue_status == VDP_PRESENTATION_QUEUE_STATUS_QUEUED) {

View file

@ -29,7 +29,7 @@
#include <string.h>
#include <math.h>
#include "gstvdp/gstvdpdevice.h"
#include "gstvdpdevice.h"
G_BEGIN_DECLS
@ -99,7 +99,7 @@ struct _VdpSink {
char *display_name;
GstVdpDevice *device;
GstVdpBufferPool *bpool;
GstBufferPool *bpool;
GstCaps *caps;
GstVdpWindow *window;
@ -135,4 +135,4 @@ GType gst_vdp_sink_get_type(void);
G_END_DECLS
#endif /* __GST_VDP_SINK_H__ */
#endif /* __GST_VDP_SINK_H__ */

89
sys/vdpau/gstvdputils.c Normal file
View file

@ -0,0 +1,89 @@
/*
* gst-plugins-bad
* Copyright (C) 2012 Edward Hervey <edward@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvdputils.h"
/* Mapping between a VDPAU chroma type / YCbCr format pair and the
 * corresponding GStreamer raw video format; used by the lookup helpers
 * below. */
typedef struct
{
  VdpChromaType chroma_type;
  VdpYCbCrFormat format;
  GstVideoFormat vformat;
} GstVdpVideoBufferFormats;

/* Table of the format pairings this plugin supports. */
static const GstVdpVideoBufferFormats yuv_formats[] = {
  {VDP_CHROMA_TYPE_420, VDP_YCBCR_FORMAT_YV12, GST_VIDEO_FORMAT_YV12},
  {VDP_CHROMA_TYPE_420, VDP_YCBCR_FORMAT_NV12, GST_VIDEO_FORMAT_NV12},
  {VDP_CHROMA_TYPE_422, VDP_YCBCR_FORMAT_UYVY, GST_VIDEO_FORMAT_UYVY},
  {VDP_CHROMA_TYPE_444, VDP_YCBCR_FORMAT_V8U8Y8A8, GST_VIDEO_FORMAT_AYUV},
  /* { */
  /* VDP_CHROMA_TYPE_444, */
  /* VDP_YCBCR_FORMAT_Y8U8V8A8, */
  /* GST_MAKE_FOURCC ('A', 'V', 'U', 'Y') */
  /* }, */
  {VDP_CHROMA_TYPE_422, VDP_YCBCR_FORMAT_YUYV, GST_VIDEO_FORMAT_YUY2}
};
/* gst_video_format_to_vdp_ycbcr:
 * @format: a #GstVideoFormat
 *
 * Looks up the VDPAU YCbCr format matching @format in the static
 * yuv_formats table.
 *
 * Returns: the matching #VdpYCbCrFormat, or -1 when @format has no
 * VDPAU counterpart.
 */
VdpYCbCrFormat
gst_video_format_to_vdp_ycbcr (GstVideoFormat format)
{
  guint idx;

  for (idx = 0; idx < G_N_ELEMENTS (yuv_formats); idx++)
    if (yuv_formats[idx].vformat == format)
      return yuv_formats[idx].format;

  /* Not present in the mapping table */
  return -1;
}
/* gst_video_info_to_vdp_chroma_type:
 * @info: a #GstVideoInfo
 *
 * Derives the VDPAU chroma type from the chroma subsampling described
 * by @info.
 *
 * Returns: the matching #VdpChromaType, or -1 for unsupported layouts.
 */
VdpChromaType
gst_video_info_to_vdp_chroma_type (GstVideoInfo * info)
{
  const GstVideoFormatInfo *finfo = info->finfo;
  VdpChromaType ret = -1;

  /* Check subsampling of second plane (first is always non-subsampled) */
  switch (GST_VIDEO_FORMAT_INFO_W_SUB (finfo, 1)) {
    case 0:
      /* Not subsampled in width for second plane.
       * FIX: W_SUB is 0 when a component is *not* subsampled, so 4:4:4 is
       * the case where the third component's W_SUB is also 0. The previous
       * truthy check made this branch unreachable. */
      if (GST_VIDEO_FORMAT_INFO_W_SUB (finfo, 2) == 0)
        /* Not subsampled at all (4:4:4) */
        ret = VDP_CHROMA_TYPE_444;
      break;
    case 1:
      /* Subsampled horizontally once */
      if (GST_VIDEO_FORMAT_INFO_H_SUB (finfo, 2) == 0)
        /* Not subsampled vertically (4:2:2) */
        ret = VDP_CHROMA_TYPE_422;
      else if (GST_VIDEO_FORMAT_INFO_H_SUB (finfo, 2) == 1)
        /* Subsampled vertically once (4:2:0) */
        ret = VDP_CHROMA_TYPE_420;
      break;
    default:
      /* Heavier horizontal subsampling has no VDPAU chroma type */
      break;
  }

  return ret;
}

View file

@ -1,7 +1,7 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
/*
* gst-plugins-bad
* Copyright (C) 2012 Edward Hervey <edward@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
@ -22,9 +22,17 @@
#define _GST_VDP_UTILS_H_
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>
#include <gst/video/gstvideopool.h>
#include "gstvdpdevice.h"
GstCaps *gst_vdp_video_to_output_caps (GstCaps *video_caps);
GstCaps *gst_vdp_yuv_to_video_caps (GstCaps * yuv_caps);
G_BEGIN_DECLS
VdpChromaType gst_video_info_to_vdp_chroma_type (GstVideoInfo *info);
VdpYCbCrFormat gst_video_format_to_vdp_ycbcr (GstVideoFormat format);
G_END_DECLS
#endif /* _GST_VDP_UTILS_H_ */

View file

@ -0,0 +1,210 @@
/*
* gst-plugins-bad
* Copyright (C) 2012 Edward Hervey <edward@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvdpvideobufferpool.h"
#include "gstvdpvideomemory.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_vidbufpool_debug);
#define GST_CAT_DEFAULT gst_vdp_vidbufpool_debug
static void gst_vdp_video_buffer_pool_finalize (GObject * object);
#define DEBUG_INIT \
GST_DEBUG_CATEGORY_INIT (gst_vdp_vidbufpool_debug, "vdpvideopool", 0, \
"VDPAU Video bufferpool");
#define gst_vdp_video_buffer_pool_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstVdpVideoBufferPool, gst_vdp_video_buffer_pool,
GST_TYPE_BUFFER_POOL, DEBUG_INIT);
/* Lists the pool-config options this pool understands: the generic
 * video meta plus the VDPAU-specific video meta. */
static const gchar **
gst_vdp_video_buffer_pool_get_options (GstBufferPool * pool)
{
  static const gchar *supported[] = {
    GST_BUFFER_POOL_OPTION_VIDEO_META,
    GST_BUFFER_POOL_OPTION_VDP_VIDEO_META,
    NULL
  };

  return supported;
}
/* gst_vdp_video_buffer_pool_set_config:
 * Parses the caps from @config, caches the resulting #GstVideoInfo on the
 * pool and records which metas (video meta / vdp meta) were requested,
 * then chains up to the parent class.
 *
 * Returns: %TRUE on success, %FALSE on an invalid configuration.
 */
static gboolean
gst_vdp_video_buffer_pool_set_config (GstBufferPool * pool,
    GstStructure * config)
{
  GstVdpVideoBufferPool *vdppool = GST_VDP_VIDEO_BUFFER_POOL_CAST (pool);
  GstVideoInfo info;
  GstCaps *caps;

  if (!gst_buffer_pool_config_get_params (config, &caps, NULL, NULL, NULL))
    goto wrong_config;

  if (caps == NULL)
    goto no_caps;

  /* now parse the caps from the config */
  if (!gst_video_info_from_caps (&info, caps))
    goto wrong_caps;

  GST_LOG_OBJECT (pool, "%dx%d, caps %" GST_PTR_FORMAT, info.width, info.height,
      caps);

  /* FIX: the invalid format value is GST_VIDEO_FORMAT_UNKNOWN, not -1;
   * the old "== -1" comparison could never be true. */
  if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_UNKNOWN)
    goto unknown_format;

  vdppool->info = info;

  /* enable metadata based on config of the pool */
  vdppool->add_videometa =
      gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_META);

  /* parse extra alignment info */
  vdppool->add_vdpmeta = gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_VDP_VIDEO_META);

  return GST_BUFFER_POOL_CLASS (parent_class)->set_config (pool, config);

  /* ERRORS */
wrong_config:
  {
    GST_WARNING_OBJECT (pool, "invalid config");
    return FALSE;
  }
no_caps:
  {
    GST_WARNING_OBJECT (pool, "no caps in config");
    return FALSE;
  }
wrong_caps:
  {
    GST_WARNING_OBJECT (pool,
        "failed getting geometry from caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
unknown_format:
  {
    /* FIX: GST_ELEMENT_ERROR may only be posted by a GstElement; a buffer
     * pool is not one, so only log the failure here. Also drop the stray
     * double semicolon. */
    GST_WARNING_OBJECT (vdppool, "failed to get format from caps %"
        GST_PTR_FORMAT, caps);
    return FALSE;
  }
}
/* This function handles GstBuffer creation.
 *
 * Allocates one buffer wrapping a freshly created VDPAU video surface.
 * When the pool was configured with the video-meta option, a
 * #GstVideoMeta is attached whose map/unmap vfuncs go through the VDPAU
 * download path.
 */
static GstFlowReturn
gst_vdp_video_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstVdpVideoBufferPool *vdppool = GST_VDP_VIDEO_BUFFER_POOL_CAST (pool);
  GstVideoInfo *info;
  GstBuffer *buf;
  GstMemory *vdp_mem;

  info = &vdppool->info;

  if (!(buf = gst_buffer_new ()))
    goto no_buffer;

  if (!(vdp_mem = gst_vdp_video_memory_alloc (vdppool->device, info)))
    goto mem_create_failed;

  gst_buffer_append_memory (buf, vdp_mem);

  if (vdppool->add_videometa) {
    GstVideoMeta *vmeta;

    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");

    /* these are just the defaults for now */
    vmeta = gst_buffer_add_video_meta (buf, 0, GST_VIDEO_INFO_FORMAT (info),
        GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info));
    vmeta->map = gst_vdp_video_memory_map;
    vmeta->unmap = gst_vdp_video_memory_unmap;
  }

  *buffer = buf;

  return GST_FLOW_OK;

  /* ERROR */
no_buffer:
  {
    GST_WARNING_OBJECT (pool, "can't create image");
    return GST_FLOW_ERROR;
  }
mem_create_failed:
  {
    /* FIX: message previously read "Could create" */
    GST_WARNING_OBJECT (pool, "Could not create GstVdpVideoMemory");
    /* FIX: don't leak the buffer allocated above */
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
}
/* gst_vdp_video_buffer_pool_new:
 * @device: the #GstVdpDevice surfaces will be allocated from
 *
 * Creates a buffer pool producing buffers backed by VDPAU video
 * surfaces. Takes a reference on @device, released at finalize.
 *
 * Returns: a new #GstBufferPool
 */
GstBufferPool *
gst_vdp_video_buffer_pool_new (GstVdpDevice * device)
{
  GstVdpVideoBufferPool *vdppool =
      g_object_new (GST_TYPE_VDP_VIDEO_BUFFER_POOL, NULL);

  vdppool->device = gst_object_ref (device);

  GST_LOG_OBJECT (vdppool, "new VdpVideo buffer pool %p", vdppool);

  return GST_BUFFER_POOL_CAST (vdppool);
}
/* Class init: wires up the GObject finalize and the bufferpool vfuncs. */
static void
gst_vdp_video_buffer_pool_class_init (GstVdpVideoBufferPoolClass * klass)
{
  GObjectClass *object_class = (GObjectClass *) klass;
  GstBufferPoolClass *pool_class = (GstBufferPoolClass *) klass;

  object_class->finalize = gst_vdp_video_buffer_pool_finalize;

  pool_class->get_options = gst_vdp_video_buffer_pool_get_options;
  pool_class->set_config = gst_vdp_video_buffer_pool_set_config;
  pool_class->alloc_buffer = gst_vdp_video_buffer_pool_alloc;
}
/* Instance init: nothing to do here -- all members start zeroed by
 * GObject and the device is assigned in gst_vdp_video_buffer_pool_new(). */
static void
gst_vdp_video_buffer_pool_init (GstVdpVideoBufferPool * pool)
{
}
/* GObject finalize: releases the device ref taken in _new() and chains
 * up to the parent class. */
static void
gst_vdp_video_buffer_pool_finalize (GObject * object)
{
  GstVdpVideoBufferPool *vdppool = GST_VDP_VIDEO_BUFFER_POOL_CAST (object);

  GST_LOG_OBJECT (vdppool, "finalize VdpVideo buffer pool %p", vdppool);

  gst_object_unref (vdppool->device);

  G_OBJECT_CLASS (gst_vdp_video_buffer_pool_parent_class)->finalize (object);
}

View file

@ -0,0 +1,97 @@
/*
* gst-plugins-bad
* Copyright (C) Carl-Anton Ingmarsson 2010 <ca.ingmarsson@gmail.com>
* 2012 Edward Hervey <edward@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_VDP_VIDEO_BUFFERPOOL_H_
#define _GST_VDP_VIDEO_BUFFERPOOL_H_
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>
#include <gst/video/gstvideopool.h>
#include "gstvdpdevice.h"
G_BEGIN_DECLS
#define GST_VDPAU_SURFACE_META_GET(buf) ((GstVdpauMeta *)gst_buffer_get_meta(buf,gst_vdpau_surface_meta_api_get_type()))
#define GST_VDPAU_SURFACE_META_ADD(buf) ((GstVdpauMeta *)gst_buffer_add_meta(buf,gst_vdpau_surface_meta_get_info(),NULL))
struct _GstVdpauSurfaceMeta {
GstMeta meta;
GstVdpDevice *device;
VdpVideoSurface surface;
};
GType gst_vdpau_surface_meta_api_get_type (void);
const GstMetaInfo * gst_vdpau_surface_meta_get_info (void);
/**
* GST_BUFFER_POOL_OPTION_VDP_VIDEO_META:
*
* An option that can be activated on bufferpool to request VdpVideo metadata
* on buffers from the pool.
*/
#define GST_BUFFER_POOL_OPTION_VDP_VIDEO_META "GstBufferPoolOptionVdpVideoMeta"
typedef struct _GstVdpVideoBufferPool GstVdpVideoBufferPool;
typedef struct _GstVdpVideoBufferPoolClass GstVdpVideoBufferPoolClass;
/* buffer pool functions */
#define GST_TYPE_VDP_VIDEO_BUFFER_POOL (gst_vdp_video_buffer_pool_get_type())
#define GST_IS_VDP_VIDEO_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_VIDEO_BUFFER_POOL))
#define GST_VDP_VIDEO_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_VIDEO_BUFFER_POOL, GstVdpVideoBufferPool))
#define GST_VDP_VIDEO_BUFFER_POOL_CAST(obj) ((GstVdpVideoBufferPool*)(obj))
struct _GstVdpVideoBufferPool
{
GstBufferPool bufferpool;
GstVdpDevice *device;
GstVideoInfo info;
VdpChromaType chroma_type;
gboolean add_videometa;
gboolean add_vdpmeta;
};
struct _GstVdpVideoBufferPoolClass
{
GstBufferPoolClass parent_class;
};
GType gst_vdp_video_buffer_pool_get_type (void);
GstBufferPool *gst_vdp_video_buffer_pool_new (GstVdpDevice *device);
GstCaps *gst_vdp_video_buffer_get_caps (gboolean filter, VdpChromaType chroma_type);
#if 0
GstCaps *gst_vdp_video_buffer_get_allowed_caps (GstVdpDevice * device);
gboolean gst_vdp_video_buffer_calculate_size (guint32 fourcc, gint width, gint height, guint *size);
/* FIXME : Replace with map/unmap */
gboolean gst_vdp_video_buffer_download (GstVdpVideoBuffer *inbuf, GstBuffer *outbuf, guint32 fourcc, gint width, gint height);
gboolean gst_vdp_video_buffer_upload (GstVdpVideoBuffer *video_buf, GstBuffer *src_buf, guint fourcc, gint width, gint height);
#endif
G_END_DECLS
#endif /* _GST_VDP_VIDEO_BUFFER_POOL_H_ */

View file

@ -0,0 +1,327 @@
/*
* GStreamer
* Copyright (C) 2012 Edward Hervey <edward@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/video/video.h>
#include "gstvdpvideomemory.h"
#include "gstvdputils.h"
GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
GST_DEBUG_CATEGORY_STATIC (gst_vdp_video_mem_debug);
#define GST_CAT_DEFAULT gst_vdp_video_mem_debug
static GstAllocator *_vdp_video_allocator;
/* _vdp_video_mem_init:
 * Initializes the base GstMemory fields and caches the VDPAU chroma type
 * and YCbCr format derived from @info.
 *
 * NOTE(review): @info is stored by pointer, not copied -- the caller must
 * guarantee it outlives this memory (the buffer pool keeps its
 * GstVideoInfo alive); confirm before reusing this helper elsewhere.
 */
static void
_vdp_video_mem_init (GstVdpVideoMemory * mem, GstAllocator * allocator,
    GstMemory * parent, GstVdpDevice * device, GstVideoInfo * info)
{
  gst_memory_init (GST_MEMORY_CAST (mem), GST_MEMORY_FLAG_NO_SHARE,
      allocator, parent, GST_VIDEO_INFO_SIZE (info), 0, 0,
      GST_VIDEO_INFO_SIZE (info));

  /* This ref is released in _vdp_video_mem_free() */
  mem->device = gst_object_ref (device);
  mem->info = info;
  mem->chroma_type = gst_video_info_to_vdp_chroma_type (info);
  mem->ycbcr_format =
      gst_video_format_to_vdp_ycbcr (GST_VIDEO_INFO_FORMAT (info));
  /* No mapping outstanding yet */
  mem->refcount = 0;

  GST_DEBUG ("new VdpVideo memory");
}
/* _vdp_video_mem_new:
 * Allocates a #GstVdpVideoMemory and creates the backing VdpVideoSurface
 * on @device for the format described by @info.
 *
 * Returns: the new memory, or %NULL if the surface could not be created.
 */
static GstVdpVideoMemory *
_vdp_video_mem_new (GstAllocator * allocator, GstMemory * parent,
    GstVdpDevice * device, GstVideoInfo * info)
{
  VdpStatus status;
  GstVdpVideoMemory *mem;
  VdpVideoSurface surface;

  mem = g_slice_new0 (GstVdpVideoMemory);
  _vdp_video_mem_init (mem, allocator, parent, device, info);

  GST_TRACE
      ("Calling VdpVideoSurfaceCreate(chroma_type:%d, width:%d, height:%d)",
      mem->chroma_type, mem->info->width, mem->info->height);

  status =
      device->vdp_video_surface_create (device->device, mem->chroma_type,
      mem->info->width, mem->info->height, &surface);
  if (status != VDP_STATUS_OK)
    goto create_error;

  /* device->vdp_video_surface_get_parameters (device->device, &chroma_type, */
  /*     &width, &height); */

  GST_TRACE ("created surface %u", surface);

  mem->surface = surface;

  return mem;

  /* ERRORS */
create_error:
  {
    GST_ERROR ("Failed to create video surface: %s",
        device->vdp_get_error_string (status));
    /* FIX: _vdp_video_mem_init() took a ref on the device; drop it before
     * freeing the half-initialized memory to avoid leaking the device. */
    gst_object_unref (mem->device);
    g_slice_free (GstVdpVideoMemory, mem);
    return NULL;
  }
}
/* ensure_data:
 * Downloads the surface contents into a malloc'ed cache the first time a
 * mapping is requested; nested mappings only bump the refcount.
 *
 * Returns: %TRUE when the cached data is available.
 */
static gboolean
ensure_data (GstVdpVideoMemory * vmem)
{
  VdpStatus vdp_stat;
  GstVideoInfo *info = vmem->info;
  GstClockTime before, after;

  /* FIX: g_atomic_int_add() returns the *previous* value, so the cache
   * already exists whenever that value is > 0. The former "> 1" test made
   * a second mapping re-download the surface and leak the first cache
   * allocation. */
  if (g_atomic_int_add (&vmem->refcount, 1) > 0)
    return TRUE;

  /* Allocate enough room to store data */
  vmem->cache = g_malloc (GST_VIDEO_INFO_SIZE (info));
  vmem->cached_data[0] = vmem->cache;
  vmem->cached_data[1] = vmem->cache + GST_VIDEO_INFO_PLANE_OFFSET (info, 1);
  vmem->cached_data[2] = vmem->cache + GST_VIDEO_INFO_PLANE_OFFSET (info, 2);
  vmem->destination_pitches[0] = GST_VIDEO_INFO_PLANE_STRIDE (info, 0);
  vmem->destination_pitches[1] = GST_VIDEO_INFO_PLANE_STRIDE (info, 1);
  vmem->destination_pitches[2] = GST_VIDEO_INFO_PLANE_STRIDE (info, 2);

  GST_DEBUG ("cached_data %p %p %p",
      vmem->cached_data[0], vmem->cached_data[1], vmem->cached_data[2]);
  GST_DEBUG ("pitches %d %d %d",
      vmem->destination_pitches[0],
      vmem->destination_pitches[1], vmem->destination_pitches[2]);

  before = gst_util_get_timestamp ();
  vdp_stat =
      vmem->device->vdp_video_surface_get_bits_ycbcr (vmem->surface,
      vmem->ycbcr_format, vmem->cached_data, vmem->destination_pitches);
  after = gst_util_get_timestamp ();

  GST_CAT_WARNING (GST_CAT_PERFORMANCE, "Downloading took %" GST_TIME_FORMAT,
      GST_TIME_ARGS (after - before));

  if (vdp_stat != VDP_STATUS_OK) {
    GST_ERROR ("Failed to get bits : %s",
        vmem->device->vdp_get_error_string (vdp_stat));
    g_free (vmem->cache);
    vmem->cache = NULL;
    /* FIX: undo the refcount increment taken above so a later attempt
     * retries the download instead of returning stale state */
    (void) g_atomic_int_add (&vmem->refcount, -1);
    return FALSE;
  }

  return TRUE;
}
/* release_data:
 * Drops one mapping reference; frees the download cache when the last
 * mapping goes away.
 */
static void
release_data (GstVdpVideoMemory * vmem)
{
  g_return_if_fail (vmem->refcount > 0);

  if (g_atomic_int_dec_and_test (&vmem->refcount)) {
    g_free (vmem->cache);
    /* FIX: clear the pointer so _vdp_video_mem_free() does not free the
     * same cache a second time after the last unmap (and so a future
     * ensure_data() sees a clean state). */
    vmem->cache = NULL;
  }
}
/* GstAllocator map vfunc: triggers the surface download (if needed) and
 * returns a pointer to the cached system-memory copy. */
static gpointer
_vdp_video_mem_map (GstVdpVideoMemory * vmem, gsize maxsize, GstMapFlags flags)
{
  /* FIX: maxsize is a gsize; "%d" is the wrong conversion on LP64, use
   * G_GSIZE_FORMAT instead */
  GST_DEBUG ("surface:%d, maxsize:%" G_GSIZE_FORMAT ", flags:%d",
      vmem->surface, maxsize, flags);

  if (!ensure_data (vmem))
    return NULL;

  return vmem->cache;
}
/* GstAllocator unmap vfunc: drops the mapping reference taken by
 * _vdp_video_mem_map() via ensure_data(). */
static void
_vdp_video_mem_unmap (GstVdpVideoMemory * vmem)
{
  GST_DEBUG ("surface:%d", vmem->surface);

  release_data (vmem);
}
/* GstAllocator copy vfunc -- not implemented yet, always returns NULL. */
static GstMemory *
_vdp_video_mem_copy (GstVdpVideoMemory * src, gssize offset, gssize size)
{
  GST_FIXME ("Implement !");
  return NULL;
}
/* GstAllocator share vfunc -- not implemented yet, always returns NULL. */
static GstMemory *
_vdp_video_mem_share (GstVdpVideoMemory * mem, gssize offset, gssize size)
{
  GST_FIXME ("Implement !");
  return NULL;
}
/* GstAllocator is_span vfunc: spanning two VDPAU surfaces is never
 * possible, so always report FALSE. */
static gboolean
_vdp_video_mem_is_span (GstVdpVideoMemory * mem1, GstVdpVideoMemory * mem2,
    gsize * offset)
{
  return FALSE;
}
/* GstAllocator alloc vfunc: generic size-based allocation is not
 * supported; surfaces must be created via gst_vdp_video_memory_alloc(),
 * which needs a device and a GstVideoInfo. */
static GstMemory *
_vdp_video_mem_alloc (GstAllocator * allocator, gsize size,
    GstAllocationParams * params)
{
  g_warning ("use gst_vdp_video_memory_alloc () to allocate from this "
      "GstVdpVideoMemory allocator");

  return NULL;
}
/* _vdp_video_mem_free:
 * GstAllocator free vfunc: destroys the VdpVideoSurface, drops the device
 * ref taken at init time and releases the slice plus any leftover
 * download cache.
 */
static void
_vdp_video_mem_free (GstAllocator * allocator, GstMemory * mem)
{
  GstVdpVideoMemory *vmem = (GstVdpVideoMemory *) mem;
  VdpStatus status;

  GST_DEBUG ("Destroying surface %d", vmem->surface);
  status = vmem->device->vdp_video_surface_destroy (vmem->surface);
  if (status != VDP_STATUS_OK)
    GST_ERROR ("Couldn't destroy the VdpVideoSurface: %s",
        vmem->device->vdp_get_error_string (status));

  gst_object_unref (vmem->device);

  /* FIX: g_free() is NULL-safe, the "if (vmem->cache)" guard was
   * redundant */
  g_free (vmem->cache);

  g_slice_free (GstVdpVideoMemory, vmem);
}
/**
 * gst_vdp_video_memory_alloc:
 * @device: a #GstVdpDevice
 * @info: the #GstVideoInfo describing the format to use
 *
 * Returns: a GstMemory object with a VdpVideoSurface specified by @info
 * from @device, or %NULL if the surface could not be created
 */
GstMemory *
gst_vdp_video_memory_alloc (GstVdpDevice * device, GstVideoInfo * info)
{
  return (GstMemory *) _vdp_video_mem_new (_vdp_video_allocator, NULL, device,
      info);
}
G_DEFINE_TYPE (GstVdpVideoAllocator, gst_vdp_video_allocator,
GST_TYPE_ALLOCATOR);
/* Class init: installs the mandatory alloc/free vfuncs on the base
 * allocator class. */
static void
gst_vdp_video_allocator_class_init (GstVdpVideoAllocatorClass * klass)
{
  GstAllocatorClass *allocator_class = (GstAllocatorClass *) klass;

  allocator_class->alloc = _vdp_video_mem_alloc;
  allocator_class->free = _vdp_video_mem_free;
}
/* Instance init: sets the memory type string and the per-memory function
 * pointers on the base allocator. */
static void
gst_vdp_video_allocator_init (GstVdpVideoAllocator * allocator)
{
  GstAllocator *alloc = GST_ALLOCATOR_CAST (allocator);

  alloc->mem_type = GST_VDP_VIDEO_MEMORY_ALLOCATOR;
  alloc->mem_map = (GstMemoryMapFunction) _vdp_video_mem_map;
  alloc->mem_unmap = (GstMemoryUnmapFunction) _vdp_video_mem_unmap;
  alloc->mem_copy = (GstMemoryCopyFunction) _vdp_video_mem_copy;
  alloc->mem_share = (GstMemoryShareFunction) _vdp_video_mem_share;
  alloc->mem_is_span = (GstMemoryIsSpanFunction) _vdp_video_mem_is_span;
}
/**
 * gst_vdp_video_memory_init:
 *
 * Initializes the VDPAU video memory allocator. (The previous comment
 * said "GL Memory" -- copy/paste from the GL allocator.) It is safe to
 * call this function multiple times. This must be called before any other
 * GstVdpVideoMemory operation.
 */
void
gst_vdp_video_memory_init (void)
{
  static volatile gsize _init = 0;

  if (g_once_init_enter (&_init)) {
    /* Keep our own ref; the registry takes an additional one */
    _vdp_video_allocator =
        g_object_new (gst_vdp_video_allocator_get_type (), NULL);
    gst_allocator_register (GST_VDP_VIDEO_MEMORY_ALLOCATOR,
        gst_object_ref (_vdp_video_allocator));
    GST_DEBUG_CATEGORY_INIT (gst_vdp_video_mem_debug, "vdpvideomem", 0,
        "VDPAU VideoSurface Memory/Allocator");
    GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
    g_once_init_leave (&_init, 1);
  }
}
/* gst_vdp_video_memory_map:
 * GstVideoMeta map vfunc: downloads the surface into the cache (if not
 * already done) and returns the plane pointer and pitch for @plane.
 */
gboolean
gst_vdp_video_memory_map (GstVideoMeta * meta, guint plane, GstMapInfo * info,
    gpointer * data, gint * stride, GstMapFlags flags)
{
  GstBuffer *buffer = meta->buffer;
  GstVdpVideoMemory *vmem =
      (GstVdpVideoMemory *) gst_buffer_get_memory (buffer, 0);

  /* Only handle GstVdpVideoMemory */
  g_return_val_if_fail (((GstMemory *) vmem)->allocator == _vdp_video_allocator,
      FALSE);

  GST_DEBUG ("plane:%d", plane);

  /* FIX: gst_buffer_get_memory() returns a new reference that was never
   * released; drop it here -- the buffer keeps the memory alive for the
   * whole mapping. */
  gst_memory_unref ((GstMemory *) vmem);

  /* download if not already done */
  if (!ensure_data (vmem))
    return FALSE;

  *data = vmem->cached_data[plane];
  *stride = vmem->destination_pitches[plane];

  return TRUE;
}
/* gst_vdp_video_memory_unmap:
 * GstVideoMeta unmap vfunc: releases one mapping reference on the
 * underlying #GstVdpVideoMemory.
 */
gboolean
gst_vdp_video_memory_unmap (GstVideoMeta * meta, guint plane, GstMapInfo * info)
{
  GstVdpVideoMemory *vmem =
      (GstVdpVideoMemory *) gst_buffer_get_memory (meta->buffer, 0);

  GST_DEBUG ("plane:%d", plane);

  GST_FIXME ("implement unmap (and potential upload on last unmap)");

  /* FIX: drop the extra reference returned by gst_buffer_get_memory();
   * the buffer itself still holds the memory. */
  gst_memory_unref ((GstMemory *) vmem);

  release_data (vmem);

  return TRUE;
}

View file

@ -0,0 +1,101 @@
/*
* GStreamer
* Copyright (C) 2012 Edward Hervey <edward@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_VDP_VIDEO_MEMORY_H_
#define _GST_VDP_VIDEO_MEMORY_H_
#include <gst/gst.h>
#include <gst/gstmemory.h>
#include <gst/gstallocator.h>
#include <gst/video/video-info.h>
#include <gst/video/gstvideometa.h>
#include "gstvdpdevice.h"
G_BEGIN_DECLS
#define GST_TYPE_VDP_VIDEO_ALLOCATOR (gst_vdp_video_allocator_get_type())
GType gst_vdp_video_allocator_get_type(void);
#define GST_IS_VDP_VIDEO_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_VIDEO_ALLOCATOR))
#define GST_IS_VDP_VIDEO_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VDP_VIDEO_ALLOCATOR))
#define GST_VDP_VIDEO_ALLOCATOR_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_VIDEO_ALLOCATOR, GstVdpVideoAllocatorClass))
#define GST_VDP_VIDEO_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_VIDEO_ALLOCATOR, GstVdpVideoAllocator))
#define GST_VDP_VIDEO_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VDP_VIDEO_ALLOCATOR, GstVdpVideoAllocatorClass))
#define GST_VDP_VIDEO_ALLOCATOR_CAST(obj) ((GstVdpVideoAllocator *)(obj))
typedef struct _GstVdpVideoMemory GstVdpVideoMemory;
typedef struct _GstVdpVideoAllocator GstVdpVideoAllocator;
typedef struct _GstVdpVideoAllocatorClass GstVdpVideoAllocatorClass;
/**
 * GstVdpVideoMemory:
 * @mem: the parent object
 * @device: the #GstVdpDevice to use
 * @surface: the #VdpVideoSurface
 *
 * Represents information about a #VdpVideoSurface
 */
struct _GstVdpVideoMemory
{
  GstMemory mem;

  GstVdpDevice *device;
  VdpVideoSurface surface;

  GstVideoInfo *info;           /* not owned -- set at init time, must outlive us */
  VdpChromaType chroma_type;    /* derived from @info */
  VdpYCbCrFormat ycbcr_format;  /* derived from @info */

  /* Cached data for mapping */
  volatile gint refcount;       /* number of outstanding mappings */
  GstMapFlags map_flags;
  guint n_planes;
  guint8 *cache;                /* lazily-downloaded surface contents */
  void * cached_data[4];        /* per-plane pointers into @cache */
  uint32_t destination_pitches[4];  /* per-plane strides used for download */
};
#define GST_VDP_VIDEO_MEMORY_ALLOCATOR "VdpVideoMemory"
void gst_vdp_video_memory_init (void);
GstMemory *
gst_vdp_video_memory_alloc (GstVdpDevice * device, GstVideoInfo *info);
gboolean gst_vdp_video_memory_map(GstVideoMeta * meta, guint plane,
GstMapInfo * info, gpointer * data,
gint * stride, GstMapFlags flags);
gboolean gst_vdp_video_memory_unmap(GstVideoMeta * meta, guint plane,
GstMapInfo * info);
struct _GstVdpVideoAllocator
{
GstAllocator parent;
};
struct _GstVdpVideoAllocatorClass
{
GstAllocatorClass parent_class;
};
G_END_DECLS
#endif /* _GST_VDP_VIDEO_MEMORY_H_ */

View file

@ -43,11 +43,8 @@
#endif
#include <gst/gst.h>
#include <gst/video/gstvideosink.h>
#include "gstvdp/gstvdputils.h"
#include "gstvdp/gstvdpoutputbuffer.h"
#include "gstvdp/gstvdpoutputsrcpad.h"
#include "gstvdpoutputbuffer.h"
#include "gstvdpvideopostprocess.h"
@ -73,11 +70,7 @@ enum
PROP_INVERSE_TELECINE
};
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdp_vpp_debug, "vdpauvideopostprocess", 0, "VDPAU video surface to output surface");
GST_BOILERPLATE_FULL (GstVdpVideoPostProcess, gst_vdp_vpp,
GstElement, GST_TYPE_ELEMENT, DEBUG_INIT);
G_DEFINE_TYPE (GstVdpVideoPostProcess, gst_vdp_vpp, GST_TYPE_ELEMENT);
static void gst_vdp_vpp_finalize (GObject * object);
@ -1172,38 +1165,17 @@ gst_vdp_vpp_set_property (GObject * object, guint property_id,
/* GType vmethod implementations */
static void
gst_vdp_vpp_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
GstCaps *src_caps, *sink_caps;
GstPadTemplate *src_template, *sink_template;
gst_element_class_set_static_metadata (element_class,
"VdpauVideoPostProcess",
"Filter/Converter/Decoder/Video",
"Post process GstVdpVideoBuffers and output GstVdpOutputBuffers",
"Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
/* SRC PAD */
src_caps = gst_vdp_output_buffer_get_template_caps ();
src_template = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
src_caps);
gst_element_class_add_pad_template (element_class, src_template);
/* SINK PAD */
sink_caps = gst_vdp_video_buffer_get_caps (FALSE, 0);
sink_template = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
sink_caps);
gst_element_class_add_pad_template (element_class, sink_template);
}
/* initialize the vdpaumpegdecoder's class */
static void
gst_vdp_vpp_class_init (GstVdpVideoPostProcessClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstCaps *src_caps, *sink_caps;
GstPadTemplate *src_template, *sink_template;
GST_DEBUG_CATEGORY_INIT (gst_vdp_vpp_debug, "vdpauvideopostprocess", 0,
"VDPAU video surface to output surface");
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
@ -1249,12 +1221,29 @@ gst_vdp_vpp_class_init (GstVdpVideoPostProcessClass * klass)
"Whether inverse telecine should be used", FALSE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_set_metadata (gstelement_class,
"VdpauVideoPostProcess",
"Filter/Converter/Decoder/Video",
"Post process GstVdpVideoBuffers and output GstVdpOutputBuffers",
"Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
gstelement_class->change_state = gst_vdp_vpp_change_state;
/* SRC PAD */
src_caps = gst_vdp_output_buffer_get_template_caps ();
src_template = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
src_caps);
gst_element_class_add_pad_template (gstelement_class, src_template);
/* SINK PAD */
sink_caps = gst_vdp_video_buffer_get_caps (FALSE, 0);
sink_template = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
sink_caps);
gst_element_class_add_pad_template (gstelement_class, sink_template);
}
static void
gst_vdp_vpp_init (GstVdpVideoPostProcess * vpp,
GstVdpVideoPostProcessClass * gclass)
gst_vdp_vpp_init (GstVdpVideoPostProcess * vpp)
{
GstPadTemplate *src_template, *sink_template;

View file

@ -23,9 +23,8 @@
#include <gst/gst.h>
#include "gstvdp/gstvdpdevice.h"
#include "gstvdp/gstvdpvideobuffer.h"
#include "gstvdp/gstvdpvideobufferpool.h"
#include "gstvdpdevice.h"
#include "gstvdpvideobufferpool.h"
G_BEGIN_DECLS
@ -35,7 +34,7 @@ typedef struct _GstVdpPicture GstVdpPicture;
struct _GstVdpPicture
{
GstVdpVideoBuffer *buf;
GstBuffer *buf;
VdpVideoMixerPictureStructure structure;
GstClockTime timestamp;
};
@ -73,7 +72,7 @@ struct _GstVdpVideoPostProcess
VdpChromaType chroma_type;
gint width, height;
guint32 fourcc;
GstVdpBufferPool *vpool;
GstBufferPool *vpool;
gboolean got_par;
gint par_n, par_d;
@ -114,4 +113,4 @@ GType gst_vdp_vpp_get_type (void);
G_END_DECLS
#endif /* __GST_VDP_VIDEO_POST_PROCESS_H__ */
#endif /* __GST_VDP_VIDEO_POST_PROCESS_H__ */

View file

@ -19,6 +19,7 @@
*/
#include "gsth264dpb.h"
#include "gstvdpvideomemory.h"
/* Properties */
enum
@ -32,8 +33,8 @@ GST_DEBUG_CATEGORY_STATIC (h264dpb_debug);
#define GST_CAT_DEFAULT h264dpb_debug
#define DEBUG_INIT \
GST_DEBUG_CATEGORY_INIT (h264dpb_debug, "h264dpb", 0, \
"H264 DPB");
GST_DEBUG_CATEGORY_INIT (h264dpb_debug, "vdph264dpb", 0, \
"VDPAU H264 DPB");
G_DEFINE_TYPE_WITH_CODE (GstH264DPB, gst_h264_dpb, G_TYPE_OBJECT, DEBUG_INIT);
@ -47,10 +48,11 @@ gst_h264_dpb_fill_reference_frames (GstH264DPB * dpb,
frames = dpb->frames;
for (i = 0; i < dpb->n_frames; i++) {
GstH264Frame *frame = frames[i];
GstVdpVideoMemory *vmem =
(GstVdpVideoMemory *) gst_buffer_get_memory (frame->frame->
output_buffer, 0);
reference_frames[i].surface =
GST_VDP_VIDEO_BUFFER (GST_VIDEO_FRAME_CAST (frame)->src_buffer)->
surface;
reference_frames[i].surface = vmem->surface;
reference_frames[i].is_long_term = frame->is_long_term;
reference_frames[i].top_is_reference = frame->is_reference;
@ -62,8 +64,12 @@ gst_h264_dpb_fill_reference_frames (GstH264DPB * dpb,
for (i = dpb->n_frames; i < 16; i++) {
reference_frames[i].surface = VDP_INVALID_HANDLE;
reference_frames[i].is_long_term = FALSE;
reference_frames[i].top_is_reference = VDP_FALSE;
reference_frames[i].bottom_is_reference = VDP_FALSE;
reference_frames[i].field_order_cnt[0] = 0;
reference_frames[i].field_order_cnt[1] = 0;
reference_frames[i].frame_idx = 0;
}
}
@ -74,7 +80,7 @@ gst_h264_dpb_remove (GstH264DPB * dpb, guint idx)
guint i;
frames = dpb->frames;
gst_video_frame_unref (GST_VIDEO_FRAME_CAST (frames[idx]));
gst_video_codec_frame_unref (frames[idx]->frame);
dpb->n_frames--;
for (i = idx; i < dpb->n_frames; i++)
@ -87,7 +93,7 @@ gst_h264_dpb_output (GstH264DPB * dpb, guint idx)
GstFlowReturn ret;
GstH264Frame *frame = dpb->frames[idx];
gst_video_frame_ref (GST_VIDEO_FRAME_CAST (frame));
gst_video_codec_frame_ref (frame->frame);
ret = dpb->output (dpb, frame, dpb->user_data);
frame->output_needed = FALSE;
@ -132,7 +138,7 @@ gst_h264_dpb_bump (GstH264DPB * dpb, guint poc, GstFlowReturn * ret)
GstFlowReturn
gst_h264_dpb_add (GstH264DPB * dpb, GstH264Frame * h264_frame)
{
GstFlowReturn ret;
GstFlowReturn ret = GST_FLOW_OK;
GST_DEBUG ("add frame with poc: %d", h264_frame->poc);
@ -141,18 +147,13 @@ gst_h264_dpb_add (GstH264DPB * dpb, GstH264Frame * h264_frame)
h264_frame->is_reference = FALSE;
if (h264_frame->is_reference) {
ret = GST_FLOW_OK;
while (dpb->n_frames == dpb->max_frames) {
if (!gst_h264_dpb_bump (dpb, G_MAXUINT, &ret)) {
GST_ERROR_OBJECT (dpb, "Couldn't make room in DPB");
return GST_FLOW_OK;
}
if (!gst_h264_dpb_bump (dpb, G_MAXUINT, &ret))
goto no_room;
}
GST_DEBUG ("Storing frame in slot %d", dpb->n_frames);
dpb->frames[dpb->n_frames++] = h264_frame;
}
else {
} else {
while (gst_h264_dpb_bump (dpb, h264_frame->poc, &ret)) {
if (ret != GST_FLOW_OK)
return ret;
@ -162,13 +163,19 @@ gst_h264_dpb_add (GstH264DPB * dpb, GstH264Frame * h264_frame)
}
return ret;
/* ERRORS */
no_room:
{
GST_ERROR_OBJECT (dpb, "Couldn't make room in DPB");
return GST_FLOW_OK;
}
}
void
gst_h264_dpb_flush (GstH264DPB * dpb, gboolean output)
{
GstFlowReturn ret;
GstVideoFrame **frames;
guint i;
GST_DEBUG ("flush");
@ -176,9 +183,8 @@ gst_h264_dpb_flush (GstH264DPB * dpb, gboolean output)
if (output)
while (gst_h264_dpb_bump (dpb, G_MAXUINT, &ret));
frames = (GstVideoFrame **) dpb->frames;
for (i = 0; i < dpb->n_frames; i++)
gst_video_frame_unref (frames[i]);
gst_video_codec_frame_unref (dpb->frames[i]->frame);
dpb->n_frames = 0;
@ -383,12 +389,10 @@ static void
gst_h264_dpb_finalize (GObject * object)
{
GstH264DPB *dpb = GST_H264_DPB (object);
GstVideoFrame **frames;
guint i;
frames = (GstVideoFrame **) dpb->frames;
for (i = 0; i < dpb->n_frames; i++)
gst_video_frame_unref (frames[i]);
gst_video_codec_frame_unref (dpb->frames[i]->frame);
G_OBJECT_CLASS (gst_h264_dpb_parent_class)->finalize (object);
}

View file

@ -22,16 +22,15 @@
#define _GST_H264_DPB_H_
#include <glib-object.h>
#include <vdpau/vdpau.h>
#include "../gstvdp/gstvdpvideobuffer.h"
#include "gsth264frame.h"
#include <gst/video/video.h>
#include <gst/codecparsers/gsth264meta.h>
G_BEGIN_DECLS
#define MAX_DPB_SIZE 16
#define GST_TYPE_H264_DPB (gst_h264_dpb_get_type ())
#define GST_H264_DPB(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_H264_DPB, GstH264DPB))
#define GST_H264_DPB_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_H264_DPB, GstH264DPBClass))
@ -42,21 +41,33 @@ G_BEGIN_DECLS
typedef struct _GstH264DPB GstH264DPB;
typedef struct _GstH264DPBClass GstH264DPBClass;
typedef struct _GstH264Frame
{
GstVideoCodecFrame *frame;
guint poc;
guint16 frame_idx;
gboolean is_reference;
gboolean is_long_term;
gboolean output_needed;
} GstH264Frame;
typedef GstFlowReturn (*GstH264DPBOutputFunc) (GstH264DPB *dpb, GstH264Frame *h264_frame, gpointer user_data);
struct _GstH264DPB
{
GObject parent_instance;
/* private */
/* private */
GstH264Frame *frames[MAX_DPB_SIZE];
guint n_frames;
guint max_frames;
gint max_longterm_frame_idx;
GstH264DPBOutputFunc output;
gpointer user_data;
GstH264DPBOutputFunc output;
gpointer user_data;
};
struct _GstH264DPBClass

View file

@ -1,105 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gsth264frame.h"
GST_DEBUG_CATEGORY_STATIC (gst_h264_frame_debug);
#define GST_CAT_DEFAULT gst_h264_frame_debug
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_h264_frame_debug, "gsth264frame", 0, "H264 Frame");
/**
 * gst_h264_frame_add_slice:
 * @h264_frame: frame collecting the slice NALs of one coded picture
 * @buf: slice data; an extra reference is taken on it
 *
 * Appends @buf to the frame's slice array.  The reference taken here is
 * released again when the frame is finalized.
 */
void
gst_h264_frame_add_slice (GstH264Frame * h264_frame, GstBuffer * buf)
{
  g_ptr_array_add (h264_frame->slices, gst_buffer_ref (buf));
}
/**
 * gst_h264_frame_new:
 *
 * Creates a fresh #GstH264Frame via the GstMiniObject machinery; the
 * instance-init hook allocates the empty slice array.
 *
 * Returns: a newly allocated #GstH264Frame
 */
GstH264Frame *
gst_h264_frame_new (void)
{
  return (GstH264Frame *) gst_mini_object_new (GST_TYPE_H264_FRAME);
}
/* Parent class pointer, resolved once in class_init and used to chain up. */
static GObjectClass *gst_h264_frame_parent_class;

/* Finalizer: drop the per-slice references taken in
 * gst_h264_frame_add_slice(), release the array itself, then chain up to
 * the GstMiniObject finalizer. */
static void
gst_h264_frame_finalize (GstH264Frame * h264_frame)
{
  g_ptr_array_foreach (h264_frame->slices, (GFunc) gst_buffer_unref, NULL);
  g_ptr_array_unref (h264_frame->slices);
  GST_MINI_OBJECT_CLASS (gst_h264_frame_parent_class)->finalize
      (GST_MINI_OBJECT (h264_frame));
}
/* Instance init: start each frame with an empty slice array; entries are
 * added by gst_h264_frame_add_slice(). */
static void
gst_h264_frame_init (GstH264Frame * h264_frame, gpointer g_class)
{
  h264_frame->slices = g_ptr_array_new ();
}
/* Class init: remember the parent class for chain-up and install our
 * mini-object finalizer. */
static void
gst_h264_frame_class_init (gpointer g_class, gpointer class_data)
{
  GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);

  gst_h264_frame_parent_class = g_type_class_peek_parent (g_class);
  mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
      gst_h264_frame_finalize;
}
/* Manual GType registration for GstH264Frame, derived from
 * GST_TYPE_VIDEO_FRAME.
 * NOTE(review): the check-and-register is not guarded by
 * g_once_init_enter(), so the first call must not race with another
 * thread — confirm all callers go through single-threaded plugin init. */
GType
gst_h264_frame_get_type (void)
{
  static GType _gst_h264_frame_type = 0;

  if (G_UNLIKELY (_gst_h264_frame_type == 0)) {
    static const GTypeInfo info = {
      sizeof (GstH264FrameClass),
      NULL,
      NULL,
      gst_h264_frame_class_init,
      NULL,
      NULL,
      sizeof (GstH264Frame),
      0,
      (GInstanceInitFunc) gst_h264_frame_init,
      NULL
    };
    _gst_h264_frame_type = g_type_register_static (GST_TYPE_VIDEO_FRAME,
        "GstH264Frame", &info, 0);
    /* Set up the debug category the first (and only) time we register. */
    DEBUG_INIT ();
  }
  return _gst_h264_frame_type;
}

View file

@ -1,65 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_H264_FRAME_H_
#define _GST_H264_FRAME_H_
#include <gst/gst.h>
#include "../basevideodecoder/gstvideoframe.h"
#include "gsth264parser.h"
#define GST_TYPE_H264_FRAME (gst_h264_frame_get_type())
#define GST_IS_H264_FRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_H264_FRAME))
#define GST_H264_FRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_H264_FRAME, GstH264Frame))
#define GST_H264_FRAME_CAST(obj) ((GstH264Frame *)obj)
#define GST_H264_FRAME_GOT_PRIMARY GST_VIDEO_FRAME_FLAG_LAST
typedef struct _GstH264Frame GstH264Frame;
typedef struct _GstH264FrameClass GstH264FrameClass;
/* One decoded H.264 picture plus the decoder-side bookkeeping the DPB
 * needs.  Extends GstVideoFrame. */
struct _GstH264Frame
{
  GstVideoFrame video_frame;

  /* header of the first slice of this picture */
  GstH264Slice slice_hdr;
  /* GstBuffer per slice; refs owned by the frame (see add_slice) */
  GPtrArray *slices;

  /* picture order count */
  guint poc;
  /* frame_num / long-term frame index used for reference picture lists */
  guint16 frame_idx;
  gboolean is_reference;
  gboolean is_long_term;
  /* TRUE while the DPB still has to push this frame downstream */
  gboolean output_needed;
};
struct _GstH264FrameClass
{
GstVideoFrameClass video_frame_class;
};
void gst_h264_frame_add_slice (GstH264Frame *h264_frame, GstBuffer *buf);
GstH264Frame *gst_h264_frame_new (void);
GType gst_h264_frame_get_type (void);
#endif

File diff suppressed because it is too large Load diff

View file

@ -1,420 +0,0 @@
/* GStreamer
*
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_H264_PARSER_H_
#define _GST_H264_PARSER_H_
#include <glib-object.h>
G_BEGIN_DECLS
/* NAL unit type codes (nal_unit_type field of the NAL header).  Values
 * match the H.264 bytestream syntax; note value 12 is "filler data" in
 * the spec although the identifier here spells it FILTER_DATA. */
typedef enum
{
  GST_NAL_UNKNOWN = 0,
  GST_NAL_SLICE = 1,
  GST_NAL_SLICE_DPA = 2,
  GST_NAL_SLICE_DPB = 3,
  GST_NAL_SLICE_DPC = 4,
  GST_NAL_SLICE_IDR = 5,
  GST_NAL_SEI = 6,
  GST_NAL_SPS = 7,
  GST_NAL_PPS = 8,
  GST_NAL_AU_DELIMITER = 9,
  GST_NAL_SEQ_END = 10,
  GST_NAL_STREAM_END = 11,
  GST_NAL_FILTER_DATA = 12
} GstNalUnitType;
/* slice_type values.  The GST_H264_S_* entries (values 5-9) are the
 * "all slices of the picture have this type" variants, which is why the
 * predicates below reduce the value modulo 5. */
typedef enum
{
  GST_H264_P_SLICE,
  GST_H264_B_SLICE,
  GST_H264_I_SLICE,
  GST_H264_SP_SLICE,
  GST_H264_SI_SLICE,
  GST_H264_S_P_SLICE,
  GST_H264_S_B_SLICE,
  GST_H264_S_I_SLICE,
  GST_H264_S_SP_SLICE,
  GST_H264_S_SI_SLICE
} GstH264SliceType;

/* Slice-kind predicates that treat the S_ variants like their base type. */
#define GST_H264_IS_P_SLICE(type) ((type % 5) == GST_H264_P_SLICE)
#define GST_H264_IS_B_SLICE(type) ((type % 5) == GST_H264_B_SLICE)
#define GST_H264_IS_I_SLICE(type) ((type % 5) == GST_H264_I_SLICE)
#define GST_H264_IS_SP_SLICE(type) ((type % 5) == GST_H264_SP_SLICE)
#define GST_H264_IS_SI_SLICE(type) ((type % 5) == GST_H264_SI_SLICE)
typedef struct _GstNalUnit GstNalUnit;
typedef struct _GstH264HRDParameters GstH264HRDParameters;
typedef struct _GstH264VUIParameters GstH264VUIParameters;
typedef struct _GstH264Sequence GstH264Sequence;
typedef struct _GstH264Picture GstH264Picture;
typedef struct _GstH264DecRefPicMarking GstH264DecRefPicMarking;
typedef struct _GstH264RefPicMarking GstH264RefPicMarking;
typedef struct _GstH264PredWeightTable GstH264PredWeightTable;
typedef struct _GstH264Slice GstH264Slice;
typedef struct _GstH264ClockTimestamp GstH264ClockTimestamp;
typedef struct _GstH264PicTiming GstH264PicTiming;
typedef struct _GstH264BufferingPeriod GstH264BufferingPeriod;
typedef struct _GstH264SEIMessage GstH264SEIMessage;
/* Parsed NAL unit header. */
struct _GstNalUnit
{
  /* nal_ref_idc: non-zero when the NAL carries reference data */
  guint16 ref_idc;
  /* nal_unit_type, see GstNalUnitType */
  guint16 type;

  /* calculated values */
  /* 1 when type == GST_NAL_SLICE_IDR */
  guint8 IdrPicFlag;
};
struct _GstH264HRDParameters
{
guint8 cpb_cnt_minus1;
guint8 bit_rate_scale;
guint8 cpb_size_scale;
guint32 bit_rate_value_minus1[32];
guint32 cpb_size_value_minus1[32];
guint8 cbr_flag[32];
guint8 initial_cpb_removal_delay_length_minus1;
guint8 cpb_removal_delay_length_minus1;
guint8 dpb_output_delay_length_minus1;
guint8 time_offset_length;
};
struct _GstH264VUIParameters
{
guint8 aspect_ratio_idc;
/* if aspect_ratio_idc == 255 */
guint16 sar_width;
guint16 sar_height;
guint8 overscan_info_present_flag;
/* if overscan_info_present_flag */
guint8 overscan_appropriate_flag;
guint8 video_format;
guint8 video_full_range_flag;
guint8 colour_description_present_flag;
guint8 colour_primaries;
guint8 transfer_characteristics;
guint8 matrix_coefficients;
guint8 chroma_sample_loc_type_top_field;
guint8 chroma_sample_loc_type_bottom_field;
guint8 timing_info_present_flag;
/* if timing_info_present_flag */
guint32 num_units_in_tick;
guint32 time_scale;
guint8 fixed_frame_rate_flag;
guint8 nal_hrd_parameters_present_flag;
/* if nal_hrd_parameters_present_flag */
GstH264HRDParameters nal_hrd_parameters;
guint8 vcl_hrd_parameters_present_flag;
/* if nal_hrd_parameters_present_flag */
GstH264HRDParameters vcl_hrd_parameters;
guint8 low_delay_hrd_flag;
guint8 pic_struct_present_flag;
};
struct _GstH264Sequence
{
gint id;
guint8 profile_idc;
guint8 constraint_set0_flag;
guint8 constraint_set1_flag;
guint8 constraint_set2_flag;
guint8 constraint_set3_flag;
guint8 level_idc;
guint8 chroma_format_idc;
guint8 separate_colour_plane_flag;
guint8 bit_depth_luma_minus8;
guint8 bit_depth_chroma_minus8;
guint8 qpprime_y_zero_transform_bypass_flag;
guint8 scaling_matrix_present_flag;
guint8 scaling_lists_4x4[6][16];
guint8 scaling_lists_8x8[6][64];
guint8 log2_max_frame_num_minus4;
guint8 pic_order_cnt_type;
/* if pic_order_cnt_type == 0 */
guint8 log2_max_pic_order_cnt_lsb_minus4;
/* else if pic_order_cnt_type == 1 */
guint8 delta_pic_order_always_zero_flag;
gint32 offset_for_non_ref_pic;
gint32 offset_for_top_to_bottom_field;
guint8 num_ref_frames_in_pic_order_cnt_cycle;
gint32 offset_for_ref_frame[255];
guint32 num_ref_frames;
guint8 gaps_in_frame_num_value_allowed_flag;
guint32 pic_width_in_mbs_minus1;
guint32 pic_height_in_map_units_minus1;
guint8 frame_mbs_only_flag;
guint8 mb_adaptive_frame_field_flag;
guint8 direct_8x8_inference_flag;
guint32 frame_crop_left_offset;
guint32 frame_crop_right_offset;
guint32 frame_crop_top_offset;
guint32 frame_crop_bottom_offset;
guint8 vui_parameters_present_flag;
/* if vui_parameters_present_flag */
GstH264VUIParameters vui_parameters;
/* calculated values */
guint8 ChromaArrayType;
guint32 MaxFrameNum;
};
struct _GstH264Picture
{
gint id;
GstH264Sequence *sequence;
guint8 entropy_coding_mode_flag;
guint8 pic_order_present_flag;
guint32 num_slice_groups_minus1;
/* if num_slice_groups_minus1 > 0 */
guint8 slice_group_map_type;
/* and if slice_group_map_type == 0 */
guint32 run_length_minus1[8];
/* or if slice_group_map_type == 2 */
guint32 top_left[8];
guint32 bottom_right[8];
/* or if slice_group_map_type == (3, 4, 5) */
guint8 slice_group_change_direction_flag;
guint32 slice_group_change_rate_minus1;
/* or if slice_group_map_type == 6 */
guint32 pic_size_in_map_units_minus1;
guint8 *slice_group_id;
guint8 num_ref_idx_l0_active_minus1;
guint8 num_ref_idx_l1_active_minus1;
guint8 weighted_pred_flag;
guint8 weighted_bipred_idc;
gint8 pic_init_qp_minus26;
gint8 pic_init_qs_minus26;
gint8 chroma_qp_index_offset;
guint8 deblocking_filter_control_present_flag;
guint8 constrained_intra_pred_flag;
guint8 redundant_pic_cnt_present_flag;
guint8 transform_8x8_mode_flag;
guint8 scaling_lists_4x4[6][16];
guint8 scaling_lists_8x8[6][64];
guint8 second_chroma_qp_index_offset;
};
struct _GstH264RefPicMarking
{
guint8 memory_management_control_operation;
guint32 difference_of_pic_nums_minus1;
guint32 long_term_pic_num;
guint32 long_term_frame_idx;
guint32 max_long_term_frame_idx_plus1;
};
struct _GstH264DecRefPicMarking
{
/* if slice->nal_unit.IdrPicFlag */
guint8 no_output_of_prior_pics_flag;
guint8 long_term_reference_flag;
guint8 adaptive_ref_pic_marking_mode_flag;
GstH264RefPicMarking ref_pic_marking[10];
guint8 n_ref_pic_marking;
};
struct _GstH264PredWeightTable
{
guint8 luma_log2_weight_denom;
guint8 chroma_log2_weight_denom;
guint8 luma_weight_l0[32];
guint8 luma_offset_l0[32];
/* if seq->ChromaArrayType != 0 */
guint8 chroma_weight_l0[32][2];
guint8 chroma_offset_l0[32][2];
/* if slice->slice_type % 5 == 1 */
guint8 luma_weight_l1[32];
guint8 luma_offset_l1[32];
/* and if seq->ChromaArrayType != 0 */
guint8 chroma_weight_l1[32][2];
guint8 chroma_offset_l1[32][2];
};
struct _GstH264Slice
{
GstNalUnit nal_unit;
guint32 first_mb_in_slice;
guint32 type;
GstH264Picture *picture;
/* if seq->separate_colour_plane_flag */
guint8 colour_plane_id;
guint16 frame_num;
guint8 field_pic_flag;
guint8 bottom_field_flag;
/* if nal_unit.type == 5 */
guint16 idr_pic_id;
/* if seq->pic_order_cnt_type == 0 */
guint16 pic_order_cnt_lsb;
gint32 delta_pic_order_cnt_bottom;
gint32 delta_pic_order_cnt[2];
guint8 redundant_pic_cnt;
/* if slice_type == B_SLICE */
guint8 direct_spatial_mv_pred_flag;
guint8 num_ref_idx_l0_active_minus1;
guint8 num_ref_idx_l1_active_minus1;
GstH264PredWeightTable pred_weight_table;
/* if nal_unit.ref_idc != 0 */
GstH264DecRefPicMarking dec_ref_pic_marking;
/* calculated values */
guint32 MaxPicNum;
};
struct _GstH264ClockTimestamp
{
guint8 ct_type;
guint8 nuit_field_based_flag;
guint8 counting_type;
guint8 discontinuity_flag;
guint8 cnt_dropped_flag;
guint8 n_frames;
guint8 seconds_flag;
guint8 seconds_value;
guint8 minutes_flag;
guint8 minutes_value;
guint8 hours_flag;
guint8 hours_value;
guint32 time_offset;
};
struct _GstH264PicTiming
{
guint8 cpb_removal_delay;
guint8 dpb_output_delay;
guint8 pic_struct_present_flag;
/* if pic_struct_present_flag */
guint8 pic_struct;
guint8 clock_timestamp_flag[3];
GstH264ClockTimestamp clock_timestamp[3];
};
struct _GstH264BufferingPeriod
{
GstH264Sequence *seq;
/* seq->vui_parameters->nal_hrd_parameters_present_flag */
guint8 nal_initial_cpb_removal_delay[32];
guint8 nal_initial_cpb_removal_delay_offset[32];
/* seq->vui_parameters->vcl_hrd_parameters_present_flag */
guint8 vcl_initial_cpb_removal_delay[32];
guint8 vcl_initial_cpb_removal_delay_offset[32];
};
struct _GstH264SEIMessage
{
guint32 payloadType;
union {
GstH264BufferingPeriod buffering_period;
GstH264PicTiming pic_timing;
};
};
#define GST_TYPE_H264_PARSER (gst_h264_parser_get_type ())
#define GST_H264_PARSER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_H264_PARSER, GstH264Parser))
#define GST_H264_PARSER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_H264_PARSER, GstH264ParserClass))
#define GST_IS_H264_PARSER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_H264_PARSER))
#define GST_IS_H264_PARSER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_H264_PARSER))
#define GST_H264_PARSER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_H264_PARSER, GstH264ParserClass))
typedef struct _GstH264ParserClass GstH264ParserClass;
typedef struct _GstH264Parser GstH264Parser;
struct _GstH264ParserClass
{
GObjectClass parent_class;
};
struct _GstH264Parser
{
GObject parent_instance;
GHashTable *sequences;
GHashTable *pictures;
};
GType gst_h264_parser_get_type (void) G_GNUC_CONST;
GstH264Sequence *gst_h264_parser_parse_sequence (GstH264Parser * parser, guint8 * data, guint size);
GstH264Picture *gst_h264_parser_parse_picture (GstH264Parser * parser, guint8 * data, guint size);
gboolean gst_h264_parser_parse_slice_header (GstH264Parser * parser, GstH264Slice * slice, guint8 * data, guint size, GstNalUnit nal_unit);
gboolean gst_h264_parser_parse_sei_message (GstH264Parser * parser, GstH264Sequence *seq, GstH264SEIMessage * sei, guint8 * data, guint size);
G_END_DECLS
#endif /* _GST_H264_PARSER_H_ */

View file

@ -1,503 +0,0 @@
/* GStreamer
*
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "gstnalreader.h"
static gboolean gst_nal_reader_read (GstNalReader * reader, guint nbits);
/**
* SECTION:gstnalreader
* @short_description: Bit reader which automatically skips
* emulation_prevention bytes
*
* #GstNalReader provides a bit reader which automatically skips
* emulation_prevention bytes. It provides functions for reading any number of bits
* into 8, 16, 32 and 64 bit variables. It also provides functions for reading
* Exp-Golomb values.
*/
/**
* gst_nal_reader_new:
* @data: Data from which the #GstNalReader should read
* @size: Size of @data in bytes
*
* Create a new #GstNalReader instance, which will read from @data.
*
* Returns: a new #GstNalReader instance
*
* Since: 0.10.22
*/
GstNalReader *
gst_nal_reader_new (const guint8 * data, guint size)
{
  /* Zero-filled allocation: byte offset and cached bit count start at 0. */
  GstNalReader *reader = g_slice_new0 (GstNalReader);

  reader->data = data;
  reader->size = size;

  /* Seed the byte cache with non-zero values so the first real bytes are
   * never mistaken for the 0x00 0x00 prefix of an emulation-prevention
   * sequence. */
  reader->first_byte = 0xff;
  reader->cache = 0xff;

  return reader;
}
/**
* gst_nal_reader_new_from_buffer:
* @buffer: Buffer from which the #GstNalReader should read
*
* Create a new #GstNalReader instance, which will read from the
* #GstBuffer @buffer.
*
* Returns: a new #GstNalReader instance
*
* Since: 0.10.22
*/
GstNalReader *
gst_nal_reader_new_from_buffer (const GstBuffer * buffer)
{
  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);

  /* The reader only borrows the buffer's data pointer — no reference is
   * taken, so @buffer must stay alive as long as the reader is used. */
  return gst_nal_reader_new (GST_BUFFER_DATA (buffer),
      GST_BUFFER_SIZE (buffer));
}
/**
* gst_nal_reader_free:
* @reader: a #GstNalReader instance
*
* Frees a #GstNalReader instance, which was previously allocated by
* gst_nal_reader_new() or gst_nal_reader_new_from_buffer().
*
* Since: 0.10.22
*/
void
gst_nal_reader_free (GstNalReader * reader)
{
  g_return_if_fail (reader != NULL);

  /* Only the reader struct itself is owned; the data it reads is not
   * freed here (see gst_nal_reader_new). */
  g_slice_free (GstNalReader, reader);
}
/**
* gst_nal_reader_init:
* @reader: a #GstNalReader instance
* @data: Data from which the #GstNalReader should read
* @size: Size of @data in bytes
*
* Initializes a #GstNalReader instance to read from @data. This function
* can be called on already initialized instances.
*
* Since: 0.10.22
*/
void
gst_nal_reader_init (GstNalReader * reader, const guint8 * data, guint size)
{
  g_return_if_fail (reader != NULL);

  reader->data = data;
  reader->size = size;

  /* Reset read position and empty the bit cache. */
  reader->byte = 0;
  reader->bits_in_cache = 0;
  /* fill with something other than 0 to detect emulation prevention bytes */
  reader->first_byte = 0xff;
  reader->cache = 0xff;
}
/**
* gst_nal_reader_init_from_buffer:
* @reader: a #GstNalReader instance
* @buffer: Buffer from which the #GstNalReader should read
*
* Initializes a #GstNalReader instance to read from @buffer. This function
* can be called on already initialized instances.
*
* Since: 0.10.22
*/
void
gst_nal_reader_init_from_buffer (GstNalReader * reader,
    const GstBuffer * buffer)
{
  g_return_if_fail (GST_IS_BUFFER (buffer));

  /* Borrows @buffer's data; the caller keeps the buffer alive while the
   * reader is in use. */
  gst_nal_reader_init (reader, GST_BUFFER_DATA (buffer),
      GST_BUFFER_SIZE (buffer));
}
/**
* gst_nal_reader_skip:
* @reader: a #GstNalReader instance
* @nbits: the number of bits to skip
*
* Skips @nbits bits of the #GstNalReader instance.
*
* Returns: %TRUE if @nbits bits could be skipped, %FALSE otherwise.
*
* Since: 0.10.22
*/
gboolean
gst_nal_reader_skip (GstNalReader * reader, guint nbits)
{
  g_return_val_if_fail (reader != NULL, FALSE);

  /* Pull at least @nbits bits into the cache (handling emulation
   * prevention bytes on the way), then simply discard them. */
  if (!gst_nal_reader_read (reader, nbits))
    return FALSE;

  reader->bits_in_cache -= nbits;
  return TRUE;
}
/**
* gst_nal_reader_skip_to_byte:
* @reader: a #GstNalReader instance
*
* Skips until the next byte.
*
* Returns: %TRUE if successful, %FALSE otherwise.
*
* Since: 0.10.22
*/
gboolean
gst_nal_reader_skip_to_byte (GstNalReader * reader)
{
  g_return_val_if_fail (reader != NULL, FALSE);

  /* Already byte-aligned: advance over one whole byte (fails at EOS);
   * otherwise just drop the cached partial bits to reach the boundary. */
  if (reader->bits_in_cache == 0) {
    if (G_LIKELY ((reader->size - reader->byte) > 0))
      reader->byte++;
    else
      return FALSE;
  }

  reader->bits_in_cache = 0;

  return TRUE;
}
/**
* gst_nal_reader_get_pos:
* @reader: a #GstNalReader instance
*
* Returns the current position of a GstNalReader instance in bits.
*
* Returns: The current position in bits
*
*/
guint
gst_nal_reader_get_pos (const GstNalReader * reader)
{
  /* Position = bits pulled from the data minus bits still sitting in the
   * cache (i.e. read ahead but not yet consumed). */
  guint bits_fetched = reader->byte * 8;

  return bits_fetched - reader->bits_in_cache;
}
/**
* gst_nal_reader_get_remaining:
* @reader: a #GstNalReader instance
*
* Returns the remaining number of bits of a GstNalReader instance.
*
* Returns: The remaining number of bits.
*
*/
guint
gst_nal_reader_get_remaining (const GstNalReader * reader)
{
  /* Bits not yet fetched from the data, plus the bits already cached but
   * not yet consumed. */
  guint unread_bytes = reader->size - reader->byte;

  return unread_bytes * 8 + reader->bits_in_cache;
}
/**
* gst_nal_reader_get_bits_uint8:
* @reader: a #GstNalReader instance
* @val: Pointer to a #guint8 to store the result
* @nbits: number of bits to read
*
* Read @nbits bits into @val and update the current position.
*
* Returns: %TRUE if successful, %FALSE otherwise.
*
* Since: 0.10.22
*/
/**
* gst_nal_reader_get_bits_uint16:
* @reader: a #GstNalReader instance
* @val: Pointer to a #guint16 to store the result
* @nbits: number of bits to read
*
* Read @nbits bits into @val and update the current position.
*
* Returns: %TRUE if successful, %FALSE otherwise.
*
* Since: 0.10.22
*/
/**
* gst_nal_reader_get_bits_uint32:
* @reader: a #GstNalReader instance
* @val: Pointer to a #guint32 to store the result
* @nbits: number of bits to read
*
* Read @nbits bits into @val and update the current position.
*
* Returns: %TRUE if successful, %FALSE otherwise.
*
* Since: 0.10.22
*/
/**
* gst_nal_reader_get_bits_uint64:
* @reader: a #GstNalReader instance
* @val: Pointer to a #guint64 to store the result
* @nbits: number of bits to read
*
* Read @nbits bits into @val and update the current position.
*
* Returns: %TRUE if successful, %FALSE otherwise.
*
* Since: 0.10.22
*/
/**
* gst_nal_reader_peek_bits_uint8:
* @reader: a #GstNalReader instance
* @val: Pointer to a #guint8 to store the result
* @nbits: number of bits to read
*
* Read @nbits bits into @val but keep the current position.
*
* Returns: %TRUE if successful, %FALSE otherwise.
*
* Since: 0.10.22
*/
/**
* gst_nal_reader_peek_bits_uint16:
* @reader: a #GstNalReader instance
* @val: Pointer to a #guint16 to store the result
* @nbits: number of bits to read
*
* Read @nbits bits into @val but keep the current position.
*
* Returns: %TRUE if successful, %FALSE otherwise.
*
* Since: 0.10.22
*/
/**
* gst_nal_reader_peek_bits_uint32:
* @reader: a #GstNalReader instance
* @val: Pointer to a #guint32 to store the result
* @nbits: number of bits to read
*
* Read @nbits bits into @val but keep the current position.
*
* Returns: %TRUE if successful, %FALSE otherwise.
*
* Since: 0.10.22
*/
/**
* gst_nal_reader_peek_bits_uint64:
* @reader: a #GstNalReader instance
* @val: Pointer to a #guint64 to store the result
* @nbits: number of bits to read
*
* Read @nbits bits into @val but keep the current position.
*
* Returns: %TRUE if successful, %FALSE otherwise.
*
* Since: 0.10.22
*/
/* Ensure at least @nbits bits are available in the reader's cache,
 * pulling bytes from the data as needed and silently dropping
 * emulation_prevention_three_byte (0x03 after 0x00 0x00).  The cache is
 * a shift register: @first_byte holds the most recently fetched byte and
 * @cache the bytes before it.  Returns FALSE on overrun. */
static gboolean
gst_nal_reader_read (GstNalReader * reader, guint nbits)
{
  /* Quick overrun check; note this does not account for emulation
   * prevention bytes still to be skipped, so the in-loop check below is
   * still required. */
  if (G_UNLIKELY (reader->byte * 8 + (nbits - reader->bits_in_cache) >
          reader->size * 8))
    return FALSE;

  while (reader->bits_in_cache < nbits) {
    guint8 byte;
    gboolean check_three_byte;

    check_three_byte = TRUE;
  next_byte:
    if (G_UNLIKELY (reader->byte >= reader->size))
      return FALSE;

    byte = reader->data[reader->byte++];

    /* check if the byte is a emulation_prevention_three_byte */
    if (check_three_byte && byte == 0x03 && reader->first_byte == 0x00 &&
        ((reader->cache & 0xff) == 0)) {
      /* next byte goes unconditionally to the cache, even if it's 0x03 */
      check_three_byte = FALSE;
      goto next_byte;
    }
    /* Shift the previous byte into the wide cache and stage the new one. */
    reader->cache = (reader->cache << 8) | reader->first_byte;
    reader->first_byte = byte;
    reader->bits_in_cache += 8;
  }

  return TRUE;
}
/* GST_NAL_READER_READ_BITS:
 * @bits: integer width (8, 16, 32 or 64) to instantiate the readers for
 *
 * Expands to the definitions of gst_nal_reader_get_bits_uint@bits() and
 * gst_nal_reader_peek_bits_uint@bits().
 *
 * The get variant consumes @nbits bits: after gst_nal_reader_read() has
 * cached enough data, the result is assembled from first_byte (the most
 * recently cached byte) and the overflow held in the 64-bit cache, then
 * masked down to @nbits.  The peek variant runs the get variant on a stack
 * copy of the reader so the caller's position is left untouched. */
#define GST_NAL_READER_READ_BITS(bits) \
gboolean \
gst_nal_reader_get_bits_uint##bits (GstNalReader *reader, guint##bits *val, guint nbits) \
{ \
  guint shift; \
  \
  g_return_val_if_fail (reader != NULL, FALSE); \
  g_return_val_if_fail (val != NULL, FALSE); \
  g_return_val_if_fail (nbits <= bits, FALSE); \
  \
  if (!gst_nal_reader_read (reader, nbits)) \
    return FALSE; \
  \
  /* bring the required bits down and truncate */ \
  shift = reader->bits_in_cache - nbits; \
  *val = reader->first_byte >> shift; \
  \
  *val |= reader->cache << (8 - shift); \
  /* mask out required bits */ \
  if (nbits < bits) \
    *val &= ((guint##bits)1 << nbits) - 1; \
  \
  reader->bits_in_cache = shift; \
  \
  return TRUE; \
} \
\
gboolean \
gst_nal_reader_peek_bits_uint##bits (const GstNalReader *reader, guint##bits *val, guint nbits) \
{ \
  GstNalReader tmp; \
  \
  g_return_val_if_fail (reader != NULL, FALSE); \
  tmp = *reader; \
  return gst_nal_reader_get_bits_uint##bits (&tmp, val, nbits); \
}

/* instantiate the get/peek pairs for every supported width */
GST_NAL_READER_READ_BITS (8);
GST_NAL_READER_READ_BITS (16);
GST_NAL_READER_READ_BITS (32);
GST_NAL_READER_READ_BITS (64);
/**
 * gst_nal_reader_get_ue:
 * @reader: a #GstNalReader instance
 * @val: Pointer to a #guint32 to store the result
 *
 * Reads an unsigned Exp-Golomb (ue(v)) value into @val.
 *
 * A ue(v) code is a run of i leading zero bits, a one bit, and i further
 * bits; the decoded value is 2^i - 1 plus those i bits.
 *
 * Returns: %TRUE if successful, %FALSE otherwise.
 */
gboolean
gst_nal_reader_get_ue (GstNalReader * reader, guint32 * val)
{
  guint i = 0;
  guint8 bit;
  guint32 value;

  if (G_UNLIKELY (!gst_nal_reader_get_bits_uint8 (reader, &bit, 1)))
    return FALSE;

  /* count the leading zero bits */
  while (bit == 0) {
    i++;
    if (G_UNLIKELY (!gst_nal_reader_get_bits_uint8 (reader, &bit, 1)))
      return FALSE;
  }

  g_return_val_if_fail (i <= 32, FALSE);

  if (G_UNLIKELY (!gst_nal_reader_get_bits_uint32 (reader, &value, i)))
    return FALSE;

  /* compute 2^i - 1 + value in 64 bits: the previous "(1 << i)" shifted a
   * plain (signed) int, which is undefined behaviour for i == 31 (signed
   * overflow) and i == 32 (shift >= type width), both allowed by the
   * guard above */
  *val = (guint32) ((((guint64) 1) << i) - 1 + value);

  return TRUE;
}
/**
 * gst_nal_reader_peek_ue:
 * @reader: a #GstNalReader instance
 * @val: Pointer to a #guint32 to store the result
 *
 * Reads an unsigned Exp-Golomb value into @val without advancing the
 * current position of @reader.
 *
 * Returns: %TRUE if successful, %FALSE otherwise.
 */
gboolean
gst_nal_reader_peek_ue (const GstNalReader * reader, guint32 * val)
{
  GstNalReader scratch;

  g_return_val_if_fail (reader != NULL, FALSE);

  /* decode on a throw-away copy so the caller's position is untouched */
  scratch = *reader;

  return gst_nal_reader_get_ue (&scratch, val);
}
/**
 * gst_nal_reader_get_se:
 * @reader: a #GstNalReader instance
 * @val: Pointer to a #gint32 to store the result
 *
 * Reads a signed Exp-Golomb (se(v)) value into @val.
 *
 * Returns: %TRUE if successful, %FALSE otherwise.
 */
gboolean
gst_nal_reader_get_se (GstNalReader * reader, gint32 * val)
{
  guint32 code;

  /* an se(v) is a ue(v) code word remapped to a signed value */
  if (G_UNLIKELY (!gst_nal_reader_get_ue (reader, &code)))
    return FALSE;

  /* odd code words map to positive values, even ones to negative */
  if (code & 1)
    *val = (gint32) ((code >> 1) + 1);
  else
    *val = -((gint32) (code >> 1));

  return TRUE;
}
/**
 * gst_nal_reader_peek_se:
 * @reader: a #GstNalReader instance
 * @val: Pointer to a #gint32 to store the result
 *
 * Reads a signed Exp-Golomb value into @val without advancing the current
 * position of @reader.
 *
 * Returns: %TRUE if successful, %FALSE otherwise.
 */
gboolean
gst_nal_reader_peek_se (const GstNalReader * reader, gint32 * val)
{
  GstNalReader scratch;

  g_return_val_if_fail (reader != NULL, FALSE);

  /* decode on a throw-away copy so the caller's position is untouched */
  scratch = *reader;

  return gst_nal_reader_get_se (&scratch, val);
}

View file

@ -1,99 +0,0 @@
/* GStreamer
*
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_NAL_READER_H__
#define __GST_NAL_READER_H__

#include <gst/gst.h>

G_BEGIN_DECLS

typedef struct _GstNalReader GstNalReader;

/* Bit reader for H.264 NAL unit payloads (RBSP).  Works like GstBitReader
 * but transparently skips the emulation_prevention_three_byte (a 0x03
 * following two zero bytes) while refilling its cache. */
struct _GstNalReader
{
  const guint8 *data;           /* data to read from (not owned) */
  guint size;                   /* size of @data in bytes */

  guint byte;                   /* Byte position */
  guint bits_in_cache;          /* bitpos in the cache of next bit */
  guint8 first_byte;            /* most recently cached byte, kept as
                                 * look-behind for emulation prevention */
  guint64 cache;                /* cached bytes */
};

/* heap construction / destruction */
GstNalReader *gst_nal_reader_new (const guint8 *data, guint size);
GstNalReader *gst_nal_reader_new_from_buffer (const GstBuffer *buffer);
void gst_nal_reader_free (GstNalReader * reader);

/* (re)initialisation of a caller-allocated reader */
void gst_nal_reader_init (GstNalReader * reader, const guint8 * data, guint size);
void gst_nal_reader_init_from_buffer (GstNalReader * reader, const GstBuffer * buffer);

/* position handling */
gboolean gst_nal_reader_skip (GstNalReader *reader, guint nbits);
gboolean gst_nal_reader_skip_to_byte (GstNalReader *reader);
guint gst_nal_reader_get_pos (const GstNalReader * reader);
guint gst_nal_reader_get_remaining (const GstNalReader *reader);

/* fixed-width reads; the peek variants do not advance the position */
gboolean gst_nal_reader_get_bits_uint8 (GstNalReader *reader, guint8 *val, guint nbits);
gboolean gst_nal_reader_get_bits_uint16 (GstNalReader *reader, guint16 *val, guint nbits);
gboolean gst_nal_reader_get_bits_uint32 (GstNalReader *reader, guint32 *val, guint nbits);
gboolean gst_nal_reader_get_bits_uint64 (GstNalReader *reader, guint64 *val, guint nbits);

gboolean gst_nal_reader_peek_bits_uint8 (const GstNalReader *reader, guint8 *val, guint nbits);
gboolean gst_nal_reader_peek_bits_uint16 (const GstNalReader *reader, guint16 *val, guint nbits);
gboolean gst_nal_reader_peek_bits_uint32 (const GstNalReader *reader, guint32 *val, guint nbits);
gboolean gst_nal_reader_peek_bits_uint64 (const GstNalReader *reader, guint64 *val, guint nbits);

/* Exp-Golomb reads (unsigned ue(v) and signed se(v)) */
gboolean gst_nal_reader_get_ue (GstNalReader *reader, guint32 *val);
gboolean gst_nal_reader_peek_ue (const GstNalReader *reader, guint32 *val);

gboolean gst_nal_reader_get_se (GstNalReader *reader, gint32 *val);
gboolean gst_nal_reader_peek_se (const GstNalReader *reader, gint32 *val);

/**
 * GST_NAL_READER_INIT:
 * @data: Data from which the #GstNalReader should read
 * @size: Size of @data in bytes
 *
 * A #GstNalReader must be initialized with this macro, before it can be
 * used. This macro can be used to initialize a variable, but it cannot
 * be assigned to a variable. In that case you have to use
 * gst_nal_reader_init().
 *
 * Since: 0.10.22
 */
#define GST_NAL_READER_INIT(data, size) {data, size, 0, 0, 0xff, 0xff}

/**
 * GST_NAL_READER_INIT_FROM_BUFFER:
 * @buffer: Buffer from which the #GstNalReader should read
 *
 * A #GstNalReader must be initialized with this macro, before it can be
 * used. This macro can be used to initialize a variable, but it cannot
 * be assigned to a variable. In that case you have to use
 * gst_nal_reader_init_from_buffer().
 *
 * Since: 0.10.22
 */
#define GST_NAL_READER_INIT_FROM_BUFFER(buffer) {GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), 0, 0, 0xff, 0xff}

G_END_DECLS

#endif /* __GST_NAL_READER_H__ */

File diff suppressed because it is too large Load diff

View file

@ -22,10 +22,9 @@
#define __GST_VDP_H264_DEC_H__
#include <gst/gst.h>
#include <gst/codecparsers/gsth264parser.h>
#include "../gstvdp/gstvdpdecoder.h"
#include "gsth264parser.h"
#include "../gstvdpdecoder.h"
#include "gsth264dpb.h"
G_BEGIN_DECLS
@ -45,16 +44,17 @@ typedef struct _GstVdpH264DecClass GstVdpH264DecClass;
struct _GstVdpH264Dec {
GstVdpDecoder vdp_decoder;
gboolean packetized;
guint8 nal_length_size;
GstH264Parser *parser;
GstH264DPB *dpb;
GstH264SPS *sps[GST_H264_MAX_SPS_COUNT];
GstH264PPS *pps[GST_H264_MAX_PPS_COUNT];
/* Current SPS being used. Default:-1 */
gint current_sps;
GstH264Sequence *sequence;
gboolean got_idr;
VdpDecoder decoder;
GstVideoCodecState *input_state;
guint poc_msb;
guint prev_poc_lsb;
};
@ -67,4 +67,4 @@ GType gst_vdp_h264_dec_get_type (void);
G_END_DECLS
#endif /* __GST_VDP_H264_DEC_H__ */
#endif /* __GST_VDP_H264_DEC_H__ */

View file

@ -38,11 +38,12 @@
#include <gst/gst.h>
#include <gst/base/gstbytereader.h>
#include <gst/base/gstbitreader.h>
#include <gst/codecparsers/gstmpegvideoparser.h>
#include <gst/codecparsers/gstmpegvideometa.h>
#include <string.h>
#include "mpegutil.h"
#include "gstvdpmpegdec.h"
#include "gstvdpvideomemory.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_mpeg_dec_debug);
#define GST_CAT_DEFAULT gst_vdp_mpeg_dec_debug
@ -58,24 +59,24 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
"systemstream = (boolean) false")
);
#define DEBUG_INIT(bla) \
#define DEBUG_INIT \
GST_DEBUG_CATEGORY_INIT (gst_vdp_mpeg_dec_debug, "vdpaumpegdec", 0, \
"VDPAU mpeg decoder");
GST_BOILERPLATE_FULL (GstVdpMpegDec, gst_vdp_mpeg_dec,
GstVdpDecoder, GST_TYPE_VDP_DECODER, DEBUG_INIT);
#define gst_vdp_mpeg_dec_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstVdpMpegDec, gst_vdp_mpeg_dec, GST_TYPE_VDP_DECODER,
DEBUG_INIT);
static void gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info);
#define SYNC_CODE_SIZE 3
static VdpDecoderProfile
gst_vdp_mpeg_dec_get_profile (MPEGSeqExtHdr * hdr)
gst_vdp_mpeg_dec_get_profile (GstMpegVideoSequenceExt * hdr)
{
VdpDecoderProfile profile;
switch (hdr->profile) {
case 5:
case GST_MPEG_VIDEO_PROFILE_SIMPLE:
profile = VDP_DECODER_PROFILE_MPEG2_SIMPLE;
break;
default:
@ -88,86 +89,105 @@ gst_vdp_mpeg_dec_get_profile (MPEGSeqExtHdr * hdr)
static gboolean
gst_vdp_mpeg_dec_handle_picture_coding (GstVdpMpegDec * mpeg_dec,
GstBuffer * buffer, GstVideoFrame * frame)
GstMpegVideoPictureExt * pic_ext, GstVideoCodecFrame * frame)
{
MPEGPictureExt pic_ext;
VdpPictureInfoMPEG1Or2 *info;
#if 0
gint fields;
#endif
GST_DEBUG_OBJECT (mpeg_dec, "Handling GstMpegVideoPictureExt");
info = &mpeg_dec->vdp_info;
if (!mpeg_util_parse_picture_coding_extension (&pic_ext, buffer))
return FALSE;
/* FIXME : Set defaults when pic_ext isn't present */
memcpy (&mpeg_dec->vdp_info.f_code, &pic_ext.f_code, 4);
memcpy (&mpeg_dec->vdp_info.f_code, &pic_ext->f_code, 4);
info->intra_dc_precision = pic_ext.intra_dc_precision;
info->picture_structure = pic_ext.picture_structure;
info->top_field_first = pic_ext.top_field_first;
info->frame_pred_frame_dct = pic_ext.frame_pred_frame_dct;
info->concealment_motion_vectors = pic_ext.concealment_motion_vectors;
info->q_scale_type = pic_ext.q_scale_type;
info->intra_vlc_format = pic_ext.intra_vlc_format;
info->alternate_scan = pic_ext.alternate_scan;
info->intra_dc_precision = pic_ext->intra_dc_precision;
info->picture_structure = pic_ext->picture_structure;
info->top_field_first = pic_ext->top_field_first;
info->frame_pred_frame_dct = pic_ext->frame_pred_frame_dct;
info->concealment_motion_vectors = pic_ext->concealment_motion_vectors;
info->q_scale_type = pic_ext->q_scale_type;
info->intra_vlc_format = pic_ext->intra_vlc_format;
info->alternate_scan = pic_ext->alternate_scan;
#if 0
fields = 2;
if (pic_ext.picture_structure == 3) {
if (pic_ext->picture_structure == 3) {
if (mpeg_dec->stream_info.interlaced) {
if (pic_ext.progressive_frame == 0)
if (pic_ext->progressive_frame == 0)
fields = 2;
if (pic_ext.progressive_frame == 0 && pic_ext.repeat_first_field == 0)
if (pic_ext->progressive_frame == 0 && pic_ext->repeat_first_field == 0)
fields = 2;
if (pic_ext.progressive_frame == 1 && pic_ext.repeat_first_field == 1)
if (pic_ext->progressive_frame == 1 && pic_ext->repeat_first_field == 1)
fields = 3;
} else {
if (pic_ext.repeat_first_field == 0)
if (pic_ext->repeat_first_field == 0)
fields = 2;
if (pic_ext.repeat_first_field == 1 && pic_ext.top_field_first == 0)
if (pic_ext->repeat_first_field == 1 && pic_ext->top_field_first == 0)
fields = 4;
if (pic_ext.repeat_first_field == 1 && pic_ext.top_field_first == 1)
if (pic_ext->repeat_first_field == 1 && pic_ext->top_field_first == 1)
fields = 6;
}
} else
fields = 1;
#endif
frame->n_fields = fields;
if (pic_ext.top_field_first)
GST_VIDEO_FRAME_FLAG_SET (frame, GST_VIDEO_FRAME_FLAG_TFF);
if (pic_ext->top_field_first)
GST_FIXME ("Set TFF on outgoing buffer");
#if 0
GST_VIDEO_FRAME_FLAG_SET (frame, GST_VIDEO_FRAME_FLAG_TFF);
#endif
return TRUE;
}
static gboolean
gst_vdp_mpeg_dec_handle_picture (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer)
gst_vdp_mpeg_dec_handle_picture (GstVdpMpegDec * mpeg_dec,
GstMpegVideoPictureHdr * pic_hdr)
{
MPEGPictureHdr pic_hdr;
GST_DEBUG_OBJECT (mpeg_dec, "Handling GstMpegVideoPictureHdr");
if (!mpeg_util_parse_picture_hdr (&pic_hdr, buffer))
return FALSE;
mpeg_dec->vdp_info.picture_coding_type = pic_hdr.pic_type;
mpeg_dec->vdp_info.picture_coding_type = pic_hdr->pic_type;
if (mpeg_dec->stream_info.version == 1) {
mpeg_dec->vdp_info.full_pel_forward_vector =
pic_hdr.full_pel_forward_vector;
pic_hdr->full_pel_forward_vector;
mpeg_dec->vdp_info.full_pel_backward_vector =
pic_hdr.full_pel_backward_vector;
memcpy (&mpeg_dec->vdp_info.f_code, &pic_hdr.f_code, 4);
pic_hdr->full_pel_backward_vector;
memcpy (&mpeg_dec->vdp_info.f_code, &pic_hdr->f_code, 4);
}
mpeg_dec->frame_nr = mpeg_dec->gop_frame + pic_hdr.tsn;
mpeg_dec->frame_nr = mpeg_dec->gop_frame + pic_hdr->tsn;
return TRUE;
}
static gboolean
gst_vdp_mpeg_dec_handle_gop (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer)
gst_vdp_mpeg_dec_set_format (GstVideoDecoder * decoder,
GstVideoCodecState * state)
{
MPEGGop gop;
GstVdpMpegDec *mpeg_dec = (GstVdpMpegDec *) decoder;
/* FIXME : Check the hardware can handle the level/profile */
if (mpeg_dec->input_state)
gst_video_codec_state_unref (mpeg_dec->input_state);
mpeg_dec->input_state = gst_video_codec_state_ref (state);
return TRUE;
}
#if 0
static gboolean
gst_vdp_mpeg_dec_handle_gop (GstVdpMpegDec * mpeg_dec, const guint8 * data,
gsize size, guint offset)
{
GstMpegVideoGop gop;
GstClockTime time;
if (!mpeg_util_parse_gop (&gop, buffer))
if (!gst_mpeg_video_parse_gop (&gop, data, size, offset))
return FALSE;
time = GST_SECOND * (gop.hour * 3600 + gop.minute * 60 + gop.second);
@ -183,185 +203,157 @@ gst_vdp_mpeg_dec_handle_gop (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer)
return TRUE;
}
#endif
static gboolean
gst_vdp_mpeg_dec_handle_quant_matrix (GstVdpMpegDec * mpeg_dec,
GstBuffer * buffer)
GstMpegVideoQuantMatrixExt * qm)
{
MPEGQuantMatrix qm;
if (!mpeg_util_parse_quant_matrix (&qm, buffer))
return FALSE;
GST_DEBUG_OBJECT (mpeg_dec, "Handling GstMpegVideoQuantMatrixExt");
memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
&qm.intra_quantizer_matrix, 64);
&qm->intra_quantiser_matrix, 64);
memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
&qm.non_intra_quantizer_matrix, 64);
&qm->non_intra_quantiser_matrix, 64);
return TRUE;
}
static GstFlowReturn
gst_vdp_mpeg_dec_handle_sequence (GstVdpMpegDec * mpeg_dec,
GstBuffer * seq, GstBuffer * seq_ext)
GstMpegVideoSequenceHdr * hdr, GstMpegVideoSequenceExt * ext)
{
GstBaseVideoDecoder *base_video_decoder = GST_BASE_VIDEO_DECODER (mpeg_dec);
MPEGSeqHdr hdr;
GstFlowReturn ret;
GstVideoDecoder *video_decoder = GST_VIDEO_DECODER (mpeg_dec);
GstVdpMpegStreamInfo stream_info;
if (!mpeg_util_parse_sequence_hdr (&hdr, seq))
return GST_FLOW_CUSTOM_ERROR;
GST_DEBUG_OBJECT (mpeg_dec, "Handling GstMpegVideoSequenceHdr");
memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
&hdr.intra_quantizer_matrix, 64);
&hdr->intra_quantizer_matrix, 64);
memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
&hdr.non_intra_quantizer_matrix, 64);
&hdr->non_intra_quantizer_matrix, 64);
stream_info.width = hdr.width;
stream_info.height = hdr.height;
stream_info.width = hdr->width;
stream_info.height = hdr->height;
stream_info.fps_n = hdr.fps_n;
stream_info.fps_d = hdr.fps_d;
stream_info.fps_n = hdr->fps_n;
stream_info.fps_d = hdr->fps_d;
stream_info.par_n = hdr.par_w;
stream_info.par_d = hdr.par_h;
stream_info.par_n = hdr->par_w;
stream_info.par_d = hdr->par_h;
stream_info.interlaced = FALSE;
stream_info.version = 1;
stream_info.profile = VDP_DECODER_PROFILE_MPEG1;
if (seq_ext) {
MPEGSeqExtHdr ext;
if (ext) {
GST_DEBUG_OBJECT (mpeg_dec, "Handling GstMpegVideoSequenceExt");
if (!mpeg_util_parse_sequence_extension (&ext, seq_ext))
return GST_FLOW_CUSTOM_ERROR;
/* FIXME : isn't this already processed by mpegvideoparse ? */
stream_info.fps_n *= (ext->fps_n_ext + 1);
stream_info.fps_d *= (ext->fps_d_ext + 1);
stream_info.fps_n *= (ext.fps_n_ext + 1);
stream_info.fps_d *= (ext.fps_d_ext + 1);
stream_info.width += (ext->horiz_size_ext << 12);
stream_info.height += (ext->vert_size_ext << 12);
stream_info.width += (ext.horiz_size_ext << 12);
stream_info.height += (ext.vert_size_ext << 12);
stream_info.interlaced = !ext.progressive;
stream_info.interlaced = !ext->progressive;
stream_info.version = 2;
stream_info.profile = gst_vdp_mpeg_dec_get_profile (&ext);
stream_info.profile = gst_vdp_mpeg_dec_get_profile (ext);
}
if (memcmp (&mpeg_dec->stream_info, &stream_info,
sizeof (GstVdpMpegStreamInfo)) != 0) {
GstVideoState state;
GstFlowReturn ret;
state = gst_base_video_decoder_get_state (base_video_decoder);
state.width = stream_info.width;
state.height = stream_info.height;
state.fps_n = stream_info.fps_n;
state.fps_d = stream_info.fps_d;
state.par_n = stream_info.par_n;
state.par_d = stream_info.par_d;
state.interlaced = stream_info.interlaced;
gst_base_video_decoder_set_state (base_video_decoder, state);
ret = gst_vdp_decoder_init_decoder (GST_VDP_DECODER (mpeg_dec),
stream_info.profile, 2);
if (ret != GST_FLOW_OK)
return ret;
memcpy (&mpeg_dec->stream_info, &stream_info,
sizeof (GstVdpMpegStreamInfo));
}
GST_DEBUG_OBJECT (mpeg_dec, "Setting output state to %dx%d",
stream_info.width, stream_info.height);
mpeg_dec->output_state =
gst_video_decoder_set_output_state (video_decoder, GST_VIDEO_FORMAT_YV12,
stream_info.width, stream_info.height, mpeg_dec->input_state);
if (stream_info.interlaced)
mpeg_dec->output_state->info.interlace_mode =
GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
gst_video_decoder_negotiate (video_decoder);
ret = gst_vdp_decoder_init_decoder (GST_VDP_DECODER (mpeg_dec),
stream_info.profile, 2, mpeg_dec->output_state);
mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_DATA;
return GST_FLOW_OK;
return ret;
}
static GstFlowReturn
gst_vdp_mpeg_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame, GstClockTimeDiff deadline)
gst_vdp_mpeg_dec_handle_frame (GstVideoDecoder * video_decoder,
GstVideoCodecFrame * frame)
{
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (video_decoder);
VdpPictureInfoMPEG1Or2 *info;
GstVdpMpegFrame *mpeg_frame;
GstMpegVideoMeta *mpeg_meta;
GstVdpVideoMemory *vmem;
GstFlowReturn ret = GST_FLOW_OK;
VdpBitstreamBuffer vbit[1];
GstVdpVideoBuffer *outbuf;
GstMapInfo mapinfo;
/* MPEG_PACKET_SEQUENCE */
mpeg_frame = GST_VDP_MPEG_FRAME (frame);
if (mpeg_frame->seq) {
ret = gst_vdp_mpeg_dec_handle_sequence (mpeg_dec, mpeg_frame->seq,
mpeg_frame->seq_ext);
if (ret != GST_FLOW_OK) {
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return ret;
}
/* FIXME : Specify in sink query that we need the mpeg video meta */
/* Parse all incoming data from the frame */
mpeg_meta = gst_buffer_get_mpeg_video_meta (frame->input_buffer);
if (!mpeg_meta)
goto no_meta;
/* GST_MPEG_VIDEO_PACKET_SEQUENCE */
if (mpeg_meta->sequencehdr) {
ret =
gst_vdp_mpeg_dec_handle_sequence (mpeg_dec, mpeg_meta->sequencehdr,
mpeg_meta->sequenceext);
if (ret != GST_FLOW_OK)
goto sequence_parse_fail;
}
if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE) {
GST_DEBUG_OBJECT (mpeg_dec, "Drop frame since we haven't found a "
"MPEG_PACKET_SEQUENCE yet");
if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE)
goto need_sequence;
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_OK;
}
/* GST_MPEG_VIDEO_PACKET_PICTURE */
if (mpeg_meta->pichdr)
gst_vdp_mpeg_dec_handle_picture (mpeg_dec, mpeg_meta->pichdr);
/* MPEG_PACKET_PICTURE */
if (mpeg_frame->pic)
gst_vdp_mpeg_dec_handle_picture (mpeg_dec, mpeg_frame->pic);
/* GST_MPEG_VIDEO_PACKET_EXT_PICTURE_CODING */
if (mpeg_meta->picext)
gst_vdp_mpeg_dec_handle_picture_coding (mpeg_dec, mpeg_meta->picext, frame);
/* MPEG_PACKET_EXT_PICTURE_CODING */
if (mpeg_frame->pic_ext)
gst_vdp_mpeg_dec_handle_picture_coding (mpeg_dec, mpeg_frame->pic_ext,
frame);
/* MPEG_PACKET_GOP */
if (mpeg_frame->gop)
gst_vdp_mpeg_dec_handle_gop (mpeg_dec, mpeg_frame->gop);
/* MPEG_PACKET_EXT_QUANT_MATRIX */
if (mpeg_frame->qm_ext)
gst_vdp_mpeg_dec_handle_quant_matrix (mpeg_dec, mpeg_frame->qm_ext);
/* GST_MPEG_VIDEO_PACKET_GOP */
/* if (mpeg_meta->gop) */
/* GST_FIXME_OBJECT (mpeg_dec, "Handle GOP !"); */
/* gst_vdp_mpeg_dec_handle_gop (mpeg_dec, mpeg_frame.gop); */
/* GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX */
if (mpeg_meta->quantext)
gst_vdp_mpeg_dec_handle_quant_matrix (mpeg_dec, mpeg_meta->quantext);
info = &mpeg_dec->vdp_info;
info->slice_count = mpeg_frame->n_slices;
info->slice_count = mpeg_meta->num_slices;
GST_DEBUG_OBJECT (mpeg_dec, "picture coding type %d",
info->picture_coding_type);
/* check if we can decode the frame */
if (info->picture_coding_type != I_FRAME
&& info->backward_reference == VDP_INVALID_HANDLE) {
GST_DEBUG_OBJECT (mpeg_dec,
"Drop frame since we haven't got an I_FRAME yet");
if (info->picture_coding_type != GST_MPEG_VIDEO_PICTURE_TYPE_I
&& info->backward_reference == VDP_INVALID_HANDLE)
goto need_i_frame;
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_OK;
}
if (info->picture_coding_type == B_FRAME
&& info->forward_reference == VDP_INVALID_HANDLE) {
GST_DEBUG_OBJECT (mpeg_dec,
"Drop frame since we haven't got two non B_FRAMES yet");
if (info->picture_coding_type == GST_MPEG_VIDEO_PICTURE_TYPE_B
&& info->forward_reference == VDP_INVALID_HANDLE)
goto need_non_b_frame;
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_OK;
}
if (info->picture_coding_type != B_FRAME) {
if (info->picture_coding_type != GST_MPEG_VIDEO_PICTURE_TYPE_B) {
if (info->backward_reference != VDP_INVALID_HANDLE) {
ret = gst_base_video_decoder_finish_frame (base_video_decoder,
mpeg_dec->b_frame);
GST_DEBUG_OBJECT (mpeg_dec, "Pushing B frame");
ret = gst_video_decoder_finish_frame (video_decoder, mpeg_dec->b_frame);
}
if (info->forward_reference != VDP_INVALID_HANDLE) {
gst_video_frame_unref (mpeg_dec->f_frame);
GST_DEBUG_OBJECT (mpeg_dec, "Releasing no-longer needed forward frame");
gst_video_codec_frame_unref (mpeg_dec->f_frame);
info->forward_reference = VDP_INVALID_HANDLE;
}
@ -371,211 +363,112 @@ gst_vdp_mpeg_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
info->backward_reference = VDP_INVALID_HANDLE;
}
if (ret != GST_FLOW_OK) {
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return ret;
}
if (ret != GST_FLOW_OK)
goto exit_after_b_frame;
/* decode */
if (!gst_buffer_map (frame->input_buffer, &mapinfo, GST_MAP_READ))
goto map_fail;
vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
vbit[0].bitstream = GST_BUFFER_DATA (mpeg_frame->slices);
vbit[0].bitstream_bytes = GST_BUFFER_SIZE (mpeg_frame->slices);
vbit[0].bitstream = mapinfo.data + mpeg_meta->slice_offset;
vbit[0].bitstream_bytes = mapinfo.size - mpeg_meta->slice_offset;
ret = gst_vdp_decoder_render (GST_VDP_DECODER (mpeg_dec),
(VdpPictureInfo *) info, 1, vbit, &outbuf);
(VdpPictureInfo *) info, 1, vbit, frame);
gst_buffer_unmap (frame->input_buffer, &mapinfo);
if (ret != GST_FLOW_OK)
return ret;
goto render_fail;
frame->src_buffer = GST_BUFFER_CAST (outbuf);
vmem = (GstVdpVideoMemory *) gst_buffer_get_memory (frame->output_buffer, 0);
if (info->picture_coding_type == B_FRAME) {
ret = gst_base_video_decoder_finish_frame (base_video_decoder, frame);
if (info->picture_coding_type == GST_MPEG_VIDEO_PICTURE_TYPE_B) {
ret = gst_video_decoder_finish_frame (video_decoder, frame);
} else {
info->backward_reference = GST_VDP_VIDEO_BUFFER (outbuf)->surface;
mpeg_dec->b_frame = gst_video_frame_ref (frame);
info->backward_reference = vmem->surface;
mpeg_dec->b_frame = gst_video_codec_frame_ref (frame);
}
return ret;
}
static GstVideoFrame *
gst_vdp_mpeg_dec_create_frame (GstBaseVideoDecoder * base_video_decoder)
{
return GST_VIDEO_FRAME (gst_vdp_mpeg_frame_new ());
}
/* EARLY EXIT */
need_sequence:
{
GST_DEBUG_OBJECT (mpeg_dec, "Drop frame since we haven't found a "
"GST_MPEG_VIDEO_PACKET_SEQUENCE yet");
static GstFlowReturn
gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
GstBuffer * buf, gboolean at_eos, GstVideoFrame * frame)
{
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
gst_video_decoder_finish_frame (video_decoder, frame);
return GST_FLOW_OK;
}
GstVdpMpegFrame *mpeg_frame;
GstFlowReturn ret = GST_FLOW_OK;
GstBitReader b_reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);
guint8 start_code;
need_i_frame:
{
GST_DEBUG_OBJECT (mpeg_dec,
"Drop frame since we haven't got an I_FRAME yet");
if (gst_bit_reader_get_remaining (&b_reader) < 8 * 3 + 8)
gst_video_decoder_finish_frame (video_decoder, frame);
return GST_FLOW_OK;
}
need_non_b_frame:
{
GST_DEBUG_OBJECT (mpeg_dec,
"Drop frame since we haven't got two non B_FRAME yet");
gst_video_decoder_finish_frame (video_decoder, frame);
return GST_FLOW_OK;
}
/* ERRORS */
no_meta:
{
GST_ERROR_OBJECT (video_decoder,
"Input buffer does not have MpegVideo GstMeta");
gst_video_decoder_drop_frame (video_decoder, frame);
return GST_FLOW_ERROR;
/* skip sync_code */
gst_bit_reader_skip_unchecked (&b_reader, 8 * 3);
/* start_code */
start_code = gst_bit_reader_get_bits_uint8_unchecked (&b_reader, 8);
mpeg_frame = GST_VDP_MPEG_FRAME_CAST (frame);
if (start_code >= MPEG_PACKET_SLICE_MIN
&& start_code <= MPEG_PACKET_SLICE_MAX) {
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SLICE");
gst_vdp_mpeg_frame_add_slice (mpeg_frame, buf);
goto done;
}
switch (start_code) {
case MPEG_PACKET_SEQUENCE:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SEQUENCE");
if (mpeg_dec->prev_packet != -1)
ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
(GstVideoFrame **) & mpeg_frame);
mpeg_frame->seq = buf;
break;
case MPEG_PACKET_PICTURE:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_PICTURE");
if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE &&
mpeg_dec->prev_packet != MPEG_PACKET_GOP)
ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
(GstVideoFrame **) & mpeg_frame);
mpeg_frame->pic = buf;
break;
case MPEG_PACKET_GOP:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_GOP");
if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE)
ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
(GstVideoFrame **) & mpeg_frame);
mpeg_frame->gop = buf;
break;
case MPEG_PACKET_EXTENSION:
{
guint8 ext_code;
/* ext_code */
if (!gst_bit_reader_get_bits_uint8 (&b_reader, &ext_code, 4)) {
ret = GST_FLOW_ERROR;
gst_buffer_unref (buf);
goto done;
}
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXTENSION: %d", ext_code);
switch (ext_code) {
case MPEG_PACKET_EXT_SEQUENCE:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_SEQUENCE");
mpeg_frame->seq_ext = buf;
/* so that we don't finish the frame if we get a MPEG_PACKET_PICTURE
* or MPEG_PACKET_GOP after this */
start_code = MPEG_PACKET_SEQUENCE;
break;
case MPEG_PACKET_EXT_SEQUENCE_DISPLAY:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_SEQUENCE_DISPLAY");
/* so that we don't finish the frame if we get a MPEG_PACKET_PICTURE
* or MPEG_PACKET_GOP after this */
start_code = MPEG_PACKET_SEQUENCE;
break;
case MPEG_PACKET_EXT_PICTURE_CODING:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_PICTURE_CODING");
mpeg_frame->pic_ext = buf;
break;
case MPEG_PACKET_EXT_QUANT_MATRIX:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_QUANT_MATRIX");
mpeg_frame->qm_ext = buf;
break;
default:
gst_buffer_unref (buf);
}
break;
}
default:
gst_buffer_unref (buf);
sequence_parse_fail:
{
GST_ERROR_OBJECT (video_decoder, "Failed to handle sequence header");
gst_video_decoder_finish_frame (video_decoder, frame);
return ret;
}
if (at_eos && mpeg_frame->slices)
ret = gst_base_video_decoder_have_frame (base_video_decoder, TRUE, NULL);
exit_after_b_frame:
{
GST_WARNING_OBJECT (video_decoder, "Leaving after pushing B frame");
gst_video_decoder_finish_frame (video_decoder, frame);
return ret;
}
done:
mpeg_dec->prev_packet = start_code;
map_fail:
{
GST_ERROR_OBJECT (video_decoder, "Failed to map input buffer");
gst_video_decoder_drop_frame (video_decoder, frame);
return GST_FLOW_ERROR;
}
return ret;
}
static gint
gst_vdp_mpeg_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder,
GstAdapter * adapter)
{
gint m;
m = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100, 0,
gst_adapter_available (adapter));
if (m == -1)
return gst_adapter_available (adapter) - SYNC_CODE_SIZE;
return m;
}
static GstBaseVideoDecoderScanResult
gst_vdp_mpeg_dec_scan_for_packet_end (GstBaseVideoDecoder * base_video_decoder,
GstAdapter * adapter, guint * size, gboolean at_eos)
{
guint8 *data;
guint32 sync_code;
data = g_slice_alloc (SYNC_CODE_SIZE);
gst_adapter_copy (adapter, data, 0, SYNC_CODE_SIZE);
sync_code = ((data[0] << 16) | (data[1] << 8) | data[2]);
if (sync_code != 0x000001)
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC;
*size = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100,
SYNC_CODE_SIZE, gst_adapter_available (adapter) - SYNC_CODE_SIZE);
if (*size == -1)
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA;
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK;
render_fail:
{
GST_ERROR_OBJECT (video_decoder, "Error when rendering the frame");
gst_video_decoder_drop_frame (video_decoder, frame);
return ret;
}
}
static gboolean
gst_vdp_mpeg_dec_flush (GstBaseVideoDecoder * base_video_decoder)
gst_vdp_mpeg_dec_reset (GstVideoDecoder * video_decoder, gboolean hard)
{
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (video_decoder);
if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE)
gst_video_frame_unref (mpeg_dec->f_frame);
gst_video_codec_frame_unref (mpeg_dec->f_frame);
if (mpeg_dec->vdp_info.backward_reference != VDP_INVALID_HANDLE)
gst_video_frame_unref (mpeg_dec->b_frame);
gst_video_codec_frame_unref (mpeg_dec->b_frame);
gst_vdp_mpeg_dec_init_info (&mpeg_dec->vdp_info);
@ -585,9 +478,11 @@ gst_vdp_mpeg_dec_flush (GstBaseVideoDecoder * base_video_decoder)
}
static gboolean
gst_vdp_mpeg_dec_start (GstBaseVideoDecoder * base_video_decoder)
gst_vdp_mpeg_dec_start (GstVideoDecoder * video_decoder)
{
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (video_decoder);
GST_DEBUG_OBJECT (video_decoder, "Starting");
gst_vdp_mpeg_dec_init_info (&mpeg_dec->vdp_info);
@ -596,14 +491,13 @@ gst_vdp_mpeg_dec_start (GstBaseVideoDecoder * base_video_decoder)
memset (&mpeg_dec->stream_info, 0, sizeof (GstVdpMpegStreamInfo));
return GST_BASE_VIDEO_DECODER_CLASS
(parent_class)->start (base_video_decoder);
return GST_VIDEO_DECODER_CLASS (parent_class)->start (video_decoder);
}
static gboolean
gst_vdp_mpeg_dec_stop (GstBaseVideoDecoder * base_video_decoder)
gst_vdp_mpeg_dec_stop (GstVideoDecoder * video_decoder)
{
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (video_decoder);
if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE)
mpeg_dec->vdp_info.forward_reference = VDP_INVALID_HANDLE;
@ -612,13 +506,18 @@ gst_vdp_mpeg_dec_stop (GstBaseVideoDecoder * base_video_decoder)
mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE;
return GST_BASE_VIDEO_DECODER_CLASS (parent_class)->stop (base_video_decoder);
return GST_VIDEO_DECODER_CLASS (parent_class)->stop (video_decoder);
}
/* initialize the vdpaumpegdecoder's class */
static void
gst_vdp_mpeg_dec_base_init (gpointer gclass)
gst_vdp_mpeg_dec_class_init (GstVdpMpegDecClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
GstElementClass *element_class;
GstVideoDecoderClass *video_decoder_class;
element_class = GST_ELEMENT_CLASS (klass);
video_decoder_class = GST_VIDEO_DECODER_CLASS (klass);
gst_element_class_set_static_metadata (element_class,
"VDPAU Mpeg Decoder",
@ -628,27 +527,13 @@ gst_vdp_mpeg_dec_base_init (gpointer gclass)
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
}
/* initialize the vdpaumpegdecoder's class */
static void
gst_vdp_mpeg_dec_class_init (GstVdpMpegDecClass * klass)
{
GstBaseVideoDecoderClass *base_video_decoder_class;
video_decoder_class->start = gst_vdp_mpeg_dec_start;
video_decoder_class->stop = gst_vdp_mpeg_dec_stop;
video_decoder_class->reset = gst_vdp_mpeg_dec_reset;
base_video_decoder_class = GST_BASE_VIDEO_DECODER_CLASS (klass);
base_video_decoder_class->start = gst_vdp_mpeg_dec_start;
base_video_decoder_class->stop = gst_vdp_mpeg_dec_stop;
base_video_decoder_class->flush = gst_vdp_mpeg_dec_flush;
base_video_decoder_class->scan_for_sync = gst_vdp_mpeg_dec_scan_for_sync;
base_video_decoder_class->scan_for_packet_end =
gst_vdp_mpeg_dec_scan_for_packet_end;
base_video_decoder_class->parse_data = gst_vdp_mpeg_dec_parse_data;
base_video_decoder_class->handle_frame = gst_vdp_mpeg_dec_handle_frame;
base_video_decoder_class->create_frame = gst_vdp_mpeg_dec_create_frame;
video_decoder_class->handle_frame = gst_vdp_mpeg_dec_handle_frame;
video_decoder_class->set_format = gst_vdp_mpeg_dec_set_format;
}
static void
@ -669,6 +554,6 @@ gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info)
}
static void
gst_vdp_mpeg_dec_init (GstVdpMpegDec * mpeg_dec, GstVdpMpegDecClass * gclass)
gst_vdp_mpeg_dec_init (GstVdpMpegDec * mpeg_dec)
{
}

View file

@ -24,10 +24,20 @@
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include "../gstvdp/gstvdpdecoder.h"
#include "gstvdpmpegframe.h"
#include "../gstvdpdecoder.h"
G_BEGIN_DECLS
typedef struct _GstVdpMpegStreamInfo GstVdpMpegStreamInfo;
struct _GstVdpMpegStreamInfo
{
gint width, height;
gint fps_n, fps_d;
gint par_n, par_d;
gboolean interlaced;
gint version;
VdpDecoderProfile profile;
};
#define GST_TYPE_VDP_MPEG_DEC (gst_vdp_mpeg_dec_get_type())
#define GST_VDP_MPEG_DEC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_MPEG_DEC,GstVdpMpegDec))
@ -50,11 +60,13 @@ struct _GstVdpMpegDec
VdpDecoder decoder;
GstVdpMpegStreamInfo stream_info;
GstVdpMpegStreamInfo stream_info;
/* decoder state */
GstVideoCodecState *input_state;
GstVideoCodecState *output_state;
GstVdpMpegDecState state;
gint prev_packet;
gint prev_packet;
/* currently decoded frame info */
VdpPictureInfoMPEG1Or2 vdp_info;
@ -64,8 +76,7 @@ struct _GstVdpMpegDec
guint64 gop_frame;
/* forward and backward reference */
GstVideoFrame *f_frame, *b_frame;
GstVideoCodecFrame *f_frame, *b_frame;
};
struct _GstVdpMpegDecClass

View file

@ -1,134 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvdpmpegframe.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_mpeg_frame_debug);
#define GST_CAT_DEFAULT gst_vdp_mpeg_frame_debug
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdp_mpeg_frame_debug, "gstvdpmpegframe", 0, "Video Frame");
/* Append @buf to the frame's accumulated slice data (taking ownership
 * of @buf) and bump the slice count. */
void
gst_vdp_mpeg_frame_add_slice (GstVdpMpegFrame * mpeg_frame, GstBuffer * buf)
{
  GstBuffer *slices = mpeg_frame->slices;

  mpeg_frame->slices = (slices == NULL) ? buf : gst_buffer_append (slices, buf);
  mpeg_frame->n_slices++;
}
/* Allocate a new, empty GstVdpMpegFrame mini object. */
GstVdpMpegFrame *
gst_vdp_mpeg_frame_new (void)
{
  return
      GST_VDP_MPEG_FRAME_CAST (gst_mini_object_new (GST_TYPE_VDP_MPEG_FRAME));
}
static GObjectClass *gst_vdp_mpeg_frame_parent_class;
/* GstMiniObject finalize: drop every header/extension/slice buffer the
 * frame owns, then chain up to the parent class finalize. */
static void
gst_vdp_mpeg_frame_finalize (GstVdpMpegFrame * mpeg_frame)
{
  if (mpeg_frame->seq)
    gst_buffer_unref (mpeg_frame->seq);
  if (mpeg_frame->seq_ext)
    gst_buffer_unref (mpeg_frame->seq_ext);

  if (mpeg_frame->pic)
    gst_buffer_unref (mpeg_frame->pic);
  if (mpeg_frame->pic_ext)
    gst_buffer_unref (mpeg_frame->pic_ext);

  if (mpeg_frame->gop)
    gst_buffer_unref (mpeg_frame->gop);
  if (mpeg_frame->qm_ext)
    gst_buffer_unref (mpeg_frame->qm_ext);

  if (mpeg_frame->slices)
    gst_buffer_unref (mpeg_frame->slices);

  /* chain up so the base GstVideoFrame state is also released */
  GST_MINI_OBJECT_CLASS (gst_vdp_mpeg_frame_parent_class)->finalize
      (GST_MINI_OBJECT (mpeg_frame));
}
/* Instance init: a fresh frame starts with no parsed headers and no
 * slice data. */
static void
gst_vdp_mpeg_frame_init (GstVdpMpegFrame * mpeg_frame, gpointer g_class)
{
  mpeg_frame->seq = NULL;
  mpeg_frame->seq_ext = NULL;
  mpeg_frame->pic = NULL;
  mpeg_frame->pic_ext = NULL;
  mpeg_frame->gop = NULL;
  mpeg_frame->qm_ext = NULL;

  mpeg_frame->slices = NULL;
  mpeg_frame->n_slices = 0;
}
/* Class init: remember the parent class and install our finalize so
 * owned buffers are released when the last ref to a frame is dropped. */
static void
gst_vdp_mpeg_frame_class_init (gpointer g_class, gpointer class_data)
{
  GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);

  gst_vdp_mpeg_frame_parent_class = g_type_class_peek_parent (g_class);

  mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
      gst_vdp_mpeg_frame_finalize;
}
/* Lazy GType registration for GstVdpMpegFrame, a GstVideoFrame
 * subclass (GStreamer 0.10 style mini-object).  Also initializes the
 * debug category on first use via DEBUG_INIT. */
GType
gst_vdp_mpeg_frame_get_type (void)
{
  static GType _gst_vdp_mpeg_frame_type = 0;

  if (G_UNLIKELY (_gst_vdp_mpeg_frame_type == 0)) {
    static const GTypeInfo info = {
      sizeof (GstVdpMpegFrameClass),
      NULL,
      NULL,
      gst_vdp_mpeg_frame_class_init,
      NULL,
      NULL,
      sizeof (GstVdpMpegFrame),
      0,
      (GInstanceInitFunc) gst_vdp_mpeg_frame_init,
      NULL
    };

    _gst_vdp_mpeg_frame_type = g_type_register_static (GST_TYPE_VIDEO_FRAME,
        "GstVdpMpegFrame", &info, 0);

    DEBUG_INIT ();
  }
  return _gst_vdp_mpeg_frame_type;
}

View file

@ -1,78 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_VDP_MPEG_FRAME_H_
#define _GST_VDP_MPEG_FRAME_H_
#include <gst/gst.h>
#include <vdpau/vdpau.h>
#include "../basevideodecoder/gstvideoframe.h"
#define GST_TYPE_VDP_MPEG_FRAME (gst_vdp_mpeg_frame_get_type())
#define GST_IS_VDP_MPEG_FRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_MPEG_FRAME))
#define GST_VDP_MPEG_FRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_MPEG_FRAME, GstVdpMpegFrame))
#define GST_VDP_MPEG_FRAME_CAST(obj) ((GstVdpMpegFrame *)obj)
typedef struct _GstVdpMpegStreamInfo GstVdpMpegStreamInfo;
struct _GstVdpMpegStreamInfo
{
gint width, height;
gint fps_n, fps_d;
gint par_n, par_d;
gboolean interlaced;
gint version;
VdpDecoderProfile profile;
};
typedef struct _GstVdpMpegFrame GstVdpMpegFrame;
typedef struct _GstVdpMpegFrameClass GstVdpMpegFrameClass;
struct _GstVdpMpegFrame
{
GstVideoFrame video_frame;
GstBuffer *seq;
GstBuffer *seq_ext;
GstBuffer *pic;
GstBuffer *pic_ext;
GstBuffer *gop;
GstBuffer *qm_ext;
gint n_slices;
GstBuffer *slices;
};
struct _GstVdpMpegFrameClass
{
GstVideoFrameClass video_frame_class;
};
void gst_vdp_mpeg_frame_add_slice (GstVdpMpegFrame *mpeg_frame, GstBuffer *buf);
GstVdpMpegFrame *gst_vdp_mpeg_frame_new (void);
GType gst_vdp_mpeg_frame_get_type (void);
#endif

View file

@ -1,429 +0,0 @@
/* GStreamer
* Copyright (C) 2007 Jan Schmidt <thaytan@mad.scientist.com>
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include <gst/base/gstbitreader.h>
#include <string.h>
#include "mpegutil.h"
/* default intra quant matrix, in zig-zag order */
const guint8 default_intra_quantizer_matrix[64] = {
8,
16, 16,
19, 16, 19,
22, 22, 22, 22,
22, 22, 26, 24, 26,
27, 27, 27, 26, 26, 26,
26, 27, 27, 27, 29, 29, 29,
34, 34, 34, 29, 29, 29, 27, 27,
29, 29, 32, 32, 34, 34, 37,
38, 37, 35, 35, 34, 35,
38, 38, 40, 40, 40,
48, 48, 46, 46,
56, 56, 58,
69, 69,
83
};
const guint8 mpeg_zigzag_8x8[64] = {
0, 1, 8, 16, 9, 2, 3, 10,
17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 48, 41, 34,
27, 20, 13, 6, 7, 14, 21, 28,
35, 42, 49, 56, 57, 50, 43, 36,
29, 22, 15, 23, 30, 37, 44, 51,
58, 59, 52, 45, 38, 31, 39, 46,
53, 60, 61, 54, 47, 55, 62, 63
};
/* Bit-reader helpers: read `nbits` bits into `val`; on short data they
 * log a warning and jump to the enclosing function's `error:` label,
 * which every caller must provide. */
#define READ_UINT8(reader, val, nbits) { \
  if (!gst_bit_reader_get_bits_uint8 (reader, &val, nbits)) { \
    GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
    goto error; \
  } \
}

#define READ_UINT16(reader, val, nbits) { \
  if (!gst_bit_reader_get_bits_uint16 (reader, &val, nbits)) { \
    GST_WARNING ("failed to read uint16, nbits: %d", nbits); \
    goto error; \
  } \
}

#define READ_UINT32(reader, val, nbits) { \
  if (!gst_bit_reader_get_bits_uint32 (reader, &val, nbits)) { \
    GST_WARNING ("failed to read uint32, nbits: %d", nbits); \
    goto error; \
  } \
}

/* Fixed copy-paste bug: the warning previously said "uint32". */
#define READ_UINT64(reader, val, nbits) { \
  if (!gst_bit_reader_get_bits_uint64 (reader, &val, nbits)) { \
    GST_WARNING ("failed to read uint64, nbits: %d", nbits); \
    goto error; \
  } \
}
/* Translate a 4-bit MPEG frame-rate code from the sequence header into
 * the fps_n/fps_d fraction stored in @hdr.  Out-of-range codes are
 * forced to a valid 30000/1001 rate. */
static void
set_fps_from_code (MPEGSeqHdr * hdr, guint8 fps_code)
{
  static const gint rate_table[][2] = {
    {30, 1}, {24000, 1001}, {24, 1}, {25, 1},
    {30000, 1001}, {30, 1}, {50, 1}, {60000, 1001},
    {60, 1}, {30, 1}
  };

  if (fps_code >= 10) {
    /* Force a valid framerate */
    hdr->fps_n = 30000;
    hdr->fps_d = 1001;
    return;
  }

  hdr->fps_n = rate_table[fps_code][0];
  hdr->fps_d = rate_table[fps_code][1];
}
/* Set the Pixel Aspect Ratio in @hdr from a display-aspect-ratio code.
 * Requires hdr->width and hdr->height to be filled in first.
 *   Pixel_width  = DAR_width  * display_vertical_size
 *   Pixel_height = DAR_height * display_horizontal_size */
static void
set_par_from_dar (MPEGSeqHdr * hdr, guint8 asr_code)
{
  guint dar_w, dar_h;

  switch (asr_code) {
    case 0x02:                 /* 3:4 DAR = 4:3 pixels */
      dar_w = 4;
      dar_h = 3;
      break;
    case 0x03:                 /* 9:16 DAR */
      dar_w = 16;
      dar_h = 9;
      break;
    case 0x04:                 /* 1:2.21 DAR */
      dar_w = 221;
      dar_h = 100;
      break;
    case 0x01:                 /* Square pixels */
    default:
      hdr->par_w = hdr->par_h = 1;
      return;
  }

  hdr->par_w = dar_w * hdr->height;
  hdr->par_h = dar_h * hdr->width;
}
/**
 * mpeg_util_parse_sequence_extension:
 * @hdr: (out): structure filled with the parsed fields
 * @buffer: buffer starting at the extension start code
 *
 * Parses an MPEG-2 "Sequence Extension" into @hdr: profile/level,
 * progressive flag, chroma format, size/bitrate extensions and the
 * framerate extension fields.
 *
 * Returns: TRUE on success, FALSE if the buffer is truncated.
 */
gboolean
mpeg_util_parse_sequence_extension (MPEGSeqExtHdr * hdr, GstBuffer * buffer)
{
  /* note: removed a stray double semicolon after the initializer */
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);

  /* skip sync word */
  if (!gst_bit_reader_skip (&reader, 8 * 4))
    return FALSE;

  /* skip extension code */
  if (!gst_bit_reader_skip (&reader, 4))
    return FALSE;

  /* skip profile and level escape bit */
  if (!gst_bit_reader_skip (&reader, 1))
    return FALSE;

  READ_UINT8 (&reader, hdr->profile, 3);
  READ_UINT8 (&reader, hdr->level, 4);

  /* progressive */
  READ_UINT8 (&reader, hdr->progressive, 1);

  /* chroma format */
  READ_UINT8 (&reader, hdr->chroma_format, 2);

  /* resolution extension */
  READ_UINT8 (&reader, hdr->horiz_size_ext, 2);
  READ_UINT8 (&reader, hdr->vert_size_ext, 2);

  READ_UINT16 (&reader, hdr->bitrate_ext, 12);

  /* skip to framerate extension */
  if (!gst_bit_reader_skip (&reader, 9))
    return FALSE;

  /* framerate extension */
  READ_UINT8 (&reader, hdr->fps_n_ext, 2);
  READ_UINT8 (&reader, hdr->fps_d_ext, 2);

  return TRUE;

error:
  GST_WARNING ("error parsing \"Sequence Extension\"");
  return FALSE;
}
/**
 * mpeg_util_parse_sequence_hdr:
 * @hdr: (out): structure filled with the parsed fields
 * @buffer: buffer starting at the sequence-header start code
 *
 * Parses an MPEG-1/2 "Sequence Header": resolution, aspect ratio,
 * framerate, bitrate, VBV buffer size and the optional quantizer
 * matrices (defaults are substituted when a matrix is absent).
 *
 * Returns: TRUE on success, FALSE on truncated or corrupt data.
 */
gboolean
mpeg_util_parse_sequence_hdr (MPEGSeqHdr * hdr, GstBuffer * buffer)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
  /* renamed: the second 4-bit field is the frame-rate code, not a PAR
   * index, so the old name `par_idx` was misleading */
  guint8 dar_idx, fps_idx;
  guint8 load_intra_flag, load_non_intra_flag;

  /* skip sync word */
  if (!gst_bit_reader_skip (&reader, 8 * 4))
    return FALSE;

  /* resolution */
  READ_UINT16 (&reader, hdr->width, 12);
  READ_UINT16 (&reader, hdr->height, 12);

  /* aspect ratio (needs width/height set just above) */
  READ_UINT8 (&reader, dar_idx, 4);
  set_par_from_dar (hdr, dar_idx);

  /* framerate */
  READ_UINT8 (&reader, fps_idx, 4);
  set_fps_from_code (hdr, fps_idx);

  /* bitrate */
  READ_UINT32 (&reader, hdr->bitrate, 18);

  if (!gst_bit_reader_skip (&reader, 1))
    return FALSE;

  /* VBV buffer size */
  READ_UINT16 (&reader, hdr->vbv_buffer, 10);

  /* constrained parameters flag */
  READ_UINT8 (&reader, hdr->constrained_parameters_flag, 1);

  /* intra quantizer matrix: stored in zig-zag order in the stream,
   * de-zig-zagged while reading */
  READ_UINT8 (&reader, load_intra_flag, 1);
  if (load_intra_flag) {
    gint i;
    for (i = 0; i < 64; i++)
      READ_UINT8 (&reader, hdr->intra_quantizer_matrix[mpeg_zigzag_8x8[i]], 8);
  } else
    memcpy (hdr->intra_quantizer_matrix, default_intra_quantizer_matrix, 64);

  /* non intra quantizer matrix (flat 16 when absent) */
  READ_UINT8 (&reader, load_non_intra_flag, 1);
  if (load_non_intra_flag) {
    gint i;
    for (i = 0; i < 64; i++)
      READ_UINT8 (&reader, hdr->non_intra_quantizer_matrix[mpeg_zigzag_8x8[i]],
          8);
  } else
    memset (hdr->non_intra_quantizer_matrix, 16, 64);

  return TRUE;

error:
  GST_WARNING ("error parsing \"Sequence Header\"");
  return FALSE;
}
/* Parse an MPEG "Picture Header" (start code 0x00) into @hdr:
 * temporal sequence number, picture type, VBV delay and the
 * full-pel / f_code motion fields present for P and B pictures.
 * Returns FALSE on truncated data or an invalid picture type. */
gboolean
mpeg_util_parse_picture_hdr (MPEGPictureHdr * hdr, GstBuffer * buffer)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);

  /* skip sync word */
  if (!gst_bit_reader_skip (&reader, 8 * 4))
    return FALSE;

  /* temporal sequence number */
  if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->tsn, 10))
    return FALSE;

  /* frame type */
  if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->pic_type, 3))
    return FALSE;

  if (hdr->pic_type == 0 || hdr->pic_type > 4)
    return FALSE;               /* Corrupted picture packet */

  /* VBV delay */
  if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->vbv_delay, 16))
    return FALSE;

  /* forward motion fields: present for P and B pictures only;
   * zeroed otherwise so callers can read them unconditionally */
  if (hdr->pic_type == P_FRAME || hdr->pic_type == B_FRAME) {
    READ_UINT8 (&reader, hdr->full_pel_forward_vector, 1);
    READ_UINT8 (&reader, hdr->f_code[0][0], 3);
    hdr->f_code[0][1] = hdr->f_code[0][0];
  } else {
    hdr->full_pel_forward_vector = 0;
    hdr->f_code[0][0] = hdr->f_code[0][1] = 0;
  }

  /* backward motion fields: B pictures only */
  if (hdr->pic_type == B_FRAME) {
    READ_UINT8 (&reader, hdr->full_pel_backward_vector, 1);
    READ_UINT8 (&reader, hdr->f_code[1][0], 3);
    hdr->f_code[1][1] = hdr->f_code[1][0];
  } else {
    hdr->full_pel_backward_vector = 0;
    hdr->f_code[1][0] = hdr->f_code[1][1] = 0;
  }

  return TRUE;

error:
  GST_WARNING ("error parsing \"Picture Header\"");
  return FALSE;
}
/* Parse an MPEG-2 "Picture Coding Extension" into @ext: the four
 * f_codes plus the coding flags (picture structure, field order,
 * DCT/scan options, etc.).  Returns FALSE if the buffer is too short. */
gboolean
mpeg_util_parse_picture_coding_extension (MPEGPictureExt * ext,
    GstBuffer * buffer)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);

  /* skip sync word */
  if (!gst_bit_reader_skip (&reader, 8 * 4))
    return FALSE;

  /* skip extension code */
  if (!gst_bit_reader_skip (&reader, 4))
    return FALSE;

  /* f_code: [0]=forward, [1]=backward; [..][0]=horizontal, [..][1]=vertical */
  READ_UINT8 (&reader, ext->f_code[0][0], 4);
  READ_UINT8 (&reader, ext->f_code[0][1], 4);
  READ_UINT8 (&reader, ext->f_code[1][0], 4);
  READ_UINT8 (&reader, ext->f_code[1][1], 4);

  /* intra DC precision */
  READ_UINT8 (&reader, ext->intra_dc_precision, 2);

  /* picture structure */
  READ_UINT8 (&reader, ext->picture_structure, 2);

  /* top field first */
  READ_UINT8 (&reader, ext->top_field_first, 1);

  /* frame pred frame dct */
  READ_UINT8 (&reader, ext->frame_pred_frame_dct, 1);

  /* concealment motion vectors */
  READ_UINT8 (&reader, ext->concealment_motion_vectors, 1);

  /* q scale type */
  READ_UINT8 (&reader, ext->q_scale_type, 1);

  /* intra vlc format */
  READ_UINT8 (&reader, ext->intra_vlc_format, 1);

  /* alternate scan */
  READ_UINT8 (&reader, ext->alternate_scan, 1);

  /* repeat first field */
  READ_UINT8 (&reader, ext->repeat_first_field, 1);

  /* chroma_420_type */
  READ_UINT8 (&reader, ext->chroma_420_type, 1);

  /* progressive_frame */
  READ_UINT8 (&reader, ext->progressive_frame, 1);

  return TRUE;

error:
  GST_WARNING ("error parsing \"Picture Coding Extension\"");
  return FALSE;
}
/* Parse a "Group of Pictures" header (start code 0xb8) into @gop:
 * timecode fields (drop-frame flag, h/m/s/frame) and the closed/broken
 * GOP flags.  Returns FALSE if the buffer is too short. */
gboolean
mpeg_util_parse_gop (MPEGGop * gop, GstBuffer * buffer)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);

  /* skip sync word */
  if (!gst_bit_reader_skip (&reader, 8 * 4))
    return FALSE;

  READ_UINT8 (&reader, gop->drop_frame_flag, 1);
  READ_UINT8 (&reader, gop->hour, 5);
  READ_UINT8 (&reader, gop->minute, 6);

  /* skip unused bit */
  if (!gst_bit_reader_skip (&reader, 1))
    return FALSE;

  READ_UINT8 (&reader, gop->second, 6);
  READ_UINT8 (&reader, gop->frame, 6);

  READ_UINT8 (&reader, gop->closed_gop, 1);
  READ_UINT8 (&reader, gop->broken_gop, 1);

  return TRUE;

error:
  GST_WARNING ("error parsing \"GOP\"");
  return FALSE;
}
/* Parse a "Quant Matrix Extension" into @qm.  The stream stores the
 * matrices in zig-zag order; they are de-zig-zagged while reading.
 * When a matrix is not present, the MPEG default intra matrix or a
 * flat non-intra matrix of 16s is substituted.
 * Returns FALSE on truncated data. */
gboolean
mpeg_util_parse_quant_matrix (MPEGQuantMatrix * qm, GstBuffer * buffer)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
  guint8 load_intra_flag, load_non_intra_flag;

  /* skip sync word */
  if (!gst_bit_reader_skip (&reader, 8 * 4))
    return FALSE;

  /* skip extension code */
  if (!gst_bit_reader_skip (&reader, 4))
    return FALSE;

  /* intra quantizer matrix */
  READ_UINT8 (&reader, load_intra_flag, 1);
  if (load_intra_flag) {
    gint i;
    for (i = 0; i < 64; i++) {
      READ_UINT8 (&reader, qm->intra_quantizer_matrix[mpeg_zigzag_8x8[i]], 8);
    }
  } else
    memcpy (qm->intra_quantizer_matrix, default_intra_quantizer_matrix, 64);

  /* non intra quantizer matrix */
  READ_UINT8 (&reader, load_non_intra_flag, 1);
  if (load_non_intra_flag) {
    gint i;
    for (i = 0; i < 64; i++) {
      READ_UINT8 (&reader, qm->non_intra_quantizer_matrix[mpeg_zigzag_8x8[i]],
          8);
    }
  } else
    memset (qm->non_intra_quantizer_matrix, 16, 64);

  return TRUE;

error:
  GST_WARNING ("error parsing \"Quant Matrix Extension\"");
  return FALSE;
}
#undef READ_UINT8
#undef READ_UINT16
#undef READ_UINT32
#undef READ_UINT64

View file

@ -1,150 +0,0 @@
/* GStreamer
* Copyright (C) 2007 Jan Schmidt <thaytan@mad.scientist.com>
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __MPEGUTIL_H__
#define __MPEGUTIL_H__
#include <gst/gst.h>
typedef struct MPEGSeqHdr MPEGSeqHdr;
typedef struct MPEGSeqExtHdr MPEGSeqExtHdr;
typedef struct MPEGPictureHdr MPEGPictureHdr;
typedef struct MPEGPictureExt MPEGPictureExt;
typedef struct MPEGGop MPEGGop;
typedef struct MPEGQuantMatrix MPEGQuantMatrix;

/* Packet ID codes for different packet types we
 * care about */
#define MPEG_PACKET_PICTURE 0x00
#define MPEG_PACKET_SLICE_MIN 0x01
#define MPEG_PACKET_SLICE_MAX 0xaf
#define MPEG_PACKET_SEQUENCE 0xb3
#define MPEG_PACKET_EXTENSION 0xb5
#define MPEG_PACKET_SEQUENCE_END 0xb7
#define MPEG_PACKET_GOP 0xb8
#define MPEG_PACKET_NONE 0xff

/* Extension codes we care about */
#define MPEG_PACKET_EXT_SEQUENCE 0x01
#define MPEG_PACKET_EXT_SEQUENCE_DISPLAY 0x02
#define MPEG_PACKET_EXT_QUANT_MATRIX 0x03
#define MPEG_PACKET_EXT_PICTURE_CODING 0x08

/* frame types */
#define I_FRAME 1
#define P_FRAME 2
#define B_FRAME 3

/* Parsed MPEG-1/2 sequence header (see mpeg_util_parse_sequence_hdr) */
struct MPEGSeqHdr
{
  /* Pixel-Aspect Ratio from DAR code via set_par_from_dar */
  guint par_w, par_h;
  /* Width and Height of the video */
  guint16 width, height;
  /* Framerate */
  guint fps_n, fps_d;

  guint32 bitrate;
  guint16 vbv_buffer;

  guint8 constrained_parameters_flag;

  /* Quantizer matrices, de-zig-zagged at parse time */
  guint8 intra_quantizer_matrix[64];
  guint8 non_intra_quantizer_matrix[64];
};

/* Parsed MPEG-2 sequence extension */
struct MPEGSeqExtHdr
{

  /* mpeg2 decoder profile */
  guint8 profile;
  /* mpeg2 decoder level */
  guint8 level;

  guint8 progressive;
  guint8 chroma_format;

  guint8 horiz_size_ext, vert_size_ext;

  guint16 bitrate_ext;
  guint8 fps_n_ext, fps_d_ext;

};

/* Parsed picture header (temporal sequence number, type, motion info) */
struct MPEGPictureHdr
{
  guint16 tsn;
  guint8 pic_type;
  guint16 vbv_delay;

  guint8 full_pel_forward_vector, full_pel_backward_vector;

  guint8 f_code[2][2];
};

/* Parsed MPEG-2 picture coding extension */
struct MPEGPictureExt
{
  guint8 f_code[2][2];

  guint8 intra_dc_precision;
  guint8 picture_structure;
  guint8 top_field_first;
  guint8 frame_pred_frame_dct;
  guint8 concealment_motion_vectors;
  guint8 q_scale_type;
  guint8 intra_vlc_format;
  guint8 alternate_scan;
  guint8 repeat_first_field;
  guint8 chroma_420_type;
  guint8 progressive_frame;
};

/* Parsed GOP header: timecode plus closed/broken flags */
struct MPEGGop
{
  guint8 drop_frame_flag;

  guint8 hour, minute, second, frame;

  guint8 closed_gop;
  guint8 broken_gop;
};

/* Quantizer matrices from a quant matrix extension */
struct MPEGQuantMatrix
{
  guint8 intra_quantizer_matrix[64];
  guint8 non_intra_quantizer_matrix[64];
};

/* All parsers return TRUE on success and FALSE on truncated/corrupt
 * input; @buffer must start at the relevant start code. */
gboolean mpeg_util_parse_sequence_hdr (MPEGSeqHdr *hdr, GstBuffer *buffer);

gboolean mpeg_util_parse_sequence_extension (MPEGSeqExtHdr *hdr,
    GstBuffer *buffer);

gboolean mpeg_util_parse_picture_hdr (MPEGPictureHdr * hdr, GstBuffer *buffer);

gboolean mpeg_util_parse_picture_coding_extension (MPEGPictureExt *ext,
    GstBuffer *buffer);

gboolean mpeg_util_parse_gop (MPEGGop * gop, GstBuffer *buffer);

gboolean mpeg_util_parse_quant_matrix (MPEGQuantMatrix * qm, GstBuffer *buffer);

#endif

View file

@ -23,24 +23,14 @@
#include <gst/gst.h>
#include "../basevideodecoder/gstvideoframe.h"
#include "mpeg4util.h"
#define GST_TYPE_MPEG4_FRAME (gst_mpeg4_frame_get_type())
#define GST_IS_MPEG4_FRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MPEG4_FRAME))
#define GST_MPEG4_FRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_MPEG4_FRAME, GstMpeg4Frame))
#define GST_MPEG4_FRAME_CAST(obj) ((GstMpeg4Frame *)obj)
#define GST_MPEG4_FRAME_GOT_PRIMARY GST_VIDEO_FRAME_FLAG_LAST
typedef struct _GstMpeg4Frame GstMpeg4Frame;
typedef struct _GstMpeg4FrameClass GstMpeg4FrameClass;
struct _GstMpeg4Frame
{
GstVideoFrame video_frame;
GstBuffer *vos_buf;
GstBuffer *vo_buf;
GstBuffer *vol_buf;
@ -50,15 +40,6 @@ struct _GstMpeg4Frame
guint32 vop_time;
};
struct _GstMpeg4FrameClass
{
GstVideoFrameClass video_frame_class;
};
GstMpeg4Frame *gst_mpeg4_frame_new (void);
GType gst_mpeg4_frame_get_type (void);
#endif
#endif

View file

@ -61,8 +61,8 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_DEBUG_CATEGORY_INIT (gst_vdp_mpeg4_dec_debug, "vdpaumpeg4dec", 0, \
"VDPAU mpeg4 decoder");
GST_BOILERPLATE_FULL (GstVdpMpeg4Dec, gst_vdp_mpeg4_dec,
GstVdpDecoder, GST_TYPE_VDP_DECODER, DEBUG_INIT);
G_DEFINE_TYPE_FULL (GstVdpMpeg4Dec, gst_vdp_mpeg4_dec, GST_TYPE_VDP_DECODER,
DEBUG_INIT);
#define SYNC_CODE_SIZE 3
@ -81,8 +81,8 @@ gst_vdp_mpeg4_dec_fill_info (GstVdpMpeg4Dec * mpeg4_dec,
/* forward reference */
if (vop->coding_type != I_VOP && mpeg4_dec->f_frame) {
info.forward_reference =
GST_VDP_VIDEO_BUFFER (GST_VIDEO_FRAME (mpeg4_dec->
f_frame)->src_buffer)->surface;
GST_VDP_VIDEO_BUFFER (GST_VIDEO_FRAME (mpeg4_dec->f_frame)->
src_buffer)->surface;
}
if (vop->coding_type == B_VOP) {
@ -100,8 +100,8 @@ gst_vdp_mpeg4_dec_fill_info (GstVdpMpeg4Dec * mpeg4_dec,
/* backward reference */
if (mpeg4_dec->b_frame) {
info.backward_reference =
GST_VDP_VIDEO_BUFFER (GST_VIDEO_FRAME (mpeg4_dec->
b_frame)->src_buffer)->surface;
GST_VDP_VIDEO_BUFFER (GST_VIDEO_FRAME (mpeg4_dec->b_frame)->
src_buffer)->surface;
}
}

View file

@ -23,7 +23,7 @@
#include <gst/gst.h>
#include "../gstvdp/gstvdpdecoder.h"
#include "../gstvdpdecoder.h"
#include "mpeg4util.h"
#include "gstmpeg4frame.h"