vdpau: base vdpaumpegdec on GstBaseVideoDecoder

Carl-Anton Ingmarsson 2010-06-09 15:43:43 +02:00
parent 4926c6c617
commit 0490cb89c6
35 changed files with 3125 additions and 1243 deletions


@@ -1728,6 +1728,8 @@ sys/qtwrapper/Makefile
sys/shm/Makefile
sys/vcd/Makefile
sys/vdpau/Makefile
sys/vdpau/gstvdp/Makefile
sys/vdpau/basevideodecoder/Makefile
sys/wasapi/Makefile
sys/wininet/Makefile
sys/winks/Makefile


@@ -1,53 +1,35 @@
SUBDIRS = basevideodecoder gstvdp
plugin_LTLIBRARIES = libgstvdpau.la
libgstvdpau_la_SOURCES = \
gstvdpmpegdec.c \
mpegutil.c \
gstvdpau.c \
gstvdpvideopostprocess.c \
gstvdpsink.c
gstvdpsink.c \
mpeg/gstvdpmpegframe.c \
mpeg/mpegutil.c \
mpeg/gstvdpmpegdec.c \
h264/gstnalreader.c \
h264/gsth264parser.c \
h264/gstvdph264dec.c
libgstvdpau_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS)
libgstvdpau_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE_LIBS) $(X11_LIBS) -lgstvideo-$(GST_MAJORMINOR) \
-lgstinterfaces-$(GST_MAJORMINOR) $(VDPAU_LIBS) libgstvdp-@GST_MAJORMINOR@.la
-lgstinterfaces-$(GST_MAJORMINOR) $(VDPAU_LIBS) \
basevideodecoder/libgstbasevideodecoder.la \
gstvdp/libgstvdp-@GST_MAJORMINOR@.la
libgstvdpau_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvdpau_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = \
gstvdpmpegdec.h \
mpegutil.h \
gstvdputils.h \
gstvdpvideopostprocess.h \
gstvdpsink.h
lib_LTLIBRARIES = libgstvdp-@GST_MAJORMINOR@.la
libgstvdp_@GST_MAJORMINOR@_la_SOURCES = \
gstvdpdevice.c \
gstvdputils.c \
gstvdpvideobuffer.c \
gstvdpoutputbuffer.c \
gstvdpvideosrcpad.c \
gstvdpoutputsrcpad.c \
gstvdp.c
libgstvdp_@GST_MAJORMINOR@includedir = $(includedir)/gstreamer-@GST_MAJORMINOR@/gst/vdpau
libgstvdp_@GST_MAJORMINOR@include_HEADERS = \
gstvdpdevice.h \
gstvdpvideobuffer.h \
gstvdpoutputbuffer.h \
gstvdpvideosrcpad.h \
gstvdpoutputsrcpad.h \
gstvdp.h
libgstvdp_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS)
libgstvdp_@GST_MAJORMINOR@_la_LIBADD = $(GST_LIBS) $(X11_LIBS) $(VDPAU_LIBS) \
-lgstvideo-$(GST_MAJORMINOR)
libgstvdp_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_LT_LDFLAGS) $(GST_ALL_LDFLAGS)
libgstvdp_@GST_MAJORMINOR@_la_LIBTOOLFLAGS = --tag=disable-static
gstvdpsink.h \
mpeg/gstvdpmpegframe.h \
mpeg/mpegutil.h \
mpeg/gstvdpmpegdec.h \
h264/gstnalreader.h \
h264/gsth264parser.h \
h264/gstvdph264dec.h


@@ -0,0 +1,15 @@
noinst_LTLIBRARIES = libgstbasevideodecoder.la
libgstbasevideodecoder_la_SOURCES = \
gstvideoframe.c \
gstbasevideodecoder.c
libgstbasevideodecoder_la_CFLAGS = $(GST_CFLAGS)
libgstbasevideodecoder_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR)
libgstbasevideodecoder_la_LDFLAGS = $(GST_ALL_LDFLAGS) -module -avoid-version
noinst_HEADERS = \
gstvideoframe.h \
gstbasevideodecoder.h \
gstbasevideoutils.h

File diff suppressed because it is too large

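The suppressed diff above is too large to display. As orientation only, here is a hypothetical sketch of the chain-style driving loop a base video decoder class like this typically implements, written against the fields and vmethods declared in gstbasevideodecoder.h below; the control flow and the sketch_chain name are assumptions, not the suppressed code.

#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include "gstbasevideodecoder.h"

/* Hypothetical sketch only: not the suppressed implementation. */
static GstFlowReturn
sketch_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *dec = GST_BASE_VIDEO_DECODER (GST_PAD_PARENT (pad));
  GstBaseVideoDecoderClass *klass = GST_BASE_VIDEO_DECODER_GET_CLASS (dec);
  GstFlowReturn ret = GST_FLOW_OK;

  gst_adapter_push (dec->input_adapter, buf);

  while (ret == GST_FLOW_OK) {
    GstBaseVideoDecoderScanResult res;
    guint size;
    GstBuffer *packet;

    /* (re)acquire sync before looking for packet boundaries */
    if (!dec->have_sync) {
      gint offset = klass->scan_for_sync (dec, dec->input_adapter);
      if (offset < 0)
        break;                  /* no sync pattern yet, wait for more data */
      gst_adapter_flush (dec->input_adapter, offset);
      dec->have_sync = TRUE;
    }

    res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, FALSE);
    if (res == GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA)
      break;                    /* wait for more input */
    if (res == GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC) {
      dec->have_sync = FALSE;
      continue;
    }

    /* hand one complete packet to the subclass; parse_data() is expected to
     * call gst_base_video_decoder_have_frame() once a whole frame has been
     * collected, which eventually invokes the handle_frame() vmethod */
    packet = gst_adapter_take_buffer (dec->input_adapter, size);
    ret = klass->parse_data (dec, packet, FALSE);
  }

  return ret;
}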

@@ -0,0 +1,206 @@
/* GStreamer
* Copyright (C) 2008 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_BASE_VIDEO_DECODER_H_
#define _GST_BASE_VIDEO_DECODER_H_
#include "gstbasevideoutils.h"
#include "gstvideoframe.h"
G_BEGIN_DECLS
#define GST_TYPE_BASE_VIDEO_DECODER \
(gst_base_video_decoder_get_type())
#define GST_BASE_VIDEO_DECODER(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoder))
#define GST_BASE_VIDEO_DECODER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoderClass))
#define GST_BASE_VIDEO_DECODER_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoderClass))
#define GST_IS_BASE_VIDEO_DECODER(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_DECODER))
#define GST_IS_BASE_VIDEO_DECODER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_DECODER))
/**
* GST_BASE_VIDEO_DECODER_SINK_NAME:
*
* The name of the template for the sink pad.
*/
#define GST_BASE_VIDEO_DECODER_SINK_NAME "sink"
/**
* GST_BASE_VIDEO_DECODER_SRC_NAME:
*
* The name of the template for the source pad.
*/
#define GST_BASE_VIDEO_DECODER_SRC_NAME "src"
/**
* GST_BASE_VIDEO_DECODER_SRC_PAD:
* @obj: base video decoder instance
*
* Gives the pointer to the source #GstPad object of the element.
*/
#define GST_BASE_VIDEO_DECODER_SRC_PAD(obj) (((GstBaseVideoDecoder *) (obj))->srcpad)
/**
* GST_BASE_VIDEO_DECODER_SINK_PAD:
* @obj: base video decoder instance
*
* Gives the pointer to the sink #GstPad object of the element.
*/
#define GST_BASE_VIDEO_DECODER_SINK_PAD(obj) (((GstBaseVideoDecoder *) (obj))->sinkpad)
/**
* GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA:
*
*/
#define GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
typedef enum _GstBaseVideoDecoderScanResult GstBaseVideoDecoderScanResult;
enum _GstBaseVideoDecoderScanResult
{
GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK,
GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC,
GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA
};
typedef struct _GstBaseVideoDecoder GstBaseVideoDecoder;
typedef struct _GstBaseVideoDecoderClass GstBaseVideoDecoderClass;
struct _GstBaseVideoDecoder
{
GstElement element;
/*< private >*/
GstPad *sinkpad;
GstPad *srcpad;
GstAdapter *input_adapter;
GList *frames;
gboolean have_sync;
gboolean discont;
GstVideoState state;
GstSegment segment;
guint64 presentation_frame_number;
guint64 system_frame_number;
GstCaps *caps;
gboolean have_src_caps;
GstVideoFrame *current_frame;
gint distance_from_sync;
gint reorder_depth;
GstClockTime buffer_timestamp;
GstClockTime timestamp_offset;
gdouble proportion;
GstClockTime earliest_time;
guint64 input_offset;
guint64 frame_offset;
GstClockTime last_timestamp;
guint64 base_picture_number;
gint field_index;
gboolean is_delta_unit;
GList *timestamps;
gboolean have_segment;
/* properties */
gboolean sink_clipping;
gboolean packetized;
};
struct _GstBaseVideoDecoderClass
{
GstElementClass element_class;
gboolean (*start) (GstBaseVideoDecoder *coder);
gboolean (*stop) (GstBaseVideoDecoder *coder);
gboolean (*flush) (GstBaseVideoDecoder *coder);
gboolean (*set_sink_caps) (GstBaseVideoDecoder *base_video_decoder,
GstCaps *caps);
GstPad *(*create_srcpad) (GstBaseVideoDecoder * base_video_decoder,
GstBaseVideoDecoderClass *base_video_decoder_class);
gint (*scan_for_sync) (GstBaseVideoDecoder *coder, GstAdapter *adapter);
GstBaseVideoDecoderScanResult (*scan_for_packet_end)
(GstBaseVideoDecoder *coder, GstAdapter *adapter, guint *size, gboolean at_eos);
GstFlowReturn (*parse_data) (GstBaseVideoDecoder *decoder,
GstBuffer *buf, gboolean at_eos);
GstFlowReturn (*parse_codec_data) (GstBaseVideoDecoder *decoder,
GstBuffer *buf);
GstVideoFrame *(*create_frame) (GstBaseVideoDecoder *coder);
GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrame *frame,
GstClockTimeDiff deadline);
GstFlowReturn (*shape_output) (GstBaseVideoDecoder *coder,
GstBuffer *buf);
};
GType gst_base_video_decoder_get_type (void);
GstVideoFrame *gst_base_video_decoder_get_current_frame (GstBaseVideoDecoder
*base_video_decoder);
GstVideoFrame *gst_base_video_decoder_get_frame (GstBaseVideoDecoder *coder,
gint frame_number);
GstVideoFrame *gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder *coder);
GstFlowReturn gst_base_video_decoder_finish_frame (GstBaseVideoDecoder *base_video_decoder,
GstVideoFrame *frame);
void gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame);
GstFlowReturn
gst_base_video_decoder_have_frame (GstBaseVideoDecoder *base_video_decoder,
GstVideoFrame **new_frame);
GstVideoState * gst_base_video_decoder_get_state (GstBaseVideoDecoder *base_video_decoder);
void gst_base_video_decoder_set_state (GstBaseVideoDecoder *base_video_decoder,
GstVideoState *state);
void gst_base_video_decoder_lost_sync (GstBaseVideoDecoder *base_video_decoder);
void gst_base_video_decoder_update_src_caps (GstBaseVideoDecoder *base_video_decoder);
G_END_DECLS
#endif

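For orientation, a hypothetical minimal subclass skeleton (FooDec and all foo_dec_* names are invented) showing where a decoder hooks the vmethods declared above; the bodies are stubs, not a working decoder.

#include <gst/gst.h>
#include "gstbasevideodecoder.h"

typedef struct _FooDec
{
  GstBaseVideoDecoder base_video_decoder;
} FooDec;

typedef struct _FooDecClass
{
  GstBaseVideoDecoderClass base_video_decoder_class;
} FooDecClass;

GST_BOILERPLATE (FooDec, foo_dec, GstBaseVideoDecoder,
    GST_TYPE_BASE_VIDEO_DECODER);

static gint
foo_dec_scan_for_sync (GstBaseVideoDecoder * dec, GstAdapter * adapter)
{
  /* stub: pretend the stream is always in sync */
  return 0;
}

static GstBaseVideoDecoderScanResult
foo_dec_scan_for_packet_end (GstBaseVideoDecoder * dec, GstAdapter * adapter,
    guint * size, gboolean at_eos)
{
  /* stub: treat everything currently buffered as one packet */
  *size = gst_adapter_available (adapter);
  return (*size > 0) ? GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK :
      GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA;
}

static GstFlowReturn
foo_dec_parse_data (GstBaseVideoDecoder * dec, GstBuffer * buf, gboolean at_eos)
{
  /* a real decoder collects buffers into the current frame and calls
   * gst_base_video_decoder_have_frame() when the frame is complete */
  gst_buffer_unref (buf);
  return GST_FLOW_OK;
}

static GstFlowReturn
foo_dec_handle_frame (GstBaseVideoDecoder * dec, GstVideoFrame * frame,
    GstClockTimeDiff deadline)
{
  /* a real decoder fills frame->src_buffer and calls
   * gst_base_video_decoder_finish_frame(); the stub just drops the frame */
  gst_base_video_decoder_skip_frame (dec, frame);
  return GST_FLOW_OK;
}

static void
foo_dec_base_init (gpointer klass)
{
  /* pad templates and element details would be registered here */
}

static void
foo_dec_class_init (FooDecClass * klass)
{
  GstBaseVideoDecoderClass *dec_class = GST_BASE_VIDEO_DECODER_CLASS (klass);

  dec_class->scan_for_sync = foo_dec_scan_for_sync;
  dec_class->scan_for_packet_end = foo_dec_scan_for_packet_end;
  dec_class->parse_data = foo_dec_parse_data;
  dec_class->handle_frame = foo_dec_handle_frame;
}

static void
foo_dec_init (FooDec * dec, FooDecClass * klass)
{
}

The GstVdpMpegDec hunks further down in this commit show the same pattern with real scanning, parsing and VDPAU decoding in place of the stubs.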

@@ -0,0 +1,55 @@
/* GStreamer
* Copyright (C) 2008 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_BASE_VIDEO_UTILS_H_
#define _GST_BASE_VIDEO_UTILS_H_
#define GST_USE_UNSTABLE_API 1
#ifndef GST_USE_UNSTABLE_API
#warning "The base video utils API is unstable and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
typedef struct _GstVideoState GstVideoState;
struct _GstVideoState
{
gint width, height;
gint fps_n, fps_d;
gint par_n, par_d;
gboolean interlaced;
gint clean_width, clean_height;
gint clean_offset_left, clean_offset_top;
gint bytes_per_picture;
GstBuffer *codec_data;
};
G_END_DECLS

#endif /* _GST_BASE_VIDEO_UTILS_H_ */

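A hedged illustration of how the shared GstVideoState is meant to be used: a hypothetical set_sink_caps implementation (foo_dec_set_sink_caps is an invented name) that fills the state from the negotiated caps and pushes it back through the helpers declared in gstbasevideodecoder.h above.

#include <gst/gst.h>
#include "gstbasevideodecoder.h"

static gboolean
foo_dec_set_sink_caps (GstBaseVideoDecoder * base_video_decoder, GstCaps * caps)
{
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  GstVideoState *state;

  state = gst_base_video_decoder_get_state (base_video_decoder);

  /* pick up whatever upstream negotiated; a real decoder would usually
   * override these again from the parsed sequence headers */
  gst_structure_get_int (structure, "width", &state->width);
  gst_structure_get_int (structure, "height", &state->height);
  gst_structure_get_fraction (structure, "framerate",
      &state->fps_n, &state->fps_d);
  gst_structure_get_fraction (structure, "pixel-aspect-ratio",
      &state->par_n, &state->par_d);

  gst_base_video_decoder_set_state (base_video_decoder, state);
  gst_base_video_decoder_update_src_caps (base_video_decoder);

  return TRUE;
}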

@@ -0,0 +1,105 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvideoframe.h"
GST_DEBUG_CATEGORY_STATIC (gst_video_frame_debug);
#define GST_CAT_DEFAULT gst_video_frame_debug
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_video_frame_debug, "gstvideoframe", 0, "Video Frame");
GstVideoFrame *
gst_video_frame_new (void)
{
GstVideoFrame *frame;
frame = (GstVideoFrame *) gst_mini_object_new (GST_TYPE_VIDEO_FRAME);
return frame;
}
static GObjectClass *gst_video_frame_parent_class;
static void
gst_video_frame_finalize (GstVideoFrame * frame)
{
if (frame->sink_buffer)
gst_buffer_unref (frame->sink_buffer);
if (frame->src_buffer)
gst_buffer_unref (frame->src_buffer);
GST_MINI_OBJECT_CLASS (gst_video_frame_parent_class)->finalize
(GST_MINI_OBJECT (frame));
}
static void
gst_video_frame_init (GstVideoFrame * frame, gpointer g_class)
{
frame->decode_timestamp = GST_CLOCK_TIME_NONE;
frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
frame->presentation_duration = GST_CLOCK_TIME_NONE;
frame->n_fields = 2;
frame->sink_buffer = NULL;
frame->src_buffer = NULL;
}
static void
gst_video_frame_class_init (gpointer g_class, gpointer class_data)
{
GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
gst_video_frame_parent_class = g_type_class_peek_parent (g_class);
mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
gst_video_frame_finalize;
}
GType
gst_video_frame_get_type (void)
{
static GType _gst_video_frame_type = 0;
if (G_UNLIKELY (_gst_video_frame_type == 0)) {
static const GTypeInfo info = {
sizeof (GstVideoFrameClass),
NULL,
NULL,
gst_video_frame_class_init,
NULL,
NULL,
sizeof (GstVideoFrame),
0,
(GInstanceInitFunc) gst_video_frame_init,
NULL
};
_gst_video_frame_type = g_type_register_static (GST_TYPE_MINI_OBJECT,
"GstVideoFrame", &info, 0);
DEBUG_INIT ();
}
return _gst_video_frame_type;
}


@@ -0,0 +1,155 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_VIDEO_FRAME_H_
#define _GST_VIDEO_FRAME_H_
#include <gst/gst.h>
#define GST_TYPE_VIDEO_FRAME (gst_video_frame_get_type())
#define GST_IS_VIDEO_FRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VIDEO_FRAME))
#define GST_VIDEO_FRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VIDEO_FRAME, GstVideoFrame))
/**
* GstVideoFrameFlag:
* @GST_VIDEO_FRAME_FLAG_PREROLL: the frame is part of a preroll and should not be
* displayed.
* @GST_VIDEO_FRAME_FLAG_DISCONT: the frame marks a discontinuity in the stream.
* This typically occurs after a seek or a dropped buffer from a live or
* network source.
* @GST_VIDEO_FRAME_FLAG_GAP: the frame has been created to fill a gap in the
* stream and contains media neutral data (elements can switch to optimized code
* path that ignores the buffer content).
* @GST_VIDEO_FRAME_FLAG_KEYFRAME: the frame is a keyframe.
* @GST_VIDEO_FRAME_FLAG_SYNC_POINT: the frame marks a sync point.
* @GST_VIDEO_FRAME_FLAG_EOS: the frame is the last in the stream.
* @GST_VIDEO_FRAME_FLAG_TFF: If the frame is interlaced, then the first
* field in the video frame is the top field. If unset, the bottom field is first.
* @GST_VIDEO_FRAME_FLAG_LAST: additional flags can be added starting from this flag.
*
* A set of frame flags used to describe properties of a #GstVideoFrame.
*/
typedef enum
{
GST_VIDEO_FRAME_FLAG_PREROLL = (GST_MINI_OBJECT_FLAG_LAST << 0),
GST_VIDEO_FRAME_FLAG_DISCONT = (GST_MINI_OBJECT_FLAG_LAST << 1),
GST_VIDEO_FRAME_FLAG_GAP = (GST_MINI_OBJECT_FLAG_LAST << 2),
GST_VIDEO_FRAME_FLAG_KEYFRAME = (GST_MINI_OBJECT_FLAG_LAST << 3),
GST_VIDEO_FRAME_FLAG_SYNC_POINT = (GST_MINI_OBJECT_FLAG_LAST << 4),
GST_VIDEO_FRAME_FLAG_EOS = (GST_MINI_OBJECT_FLAG_LAST << 5),
GST_VIDEO_FRAME_FLAG_TFF = (GST_MINI_OBJECT_FLAG_LAST << 6),
GST_VIDEO_FRAME_FLAG_LAST = (GST_MINI_OBJECT_FLAG_LAST << 7)
} GstVideoFrameFlag;
typedef struct _GstVideoFrame GstVideoFrame;
typedef struct _GstVideoFrameClass GstVideoFrameClass;
struct _GstVideoFrame
{
GstMiniObject mini_object;
GstClockTime decode_timestamp;
GstClockTime presentation_timestamp;
GstClockTime presentation_duration;
gint system_frame_number;
gint decode_frame_number;
gint presentation_frame_number;
gint distance_from_sync;
GstBuffer *sink_buffer;
GstBuffer *src_buffer;
gint field_index;
gint n_fields;
};
struct _GstVideoFrameClass
{
GstMiniObjectClass mini_object_class;
};
/* refcounting */
/**
* gst_video_frame_ref:
* @frame: a #GstVideoFrame.
*
* Increases the refcount of the given frame by one.
*
* Returns: @frame
*/
#ifdef _FOOL_GTK_DOC_
G_INLINE_FUNC GstVideoFrame * gst_video_frame_ref (GstVideoFrame * frame);
#endif
static inline GstVideoFrame *
gst_video_frame_ref (GstVideoFrame *frame)
{
return (GstVideoFrame *) gst_mini_object_ref (GST_MINI_OBJECT_CAST (frame));
}
/**
* gst_video_frame_unref:
* @frame: a #GstVideoFrame.
*
* Decreases the refcount of the frame. If the refcount reaches 0, the frame
* will be freed.
*/
#ifdef _FOOL_GTK_DOC_
G_INLINE_FUNC void gst_video_frame_unref (GstVideoFrame * frame);
#endif
static inline void
gst_video_frame_unref (GstVideoFrame * frame)
{
gst_mini_object_unref (GST_MINI_OBJECT_CAST (frame));
}
/**
* GST_VIDEO_FRAME_FLAG_IS_SET:
* @frame: a #GstVideoFrame.
* @flag: the #GstVideoFrameFlag to check.
*
* Gives the status of a specific flag on a video frame.
*/
#define GST_VIDEO_FRAME_FLAG_IS_SET(frame,flag) GST_MINI_OBJECT_FLAG_IS_SET (frame, flag)
/**
* GST_VIDEO_FRAME_FLAG_SET:
* @frame: a #GstVideoFrame.
* @flag: the #GstVideoFrameFlag to set.
*
* Sets a frame flag on a video frame.
*/
#define GST_VIDEO_FRAME_FLAG_SET(frame,flag) GST_MINI_OBJECT_FLAG_SET (frame, flag)
/**
* GST_VIDEO_FRAME_FLAG_UNSET:
* @frame: a #GstVideoFrame.
* @flag: the #GstVideoFrameFlag to clear.
*
* Clears a frame flag.
*/
#define GST_VIDEO_FRAME_FLAG_UNSET(frame,flag) GST_MINI_OBJECT_FLAG_UNSET (frame, flag)
GstVideoFrame *gst_video_frame_new (void);
GType gst_video_frame_get_type (void);
#endif

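A hypothetical fragment (keep_as_reference and ref_frame are invented names) showing the ref/unref helpers and flag macros above in use:

#include "gstvideoframe.h"

/* Hold on to @frame as the new reference picture, releasing the old one.
 * Hypothetical helper; mirrors how a decoder keeps reference frames alive
 * across handle_frame() calls. */
static void
keep_as_reference (GstVideoFrame ** ref_frame, GstVideoFrame * frame)
{
  /* drop the previously held reference picture, if any */
  if (*ref_frame)
    gst_video_frame_unref (*ref_frame);

  /* illustrative use of the flag macros: a keyframe reference can also be
   * advertised as a sync point */
  if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_KEYFRAME))
    GST_VIDEO_FRAME_FLAG_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT);

  /* take an extra reference so the frame outlives finish_frame() */
  *ref_frame = gst_video_frame_ref (frame);
}

The MPEG decoder below keeps its forward and backward reference pictures (f_frame, b_frame) alive in exactly this ref/unref fashion.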

@@ -0,0 +1,28 @@
lib_LTLIBRARIES = libgstvdp-@GST_MAJORMINOR@.la
libgstvdp_@GST_MAJORMINOR@_la_SOURCES = \
gstvdpdevice.c \
gstvdputils.c \
gstvdpvideobuffer.c \
gstvdpoutputbuffer.c \
gstvdpvideosrcpad.c \
gstvdpoutputsrcpad.c \
gstvdp.c
libgstvdp_@GST_MAJORMINOR@includedir = $(includedir)/gstreamer-@GST_MAJORMINOR@/gst/vdpau
libgstvdp_@GST_MAJORMINOR@include_HEADERS = \
gstvdpdevice.h \
gstvdpvideobuffer.h \
gstvdpoutputbuffer.h \
gstvdpvideosrcpad.h \
gstvdpoutputsrcpad.h \
gstvdp.h
libgstvdp_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS)
libgstvdp_@GST_MAJORMINOR@_la_LIBADD = $(GST_LIBS) $(X11_LIBS) $(VDPAU_LIBS) \
-lgstvideo-$(GST_MAJORMINOR)
libgstvdp_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_LT_LDFLAGS) $(GST_ALL_LDFLAGS)
libgstvdp_@GST_MAJORMINOR@_la_LIBTOOLFLAGS = --tag=disable-static


@@ -5,9 +5,10 @@
#include <gst/gst.h>
#include "gstvdp.h"
#include "gstvdp/gstvdp.h"
#include "gstvdpmpegdec.h"
#include "mpeg/gstvdpmpegdec.h"
#include "h264/gstvdph264dec.h"
#include "gstvdpvideopostprocess.h"
#include "gstvdpsink.h"
@@ -20,6 +21,8 @@ vdpau_init (GstPlugin * vdpau_plugin)
* least the generic/states test when there's no device available */
gst_element_register (vdpau_plugin, "vdpaumpegdec",
GST_RANK_NONE, GST_TYPE_VDP_MPEG_DEC);
gst_element_register (vdpau_plugin, "vdpauh264dec",
GST_RANK_NONE, GST_TYPE_VDP_H264_DEC);
gst_element_register (vdpau_plugin, "vdpauvideopostprocess",
GST_RANK_MARGINAL, GST_TYPE_VDP_VIDEO_POST_PROCESS);
gst_element_register (vdpau_plugin, "vdpausink",

File diff suppressed because it is too large


@@ -29,7 +29,7 @@
/* Debugging category */
#include <gst/gstinfo.h>
#include "gstvdpoutputbuffer.h"
#include "gstvdp/gstvdpoutputbuffer.h"
/* Object header */
#include "gstvdpsink.h"


@@ -29,7 +29,7 @@
#include <string.h>
#include <math.h>
#include "gstvdpdevice.h"
#include "gstvdp/gstvdpdevice.h"
G_BEGIN_DECLS


@@ -45,9 +45,9 @@
#include <gst/gst.h>
#include <gst/video/gstvideosink.h>
#include "gstvdputils.h"
#include "gstvdpoutputbuffer.h"
#include "gstvdpoutputsrcpad.h"
#include "gstvdp/gstvdputils.h"
#include "gstvdp/gstvdpoutputbuffer.h"
#include "gstvdp/gstvdpoutputsrcpad.h"
#include "gstvdpvideopostprocess.h"


@@ -23,8 +23,8 @@
#include <gst/gst.h>
#include "gstvdpdevice.h"
#include "gstvdpvideobuffer.h"
#include "gstvdp/gstvdpdevice.h"
#include "gstvdp/gstvdpvideobuffer.h"
G_BEGIN_DECLS


@@ -0,0 +1,872 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/**
* SECTION:element-vdpaumpegdec
*
* FIXME:Describe vdpaumpegdec here.
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch -v -m fakesrc ! vdpaumpegdec ! fakesink silent=TRUE
* ]|
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include <gst/gst.h>
#include <gst/base/gstbytereader.h>
#include <gst/base/gstbitreader.h>
#include <string.h>
#include "mpegutil.h"
#include "../gstvdp/gstvdpvideosrcpad.h"
#include "../gstvdp/gstvdpvideobuffer.h"
#include "gstvdpmpegdec.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_mpeg_dec_debug);
#define GST_CAT_DEFAULT gst_vdp_mpeg_dec_debug
/* Filter signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
PROP_0,
PROP_DISPLAY
};
/* the capabilities of the inputs and outputs.
*
* describe the real formats here.
*/
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/mpeg, mpegversion = (int) [ 1, 2 ], "
"systemstream = (boolean) false")
);
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdp_mpeg_dec_debug, "vdpaumpegdec", 0, \
"VDPAU mpeg decoder");
GST_BOILERPLATE_FULL (GstVdpMpegDec, gst_vdp_mpeg_dec,
GstBaseVideoDecoder, GST_TYPE_BASE_VIDEO_DECODER, DEBUG_INIT);
static void gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info);
#define SYNC_CODE_SIZE 3
static VdpDecoderProfile
gst_vdp_mpeg_dec_get_profile (MPEGSeqExtHdr * hdr)
{
VdpDecoderProfile profile;
switch (hdr->profile) {
case 5:
profile = VDP_DECODER_PROFILE_MPEG2_SIMPLE;
break;
default:
profile = VDP_DECODER_PROFILE_MPEG2_MAIN;
break;
}
return profile;
}
static GstFlowReturn
gst_vdp_mpeg_dec_alloc_buffer (GstVdpMpegDec * mpeg_dec,
GstVdpVideoBuffer ** outbuf)
{
GstVdpVideoSrcPad *vdp_pad;
GstFlowReturn ret = GST_FLOW_OK;
vdp_pad = (GstVdpVideoSrcPad *) GST_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec);
ret = gst_vdp_video_src_pad_alloc_buffer (vdp_pad, outbuf);
if (ret != GST_FLOW_OK)
return ret;
return GST_FLOW_OK;
}
static GstFlowReturn
gst_vdp_mpeg_dec_shape_output (GstBaseVideoDecoder * base_video_decoder,
GstBuffer * buf)
{
GstVdpVideoSrcPad *vdp_pad;
vdp_pad =
(GstVdpVideoSrcPad *) GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder);
return gst_vdp_video_src_pad_push (vdp_pad, GST_VDP_VIDEO_BUFFER (buf));
}
static gboolean
gst_vdp_mpeg_dec_handle_picture_coding (GstVdpMpegDec * mpeg_dec,
GstBuffer * buffer, GstVideoFrame * frame)
{
MPEGPictureExt pic_ext;
VdpPictureInfoMPEG1Or2 *info;
gint fields;
info = &mpeg_dec->vdp_info;
if (!mpeg_util_parse_picture_coding_extension (&pic_ext, buffer))
return FALSE;
memcpy (&mpeg_dec->vdp_info.f_code, &pic_ext.f_code, 4);
info->intra_dc_precision = pic_ext.intra_dc_precision;
info->picture_structure = pic_ext.picture_structure;
info->top_field_first = pic_ext.top_field_first;
info->frame_pred_frame_dct = pic_ext.frame_pred_frame_dct;
info->concealment_motion_vectors = pic_ext.concealment_motion_vectors;
info->q_scale_type = pic_ext.q_scale_type;
info->intra_vlc_format = pic_ext.intra_vlc_format;
info->alternate_scan = pic_ext.alternate_scan;
fields = 2;
if (pic_ext.picture_structure == 3) {
if (mpeg_dec->stream_info.interlaced) {
if (pic_ext.progressive_frame == 0)
fields = 2;
if (pic_ext.progressive_frame == 0 && pic_ext.repeat_first_field == 0)
fields = 2;
if (pic_ext.progressive_frame == 1 && pic_ext.repeat_first_field == 1)
fields = 3;
} else {
if (pic_ext.repeat_first_field == 0)
fields = 2;
if (pic_ext.repeat_first_field == 1 && pic_ext.top_field_first == 0)
fields = 4;
if (pic_ext.repeat_first_field == 1 && pic_ext.top_field_first == 1)
fields = 6;
}
} else
fields = 1;
frame->n_fields = fields;
if (pic_ext.top_field_first)
GST_VIDEO_FRAME_FLAG_SET (frame, GST_VIDEO_FRAME_FLAG_TFF);
return TRUE;
}
static gboolean
gst_vdp_mpeg_dec_handle_picture (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer)
{
MPEGPictureHdr pic_hdr;
if (!mpeg_util_parse_picture_hdr (&pic_hdr, buffer))
return FALSE;
mpeg_dec->vdp_info.picture_coding_type = pic_hdr.pic_type;
if (mpeg_dec->stream_info.version == 1) {
mpeg_dec->vdp_info.full_pel_forward_vector =
pic_hdr.full_pel_forward_vector;
mpeg_dec->vdp_info.full_pel_backward_vector =
pic_hdr.full_pel_backward_vector;
memcpy (&mpeg_dec->vdp_info.f_code, &pic_hdr.f_code, 4);
}
mpeg_dec->frame_nr = mpeg_dec->gop_frame + pic_hdr.tsn;
return TRUE;
}
static gboolean
gst_vdp_mpeg_dec_handle_gop (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer)
{
MPEGGop gop;
GstClockTime time;
if (!mpeg_util_parse_gop (&gop, buffer))
return FALSE;
time = GST_SECOND * (gop.hour * 3600 + gop.minute * 60 + gop.second);
GST_DEBUG ("gop timestamp: %" GST_TIME_FORMAT, GST_TIME_ARGS (time));
mpeg_dec->gop_frame =
gst_util_uint64_scale (time, mpeg_dec->stream_info.fps_n,
mpeg_dec->stream_info.fps_d * GST_SECOND) + gop.frame;
if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_GOP)
mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_DATA;
return TRUE;
}
static gboolean
gst_vdp_mpeg_dec_handle_quant_matrix (GstVdpMpegDec * mpeg_dec,
GstBuffer * buffer)
{
MPEGQuantMatrix qm;
if (!mpeg_util_parse_quant_matrix (&qm, buffer))
return FALSE;
memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
&qm.intra_quantizer_matrix, 64);
memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
&qm.non_intra_quantizer_matrix, 64);
return TRUE;
}
static gboolean
gst_vdp_mpeg_dec_create_decoder (GstVdpMpegDec * mpeg_dec)
{
GstVdpDevice *device;
device = gst_vdp_video_src_pad_get_device
(GST_VDP_VIDEO_SRC_PAD (GST_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec)));
if (device) {
VdpStatus status;
GstVdpMpegStreamInfo *stream_info;
stream_info = &mpeg_dec->stream_info;
if (mpeg_dec->decoder != VDP_INVALID_HANDLE)
device->vdp_decoder_destroy (mpeg_dec->decoder);
status = device->vdp_decoder_create (device->device, stream_info->profile,
stream_info->width, stream_info->height, 2, &mpeg_dec->decoder);
if (status != VDP_STATUS_OK) {
GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
("Could not create vdpau decoder"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
return FALSE;
}
}
return TRUE;
}
static gboolean
gst_vdp_mpeg_dec_handle_sequence (GstVdpMpegDec * mpeg_dec,
GstBuffer * seq, GstBuffer * seq_ext)
{
GstBaseVideoDecoder *base_video_decoder = GST_BASE_VIDEO_DECODER (mpeg_dec);
MPEGSeqHdr hdr;
GstVdpMpegStreamInfo stream_info;
if (!mpeg_util_parse_sequence_hdr (&hdr, seq))
return FALSE;
memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
&hdr.intra_quantizer_matrix, 64);
memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
&hdr.non_intra_quantizer_matrix, 64);
stream_info.width = hdr.width;
stream_info.height = hdr.height;
stream_info.fps_n = hdr.fps_n;
stream_info.fps_d = hdr.fps_d;
stream_info.par_n = hdr.par_w;
stream_info.par_d = hdr.par_h;
stream_info.interlaced = FALSE;
stream_info.version = 1;
stream_info.profile = VDP_DECODER_PROFILE_MPEG1;
if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE)
mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_DATA;
if (seq_ext) {
MPEGSeqExtHdr ext;
if (!mpeg_util_parse_sequence_extension (&ext, seq_ext))
return FALSE;
stream_info.fps_n *= (ext.fps_n_ext + 1);
stream_info.fps_d *= (ext.fps_d_ext + 1);
stream_info.width += (ext.horiz_size_ext << 12);
stream_info.height += (ext.vert_size_ext << 12);
stream_info.interlaced = !ext.progressive;
stream_info.version = 2;
stream_info.profile = gst_vdp_mpeg_dec_get_profile (&ext);
}
if (memcmp (&mpeg_dec->stream_info, &stream_info,
sizeof (GstVdpMpegStreamInfo)) != 0) {
GstVideoState *state;
state = gst_base_video_decoder_get_state (base_video_decoder);
state->width = stream_info.width;
state->height = stream_info.height;
state->fps_n = stream_info.fps_n;
state->fps_d = stream_info.fps_d;
state->par_n = stream_info.par_n;
state->par_d = stream_info.par_d;
state->interlaced = stream_info.interlaced;
gst_base_video_decoder_set_state (base_video_decoder, state);
gst_base_video_decoder_update_src_caps (base_video_decoder);
memcpy (&mpeg_dec->stream_info, &stream_info,
sizeof (GstVdpMpegStreamInfo));
}
return TRUE;
}
static GstFlowReturn
gst_vdp_mpeg_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame, GstClockTimeDiff deadline)
{
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
VdpPictureInfoMPEG1Or2 *info;
GstVdpMpegFrame *mpeg_frame;
GstFlowReturn ret;
GstVdpVideoBuffer *outbuf;
VdpVideoSurface surface;
GstVdpDevice *device;
VdpBitstreamBuffer vbit[1];
VdpStatus status;
/* MPEG_PACKET_SEQUENCE */
mpeg_frame = GST_VDP_MPEG_FRAME (frame);
if (mpeg_frame->seq) {
gst_vdp_mpeg_dec_handle_sequence (mpeg_dec, mpeg_frame->seq,
mpeg_frame->seq_ext);
}
/* MPEG_PACKET_PICTURE */
if (mpeg_frame->pic)
gst_vdp_mpeg_dec_handle_picture (mpeg_dec, mpeg_frame->pic);
/* MPEG_PACKET_EXT_PICTURE_CODING */
if (mpeg_frame->pic_ext)
gst_vdp_mpeg_dec_handle_picture_coding (mpeg_dec, mpeg_frame->pic_ext,
frame);
/* MPEG_PACKET_GOP */
if (mpeg_frame->gop)
gst_vdp_mpeg_dec_handle_gop (mpeg_dec, mpeg_frame->gop);
/* MPEG_PACKET_EXT_QUANT_MATRIX */
if (mpeg_frame->qm_ext)
gst_vdp_mpeg_dec_handle_quant_matrix (mpeg_dec, mpeg_frame->qm_ext);
info = &mpeg_dec->vdp_info;
info->slice_count = mpeg_frame->n_slices;
/* check if we can decode the frame */
if (info->picture_coding_type != I_FRAME
&& info->backward_reference == VDP_INVALID_HANDLE) {
GST_DEBUG_OBJECT (mpeg_dec,
"Drop frame since we haven't got an I_FRAME yet");
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_OK;
}
if (info->picture_coding_type == B_FRAME
&& info->forward_reference == VDP_INVALID_HANDLE) {
GST_DEBUG_OBJECT (mpeg_dec,
"Drop frame since we haven't got two non B_FRAMES yet");
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_OK;
}
if (info->picture_coding_type != B_FRAME) {
if (info->backward_reference != VDP_INVALID_HANDLE) {
gst_base_video_decoder_finish_frame (base_video_decoder,
mpeg_dec->b_frame);
}
if (info->forward_reference != VDP_INVALID_HANDLE) {
gst_video_frame_unref (mpeg_dec->f_frame);
info->forward_reference = VDP_INVALID_HANDLE;
}
info->forward_reference = info->backward_reference;
mpeg_dec->f_frame = mpeg_dec->b_frame;
info->backward_reference = VDP_INVALID_HANDLE;
}
if ((ret = gst_vdp_mpeg_dec_alloc_buffer (mpeg_dec, &outbuf)) != GST_FLOW_OK)
goto alloc_error;
/* create decoder */
if (mpeg_dec->decoder == VDP_INVALID_HANDLE)
gst_vdp_mpeg_dec_create_decoder (mpeg_dec);
device = GST_VDP_VIDEO_BUFFER (outbuf)->device;
if (info->picture_coding_type == I_FRAME)
GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
else
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
if (info->top_field_first)
GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_TFF);
else
GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_TFF);
surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface;
vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
vbit[0].bitstream = GST_BUFFER_DATA (mpeg_frame->slices);
vbit[0].bitstream_bytes = GST_BUFFER_SIZE (mpeg_frame->slices);
status = device->vdp_decoder_render (mpeg_dec->decoder, surface,
(VdpPictureInfo *) info, 1, vbit);
if (status != VDP_STATUS_OK)
goto decode_error;
frame->src_buffer = GST_BUFFER_CAST (outbuf);
if (info->picture_coding_type == B_FRAME) {
gst_base_video_decoder_finish_frame (base_video_decoder, frame);
} else {
info->backward_reference = surface;
mpeg_dec->b_frame = gst_video_frame_ref (frame);
}
return GST_FLOW_OK;
alloc_error:
GST_ERROR_OBJECT (mpeg_dec, "Could not allocate output buffer");
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return ret;
decode_error:
GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
("Could not decode"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
gst_buffer_unref (GST_BUFFER_CAST (outbuf));
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_ERROR;
}
static GstVideoFrame *
gst_vdp_mpeg_dec_create_frame (GstBaseVideoDecoder * base_video_decoder)
{
return GST_VIDEO_FRAME (gst_vdp_mpeg_frame_new ());
}
static GstFlowReturn
gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
GstBuffer * buf, gboolean at_eos)
{
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
GstVdpMpegFrame *mpeg_frame;
GstFlowReturn ret = GST_FLOW_OK;
GstBitReader b_reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);
guint32 sync_code;
guint8 start_code;
/* skip sync_code */
gst_bit_reader_get_bits_uint32 (&b_reader, &sync_code, 8 * 3);
/* start_code */
gst_bit_reader_get_bits_uint8 (&b_reader, &start_code, 8);
if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE) {
if (start_code != MPEG_PACKET_SEQUENCE) {
GST_DEBUG_OBJECT (mpeg_dec, "Drop data since we haven't found a "
"MPEG_PACKET_SEQUENCE yet");
gst_buffer_unref (buf);
return GST_FLOW_OK;
}
}
mpeg_frame = (GstVdpMpegFrame *)
gst_base_video_decoder_get_current_frame (base_video_decoder);
if (start_code >= MPEG_PACKET_SLICE_MIN
&& start_code <= MPEG_PACKET_SLICE_MAX) {
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SLICE");
gst_vdp_mpeg_frame_add_slice (mpeg_frame, buf);
goto done;
}
switch (start_code) {
case MPEG_PACKET_SEQUENCE:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SEQUENCE");
if (mpeg_dec->prev_packet != -1) {
ret = gst_base_video_decoder_have_frame (base_video_decoder,
(GstVideoFrame **) & mpeg_frame);
}
mpeg_frame->seq = buf;
mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_DATA;
break;
case MPEG_PACKET_PICTURE:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_PICTURE");
if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE &&
mpeg_dec->prev_packet != MPEG_PACKET_GOP) {
ret = gst_base_video_decoder_have_frame (base_video_decoder,
(GstVideoFrame **) & mpeg_frame);
}
mpeg_frame->pic = buf;
break;
case MPEG_PACKET_GOP:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_GOP");
if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE) {
ret = gst_base_video_decoder_have_frame (base_video_decoder,
(GstVideoFrame **) & mpeg_frame);
}
mpeg_frame->gop = buf;
break;
case MPEG_PACKET_EXTENSION:
{
guint8 ext_code;
/* ext_code */
gst_bit_reader_get_bits_uint8 (&b_reader, &ext_code, 4);
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXTENSION: %d", ext_code);
switch (ext_code) {
case MPEG_PACKET_EXT_SEQUENCE:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_SEQUENCE");
mpeg_frame->seq_ext = buf;
/* so that we don't finish the frame if we get a MPEG_PACKET_PICTURE
* or MPEG_PACKET_GOP after this */
start_code = MPEG_PACKET_SEQUENCE;
break;
case MPEG_PACKET_EXT_SEQUENCE_DISPLAY:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_SEQUENCE_DISPLAY");
/* so that we don't finish the frame if we get a MPEG_PACKET_PICTURE
* or MPEG_PACKET_GOP after this */
start_code = MPEG_PACKET_SEQUENCE;
break;
case MPEG_PACKET_EXT_PICTURE_CODING:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_PICTURE_CODING");
mpeg_frame->pic_ext = buf;
break;
case MPEG_PACKET_EXT_QUANT_MATRIX:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_QUANT_MATRIX");
mpeg_frame->qm_ext = buf;
break;
default:
gst_buffer_unref (buf);
}
break;
}
default:
gst_buffer_unref (buf);
}
done:
mpeg_dec->prev_packet = start_code;
return ret;
}
static GstPad *
gst_vdp_mpeg_dec_create_srcpad (GstBaseVideoDecoder * base_video_decoder,
GstBaseVideoDecoderClass * base_video_decoder_class)
{
GstPadTemplate *pad_template;
GstVdpVideoSrcPad *vdp_pad;
pad_template = gst_element_class_get_pad_template
(GST_ELEMENT_CLASS (base_video_decoder_class),
GST_BASE_VIDEO_DECODER_SRC_NAME);
vdp_pad = gst_vdp_video_src_pad_new (pad_template,
GST_BASE_VIDEO_DECODER_SRC_NAME);
return GST_PAD (vdp_pad);
}
static gint
gst_vdp_mpeg_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder,
GstAdapter * adapter)
{
gint m;
m = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100, 0,
gst_adapter_available (adapter));
if (m == -1)
return gst_adapter_available (adapter) - SYNC_CODE_SIZE;
return m;
}
static GstBaseVideoDecoderScanResult
gst_vdp_mpeg_dec_scan_for_packet_end (GstBaseVideoDecoder * base_video_decoder,
GstAdapter * adapter, guint * size, gboolean at_eos)
{
guint8 *data;
guint32 sync_code;
data = g_slice_alloc (SYNC_CODE_SIZE);
gst_adapter_copy (adapter, data, 0, SYNC_CODE_SIZE);
sync_code = ((data[0] << 16) | (data[1] << 8) | data[2]);
g_slice_free1 (SYNC_CODE_SIZE, data);
if (sync_code != 0x000001)
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC;
*size = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100,
SYNC_CODE_SIZE, gst_adapter_available (adapter) - SYNC_CODE_SIZE);
if (*size == -1)
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA;
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK;
}
static gboolean
gst_vdp_mpeg_dec_flush (GstBaseVideoDecoder * base_video_decoder)
{
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE)
gst_video_frame_unref (mpeg_dec->f_frame);
if (mpeg_dec->vdp_info.backward_reference != VDP_INVALID_HANDLE)
gst_video_frame_unref (mpeg_dec->b_frame);
gst_vdp_mpeg_dec_init_info (&mpeg_dec->vdp_info);
mpeg_dec->prev_packet = -1;
return TRUE;
}
static gboolean
gst_vdp_mpeg_dec_start (GstBaseVideoDecoder * base_video_decoder)
{
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
gst_vdp_mpeg_dec_init_info (&mpeg_dec->vdp_info);
mpeg_dec->decoder = VDP_INVALID_HANDLE;
mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE;
memset (&mpeg_dec->stream_info, 0, sizeof (GstVdpMpegStreamInfo));
return TRUE;
}
static gboolean
gst_vdp_mpeg_dec_stop (GstBaseVideoDecoder * base_video_decoder)
{
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
GstVdpVideoSrcPad *vdp_pad;
GstVdpDevice *device;
vdp_pad =
GST_VDP_VIDEO_SRC_PAD (GST_BASE_VIDEO_DECODER_SRC_PAD
(base_video_decoder));
if ((device = gst_vdp_video_src_pad_get_device (vdp_pad))) {
if (mpeg_dec->decoder != VDP_INVALID_HANDLE)
device->vdp_decoder_destroy (mpeg_dec->decoder);
}
if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE)
mpeg_dec->vdp_info.forward_reference = VDP_INVALID_HANDLE;
if (mpeg_dec->vdp_info.backward_reference != VDP_INVALID_HANDLE)
mpeg_dec->vdp_info.backward_reference = VDP_INVALID_HANDLE;
mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE;
return TRUE;
}
/* GObject vmethod implementations */
static void
gst_vdp_mpeg_dec_finalize (GObject * object)
{
  G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_vdp_mpeg_dec_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstVdpMpegDec *mpeg_dec = (GstVdpMpegDec *) object;
switch (prop_id) {
case PROP_DISPLAY:
g_object_get_property
(G_OBJECT (GST_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec)), "display",
value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_vdp_mpeg_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstVdpMpegDec *mpeg_dec = (GstVdpMpegDec *) object;
switch (prop_id) {
case PROP_DISPLAY:
g_object_set_property
(G_OBJECT (GST_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec)), "display",
value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_vdp_mpeg_dec_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
GstCaps *src_caps;
GstPadTemplate *src_template;
gst_element_class_set_details_simple (element_class,
"VDPAU Mpeg Decoder",
"Decoder",
"Decode mpeg stream with vdpau",
"Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
src_caps = gst_vdp_video_buffer_get_caps (TRUE, VDP_CHROMA_TYPE_420);
src_template = gst_pad_template_new (GST_BASE_VIDEO_DECODER_SRC_NAME,
GST_PAD_SRC, GST_PAD_ALWAYS, src_caps);
gst_element_class_add_pad_template (element_class, src_template);
}
/* initialize the vdpaumpegdecoder's class */
static void
gst_vdp_mpeg_dec_class_init (GstVdpMpegDecClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseVideoDecoderClass *base_video_decoder_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
base_video_decoder_class = (GstBaseVideoDecoderClass *) klass;
gobject_class->get_property = gst_vdp_mpeg_dec_get_property;
gobject_class->set_property = gst_vdp_mpeg_dec_set_property;
gobject_class->finalize = gst_vdp_mpeg_dec_finalize;
base_video_decoder_class->start = gst_vdp_mpeg_dec_start;
base_video_decoder_class->stop = gst_vdp_mpeg_dec_stop;
base_video_decoder_class->flush = gst_vdp_mpeg_dec_flush;
base_video_decoder_class->create_srcpad = gst_vdp_mpeg_dec_create_srcpad;
base_video_decoder_class->scan_for_sync = gst_vdp_mpeg_dec_scan_for_sync;
base_video_decoder_class->scan_for_packet_end =
gst_vdp_mpeg_dec_scan_for_packet_end;
base_video_decoder_class->parse_data = gst_vdp_mpeg_dec_parse_data;
base_video_decoder_class->handle_frame = gst_vdp_mpeg_dec_handle_frame;
base_video_decoder_class->create_frame = gst_vdp_mpeg_dec_create_frame;
base_video_decoder_class->shape_output = gst_vdp_mpeg_dec_shape_output;
g_object_class_install_property (gobject_class,
PROP_DISPLAY, g_param_spec_string ("display", "Display", "X Display name",
NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
}
static void
gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info)
{
vdp_info->forward_reference = VDP_INVALID_HANDLE;
vdp_info->backward_reference = VDP_INVALID_HANDLE;
vdp_info->slice_count = 0;
vdp_info->picture_structure = 3;
vdp_info->picture_coding_type = 0;
vdp_info->intra_dc_precision = 0;
vdp_info->frame_pred_frame_dct = 1;
vdp_info->concealment_motion_vectors = 0;
vdp_info->intra_vlc_format = 0;
vdp_info->alternate_scan = 0;
vdp_info->q_scale_type = 0;
vdp_info->top_field_first = 1;
}
static void
gst_vdp_mpeg_dec_init (GstVdpMpegDec * mpeg_dec, GstVdpMpegDecClass * gclass)
{
}


@@ -24,7 +24,8 @@
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include "gstvdpvideobuffer.h"
#include "../basevideodecoder/gstbasevideodecoder.h"
#include "gstvdpmpegframe.h"
G_BEGIN_DECLS
@@ -35,9 +36,9 @@ G_BEGIN_DECLS
#define GST_IS_VDP_MPEG_DEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_MPEG_DEC))
typedef enum {
GST_VDP_MPEG_DEC_NEED_SEQUENCE,
GST_VDP_MPEG_DEC_NEED_GOP,
GST_VDP_MPEG_DEC_NEED_DATA
GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE,
GST_VDP_MPEG_DEC_STATE_NEED_GOP,
GST_VDP_MPEG_DEC_STATE_NEED_DATA
} GstVdpMpegDecState;
typedef struct _GstVdpMpegDec GstVdpMpegDec;
@@ -45,55 +46,31 @@ typedef struct _GstVdpMpegDecClass GstVdpMpegDecClass;
struct _GstVdpMpegDec
{
GstElement element;
GstBaseVideoDecoder base_video_decoder;
/* pads */
GstPad *src;
GstPad *sink;
VdpDecoderProfile profile;
VdpDecoder decoder;
/* stream info */
gint width, height;
gint fps_n, fps_d;
gboolean interlaced;
gint version;
GstVdpMpegStreamInfo stream_info;
/* decoder state */
GstVdpMpegDecState state;
gint prev_packet;
/* currently decoded frame info */
GstAdapter *adapter;
VdpPictureInfoMPEG1Or2 vdp_info;
guint64 frame_nr;
GstClockTime duration;
/* frame_nr from GOP */
guint64 gop_frame;
/* forward and backward reference */
GstVdpVideoBuffer *f_buffer;
GstVdpVideoBuffer *b_buffer;
/* calculated timestamp, size and duration */
GstClockTime next_timestamp;
guint64 accumulated_size;
guint64 accumulated_duration;
/* seek data */
GstSegment segment;
gboolean seeking;
gint64 byterate;
/* mutex */
GMutex *mutex;
GstVideoFrame *f_frame, *b_frame;
};
struct _GstVdpMpegDecClass
{
GstElementClass element_class;
GstBaseVideoDecoderClass base_video_decoder_class;
};
GType gst_vdp_mpeg_dec_get_type (void);


@@ -0,0 +1,133 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvdpmpegframe.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_mpeg_frame_debug);
#define GST_CAT_DEFAULT gst_vdp_mpeg_frame_debug
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdp_mpeg_frame_debug, "gstvdpmpegframe", 0, "VDPAU MPEG Frame");
void
gst_vdp_mpeg_frame_add_slice (GstVdpMpegFrame * mpeg_frame, GstBuffer * buf)
{
if (!mpeg_frame->slices)
mpeg_frame->slices = buf;
else
mpeg_frame->slices = gst_buffer_join (mpeg_frame->slices, buf);
mpeg_frame->n_slices++;
}
GstVdpMpegFrame *
gst_vdp_mpeg_frame_new (void)
{
GstVdpMpegFrame *frame;
frame = (GstVdpMpegFrame *) gst_mini_object_new (GST_TYPE_VDP_MPEG_FRAME);
return frame;
}
static GObjectClass *gst_vdp_mpeg_frame_parent_class;
static void
gst_vdp_mpeg_frame_finalize (GstVdpMpegFrame * mpeg_frame)
{
if (mpeg_frame->seq)
gst_buffer_unref (mpeg_frame->seq);
if (mpeg_frame->seq_ext)
gst_buffer_unref (mpeg_frame->seq_ext);
if (mpeg_frame->pic)
gst_buffer_unref (mpeg_frame->pic);
if (mpeg_frame->pic_ext)
gst_buffer_unref (mpeg_frame->pic_ext);
if (mpeg_frame->gop)
gst_buffer_unref (mpeg_frame->gop);
if (mpeg_frame->qm_ext)
gst_buffer_unref (mpeg_frame->qm_ext);
if (mpeg_frame->slices)
gst_buffer_unref (mpeg_frame->slices);
GST_MINI_OBJECT_CLASS (gst_vdp_mpeg_frame_parent_class)->finalize
(GST_MINI_OBJECT (mpeg_frame));
}
static void
gst_vdp_mpeg_frame_init (GstVdpMpegFrame * mpeg_frame, gpointer g_class)
{
mpeg_frame->seq = NULL;
mpeg_frame->seq_ext = NULL;
mpeg_frame->pic = NULL;
mpeg_frame->pic_ext = NULL;
mpeg_frame->gop = NULL;
mpeg_frame->qm_ext = NULL;
mpeg_frame->n_slices = 0;
mpeg_frame->slices = NULL;
}
static void
gst_vdp_mpeg_frame_class_init (gpointer g_class, gpointer class_data)
{
GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
gst_vdp_mpeg_frame_parent_class = g_type_class_peek_parent (g_class);
mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
gst_vdp_mpeg_frame_finalize;
}
GType
gst_vdp_mpeg_frame_get_type (void)
{
static GType _gst_vdp_mpeg_frame_type = 0;
if (G_UNLIKELY (_gst_vdp_mpeg_frame_type == 0)) {
static const GTypeInfo info = {
sizeof (GstVdpMpegFrameClass),
NULL,
NULL,
gst_vdp_mpeg_frame_class_init,
NULL,
NULL,
sizeof (GstVdpMpegFrame),
0,
(GInstanceInitFunc) gst_vdp_mpeg_frame_init,
NULL
};
_gst_vdp_mpeg_frame_type = g_type_register_static (GST_TYPE_VIDEO_FRAME,
"GstVdpMpegFrame", &info, 0);
DEBUG_INIT ();
}
return _gst_vdp_mpeg_frame_type;
}


@@ -0,0 +1,77 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_VDP_MPEG_FRAME_H_
#define _GST_VDP_MPEG_FRAME_H_
#include <gst/gst.h>
#include <vdpau/vdpau.h>
#include "../basevideodecoder/gstvideoframe.h"
#define GST_TYPE_VDP_MPEG_FRAME (gst_vdp_mpeg_frame_get_type())
#define GST_IS_VDP_MPEG_FRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_MPEG_FRAME))
#define GST_VDP_MPEG_FRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_MPEG_FRAME, GstVdpMpegFrame))
typedef struct _GstVdpMpegStreamInfo GstVdpMpegStreamInfo;
struct _GstVdpMpegStreamInfo
{
gint width, height;
gint fps_n, fps_d;
gint par_n, par_d;
gboolean interlaced;
gint version;
VdpDecoderProfile profile;
};
typedef struct _GstVdpMpegFrame GstVdpMpegFrame;
typedef struct _GstVdpMpegFrameClass GstVdpMpegFrameClass;
struct _GstVdpMpegFrame
{
GstVideoFrame video_frame;
GstBuffer *seq;
GstBuffer *seq_ext;
GstBuffer *pic;
GstBuffer *pic_ext;
GstBuffer *gop;
GstBuffer *qm_ext;
gint n_slices;
GstBuffer *slices;
};
struct _GstVdpMpegFrameClass
{
GstVideoFrameClass video_frame_class;
};
void gst_vdp_mpeg_frame_add_slice (GstVdpMpegFrame *mpeg_frame, GstBuffer *buf);
GstVdpMpegFrame *gst_vdp_mpeg_frame_new (void);
GType gst_vdp_mpeg_frame_get_type (void);
#endif