vdpau: rename GstBaseVideoDecoder to SatBaseVideoDecoder

Carl-Anton Ingmarsson 2010-06-24 13:18:55 +02:00
parent e312e2b121
commit aa0444f204
18 changed files with 704 additions and 698 deletions


@ -20,7 +20,7 @@ libgstvdpau_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(X11_CFLAGS) $
libgstvdpau_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \ libgstvdpau_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE_LIBS) $(X11_LIBS) -lgstvideo-$(GST_MAJORMINOR) \ $(GST_PLUGINS_BASE_LIBS) $(X11_LIBS) -lgstvideo-$(GST_MAJORMINOR) \
-lgstinterfaces-$(GST_MAJORMINOR) $(VDPAU_LIBS) \ -lgstinterfaces-$(GST_MAJORMINOR) $(VDPAU_LIBS) \
basevideodecoder/libgstbasevideodecoder.la \ basevideodecoder/libsatbasevideodecoder.la \
gstvdp/libgstvdp-@GST_MAJORMINOR@.la gstvdp/libgstvdp-@GST_MAJORMINOR@.la
libgstvdpau_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgstvdpau_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)


@ -1,15 +1,15 @@
noinst_LTLIBRARIES = libgstbasevideodecoder.la noinst_LTLIBRARIES = libsatbasevideodecoder.la
libgstbasevideodecoder_la_SOURCES = \ libsatbasevideodecoder_la_SOURCES = \
gstvideoframe.c \ satvideoframe.c \
gstbasevideodecoder.c satbasevideodecoder.c
libgstbasevideodecoder_la_CFLAGS = $(GST_CFLAGS) libsatbasevideodecoder_la_CFLAGS = $(GST_CFLAGS)
libgstbasevideodecoder_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \ libsatbasevideodecoder_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR) $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR)
libgstbasevideodecoder_la_LDFLAGS = $(GST_ALL_LDFLAGS) -module -avoid-version libsatbasevideodecoder_la_LDFLAGS = $(GST_ALL_LDFLAGS) -module -avoid-version
noinst_HEADERS = \ noinst_HEADERS = \
gstvideoframe.h \ satvideoframe.h \
gstbasevideodecoder.h \ satbasevideodecoder.h \
gstbasevideoutils.h satbasevideoutils.h


@ -1,205 +0,0 @@
/* GStreamer
* Copyright (C) 2008 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_BASE_VIDEO_DECODER_H_
#define _GST_BASE_VIDEO_DECODER_H_
#include "gstbasevideoutils.h"
#include "gstvideoframe.h"
G_BEGIN_DECLS
#define GST_TYPE_BASE_VIDEO_DECODER \
(gst_base_video_decoder_get_type())
#define GST_BASE_VIDEO_DECODER(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoder))
#define GST_BASE_VIDEO_DECODER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoderClass))
#define GST_BASE_VIDEO_DECODER_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoderClass))
#define GST_IS_BASE_VIDEO_DECODER(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_DECODER))
#define GST_IS_BASE_VIDEO_DECODER_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_DECODER))
/**
* GST_BASE_VIDEO_DECODER_SINK_NAME:
*
* The name of the templates for the sink pad.
*/
#define GST_BASE_VIDEO_DECODER_SINK_NAME "sink"
/**
* GST_BASE_VIDEO_DECODER_SRC_NAME:
*
* The name of the templates for the source pad.
*/
#define GST_BASE_VIDEO_DECODER_SRC_NAME "src"
/**
* GST_BASE_VIDEO_CODEC_SRC_PAD:
* @obj: base video codec instance
*
* Gives the pointer to the source #GstPad object of the element.
*/
#define GST_BASE_VIDEO_DECODER_SRC_PAD(obj) (((GstBaseVideoDecoder *) (obj))->srcpad)
/**
* GST_BASE_VIDEO_CODEC_SINK_PAD:
* @obj: base video codec instance
*
* Gives the pointer to the sink #GstPad object of the element.
*/
#define GST_BASE_VIDEO_DECODER_SINK_PAD(obj) (((GstBaseVideoDecoder *) (obj))->sinkpad)
/**
* * GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA:
* *
* */
#define GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
typedef enum _GstBaseVideoDecoderScanResult GstBaseVideoDecoderScanResult;
enum _GstBaseVideoDecoderScanResult
{
GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK,
GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC,
GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA
};
typedef struct _GstBaseVideoDecoder GstBaseVideoDecoder;
typedef struct _GstBaseVideoDecoderClass GstBaseVideoDecoderClass;
struct _GstBaseVideoDecoder
{
GstElement element;
/*< private >*/
GstPad *sinkpad;
GstPad *srcpad;
GstAdapter *input_adapter;
gboolean have_sync;
gboolean discont;
GstVideoState state;
GstSegment segment;
guint64 presentation_frame_number;
guint64 system_frame_number;
GstCaps *caps;
gboolean have_src_caps;
GstVideoFrame *current_frame;
gint distance_from_sync;
gint reorder_depth;
GstClockTime buffer_timestamp;
GstClockTime timestamp_offset;
gdouble proportion;
GstClockTime earliest_time;
guint64 input_offset;
guint64 frame_offset;
GstClockTime last_timestamp;
guint64 base_picture_number;
gint field_index;
gboolean is_delta_unit;
GList *timestamps;
gboolean have_segment;
/* properties */
gboolean sink_clipping;
gboolean packetized;
};
struct _GstBaseVideoDecoderClass
{
GstElementClass element_class;
gboolean (*start) (GstBaseVideoDecoder *coder);
gboolean (*stop) (GstBaseVideoDecoder *coder);
gboolean (*flush) (GstBaseVideoDecoder *coder);
gboolean (*set_sink_caps) (GstBaseVideoDecoder *base_video_decoder,
GstCaps *caps);
GstPad *(*create_srcpad) (GstBaseVideoDecoder * base_video_decoder,
GstBaseVideoDecoderClass *base_video_decoder_class);
gint (*scan_for_sync) (GstBaseVideoDecoder *coder, GstAdapter *adapter);
GstBaseVideoDecoderScanResult (*scan_for_packet_end)
(GstBaseVideoDecoder *coder, GstAdapter *adapter, guint *size, gboolean at_eos);
GstFlowReturn (*parse_data) (GstBaseVideoDecoder *decoder,
GstBuffer *buf, gboolean at_eos);
GstVideoFrame *(*create_frame) (GstBaseVideoDecoder *coder);
GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrame *frame,
GstClockTimeDiff deadline);
GstFlowReturn (*shape_output) (GstBaseVideoDecoder *coder,
GstBuffer *buf);
};
GType gst_base_video_decoder_get_type (void);
GstVideoFrame *gst_base_video_decoder_get_current_frame (GstBaseVideoDecoder
*base_video_decoder);
GstVideoFrame *gst_base_video_decoder_get_frame (GstBaseVideoDecoder *coder,
gint frame_number);
GstVideoFrame *gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder *coder);
GstFlowReturn gst_base_video_decoder_finish_frame (GstBaseVideoDecoder *base_video_decoder,
GstVideoFrame *frame);
void gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame);
void
gst_base_video_decoder_frame_start (GstBaseVideoDecoder *base_video_decoder,
GstBuffer *buf);
GstFlowReturn
gst_base_video_decoder_have_frame (GstBaseVideoDecoder *base_video_decoder,
GstVideoFrame **new_frame);
GstVideoState * gst_base_video_decoder_get_state (GstBaseVideoDecoder *base_video_decoder);
void gst_base_video_decoder_set_state (GstBaseVideoDecoder *base_video_decoder,
GstVideoState *state);
void gst_base_video_decoder_lost_sync (GstBaseVideoDecoder *base_video_decoder);
void gst_base_video_decoder_update_src_caps (GstBaseVideoDecoder *base_video_decoder);
G_END_DECLS
#endif
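
The header above defines the contract that the vdpau decoders build on. For orientation only, a minimal subclass skeleton against this API might look as follows; MyDec and its functions are hypothetical and not part of this commit, and the sketch assumes the GStreamer 0.10 GST_BOILERPLATE convention that the base class itself uses. After this commit the same hooks simply carry the Sat/SAT_/sat_ prefixes.

#include "gstbasevideodecoder.h"

typedef struct _MyDec { GstBaseVideoDecoder base_video_decoder; } MyDec;
typedef struct _MyDecClass { GstBaseVideoDecoderClass base_class; } MyDecClass;

GST_BOILERPLATE (MyDec, my_dec, GstBaseVideoDecoder, GST_TYPE_BASE_VIDEO_DECODER);

static GstFlowReturn
my_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  /* decode frame->sink_buffer, attach the result as frame->src_buffer,
   * then hand the frame back to the base class for timestamping and pushing */
  return gst_base_video_decoder_finish_frame (base_video_decoder, frame);
}

static void
my_dec_base_init (gpointer g_class)
{
  /* a real subclass would add its "sink"/"src" pad templates here, using
   * GST_BASE_VIDEO_DECODER_SINK_NAME and GST_BASE_VIDEO_DECODER_SRC_NAME */
}

static void
my_dec_class_init (MyDecClass * klass)
{
  GstBaseVideoDecoderClass *base_class = GST_BASE_VIDEO_DECODER_CLASS (klass);

  base_class->handle_frame = my_dec_handle_frame;
}

static void
my_dec_init (MyDec * dec, MyDecClass * klass)
{
}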


@ -1,156 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_VIDEO_FRAME_H_
#define _GST_VIDEO_FRAME_H_
#include <gst/gst.h>
#define GST_TYPE_VIDEO_FRAME (gst_video_frame_get_type())
#define GST_IS_VIDEO_FRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VIDEO_FRAME))
#define GST_VIDEO_FRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VIDEO_FRAME, GstVideoFrame))
#define GST_VIDEO_FRAME_CAST(obj) ((GstVideoFrame *)obj)
/**
* GstVideoFrameFlag:
* @GST_VIDEO_FRAME_FLAG_PREROLL: the frame is part of a preroll and should not be
* displayed.
* @GST_VIDEO_FRAME_FLAG_DISCONT: the frame marks a discontinuity in the stream.
* This typically occurs after a seek or a dropped buffer from a live or
* network source.
* @GST_VIDEO_FRAME_FLAG_GAP: the frame has been created to fill a gap in the
* stream and contains media neutral data (elements can switch to optimized code
* path that ignores the buffer content).
* @GST_VIDEO_FRAME_FLAG_KEYFRAME: the frame is a keyframe.
* @GST_VIDEO_FRAME_FLAG_SYNC_POINT: the frame marks a sync point.
* @GST_VIDEO_FRAME_FLAG_EOS: the frame is the last in the stream.
* @GST_VIDEO_FRAME_FLAG_TFF: If the frame is interlaced, then the first
* field in the video frame is the top field. If unset, the bottom field is first.
* @GST_VIDEO_FRAME_FLAG_LAST: additional flags can be added starting from this flag.
* A set of frame flags used to describe properties of a #GstVideoFrame.
*/
typedef enum
{
GST_VIDEO_FRAME_FLAG_PREROLL = (GST_MINI_OBJECT_FLAG_LAST << 0),
GST_VIDEO_FRAME_FLAG_DISCONT = (GST_MINI_OBJECT_FLAG_LAST << 1),
GST_VIDEO_FRAME_FLAG_GAP = (GST_MINI_OBJECT_FLAG_LAST << 2),
GST_VIDEO_FRAME_FLAG_KEYFRAME = (GST_MINI_OBJECT_FLAG_LAST << 3),
GST_VIDEO_FRAME_FLAG_SYNC_POINT = (GST_MINI_OBJECT_FLAG_LAST << 4),
GST_VIDEO_FRAME_FLAG_EOS = (GST_MINI_OBJECT_FLAG_LAST << 5),
GST_VIDEO_FRAME_FLAG_TFF = (GST_MINI_OBJECT_FLAG_LAST << 6),
GST_VIDEO_FRAME_FLAG_LAST = (GST_MINI_OBJECT_FLAG_LAST << 7)
} GstVideoFrameFlag;
typedef struct _GstVideoFrame GstVideoFrame;
typedef struct _GstVideoFrameClass GstVideoFrameClass;
struct _GstVideoFrame
{
GstMiniObject mini_object;
GstClockTime decode_timestamp;
GstClockTime presentation_timestamp;
GstClockTime presentation_duration;
gint system_frame_number;
gint decode_frame_number;
gint presentation_frame_number;
gint distance_from_sync;
GstBuffer *sink_buffer;
GstBuffer *src_buffer;
gint field_index;
gint n_fields;
};
struct _GstVideoFrameClass
{
GstMiniObjectClass mini_object_class;
};
/* refcounting */
/**
* gst_video_frame_ref:
* @frame: a #GstVideoFrame.
*
* Increases the refcount of the given frame by one.
*
* Returns: @frame
*/
#ifdef _FOOL_GTK_DOC_
G_INLINE_FUNC GstVideoFrame * gst_buffer_ref (GstVideoFrame * frame);
#endif
static inline GstVideoFrame *
gst_video_frame_ref (GstVideoFrame *frame)
{
return (GstVideoFrame *) gst_mini_object_ref (GST_MINI_OBJECT_CAST (frame));
}
/**
* gst_video_frame_unref:
* @frame: a #GstVideoFrame.
*
* Decreases the refcount of the frame. If the refcount reaches 0, the frame
* will be freed.
*/
#ifdef _FOOL_GTK_DOC_
G_INLINE_FUNC void gst_video_frame_unref (GstVideoFrame * frame);
#endif
static inline void
gst_video_frame_unref (GstVideoFrame * frame)
{
gst_mini_object_unref (GST_MINI_OBJECT_CAST (frame));
}
/**
* GST_VIDEO_FRAME_FLAG_IS_SET:
* @buf: a #GstVideoFrame.
* @flag: the #GstVideoFrameFlag to check.
*
* Gives the status of a specific flag on a video frame.
*/
#define GST_VIDEO_FRAME_FLAG_IS_SET(frame,flag) GST_MINI_OBJECT_FLAG_IS_SET (frame, flag)
/**
* GST_VIDEO_FRAME_FLAG_SET:
* @buf: a #GstVideoFrame.
* @flag: the #GstVideoFrameFlag to set.
*
* Sets a frame flag on a video frame.
*/
#define GST_VIDEO_FRAME_FLAG_SET(frame,flag) GST_MINI_OBJECT_FLAG_SET (frame, flag)
/**
* GST_VIDEO_FRAME_FLAG_UNSET:
* @buf: a #GstVideoFrame.
* @flag: the #GstVideoFrameFlag to clear.
*
* Clears a frame flag.
*/
#define GST_VIDEO_FRAME_FLAG_UNSET(frame,flag) GST_MINI_OBJECT_FLAG_UNSET (frame, flag)
GstVideoFrame *gst_video_frame_new (void);
GType gst_video_frame_get_type (void);
#endif
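
For context, the frame object above is a plain GstMiniObject with manual refcounting and flag macros. A minimal illustrative sketch of its lifecycle (not part of this commit; after this commit the same API carries the sat_/SAT_ prefixes):

#include "gstvideoframe.h"

static void
example_frame_lifecycle (void)
{
  /* a newly created frame starts with one reference */
  GstVideoFrame *frame = gst_video_frame_new ();

  frame->presentation_timestamp = 40 * GST_MSECOND;
  GST_VIDEO_FRAME_FLAG_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT);

  if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) {
    /* e.g. a decoder would reset its distance_from_sync counter here */
  }

  /* dropping the last reference frees the frame */
  gst_video_frame_unref (frame);
}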


@ -21,7 +21,7 @@
#include "config.h" #include "config.h"
#endif #endif
#include "gstbasevideodecoder.h" #include "satbasevideodecoder.h"
#include <string.h> #include <string.h>
@ -36,11 +36,11 @@ enum
}; };
static GstFlowReturn gst_base_video_decoder_drain (GstBaseVideoDecoder * dec, static GstFlowReturn sat_base_video_decoder_drain (SatBaseVideoDecoder * dec,
gboolean at_eos); gboolean at_eos);
GST_BOILERPLATE (GstBaseVideoDecoder, gst_base_video_decoder, GST_BOILERPLATE (SatBaseVideoDecoder, sat_base_video_decoder,
GstElement, GST_TYPE_ELEMENT); GstElement, GST_TYPE_ELEMENT);
@ -54,7 +54,7 @@ struct _Timestamp
}; };
static void static void
gst_base_video_decoder_add_timestamp (GstBaseVideoDecoder * base_video_decoder, sat_base_video_decoder_add_timestamp (SatBaseVideoDecoder * base_video_decoder,
GstBuffer * buffer) GstBuffer * buffer)
{ {
Timestamp *ts; Timestamp *ts;
@ -74,7 +74,7 @@ gst_base_video_decoder_add_timestamp (GstBaseVideoDecoder * base_video_decoder,
} }
static void static void
gst_base_video_decoder_get_timestamp_at_offset (GstBaseVideoDecoder * sat_base_video_decoder_get_timestamp_at_offset (SatBaseVideoDecoder *
base_video_decoder, guint64 offset, GstClockTime * timestamp, base_video_decoder, guint64 offset, GstClockTime * timestamp,
GstClockTime * duration) GstClockTime * duration)
{ {
@ -105,7 +105,7 @@ gst_base_video_decoder_get_timestamp_at_offset (GstBaseVideoDecoder *
} }
static guint64 static guint64
gst_base_video_decoder_get_timestamp (GstBaseVideoDecoder * base_video_decoder, sat_base_video_decoder_get_timestamp (SatBaseVideoDecoder * base_video_decoder,
gint picture_number) gint picture_number)
{ {
if (base_video_decoder->state.fps_d == 0) { if (base_video_decoder->state.fps_d == 0) {
@ -126,7 +126,7 @@ gst_base_video_decoder_get_timestamp (GstBaseVideoDecoder * base_video_decoder,
} }
static guint64 static guint64
gst_base_video_decoder_get_field_timestamp (GstBaseVideoDecoder * sat_base_video_decoder_get_field_timestamp (SatBaseVideoDecoder *
base_video_decoder, gint field_offset) base_video_decoder, gint field_offset)
{ {
if (base_video_decoder->state.fps_d == 0) { if (base_video_decoder->state.fps_d == 0) {
@ -143,7 +143,7 @@ gst_base_video_decoder_get_field_timestamp (GstBaseVideoDecoder *
} }
static guint64 static guint64
gst_base_video_decoder_get_field_duration (GstBaseVideoDecoder * sat_base_video_decoder_get_field_duration (SatBaseVideoDecoder *
base_video_decoder, gint n_fields) base_video_decoder, gint n_fields)
{ {
if (base_video_decoder->state.fps_d == 0) { if (base_video_decoder->state.fps_d == 0) {
@ -158,18 +158,18 @@ gst_base_video_decoder_get_field_duration (GstBaseVideoDecoder *
base_video_decoder->state.fps_n * 2); base_video_decoder->state.fps_n * 2);
} }
static GstVideoFrame * static SatVideoFrame *
gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder) sat_base_video_decoder_new_frame (SatBaseVideoDecoder * base_video_decoder)
{ {
GstBaseVideoDecoderClass *base_video_decoder_class = SatBaseVideoDecoderClass *base_video_decoder_class =
GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); SAT_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
GstVideoFrame *frame; SatVideoFrame *frame;
if (base_video_decoder_class->create_frame) if (base_video_decoder_class->create_frame)
frame = base_video_decoder_class->create_frame (base_video_decoder); frame = base_video_decoder_class->create_frame (base_video_decoder);
else else
frame = gst_video_frame_new (); frame = sat_video_frame_new ();
frame->system_frame_number = base_video_decoder->system_frame_number; frame->system_frame_number = base_video_decoder->system_frame_number;
base_video_decoder->system_frame_number++; base_video_decoder->system_frame_number++;
@ -181,7 +181,7 @@ gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder)
} }
static void static void
gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder) sat_base_video_decoder_reset (SatBaseVideoDecoder * base_video_decoder)
{ {
GST_DEBUG ("reset"); GST_DEBUG ("reset");
@ -203,7 +203,7 @@ gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder)
} }
if (base_video_decoder->current_frame) { if (base_video_decoder->current_frame) {
gst_video_frame_unref (base_video_decoder->current_frame); sat_video_frame_unref (base_video_decoder->current_frame);
base_video_decoder->current_frame = NULL; base_video_decoder->current_frame = NULL;
} }
@ -216,14 +216,14 @@ gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder)
} }
static void static void
gst_base_video_decoder_flush (GstBaseVideoDecoder * base_video_decoder) sat_base_video_decoder_flush (SatBaseVideoDecoder * base_video_decoder)
{ {
GstBaseVideoDecoderClass *base_video_decoder_class; SatBaseVideoDecoderClass *base_video_decoder_class;
gst_base_video_decoder_reset (base_video_decoder); sat_base_video_decoder_reset (base_video_decoder);
base_video_decoder_class = base_video_decoder_class =
GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); SAT_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
if (base_video_decoder_class->flush) if (base_video_decoder_class->flush)
base_video_decoder_class->flush (base_video_decoder); base_video_decoder_class->flush (base_video_decoder);
@ -231,18 +231,18 @@ gst_base_video_decoder_flush (GstBaseVideoDecoder * base_video_decoder)
static gboolean static gboolean
gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps) sat_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{ {
GstBaseVideoDecoder *base_video_decoder; SatBaseVideoDecoder *base_video_decoder;
GstBaseVideoDecoderClass *base_video_decoder_class; SatBaseVideoDecoderClass *base_video_decoder_class;
GstStructure *structure; GstStructure *structure;
const GValue *codec_data; const GValue *codec_data;
GstVideoState *state; SatVideoState *state;
gboolean ret = TRUE; gboolean ret = TRUE;
base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); base_video_decoder = SAT_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
base_video_decoder_class = base_video_decoder_class =
GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); SAT_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
GST_DEBUG ("setcaps %" GST_PTR_FORMAT, caps); GST_DEBUG ("setcaps %" GST_PTR_FORMAT, caps);
@ -251,7 +251,7 @@ gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
if (state->codec_data) { if (state->codec_data) {
gst_buffer_unref (state->codec_data); gst_buffer_unref (state->codec_data);
} }
memset (state, 0, sizeof (GstVideoState)); memset (state, 0, sizeof (SatVideoState));
structure = gst_caps_get_structure (caps, 0); structure = gst_caps_get_structure (caps, 0);
@ -274,24 +274,24 @@ gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
} }
static gboolean static gboolean
gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event) sat_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
{ {
GstBaseVideoDecoder *base_video_decoder; SatBaseVideoDecoder *base_video_decoder;
GstBaseVideoDecoderClass *base_video_decoder_class; SatBaseVideoDecoderClass *base_video_decoder_class;
gboolean ret = FALSE; gboolean ret = FALSE;
base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); base_video_decoder = SAT_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
base_video_decoder_class = base_video_decoder_class =
GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); SAT_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
switch (GST_EVENT_TYPE (event)) { switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS: case GST_EVENT_EOS:
{ {
if (!base_video_decoder->packetized) if (!base_video_decoder->packetized)
gst_base_video_decoder_drain (base_video_decoder, TRUE); sat_base_video_decoder_drain (base_video_decoder, TRUE);
ret = ret =
gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD gst_pad_push_event (SAT_BASE_VIDEO_DECODER_SRC_PAD
(base_video_decoder), event); (base_video_decoder), event);
} }
break; break;
@ -313,7 +313,7 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
goto newseg_wrong_format; goto newseg_wrong_format;
if (!update) { if (!update) {
gst_base_video_decoder_flush (base_video_decoder); sat_base_video_decoder_flush (base_video_decoder);
} }
base_video_decoder->timestamp_offset = start; base_video_decoder->timestamp_offset = start;
@ -331,7 +331,7 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time), update); GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time), update);
ret = ret =
gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD gst_pad_push_event (SAT_BASE_VIDEO_DECODER_SRC_PAD
(base_video_decoder), event); (base_video_decoder), event);
} }
break; break;
@ -344,7 +344,7 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
default: default:
/* FIXME this changes the order of events */ /* FIXME this changes the order of events */
ret = ret =
gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD gst_pad_push_event (SAT_BASE_VIDEO_DECODER_SRC_PAD
(base_video_decoder), event); (base_video_decoder), event);
break; break;
} }
@ -363,19 +363,19 @@ newseg_wrong_format:
#if 0 #if 0
static gboolean static gboolean
gst_base_video_decoder_sink_convert (GstPad * pad, sat_base_video_decoder_sink_convert (GstPad * pad,
GstFormat src_format, gint64 src_value, GstFormat src_format, gint64 src_value,
GstFormat * dest_format, gint64 * dest_value) GstFormat * dest_format, gint64 * dest_value)
{ {
gboolean res = TRUE; gboolean res = TRUE;
GstBaseVideoDecoder *enc; SatBaseVideoDecoder *enc;
if (src_format == *dest_format) { if (src_format == *dest_format) {
*dest_value = src_value; *dest_value = src_value;
return TRUE; return TRUE;
} }
enc = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); enc = SAT_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
/* FIXME: check if we are in a decoding state */ /* FIXME: check if we are in a decoding state */
@ -418,19 +418,19 @@ gst_base_video_decoder_sink_convert (GstPad * pad,
#endif #endif
static gboolean static gboolean
gst_base_video_decoder_src_convert (GstPad * pad, sat_base_video_decoder_src_convert (GstPad * pad,
GstFormat src_format, gint64 src_value, GstFormat src_format, gint64 src_value,
GstFormat * dest_format, gint64 * dest_value) GstFormat * dest_format, gint64 * dest_value)
{ {
gboolean res = TRUE; gboolean res = TRUE;
GstBaseVideoDecoder *enc; SatBaseVideoDecoder *enc;
if (src_format == *dest_format) { if (src_format == *dest_format) {
*dest_value = src_value; *dest_value = src_value;
return TRUE; return TRUE;
} }
enc = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); enc = SAT_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
/* FIXME: check if we are in a encoding state */ /* FIXME: check if we are in a encoding state */
@ -472,12 +472,12 @@ gst_base_video_decoder_src_convert (GstPad * pad,
} }
static gboolean static gboolean
gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event) sat_base_video_decoder_src_event (GstPad * pad, GstEvent * event)
{ {
GstBaseVideoDecoder *base_video_decoder; SatBaseVideoDecoder *base_video_decoder;
gboolean res = FALSE; gboolean res = FALSE;
base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); base_video_decoder = SAT_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
switch (GST_EVENT_TYPE (event)) { switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK: case GST_EVENT_SEEK:
@ -498,12 +498,12 @@ gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event)
tformat = GST_FORMAT_TIME; tformat = GST_FORMAT_TIME;
res = res =
gst_base_video_decoder_src_convert (pad, format, cur, &tformat, sat_base_video_decoder_src_convert (pad, format, cur, &tformat,
&tcur); &tcur);
if (!res) if (!res)
goto convert_error; goto convert_error;
res = res =
gst_base_video_decoder_src_convert (pad, format, stop, &tformat, sat_base_video_decoder_src_convert (pad, format, stop, &tformat,
&tstop); &tstop);
if (!res) if (!res)
goto convert_error; goto convert_error;
@ -512,7 +512,7 @@ gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event)
flags, cur_type, tcur, stop_type, tstop); flags, cur_type, tcur, stop_type, tstop);
res = res =
gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD gst_pad_push_event (SAT_BASE_VIDEO_DECODER_SINK_PAD
(base_video_decoder), real_seek); (base_video_decoder), real_seek);
break; break;
@ -551,13 +551,13 @@ gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event)
GST_TIME_ARGS (timestamp), diff, proportion); GST_TIME_ARGS (timestamp), diff, proportion);
res = res =
gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD gst_pad_push_event (SAT_BASE_VIDEO_DECODER_SINK_PAD
(base_video_decoder), event); (base_video_decoder), event);
break; break;
} }
default: default:
res = res =
gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD gst_pad_push_event (SAT_BASE_VIDEO_DECODER_SINK_PAD
(base_video_decoder), event); (base_video_decoder), event);
break; break;
} }
@ -571,7 +571,7 @@ convert_error:
} }
static const GstQueryType * static const GstQueryType *
gst_base_video_decoder_get_query_types (GstPad * pad) sat_base_video_decoder_get_query_types (GstPad * pad)
{ {
static const GstQueryType query_types[] = { static const GstQueryType query_types[] = {
GST_QUERY_POSITION, GST_QUERY_POSITION,
@ -584,12 +584,12 @@ gst_base_video_decoder_get_query_types (GstPad * pad)
} }
static gboolean static gboolean
gst_base_video_decoder_src_query (GstPad * pad, GstQuery * query) sat_base_video_decoder_src_query (GstPad * pad, GstQuery * query)
{ {
GstBaseVideoDecoder *dec; SatBaseVideoDecoder *dec;
gboolean res = TRUE; gboolean res = TRUE;
dec = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); dec = SAT_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
switch GST_QUERY_TYPE switch GST_QUERY_TYPE
(query) { (query) {
@ -628,7 +628,7 @@ gst_base_video_decoder_src_query (GstPad * pad, GstQuery * query)
gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
res = res =
gst_base_video_decoder_src_convert (pad, src_fmt, src_val, &dest_fmt, sat_base_video_decoder_src_convert (pad, src_fmt, src_val, &dest_fmt,
&dest_val); &dest_val);
if (!res) if (!res)
goto error; goto error;
@ -648,12 +648,12 @@ error:
} }
static gboolean static gboolean
gst_base_video_decoder_sink_query (GstPad * pad, GstQuery * query) sat_base_video_decoder_sink_query (GstPad * pad, GstQuery * query)
{ {
GstBaseVideoDecoder *base_video_decoder; SatBaseVideoDecoder *base_video_decoder;
gboolean res = FALSE; gboolean res = FALSE;
base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); base_video_decoder = SAT_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
GST_DEBUG_OBJECT (base_video_decoder, "sink query fps=%d/%d", GST_DEBUG_OBJECT (base_video_decoder, "sink query fps=%d/%d",
base_video_decoder->state.fps_n, base_video_decoder->state.fps_d); base_video_decoder->state.fps_n, base_video_decoder->state.fps_d);
@ -670,10 +670,10 @@ gst_base_video_decoder_sink_query (GstPad * pad, GstQuery * query)
} }
static void static void
gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder) sat_base_video_decoder_set_src_caps (SatBaseVideoDecoder * base_video_decoder)
{ {
GstCaps *caps; GstCaps *caps;
GstVideoState *state = &base_video_decoder->state; SatVideoState *state = &base_video_decoder->state;
if (base_video_decoder->have_src_caps) if (base_video_decoder->have_src_caps)
return; return;
@ -695,7 +695,7 @@ gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder)
GST_DEBUG ("setting caps %" GST_PTR_FORMAT, caps); GST_DEBUG ("setting caps %" GST_PTR_FORMAT, caps);
gst_pad_set_caps (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder), caps); gst_pad_set_caps (SAT_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder), caps);
base_video_decoder->have_src_caps = TRUE; base_video_decoder->have_src_caps = TRUE;
@ -713,13 +713,13 @@ empty_caps:
} }
static GstFlowReturn static GstFlowReturn
gst_base_video_decoder_drain (GstBaseVideoDecoder * dec, gboolean at_eos) sat_base_video_decoder_drain (SatBaseVideoDecoder * dec, gboolean at_eos)
{ {
GstBaseVideoDecoderClass *klass; SatBaseVideoDecoderClass *klass;
GstBaseVideoDecoderScanResult res; SatBaseVideoDecoderScanResult res;
guint size; guint size;
klass = GST_BASE_VIDEO_DECODER_GET_CLASS (dec); klass = SAT_BASE_VIDEO_DECODER_GET_CLASS (dec);
if (gst_adapter_available (dec->input_adapter) == 0) if (gst_adapter_available (dec->input_adapter) == 0)
return GST_FLOW_OK; return GST_FLOW_OK;
@ -759,7 +759,7 @@ lost_sync:
} }
res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos); res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos);
while (res == GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK) { while (res == SAT_BASE_VIDEO_DECODER_SCAN_RESULT_OK) {
GstBuffer *buf; GstBuffer *buf;
GstFlowReturn ret; GstFlowReturn ret;
@ -779,11 +779,11 @@ lost_sync:
} }
switch (res) { switch (res) {
case GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC: case SAT_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC:
dec->have_sync = FALSE; dec->have_sync = FALSE;
goto lost_sync; goto lost_sync;
case GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA: case SAT_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA:
return GST_FLOW_OK; return GST_FLOW_OK;
default: default:
@ -793,10 +793,10 @@ lost_sync:
} }
static GstFlowReturn static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf) sat_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{ {
GstBaseVideoDecoder *base_video_decoder; SatBaseVideoDecoder *base_video_decoder;
GstBaseVideoDecoderClass *base_video_decoder_class; SatBaseVideoDecoderClass *base_video_decoder_class;
GstFlowReturn ret; GstFlowReturn ret;
GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT, GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT,
@ -812,9 +812,9 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
} }
#endif #endif
base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); base_video_decoder = SAT_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
base_video_decoder_class = base_video_decoder_class =
GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); SAT_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
GST_DEBUG_OBJECT (base_video_decoder, "chain"); GST_DEBUG_OBJECT (base_video_decoder, "chain");
@ -833,7 +833,7 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
GST_CLOCK_TIME_NONE, 0); GST_CLOCK_TIME_NONE, 0);
ret = ret =
gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder), gst_pad_push_event (SAT_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
event); event);
if (!ret) { if (!ret) {
GST_ERROR ("new segment event ret=%d", ret); GST_ERROR ("new segment event ret=%d", ret);
@ -843,16 +843,16 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) { if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer"); GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
gst_base_video_decoder_flush (base_video_decoder); sat_base_video_decoder_flush (base_video_decoder);
} }
if (base_video_decoder->current_frame == NULL) { if (base_video_decoder->current_frame == NULL) {
base_video_decoder->current_frame = base_video_decoder->current_frame =
gst_base_video_decoder_new_frame (base_video_decoder); sat_base_video_decoder_new_frame (base_video_decoder);
} }
if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) { if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
gst_base_video_decoder_add_timestamp (base_video_decoder, buf); sat_base_video_decoder_add_timestamp (base_video_decoder, buf);
} }
base_video_decoder->input_offset += GST_BUFFER_SIZE (buf); base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);
@ -860,12 +860,12 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
if (base_video_decoder->packetized) { if (base_video_decoder->packetized) {
base_video_decoder->current_frame->sink_buffer = buf; base_video_decoder->current_frame->sink_buffer = buf;
ret = gst_base_video_decoder_have_frame (base_video_decoder, NULL); ret = sat_base_video_decoder_have_frame (base_video_decoder, NULL);
} else { } else {
gst_adapter_push (base_video_decoder->input_adapter, buf); gst_adapter_push (base_video_decoder->input_adapter, buf);
ret = gst_base_video_decoder_drain (base_video_decoder, FALSE); ret = sat_base_video_decoder_drain (base_video_decoder, FALSE);
} }
gst_object_unref (base_video_decoder); gst_object_unref (base_video_decoder);
@ -873,14 +873,14 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
} }
static gboolean static gboolean
gst_base_video_decoder_stop (GstBaseVideoDecoder * base_video_decoder) sat_base_video_decoder_stop (SatBaseVideoDecoder * base_video_decoder)
{ {
GstBaseVideoDecoderClass *base_video_decoder_class; SatBaseVideoDecoderClass *base_video_decoder_class;
GST_DEBUG ("stop"); GST_DEBUG ("stop");
base_video_decoder_class = base_video_decoder_class =
GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); SAT_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
if (base_video_decoder_class->stop) if (base_video_decoder_class->stop)
return base_video_decoder_class->stop (base_video_decoder); return base_video_decoder_class->stop (base_video_decoder);
@ -889,16 +889,16 @@ gst_base_video_decoder_stop (GstBaseVideoDecoder * base_video_decoder)
} }
static gboolean static gboolean
gst_base_video_decoder_start (GstBaseVideoDecoder * base_video_decoder) sat_base_video_decoder_start (SatBaseVideoDecoder * base_video_decoder)
{ {
GstBaseVideoDecoderClass *base_video_decoder_class; SatBaseVideoDecoderClass *base_video_decoder_class;
GST_DEBUG ("start"); GST_DEBUG ("start");
base_video_decoder_class = base_video_decoder_class =
GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); SAT_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
gst_base_video_decoder_reset (base_video_decoder); sat_base_video_decoder_reset (base_video_decoder);
if (base_video_decoder_class->start) if (base_video_decoder_class->start)
return base_video_decoder_class->start (base_video_decoder); return base_video_decoder_class->start (base_video_decoder);
@ -907,17 +907,17 @@ gst_base_video_decoder_start (GstBaseVideoDecoder * base_video_decoder)
} }
static GstStateChangeReturn static GstStateChangeReturn
gst_base_video_decoder_change_state (GstElement * element, sat_base_video_decoder_change_state (GstElement * element,
GstStateChange transition) GstStateChange transition)
{ {
GstBaseVideoDecoder *base_video_decoder; SatBaseVideoDecoder *base_video_decoder;
GstStateChangeReturn ret; GstStateChangeReturn ret;
base_video_decoder = GST_BASE_VIDEO_DECODER (element); base_video_decoder = SAT_BASE_VIDEO_DECODER (element);
switch (transition) { switch (transition) {
case GST_STATE_CHANGE_READY_TO_PAUSED: case GST_STATE_CHANGE_READY_TO_PAUSED:
gst_base_video_decoder_start (base_video_decoder); sat_base_video_decoder_start (base_video_decoder);
break; break;
default: default:
@ -928,7 +928,7 @@ gst_base_video_decoder_change_state (GstElement * element,
switch (transition) { switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY: case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_base_video_decoder_stop (base_video_decoder); sat_base_video_decoder_stop (base_video_decoder);
break; break;
default: default:
@ -939,19 +939,19 @@ gst_base_video_decoder_change_state (GstElement * element,
} }
GstFlowReturn GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, sat_base_video_decoder_finish_frame (SatBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame) SatVideoFrame * frame)
{ {
GstBaseVideoDecoderClass *base_video_decoder_class; SatBaseVideoDecoderClass *base_video_decoder_class;
GstBuffer *src_buffer; GstBuffer *src_buffer;
GST_DEBUG ("finish frame"); GST_DEBUG ("finish frame");
base_video_decoder_class = base_video_decoder_class =
GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); SAT_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
GST_DEBUG ("finish frame sync=%d pts=%" GST_TIME_FORMAT, GST_DEBUG ("finish frame sync=%d pts=%" GST_TIME_FORMAT,
GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT), SAT_VIDEO_FRAME_FLAG_IS_SET (frame, SAT_VIDEO_FRAME_FLAG_SYNC_POINT),
GST_TIME_ARGS (frame->presentation_timestamp)); GST_TIME_ARGS (frame->presentation_timestamp));
if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) { if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
@ -969,7 +969,7 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
frame->presentation_timestamp = GST_CLOCK_TIME_NONE; frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
} }
} else { } else {
if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) { if (SAT_VIDEO_FRAME_FLAG_IS_SET (frame, SAT_VIDEO_FRAME_FLAG_SYNC_POINT)) {
GST_WARNING ("sync point doesn't have timestamp"); GST_WARNING ("sync point doesn't have timestamp");
if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) { if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
GST_WARNING GST_WARNING
@ -985,16 +985,16 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) { if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
frame->presentation_timestamp = frame->presentation_timestamp =
gst_base_video_decoder_get_field_timestamp (base_video_decoder, sat_base_video_decoder_get_field_timestamp (base_video_decoder,
frame->field_index); frame->field_index);
frame->presentation_duration = GST_CLOCK_TIME_NONE; frame->presentation_duration = GST_CLOCK_TIME_NONE;
frame->decode_timestamp = frame->decode_timestamp =
gst_base_video_decoder_get_timestamp (base_video_decoder, sat_base_video_decoder_get_timestamp (base_video_decoder,
frame->decode_frame_number); frame->decode_frame_number);
} }
if (frame->presentation_duration == GST_CLOCK_TIME_NONE) { if (frame->presentation_duration == GST_CLOCK_TIME_NONE) {
frame->presentation_duration = frame->presentation_duration =
gst_base_video_decoder_get_field_duration (base_video_decoder, sat_base_video_decoder_get_field_duration (base_video_decoder,
frame->n_fields); frame->n_fields);
} }
@ -1021,7 +1021,7 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7) #define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)
#endif #endif
if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_TFF)) { if (SAT_VIDEO_FRAME_FLAG_IS_SET (frame, SAT_VIDEO_FRAME_FLAG_TFF)) {
GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_TFF); GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_TFF);
} else { } else {
GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_TFF); GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_TFF);
@ -1047,7 +1047,7 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
GST_DEBUG ("pushing frame %" GST_TIME_FORMAT, GST_DEBUG ("pushing frame %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->presentation_timestamp)); GST_TIME_ARGS (frame->presentation_timestamp));
gst_base_video_decoder_set_src_caps (base_video_decoder); sat_base_video_decoder_set_src_caps (base_video_decoder);
if (base_video_decoder->sink_clipping) { if (base_video_decoder->sink_clipping) {
gint64 start = GST_BUFFER_TIMESTAMP (src_buffer); gint64 start = GST_BUFFER_TIMESTAMP (src_buffer);
@ -1079,35 +1079,35 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
GST_TIME_ARGS (base_video_decoder->segment.start), GST_TIME_ARGS (base_video_decoder->segment.start),
GST_TIME_ARGS (base_video_decoder->segment.stop), GST_TIME_ARGS (base_video_decoder->segment.stop),
GST_TIME_ARGS (base_video_decoder->segment.time)); GST_TIME_ARGS (base_video_decoder->segment.time));
gst_video_frame_unref (frame); sat_video_frame_unref (frame);
return GST_FLOW_OK; return GST_FLOW_OK;
} }
} }
gst_buffer_ref (src_buffer); gst_buffer_ref (src_buffer);
gst_video_frame_unref (frame); sat_video_frame_unref (frame);
if (base_video_decoder_class->shape_output) if (base_video_decoder_class->shape_output)
return base_video_decoder_class->shape_output (base_video_decoder, return base_video_decoder_class->shape_output (base_video_decoder,
src_buffer); src_buffer);
return gst_pad_push (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder), return gst_pad_push (SAT_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
src_buffer); src_buffer);
} }
void void
gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder, sat_base_video_decoder_skip_frame (SatBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame) SatVideoFrame * frame)
{ {
GstBaseVideoDecoderClass *base_video_decoder_class; SatBaseVideoDecoderClass *base_video_decoder_class;
GST_DEBUG ("skip frame"); GST_DEBUG ("skip frame");
base_video_decoder_class = base_video_decoder_class =
GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); SAT_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
GST_DEBUG ("skip frame sync=%d pts=%" GST_TIME_FORMAT, GST_DEBUG ("skip frame sync=%d pts=%" GST_TIME_FORMAT,
GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT), SAT_VIDEO_FRAME_FLAG_IS_SET (frame, SAT_VIDEO_FRAME_FLAG_SYNC_POINT),
GST_TIME_ARGS (frame->presentation_timestamp)); GST_TIME_ARGS (frame->presentation_timestamp));
if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) { if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
@ -1125,7 +1125,7 @@ gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder,
frame->presentation_timestamp = GST_CLOCK_TIME_NONE; frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
} }
} else { } else {
if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) { if (SAT_VIDEO_FRAME_FLAG_IS_SET (frame, SAT_VIDEO_FRAME_FLAG_SYNC_POINT)) {
GST_WARNING ("sync point doesn't have timestamp"); GST_WARNING ("sync point doesn't have timestamp");
if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) { if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
GST_WARNING GST_WARNING
@ -1141,16 +1141,16 @@ gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder,
if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) { if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
frame->presentation_timestamp = frame->presentation_timestamp =
gst_base_video_decoder_get_field_timestamp (base_video_decoder, sat_base_video_decoder_get_field_timestamp (base_video_decoder,
frame->field_index); frame->field_index);
frame->presentation_duration = GST_CLOCK_TIME_NONE; frame->presentation_duration = GST_CLOCK_TIME_NONE;
frame->decode_timestamp = frame->decode_timestamp =
gst_base_video_decoder_get_timestamp (base_video_decoder, sat_base_video_decoder_get_timestamp (base_video_decoder,
frame->decode_frame_number); frame->decode_frame_number);
} }
if (frame->presentation_duration == GST_CLOCK_TIME_NONE) { if (frame->presentation_duration == GST_CLOCK_TIME_NONE) {
frame->presentation_duration = frame->presentation_duration =
gst_base_video_decoder_get_field_duration (base_video_decoder, sat_base_video_decoder_get_field_duration (base_video_decoder,
frame->n_fields); frame->n_fields);
} }
@ -1159,29 +1159,29 @@ gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder,
GST_DEBUG ("skipping frame %" GST_TIME_FORMAT, GST_DEBUG ("skipping frame %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->presentation_timestamp)); GST_TIME_ARGS (frame->presentation_timestamp));
gst_video_frame_unref (frame); sat_video_frame_unref (frame);
} }
GstFlowReturn GstFlowReturn
gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder, sat_base_video_decoder_have_frame (SatBaseVideoDecoder * base_video_decoder,
GstVideoFrame ** new_frame) SatVideoFrame ** new_frame)
{ {
GstVideoFrame *frame = base_video_decoder->current_frame; SatVideoFrame *frame = base_video_decoder->current_frame;
GstBaseVideoDecoderClass *klass; SatBaseVideoDecoderClass *klass;
GstClockTime timestamp, duration; GstClockTime timestamp, duration;
GstClockTime running_time; GstClockTime running_time;
GstClockTimeDiff deadline; GstClockTimeDiff deadline;
GstFlowReturn ret; GstFlowReturn ret;
klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); klass = SAT_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
gst_base_video_decoder_get_timestamp_at_offset (base_video_decoder, sat_base_video_decoder_get_timestamp_at_offset (base_video_decoder,
base_video_decoder->frame_offset, &timestamp, &duration); base_video_decoder->frame_offset, &timestamp, &duration);
frame->presentation_duration = timestamp; frame->presentation_duration = timestamp;
frame->presentation_duration = duration; frame->presentation_duration = duration;
if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) if (SAT_VIDEO_FRAME_FLAG_IS_SET (frame, SAT_VIDEO_FRAME_FLAG_SYNC_POINT))
base_video_decoder->distance_from_sync = 0; base_video_decoder->distance_from_sync = 0;
frame->distance_from_sync = base_video_decoder->distance_from_sync; frame->distance_from_sync = base_video_decoder->distance_from_sync;
@ -1208,7 +1208,7 @@ gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder,
/* create new frame */ /* create new frame */
base_video_decoder->current_frame = base_video_decoder->current_frame =
gst_base_video_decoder_new_frame (base_video_decoder); sat_base_video_decoder_new_frame (base_video_decoder);
if (new_frame) if (new_frame)
*new_frame = base_video_decoder->current_frame; *new_frame = base_video_decoder->current_frame;
@ -1217,29 +1217,29 @@ gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder,
} }
void void
gst_base_video_decoder_frame_start (GstBaseVideoDecoder * base_video_decoder, sat_base_video_decoder_frame_start (SatBaseVideoDecoder * base_video_decoder,
GstBuffer * buf) GstBuffer * buf)
{ {
base_video_decoder->frame_offset = GST_BUFFER_OFFSET (buf); base_video_decoder->frame_offset = GST_BUFFER_OFFSET (buf);
} }
GstVideoState * SatVideoState *
gst_base_video_decoder_get_state (GstBaseVideoDecoder * base_video_decoder) sat_base_video_decoder_get_state (SatBaseVideoDecoder * base_video_decoder)
{ {
return &base_video_decoder->state; return &base_video_decoder->state;
} }
void void
gst_base_video_decoder_set_state (GstBaseVideoDecoder * base_video_decoder, sat_base_video_decoder_set_state (SatBaseVideoDecoder * base_video_decoder,
GstVideoState * state) SatVideoState * state)
{ {
memcpy (&base_video_decoder->state, state, sizeof (*state)); memcpy (&base_video_decoder->state, state, sizeof (*state));
} }
void void
gst_base_video_decoder_lost_sync (GstBaseVideoDecoder * base_video_decoder) sat_base_video_decoder_lost_sync (SatBaseVideoDecoder * base_video_decoder)
{ {
g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (base_video_decoder)); g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (base_video_decoder));
@ -1252,29 +1252,29 @@ gst_base_video_decoder_lost_sync (GstBaseVideoDecoder * base_video_decoder)
base_video_decoder->have_sync = FALSE; base_video_decoder->have_sync = FALSE;
} }
GstVideoFrame * SatVideoFrame *
gst_base_video_decoder_get_current_frame (GstBaseVideoDecoder * sat_base_video_decoder_get_current_frame (SatBaseVideoDecoder *
base_video_decoder) base_video_decoder)
{ {
return base_video_decoder->current_frame; return base_video_decoder->current_frame;
} }
void void
gst_base_video_decoder_update_src_caps (GstBaseVideoDecoder * sat_base_video_decoder_update_src_caps (SatBaseVideoDecoder *
base_video_decoder) base_video_decoder)
{ {
g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (base_video_decoder)); g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (base_video_decoder));
base_video_decoder->have_src_caps = FALSE; base_video_decoder->have_src_caps = FALSE;
gst_base_video_decoder_set_src_caps (base_video_decoder); sat_base_video_decoder_set_src_caps (base_video_decoder);
} }
/* GObject vmethod implementations */ /* GObject vmethod implementations */
static void static void
gst_base_video_decoder_get_property (GObject * object, guint property_id, sat_base_video_decoder_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec) GValue * value, GParamSpec * pspec)
{ {
GstBaseVideoDecoder *base_video_decoder = GST_BASE_VIDEO_DECODER (object); SatBaseVideoDecoder *base_video_decoder = SAT_BASE_VIDEO_DECODER (object);
switch (property_id) { switch (property_id) {
case PROP_PACKETIZED: case PROP_PACKETIZED:
@ -1290,10 +1290,10 @@ gst_base_video_decoder_get_property (GObject * object, guint property_id,
} }
static void static void
gst_base_video_decoder_set_property (GObject * object, guint property_id, sat_base_video_decoder_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec) const GValue * value, GParamSpec * pspec)
{ {
GstBaseVideoDecoder *base_video_decoder = GST_BASE_VIDEO_DECODER (object); SatBaseVideoDecoder *base_video_decoder = SAT_BASE_VIDEO_DECODER (object);
switch (property_id) { switch (property_id) {
case PROP_PACKETIZED: case PROP_PACKETIZED:
@ -1309,16 +1309,16 @@ gst_base_video_decoder_set_property (GObject * object, guint property_id,
} }
static void static void
gst_base_video_decoder_finalize (GObject * object) sat_base_video_decoder_finalize (GObject * object)
{ {
GstBaseVideoDecoder *base_video_decoder; SatBaseVideoDecoder *base_video_decoder;
GstBaseVideoDecoderClass *base_video_decoder_class; SatBaseVideoDecoderClass *base_video_decoder_class;
g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (object)); g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (object));
base_video_decoder = GST_BASE_VIDEO_DECODER (object); base_video_decoder = SAT_BASE_VIDEO_DECODER (object);
base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (object); base_video_decoder_class = SAT_BASE_VIDEO_DECODER_GET_CLASS (object);
gst_base_video_decoder_reset (base_video_decoder); sat_base_video_decoder_reset (base_video_decoder);
if (base_video_decoder->input_adapter) { if (base_video_decoder->input_adapter) {
g_object_unref (base_video_decoder->input_adapter); g_object_unref (base_video_decoder->input_adapter);
@ -1331,14 +1331,14 @@ gst_base_video_decoder_finalize (GObject * object)
} }
static void static void
gst_base_video_decoder_base_init (gpointer g_class) sat_base_video_decoder_base_init (gpointer g_class)
{ {
GST_DEBUG_CATEGORY_INIT (basevideodecoder_debug, "basevideodecoder", 0, GST_DEBUG_CATEGORY_INIT (basevideodecoder_debug, "basevideodecoder", 0,
"Base Video Decoder"); "Base Video Decoder");
} }
static void static void
gst_base_video_decoder_class_init (GstBaseVideoDecoderClass * klass) sat_base_video_decoder_class_init (SatBaseVideoDecoderClass * klass)
{ {
GObjectClass *gobject_class; GObjectClass *gobject_class;
GstElementClass *gstelement_class; GstElementClass *gstelement_class;
@ -1346,9 +1346,9 @@ gst_base_video_decoder_class_init (GstBaseVideoDecoderClass * klass)
gobject_class = G_OBJECT_CLASS (klass); gobject_class = G_OBJECT_CLASS (klass);
gstelement_class = GST_ELEMENT_CLASS (klass); gstelement_class = GST_ELEMENT_CLASS (klass);
gobject_class->finalize = gst_base_video_decoder_finalize; gobject_class->finalize = sat_base_video_decoder_finalize;
gobject_class->get_property = gst_base_video_decoder_get_property; gobject_class->get_property = sat_base_video_decoder_get_property;
gobject_class->set_property = gst_base_video_decoder_set_property; gobject_class->set_property = sat_base_video_decoder_set_property;
g_object_class_install_property (gobject_class, PROP_PACKETIZED, g_object_class_install_property (gobject_class, PROP_PACKETIZED,
g_param_spec_boolean ("packetized", "Packetized", g_param_spec_boolean ("packetized", "Packetized",
@ -1357,22 +1357,22 @@ gst_base_video_decoder_class_init (GstBaseVideoDecoderClass * klass)
g_object_class_install_property (gobject_class, PROP_PACKETIZED, g_object_class_install_property (gobject_class, PROP_PACKETIZED,
g_param_spec_boolean ("sink-clipping", "Sink Clipping", g_param_spec_boolean ("sink-clipping", "Sink Clipping",
"If enabled GstBaseVideoDecoder will clip outgoing frames", FALSE, "If enabled SatBaseVideoDecoder will clip outgoing frames", FALSE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gstelement_class->change_state = gst_base_video_decoder_change_state; gstelement_class->change_state = sat_base_video_decoder_change_state;
parent_class = g_type_class_peek_parent (klass); parent_class = g_type_class_peek_parent (klass);
} }
static void static void
gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder, sat_base_video_decoder_init (SatBaseVideoDecoder * base_video_decoder,
GstBaseVideoDecoderClass * base_video_decoder_class) SatBaseVideoDecoderClass * base_video_decoder_class)
{ {
GstPadTemplate *pad_template; GstPadTemplate *pad_template;
GstPad *pad; GstPad *pad;
GST_DEBUG ("gst_base_video_decoder_init"); GST_DEBUG ("sat_base_video_decoder_init");
pad_template = pad_template =
gst_element_class_get_pad_template (GST_ELEMENT_CLASS gst_element_class_get_pad_template (GST_ELEMENT_CLASS
@ -1383,10 +1383,10 @@ gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder,
gst_pad_new_from_template (pad_template, "sink"); gst_pad_new_from_template (pad_template, "sink");
gst_element_add_pad (GST_ELEMENT (base_video_decoder), pad); gst_element_add_pad (GST_ELEMENT (base_video_decoder), pad);
gst_pad_set_chain_function (pad, gst_base_video_decoder_chain); gst_pad_set_chain_function (pad, sat_base_video_decoder_chain);
gst_pad_set_event_function (pad, gst_base_video_decoder_sink_event); gst_pad_set_event_function (pad, sat_base_video_decoder_sink_event);
gst_pad_set_setcaps_function (pad, gst_base_video_decoder_sink_setcaps); gst_pad_set_setcaps_function (pad, sat_base_video_decoder_sink_setcaps);
gst_pad_set_query_function (pad, gst_base_video_decoder_sink_query); gst_pad_set_query_function (pad, sat_base_video_decoder_sink_query);
if (base_video_decoder_class->create_srcpad) { if (base_video_decoder_class->create_srcpad) {
base_video_decoder->srcpad = pad = base_video_decoder->srcpad = pad =
@ -1403,9 +1403,9 @@ gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder,
} }
gst_element_add_pad (GST_ELEMENT (base_video_decoder), pad); gst_element_add_pad (GST_ELEMENT (base_video_decoder), pad);
gst_pad_set_event_function (pad, gst_base_video_decoder_src_event); gst_pad_set_event_function (pad, sat_base_video_decoder_src_event);
gst_pad_set_query_type_function (pad, gst_base_video_decoder_get_query_types); gst_pad_set_query_type_function (pad, sat_base_video_decoder_get_query_types);
gst_pad_set_query_function (pad, gst_base_video_decoder_src_query); gst_pad_set_query_function (pad, sat_base_video_decoder_src_query);
gst_pad_use_fixed_caps (pad); gst_pad_use_fixed_caps (pad);
base_video_decoder->input_adapter = gst_adapter_new (); base_video_decoder->input_adapter = gst_adapter_new ();
@ -1413,7 +1413,7 @@ gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder,
gst_segment_init (&base_video_decoder->segment, GST_FORMAT_TIME); gst_segment_init (&base_video_decoder->segment, GST_FORMAT_TIME);
base_video_decoder->current_frame = base_video_decoder->current_frame =
gst_base_video_decoder_new_frame (base_video_decoder); sat_base_video_decoder_new_frame (base_video_decoder);
/* properties */ /* properties */
base_video_decoder->packetized = FALSE; base_video_decoder->packetized = FALSE;
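
The remaining changed files of this commit are not shown above; for code that subclasses or calls this base class, the rename amounts to prefix changes only, as in this illustrative (hypothetical) before/after of a subclass vfunc:

/* before this commit */
static GstFlowReturn
my_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  return gst_base_video_decoder_finish_frame (base_video_decoder, frame);
}

/* after this commit: only the type and function prefixes change */
static GstFlowReturn
my_dec_handle_frame (SatBaseVideoDecoder * base_video_decoder,
    SatVideoFrame * frame, GstClockTimeDiff deadline)
{
  return sat_base_video_decoder_finish_frame (base_video_decoder, frame);
}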


@ -0,0 +1,201 @@
/* GStreamer
* Copyright (C) 2008 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _SAT_BASE_VIDEO_DECODER_H_
#define _SAT_BASE_VIDEO_DECODER_H_
#include "satbasevideoutils.h"
#include "satvideoframe.h"
G_BEGIN_DECLS
#define SAT_TYPE_BASE_VIDEO_DECODER \
(sat_base_video_decoder_get_type())
#define SAT_BASE_VIDEO_DECODER(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),SAT_TYPE_BASE_VIDEO_DECODER,SatBaseVideoDecoder))
#define SAT_BASE_VIDEO_DECODER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),SAT_TYPE_BASE_VIDEO_DECODER,SatBaseVideoDecoderClass))
#define SAT_BASE_VIDEO_DECODER_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj),SAT_TYPE_BASE_VIDEO_DECODER,SatBaseVideoDecoderClass))
#define GST_IS_BASE_VIDEO_DECODER(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),SAT_TYPE_BASE_VIDEO_DECODER))
#define GST_IS_BASE_VIDEO_DECODER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),SAT_TYPE_BASE_VIDEO_DECODER))
/**
* SAT_BASE_VIDEO_DECODER_SINK_NAME:
*
* The name of the template for the sink pad.
*/
#define SAT_BASE_VIDEO_DECODER_SINK_NAME "sink"
/**
* SAT_BASE_VIDEO_DECODER_SRC_NAME:
*
* The name of the template for the source pad.
*/
#define SAT_BASE_VIDEO_DECODER_SRC_NAME "src"
/**
* SAT_BASE_VIDEO_DECODER_SRC_PAD:
* @obj: base video decoder instance
*
* Gives the pointer to the source #GstPad object of the element.
*/
#define SAT_BASE_VIDEO_DECODER_SRC_PAD(obj) (((SatBaseVideoDecoder *) (obj))->srcpad)
/**
* SAT_BASE_VIDEO_DECODER_SINK_PAD:
* @obj: base video decoder instance
*
* Gives the pointer to the sink #GstPad object of the element.
*/
#define SAT_BASE_VIDEO_DECODER_SINK_PAD(obj) (((SatBaseVideoDecoder *) (obj))->sinkpad)
/**
* SAT_BASE_VIDEO_DECODER_FLOW_NEED_DATA:
*
*/
#define SAT_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
typedef enum _SatBaseVideoDecoderScanResult SatBaseVideoDecoderScanResult;
enum _SatBaseVideoDecoderScanResult
{
SAT_BASE_VIDEO_DECODER_SCAN_RESULT_OK,
SAT_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC,
SAT_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA
};
typedef struct _SatBaseVideoDecoder SatBaseVideoDecoder;
typedef struct _SatBaseVideoDecoderClass SatBaseVideoDecoderClass;
struct _SatBaseVideoDecoder
{
GstElement element;
/*< private >*/
GstPad *sinkpad;
GstPad *srcpad;
GstAdapter *input_adapter;
gboolean have_sync;
gboolean discont;
SatVideoState state;
GstSegment segment;
guint64 presentation_frame_number;
guint64 system_frame_number;
GstCaps *caps;
gboolean have_src_caps;
SatVideoFrame *current_frame;
gint distance_from_sync;
gint reorder_depth;
GstClockTime buffer_timestamp;
GstClockTime timestamp_offset;
gdouble proportion;
GstClockTime earliest_time;
guint64 input_offset;
guint64 frame_offset;
GstClockTime last_timestamp;
guint64 base_picture_number;
gint field_index;
gboolean is_delta_unit;
GList *timestamps;
gboolean have_segment;
/* properties */
gboolean sink_clipping;
gboolean packetized;
};
struct _SatBaseVideoDecoderClass
{
GstElementClass element_class;
gboolean (*start) (SatBaseVideoDecoder *coder);
gboolean (*stop) (SatBaseVideoDecoder *coder);
gboolean (*flush) (SatBaseVideoDecoder *coder);
gboolean (*set_sink_caps) (SatBaseVideoDecoder *base_video_decoder,
GstCaps *caps);
GstPad *(*create_srcpad) (SatBaseVideoDecoder * base_video_decoder,
SatBaseVideoDecoderClass *base_video_decoder_class);
gint (*scan_for_sync) (SatBaseVideoDecoder *coder, GstAdapter *adapter);
SatBaseVideoDecoderScanResult (*scan_for_packet_end)
(SatBaseVideoDecoder *coder, GstAdapter *adapter, guint *size, gboolean at_eos);
GstFlowReturn (*parse_data) (SatBaseVideoDecoder *decoder,
GstBuffer *buf, gboolean at_eos);
SatVideoFrame *(*create_frame) (SatBaseVideoDecoder *coder);
GstFlowReturn (*handle_frame) (SatBaseVideoDecoder *coder, SatVideoFrame *frame,
GstClockTimeDiff deadline);
GstFlowReturn (*shape_output) (SatBaseVideoDecoder *coder,
GstBuffer *buf);
};
GType sat_base_video_decoder_get_type (void);
SatVideoFrame *sat_base_video_decoder_get_current_frame (SatBaseVideoDecoder
*base_video_decoder);
GstFlowReturn sat_base_video_decoder_finish_frame (SatBaseVideoDecoder *base_video_decoder,
SatVideoFrame *frame);
void sat_base_video_decoder_skip_frame (SatBaseVideoDecoder * base_video_decoder,
SatVideoFrame * frame);
void
sat_base_video_decoder_frame_start (SatBaseVideoDecoder *base_video_decoder,
GstBuffer *buf);
GstFlowReturn
sat_base_video_decoder_have_frame (SatBaseVideoDecoder *base_video_decoder,
SatVideoFrame **new_frame);
SatVideoState * sat_base_video_decoder_get_state (SatBaseVideoDecoder *base_video_decoder);
void sat_base_video_decoder_set_state (SatBaseVideoDecoder *base_video_decoder,
SatVideoState *state);
void sat_base_video_decoder_lost_sync (SatBaseVideoDecoder *base_video_decoder);
void sat_base_video_decoder_update_src_caps (SatBaseVideoDecoder *base_video_decoder);
G_END_DECLS
#endif
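
The class structure above is the whole contract a hardware decoder has to fill in. Below is a minimal sketch of how a subclass is wired against it; the MyDec names and the function bodies are hypothetical, only the vfunc names, the signatures and sat_base_video_decoder_finish_frame() come from this header.

/* Hypothetical subclass skeleton; MyDec / my_dec_* are placeholder names. */
#include "satbasevideodecoder.h"

typedef struct
{
  SatBaseVideoDecoderClass parent_class;
} MyDecClass;

static gboolean
my_dec_start (SatBaseVideoDecoder * decoder)
{
  /* allocate per-stream state here */
  return TRUE;
}

static GstFlowReturn
my_dec_handle_frame (SatBaseVideoDecoder * decoder, SatVideoFrame * frame,
    GstClockTimeDiff deadline)
{
  /* decode frame->sink_buffer, attach the result as frame->src_buffer,
   * then hand the frame back to the base class */
  return sat_base_video_decoder_finish_frame (decoder, frame);
}

static void
my_dec_class_init (MyDecClass * klass)
{
  SatBaseVideoDecoderClass *base_class = SAT_BASE_VIDEO_DECODER_CLASS (klass);

  base_class->start = my_dec_start;
  base_class->handle_frame = my_dec_handle_frame;
  /* scan_for_sync / scan_for_packet_end / parse_data only matter when the
   * input is not packetized */
}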

View file

@ -17,8 +17,8 @@
* Boston, MA 02111-1307, USA. * Boston, MA 02111-1307, USA.
*/ */
#ifndef _GST_BASE_VIDEO_UTILS_H_ #ifndef _SAT_BASE_VIDEO_UTILS_H_
#define _GST_BASE_VIDEO_UTILS_H_ #define _SAT_BASE_VIDEO_UTILS_H_
#define GST_USE_UNSTABLE_API 1 #define GST_USE_UNSTABLE_API 1
@ -33,9 +33,9 @@
G_BEGIN_DECLS G_BEGIN_DECLS
typedef struct _GstVideoState GstVideoState; typedef struct _SatVideoState SatVideoState;
struct _GstVideoState struct _SatVideoState
{ {
gint width, height; gint width, height;
gint fps_n, fps_d; gint fps_n, fps_d;
@ -52,4 +52,4 @@ struct _GstVideoState
}; };
#endif /* _GST_BASE_VIDEO_UTILS_H_ */ #endif /* _SAT_BASE_VIDEO_UTILS_H_ */

View file

@ -22,40 +22,40 @@
#include "config.h" #include "config.h"
#endif #endif
#include "gstvideoframe.h" #include "satvideoframe.h"
GST_DEBUG_CATEGORY_STATIC (gst_video_frame_debug); GST_DEBUG_CATEGORY_STATIC (sat_video_frame_debug);
#define GST_CAT_DEFAULT gst_video_frame_debug #define GST_CAT_DEFAULT sat_video_frame_debug
#define DEBUG_INIT(bla) \ #define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_video_frame_debug, "gstvideoframe", 0, "Video Frame"); GST_DEBUG_CATEGORY_INIT (sat_video_frame_debug, "satvideoframe", 0, "Video Frame");
GstVideoFrame * SatVideoFrame *
gst_video_frame_new (void) sat_video_frame_new (void)
{ {
GstVideoFrame *frame; SatVideoFrame *frame;
frame = (GstVideoFrame *) gst_mini_object_new (GST_TYPE_VIDEO_FRAME); frame = (SatVideoFrame *) gst_mini_object_new (SAT_TYPE_VIDEO_FRAME);
return frame; return frame;
} }
static GObjectClass *gst_video_frame_parent_class; static GObjectClass *sat_video_frame_parent_class;
static void static void
gst_video_frame_finalize (GstVideoFrame * frame) sat_video_frame_finalize (SatVideoFrame * frame)
{ {
if (frame->sink_buffer) if (frame->sink_buffer)
gst_buffer_unref (frame->sink_buffer); gst_buffer_unref (frame->sink_buffer);
if (frame->src_buffer) if (frame->src_buffer)
gst_buffer_unref (frame->src_buffer); gst_buffer_unref (frame->src_buffer);
GST_MINI_OBJECT_CLASS (gst_video_frame_parent_class)->finalize GST_MINI_OBJECT_CLASS (sat_video_frame_parent_class)->finalize
(GST_MINI_OBJECT (frame)); (GST_MINI_OBJECT (frame));
} }
static void static void
gst_video_frame_init (GstVideoFrame * frame, gpointer g_class) sat_video_frame_init (SatVideoFrame * frame, gpointer g_class)
{ {
frame->decode_timestamp = GST_CLOCK_TIME_NONE; frame->decode_timestamp = GST_CLOCK_TIME_NONE;
frame->presentation_timestamp = GST_CLOCK_TIME_NONE; frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
@ -67,39 +67,39 @@ gst_video_frame_init (GstVideoFrame * frame, gpointer g_class)
} }
static void static void
gst_video_frame_class_init (gpointer g_class, gpointer class_data) sat_video_frame_class_init (gpointer g_class, gpointer class_data)
{ {
GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class); GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
gst_video_frame_parent_class = g_type_class_peek_parent (g_class); sat_video_frame_parent_class = g_type_class_peek_parent (g_class);
mini_object_class->finalize = (GstMiniObjectFinalizeFunction) mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
gst_video_frame_finalize; sat_video_frame_finalize;
} }
GType GType
gst_video_frame_get_type (void) sat_video_frame_get_type (void)
{ {
static GType _gst_video_frame_type = 0; static GType _sat_video_frame_type = 0;
if (G_UNLIKELY (_gst_video_frame_type == 0)) { if (G_UNLIKELY (_sat_video_frame_type == 0)) {
static const GTypeInfo info = { static const GTypeInfo info = {
sizeof (GstVideoFrameClass), sizeof (SatVideoFrameClass),
NULL, NULL,
NULL, NULL,
gst_video_frame_class_init, sat_video_frame_class_init,
NULL, NULL,
NULL, NULL,
sizeof (GstVideoFrame), sizeof (SatVideoFrame),
0, 0,
(GInstanceInitFunc) gst_video_frame_init, (GInstanceInitFunc) sat_video_frame_init,
NULL NULL
}; };
_gst_video_frame_type = g_type_register_static (GST_TYPE_MINI_OBJECT, _sat_video_frame_type = g_type_register_static (GST_TYPE_MINI_OBJECT,
"GstVideoFrame", &info, 0); "SatVideoFrame", &info, 0);
DEBUG_INIT (); DEBUG_INIT ();
} }
return _gst_video_frame_type; return _sat_video_frame_type;
} }
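
sat_video_frame_get_type() registers the frame as a plain GstMiniObject type, so codec-specific frame types can derive from it. A sketch of such a derived registration follows; MyCodecFrame is a hypothetical name, the real examples are the GstVdpH264Frame and GstVdpMpegFrame hunks further down.

/* Hypothetical derived frame type; mirrors the pattern used by
 * GstVdpH264Frame and GstVdpMpegFrame later in this commit. */
#include "satvideoframe.h"

typedef struct
{
  SatVideoFrame video_frame;    /* parent instance must come first */
  /* codec-specific fields go here */
} MyCodecFrame;

typedef struct
{
  SatVideoFrameClass video_frame_class;
} MyCodecFrameClass;

GType
my_codec_frame_get_type (void)
{
  static GType type = 0;

  if (G_UNLIKELY (type == 0)) {
    static const GTypeInfo info = {
      sizeof (MyCodecFrameClass), NULL, NULL,
      NULL,                      /* class_init */
      NULL, NULL,
      sizeof (MyCodecFrame), 0,
      NULL,                      /* instance_init */
      NULL
    };
    type = g_type_register_static (SAT_TYPE_VIDEO_FRAME,
        "MyCodecFrame", &info, 0);
  }
  return type;
}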

View file

@ -0,0 +1,156 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _SAT_VIDEO_FRAME_H_
#define _SAT_VIDEO_FRAME_H_
#include <gst/gst.h>
#define SAT_TYPE_VIDEO_FRAME (sat_video_frame_get_type())
#define GST_IS_VIDEO_FRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), SAT_TYPE_VIDEO_FRAME))
#define SAT_VIDEO_FRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), SAT_TYPE_VIDEO_FRAME, SatVideoFrame))
#define SAT_VIDEO_FRAME_CAST(obj) ((SatVideoFrame *)obj)
/**
* SatVideoFrameFlag:
* @SAT_VIDEO_FRAME_FLAG_PREROLL: the frame is part of a preroll and should not be
* displayed.
* @SAT_VIDEO_FRAME_FLAG_DISCONT: the frame marks a discontinuity in the stream.
* This typically occurs after a seek or a dropped buffer from a live or
* network source.
* @SAT_VIDEO_FRAME_FLAG_GAP: the frame has been created to fill a gap in the
* stream and contains media-neutral data (elements can switch to an optimized
* code path that ignores the buffer content).
* @SAT_VIDEO_FRAME_FLAG_KEYFRAME: the frame is a keyframe.
* @SAT_VIDEO_FRAME_FLAG_SYNC_POINT: the frame marks a sync point.
* @SAT_VIDEO_FRAME_FLAG_EOS: the frame is the last in the stream.
* @SAT_VIDEO_FRAME_FLAG_TFF: If the frame is interlaced, then the first
* field in the video frame is the top field. If unset, the bottom field is first.
* @SAT_VIDEO_FRAME_FLAG_LAST: additional flags can be added starting from this flag.
*
* A set of frame flags used to describe properties of a #SatVideoFrame.
*/
typedef enum
{
SAT_VIDEO_FRAME_FLAG_PREROLL = (GST_MINI_OBJECT_FLAG_LAST << 0),
SAT_VIDEO_FRAME_FLAG_DISCONT = (GST_MINI_OBJECT_FLAG_LAST << 1),
SAT_VIDEO_FRAME_FLAG_GAP = (GST_MINI_OBJECT_FLAG_LAST << 2),
SAT_VIDEO_FRAME_FLAG_KEYFRAME = (GST_MINI_OBJECT_FLAG_LAST << 3),
SAT_VIDEO_FRAME_FLAG_SYNC_POINT = (GST_MINI_OBJECT_FLAG_LAST << 4),
SAT_VIDEO_FRAME_FLAG_EOS = (GST_MINI_OBJECT_FLAG_LAST << 5),
SAT_VIDEO_FRAME_FLAG_TFF = (GST_MINI_OBJECT_FLAG_LAST << 6),
SAT_VIDEO_FRAME_FLAG_LAST = (GST_MINI_OBJECT_FLAG_LAST << 7)
} SatVideoFrameFlag;
typedef struct _SatVideoFrame SatVideoFrame;
typedef struct _SatVideoFrameClass SatVideoFrameClass;
struct _SatVideoFrame
{
GstMiniObject mini_object;
GstClockTime decode_timestamp;
GstClockTime presentation_timestamp;
GstClockTime presentation_duration;
gint system_frame_number;
gint decode_frame_number;
gint presentation_frame_number;
gint distance_from_sync;
GstBuffer *sink_buffer;
GstBuffer *src_buffer;
gint field_index;
gint n_fields;
};
struct _SatVideoFrameClass
{
GstMiniObjectClass mini_object_class;
};
/* refcounting */
/**
* sat_video_frame_ref:
* @frame: a #SatVideoFrame.
*
* Increases the refcount of the given frame by one.
*
* Returns: @frame
*/
#ifdef _FOOL_GTK_DOC_
G_INLINE_FUNC SatVideoFrame * sat_video_frame_ref (SatVideoFrame * frame);
#endif
static inline SatVideoFrame *
sat_video_frame_ref (SatVideoFrame *frame)
{
return (SatVideoFrame *) gst_mini_object_ref (GST_MINI_OBJECT_CAST (frame));
}
/**
* sat_video_frame_unref:
* @frame: a #SatVideoFrame.
*
* Decreases the refcount of the frame. If the refcount reaches 0, the frame
* will be freed.
*/
#ifdef _FOOL_GTK_DOC_
G_INLINE_FUNC void sat_video_frame_unref (SatVideoFrame * frame);
#endif
static inline void
sat_video_frame_unref (SatVideoFrame * frame)
{
gst_mini_object_unref (GST_MINI_OBJECT_CAST (frame));
}
/**
* SAT_VIDEO_FRAME_FLAG_IS_SET:
* @frame: a #SatVideoFrame.
* @flag: the #SatVideoFrameFlag to check.
*
* Gives the status of a specific flag on a video frame.
*/
#define SAT_VIDEO_FRAME_FLAG_IS_SET(frame,flag) GST_MINI_OBJECT_FLAG_IS_SET (frame, flag)
/**
* SAT_VIDEO_FRAME_FLAG_SET:
* @frame: a #SatVideoFrame.
* @flag: the #SatVideoFrameFlag to set.
*
* Sets a frame flag on a video frame.
*/
#define SAT_VIDEO_FRAME_FLAG_SET(frame,flag) GST_MINI_OBJECT_FLAG_SET (frame, flag)
/**
* SAT_VIDEO_FRAME_FLAG_UNSET:
* @frame: a #SatVideoFrame.
* @flag: the #SatVideoFrameFlag to clear.
*
* Clears a frame flag.
*/
#define SAT_VIDEO_FRAME_FLAG_UNSET(frame,flag) GST_MINI_OBJECT_FLAG_UNSET (frame, flag)
SatVideoFrame *sat_video_frame_new (void);
GType sat_video_frame_get_type (void);
#endif
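
A short usage sketch of the API declared above (not taken from the patch): allocate a frame, tag it with one of the flags, and drop the reference again, which lets sat_video_frame_finalize() release the attached buffer.

/* Minimal usage sketch of SatVideoFrame; frame_flag_example is a
 * hypothetical function name. */
#include "satvideoframe.h"

static void
frame_flag_example (GstBuffer * input)
{
  SatVideoFrame *frame = sat_video_frame_new ();

  frame->sink_buffer = gst_buffer_ref (input);
  SAT_VIDEO_FRAME_FLAG_SET (frame, SAT_VIDEO_FRAME_FLAG_KEYFRAME);

  if (SAT_VIDEO_FRAME_FLAG_IS_SET (frame, SAT_VIDEO_FRAME_FLAG_KEYFRAME))
    GST_DEBUG ("frame is a keyframe");

  /* drops the last reference; the finalize handler unrefs sink_buffer */
  sat_video_frame_unref (frame);
}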

View file

@ -49,7 +49,7 @@ gst_h264_dpb_fill_reference_frames (GstH264DPB * dpb,
GstVdpH264Frame *frame = frames[i]; GstVdpH264Frame *frame = frames[i];
reference_frames[i].surface = reference_frames[i].surface =
GST_VDP_VIDEO_BUFFER (GST_VIDEO_FRAME_CAST (frame)->src_buffer)-> GST_VDP_VIDEO_BUFFER (SAT_VIDEO_FRAME_CAST (frame)->src_buffer)->
surface; surface;
reference_frames[i].is_long_term = frame->is_long_term; reference_frames[i].is_long_term = frame->is_long_term;
@ -74,7 +74,7 @@ gst_h264_dpb_remove (GstH264DPB * dpb, guint idx)
guint i; guint i;
frames = dpb->frames; frames = dpb->frames;
gst_video_frame_unref (GST_VIDEO_FRAME_CAST (frames[idx])); sat_video_frame_unref (SAT_VIDEO_FRAME_CAST (frames[idx]));
dpb->n_frames--; dpb->n_frames--;
for (i = idx; i < dpb->n_frames; i++) for (i = idx; i < dpb->n_frames; i++)
@ -86,7 +86,7 @@ gst_h264_dpb_output (GstH264DPB * dpb, guint idx)
{ {
GstVdpH264Frame *frame = dpb->frames[idx]; GstVdpH264Frame *frame = dpb->frames[idx];
gst_video_frame_ref (GST_VIDEO_FRAME_CAST (frame)); sat_video_frame_ref (SAT_VIDEO_FRAME_CAST (frame));
dpb->output (dpb, frame); dpb->output (dpb, frame);
frame->output_needed = FALSE; frame->output_needed = FALSE;
@ -160,7 +160,7 @@ gst_h264_dpb_add (GstH264DPB * dpb, GstVdpH264Frame * h264_frame)
void void
gst_h264_dpb_flush (GstH264DPB * dpb, gboolean output) gst_h264_dpb_flush (GstH264DPB * dpb, gboolean output)
{ {
GstVideoFrame **frames; SatVideoFrame **frames;
guint i; guint i;
GST_DEBUG ("flush"); GST_DEBUG ("flush");
@ -168,9 +168,9 @@ gst_h264_dpb_flush (GstH264DPB * dpb, gboolean output)
if (output) if (output)
while (gst_h264_dpb_bump (dpb, G_MAXUINT)); while (gst_h264_dpb_bump (dpb, G_MAXUINT));
frames = (GstVideoFrame **) dpb->frames; frames = (SatVideoFrame **) dpb->frames;
for (i = 0; i < dpb->n_frames; i++) for (i = 0; i < dpb->n_frames; i++)
gst_video_frame_unref (frames[i]); sat_video_frame_unref (frames[i]);
dpb->n_frames = 0; dpb->n_frames = 0;
@ -183,6 +183,8 @@ gst_h264_dpb_mark_sliding (GstH264DPB * dpb)
guint i; guint i;
gint mark_idx = -1; gint mark_idx = -1;
GST_DEBUG ("mark_sliding");
if (dpb->n_frames != dpb->max_frames) if (dpb->n_frames != dpb->max_frames)
return; return;
@ -215,6 +217,8 @@ gst_h264_dpb_mark_long_term (GstH264DPB * dpb, guint16 pic_num,
guint i; guint i;
gint mark_idx = -1; gint mark_idx = -1;
GST_DEBUG ("mark_long_term");
frames = dpb->frames; frames = dpb->frames;
for (i = 0; i < dpb->n_frames; i++) { for (i = 0; i < dpb->n_frames; i++) {
if (frames[i]->is_reference && !frames[i]->is_long_term && if (frames[i]->is_reference && !frames[i]->is_long_term &&
@ -237,6 +241,8 @@ gst_h264_dpb_mark_short_term_unused (GstH264DPB * dpb, guint16 pic_num)
guint i; guint i;
gint mark_idx = -1; gint mark_idx = -1;
GST_DEBUG ("mark_short_term_unused");
frames = dpb->frames; frames = dpb->frames;
for (i = 0; i < dpb->n_frames; i++) { for (i = 0; i < dpb->n_frames; i++) {
if (frames[i]->is_reference && !frames[i]->is_long_term && if (frames[i]->is_reference && !frames[i]->is_long_term &&
@ -261,6 +267,8 @@ gst_h264_dpb_mark_long_term_unused (GstH264DPB * dpb, guint16 long_term_pic_num)
guint i; guint i;
gint mark_idx = -1; gint mark_idx = -1;
GST_DEBUG ("mark_long_term_unused");
frames = dpb->frames; frames = dpb->frames;
for (i = 0; i < dpb->n_frames; i++) { for (i = 0; i < dpb->n_frames; i++) {
if (frames[i]->is_reference && frames[i]->is_long_term && if (frames[i]->is_reference && frames[i]->is_long_term &&
@ -284,6 +292,8 @@ gst_h264_dpb_mark_all_unused (GstH264DPB * dpb)
GstVdpH264Frame **frames; GstVdpH264Frame **frames;
guint i; guint i;
GST_DEBUG ("mark_all_unused");
frames = dpb->frames; frames = dpb->frames;
for (i = 0; i < dpb->n_frames; i++) { for (i = 0; i < dpb->n_frames; i++) {
frames[i]->is_reference = FALSE; frames[i]->is_reference = FALSE;
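
The ref/unref changes above appear to follow a single ownership rule: the DPB keeps one reference per stored frame, while the output callback (which ends up in sat_base_video_decoder_finish_frame(), see gst_vdp_h264_dec_output below) consumes a reference of its own. A hedged sketch of that convention, using a hypothetical helper name:

/* Hypothetical helper (not part of the patch) illustrating the assumed
 * ownership rule behind gst_h264_dpb_output() and gst_h264_dpb_remove(). */
#include "gsth264dpb.h"

static void
dpb_output_and_remove (GstH264DPB * dpb, guint idx)
{
  GstVdpH264Frame *frame = dpb->frames[idx];

  /* give the output callback its own reference; it is consumed by
   * sat_base_video_decoder_finish_frame() */
  sat_video_frame_ref (SAT_VIDEO_FRAME_CAST (frame));
  dpb->output (dpb, frame);

  /* drop the DPB's reference when the slot is cleared */
  gst_h264_dpb_remove (dpb, idx);
}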

View file

@ -36,7 +36,7 @@ GST_DEBUG_CATEGORY_STATIC (gst_vdp_h264_dec_debug);
#define GST_CAT_DEFAULT gst_vdp_h264_dec_debug #define GST_CAT_DEFAULT gst_vdp_h264_dec_debug
static GstStaticPadTemplate sink_template = static GstStaticPadTemplate sink_template =
GST_STATIC_PAD_TEMPLATE (GST_BASE_VIDEO_DECODER_SINK_NAME, GST_STATIC_PAD_TEMPLATE (SAT_BASE_VIDEO_DECODER_SINK_NAME,
GST_PAD_SINK, GST_PAD_SINK,
GST_PAD_ALWAYS, GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-h264, " "interlaced = (boolean) false") GST_STATIC_CAPS ("video/x-h264, " "interlaced = (boolean) false")
@ -46,8 +46,8 @@ GST_STATIC_PAD_TEMPLATE (GST_BASE_VIDEO_DECODER_SINK_NAME,
GST_DEBUG_CATEGORY_INIT (gst_vdp_h264_dec_debug, "vdpauh264dec", 0, \ GST_DEBUG_CATEGORY_INIT (gst_vdp_h264_dec_debug, "vdpauh264dec", 0, \
"VDPAU h264 decoder"); "VDPAU h264 decoder");
GST_BOILERPLATE_FULL (GstVdpH264Dec, gst_vdp_h264_dec, GstBaseVideoDecoder, GST_BOILERPLATE_FULL (GstVdpH264Dec, gst_vdp_h264_dec, SatBaseVideoDecoder,
GST_TYPE_BASE_VIDEO_DECODER, DEBUG_INIT); SAT_TYPE_BASE_VIDEO_DECODER, DEBUG_INIT);
#define SYNC_CODE_SIZE 3 #define SYNC_CODE_SIZE 3
@ -79,7 +79,7 @@ gst_vdp_h264_dec_alloc_buffer (GstVdpH264Dec * h264_dec,
GstVdpVideoSrcPad *vdp_pad; GstVdpVideoSrcPad *vdp_pad;
GstFlowReturn ret = GST_FLOW_OK; GstFlowReturn ret = GST_FLOW_OK;
vdp_pad = (GstVdpVideoSrcPad *) GST_BASE_VIDEO_DECODER_SRC_PAD (h264_dec); vdp_pad = (GstVdpVideoSrcPad *) SAT_BASE_VIDEO_DECODER_SRC_PAD (h264_dec);
ret = gst_vdp_video_src_pad_alloc_buffer (vdp_pad, outbuf); ret = gst_vdp_video_src_pad_alloc_buffer (vdp_pad, outbuf);
if (ret != GST_FLOW_OK) if (ret != GST_FLOW_OK)
return ret; return ret;
@ -88,7 +88,7 @@ gst_vdp_h264_dec_alloc_buffer (GstVdpH264Dec * h264_dec,
} }
static gboolean static gboolean
gst_vdp_h264_dec_set_sink_caps (GstBaseVideoDecoder * base_video_decoder, gst_vdp_h264_dec_set_sink_caps (SatBaseVideoDecoder * base_video_decoder,
GstCaps * caps) GstCaps * caps)
{ {
GstVdpH264Dec *h264_dec; GstVdpH264Dec *h264_dec;
@ -169,13 +169,13 @@ gst_vdp_h264_dec_set_sink_caps (GstBaseVideoDecoder * base_video_decoder,
} }
static GstFlowReturn static GstFlowReturn
gst_vdp_h264_dec_shape_output (GstBaseVideoDecoder * base_video_decoder, gst_vdp_h264_dec_shape_output (SatBaseVideoDecoder * base_video_decoder,
GstBuffer * buf) GstBuffer * buf)
{ {
GstVdpVideoSrcPad *vdp_pad; GstVdpVideoSrcPad *vdp_pad;
vdp_pad = vdp_pad =
(GstVdpVideoSrcPad *) GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder); (GstVdpVideoSrcPad *) SAT_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder);
return gst_vdp_video_src_pad_push (vdp_pad, GST_VDP_VIDEO_BUFFER (buf)); return gst_vdp_video_src_pad_push (vdp_pad, GST_VDP_VIDEO_BUFFER (buf));
} }
@ -183,13 +183,13 @@ gst_vdp_h264_dec_shape_output (GstBaseVideoDecoder * base_video_decoder,
static void static void
gst_vdp_h264_dec_output (GstH264DPB * dpb, GstVdpH264Frame * h264_frame) gst_vdp_h264_dec_output (GstH264DPB * dpb, GstVdpH264Frame * h264_frame)
{ {
GstBaseVideoDecoder *base_video_decoder; SatBaseVideoDecoder *base_video_decoder;
GST_DEBUG ("poc: %d", h264_frame->poc); GST_DEBUG ("poc: %d", h264_frame->poc);
base_video_decoder = g_object_get_data (G_OBJECT (dpb), "decoder"); base_video_decoder = g_object_get_data (G_OBJECT (dpb), "decoder");
gst_base_video_decoder_finish_frame (base_video_decoder, sat_base_video_decoder_finish_frame (base_video_decoder,
GST_VIDEO_FRAME_CAST (h264_frame)); SAT_VIDEO_FRAME_CAST (h264_frame));
} }
static guint static guint
@ -292,14 +292,14 @@ gst_vdp_h264_dec_idr (GstVdpH264Dec * h264_dec, GstVdpH264Frame * h264_frame)
GstFlowReturn ret; GstFlowReturn ret;
GstVdpDevice *device; GstVdpDevice *device;
gst_base_video_decoder_update_src_caps (GST_BASE_VIDEO_DECODER (h264_dec)); sat_base_video_decoder_update_src_caps (SAT_BASE_VIDEO_DECODER (h264_dec));
ret = gst_vdp_video_src_pad_get_device ret = gst_vdp_video_src_pad_get_device
(GST_VDP_VIDEO_SRC_PAD (GST_BASE_VIDEO_DECODER_SRC_PAD (h264_dec)), (GST_VDP_VIDEO_SRC_PAD (SAT_BASE_VIDEO_DECODER_SRC_PAD (h264_dec)),
&device, NULL); &device, NULL);
if (ret == GST_FLOW_OK) { if (ret == GST_FLOW_OK) {
GstVideoState *state; SatVideoState *state;
VdpDecoderProfile profile; VdpDecoderProfile profile;
VdpStatus status; VdpStatus status;
@ -309,7 +309,7 @@ gst_vdp_h264_dec_idr (GstVdpH264Dec * h264_dec, GstVdpH264Frame * h264_frame)
} }
state = state =
gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (h264_dec)); sat_base_video_decoder_get_state (SAT_BASE_VIDEO_DECODER (h264_dec));
switch (seq->profile_idc) { switch (seq->profile_idc) {
case 66: case 66:
@ -459,8 +459,8 @@ gst_vdp_h264_dec_create_bitstream_buffers (GstVdpH264Dec * h264_dec,
} }
static GstFlowReturn static GstFlowReturn
gst_vdp_h264_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder, gst_vdp_h264_dec_handle_frame (SatBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame, GstClockTimeDiff deadline) SatVideoFrame * frame, GstClockTimeDiff deadline)
{ {
GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder); GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
@ -491,14 +491,14 @@ gst_vdp_h264_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
if (gst_vdp_h264_dec_idr (h264_dec, h264_frame)) if (gst_vdp_h264_dec_idr (h264_dec, h264_frame))
h264_dec->got_idr = TRUE; h264_dec->got_idr = TRUE;
else { else {
gst_base_video_decoder_skip_frame (base_video_decoder, frame); sat_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_OK; return GST_FLOW_OK;
} }
} }
/* check if we've got a IDR frame yet */ /* check if we've got a IDR frame yet */
if (!h264_dec->got_idr) { if (!h264_dec->got_idr) {
gst_base_video_decoder_skip_frame (base_video_decoder, frame); sat_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_OK; return GST_FLOW_OK;
} }
@ -594,7 +594,7 @@ gst_vdp_h264_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
return GST_FLOW_OK; return GST_FLOW_OK;
alloc_error: alloc_error:
gst_base_video_decoder_skip_frame (base_video_decoder, frame); sat_base_video_decoder_skip_frame (base_video_decoder, frame);
return ret; return ret;
decode_error: decode_error:
@ -604,13 +604,13 @@ decode_error:
device->vdp_get_error_string (status))); device->vdp_get_error_string (status)));
gst_buffer_unref (GST_BUFFER_CAST (outbuf)); gst_buffer_unref (GST_BUFFER_CAST (outbuf));
gst_base_video_decoder_skip_frame (base_video_decoder, frame); sat_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_ERROR; return GST_FLOW_ERROR;
} }
static gint static gint
gst_vdp_h264_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder, gst_vdp_h264_dec_scan_for_sync (SatBaseVideoDecoder * base_video_decoder,
GstAdapter * adapter) GstAdapter * adapter)
{ {
GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder); GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
@ -627,8 +627,8 @@ gst_vdp_h264_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder,
return m; return m;
} }
static GstBaseVideoDecoderScanResult static SatBaseVideoDecoderScanResult
gst_vdp_h264_dec_scan_for_packet_end (GstBaseVideoDecoder * base_video_decoder, gst_vdp_h264_dec_scan_for_packet_end (SatBaseVideoDecoder * base_video_decoder,
GstAdapter * adapter, guint * size, gboolean at_eos) GstAdapter * adapter, guint * size, gboolean at_eos)
{ {
GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder); GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
@ -636,7 +636,7 @@ gst_vdp_h264_dec_scan_for_packet_end (GstBaseVideoDecoder * base_video_decoder,
avail = gst_adapter_available (adapter); avail = gst_adapter_available (adapter);
if (avail < h264_dec->nal_length_size) if (avail < h264_dec->nal_length_size)
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA; return SAT_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA;
if (h264_dec->packetized) { if (h264_dec->packetized) {
guint8 *data; guint8 *data;
@ -674,23 +674,23 @@ gst_vdp_h264_dec_scan_for_packet_end (GstBaseVideoDecoder * base_video_decoder,
GST_DEBUG ("start_code: %d", start_code); GST_DEBUG ("start_code: %d", start_code);
if (start_code == 0x000001) if (start_code == 0x000001)
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC; return SAT_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC;
n = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100, n = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100,
SYNC_CODE_SIZE, avail - SYNC_CODE_SIZE); SYNC_CODE_SIZE, avail - SYNC_CODE_SIZE);
if (n == -1) if (n == -1)
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA; return SAT_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA;
*size = n; *size = n;
} }
GST_DEBUG ("NAL size: %d", *size); GST_DEBUG ("NAL size: %d", *size);
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK; return SAT_BASE_VIDEO_DECODER_SCAN_RESULT_OK;
} }
static GstFlowReturn static GstFlowReturn
gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder, gst_vdp_h264_dec_parse_data (SatBaseVideoDecoder * base_video_decoder,
GstBuffer * buf, gboolean at_eos) GstBuffer * buf, gboolean at_eos)
{ {
GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder); GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
@ -702,7 +702,7 @@ gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
guint size; guint size;
gint i; gint i;
GstVideoFrame *frame; SatVideoFrame *frame;
GstFlowReturn ret = GST_FLOW_OK; GstFlowReturn ret = GST_FLOW_OK;
GST_MEMDUMP ("data", GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf)); GST_MEMDUMP ("data", GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
@ -743,20 +743,20 @@ gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
i--; i--;
} }
frame = gst_base_video_decoder_get_current_frame (base_video_decoder); frame = sat_base_video_decoder_get_current_frame (base_video_decoder);
/* does this mark the beginning of a new access unit */ /* does this mark the beginning of a new access unit */
if (nal_unit.type == GST_NAL_AU_DELIMITER) { if (nal_unit.type == GST_NAL_AU_DELIMITER) {
ret = gst_base_video_decoder_have_frame (base_video_decoder, &frame); ret = sat_base_video_decoder_have_frame (base_video_decoder, &frame);
gst_base_video_decoder_frame_start (base_video_decoder, buf); sat_base_video_decoder_frame_start (base_video_decoder, buf);
} }
if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VDP_H264_FRAME_GOT_PRIMARY)) { if (SAT_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VDP_H264_FRAME_GOT_PRIMARY)) {
if (nal_unit.type == GST_NAL_SPS || nal_unit.type == GST_NAL_PPS || if (nal_unit.type == GST_NAL_SPS || nal_unit.type == GST_NAL_PPS ||
nal_unit.type == GST_NAL_SEI || nal_unit.type == GST_NAL_SEI ||
(nal_unit.type >= 14 && nal_unit.type <= 18)) { (nal_unit.type >= 14 && nal_unit.type <= 18)) {
ret = gst_base_video_decoder_have_frame (base_video_decoder, &frame); ret = sat_base_video_decoder_have_frame (base_video_decoder, &frame);
gst_base_video_decoder_frame_start (base_video_decoder, buf); sat_base_video_decoder_frame_start (base_video_decoder, buf);
} }
} }
@ -768,7 +768,7 @@ gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
goto invalid_packet; goto invalid_packet;
if (slice.redundant_pic_cnt == 0) { if (slice.redundant_pic_cnt == 0) {
if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VDP_H264_FRAME_GOT_PRIMARY)) { if (SAT_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VDP_H264_FRAME_GOT_PRIMARY)) {
GstH264Slice *p_slice; GstH264Slice *p_slice;
guint8 pic_order_cnt_type, p_pic_order_cnt_type; guint8 pic_order_cnt_type, p_pic_order_cnt_type;
gboolean finish_frame = FALSE; gboolean finish_frame = FALSE;
@ -803,18 +803,18 @@ gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
finish_frame = TRUE; finish_frame = TRUE;
if (finish_frame) { if (finish_frame) {
ret = gst_base_video_decoder_have_frame (base_video_decoder, &frame); ret = sat_base_video_decoder_have_frame (base_video_decoder, &frame);
gst_base_video_decoder_frame_start (base_video_decoder, buf); sat_base_video_decoder_frame_start (base_video_decoder, buf);
} }
} }
if (!GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VDP_H264_FRAME_GOT_PRIMARY)) { if (!SAT_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VDP_H264_FRAME_GOT_PRIMARY)) {
if (GST_H264_IS_I_SLICE (slice.type) if (GST_H264_IS_I_SLICE (slice.type)
|| GST_H264_IS_SI_SLICE (slice.type)) || GST_H264_IS_SI_SLICE (slice.type))
GST_VIDEO_FRAME_FLAG_SET (frame, GST_VIDEO_FRAME_FLAG_KEYFRAME); SAT_VIDEO_FRAME_FLAG_SET (frame, SAT_VIDEO_FRAME_FLAG_KEYFRAME);
GST_VDP_H264_FRAME_CAST (frame)->slice_hdr = slice; GST_VDP_H264_FRAME_CAST (frame)->slice_hdr = slice;
GST_VIDEO_FRAME_FLAG_SET (frame, GST_VDP_H264_FRAME_GOT_PRIMARY); SAT_VIDEO_FRAME_FLAG_SET (frame, GST_VDP_H264_FRAME_GOT_PRIMARY);
} }
} }
gst_vdp_h264_frame_add_slice ((GstVdpH264Frame *) frame, buf); gst_vdp_h264_frame_add_slice ((GstVdpH264Frame *) frame, buf);
@ -840,31 +840,31 @@ invalid_packet:
return GST_FLOW_OK; return GST_FLOW_OK;
} }
static GstVideoFrame * static SatVideoFrame *
gst_vdp_h264_dec_create_frame (GstBaseVideoDecoder * base_video_decoder) gst_vdp_h264_dec_create_frame (SatBaseVideoDecoder * base_video_decoder)
{ {
return GST_VIDEO_FRAME_CAST (gst_vdp_h264_frame_new ()); return SAT_VIDEO_FRAME_CAST (gst_vdp_h264_frame_new ());
} }
static GstPad * static GstPad *
gst_vdp_h264_dec_create_srcpad (GstBaseVideoDecoder * base_video_decoder, gst_vdp_h264_dec_create_srcpad (SatBaseVideoDecoder * base_video_decoder,
GstBaseVideoDecoderClass * base_video_decoder_class) SatBaseVideoDecoderClass * base_video_decoder_class)
{ {
GstPadTemplate *pad_template; GstPadTemplate *pad_template;
GstVdpVideoSrcPad *vdp_pad; GstVdpVideoSrcPad *vdp_pad;
pad_template = gst_element_class_get_pad_template pad_template = gst_element_class_get_pad_template
(GST_ELEMENT_CLASS (base_video_decoder_class), (GST_ELEMENT_CLASS (base_video_decoder_class),
GST_BASE_VIDEO_DECODER_SRC_NAME); SAT_BASE_VIDEO_DECODER_SRC_NAME);
vdp_pad = gst_vdp_video_src_pad_new (pad_template, vdp_pad = gst_vdp_video_src_pad_new (pad_template,
GST_BASE_VIDEO_DECODER_SRC_NAME); SAT_BASE_VIDEO_DECODER_SRC_NAME);
return GST_PAD (vdp_pad); return GST_PAD (vdp_pad);
} }
static gboolean static gboolean
gst_vdp_h264_dec_flush (GstBaseVideoDecoder * base_video_decoder) gst_vdp_h264_dec_flush (SatBaseVideoDecoder * base_video_decoder)
{ {
GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder); GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
@ -875,7 +875,7 @@ gst_vdp_h264_dec_flush (GstBaseVideoDecoder * base_video_decoder)
} }
static gboolean static gboolean
gst_vdp_h264_dec_start (GstBaseVideoDecoder * base_video_decoder) gst_vdp_h264_dec_start (SatBaseVideoDecoder * base_video_decoder)
{ {
GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder); GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
@ -895,7 +895,7 @@ gst_vdp_h264_dec_start (GstBaseVideoDecoder * base_video_decoder)
} }
static gboolean static gboolean
gst_vdp_h264_dec_stop (GstBaseVideoDecoder * base_video_decoder) gst_vdp_h264_dec_stop (SatBaseVideoDecoder * base_video_decoder)
{ {
GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder); GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
@ -907,7 +907,7 @@ gst_vdp_h264_dec_stop (GstBaseVideoDecoder * base_video_decoder)
g_object_unref (h264_dec->dpb); g_object_unref (h264_dec->dpb);
vdp_pad = vdp_pad =
GST_VDP_VIDEO_SRC_PAD (GST_BASE_VIDEO_DECODER_SRC_PAD GST_VDP_VIDEO_SRC_PAD (SAT_BASE_VIDEO_DECODER_SRC_PAD
(base_video_decoder)); (base_video_decoder));
ret = gst_vdp_video_src_pad_get_device (vdp_pad, &device, NULL); ret = gst_vdp_video_src_pad_get_device (vdp_pad, &device, NULL);
@ -938,7 +938,7 @@ gst_vdp_h264_dec_base_init (gpointer g_class)
gst_static_pad_template_get (&sink_template)); gst_static_pad_template_get (&sink_template));
src_caps = gst_vdp_video_buffer_get_caps (TRUE, VDP_CHROMA_TYPE_420); src_caps = gst_vdp_video_buffer_get_caps (TRUE, VDP_CHROMA_TYPE_420);
src_template = gst_pad_template_new (GST_BASE_VIDEO_DECODER_SRC_NAME, src_template = gst_pad_template_new (SAT_BASE_VIDEO_DECODER_SRC_NAME,
GST_PAD_SRC, GST_PAD_ALWAYS, src_caps); GST_PAD_SRC, GST_PAD_ALWAYS, src_caps);
gst_element_class_add_pad_template (element_class, src_template); gst_element_class_add_pad_template (element_class, src_template);
@ -959,8 +959,8 @@ static void
gst_vdp_h264_dec_class_init (GstVdpH264DecClass * klass) gst_vdp_h264_dec_class_init (GstVdpH264DecClass * klass)
{ {
GObjectClass *gobject_class = G_OBJECT_CLASS (klass); GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstBaseVideoDecoderClass *base_video_decoder_class = SatBaseVideoDecoderClass *base_video_decoder_class =
GST_BASE_VIDEO_DECODER_CLASS (klass); SAT_BASE_VIDEO_DECODER_CLASS (klass);
gobject_class->finalize = gst_vdp_h264_dec_finalize; gobject_class->finalize = gst_vdp_h264_dec_finalize;
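
The scan_for_sync / scan_for_packet_end / parse_data implementations above are driven by the base class when the stream is not packetized; satbasevideodecoder.c itself is not part of these hunks, so the loop below is only an illustrative sketch of the assumed contract between the three vfuncs.

/* Illustrative only: the real driver lives in satbasevideodecoder.c,
 * which is not shown in this commit excerpt. */
#include "../basevideodecoder/satbasevideodecoder.h"

static GstFlowReturn
assumed_parse_loop (SatBaseVideoDecoder * dec, gboolean at_eos)
{
  SatBaseVideoDecoderClass *klass = SAT_BASE_VIDEO_DECODER_GET_CLASS (dec);
  GstFlowReturn ret;
  guint size;

  while (gst_adapter_available (dec->input_adapter) > 0) {
    SatBaseVideoDecoderScanResult res;

    if (!dec->have_sync) {
      gint offset = klass->scan_for_sync (dec, dec->input_adapter);

      if (offset < 0)
        break;                  /* no sync code yet, wait for more data */
      gst_adapter_flush (dec->input_adapter, offset);
      dec->have_sync = TRUE;
    }

    res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos);
    if (res == SAT_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA)
      break;                    /* packet not complete yet */
    if (res == SAT_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC) {
      dec->have_sync = FALSE;   /* resynchronize on the next iteration */
      continue;
    }

    /* hand one complete packet to the subclass parser */
    ret = klass->parse_data (dec,
        gst_adapter_take_buffer (dec->input_adapter, size), at_eos);
    if (ret != GST_FLOW_OK)
      return ret;
  }

  return GST_FLOW_OK;
}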

View file

@ -23,7 +23,7 @@
#include <gst/gst.h> #include <gst/gst.h>
#include "../basevideodecoder/gstbasevideodecoder.h" #include "../basevideodecoder/satbasevideodecoder.h"
#include "gsth264parser.h" #include "gsth264parser.h"
#include "gsth264dpb.h" #include "gsth264dpb.h"
@ -43,7 +43,7 @@ typedef struct _GstVdpH264DecClass GstVdpH264DecClass;
struct _GstVdpH264Dec { struct _GstVdpH264Dec {
GstBaseVideoDecoder base_video_decoder; SatBaseVideoDecoder base_video_decoder;
gboolean packetized; gboolean packetized;
guint8 nal_length_size; guint8 nal_length_size;
@ -60,7 +60,7 @@ struct _GstVdpH264Dec {
}; };
struct _GstVdpH264DecClass { struct _GstVdpH264DecClass {
GstBaseVideoDecoderClass base_video_decoder_class; SatBaseVideoDecoderClass base_video_decoder_class;
}; };
GType gst_vdp_h264_dec_get_type (void); GType gst_vdp_h264_dec_get_type (void);

View file

@ -95,7 +95,7 @@ gst_vdp_h264_frame_get_type (void)
(GInstanceInitFunc) gst_vdp_h264_frame_init, (GInstanceInitFunc) gst_vdp_h264_frame_init,
NULL NULL
}; };
_gst_vdp_h264_frame_type = g_type_register_static (GST_TYPE_VIDEO_FRAME, _gst_vdp_h264_frame_type = g_type_register_static (SAT_TYPE_VIDEO_FRAME,
"GstVdpH264Frame", &info, 0); "GstVdpH264Frame", &info, 0);
DEBUG_INIT (); DEBUG_INIT ();

View file

@ -23,7 +23,7 @@
#include <gst/gst.h> #include <gst/gst.h>
#include "../basevideodecoder/gstvideoframe.h" #include "../basevideodecoder/satvideoframe.h"
#include "gsth264parser.h" #include "gsth264parser.h"
@ -32,14 +32,14 @@
#define GST_VDP_H264_FRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_H264_FRAME, GstVdpH264Frame)) #define GST_VDP_H264_FRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_H264_FRAME, GstVdpH264Frame))
#define GST_VDP_H264_FRAME_CAST(obj) ((GstVdpH264Frame *)obj) #define GST_VDP_H264_FRAME_CAST(obj) ((GstVdpH264Frame *)obj)
#define GST_VDP_H264_FRAME_GOT_PRIMARY GST_VIDEO_FRAME_FLAG_LAST #define GST_VDP_H264_FRAME_GOT_PRIMARY SAT_VIDEO_FRAME_FLAG_LAST
typedef struct _GstVdpH264Frame GstVdpH264Frame; typedef struct _GstVdpH264Frame GstVdpH264Frame;
typedef struct _GstVdpH264FrameClass GstVdpH264FrameClass; typedef struct _GstVdpH264FrameClass GstVdpH264FrameClass;
struct _GstVdpH264Frame struct _GstVdpH264Frame
{ {
GstVideoFrame video_frame; SatVideoFrame video_frame;
GstH264Slice slice_hdr; GstH264Slice slice_hdr;
GPtrArray *slices; GPtrArray *slices;
@ -53,7 +53,7 @@ struct _GstVdpH264Frame
struct _GstVdpH264FrameClass struct _GstVdpH264FrameClass
{ {
GstVideoFrameClass video_frame_class; SatVideoFrameClass video_frame_class;
}; };
void gst_vdp_h264_frame_add_slice (GstVdpH264Frame *h264_frame, GstBuffer *buf); void gst_vdp_h264_frame_add_slice (GstVdpH264Frame *h264_frame, GstBuffer *buf);
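
GST_VDP_H264_FRAME_GOT_PRIMARY above shows the intended way to add codec-private frame flags: numbering starts at SAT_VIDEO_FRAME_FLAG_LAST. A hypothetical variant with more than one private flag:

/* Hypothetical codec-private flags; MY_FRAME_FLAG_* are placeholder names. */
typedef enum
{
  MY_FRAME_FLAG_GOT_HEADER = (SAT_VIDEO_FRAME_FLAG_LAST << 0),
  MY_FRAME_FLAG_CORRUPTED  = (SAT_VIDEO_FRAME_FLAG_LAST << 1)
} MyFrameFlag;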

View file

@ -78,7 +78,7 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
"VDPAU mpeg decoder"); "VDPAU mpeg decoder");
GST_BOILERPLATE_FULL (GstVdpMpegDec, gst_vdp_mpeg_dec, GST_BOILERPLATE_FULL (GstVdpMpegDec, gst_vdp_mpeg_dec,
GstBaseVideoDecoder, GST_TYPE_BASE_VIDEO_DECODER, DEBUG_INIT); SatBaseVideoDecoder, SAT_TYPE_BASE_VIDEO_DECODER, DEBUG_INIT);
static void gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info); static void gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info);
@ -108,7 +108,7 @@ gst_vdp_mpeg_dec_alloc_buffer (GstVdpMpegDec * mpeg_dec,
GstVdpVideoSrcPad *vdp_pad; GstVdpVideoSrcPad *vdp_pad;
GstFlowReturn ret = GST_FLOW_OK; GstFlowReturn ret = GST_FLOW_OK;
vdp_pad = (GstVdpVideoSrcPad *) GST_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec); vdp_pad = (GstVdpVideoSrcPad *) SAT_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec);
ret = gst_vdp_video_src_pad_alloc_buffer (vdp_pad, outbuf); ret = gst_vdp_video_src_pad_alloc_buffer (vdp_pad, outbuf);
if (ret != GST_FLOW_OK) if (ret != GST_FLOW_OK)
return ret; return ret;
@ -117,20 +117,20 @@ gst_vdp_mpeg_dec_alloc_buffer (GstVdpMpegDec * mpeg_dec,
} }
static GstFlowReturn static GstFlowReturn
gst_vdp_mpeg_dec_shape_output (GstBaseVideoDecoder * base_video_decoder, gst_vdp_mpeg_dec_shape_output (SatBaseVideoDecoder * base_video_decoder,
GstBuffer * buf) GstBuffer * buf)
{ {
GstVdpVideoSrcPad *vdp_pad; GstVdpVideoSrcPad *vdp_pad;
vdp_pad = vdp_pad =
(GstVdpVideoSrcPad *) GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder); (GstVdpVideoSrcPad *) SAT_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder);
return gst_vdp_video_src_pad_push (vdp_pad, GST_VDP_VIDEO_BUFFER (buf)); return gst_vdp_video_src_pad_push (vdp_pad, GST_VDP_VIDEO_BUFFER (buf));
} }
static gboolean static gboolean
gst_vdp_mpeg_dec_handle_picture_coding (GstVdpMpegDec * mpeg_dec, gst_vdp_mpeg_dec_handle_picture_coding (GstVdpMpegDec * mpeg_dec,
GstBuffer * buffer, GstVideoFrame * frame) GstBuffer * buffer, SatVideoFrame * frame)
{ {
MPEGPictureExt pic_ext; MPEGPictureExt pic_ext;
VdpPictureInfoMPEG1Or2 *info; VdpPictureInfoMPEG1Or2 *info;
@ -175,7 +175,7 @@ gst_vdp_mpeg_dec_handle_picture_coding (GstVdpMpegDec * mpeg_dec,
frame->n_fields = fields; frame->n_fields = fields;
if (pic_ext.top_field_first) if (pic_ext.top_field_first)
GST_VIDEO_FRAME_FLAG_SET (frame, GST_VIDEO_FRAME_FLAG_TFF); SAT_VIDEO_FRAME_FLAG_SET (frame, SAT_VIDEO_FRAME_FLAG_TFF);
return TRUE; return TRUE;
} }
@ -249,7 +249,7 @@ gst_vdp_mpeg_dec_create_decoder (GstVdpMpegDec * mpeg_dec)
GstVdpDevice *device; GstVdpDevice *device;
ret = gst_vdp_video_src_pad_get_device ret = gst_vdp_video_src_pad_get_device
(GST_VDP_VIDEO_SRC_PAD (GST_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec)), (GST_VDP_VIDEO_SRC_PAD (SAT_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec)),
&device, NULL); &device, NULL);
if (ret == GST_FLOW_OK) { if (ret == GST_FLOW_OK) {
@ -280,7 +280,7 @@ static gboolean
gst_vdp_mpeg_dec_handle_sequence (GstVdpMpegDec * mpeg_dec, gst_vdp_mpeg_dec_handle_sequence (GstVdpMpegDec * mpeg_dec,
GstBuffer * seq, GstBuffer * seq_ext) GstBuffer * seq, GstBuffer * seq_ext)
{ {
GstBaseVideoDecoder *base_video_decoder = GST_BASE_VIDEO_DECODER (mpeg_dec); SatBaseVideoDecoder *base_video_decoder = SAT_BASE_VIDEO_DECODER (mpeg_dec);
MPEGSeqHdr hdr; MPEGSeqHdr hdr;
GstVdpMpegStreamInfo stream_info; GstVdpMpegStreamInfo stream_info;
@ -328,9 +328,9 @@ gst_vdp_mpeg_dec_handle_sequence (GstVdpMpegDec * mpeg_dec,
if (memcmp (&mpeg_dec->stream_info, &stream_info, if (memcmp (&mpeg_dec->stream_info, &stream_info,
sizeof (GstVdpMpegStreamInfo)) != 0) { sizeof (GstVdpMpegStreamInfo)) != 0) {
GstVideoState *state; SatVideoState *state;
state = gst_base_video_decoder_get_state (base_video_decoder); state = sat_base_video_decoder_get_state (base_video_decoder);
state->width = stream_info.width; state->width = stream_info.width;
state->height = stream_info.height; state->height = stream_info.height;
@ -343,8 +343,8 @@ gst_vdp_mpeg_dec_handle_sequence (GstVdpMpegDec * mpeg_dec,
state->interlaced = stream_info.interlaced; state->interlaced = stream_info.interlaced;
gst_base_video_decoder_set_state (base_video_decoder, state); sat_base_video_decoder_set_state (base_video_decoder, state);
gst_base_video_decoder_update_src_caps (base_video_decoder); sat_base_video_decoder_update_src_caps (base_video_decoder);
memcpy (&mpeg_dec->stream_info, &stream_info, memcpy (&mpeg_dec->stream_info, &stream_info,
sizeof (GstVdpMpegStreamInfo)); sizeof (GstVdpMpegStreamInfo));
@ -354,8 +354,8 @@ gst_vdp_mpeg_dec_handle_sequence (GstVdpMpegDec * mpeg_dec,
} }
static GstFlowReturn static GstFlowReturn
gst_vdp_mpeg_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder, gst_vdp_mpeg_dec_handle_frame (SatBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame, GstClockTimeDiff deadline) SatVideoFrame * frame, GstClockTimeDiff deadline)
{ {
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder); GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
@ -404,7 +404,7 @@ gst_vdp_mpeg_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
GST_DEBUG_OBJECT (mpeg_dec, GST_DEBUG_OBJECT (mpeg_dec,
"Drop frame since we haven't got an I_FRAME yet"); "Drop frame since we haven't got an I_FRAME yet");
gst_base_video_decoder_skip_frame (base_video_decoder, frame); sat_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_OK; return GST_FLOW_OK;
} }
if (info->picture_coding_type == B_FRAME if (info->picture_coding_type == B_FRAME
@ -412,19 +412,19 @@ gst_vdp_mpeg_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
GST_DEBUG_OBJECT (mpeg_dec, GST_DEBUG_OBJECT (mpeg_dec,
"Drop frame since we haven't got two non B_FRAMES yet"); "Drop frame since we haven't got two non B_FRAMES yet");
gst_base_video_decoder_skip_frame (base_video_decoder, frame); sat_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_OK; return GST_FLOW_OK;
} }
if (info->picture_coding_type != B_FRAME) { if (info->picture_coding_type != B_FRAME) {
if (info->backward_reference != VDP_INVALID_HANDLE) { if (info->backward_reference != VDP_INVALID_HANDLE) {
gst_base_video_decoder_finish_frame (base_video_decoder, sat_base_video_decoder_finish_frame (base_video_decoder,
mpeg_dec->b_frame); mpeg_dec->b_frame);
} }
if (info->forward_reference != VDP_INVALID_HANDLE) { if (info->forward_reference != VDP_INVALID_HANDLE) {
gst_video_frame_unref (mpeg_dec->f_frame); sat_video_frame_unref (mpeg_dec->f_frame);
info->forward_reference = VDP_INVALID_HANDLE; info->forward_reference = VDP_INVALID_HANDLE;
} }
@ -468,16 +468,16 @@ gst_vdp_mpeg_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
frame->src_buffer = GST_BUFFER_CAST (outbuf); frame->src_buffer = GST_BUFFER_CAST (outbuf);
if (info->picture_coding_type == B_FRAME) { if (info->picture_coding_type == B_FRAME) {
gst_base_video_decoder_finish_frame (base_video_decoder, frame); sat_base_video_decoder_finish_frame (base_video_decoder, frame);
} else { } else {
info->backward_reference = surface; info->backward_reference = surface;
mpeg_dec->b_frame = gst_video_frame_ref (frame); mpeg_dec->b_frame = sat_video_frame_ref (frame);
} }
return GST_FLOW_OK; return GST_FLOW_OK;
alloc_error: alloc_error:
gst_base_video_decoder_skip_frame (base_video_decoder, frame); sat_base_video_decoder_skip_frame (base_video_decoder, frame);
return ret; return ret;
decode_error: decode_error:
@ -487,19 +487,19 @@ decode_error:
device->vdp_get_error_string (status))); device->vdp_get_error_string (status)));
gst_buffer_unref (GST_BUFFER_CAST (outbuf)); gst_buffer_unref (GST_BUFFER_CAST (outbuf));
gst_base_video_decoder_skip_frame (base_video_decoder, frame); sat_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_ERROR; return GST_FLOW_ERROR;
} }
static GstVideoFrame * static SatVideoFrame *
gst_vdp_mpeg_dec_create_frame (GstBaseVideoDecoder * base_video_decoder) gst_vdp_mpeg_dec_create_frame (SatBaseVideoDecoder * base_video_decoder)
{ {
return GST_VIDEO_FRAME (gst_vdp_mpeg_frame_new ()); return SAT_VIDEO_FRAME (gst_vdp_mpeg_frame_new ());
} }
static GstFlowReturn static GstFlowReturn
gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder, gst_vdp_mpeg_dec_parse_data (SatBaseVideoDecoder * base_video_decoder,
GstBuffer * buf, gboolean at_eos) GstBuffer * buf, gboolean at_eos)
{ {
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder); GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
@ -528,7 +528,7 @@ gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
} }
mpeg_frame = (GstVdpMpegFrame *) mpeg_frame = (GstVdpMpegFrame *)
gst_base_video_decoder_get_current_frame (base_video_decoder); sat_base_video_decoder_get_current_frame (base_video_decoder);
if (start_code >= MPEG_PACKET_SLICE_MIN if (start_code >= MPEG_PACKET_SLICE_MIN
&& start_code <= MPEG_PACKET_SLICE_MAX) { && start_code <= MPEG_PACKET_SLICE_MAX) {
@ -543,9 +543,9 @@ gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SEQUENCE"); GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SEQUENCE");
if (mpeg_dec->prev_packet != -1) { if (mpeg_dec->prev_packet != -1) {
ret = gst_base_video_decoder_have_frame (base_video_decoder, ret = sat_base_video_decoder_have_frame (base_video_decoder,
(GstVideoFrame **) & mpeg_frame); (SatVideoFrame **) & mpeg_frame);
gst_base_video_decoder_frame_start (base_video_decoder, buf); sat_base_video_decoder_frame_start (base_video_decoder, buf);
} }
mpeg_frame->seq = buf; mpeg_frame->seq = buf;
@ -557,9 +557,9 @@ gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE && if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE &&
mpeg_dec->prev_packet != MPEG_PACKET_GOP) { mpeg_dec->prev_packet != MPEG_PACKET_GOP) {
ret = gst_base_video_decoder_have_frame (base_video_decoder, ret = sat_base_video_decoder_have_frame (base_video_decoder,
(GstVideoFrame **) & mpeg_frame); (SatVideoFrame **) & mpeg_frame);
gst_base_video_decoder_frame_start (base_video_decoder, buf); sat_base_video_decoder_frame_start (base_video_decoder, buf);
} }
mpeg_frame->pic = buf; mpeg_frame->pic = buf;
@ -569,9 +569,9 @@ gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_GOP"); GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_GOP");
if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE) { if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE) {
ret = gst_base_video_decoder_have_frame (base_video_decoder, ret = sat_base_video_decoder_have_frame (base_video_decoder,
(GstVideoFrame **) & mpeg_frame); (SatVideoFrame **) & mpeg_frame);
gst_base_video_decoder_frame_start (base_video_decoder, buf); sat_base_video_decoder_frame_start (base_video_decoder, buf);
} }
mpeg_frame->gop = buf; mpeg_frame->gop = buf;
@ -635,24 +635,24 @@ done:
} }
static GstPad * static GstPad *
gst_vdp_mpeg_dec_create_srcpad (GstBaseVideoDecoder * base_video_decoder, gst_vdp_mpeg_dec_create_srcpad (SatBaseVideoDecoder * base_video_decoder,
GstBaseVideoDecoderClass * base_video_decoder_class) SatBaseVideoDecoderClass * base_video_decoder_class)
{ {
GstPadTemplate *pad_template; GstPadTemplate *pad_template;
GstVdpVideoSrcPad *vdp_pad; GstVdpVideoSrcPad *vdp_pad;
pad_template = gst_element_class_get_pad_template pad_template = gst_element_class_get_pad_template
(GST_ELEMENT_CLASS (base_video_decoder_class), (GST_ELEMENT_CLASS (base_video_decoder_class),
GST_BASE_VIDEO_DECODER_SRC_NAME); SAT_BASE_VIDEO_DECODER_SRC_NAME);
vdp_pad = gst_vdp_video_src_pad_new (pad_template, vdp_pad = gst_vdp_video_src_pad_new (pad_template,
GST_BASE_VIDEO_DECODER_SRC_NAME); SAT_BASE_VIDEO_DECODER_SRC_NAME);
return GST_PAD (vdp_pad); return GST_PAD (vdp_pad);
} }
static gint static gint
gst_vdp_mpeg_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder, gst_vdp_mpeg_dec_scan_for_sync (SatBaseVideoDecoder * base_video_decoder,
GstAdapter * adapter) GstAdapter * adapter)
{ {
gint m; gint m;
@ -665,8 +665,8 @@ gst_vdp_mpeg_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder,
return m; return m;
} }
static GstBaseVideoDecoderScanResult static SatBaseVideoDecoderScanResult
gst_vdp_mpeg_dec_scan_for_packet_end (GstBaseVideoDecoder * base_video_decoder, gst_vdp_mpeg_dec_scan_for_packet_end (SatBaseVideoDecoder * base_video_decoder,
GstAdapter * adapter, guint * size, gboolean at_eos) GstAdapter * adapter, guint * size, gboolean at_eos)
{ {
guint8 *data; guint8 *data;
@ -677,26 +677,26 @@ gst_vdp_mpeg_dec_scan_for_packet_end (GstBaseVideoDecoder * base_video_decoder,
sync_code = ((data[0] << 16) | (data[1] << 8) | data[2]); sync_code = ((data[0] << 16) | (data[1] << 8) | data[2]);
if (sync_code != 0x000001) if (sync_code != 0x000001)
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC; return SAT_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC;
*size = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100, *size = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100,
SYNC_CODE_SIZE, gst_adapter_available (adapter) - SYNC_CODE_SIZE); SYNC_CODE_SIZE, gst_adapter_available (adapter) - SYNC_CODE_SIZE);
if (*size == -1) if (*size == -1)
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA; return SAT_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA;
return GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK; return SAT_BASE_VIDEO_DECODER_SCAN_RESULT_OK;
} }
static gboolean static gboolean
gst_vdp_mpeg_dec_flush (GstBaseVideoDecoder * base_video_decoder) gst_vdp_mpeg_dec_flush (SatBaseVideoDecoder * base_video_decoder)
{ {
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder); GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE) if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE)
gst_video_frame_unref (mpeg_dec->f_frame); sat_video_frame_unref (mpeg_dec->f_frame);
if (mpeg_dec->vdp_info.backward_reference != VDP_INVALID_HANDLE) if (mpeg_dec->vdp_info.backward_reference != VDP_INVALID_HANDLE)
gst_video_frame_unref (mpeg_dec->b_frame); sat_video_frame_unref (mpeg_dec->b_frame);
gst_vdp_mpeg_dec_init_info (&mpeg_dec->vdp_info); gst_vdp_mpeg_dec_init_info (&mpeg_dec->vdp_info);
@ -706,7 +706,7 @@ gst_vdp_mpeg_dec_flush (GstBaseVideoDecoder * base_video_decoder)
} }
static gboolean static gboolean
gst_vdp_mpeg_dec_start (GstBaseVideoDecoder * base_video_decoder) gst_vdp_mpeg_dec_start (SatBaseVideoDecoder * base_video_decoder)
{ {
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder); GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
@ -721,7 +721,7 @@ gst_vdp_mpeg_dec_start (GstBaseVideoDecoder * base_video_decoder)
} }
static gboolean static gboolean
gst_vdp_mpeg_dec_stop (GstBaseVideoDecoder * base_video_decoder) gst_vdp_mpeg_dec_stop (SatBaseVideoDecoder * base_video_decoder)
{ {
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder); GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
@ -730,7 +730,7 @@ gst_vdp_mpeg_dec_stop (GstBaseVideoDecoder * base_video_decoder)
GstVdpDevice *device; GstVdpDevice *device;
vdp_pad = vdp_pad =
GST_VDP_VIDEO_SRC_PAD (GST_BASE_VIDEO_DECODER_SRC_PAD GST_VDP_VIDEO_SRC_PAD (SAT_BASE_VIDEO_DECODER_SRC_PAD
(base_video_decoder)); (base_video_decoder));
ret = gst_vdp_video_src_pad_get_device (vdp_pad, &device, NULL); ret = gst_vdp_video_src_pad_get_device (vdp_pad, &device, NULL);
@ -767,7 +767,7 @@ gst_vdp_mpeg_dec_get_property (GObject * object, guint prop_id,
switch (prop_id) { switch (prop_id) {
case PROP_DISPLAY: case PROP_DISPLAY:
g_object_get_property g_object_get_property
(G_OBJECT (GST_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec)), "display", (G_OBJECT (SAT_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec)), "display",
value); value);
break; break;
default: default:
@ -785,7 +785,7 @@ gst_vdp_mpeg_dec_set_property (GObject * object, guint prop_id,
switch (prop_id) { switch (prop_id) {
case PROP_DISPLAY: case PROP_DISPLAY:
g_object_set_property g_object_set_property
(G_OBJECT (GST_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec)), "display", (G_OBJECT (SAT_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec)), "display",
value); value);
break; break;
default: default:
@ -813,7 +813,7 @@ gst_vdp_mpeg_dec_base_init (gpointer gclass)
gst_static_pad_template_get (&sink_template)); gst_static_pad_template_get (&sink_template));
src_caps = gst_vdp_video_buffer_get_caps (TRUE, VDP_CHROMA_TYPE_420); src_caps = gst_vdp_video_buffer_get_caps (TRUE, VDP_CHROMA_TYPE_420);
src_template = gst_pad_template_new (GST_BASE_VIDEO_DECODER_SRC_NAME, src_template = gst_pad_template_new (SAT_BASE_VIDEO_DECODER_SRC_NAME,
GST_PAD_SRC, GST_PAD_ALWAYS, src_caps); GST_PAD_SRC, GST_PAD_ALWAYS, src_caps);
gst_element_class_add_pad_template (element_class, src_template); gst_element_class_add_pad_template (element_class, src_template);
@ -825,11 +825,11 @@ gst_vdp_mpeg_dec_class_init (GstVdpMpegDecClass * klass)
{ {
GObjectClass *gobject_class; GObjectClass *gobject_class;
GstElementClass *gstelement_class; GstElementClass *gstelement_class;
GstBaseVideoDecoderClass *base_video_decoder_class; SatBaseVideoDecoderClass *base_video_decoder_class;
gobject_class = (GObjectClass *) klass; gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass; gstelement_class = (GstElementClass *) klass;
base_video_decoder_class = (GstBaseVideoDecoderClass *) klass; base_video_decoder_class = (SatBaseVideoDecoderClass *) klass;
gobject_class->get_property = gst_vdp_mpeg_dec_get_property; gobject_class->get_property = gst_vdp_mpeg_dec_get_property;
gobject_class->set_property = gst_vdp_mpeg_dec_set_property; gobject_class->set_property = gst_vdp_mpeg_dec_set_property;

View file

@ -24,7 +24,7 @@
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/base/gstadapter.h> #include <gst/base/gstadapter.h>
#include "../basevideodecoder/gstbasevideodecoder.h" #include "../basevideodecoder/satbasevideodecoder.h"
#include "gstvdpmpegframe.h" #include "gstvdpmpegframe.h"
G_BEGIN_DECLS G_BEGIN_DECLS
@ -46,7 +46,7 @@ typedef struct _GstVdpMpegDecClass GstVdpMpegDecClass;
struct _GstVdpMpegDec struct _GstVdpMpegDec
{ {
GstBaseVideoDecoder base_video_decoder; SatBaseVideoDecoder base_video_decoder;
VdpDecoder decoder; VdpDecoder decoder;
@ -64,13 +64,13 @@ struct _GstVdpMpegDec
guint64 gop_frame; guint64 gop_frame;
/* forward and backward reference */ /* forward and backward reference */
GstVideoFrame *f_frame, *b_frame; SatVideoFrame *f_frame, *b_frame;
}; };
struct _GstVdpMpegDecClass struct _GstVdpMpegDecClass
{ {
GstBaseVideoDecoderClass base_video_decoder_class; SatBaseVideoDecoderClass base_video_decoder_class;
}; };
GType gst_vdp_mpeg_dec_get_type (void); GType gst_vdp_mpeg_dec_get_type (void);

View file

@ -124,7 +124,7 @@ gst_vdp_mpeg_frame_get_type (void)
(GInstanceInitFunc) gst_vdp_mpeg_frame_init, (GInstanceInitFunc) gst_vdp_mpeg_frame_init,
NULL NULL
}; };
_gst_vdp_mpeg_frame_type = g_type_register_static (GST_TYPE_VIDEO_FRAME, _gst_vdp_mpeg_frame_type = g_type_register_static (SAT_TYPE_VIDEO_FRAME,
"GstVdpMpegFrame", &info, 0); "GstVdpMpegFrame", &info, 0);
DEBUG_INIT (); DEBUG_INIT ();

View file

@ -25,7 +25,7 @@
#include <vdpau/vdpau.h> #include <vdpau/vdpau.h>
#include "../basevideodecoder/gstvideoframe.h" #include "../basevideodecoder/satvideoframe.h"
#define GST_TYPE_VDP_MPEG_FRAME (gst_vdp_mpeg_frame_get_type()) #define GST_TYPE_VDP_MPEG_FRAME (gst_vdp_mpeg_frame_get_type())
#define GST_IS_VDP_MPEG_FRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_MPEG_FRAME)) #define GST_IS_VDP_MPEG_FRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_MPEG_FRAME))
@ -48,7 +48,7 @@ typedef struct _GstVdpMpegFrameClass GstVdpMpegFrameClass;
struct _GstVdpMpegFrame struct _GstVdpMpegFrame
{ {
GstVideoFrame video_frame; SatVideoFrame video_frame;
GstBuffer *seq; GstBuffer *seq;
GstBuffer *seq_ext; GstBuffer *seq_ext;
@ -65,7 +65,7 @@ struct _GstVdpMpegFrame
struct _GstVdpMpegFrameClass struct _GstVdpMpegFrameClass
{ {
GstVideoFrameClass video_frame_class; SatVideoFrameClass video_frame_class;
}; };
void gst_vdp_mpeg_frame_add_slice (GstVdpMpegFrame *mpeg_frame, GstBuffer *buf); void gst_vdp_mpeg_frame_add_slice (GstVdpMpegFrame *mpeg_frame, GstBuffer *buf);