Merge branch 'master' into 0.11

Conflicts:
	gst-libs/gst/audio/audio.h
	gst-libs/gst/audio/gstaudiodecoder.c
	gst-libs/gst/audio/gstaudiodecoder.h
	gst-libs/gst/audio/gstaudioencoder.c
	gst-libs/gst/audio/gstbaseaudioencoder.h
	gst/playback/Makefile.am
	gst/playback/gstplaybin.c
	gst/playback/gstplaysink.c
	gst/playback/gstplaysinkvideoconvert.c
	gst/playback/gstsubtitleoverlay.c
	gst/videorate/gstvideorate.c
	gst/videoscale/gstvideoscale.c
	win32/common/libgstaudio.def
Wim Taymans 2011-09-06 15:24:32 +02:00
commit 7012e88090
51 changed files with 14261 additions and 1228 deletions


@ -79,7 +79,13 @@ SCANOBJ_DEPS = \
$(top_builddir)/gst-libs/gst/pbutils/libgstpbutils-@GST_MAJORMINOR@.la
# Header files to ignore when scanning.
IGNORE_HFILES = pbutils-private.h
IGNORE_HFILES = pbutils-private.h gsttageditingprivate.h id3v2.h \
kiss_fft_f32.h kiss_fft_f64.h kiss_fftr_f32.h kiss_fftr_f64.h \
kiss_fftr_s16.h kiss_fftr_s32.h kiss_fft_s16.h kiss_fft_s32.h \
_kiss_fft_guts_f32.h _kiss_fft_guts_f64.h _kiss_fft_guts_s16.h \
_kiss_fft_guts_s32.h \
interfaces-marshal.h pbutils-marshal.h
# Images to copy into HTML directory.
HTML_IMAGES =


@ -44,6 +44,8 @@
</para>
<xi:include href="xml/gstaudio.xml" />
<xi:include href="xml/gstaudioclock.xml" />
<xi:include href="xml/gstaudiodecoder.xml" />
<xi:include href="xml/gstaudioencoder.xml" />
<xi:include href="xml/gstaudiofilter.xml" />
<xi:include href="xml/gstaudiomixerutils.xml" />
<xi:include href="xml/gstaudiosink.xml" />


@ -35,6 +35,7 @@ GST_IS_APP_BUFFER
GST_IS_APP_BUFFER_CLASS
GST_TYPE_APP_BUFFER
GST_TYPE_APP_STREAM_TYPE
gst_app_stream_type_get_type
<SUBSECTION Private>
GstAppSrc
GstAppSrcPrivate
@ -124,6 +125,86 @@ GST_IS_AUDIO_CLOCK_CLASS
GST_AUDIO_CLOCK_CAST
</SECTION>
<SECTION>
<FILE>gstaudiodecoder</FILE>
<INCLUDE>gst/audio/gstaudiodecoder.h</INCLUDE>
GstAudioDecoder
GstAudioDecoderClass
GST_AUDIO_DECODER_ERROR
GST_AUDIO_DECODER_SINK_NAME
GST_AUDIO_DECODER_SINK_PAD
GST_AUDIO_DECODER_SRC_NAME
GST_AUDIO_DECODER_SRC_PAD
gst_audio_decoder_finish_frame
gst_audio_decoder_get_audio_info
gst_audio_decoder_get_byte_time
gst_audio_decoder_get_delay
gst_audio_decoder_get_latency
gst_audio_decoder_get_max_errors
gst_audio_decoder_get_min_latency
gst_audio_decoder_get_parse_state
gst_audio_decoder_get_plc
gst_audio_decoder_get_plc_aware
gst_audio_decoder_get_tolerance
gst_audio_decoder_set_byte_time
gst_audio_decoder_set_latency
gst_audio_decoder_set_max_errors
gst_audio_decoder_set_min_latency
gst_audio_decoder_set_plc
gst_audio_decoder_set_plc_aware
gst_audio_decoder_set_tolerance
<SUBSECTION Standard>
GST_AUDIO_DECODER
GST_IS_AUDIO_DECODER
GST_TYPE_AUDIO_DECODER
gst_audio_decoder_get_type
GST_AUDIO_DECODER_CLASS
GST_IS_AUDIO_DECODER_CLASS
GST_AUDIO_DECODER_GET_CLASS
GstAudioDecoderPrivate
</SECTION>
<SECTION>
<FILE>gstaudioencoder</FILE>
<INCLUDE>gst/audio/gstaudioencoder.h</INCLUDE>
GstAudioEncoder
GstAudioEncoderClass
GST_AUDIO_ENCODER_SEGMENT
GST_AUDIO_ENCODER_SINK_NAME
GST_AUDIO_ENCODER_SINK_PAD
GST_AUDIO_ENCODER_SRC_NAME
GST_AUDIO_ENCODER_SRC_PAD
gst_audio_encoder_finish_frame
gst_audio_encoder_get_audio_info
gst_audio_encoder_get_frame_max
gst_audio_encoder_get_frame_samples
gst_audio_encoder_get_hard_resync
gst_audio_encoder_get_latency
gst_audio_encoder_get_lookahead
gst_audio_encoder_get_mark_granule
gst_audio_encoder_get_perfect_timestamp
gst_audio_encoder_get_tolerance
gst_audio_encoder_proxy_getcaps
gst_audio_encoder_set_frame_max
gst_audio_encoder_set_frame_samples
gst_audio_encoder_set_hard_resync
gst_audio_encoder_set_latency
gst_audio_encoder_set_lookahead
gst_audio_encoder_set_mark_granule
gst_audio_encoder_set_perfect_timestamp
gst_audio_encoder_set_tolerance
<SUBSECTION Standard>
GST_AUDIO_ENCODER
GST_AUDIO_ENCODER_CAST
GST_IS_AUDIO_ENCODER
GST_TYPE_AUDIO_ENCODER
gst_audio_encoder_get_type
GST_AUDIO_ENCODER_CLASS
GST_IS_AUDIO_ENCODER_CLASS
GST_AUDIO_ENCODER_GET_CLASS
GstAudioEncoderPrivate
</SECTION>
<SECTION>
<FILE>gstaudiofilter</FILE>
<INCLUDE>gst/audio/gstaudiofilter.h</INCLUDE>
@ -465,6 +546,8 @@ KISS_FFT_F64_SIN
<INCLUDE>gst/floatcast/floatcast.h</INCLUDE>
gst_cast_double
gst_cast_float
<SUBSECTION Standard>
inline
</SECTION>
@ -1832,7 +1915,7 @@ GST_IS_TAG_MUX_CLASS
GST_TAG_MUX
GST_TAG_MUX_CLASS
GST_TYPE_TAG_MUX
gst_tag_demux_get_type
gst_tag_mux_get_type
</SECTION>
<SECTION>
@ -1849,6 +1932,7 @@ gst_tag_get_language_code_iso_639_2T
<SECTION>
<FILE>gsttaglicenses</FILE>
<INCLUDE>gst/tag/tag.h</INCLUDE>
GstTagLicenseFlags
gst_tag_get_license_flags
gst_tag_get_license_nick
gst_tag_get_license_title
@ -2138,6 +2222,7 @@ GST_VIDEO_SIZE_RANGE
GST_VIDEO_BUFFER_TFF
GST_VIDEO_BUFFER_RFF
GST_VIDEO_BUFFER_ONEFIELD
GST_VIDEO_BUFFER_PROGRESSIVE
GstVideoFormat
gst_video_calculate_display_ratio
gst_video_frame_rate
@ -2242,6 +2327,7 @@ GstDiscovererStreamInfo
GstDiscovererContainerInfo
GstDiscovererAudioInfo
GstDiscovererVideoInfo
GstDiscovererSubtitleInfo
gst_discoverer_stream_info_get_caps
gst_discoverer_stream_info_get_misc
gst_discoverer_stream_info_get_next
@ -2283,6 +2369,7 @@ GST_DISCOVERER_CONTAINER_INFO
GST_DISCOVERER_INFO
GST_DISCOVERER_STREAM_INFO
GST_DISCOVERER_VIDEO_INFO
GST_DISCOVERER_SUBTITLE_INFO
GST_IS_DISCOVERER
GST_IS_DISCOVERER_INFO
GST_IS_DISCOVERER_AUDIO_INFO
@ -2290,6 +2377,7 @@ GST_IS_DISCOVERER_CLASS
GST_IS_DISCOVERER_CONTAINER_INFO
GST_IS_DISCOVERER_STREAM_INFO
GST_IS_DISCOVERER_VIDEO_INFO
GST_IS_DISCOVERER_SUBTITLE_INFO
GST_TYPE_DISCOVERER
GST_TYPE_DISCOVERER_AUDIO_INFO
GST_TYPE_DISCOVERER_CONTAINER_INFO
@ -2297,12 +2385,14 @@ GST_TYPE_DISCOVERER_INFO
GST_TYPE_DISCOVERER_RESULT
GST_TYPE_DISCOVERER_STREAM_INFO
GST_TYPE_DISCOVERER_VIDEO_INFO
GST_TYPE_DISCOVERER_SUBTITLE_INFO
GstDiscovererAudioInfoClass
GstDiscovererClass
GstDiscovererContainerInfoClass
GstDiscovererPrivate
GstDiscovererStreamInfoClass
GstDiscovererVideoInfoClass
GstDiscovererSubtitleInfoClass
GstDiscovererInfoClass
gst_discoverer_audio_info_get_type
gst_discoverer_container_info_get_type


@ -3,6 +3,10 @@
#include <gst/audio/gstaudioclock.h>
gst_audio_clock_get_type
#include <gst/audio/gstaudiodecoder.h>
gst_audio_decoder_get_type
#include <gst/audio/gstaudioencoder.h>
gst_audio_encoder_get_type
#include <gst/audio/gstaudiofilter.h>
gst_audio_filter_get_type
#include <gst/audio/gstaudiosink.h>


@ -16,14 +16,15 @@ lib_LTLIBRARIES = \
CLEANFILES = $(BUILT_SOURCES)
# FIXME 0.11: rename GstBaseAudioSink to GstAudioBaseSink or merge with GstAudioSink
libgstaudio_@GST_MAJORMINOR@_la_SOURCES = \
audio.c \
gstringbuffer.c \
gstaudioclock.c \
mixerutils.c \
multichannel.c \
gstbaseaudiodecoder.c \
gstbaseaudioencoder.c \
gstaudiodecoder.c \
gstaudioencoder.c \
gstbaseaudiosink.c \
gstbaseaudiosrc.c \
gstaudiofilter.c \
@ -38,8 +39,8 @@ libgstaudio_@GST_MAJORMINOR@include_HEADERS = \
gstringbuffer.h \
gstaudioclock.h \
gstaudiofilter.h \
gstbaseaudiodecoder.h \
gstbaseaudioencoder.h \
gstaudiodecoder.h \
gstaudioencoder.h \
gstbaseaudiosink.h \
gstbaseaudiosrc.h \
gstaudiosink.h \


@ -218,9 +218,9 @@ struct _GstAudioFormatInfo {
#define GST_AUDIO_FORMAT_INFO_NAME(info) ((info)->name)
#define GST_AUDIO_FORMAT_INFO_FLAGS(info) ((info)->flags)
#define GST_AUDIO_FORMAT_INFO_IS_INTEGER(info) ((info)->flags & GST_AUDIO_FORMAT_FLAG_INTEGER)
#define GST_AUDIO_FORMAT_INFO_IS_FLOAT(info) ((info)->flags & GST_AUDIO_FORMAT_FLAG_FLOAT)
#define GST_AUDIO_FORMAT_INFO_IS_SIGNED(info) ((info)->flags & GST_AUDIO_FORMAT_FLAG_SIGNED)
#define GST_AUDIO_FORMAT_INFO_IS_INTEGER(info) !!((info)->flags & GST_AUDIO_FORMAT_FLAG_INTEGER)
#define GST_AUDIO_FORMAT_INFO_IS_FLOAT(info) !!((info)->flags & GST_AUDIO_FORMAT_FLAG_FLOAT)
#define GST_AUDIO_FORMAT_INFO_IS_SIGNED(info) !!((info)->flags & GST_AUDIO_FORMAT_FLAG_SIGNED)
#define GST_AUDIO_FORMAT_INFO_ENDIANNESS(info) ((info)->endianness)
#define GST_AUDIO_FORMAT_INFO_IS_LE(info) ((info)->endianness == G_LITTLE_ENDIAN)
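
The !! added above is worth a note: without it the IS_INTEGER/IS_FLOAT/IS_SIGNED macros evaluate to the raw flag bit (e.g. 2 or 4), so comparing the result against TRUE (1) or storing it in a packed boolean can misbehave; the double negation collapses any set bit to exactly 1. A minimal sketch, assuming an initialised GstAudioFormatInfo pointer obtained elsewhere (gst_audio_format_get_info() in the 0.11 API is one way):

#include <gst/audio/audio.h>

static void
print_format_kind (const GstAudioFormatInfo * info)
{
  /* after this change both macros yield strictly 0 or 1 */
  if (GST_AUDIO_FORMAT_INFO_IS_FLOAT (info) == TRUE)
    g_print ("floating point format\n");
  else if (GST_AUDIO_FORMAT_INFO_IS_INTEGER (info) == TRUE)
    g_print ("integer format\n");
}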

View file

@ -21,11 +21,11 @@
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_BASE_AUDIO_DECODER_H_
#define _GST_BASE_AUDIO_DECODER_H_
#ifndef _GST_AUDIO_DECODER_H_
#define _GST_AUDIO_DECODER_H_
#ifndef GST_USE_UNSTABLE_API
#warning "GstBaseAudioDecoder is unstable API and may change in future."
#warning "GstAudioDecoder is unstable API and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif
@ -35,70 +35,70 @@
G_BEGIN_DECLS
#define GST_TYPE_BASE_AUDIO_DECODER \
(gst_base_audio_decoder_get_type())
#define GST_BASE_AUDIO_DECODER(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_AUDIO_DECODER,GstBaseAudioDecoder))
#define GST_BASE_AUDIO_DECODER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_AUDIO_DECODER,GstBaseAudioDecoderClass))
#define GST_BASE_AUDIO_DECODER_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_AUDIO_DECODER,GstBaseAudioDecoderClass))
#define GST_IS_BASE_AUDIO_DECODER(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_AUDIO_DECODER))
#define GST_IS_BASE_AUDIO_DECODER_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_AUDIO_DECODER))
#define GST_TYPE_AUDIO_DECODER \
(gst_audio_decoder_get_type())
#define GST_AUDIO_DECODER(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_DECODER,GstAudioDecoder))
#define GST_AUDIO_DECODER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AUDIO_DECODER,GstAudioDecoderClass))
#define GST_AUDIO_DECODER_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_AUDIO_DECODER,GstAudioDecoderClass))
#define GST_IS_AUDIO_DECODER(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_DECODER))
#define GST_IS_AUDIO_DECODER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AUDIO_DECODER))
/**
* GST_BASE_AUDIO_DECODER_SINK_NAME:
* GST_AUDIO_DECODER_SINK_NAME:
*
* The name of the templates for the sink pad.
*
* Since: 0.10.36
*/
#define GST_BASE_AUDIO_DECODER_SINK_NAME "sink"
#define GST_AUDIO_DECODER_SINK_NAME "sink"
/**
* GST_BASE_AUDIO_DECODER_SRC_NAME:
* GST_AUDIO_DECODER_SRC_NAME:
*
* The name of the templates for the source pad.
*
* Since: 0.10.36
*/
#define GST_BASE_AUDIO_DECODER_SRC_NAME "src"
#define GST_AUDIO_DECODER_SRC_NAME "src"
/**
* GST_BASE_AUDIO_DECODER_SRC_PAD:
* GST_AUDIO_DECODER_SRC_PAD:
* @obj: base audio codec instance
*
* Gives the pointer to the source #GstPad object of the element.
*
* Since: 0.10.36
*/
#define GST_BASE_AUDIO_DECODER_SRC_PAD(obj) (((GstBaseAudioDecoder *) (obj))->srcpad)
#define GST_AUDIO_DECODER_SRC_PAD(obj) (((GstAudioDecoder *) (obj))->srcpad)
/**
* GST_BASE_AUDIO_DECODER_SINK_PAD:
* GST_AUDIO_DECODER_SINK_PAD:
* @obj: base audio codec instance
*
* Gives the pointer to the sink #GstPad object of the element.
*
* Since: 0.10.36
*/
#define GST_BASE_AUDIO_DECODER_SINK_PAD(obj) (((GstBaseAudioDecoder *) (obj))->sinkpad)
#define GST_AUDIO_DECODER_SINK_PAD(obj) (((GstAudioDecoder *) (obj))->sinkpad)
typedef struct _GstBaseAudioDecoder GstBaseAudioDecoder;
typedef struct _GstBaseAudioDecoderClass GstBaseAudioDecoderClass;
typedef struct _GstAudioDecoder GstAudioDecoder;
typedef struct _GstAudioDecoderClass GstAudioDecoderClass;
typedef struct _GstBaseAudioDecoderPrivate GstBaseAudioDecoderPrivate;
typedef struct _GstAudioDecoderPrivate GstAudioDecoderPrivate;
/* do not use this one, use macro below */
GstFlowReturn _gst_base_audio_decoder_error (GstBaseAudioDecoder *dec, gint weight,
GQuark domain, gint code,
gchar *txt, gchar *debug,
const gchar *file, const gchar *function,
gint line);
GstFlowReturn _gst_audio_decoder_error (GstAudioDecoder *dec, gint weight,
GQuark domain, gint code,
gchar *txt, gchar *debug,
const gchar *file, const gchar *function,
gint line);
/**
* GST_BASE_AUDIO_DECODER_ERROR:
* GST_AUDIO_DECODER_ERROR:
* @el: the base audio decoder element that generates the error
* @weight: element defined weight of the error, added to error count
* @domain: like CORE, LIBRARY, RESOURCE or STREAM (see #gstreamer-GstGError)
@ -120,24 +120,24 @@ GstFlowReturn _gst_base_audio_decoder_error (GstBaseAudioDecoder *dec, gint weig
*
* Since: 0.10.36
*/
#define GST_BASE_AUDIO_DECODER_ERROR(el, w, domain, code, text, debug, ret) \
#define GST_AUDIO_DECODER_ERROR(el, weight, domain, code, text, debug, ret) \
G_STMT_START { \
gchar *__txt = _gst_element_error_printf text; \
gchar *__dbg = _gst_element_error_printf debug; \
GstBaseAudioDecoder *dec = GST_BASE_AUDIO_DECODER (el); \
ret = _gst_base_audio_decoder_error (dec, w, GST_ ## domain ## _ERROR, \
GstAudioDecoder *dec = GST_AUDIO_DECODER (el); \
ret = _gst_audio_decoder_error (dec, weight, GST_ ## domain ## _ERROR, \
GST_ ## domain ## _ERROR_ ## code, __txt, __dbg, __FILE__, \
GST_FUNCTION, __LINE__); \
} G_STMT_END
/**
* GstBaseAudioDecoder:
* GstAudioDecoder:
*
* The opaque #GstBaseAudioDecoder data structure.
* The opaque #GstAudioDecoder data structure.
*
* Since: 0.10.36
*/
struct _GstBaseAudioDecoder
struct _GstAudioDecoder
{
GstElement element;
@ -150,12 +150,13 @@ struct _GstBaseAudioDecoder
GstSegment segment;
/*< private >*/
GstBaseAudioDecoderPrivate *priv;
GstAudioDecoderPrivate *priv;
gpointer _gst_reserved[GST_PADDING_LARGE];
};
/**
* GstBaseAudioDecoderClass:
* GstAudioDecoderClass:
* @element_class: The parent class structure
* @start: Optional.
* Called when the element starts processing.
* Allows opening external resources.
@ -191,87 +192,93 @@ struct _GstBaseAudioDecoder
*
* Since: 0.10.36
*/
struct _GstBaseAudioDecoderClass
struct _GstAudioDecoderClass
{
GstElementClass parent_class;
GstElementClass element_class;
/*< public >*/
/* virtual methods for subclasses */
gboolean (*start) (GstBaseAudioDecoder *dec);
gboolean (*start) (GstAudioDecoder *dec);
gboolean (*stop) (GstBaseAudioDecoder *dec);
gboolean (*stop) (GstAudioDecoder *dec);
gboolean (*set_format) (GstBaseAudioDecoder *dec,
gboolean (*set_format) (GstAudioDecoder *dec,
GstCaps *caps);
GstFlowReturn (*parse) (GstBaseAudioDecoder *dec,
GstFlowReturn (*parse) (GstAudioDecoder *dec,
GstAdapter *adapter,
gint *offset, gint *length);
GstFlowReturn (*handle_frame) (GstBaseAudioDecoder *dec,
GstFlowReturn (*handle_frame) (GstAudioDecoder *dec,
GstBuffer *buffer);
void (*flush) (GstBaseAudioDecoder *dec, gboolean hard);
void (*flush) (GstAudioDecoder *dec, gboolean hard);
GstFlowReturn (*pre_push) (GstBaseAudioDecoder *dec,
GstFlowReturn (*pre_push) (GstAudioDecoder *dec,
GstBuffer **buffer);
gboolean (*event) (GstBaseAudioDecoder *dec,
gboolean (*event) (GstAudioDecoder *dec,
GstEvent *event);
/*< private >*/
gpointer _gst_reserved[GST_PADDING_LARGE];
};
gboolean gst_base_audio_decoder_src_setcaps (GstBaseAudioDecoder * dec,
GstCaps * caps);
GstFlowReturn gst_base_audio_decoder_finish_frame (GstBaseAudioDecoder * dec,
GstBuffer * buf, gint frames);
GType gst_audio_decoder_get_type (void);
GstFlowReturn gst_audio_decoder_finish_frame (GstAudioDecoder * dec,
GstBuffer * buf, gint frames);
/* context parameters */
GstAudioInfo * gst_base_audio_decoder_get_audio_info (GstBaseAudioDecoder * dec);
GstAudioInfo * gst_audio_decoder_get_audio_info (GstAudioDecoder * dec);
void gst_base_audio_decoder_set_plc_aware (GstBaseAudioDecoder * dec,
gboolean plc);
gint gst_base_audio_decoder_get_plc_aware (GstBaseAudioDecoder * dec);
void gst_audio_decoder_set_plc_aware (GstAudioDecoder * dec,
gboolean plc);
void gst_base_audio_decoder_set_byte_time (GstBaseAudioDecoder * dec,
gboolean enabled);
gint gst_base_audio_decoder_get_byte_time (GstBaseAudioDecoder * dec);
gint gst_audio_decoder_get_plc_aware (GstAudioDecoder * dec);
gint gst_base_audio_decoder_get_delay (GstBaseAudioDecoder * dec);
void gst_audio_decoder_set_byte_time (GstAudioDecoder * dec,
gboolean enabled);
void gst_base_audio_decoder_set_max_errors (GstBaseAudioDecoder * enc,
gint num);
gint gst_base_audio_decoder_get_max_errors (GstBaseAudioDecoder * dec);
gint gst_audio_decoder_get_byte_time (GstAudioDecoder * dec);
void gst_base_audio_decoder_set_latency (GstBaseAudioDecoder * dec,
GstClockTime min, GstClockTime max);
void gst_base_audio_decoder_get_latency (GstBaseAudioDecoder * dec,
GstClockTime * min, GstClockTime * max);
gint gst_audio_decoder_get_delay (GstAudioDecoder * dec);
void gst_base_audio_decoder_get_parse_state (GstBaseAudioDecoder * dec,
gboolean * sync, gboolean * eos);
void gst_audio_decoder_set_max_errors (GstAudioDecoder * dec,
gint num);
gint gst_audio_decoder_get_max_errors (GstAudioDecoder * dec);
void gst_audio_decoder_set_latency (GstAudioDecoder * dec,
GstClockTime min,
GstClockTime max);
void gst_audio_decoder_get_latency (GstAudioDecoder * dec,
GstClockTime * min,
GstClockTime * max);
void gst_audio_decoder_get_parse_state (GstAudioDecoder * dec,
gboolean * sync,
gboolean * eos);
/* object properties */
void gst_base_audio_decoder_set_plc (GstBaseAudioDecoder * dec,
gboolean enabled);
gboolean gst_base_audio_decoder_get_plc (GstBaseAudioDecoder * dec);
void gst_audio_decoder_set_plc (GstAudioDecoder * dec,
gboolean enabled);
void gst_base_audio_decoder_set_min_latency (GstBaseAudioDecoder * dec,
gint64 num);
gint64 gst_base_audio_decoder_get_min_latency (GstBaseAudioDecoder * dec);
gboolean gst_audio_decoder_get_plc (GstAudioDecoder * dec);
void gst_base_audio_decoder_set_tolerance (GstBaseAudioDecoder * dec,
gint64 tolerance);
void gst_audio_decoder_set_min_latency (GstAudioDecoder * dec,
gint64 num);
gint64 gst_base_audio_decoder_get_tolerance (GstBaseAudioDecoder * dec);
gint64 gst_audio_decoder_get_min_latency (GstAudioDecoder * dec);
GType gst_base_audio_decoder_get_type (void);
void gst_audio_decoder_set_tolerance (GstAudioDecoder * dec,
gint64 tolerance);
gint64 gst_audio_decoder_get_tolerance (GstAudioDecoder * dec);
G_END_DECLS
#endif
#endif /* _GST_AUDIO_DECODER_H_ */
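
To illustrate the renamed API, here is a minimal sketch of a GstAudioDecoder subclass wired against the header above. It is not part of the commit: the MyDec type and the fake decode_packet() helper are purely illustrative, and pad templates, element registration and state handling are omitted.

#define GST_USE_UNSTABLE_API
#include <gst/gst.h>
#include <gst/audio/gstaudiodecoder.h>

typedef struct _MyDec { GstAudioDecoder parent; } MyDec;
typedef struct _MyDecClass { GstAudioDecoderClass parent_class; } MyDecClass;

G_DEFINE_TYPE (MyDec, my_dec, GST_TYPE_AUDIO_DECODER);

/* stands in for a real codec library call */
static gboolean
decode_packet (GstBuffer * in, GstBuffer ** out)
{
  *out = gst_buffer_ref (in);   /* pretend the input is already decoded PCM */
  return TRUE;
}

static gboolean
my_dec_set_format (GstAudioDecoder * dec, GstCaps * caps)
{
  /* parse caps and configure the codec here */
  return TRUE;
}

static GstFlowReturn
my_dec_handle_frame (GstAudioDecoder * adec, GstBuffer * buffer)
{
  GstBuffer *out = NULL;
  GstFlowReturn ret = GST_FLOW_OK;

  if (buffer == NULL)           /* NULL asks us to drain pending data */
    return GST_FLOW_OK;

  if (!decode_packet (buffer, &out)) {
    /* counts against max-errors instead of erroring out immediately */
    GST_AUDIO_DECODER_ERROR (adec, 1, STREAM, DECODE, (NULL),
        ("could not decode packet"), ret);
    return ret;
  }

  /* hand one decoded frame back to the base class for timestamping/pushing */
  return gst_audio_decoder_finish_frame (adec, out, 1);
}

static void
my_dec_init (MyDec * self)
{
}

static void
my_dec_class_init (MyDecClass * klass)
{
  GstAudioDecoderClass *dec_class = (GstAudioDecoderClass *) klass;

  dec_class->set_format = GST_DEBUG_FUNCPTR (my_dec_set_format);
  dec_class->handle_frame = GST_DEBUG_FUNCPTR (my_dec_handle_frame);
}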


@ -0,0 +1,243 @@
/* GStreamer
* Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
* Copyright (C) 2011 Nokia Corporation. All rights reserved.
* Contact: Stefan Kost <stefan.kost@nokia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_AUDIO_ENCODER_H__
#define __GST_AUDIO_ENCODER_H__
#ifndef GST_USE_UNSTABLE_API
#warning "GstAudioEncoder is unstable API and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif
#include <gst/gst.h>
#include <gst/audio/audio.h>
G_BEGIN_DECLS
#define GST_TYPE_AUDIO_ENCODER (gst_audio_encoder_get_type())
#define GST_AUDIO_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_ENCODER,GstAudioEncoder))
#define GST_AUDIO_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AUDIO_ENCODER,GstAudioEncoderClass))
#define GST_AUDIO_ENCODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_AUDIO_ENCODER,GstAudioEncoderClass))
#define GST_IS_AUDIO_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_ENCODER))
#define GST_IS_AUDIO_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AUDIO_ENCODER))
#define GST_AUDIO_ENCODER_CAST(obj) ((GstAudioEncoder *)(obj))
/**
* GST_AUDIO_ENCODER_SINK_NAME:
*
* the name of the templates for the sink pad
*
* Since: 0.10.36
*/
#define GST_AUDIO_ENCODER_SINK_NAME "sink"
/**
* GST_AUDIO_ENCODER_SRC_NAME:
*
* the name of the templates for the source pad
*
* Since: 0.10.36
*/
#define GST_AUDIO_ENCODER_SRC_NAME "src"
/**
* GST_AUDIO_ENCODER_SRC_PAD:
* @obj: base parse instance
*
* Gives the pointer to the source #GstPad object of the element.
*
* Since: 0.10.36
*/
#define GST_AUDIO_ENCODER_SRC_PAD(obj) (GST_AUDIO_ENCODER_CAST (obj)->srcpad)
/**
* GST_AUDIO_ENCODER_SINK_PAD:
* @obj: base parse instance
*
* Gives the pointer to the sink #GstPad object of the element.
*
* Since: 0.10.36
*/
#define GST_AUDIO_ENCODER_SINK_PAD(obj) (GST_AUDIO_ENCODER_CAST (obj)->sinkpad)
/**
* GST_AUDIO_ENCODER_SEGMENT:
* @obj: base parse instance
*
* Gives the segment of the element.
*
* Since: 0.10.36
*/
#define GST_AUDIO_ENCODER_SEGMENT(obj) (GST_AUDIO_ENCODER_CAST (obj)->segment)
typedef struct _GstAudioEncoder GstAudioEncoder;
typedef struct _GstAudioEncoderClass GstAudioEncoderClass;
typedef struct _GstAudioEncoderPrivate GstAudioEncoderPrivate;
/**
* GstAudioEncoder:
*
* The opaque #GstAudioEncoder data structure.
*
* Since: 0.10.36
*/
struct _GstAudioEncoder {
GstElement element;
/*< protected >*/
/* source and sink pads */
GstPad *sinkpad;
GstPad *srcpad;
/* MT-protected (with STREAM_LOCK) */
GstSegment segment;
/*< private >*/
GstAudioEncoderPrivate *priv;
gpointer _gst_reserved[GST_PADDING_LARGE];
};
/**
* GstAudioEncoderClass:
* @element_class: The parent class structure
* @start: Optional.
* Called when the element starts processing.
* Allows opening external resources.
* @stop: Optional.
* Called when the element stops processing.
* Allows closing external resources.
* @set_format: Notifies subclass of incoming data format.
* GstAudioInfo contains the format according to provided caps.
* @handle_frame: Provides input samples (or NULL to clear any remaining data)
* according to directions as configured by the subclass
* using the API. Input data ref management is performed
* by base class, subclass should not care or intervene.
* @flush: Optional.
* Instructs subclass to clear any codec caches and discard
* any pending samples and not yet returned encoded data.
* @event: Optional.
* Event handler on the sink pad. This function should return
* TRUE if the event was handled and should be discarded
* (i.e. not unref'ed).
* @pre_push: Optional.
* Called just prior to pushing (encoded data) buffer downstream.
* Subclass has full discretionary access to buffer,
* and a not OK flow return will abort downstream pushing.
* @getcaps: Optional.
* Allows for a custom sink getcaps implementation (e.g.
* for multichannel input specification). If not implemented,
* default returns gst_audio_encoder_proxy_getcaps
* applied to sink template caps.
*
* Subclasses can override any of the available virtual methods or not, as
* needed. At minimum @set_format and @handle_frame need to be overridden.
*
* Since: 0.10.36
*/
struct _GstAudioEncoderClass {
GstElementClass element_class;
/*< public >*/
/* virtual methods for subclasses */
gboolean (*start) (GstAudioEncoder *enc);
gboolean (*stop) (GstAudioEncoder *enc);
gboolean (*set_format) (GstAudioEncoder *enc,
GstAudioInfo *info);
GstFlowReturn (*handle_frame) (GstAudioEncoder *enc,
GstBuffer *buffer);
void (*flush) (GstAudioEncoder *enc);
GstFlowReturn (*pre_push) (GstAudioEncoder *enc,
GstBuffer **buffer);
gboolean (*event) (GstAudioEncoder *enc,
GstEvent *event);
GstCaps * (*getcaps) (GstAudioEncoder *enc, GstCaps *filter);
/*< private >*/
gpointer _gst_reserved[GST_PADDING_LARGE];
};
GType gst_audio_encoder_get_type (void);
GstFlowReturn gst_audio_encoder_finish_frame (GstAudioEncoder * enc,
GstBuffer * buffer,
gint samples);
GstCaps * gst_audio_encoder_proxy_getcaps (GstAudioEncoder * enc,
GstCaps * caps);
/* context parameters */
GstAudioInfo * gst_audio_encoder_get_audio_info (GstAudioEncoder * enc);
gint gst_audio_encoder_get_frame_samples (GstAudioEncoder * enc);
void gst_audio_encoder_set_frame_samples (GstAudioEncoder * enc, gint num);
gint gst_audio_encoder_get_frame_max (GstAudioEncoder * enc);
void gst_audio_encoder_set_frame_max (GstAudioEncoder * enc, gint num);
gint gst_audio_encoder_get_lookahead (GstAudioEncoder * enc);
void gst_audio_encoder_set_lookahead (GstAudioEncoder * enc, gint num);
void gst_audio_encoder_get_latency (GstAudioEncoder * enc,
GstClockTime * min,
GstClockTime * max);
void gst_audio_encoder_set_latency (GstAudioEncoder * enc,
GstClockTime min,
GstClockTime max);
/* object properties */
void gst_audio_encoder_set_mark_granule (GstAudioEncoder * enc,
gboolean enabled);
gboolean gst_audio_encoder_get_mark_granule (GstAudioEncoder * enc);
void gst_audio_encoder_set_perfect_timestamp (GstAudioEncoder * enc,
gboolean enabled);
gboolean gst_audio_encoder_get_perfect_timestamp (GstAudioEncoder * enc);
void gst_audio_encoder_set_hard_resync (GstAudioEncoder * enc,
gboolean enabled);
gboolean gst_audio_encoder_get_hard_resync (GstAudioEncoder * enc);
void gst_audio_encoder_set_tolerance (GstAudioEncoder * enc,
gint64 tolerance);
gint64 gst_audio_encoder_get_tolerance (GstAudioEncoder * enc);
G_END_DECLS
#endif /* __GST_AUDIO_ENCODER_H__ */
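
A matching sketch for the encoder side, again illustrative only (MyEnc and the copy-through "encoding" are made up; registration and pad templates are left out). It shows the pattern the class documentation above describes: set_format() tells the base class how many samples one codec frame needs, handle_frame() encodes and returns data with finish_frame().

#define GST_USE_UNSTABLE_API
#include <gst/gst.h>
#include <gst/audio/gstaudioencoder.h>

typedef struct _MyEnc { GstAudioEncoder parent; } MyEnc;
typedef struct _MyEncClass { GstAudioEncoderClass parent_class; } MyEncClass;

G_DEFINE_TYPE (MyEnc, my_enc, GST_TYPE_AUDIO_ENCODER);

static gboolean
my_enc_set_format (GstAudioEncoder * enc, GstAudioInfo * info)
{
  /* pretend the codec consumes fixed 1024-sample frames; the base class
   * will then hand handle_frame() buffers of exactly that size */
  gst_audio_encoder_set_frame_samples (enc, 1024);
  gst_audio_encoder_set_frame_max (enc, 1);
  return TRUE;
}

static GstFlowReturn
my_enc_handle_frame (GstAudioEncoder * enc, GstBuffer * buffer)
{
  GstBuffer *out;

  if (buffer == NULL)           /* NULL asks us to drain pending data */
    return GST_FLOW_OK;

  /* a real subclass would run its codec here; we just copy the input */
  out = gst_buffer_copy (buffer);

  /* report how many input samples this output buffer accounts for */
  return gst_audio_encoder_finish_frame (enc, out, 1024);
}

static void
my_enc_init (MyEnc * self)
{
}

static void
my_enc_class_init (MyEncClass * klass)
{
  GstAudioEncoderClass *enc_class = (GstAudioEncoderClass *) klass;

  enc_class->set_format = GST_DEBUG_FUNCPTR (my_enc_set_format);
  enc_class->handle_frame = GST_DEBUG_FUNCPTR (my_enc_handle_frame);
}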


@ -0,0 +1,262 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Copyright (C) <2003> David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/*
* This file was (probably) generated from
* $Id$
* and
* $Id$
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiofilter.h>
#include <string.h>
GST_DEBUG_CATEGORY_STATIC (audio_filter_template_debug);
#define GST_CAT_DEFAULT audio_filter_template_debug
static const GstElementDetails audio_filter_template_details =
GST_ELEMENT_DETAILS ("Audio filter template",
"Filter/Effect/Audio",
"Filters audio",
"David Schleef <ds@schleef.org>");
typedef struct _GstAudioFilterTemplate GstAudioFilterTemplate;
typedef struct _GstAudioFilterTemplateClass GstAudioFilterTemplateClass;
#define GST_TYPE_AUDIO_FILTER_TEMPLATE \
(gst_audio_filter_template_get_type())
#define GST_AUDIO_FILTER_TEMPLATE(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_FILTER_TEMPLATE,GstAudioFilterTemplate))
#define GST_AUDIO_FILTER_TEMPLATE_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AUDIO_FILTER_TEMPLATE,GstAudioFilterTemplateClass))
#define GST_IS_AUDIO_FILTER_TEMPLATE(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_FILTER_TEMPLATE))
#define GST_IS_AUDIO_FILTER_TEMPLATE_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AUDIO_FILTER_TEMPLATE))
struct _GstAudioFilterTemplate
{
GstAudioFilter audiofilter;
};
struct _GstAudioFilterTemplateClass
{
GstAudioFilterClass parent_class;
};
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
ARG_0
/* FILL ME */
};
GST_BOILERPLATE (GstAudioFilterTemplate, gst_audio_filter_template,
GstAudioFilter, GST_TYPE_AUDIO_FILTER);
static void gst_audio_filter_template_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_audio_filter_template_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static gboolean gst_audio_filter_template_setup (GstAudioFilter * filter,
GstRingBufferSpec * spec);
static GstFlowReturn gst_audio_filter_template_filter (GstBaseTransform * bt,
GstBuffer * outbuf, GstBuffer * inbuf);
static GstFlowReturn
gst_audio_filter_template_filter_inplace (GstBaseTransform * base_transform,
GstBuffer * buf);
#define ALLOWED_CAPS_STRING \
GST_AUDIO_INT_STANDARD_PAD_TEMPLATE_CAPS
static void
gst_audio_filter_template_base_init (gpointer g_class)
{
GstAudioFilterTemplateClass *klass = (GstAudioFilterTemplateClass *) g_class;
GstAudioFilterClass *audiofilter_class = GST_AUDIO_FILTER_CLASS (g_class);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstCaps *caps;
gst_element_class_set_details (element_class, &audio_filter_template_details);
caps = gst_caps_from_string (ALLOWED_CAPS_STRING);
gst_audio_filter_class_add_pad_templates (audiofilter_class, caps);
gst_caps_unref (caps);
}
static void
gst_audio_filter_template_class_init (GstAudioFilterTemplateClass * klass)
{
GObjectClass *gobject_class;
GstBaseTransformClass *btrans_class;
GstAudioFilterClass *audio_filter_class;
gobject_class = (GObjectClass *) klass;
btrans_class = (GstBaseTransformClass *) klass;
audio_filter_class = (GstAudioFilterClass *) klass;
#if 0
g_object_class_install_property (gobject_class, ARG_METHOD,
g_param_spec_enum ("method", "method", "method",
GST_TYPE_AUDIOTEMPLATE_METHOD, GST_AUDIOTEMPLATE_METHOD_1,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
#endif
gobject_class->set_property = gst_audio_filter_template_set_property;
gobject_class->get_property = gst_audio_filter_template_get_property;
/* this function will be called whenever the format changes */
audio_filter_class->setup = gst_audio_filter_template_setup;
/* here you set up functions to process data (either in place, or from
* one input buffer to another output buffer); only one is required */
btrans_class->transform = gst_audio_filter_template_filter;
btrans_class->transform_ip = gst_audio_filter_template_filter_inplace;
}
static void
gst_audio_filter_template_init (GstAudioFilterTemplate * audio_filter_template,
GstAudioFilterTemplateClass * g_class)
{
GST_DEBUG ("init");
/* do stuff if you need to */
}
static void
gst_audio_filter_template_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstAudioFilterTemplate *filter;
filter = GST_AUDIO_FILTER_TEMPLATE (object);
GST_DEBUG ("set property %u", prop_id);
GST_OBJECT_LOCK (filter);
switch (prop_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
GST_OBJECT_UNLOCK (filter);
}
static void
gst_audio_filter_template_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstAudioFilterTemplate *filter;
filter = GST_AUDIO_FILTER_TEMPLATE (object);
GST_DEBUG ("get property %u", prop_id);
GST_OBJECT_LOCK (filter);
switch (prop_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
GST_OBJECT_UNLOCK (filter);
}
static gboolean
gst_audio_filter_template_setup (GstAudioFilter * filter,
GstRingBufferSpec * spec)
{
GstAudioFilterTemplate *audio_filter_template;
audio_filter_template = GST_AUDIO_FILTER_TEMPLATE (filter);
/* if any setup needs to be done, do it here */
return TRUE; /* it's all good */
}
/* You may choose to implement either a copying filter or an
* in-place filter (or both). Implementing only one will give
* full functionality, however, implementing both will cause
* audiofilter to use the optimal function in every situation,
* with a minimum of memory copies. */
static GstFlowReturn
gst_audio_filter_template_filter (GstBaseTransform * base_transform,
GstBuffer * inbuf, GstBuffer * outbuf)
{
GstAudioFilterTemplate *audio_filter_template;
GstAudioFilter *audiofilter;
audiofilter = GST_AUDIO_FILTER (base_transform);
audio_filter_template = GST_AUDIO_FILTER_TEMPLATE (base_transform);
/* do something interesting here. This simply copies the source
* to the destination. */
memcpy (GST_BUFFER_DATA (outbuf), GST_BUFFER_DATA (inbuf),
GST_BUFFER_SIZE (inbuf));
return GST_FLOW_OK;
}
static GstFlowReturn
gst_audio_filter_template_filter_inplace (GstBaseTransform * base_transform,
GstBuffer * buf)
{
GstAudioFilterTemplate *audio_filter_template;
GstAudioFilter *audiofilter;
audiofilter = GST_AUDIO_FILTER (base_transform);
audio_filter_template = GST_AUDIO_FILTER_TEMPLATE (base_transform);
/* do something interesting here. This simply copies the source
* to the destination. */
return GST_FLOW_OK;
}
static gboolean
plugin_init (GstPlugin * plugin)
{
GST_DEBUG_CATEGORY_INIT (audio_filter_template_debug, "audiofilterexample",
0, "audiofilterexample");
return gst_element_register (plugin, "audiofilterexample", GST_RANK_NONE,
GST_TYPE_AUDIO_FILTER_TEMPLATE);
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"gstaudio_filter_template",
"Audio filter template",
plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
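
The template above registers itself as the "audiofilterexample" element, so once the plugin is built and installed it can be exercised with a trivial launcher like the following (illustrative only; a gst-launch one-liner would do the same):

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline;

  gst_init (&argc, &argv);

  /* push a test tone through the template filter and discard the output */
  pipeline = gst_parse_launch
      ("audiotestsrc num-buffers=100 ! audiofilterexample ! fakesink", NULL);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* crude but enough for a smoke test: let it run for a second */
  g_usleep (G_USEC_PER_SEC);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}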


@ -0,0 +1,224 @@
/* GStreamer RTSP extension
* Copyright (C) 2007 Wim Taymans <wim.taymans@gmail.com>
*
* gstrtspextension.c: RTSP extension mechanism
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/**
* SECTION:gstrtspextension
* @short_description: Interface for extending RTSP protocols
*
* <refsect2>
* <para>
* This interface is implemented e.g. by the Windows Media Streaming RTSP
* extension (rtspwms) and the RealMedia RTSP extension (rtspreal).
* </para>
* </refsect2>
*
* Last reviewed on 2007-07-25 (0.10.14)
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstrtsp-marshal.h"
#include "gstrtsp-enumtypes.h"
#include "gstrtspextension.h"
static void gst_rtsp_extension_iface_init (GstRTSPExtension * iface);
enum
{
SIGNAL_SEND,
LAST_SIGNAL
};
static guint gst_rtsp_extension_signals[LAST_SIGNAL] = { 0 };
GType
gst_rtsp_extension_get_type (void)
{
static GType gst_rtsp_extension_type = 0;
if (!gst_rtsp_extension_type) {
static const GTypeInfo gst_rtsp_extension_info = {
sizeof (GstRTSPExtensionInterface),
(GBaseInitFunc) gst_rtsp_extension_iface_init,
NULL,
NULL,
NULL,
NULL,
0,
0,
NULL,
};
gst_rtsp_extension_type = g_type_register_static (G_TYPE_INTERFACE,
"GstRTSPExtension", &gst_rtsp_extension_info, 0);
}
return gst_rtsp_extension_type;
}
static void
gst_rtsp_extension_iface_init (GstRTSPExtension * iface)
{
static gboolean initialized = FALSE;
if (!initialized) {
gst_rtsp_extension_signals[SIGNAL_SEND] =
g_signal_new ("send", G_TYPE_FROM_CLASS (iface),
G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPExtensionInterface,
send), NULL, NULL, gst_rtsp_marshal_ENUM__POINTER_POINTER,
GST_TYPE_RTSP_RESULT, 2, G_TYPE_POINTER, G_TYPE_POINTER);
initialized = TRUE;
}
}
gboolean
gst_rtsp_extension_detect_server (GstRTSPExtension * ext, GstRTSPMessage * resp)
{
GstRTSPExtensionInterface *iface;
gboolean res = TRUE;
iface = GST_RTSP_EXTENSION_GET_IFACE (ext);
if (iface->detect_server)
res = iface->detect_server (ext, resp);
return res;
}
GstRTSPResult
gst_rtsp_extension_before_send (GstRTSPExtension * ext, GstRTSPMessage * req)
{
GstRTSPExtensionInterface *iface;
GstRTSPResult res = GST_RTSP_OK;
iface = GST_RTSP_EXTENSION_GET_IFACE (ext);
if (iface->before_send)
res = iface->before_send (ext, req);
return res;
}
GstRTSPResult
gst_rtsp_extension_after_send (GstRTSPExtension * ext, GstRTSPMessage * req,
GstRTSPMessage * resp)
{
GstRTSPExtensionInterface *iface;
GstRTSPResult res = GST_RTSP_OK;
iface = GST_RTSP_EXTENSION_GET_IFACE (ext);
if (iface->after_send)
res = iface->after_send (ext, req, resp);
return res;
}
GstRTSPResult
gst_rtsp_extension_parse_sdp (GstRTSPExtension * ext, GstSDPMessage * sdp,
GstStructure * s)
{
GstRTSPExtensionInterface *iface;
GstRTSPResult res = GST_RTSP_OK;
iface = GST_RTSP_EXTENSION_GET_IFACE (ext);
if (iface->parse_sdp)
res = iface->parse_sdp (ext, sdp, s);
return res;
}
GstRTSPResult
gst_rtsp_extension_setup_media (GstRTSPExtension * ext, GstSDPMedia * media)
{
GstRTSPExtensionInterface *iface;
GstRTSPResult res = GST_RTSP_OK;
iface = GST_RTSP_EXTENSION_GET_IFACE (ext);
if (iface->setup_media)
res = iface->setup_media (ext, media);
return res;
}
gboolean
gst_rtsp_extension_configure_stream (GstRTSPExtension * ext, GstCaps * caps)
{
GstRTSPExtensionInterface *iface;
gboolean res = TRUE;
iface = GST_RTSP_EXTENSION_GET_IFACE (ext);
if (iface->configure_stream)
res = iface->configure_stream (ext, caps);
return res;
}
GstRTSPResult
gst_rtsp_extension_get_transports (GstRTSPExtension * ext,
GstRTSPLowerTrans protocols, gchar ** transport)
{
GstRTSPExtensionInterface *iface;
GstRTSPResult res = GST_RTSP_OK;
iface = GST_RTSP_EXTENSION_GET_IFACE (ext);
if (iface->get_transports)
res = iface->get_transports (ext, protocols, transport);
return res;
}
GstRTSPResult
gst_rtsp_extension_stream_select (GstRTSPExtension * ext, GstRTSPUrl * url)
{
GstRTSPExtensionInterface *iface;
GstRTSPResult res = GST_RTSP_OK;
iface = GST_RTSP_EXTENSION_GET_IFACE (ext);
if (iface->stream_select)
res = iface->stream_select (ext, url);
return res;
}
GstRTSPResult
gst_rtsp_extension_receive_request (GstRTSPExtension * ext,
GstRTSPMessage * msg)
{
GstRTSPExtensionInterface *iface;
GstRTSPResult res = GST_RTSP_ENOTIMPL;
iface = GST_RTSP_EXTENSION_GET_IFACE (ext);
if (iface->receive_request)
res = iface->receive_request (ext, msg);
return res;
}
GstRTSPResult
gst_rtsp_extension_send (GstRTSPExtension * ext, GstRTSPMessage * req,
GstRTSPMessage * resp)
{
GstRTSPResult res = GST_RTSP_OK;
g_signal_emit (ext, gst_rtsp_extension_signals[SIGNAL_SEND], 0,
req, resp, &res);
return res;
}
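
For context, this is roughly how an RTSP protocol extension element plugs into the interface dispatched above. The element name (MyExt), the Server header string and the exact include path are assumptions; only the vfunc name and the message helpers come from the code in this file and the public RTSP message API.

#include <string.h>
#include <gst/gst.h>
#include <gst/rtsp/gstrtspextension.h>

typedef struct _MyExt { GstElement parent; } MyExt;
typedef struct _MyExtClass { GstElementClass parent_class; } MyExtClass;

static gboolean
my_ext_detect_server (GstRTSPExtension * ext, GstRTSPMessage * resp)
{
  gchar *server = NULL;

  /* only enable this extension when the server announces itself */
  gst_rtsp_message_get_header (resp, GST_RTSP_HDR_SERVER, &server, 0);
  return server != NULL && strstr (server, "Example-Server") != NULL;
}

static void
my_ext_extension_init (GstRTSPExtensionInterface * iface)
{
  /* vfuncs left NULL fall back to the defaults in the wrappers above */
  iface->detect_server = my_ext_detect_server;
}

G_DEFINE_TYPE_WITH_CODE (MyExt, my_ext, GST_TYPE_ELEMENT,
    G_IMPLEMENT_INTERFACE (GST_TYPE_RTSP_EXTENSION, my_ext_extension_init));

static void
my_ext_class_init (MyExtClass * klass)
{
}

static void
my_ext_init (MyExt * self)
{
}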


@ -0,0 +1,61 @@
/* GStreamer Audio Process
* Copyright (C) 2010 Wim Taymans <wim.taymans@gmail.com>
*
* gstaudioprocess.h: Audio processing extension
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_AUDIO_PROCESS_H__
#define __GST_AUDIO_PROCESS_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#define GST_TYPE_AUDIO_PROCESS \
(gst_audio_process_get_type ())
#define GST_AUDIO_PROCESS(obj) \
(GST_IMPLEMENTS_INTERFACE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_AUDIO_PROCESS, GstAudioProcess))
#define GST_IS_AUDIO_PROCESS(obj) \
(GST_IMPLEMENTS_INTERFACE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AUDIO_PROCESS))
#define GST_AUDIO_PROCESS_GET_IFACE(inst) \
(G_TYPE_INSTANCE_GET_INTERFACE ((inst), GST_TYPE_AUDIO_PROCESS, GstAudioProcessInterface))
typedef struct _GstAudioProcess GstAudioProcess;
typedef struct _GstAudioProcessInterface GstAudioProcessInterface;
struct _GstAudioProcessInterface {
GTypeInterface parent;
/* vfunctions */
gint (*activate) (GstAudioProcess *process, gboolean active);
gint (*process) (GstAudioProcess *process, gpointer src_in, gpointer sink_in,
gpointer src_out, guint length);
/*< private >*/
gpointer _gst_reserved[GST_PADDING];
};
GType gst_audio_process_get_type (void);
/* invoke vfunction on interface */
gint gst_audio_process_process (GstAudioProcess *ext, gpointer src_in, gpointer sink_in,
gpointer src_out, guint length);
G_END_DECLS
#endif /* __GST_AUDIO_PROCESS_H__ */
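
The interface above is bare-bones, so purely as an illustration, here is how an element might fill in its two vfuncs. The function names, the include path and the echo-cancellation reading of the src_in/sink_in/src_out parameters are all assumptions; the interface_init function would be hooked up with G_IMPLEMENT_INTERFACE (GST_TYPE_AUDIO_PROCESS, ...) in the element's type definition, as in the RTSP extension sketch earlier.

#include <string.h>
#include <gst/gst.h>
#include <gst/audio/gstaudioprocess.h>   /* assumed install path */

static gint
my_element_activate (GstAudioProcess * process, gboolean active)
{
  /* allocate or release processing state when (de)activated */
  return 0;
}

static gint
my_element_process (GstAudioProcess * process, gpointer src_in,
    gpointer sink_in, gpointer src_out, guint length)
{
  /* placeholder: pass the capture samples through unchanged; a real
   * implementation (e.g. echo cancellation) would combine the playback
   * (src_in) and capture (sink_in) streams here */
  memcpy (src_out, sink_in, length);
  return length;
}

static void
my_element_audio_process_init (GstAudioProcessInterface * iface)
{
  iface->activate = my_element_activate;
  iface->process = my_element_process;
}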


@ -0,0 +1,216 @@
/* GStreamer
* Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
* 2005 Wim Taymans <wim@fluendo.com>
*
* gstaudioringbuffer.c: simple audio ringbuffer base class
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include <string.h>
#include "gstaudioringbuffer.h"
GST_DEBUG_CATEGORY_STATIC (gst_audio_ring_buffer_debug);
#define GST_CAT_DEFAULT gst_audio_ring_buffer_debug
static void gst_audio_ring_buffer_class_init (GstAudioRingBufferClass * klass);
static void gst_audio_ring_buffer_init (GstAudioRingBuffer * ringbuffer,
GstAudioRingBufferClass * klass);
static void gst_audio_ring_buffer_dispose (GObject * object);
static void gst_audio_ring_buffer_finalize (GObject * object);
static GstRingBufferClass *ring_parent_class = NULL;
static gboolean gst_audio_ring_buffer_start (GstRingBuffer * buf);
static gboolean gst_audio_ring_buffer_pause (GstRingBuffer * buf);
static gboolean gst_audio_ring_buffer_stop (GstRingBuffer * buf);
static gboolean gst_audio_ring_buffer_activate (GstRingBuffer * buf,
gboolean active);
/* ringbuffer abstract base class */
GType
gst_audio_ring_buffer_get_type (void)
{
static GType ringbuffer_type = 0;
if (!ringbuffer_type) {
static const GTypeInfo ringbuffer_info = {
sizeof (GstAudioRingBufferClass),
NULL,
NULL,
(GClassInitFunc) gst_audio_ring_buffer_class_init,
NULL,
NULL,
sizeof (GstAudioRingBuffer),
0,
(GInstanceInitFunc) gst_audio_ring_buffer_init,
NULL
};
ringbuffer_type =
g_type_register_static (GST_TYPE_RING_BUFFER, "GstAudioRingBuffer",
&ringbuffer_info, G_TYPE_FLAG_ABSTRACT);
GST_DEBUG_CATEGORY_INIT (gst_audio_ring_buffer_debug, "audioringbuffer", 0,
"audio ringbuffer");
}
return ringbuffer_type;
}
static void
gst_audio_ring_buffer_class_init (GstAudioRingBufferClass * klass)
{
GObjectClass *gobject_class;
GstRingBufferClass *gstringbuffer_class;
gobject_class = (GObjectClass *) klass;
gstringbuffer_class = (GstRingBufferClass *) klass;
ring_parent_class = g_type_class_peek_parent (klass);
gobject_class->dispose = gst_audio_ring_buffer_dispose;
gobject_class->finalize = gst_audio_ring_buffer_finalize;
gstringbuffer_class->start = GST_DEBUG_FUNCPTR (gst_audio_ring_buffer_start);
gstringbuffer_class->pause = GST_DEBUG_FUNCPTR (gst_audio_ring_buffer_pause);
gstringbuffer_class->resume = GST_DEBUG_FUNCPTR (gst_audio_ring_buffer_start);
gstringbuffer_class->stop = GST_DEBUG_FUNCPTR (gst_audio_ring_buffer_stop);
gstringbuffer_class->activate =
GST_DEBUG_FUNCPTR (gst_audio_ring_buffer_activate);
}
static void
gst_audio_ring_buffer_init (GstAudioRingBuffer * ringbuffer,
GstAudioRingBufferClass * g_class)
{
}
static void
gst_audio_ring_buffer_dispose (GObject * object)
{
G_OBJECT_CLASS (ring_parent_class)->dispose (object);
}
static void
gst_audio_ring_buffer_finalize (GObject * object)
{
G_OBJECT_CLASS (ring_parent_class)->finalize (object);
}
static gboolean
gst_audio_ring_buffer_activate (GstRingBuffer * buf, gboolean active)
{
GstAudioRingBuffer *abuf;
gboolean res;
abuf = GST_AUDIO_RING_BUFFER_CAST (buf);
GST_OBJECT_UNLOCK (buf);
res = gst_ring_buffer_thread_activate (abuf->thread, active);
GST_OBJECT_LOCK (buf);
return res;
}
gboolean
gst_audio_ring_buffer_set_thread (GstAudioRingBuffer * buf,
GstRingBufferThread * thread)
{
GstRingBufferThread *old;
g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
old = buf->thread;
if (thread)
gst_object_ref (thread);
buf->thread = thread;
if (old)
gst_object_unref (old);
if (thread)
gst_ring_buffer_thread_set_ringbuffer (thread, buf);
return TRUE;
}
gboolean
gst_audio_ring_buffer_link (GstAudioRingBuffer * buf1,
GstAudioRingBuffer * buf2)
{
buf1->link = buf2;
buf2->link = buf1;
return TRUE;
}
static gboolean
gst_audio_ring_buffer_start (GstRingBuffer * buf)
{
GstAudioRingBuffer *abuf;
abuf = GST_AUDIO_RING_BUFFER_CAST (buf);
GST_DEBUG_OBJECT (buf, "start, sending signal");
return gst_ring_buffer_thread_start (abuf->thread);
}
static gboolean
gst_audio_ring_buffer_pause (GstRingBuffer * buf)
{
GstAudioRingBuffer *abuf;
GstAudioRingBufferClass *cbuf;
abuf = GST_AUDIO_RING_BUFFER_CAST (buf);
cbuf = GST_AUDIO_RING_BUFFER_GET_CLASS (abuf);
/* unblock any pending writes to the audio device */
if (cbuf->reset) {
GST_DEBUG_OBJECT (abuf, "reset...");
cbuf->reset (abuf);
GST_DEBUG_OBJECT (abuf, "reset done");
}
return TRUE;
}
static gboolean
gst_audio_ring_buffer_stop (GstRingBuffer * buf)
{
GstAudioRingBuffer *abuf;
GstAudioRingBufferClass *cbuf;
abuf = GST_AUDIO_RING_BUFFER_CAST (buf);
cbuf = GST_AUDIO_RING_BUFFER_GET_CLASS (abuf);
/* unblock any pending writes to the audio device */
if (cbuf->reset) {
GST_DEBUG_OBJECT (abuf, "reset...");
cbuf->reset (abuf);
GST_DEBUG_OBJECT (abuf, "reset done");
}
#if 0
if (abuf->running) {
GST_DEBUG_OBJECT (sink, "stop, waiting...");
GST_AUDIO_RING_BUFFER_WAIT (buf);
GST_DEBUG_OBJECT (sink, "stopped");
}
#endif
return TRUE;
}


@ -0,0 +1,98 @@
/* GStreamer
* Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
* 2005 Wim Taymans <wim@fluendo.com>
*
* gstaudioringbuffer.h:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_AUDIO_RING_BUFFER_H__
#define __GST_AUDIO_RING_BUFFER_H__
#include <gst/gst.h>
#include <gst/audio/gstringbuffer.h>
G_BEGIN_DECLS
#define GST_TYPE_AUDIO_RING_BUFFER (gst_audio_ring_buffer_get_type())
#define GST_AUDIO_RING_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_RING_BUFFER,GstAudioRingBuffer))
#define GST_AUDIO_RING_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AUDIO_RING_BUFFER,GstAudioRingBufferClass))
#define GST_AUDIO_RING_BUFFER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_AUDIO_RING_BUFFER,GstAudioRingBufferClass))
#define GST_IS_AUDIO_RING_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_RING_BUFFER))
#define GST_IS_AUDIO_RING_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AUDIO_RING_BUFFER))
#define GST_AUDIO_RING_BUFFER_CAST(obj) ((GstAudioRingBuffer *)obj)
typedef struct _GstAudioRingBuffer GstAudioRingBuffer;
typedef struct _GstAudioRingBufferClass GstAudioRingBufferClass;
#include <gst/audio/gstringbufferthread.h>
typedef enum {
GST_AUDIO_RING_BUFFER_MODE_UNKNOWN,
GST_AUDIO_RING_BUFFER_MODE_PLAYBACK,
GST_AUDIO_RING_BUFFER_MODE_CAPTURE
} GstAudioRingBufferMode;
/**
* GstAudioRingBuffer:
*
* Opaque #GstAudioRingBuffer.
*/
struct _GstAudioRingBuffer {
GstRingBuffer element;
/*< protected >*/
GstAudioRingBufferMode mode;
GstRingBufferThread *thread;
GstAudioRingBuffer *link;
/*< private >*/
gpointer _gst_reserved[GST_PADDING];
};
/**
* GstAudioRingBufferClass:
* @parent_class: the parent class structure.
* @process: Write/Read data to/from the device.
* @reset: Returns as quickly as possible from a write/read and flush any pending
* samples from the device.
*
* #GstAudioRingBuffer class. Override the vmethods to implement functionality.
*/
struct _GstAudioRingBufferClass {
GstRingBufferClass parent_class;
/* vtable */
/* write/read samples to the device */
gint (*process) (GstAudioRingBuffer *buf, gpointer data, guint length);
/* reset the audio device, unblock from a read/write */
void (*reset) (GstAudioRingBuffer *buf);
/*< private >*/
gpointer _gst_reserved[GST_PADDING];
};
GType gst_audio_ring_buffer_get_type(void);
gboolean gst_audio_ring_buffer_link (GstAudioRingBuffer *buf1, GstAudioRingBuffer *buf2);
gboolean gst_audio_ring_buffer_set_thread (GstAudioRingBuffer *buf, GstRingBufferThread *thread);
G_END_DECLS
#endif /* __GST_AUDIO_RING_BUFFER_H__ */
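
To show how this class is meant to be used, a sketch of a sink-side subclass that overrides only the two vmethods documented above; the MySinkRingBuffer type, the file-descriptor device and the include path are illustrative assumptions.

#include <unistd.h>
#include <gst/gst.h>
#include <gst/audio/gstaudioringbuffer.h>   /* assumed install path */

typedef struct _MySinkRingBuffer {
  GstAudioRingBuffer parent;
  int fd;                         /* some blocking audio device */
} MySinkRingBuffer;
typedef struct _MySinkRingBufferClass {
  GstAudioRingBufferClass parent_class;
} MySinkRingBufferClass;

G_DEFINE_TYPE (MySinkRingBuffer, my_sink_ring_buffer,
    GST_TYPE_AUDIO_RING_BUFFER);

static gint
my_sink_ring_buffer_process (GstAudioRingBuffer * buf, gpointer data,
    guint length)
{
  MySinkRingBuffer *self = (MySinkRingBuffer *) buf;

  /* playback mode: the ringbuffer thread calls this repeatedly to push
   * one chunk of samples to the device */
  return write (self->fd, data, length);
}

static void
my_sink_ring_buffer_reset (GstAudioRingBuffer * buf)
{
  /* unblock a pending write(), e.g. via an ioctl on the device;
   * nothing to do in this sketch */
}

static void
my_sink_ring_buffer_init (MySinkRingBuffer * self)
{
  self->fd = -1;
}

static void
my_sink_ring_buffer_class_init (MySinkRingBufferClass * klass)
{
  GstAudioRingBufferClass *rb_class = (GstAudioRingBufferClass *) klass;

  rb_class->process = my_sink_ring_buffer_process;
  rb_class->reset = my_sink_ring_buffer_reset;
}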


@ -1,235 +0,0 @@
/* GStreamer
* Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
* Copyright (C) 2011 Nokia Corporation. All rights reserved.
* Contact: Stefan Kost <stefan.kost@nokia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_BASE_AUDIO_ENCODER_H__
#define __GST_BASE_AUDIO_ENCODER_H__
#ifndef GST_USE_UNSTABLE_API
#warning "GstBaseAudioEncoder is unstable API and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif
#include <gst/gst.h>
#include <gst/audio/audio.h>
G_BEGIN_DECLS
#define GST_TYPE_BASE_AUDIO_ENCODER (gst_base_audio_encoder_get_type())
#define GST_BASE_AUDIO_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_AUDIO_ENCODER,GstBaseAudioEncoder))
#define GST_BASE_AUDIO_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_AUDIO_ENCODER,GstBaseAudioEncoderClass))
#define GST_BASE_AUDIO_ENCODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_AUDIO_ENCODER,GstBaseAudioEncoderClass))
#define GST_IS_BASE_AUDIO_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_AUDIO_ENCODER))
#define GST_IS_BASE_AUDIO_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_AUDIO_ENCODER))
#define GST_BASE_AUDIO_ENCODER_CAST(obj) ((GstBaseAudioEncoder *)(obj))
/**
* GST_BASE_AUDIO_ENCODER_SINK_NAME:
*
* the name of the templates for the sink pad
*
* Since: 0.10.36
*/
#define GST_BASE_AUDIO_ENCODER_SINK_NAME "sink"
/**
* GST_BASE_AUDIO_ENCODER_SRC_NAME:
*
* the name of the templates for the source pad
*
* Since: 0.10.36
*/
#define GST_BASE_AUDIO_ENCODER_SRC_NAME "src"
/**
* GST_BASE_AUDIO_ENCODER_SRC_PAD:
* @obj: base parse instance
*
* Gives the pointer to the source #GstPad object of the element.
*
* Since: 0.10.36
*/
#define GST_BASE_AUDIO_ENCODER_SRC_PAD(obj) (GST_BASE_AUDIO_ENCODER_CAST (obj)->srcpad)
/**
* GST_BASE_AUDIO_ENCODER_SINK_PAD:
* @obj: base parse instance
*
* Gives the pointer to the sink #GstPad object of the element.
*
* Since: 0.10.36
*/
#define GST_BASE_AUDIO_ENCODER_SINK_PAD(obj) (GST_BASE_AUDIO_ENCODER_CAST (obj)->sinkpad)
/**
* GST_BASE_AUDIO_ENCODER_SEGMENT:
* @obj: base parse instance
*
* Gives the segment of the element.
*
* Since: 0.10.36
*/
#define GST_BASE_AUDIO_ENCODER_SEGMENT(obj) (GST_BASE_AUDIO_ENCODER_CAST (obj)->segment)
typedef struct _GstBaseAudioEncoder GstBaseAudioEncoder;
typedef struct _GstBaseAudioEncoderClass GstBaseAudioEncoderClass;
typedef struct _GstBaseAudioEncoderPrivate GstBaseAudioEncoderPrivate;
/**
* GstBaseAudioEncoder:
* @element: the parent element.
*
* The opaque #GstBaseAudioEncoder data structure.
*
* Since: 0.10.36
*/
struct _GstBaseAudioEncoder {
GstElement element;
/*< protected >*/
/* source and sink pads */
GstPad *sinkpad;
GstPad *srcpad;
/* MT-protected (with STREAM_LOCK) */
GstSegment segment;
/*< private >*/
GstBaseAudioEncoderPrivate *priv;
gpointer _gst_reserved[GST_PADDING_LARGE];
};
/**
* GstBaseAudioEncoderClass:
* @start: Optional.
* Called when the element starts processing.
* Allows opening external resources.
* @stop: Optional.
* Called when the element stops processing.
* Allows closing external resources.
* @set_format: Notifies subclass of incoming data format.
* GstAudioInfo contains the format according to provided caps.
* @handle_frame: Provides input samples (or NULL to clear any remaining data)
* according to directions as provided by subclass in the
* #GstBaseAudioEncoderContext. Input data ref management
* is performed by base class, subclass should not care or
* intervene.
* @flush: Optional.
* Instructs subclass to clear any codec caches and discard
* any pending samples and not yet returned encoded data.
* @event: Optional.
* Event handler on the sink pad. This function should return
* TRUE if the event was handled and should be discarded
* (i.e. not unref'ed).
* @pre_push: Optional.
* Called just prior to pushing (encoded data) buffer downstream.
* Subclass has full discretionary access to buffer,
* and a not OK flow return will abort downstream pushing.
* @getcaps: Optional.
* Allows for a custom sink getcaps implementation (e.g.
* for multichannel input specification). If not implemented,
* default returns gst_base_audio_encoder_proxy_getcaps
* applied to sink template caps.
*
* Subclasses can override any of the available virtual methods or not, as
* needed. At minimum @set_format and @handle_frame need to be overridden.
*
* Since: 0.10.36
*/
struct _GstBaseAudioEncoderClass {
GstElementClass parent_class;
/*< public >*/
/* virtual methods for subclasses */
gboolean (*start) (GstBaseAudioEncoder *enc);
gboolean (*stop) (GstBaseAudioEncoder *enc);
gboolean (*set_format) (GstBaseAudioEncoder *enc,
GstAudioInfo *info);
GstFlowReturn (*handle_frame) (GstBaseAudioEncoder *enc,
GstBuffer *buffer);
void (*flush) (GstBaseAudioEncoder *enc);
GstFlowReturn (*pre_push) (GstBaseAudioEncoder *enc,
GstBuffer **buffer);
gboolean (*event) (GstBaseAudioEncoder *enc,
GstEvent *event);
GstCaps * (*getcaps) (GstBaseAudioEncoder *enc, GstCaps * filter);
/*< private >*/
gpointer _gst_reserved[GST_PADDING_LARGE];
};
GType gst_base_audio_encoder_get_type (void);
GstFlowReturn gst_base_audio_encoder_finish_frame (GstBaseAudioEncoder * enc,
GstBuffer *buffer, gint samples);
GstCaps * gst_base_audio_encoder_proxy_getcaps (GstBaseAudioEncoder * enc,
GstCaps * caps);
/* context parameters */
GstAudioInfo * gst_base_audio_encoder_get_audio_info (GstBaseAudioEncoder * enc);
gint gst_base_audio_encoder_get_frame_samples (GstBaseAudioEncoder * enc);
void gst_base_audio_encoder_set_frame_samples (GstBaseAudioEncoder * enc,
gint num);
gint gst_base_audio_encoder_get_frame_max (GstBaseAudioEncoder * enc);
void gst_base_audio_encoder_set_frame_max (GstBaseAudioEncoder * enc,
gint num);
gint gst_base_audio_encoder_get_lookahead (GstBaseAudioEncoder * enc);
void gst_base_audio_encoder_set_lookahead (GstBaseAudioEncoder * enc,
gint num);
void gst_base_audio_encoder_get_latency (GstBaseAudioEncoder * enc,
GstClockTime * min, GstClockTime * max);
void gst_base_audio_encoder_set_latency (GstBaseAudioEncoder * enc,
GstClockTime min, GstClockTime max);
/* object properties */
void gst_base_audio_encoder_set_mark_granule (GstBaseAudioEncoder * enc,
gboolean enabled);
gboolean gst_base_audio_encoder_get_mark_granule (GstBaseAudioEncoder * enc);
void gst_base_audio_encoder_set_perfect_timestamp (GstBaseAudioEncoder * enc,
gboolean enabled);
gboolean gst_base_audio_encoder_get_perfect_timestamp (GstBaseAudioEncoder * enc);
void gst_base_audio_encoder_set_hard_resync (GstBaseAudioEncoder * enc,
gboolean enabled);
gboolean gst_base_audio_encoder_get_hard_resync (GstBaseAudioEncoder * enc);
void gst_base_audio_encoder_set_tolerance (GstBaseAudioEncoder * enc,
gint64 tolerance);
gint64 gst_base_audio_encoder_get_tolerance (GstBaseAudioEncoder * enc);
G_END_DECLS
#endif /* __GST_BASE_AUDIO_ENCODER_H__ */

File diff suppressed because it is too large.


@ -0,0 +1,127 @@
*************** gst_base_audio_src_create (GstBaseSrc * bsrc, guint64 offset, guint length,
*** 865,935 ****
running_time_segment = running_time_sample / sps;
/* the segment currently read from the ringbuffer */
- current_segment = sample / sps;
-
- /* the skew we have between running_time and the ringbuffertime */
- segment_skew = running_time_segment - current_segment;
-
- GST_DEBUG_OBJECT (bsrc, "\n running_time = %" GST_TIME_FORMAT
- "\n timestamp = %" GST_TIME_FORMAT
- "\n running_time_segment = %d"
- "\n current_segment = %d"
- "\n segment_skew = %d",
GST_TIME_ARGS (running_time),
GST_TIME_ARGS (timestamp),
- running_time_segment, current_segment, segment_skew);
/* Resync the ringbuffer if:
- * 1. We are more than the length of the ringbuffer in front.
- * The length of the ringbuffer then gets to dictate
- * the threshold for what is concidered "too far ahead"
- *
- * 2. We are more than the length of the ringbuffer behind.
* The length of the ringbuffer then gets to dictate
* the threshold for what is considered "too late"
*
- * 3. If this is our first buffer.
* We know that we should catch up to running_time
* the first time we are run.
*/
- if ((segment_skew <= -ringbuffer->spec.segtotal) ||
- (segment_skew >= ringbuffer->spec.segtotal) ||
- (current_segment == 0)) {
- gint segments_written;
- gint first_segment;
- gint last_segment;
- gint new_last_segment;
gint segment_diff;
- gint new_first_segment;
guint64 new_sample;
- /* we are going to say that the last segment was captured at the current time
- (running_time), minus one segment of creation-latency in the ringbuffer.
- This can be thought of as: The segment arrived in the ringbuffer at time X, and
- that means it was created at time X - (one segment). */
- new_last_segment = running_time_segment - 1;
-
- /* for better readablity */
- first_segment = current_segment;
-
- /* get the amount of segments written from the device by now */
- segments_written = g_atomic_int_get (&ringbuffer->segdone);
-
- /* subtract the base to segments_written to get the number of the
- last written segment in the ringbuffer (one segment written = segment 0) */
- last_segment = segments_written - ringbuffer->segbase - 1;
-
- /* we see how many segments the ringbuffer was timeshifted */
- segment_diff = new_last_segment - last_segment;
- /* we move the first segment an equal amount */
- new_first_segment = first_segment + segment_diff;
- /* and we also move the segmentbase the same amount */
- ringbuffer->segbase -= segment_diff;
/* we calculate the new sample value */
- new_sample = ((guint64) new_first_segment) * sps;
/* and get the relative time to this -> our new timestamp */
timestamp =
--- 874,926 ----
running_time_segment = running_time_sample / sps;
/* the segment currently read from the ringbuffer */
+ last_read_segment = sample / sps;
+
+ /* the skew we have between running_time and the ringbuffertime (last written to) */
+ segment_skew = running_time_segment - last_written_segment;
+
+ GST_DEBUG_OBJECT (bsrc,
+ "\n running_time = %" GST_TIME_FORMAT
+ "\n timestamp = %" GST_TIME_FORMAT
+ "\n running_time_segment = %d"
+ "\n last_written_segment = %d"
+ "\n segment_skew (running time segment - last_written_segment) = %d"
+ "\n last_read_segment = %d",
GST_TIME_ARGS (running_time),
GST_TIME_ARGS (timestamp),
+ running_time_segment,
+ last_written_segment,
+ segment_skew,
+ last_read_segment);
/* Resync the ringbuffer if:
+
+ * 1. We are more than the length of the ringbuffer behind.
* The length of the ringbuffer then gets to dictate
* the threshold for what is considered "too late"
*
+ * 2. If this is our first buffer.
* We know that we should catch up to running_time
* the first time we are run.
*/
+ if ((segment_skew >= ringbuffer->spec.segtotal) ||
+ (last_read_segment == 0))
+ {
+ gint new_read_segment;
gint segment_diff;
guint64 new_sample;
+ /* the difference between running_time and the last written segment */
+ segment_diff = running_time_segment - last_written_segment;
+ /* advance the ringbuffer */
+ gst_ring_buffer_advance(ringbuffer, segment_diff);
+ /* we move the new read segment to the last known written segment */
+ new_read_segment = g_atomic_int_get (&ringbuffer->segdone) - ringbuffer->segbase;
/* we calculate the new sample value */
+ new_sample = ((guint64) new_read_segment) * sps;
/* and get the relative time to this -> our new timestamp */
timestamp =
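
To make the resync rule above concrete, here is a small standalone illustration of the segment arithmetic; sps, segtotal and the segment counters are invented numbers, not taken from any real device.

#include <glib.h>

int
main (void)
{
  gint sps = 1024;                /* samples per segment (assumed) */
  gint segtotal = 8;              /* ringbuffer length in segments (assumed) */
  guint64 running_time_sample = 90112;  /* where the clock says we should be */
  gint last_written_segment = 70; /* segdone - segbase - 1 */
  gint last_read_segment = 69;    /* sample / sps */

  gint running_time_segment = running_time_sample / sps;            /* 88 */
  gint segment_skew = running_time_segment - last_written_segment;  /* 18 */

  if (segment_skew >= segtotal || last_read_segment == 0) {
    /* we lag more than one whole ringbuffer (18 >= 8): advance the read
     * pointer by the difference so capture resumes at running_time */
    g_print ("resync, advance by %d segments\n",
        running_time_segment - last_written_segment);
  } else {
    g_print ("in sync, keep reading\n");
  }
  return 0;
}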

View file

@ -0,0 +1,362 @@
/* GStreamer
* Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
* 2005 Wim Taymans <wim@fluendo.com>
*
* gstringbufferthread.c: thread reading/writing audio ringbuffer segments
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include <string.h>
#include "gstringbufferthread.h"
GST_DEBUG_CATEGORY_STATIC (gst_ring_buffer_thread_debug);
#define GST_CAT_DEFAULT gst_ring_buffer_thread_debug
static void gst_ring_buffer_thread_class_init (GstRingBufferThreadClass *
klass);
static void gst_ring_buffer_thread_init (GstRingBufferThread * ringbuffer,
GstRingBufferThreadClass * klass);
static void gst_ring_buffer_thread_dispose (GObject * object);
static void gst_ring_buffer_thread_finalize (GObject * object);
static GstRingBufferClass *ring_parent_class = NULL;
GType
gst_ring_buffer_thread_get_type (void)
{
static GType ringbuffer_type = 0;
if (!ringbuffer_type) {
static const GTypeInfo ringbuffer_info = {
sizeof (GstRingBufferThreadClass),
NULL,
NULL,
(GClassInitFunc) gst_ring_buffer_thread_class_init,
NULL,
NULL,
sizeof (GstRingBufferThread),
0,
(GInstanceInitFunc) gst_ring_buffer_thread_init,
NULL
};
ringbuffer_type =
g_type_register_static (GST_TYPE_OBJECT, "GstRingBufferThread",
&ringbuffer_info, 0);
GST_DEBUG_CATEGORY_INIT (gst_ring_buffer_thread_debug, "ringbufferthread",
0, "ringbuffer thread");
}
return ringbuffer_type;
}
static void
gst_ring_buffer_thread_class_init (GstRingBufferThreadClass * klass)
{
GObjectClass *gobject_class;
gobject_class = (GObjectClass *) klass;
ring_parent_class = g_type_class_peek_parent (klass);
gobject_class->dispose = gst_ring_buffer_thread_dispose;
gobject_class->finalize = gst_ring_buffer_thread_finalize;
}
typedef gint (*ProcessFunc) (GstAudioRingBuffer * buf, gpointer data,
guint length);
/* this internal thread writes samples from the playback ringbuffer to the
* audio device and reads captured samples into the capture ringbuffer,
* one segment at a time, advancing the ringbuffers as it goes.
* The activate/start methods control the thread.
*/
static void
ring_buffer_thread_thread_func (GstRingBufferThread * thread)
{
GstElement *parent = NULL;
GstMessage *message;
GValue val = { 0 };
GstAudioRingBuffer *capture, *playback;
ProcessFunc writefunc = NULL, readfunc = NULL;
gint preroll = 1;
GST_DEBUG_OBJECT (thread, "enter thread");
GST_OBJECT_LOCK (thread);
GST_DEBUG_OBJECT (thread, "signal wait");
GST_RING_BUFFER_THREAD_SIGNAL (thread);
if ((capture = thread->capture))
gst_object_ref (capture);
if ((playback = thread->playback))
gst_object_ref (playback);
GST_OBJECT_UNLOCK (thread);
if (capture)
readfunc = GST_AUDIO_RING_BUFFER_GET_CLASS (capture)->process;
if (playback)
writefunc = GST_AUDIO_RING_BUFFER_GET_CLASS (playback)->process;
if (parent) {
g_value_init (&val, G_TYPE_POINTER);
g_value_set_pointer (&val, thread->thread);
message = gst_message_new_stream_status (GST_OBJECT_CAST (thread),
GST_STREAM_STATUS_TYPE_ENTER, NULL);
gst_message_set_stream_status_object (message, &val);
GST_DEBUG_OBJECT (thread, "posting ENTER stream status");
gst_element_post_message (parent, message);
}
while (TRUE) {
gint left, processed;
guint8 *read_ptr, *write_ptr;
gint read_seg, write_seg;
gint read_len, write_len;
gboolean read_active, write_active;
if (playback)
write_active =
gst_ring_buffer_prepare_read (GST_RING_BUFFER_CAST (playback),
&write_seg, &write_ptr, &write_len);
else
write_active = FALSE;
if (playback) {
if (!write_active) {
write_ptr = GST_RING_BUFFER_CAST (playback)->empty_seg;
write_len = GST_RING_BUFFER_CAST (playback)->spec.segsize;
}
left = write_len;
do {
processed = writefunc (playback, write_ptr, left);
GST_LOG_OBJECT (thread, "written %d bytes of %d from segment %d",
processed, left, write_seg);
if (processed < 0 || processed > left) {
/* might not be critical, it e.g. happens when aborting playback */
GST_WARNING_OBJECT (thread,
"error writing data in %s (reason: %s), skipping segment (left: %d, processed: %d)",
GST_DEBUG_FUNCPTR_NAME (writefunc),
(errno > 1 ? g_strerror (errno) : "unknown"), left, processed);
break;
}
left -= processed;
write_ptr += processed;
} while (left > 0);
/* we wrote one segment */
gst_ring_buffer_advance (GST_RING_BUFFER_CAST (playback), 1);
if (preroll > 0) {
/* do not start reading until we have read enough data */
preroll--;
GST_DEBUG_OBJECT (thread, "need more preroll");
continue;
}
}
if (capture)
read_active =
gst_ring_buffer_prepare_read (GST_RING_BUFFER_CAST (capture),
&read_seg, &read_ptr, &read_len);
else
read_active = FALSE;
if (capture) {
left = read_len;
do {
processed = readfunc (capture, read_ptr, left);
GST_LOG_OBJECT (thread, "read %d bytes of %d from segment %d",
processed, left, read_seg);
if (processed < 0 || processed > left) {
/* might not be critical, it e.g. happens when aborting playback */
GST_WARNING_OBJECT (thread,
"error reading data in %s (reason: %s), skipping segment (left: %d, processed: %d)",
GST_DEBUG_FUNCPTR_NAME (readfunc),
(errno > 1 ? g_strerror (errno) : "unknown"), left, processed);
break;
}
left -= processed;
read_ptr += processed;
} while (left > 0);
if (read_active)
/* we read one segment */
gst_ring_buffer_advance (GST_RING_BUFFER_CAST (capture), 1);
}
if (!read_active && !write_active) {
GST_OBJECT_LOCK (thread);
if (!thread->running)
goto stop_running;
GST_DEBUG_OBJECT (thread, "signal wait");
GST_RING_BUFFER_THREAD_SIGNAL (thread);
GST_DEBUG_OBJECT (thread, "wait for action");
GST_RING_BUFFER_THREAD_WAIT (thread);
GST_DEBUG_OBJECT (thread, "got signal");
if (!thread->running)
goto stop_running;
GST_DEBUG_OBJECT (thread, "continue running");
GST_OBJECT_UNLOCK (thread);
}
}
/* Will never be reached */
g_assert_not_reached ();
return;
/* ERROR */
stop_running:
{
GST_OBJECT_UNLOCK (thread);
GST_DEBUG_OBJECT (thread, "stop running, exit thread");
if (parent) {
message = gst_message_new_stream_status (GST_OBJECT_CAST (thread),
GST_STREAM_STATUS_TYPE_LEAVE, GST_ELEMENT_CAST (thread));
gst_message_set_stream_status_object (message, &val);
GST_DEBUG_OBJECT (thread, "posting LEAVE stream status");
gst_element_post_message (parent, message);
}
return;
}
}
static void
gst_ring_buffer_thread_init (GstRingBufferThread * thread,
GstRingBufferThreadClass * g_class)
{
thread->running = FALSE;
thread->cond = g_cond_new ();
}
static void
gst_ring_buffer_thread_dispose (GObject * object)
{
GstRingBufferThread *thread = GST_RING_BUFFER_THREAD_CAST (object);
GST_OBJECT_LOCK (thread);
if (thread->playback) {
gst_object_unref (thread->playback);
thread->playback = NULL;
}
if (thread->capture) {
gst_object_unref (thread->capture);
thread->capture = NULL;
}
GST_OBJECT_UNLOCK (thread);
G_OBJECT_CLASS (ring_parent_class)->dispose (object);
}
static void
gst_ring_buffer_thread_finalize (GObject * object)
{
GstRingBufferThread *thread = GST_RING_BUFFER_THREAD_CAST (object);
g_cond_free (thread->cond);
G_OBJECT_CLASS (ring_parent_class)->finalize (object);
}
gboolean
gst_ring_buffer_thread_activate (GstRingBufferThread * thread, gboolean active)
{
GError *error = NULL;
GST_OBJECT_LOCK (thread);
if (active) {
if (thread->active_count == 0) {
thread->running = TRUE;
GST_DEBUG_OBJECT (thread, "starting thread");
thread->thread =
g_thread_create ((GThreadFunc) ring_buffer_thread_thread_func, thread,
TRUE, &error);
if (!thread->thread || error != NULL)
goto thread_failed;
GST_DEBUG_OBJECT (thread, "waiting for thread");
/* the object lock is taken */
GST_RING_BUFFER_THREAD_WAIT (thread);
GST_DEBUG_OBJECT (thread, "thread is started");
}
thread->active_count++;
} else {
if (thread->active_count == 1) {
thread->running = FALSE;
GST_DEBUG_OBJECT (thread, "signal wait");
GST_RING_BUFFER_THREAD_SIGNAL (thread);
GST_OBJECT_UNLOCK (thread);
/* join the thread */
g_thread_join (thread->thread);
GST_OBJECT_LOCK (thread);
}
thread->active_count--;
}
GST_OBJECT_UNLOCK (thread);
return TRUE;
/* ERRORS */
thread_failed:
{
if (error)
GST_ERROR_OBJECT (thread, "could not create thread %s", error->message);
else
GST_ERROR_OBJECT (thread, "could not create thread for unknown reason");
thread->running = FALSE;
GST_OBJECT_UNLOCK (thread);
return FALSE;
}
}
gboolean
gst_ring_buffer_thread_set_ringbuffer (GstRingBufferThread * thread,
GstAudioRingBuffer * buf)
{
GstAudioRingBuffer *old, **new;
g_return_val_if_fail (GST_IS_RING_BUFFER_THREAD (thread), FALSE);
if (buf->mode == GST_AUDIO_RING_BUFFER_MODE_PLAYBACK)
new = &thread->playback;
else
new = &thread->capture;
old = *new;
if (buf)
gst_object_ref (buf);
*new = buf;
if (old)
gst_object_unref (old);
return TRUE;
}
gboolean
gst_ring_buffer_thread_start (GstRingBufferThread * thread)
{
g_return_val_if_fail (GST_IS_RING_BUFFER_THREAD (thread), FALSE);
GST_RING_BUFFER_THREAD_SIGNAL (thread);
return TRUE;
}

View file

@ -0,0 +1,93 @@
/* GStreamer
* Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
* 2005 Wim Taymans <wim@fluendo.com>
*
* gstringbufferthread.h:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_RING_BUFFER_THREAD_H__
#define __GST_RING_BUFFER_THREAD_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#define GST_TYPE_RING_BUFFER_THREAD (gst_ring_buffer_thread_get_type())
#define GST_RING_BUFFER_THREAD(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RING_BUFFER_THREAD,GstRingBufferThread))
#define GST_RING_BUFFER_THREAD_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RING_BUFFER_THREAD,GstRingBufferThreadClass))
#define GST_RING_BUFFER_THREAD_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_RING_BUFFER_THREAD,GstRingBufferThreadClass))
#define GST_IS_RING_BUFFER_THREAD(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RING_BUFFER_THREAD))
#define GST_IS_RING_BUFFER_THREAD_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RING_BUFFER_THREAD))
#define GST_RING_BUFFER_THREAD_CAST(obj) ((GstRingBufferThread *)obj)
typedef struct _GstRingBufferThread GstRingBufferThread;
typedef struct _GstRingBufferThreadClass GstRingBufferThreadClass;
#include <gst/audio/gstaudioringbuffer.h>
#define GST_RING_BUFFER_THREAD_GET_COND(buf) (((GstRingBufferThread *)buf)->cond)
#define GST_RING_BUFFER_THREAD_WAIT(buf) (g_cond_wait (GST_RING_BUFFER_THREAD_GET_COND (buf), GST_OBJECT_GET_LOCK (buf)))
#define GST_RING_BUFFER_THREAD_SIGNAL(buf) (g_cond_signal (GST_RING_BUFFER_THREAD_GET_COND (buf)))
#define GST_RING_BUFFER_THREAD_BROADCAST(buf)(g_cond_broadcast (GST_RING_BUFFER_THREAD_GET_COND (buf)))
/**
* GstRingBufferThread:
*
* Opaque #GstRingBufferThread.
*/
struct _GstRingBufferThread {
GstObject parent;
gint active_count;
/*< private >*/ /* with LOCK */
GThread *thread;
gboolean running;
GCond *cond;
GstAudioRingBuffer *playback;
GstAudioRingBuffer *capture;
/*< private >*/
gpointer _gst_reserved[GST_PADDING];
};
/**
* GstRingBufferThreadClass:
* @parent_class: the parent class structure.
*
* #GstRingBufferThread class. It currently exposes no virtual methods to override.
*/
struct _GstRingBufferThreadClass {
GstObjectClass parent_class;
/*< private >*/
gpointer _gst_reserved[GST_PADDING];
};
GType gst_ring_buffer_thread_get_type(void);
gboolean gst_ring_buffer_thread_set_ringbuffer (GstRingBufferThread *thread, GstAudioRingBuffer *buf);
gboolean gst_ring_buffer_thread_activate (GstRingBufferThread *thread, gboolean active);
gboolean gst_ring_buffer_thread_start (GstRingBufferThread *thread);
G_END_DECLS
#endif /* __GST_RING_BUFFER_THREAD_H__ */
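
A hypothetical usage sketch of the API above; how the element obtains its GstAudioRingBuffer is left out, and error handling is omitted.

#include "gstringbufferthread.h"

/* attach a playback ringbuffer to a freshly created I/O thread */
static GstRingBufferThread *
attach_ringbuffer_thread (GstAudioRingBuffer * playback_buf)
{
  GstRingBufferThread *thread;

  thread = g_object_new (GST_TYPE_RING_BUFFER_THREAD, NULL);

  /* register the ringbuffer; its mode decides the playback/capture slot */
  gst_ring_buffer_thread_set_ringbuffer (thread, playback_buf);

  /* first activation spawns the thread, which waits until it is started */
  gst_ring_buffer_thread_activate (thread, TRUE);

  /* wake the thread so it begins processing segments */
  gst_ring_buffer_thread_start (thread);

  return thread;
}

/* later, when shutting down */
static void
detach_ringbuffer_thread (GstRingBufferThread * thread)
{
  /* last deactivation stops and joins the thread */
  gst_ring_buffer_thread_activate (thread, FALSE);
  gst_object_unref (thread);
}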

View file

@ -0,0 +1,617 @@
? foo
Index: Makefile.am
===================================================================
RCS file: /cvs/gstreamer/gst-plugins-base/gst-libs/gst/cdda/Makefile.am,v
retrieving revision 1.4
diff -u -p -u -p -r1.4 Makefile.am
--- Makefile.am 3 Apr 2008 06:39:27 -0000 1.4
+++ Makefile.am 21 Aug 2008 14:17:21 -0000
@@ -1,9 +1,7 @@
lib_LTLIBRARIES = libgstcdda-@GST_MAJORMINOR@.la
libgstcdda_@GST_MAJORMINOR@_la_SOURCES = \
- gstcddabasesrc.c \
- sha1.c \
- sha1.h
+ gstcddabasesrc.c
libgstcdda_@GST_MAJORMINOR@includedir = $(includedir)/gstreamer-@GST_MAJORMINOR@/gst/cdda
libgstcdda_@GST_MAJORMINOR@include_HEADERS = \
Index: gstcddabasesrc.c
===================================================================
RCS file: /cvs/gstreamer/gst-plugins-base/gst-libs/gst/cdda/gstcddabasesrc.c,v
retrieving revision 1.19
diff -u -p -u -p -r1.19 gstcddabasesrc.c
--- gstcddabasesrc.c 28 May 2008 15:48:33 -0000 1.19
+++ gstcddabasesrc.c 21 Aug 2008 14:17:21 -0000
@@ -1084,36 +1084,35 @@ cddb_sum (gint n)
return ret;
}
-#include "sha1.h"
-
static void
gst_cddabasesrc_calculate_musicbrainz_discid (GstCddaBaseSrc * src)
{
GString *s;
- SHA_INFO sha;
+ GChecksum *sha;
guchar digest[20];
gchar *ptr;
gchar tmp[9];
gulong i;
guint leadout_sector;
+ gsize digest_len;
s = g_string_new (NULL);
leadout_sector = src->tracks[src->num_tracks - 1].end + 1 + CD_MSF_OFFSET;
/* generate SHA digest */
- sha_init (&sha);
+ sha = g_checksum_new (G_CHECKSUM_SHA1);
g_snprintf (tmp, sizeof (tmp), "%02X", src->tracks[0].num);
g_string_append_printf (s, "%02X", src->tracks[0].num);
- sha_update (&sha, (SHA_BYTE *) tmp, 2);
+ g_checksum_update (sha, (guchar *) tmp, 2);
g_snprintf (tmp, sizeof (tmp), "%02X", src->tracks[src->num_tracks - 1].num);
g_string_append_printf (s, " %02X", src->tracks[src->num_tracks - 1].num);
- sha_update (&sha, (SHA_BYTE *) tmp, 2);
+ g_checksum_update (sha, (guchar *) tmp, 2);
g_snprintf (tmp, sizeof (tmp), "%08X", leadout_sector);
g_string_append_printf (s, " %08X", leadout_sector);
- sha_update (&sha, (SHA_BYTE *) tmp, 8);
+ g_checksum_update (sha, (guchar *) tmp, 8);
for (i = 0; i < 99; i++) {
if (i < src->num_tracks) {
@@ -1121,15 +1120,17 @@ gst_cddabasesrc_calculate_musicbrainz_di
g_snprintf (tmp, sizeof (tmp), "%08X", frame_offset);
g_string_append_printf (s, " %08X", frame_offset);
- sha_update (&sha, (SHA_BYTE *) tmp, 8);
+ g_checksum_update (sha, (guchar *) tmp, 8);
} else {
- sha_update (&sha, (SHA_BYTE *) "00000000", 8);
+ g_checksum_update (sha, (guchar *) "00000000", 8);
}
}
- sha_final (digest, &sha);
+ digest_len = 20;
+ g_checksum_get_digest (sha, (guint8 *) &digest, &digest_len);
/* re-encode to base64 */
- ptr = g_base64_encode (digest, 20);
+ ptr = g_base64_encode (digest, digest_len);
+ g_checksum_free (sha);
i = strlen (ptr);
g_assert (i < sizeof (src->mb_discid) + 1);
Index: sha1.c
===================================================================
RCS file: sha1.c
diff -N sha1.c
--- sha1.c 27 Feb 2008 10:42:08 -0000 1.2
+++ /dev/null 1 Jan 1970 00:00:00 -0000
@@ -1,450 +0,0 @@
-/* (PD) 2001 The Bitzi Corporation
- * Please see file COPYING or http://bitzi.com/publicdomain
- * for more info.
- *
- * NIST Secure Hash Algorithm
- * heavily modified by Uwe Hollerbach <uh@alumni.caltech edu>
- * from Peter C. Gutmann's implementation as found in
- * Applied Cryptography by Bruce Schneier
- * Further modifications to include the "UNRAVEL" stuff, below
- *
- * This code is in the public domain
- *
- * $Id: sha1.c,v 1.2 2008-02-27 10:42:08 slomo Exp $
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-#include <glib.h>
-#define SHA_BYTE_ORDER G_BYTE_ORDER
-
-#include <string.h>
-#include "sha1.h"
-
-/* UNRAVEL should be fastest & biggest */
-/* UNROLL_LOOPS should be just as big, but slightly slower */
-/* both undefined should be smallest and slowest */
-
-#define UNRAVEL
-/* #define UNROLL_LOOPS */
-
-/* SHA f()-functions */
-
-#define f1(x,y,z) ((x & y) | (~x & z))
-#define f2(x,y,z) (x ^ y ^ z)
-#define f3(x,y,z) ((x & y) | (x & z) | (y & z))
-#define f4(x,y,z) (x ^ y ^ z)
-
-/* SHA constants */
-
-#define CONST1 0x5a827999L
-#define CONST2 0x6ed9eba1L
-#define CONST3 0x8f1bbcdcL
-#define CONST4 0xca62c1d6L
-
-/* truncate to 32 bits -- should be a null op on 32-bit machines */
-
-#define T32(x) ((x) & 0xffffffffL)
-
-/* 32-bit rotate */
-
-#define R32(x,n) T32(((x << n) | (x >> (32 - n))))
-
-/* the generic case, for when the overall rotation is not unraveled */
-
-#define FG(n) \
- T = T32(R32(A,5) + f##n(B,C,D) + E + *WP++ + CONST##n); \
- E = D; D = C; C = R32(B,30); B = A; A = T
-
-/* specific cases, for when the overall rotation is unraveled */
-
-#define FA(n) \
- T = T32(R32(A,5) + f##n(B,C,D) + E + *WP++ + CONST##n); B = R32(B,30)
-
-#define FB(n) \
- E = T32(R32(T,5) + f##n(A,B,C) + D + *WP++ + CONST##n); A = R32(A,30)
-
-#define FC(n) \
- D = T32(R32(E,5) + f##n(T,A,B) + C + *WP++ + CONST##n); T = R32(T,30)
-
-#define FD(n) \
- C = T32(R32(D,5) + f##n(E,T,A) + B + *WP++ + CONST##n); E = R32(E,30)
-
-#define FE(n) \
- B = T32(R32(C,5) + f##n(D,E,T) + A + *WP++ + CONST##n); D = R32(D,30)
-
-#define FT(n) \
- A = T32(R32(B,5) + f##n(C,D,E) + T + *WP++ + CONST##n); C = R32(C,30)
-
-/* do SHA transformation */
-
-static void
-sha_transform (SHA_INFO * sha_info)
-{
- int i;
- SHA_BYTE *dp;
- SHA_LONG T, A, B, C, D, E, W[80], *WP;
-
- dp = sha_info->data;
-
-/*
-the following makes sure that at least one code block below is
-traversed or an error is reported, without the necessity for nested
-preprocessor if/else/endif blocks, which are a great pain in the
-nether regions of the anatomy...
-*/
-#undef SWAP_DONE
-
-#if (SHA_BYTE_ORDER == 1234)
-#define SWAP_DONE
- for (i = 0; i < 16; ++i) {
- memcpy (&T, dp, sizeof (SHA_LONG));
- dp += 4;
- W[i] = ((T << 24) & 0xff000000) | ((T << 8) & 0x00ff0000) |
- ((T >> 8) & 0x0000ff00) | ((T >> 24) & 0x000000ff);
- }
-#endif /* SHA_BYTE_ORDER == 1234 */
-
-#if (SHA_BYTE_ORDER == 4321)
-#define SWAP_DONE
- for (i = 0; i < 16; ++i) {
- memcpy (&T, dp, sizeof (SHA_LONG));
- dp += 4;
- W[i] = T32 (T);
- }
-#endif /* SHA_BYTE_ORDER == 4321 */
-
-#if (SHA_BYTE_ORDER == 12345678)
-#define SWAP_DONE
- for (i = 0; i < 16; i += 2) {
- memcpy (&T, dp, sizeof (SHA_LONG));
- dp += 8;
- W[i] = ((T << 24) & 0xff000000) | ((T << 8) & 0x00ff0000) |
- ((T >> 8) & 0x0000ff00) | ((T >> 24) & 0x000000ff);
- T >>= 32;
- W[i + 1] = ((T << 24) & 0xff000000) | ((T << 8) & 0x00ff0000) |
- ((T >> 8) & 0x0000ff00) | ((T >> 24) & 0x000000ff);
- }
-#endif /* SHA_BYTE_ORDER == 12345678 */
-
-#if (SHA_BYTE_ORDER == 87654321)
-#define SWAP_DONE
- for (i = 0; i < 16; i += 2) {
- memcpy (&T, dp, sizeof (SHA_LONG));
- dp += 8;
- W[i] = T32 (T >> 32);
- W[i + 1] = T32 (T);
- }
-#endif /* SHA_BYTE_ORDER == 87654321 */
-
-#ifndef SWAP_DONE
-#error Unknown byte order -- you need to add code here
-#endif /* SWAP_DONE */
-
- for (i = 16; i < 80; ++i) {
- W[i] = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
-#if (SHA_VERSION == 1)
- W[i] = R32 (W[i], 1);
-#endif /* SHA_VERSION */
- }
- A = sha_info->digest[0];
- B = sha_info->digest[1];
- C = sha_info->digest[2];
- D = sha_info->digest[3];
- E = sha_info->digest[4];
- WP = W;
-#ifdef UNRAVEL
- FA (1);
- FB (1);
- FC (1);
- FD (1);
- FE (1);
- FT (1);
- FA (1);
- FB (1);
- FC (1);
- FD (1);
- FE (1);
- FT (1);
- FA (1);
- FB (1);
- FC (1);
- FD (1);
- FE (1);
- FT (1);
- FA (1);
- FB (1);
- FC (2);
- FD (2);
- FE (2);
- FT (2);
- FA (2);
- FB (2);
- FC (2);
- FD (2);
- FE (2);
- FT (2);
- FA (2);
- FB (2);
- FC (2);
- FD (2);
- FE (2);
- FT (2);
- FA (2);
- FB (2);
- FC (2);
- FD (2);
- FE (3);
- FT (3);
- FA (3);
- FB (3);
- FC (3);
- FD (3);
- FE (3);
- FT (3);
- FA (3);
- FB (3);
- FC (3);
- FD (3);
- FE (3);
- FT (3);
- FA (3);
- FB (3);
- FC (3);
- FD (3);
- FE (3);
- FT (3);
- FA (4);
- FB (4);
- FC (4);
- FD (4);
- FE (4);
- FT (4);
- FA (4);
- FB (4);
- FC (4);
- FD (4);
- FE (4);
- FT (4);
- FA (4);
- FB (4);
- FC (4);
- FD (4);
- FE (4);
- FT (4);
- FA (4);
- FB (4);
- sha_info->digest[0] = T32 (sha_info->digest[0] + E);
- sha_info->digest[1] = T32 (sha_info->digest[1] + T);
- sha_info->digest[2] = T32 (sha_info->digest[2] + A);
- sha_info->digest[3] = T32 (sha_info->digest[3] + B);
- sha_info->digest[4] = T32 (sha_info->digest[4] + C);
-#else /* !UNRAVEL */
-#ifdef UNROLL_LOOPS
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (1);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (2);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (3);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
- FG (4);
-#else /* !UNROLL_LOOPS */
- for (i = 0; i < 20; ++i) {
- FG (1);
- }
- for (i = 20; i < 40; ++i) {
- FG (2);
- }
- for (i = 40; i < 60; ++i) {
- FG (3);
- }
- for (i = 60; i < 80; ++i) {
- FG (4);
- }
-#endif /* !UNROLL_LOOPS */
- sha_info->digest[0] = T32 (sha_info->digest[0] + A);
- sha_info->digest[1] = T32 (sha_info->digest[1] + B);
- sha_info->digest[2] = T32 (sha_info->digest[2] + C);
- sha_info->digest[3] = T32 (sha_info->digest[3] + D);
- sha_info->digest[4] = T32 (sha_info->digest[4] + E);
-#endif /* !UNRAVEL */
-}
-
-/* initialize the SHA digest */
-
-void
-sha_init (SHA_INFO * sha_info)
-{
- sha_info->digest[0] = 0x67452301L;
- sha_info->digest[1] = 0xefcdab89L;
- sha_info->digest[2] = 0x98badcfeL;
- sha_info->digest[3] = 0x10325476L;
- sha_info->digest[4] = 0xc3d2e1f0L;
- sha_info->count_lo = 0L;
- sha_info->count_hi = 0L;
- sha_info->local = 0;
-}
-
-/* update the SHA digest */
-
-void
-sha_update (SHA_INFO * sha_info, SHA_BYTE * buffer, int count)
-{
- int i;
- SHA_LONG clo;
-
- clo = T32 (sha_info->count_lo + ((SHA_LONG) count << 3));
- if (clo < sha_info->count_lo) {
- ++sha_info->count_hi;
- }
- sha_info->count_lo = clo;
- sha_info->count_hi += (SHA_LONG) count >> 29;
- if (sha_info->local) {
- i = SHA_BLOCKSIZE - sha_info->local;
- if (i > count) {
- i = count;
- }
- memcpy (((SHA_BYTE *) sha_info->data) + sha_info->local, buffer, i);
- count -= i;
- buffer += i;
- sha_info->local += i;
- if (sha_info->local == SHA_BLOCKSIZE) {
- sha_transform (sha_info);
- } else {
- return;
- }
- }
- while (count >= SHA_BLOCKSIZE) {
- memcpy (sha_info->data, buffer, SHA_BLOCKSIZE);
- buffer += SHA_BLOCKSIZE;
- count -= SHA_BLOCKSIZE;
- sha_transform (sha_info);
- }
- memcpy (sha_info->data, buffer, count);
- sha_info->local = count;
-}
-
-/* finish computing the SHA digest */
-
-void
-sha_final (unsigned char digest[20], SHA_INFO * sha_info)
-{
- int count;
- SHA_LONG lo_bit_count, hi_bit_count;
-
- lo_bit_count = sha_info->count_lo;
- hi_bit_count = sha_info->count_hi;
- count = (int) ((lo_bit_count >> 3) & 0x3f);
- ((SHA_BYTE *) sha_info->data)[count++] = 0x80;
- if (count > SHA_BLOCKSIZE - 8) {
- memset (((SHA_BYTE *) sha_info->data) + count, 0, SHA_BLOCKSIZE - count);
- sha_transform (sha_info);
- memset ((SHA_BYTE *) sha_info->data, 0, SHA_BLOCKSIZE - 8);
- } else {
- memset (((SHA_BYTE *) sha_info->data) + count, 0,
- SHA_BLOCKSIZE - 8 - count);
- }
- sha_info->data[56] = (unsigned char) ((hi_bit_count >> 24) & 0xff);
- sha_info->data[57] = (unsigned char) ((hi_bit_count >> 16) & 0xff);
- sha_info->data[58] = (unsigned char) ((hi_bit_count >> 8) & 0xff);
- sha_info->data[59] = (unsigned char) ((hi_bit_count >> 0) & 0xff);
- sha_info->data[60] = (unsigned char) ((lo_bit_count >> 24) & 0xff);
- sha_info->data[61] = (unsigned char) ((lo_bit_count >> 16) & 0xff);
- sha_info->data[62] = (unsigned char) ((lo_bit_count >> 8) & 0xff);
- sha_info->data[63] = (unsigned char) ((lo_bit_count >> 0) & 0xff);
- sha_transform (sha_info);
- digest[0] = (unsigned char) ((sha_info->digest[0] >> 24) & 0xff);
- digest[1] = (unsigned char) ((sha_info->digest[0] >> 16) & 0xff);
- digest[2] = (unsigned char) ((sha_info->digest[0] >> 8) & 0xff);
- digest[3] = (unsigned char) ((sha_info->digest[0]) & 0xff);
- digest[4] = (unsigned char) ((sha_info->digest[1] >> 24) & 0xff);
- digest[5] = (unsigned char) ((sha_info->digest[1] >> 16) & 0xff);
- digest[6] = (unsigned char) ((sha_info->digest[1] >> 8) & 0xff);
- digest[7] = (unsigned char) ((sha_info->digest[1]) & 0xff);
- digest[8] = (unsigned char) ((sha_info->digest[2] >> 24) & 0xff);
- digest[9] = (unsigned char) ((sha_info->digest[2] >> 16) & 0xff);
- digest[10] = (unsigned char) ((sha_info->digest[2] >> 8) & 0xff);
- digest[11] = (unsigned char) ((sha_info->digest[2]) & 0xff);
- digest[12] = (unsigned char) ((sha_info->digest[3] >> 24) & 0xff);
- digest[13] = (unsigned char) ((sha_info->digest[3] >> 16) & 0xff);
- digest[14] = (unsigned char) ((sha_info->digest[3] >> 8) & 0xff);
- digest[15] = (unsigned char) ((sha_info->digest[3]) & 0xff);
- digest[16] = (unsigned char) ((sha_info->digest[4] >> 24) & 0xff);
- digest[17] = (unsigned char) ((sha_info->digest[4] >> 16) & 0xff);
- digest[18] = (unsigned char) ((sha_info->digest[4] >> 8) & 0xff);
- digest[19] = (unsigned char) ((sha_info->digest[4]) & 0xff);
-}
Index: sha1.h
===================================================================
RCS file: sha1.h
diff -N sha1.h
--- sha1.h 13 Dec 2007 10:10:35 -0000 1.2
+++ /dev/null 1 Jan 1970 00:00:00 -0000
@@ -1,62 +0,0 @@
-/* NIST Secure Hash Algorithm */
-/* heavily modified by Uwe Hollerbach <uh@alumni.caltech edu> */
-/* from Peter C. Gutmann's implementation as found in */
-/* Applied Cryptography by Bruce Schneier */
-/* This code is in the public domain */
-/* $Id: sha1.h,v 1.2 2007-12-13 10:10:35 tpm Exp $ */
-
-#ifndef __GST_CDDA_SHA_H__
-#define __GST_CDDA_SHA_H__
-
-#include <stdlib.h>
-#include <stdio.h>
-
-/* Useful defines & typedefs */
-typedef unsigned char SHA_BYTE; /* 8-bit quantity */
-typedef unsigned long SHA_LONG; /* 32-or-more-bit quantity */
-
-#define SHA_BLOCKSIZE 64
-#define SHA_DIGESTSIZE 20
-
-typedef struct {
- SHA_LONG digest[5]; /* message digest */
- SHA_LONG count_lo, count_hi; /* 64-bit bit count */
- SHA_BYTE data[SHA_BLOCKSIZE]; /* SHA data buffer */
- int local; /* unprocessed amount in data */
-} SHA_INFO;
-
-#define sha_init __gst_cdda_sha_init
-#define sha_update __gst_cdda_sha_update
-#define sha_final __gst_cdda_sha_final
-
-void sha_init(SHA_INFO *);
-void sha_update(SHA_INFO *, SHA_BYTE *, int);
-void sha_final(unsigned char [20], SHA_INFO *);
-
-#define SHA_VERSION 1
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-
-
-#ifdef WORDS_BIGENDIAN
-# if SIZEOF_LONG == 4
-# define SHA_BYTE_ORDER 4321
-# elif SIZEOF_LONG == 8
-# define SHA_BYTE_ORDER 87654321
-# endif
-#else
-# if SIZEOF_LONG == 4
-# define SHA_BYTE_ORDER 1234
-# elif SIZEOF_LONG == 8
-# define SHA_BYTE_ORDER 12345678
-# endif
-#endif
-
-#else
-
-#define SHA_BYTE_ORDER 1234
-
-#endif
-
-#endif /* __GST_CDDA_SHA_H__ */

File diff suppressed because it is too large

View file

@ -0,0 +1,224 @@
diff --git a/gst-libs/gst/rtp/gstrtcpbuffer.c b/gst-libs/gst/rtp/gstrtcpbuffer.c
index ab77c8a..fb35a92 100644
--- a/gst-libs/gst/rtp/gstrtcpbuffer.c
+++ b/gst-libs/gst/rtp/gstrtcpbuffer.c
@@ -449,6 +449,11 @@ gst_rtcp_buffer_add_packet (GstBuffer * buffer, GstRTCPType type,
case GST_RTCP_TYPE_APP:
len = 12;
break;
+ case GST_RTCP_TYPE_RTPFB:
+ len = 12;
+ break;
+ case GST_RTCP_TYPE_PSFB:
+ len = 12;
+ break;
default:
goto unknown_type;
}
@@ -1637,6 +1642,147 @@ no_space:
}
/**
+ * gst_rtcp_packet_fb_get_sender_ssrc:
+ * @packet: a valid RTPFB or PSFB #GstRTCPPacket
+ *
+ * Get the sender SSRC field of the RTPFB or PSFB @packet.
+ *
+ * Returns: the sender SSRC.
+ */
+guint32
+gst_rtcp_packet_fb_get_sender_ssrc (GstRTCPPacket * packet)
+{
+ guint8 *data;
+ guint32 ssrc;
+
+ g_return_val_if_fail (packet != NULL, 0);
+ g_return_val_if_fail ((packet->type == GST_RTCP_TYPE_RTPFB ||
+ packet->type == GST_RTCP_TYPE_PSFB), 0);
+ g_return_val_if_fail (GST_IS_BUFFER (packet->buffer), 0);
+
+ data = GST_BUFFER_DATA (packet->buffer);
+
+ /* skip header */
+ data += packet->offset + 4;
+ ssrc = GST_READ_UINT32_BE (data);
+
+ return ssrc;
+}
+
+/**
+ * gst_rtcp_packet_fb_set_sender_ssrc:
+ * @packet: a valid RTPFB or PSFB #GstRTCPPacket
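+ * @ssrc: the sender SSRC to set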
+ *
+ * Set the sender SSRC field of the RTPFB or PSFB @packet.
+ */
+void
+gst_rtcp_packet_fb_set_sender_ssrc (GstRTCPPacket *packet, guint32 ssrc)
+{
+ guint8 *data;
+
+ g_return_if_fail (packet != NULL);
+ g_return_if_fail (packet->type == GST_RTCP_TYPE_RTPFB ||
+ packet->type == GST_RTCP_TYPE_PSFB);
+ g_return_if_fail (GST_IS_BUFFER (packet->buffer));
+
+ data = GST_BUFFER_DATA (packet->buffer);
+
+ /* skip header */
+ data += packet->offset + 4;
+ GST_WRITE_UINT32_BE (data, ssrc);
+}
+
+/**
+ * gst_rtcp_packet_fb_get_media_ssrc:
+ * @packet: a valid RTPFB or PSFB #GstRTCPPacket
+ *
+ * Get the media SSRC field of the RTPFB or PSFB @packet.
+ *
+ * Returns: the media SSRC.
+ */
+guint32
+gst_rtcp_packet_fb_get_media_ssrc (GstRTCPPacket * packet)
+{
+ guint8 *data;
+ guint32 ssrc;
+
+ g_return_val_if_fail (packet != NULL, 0);
+ g_return_val_if_fail ((packet->type == GST_RTCP_TYPE_RTPFB ||
+ packet->type == GST_RTCP_TYPE_PSFB), 0);
+ g_return_val_if_fail (GST_IS_BUFFER (packet->buffer), 0);
+
+ data = GST_BUFFER_DATA (packet->buffer);
+
+ /* skip header and sender ssrc */
+ data += packet->offset + 8;
+ ssrc = GST_READ_UINT32_BE (data);
+
+ return ssrc;
+}
+
+/**
+ * gst_rtcp_packet_fb_set_media_ssrc:
+ * @packet: a valid RTPFB or PSFB #GstRTCPPacket
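+ * @ssrc: the media SSRC to set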
+ *
+ * Set the media SSRC field of the RTPFB or PSFB @packet.
+ */
+void
+gst_rtcp_packet_fb_set_media_ssrc (GstRTCPPacket *packet, guint32 ssrc)
+{
+ guint8 *data;
+
+ g_return_if_fail (packet != NULL);
+ g_return_if_fail (packet->type == GST_RTCP_TYPE_RTPFB ||
+ packet->type == GST_RTCP_TYPE_PSFB);
+ g_return_if_fail (GST_IS_BUFFER (packet->buffer));
+
+ data = GST_BUFFER_DATA (packet->buffer);
+
+ /* skip header and sender ssrc */
+ data += packet->offset + 8;
+ GST_WRITE_UINT32_BE (data, ssrc);
+}
+
+/**
+ * gst_rtcp_packet_psfb_get_type:
+ * @packet: a valid PSFB #GstRTCPPacket
+ *
+ * Get the feedback message type of the PSFB @packet.
+ *
+ * Returns: The feedback message type.
+ */
+GstRTCPPSFBType
+gst_rtcp_packet_psfb_get_type (GstRTCPPacket *packet)
+{
+ g_return_val_if_fail (packet != NULL, GST_RTCP_PSFB_TYPE_INVALID);
+ g_return_val_if_fail (packet->type == GST_RTCP_TYPE_PSFB,
+ GST_RTCP_PSFB_TYPE_INVALID);
+
+ return packet->count;
+}
+
+/**
+ * gst_rtcp_packet_psfb_set_type:
+ * @packet: a valid PSFB #GstRTCPPacket
+ * @type: the #GstRTCPPSFBType to set
+ *
+ * Set the feedback message type of the PSFB @packet.
+ */
+void
+gst_rtcp_packet_psfb_set_type (GstRTCPPacket *packet, GstRTCPPSFBType type)
+{
+ guint8 *data;
+
+ g_return_if_fail (packet != NULL);
+ g_return_if_fail (packet->type == GST_RTCP_TYPE_PSFB);
+ g_return_if_fail (GST_IS_BUFFER (packet->buffer));
+
+ data = GST_BUFFER_DATA (packet->buffer);
+
+ data[packet->offset] = (data[packet->offset] & 0xE0) | type;
+}
+
+/**
* gst_rtcp_ntp_to_unix:
* @ntptime: an NTP timestamp
*
diff --git a/gst-libs/gst/rtp/gstrtcpbuffer.h b/gst-libs/gst/rtp/gstrtcpbuffer.h
index 9c908a8..bb247c9 100644
--- a/gst-libs/gst/rtp/gstrtcpbuffer.h
+++ b/gst-libs/gst/rtp/gstrtcpbuffer.h
@@ -42,6 +42,8 @@ G_BEGIN_DECLS
* @GST_RTCP_TYPE_SDES: Source description
* @GST_RTCP_TYPE_BYE: Goodbye
* @GST_RTCP_TYPE_APP: Application defined
+ * @GST_RTCP_TYPE_RTPFB: Transport layer feedback
+ * @GST_RTCP_TYPE_PSFB: Payload-specific feedback
*
* Different RTCP packet types.
*/
@@ -52,9 +54,28 @@ typedef enum
GST_RTCP_TYPE_RR = 201,
GST_RTCP_TYPE_SDES = 202,
GST_RTCP_TYPE_BYE = 203,
- GST_RTCP_TYPE_APP = 204
+ GST_RTCP_TYPE_APP = 204,
+ GST_RTCP_TYPE_RTPFB = 205,
+ GST_RTCP_TYPE_PSFB = 206
} GstRTCPType;
+/**
+ * GstRTCPPSFBType:
+ * @GST_RTCP_PSFB_TYPE_INVALID: Invalid type
+ * @GST_RTCP_PSFB_TYPE_PLI: Picture Loss Indication
+ * @GST_RTCP_PSFB_TYPE_SLI: Slice Loss Indication
+ * @GST_RTCP_PSFB_TYPE_RPSI: Reference Picture Selection Indication
+ * @GST_RTCP_PSFB_TYPE_AFB: Application layer Feedback
+ */
+typedef enum
+{
+ GST_RTCP_PSFB_TYPE_INVALID = 0,
+ GST_RTCP_PSFB_TYPE_PLI = 1,
+ GST_RTCP_PSFB_TYPE_SLI = 2,
+ GST_RTCP_PSFB_TYPE_RPSI = 3,
+ GST_RTCP_PSFB_TYPE_AFB = 15
+} GstRTCPPSFBType;
+
/**
* GstRTCPSDESType:
* @GST_RTCP_SDES_INVALID: Invalid SDES entry
@@ -232,6 +253,16 @@ guint8 gst_rtcp_packet_bye_get_reason_len (GstRTCPPacket *packet);
gchar* gst_rtcp_packet_bye_get_reason (GstRTCPPacket *packet);
gboolean gst_rtcp_packet_bye_set_reason (GstRTCPPacket *packet, const gchar *reason);
+/* feedback packets */
+guint32 gst_rtcp_packet_fb_get_sender_ssrc (GstRTCPPacket *packet);
+void gst_rtcp_packet_fb_set_sender_ssrc (GstRTCPPacket *packet, guint32 ssrc);
+guint32 gst_rtcp_packet_fb_get_media_ssrc (GstRTCPPacket *packet);
+void gst_rtcp_packet_fb_set_media_ssrc (GstRTCPPacket *packet, guint32 ssrc);
+
+/* psfb packets */
+GstRTCPPSFBType gst_rtcp_packet_psfb_get_type (GstRTCPPacket *packet);
+void gst_rtcp_packet_psfb_set_type (GstRTCPPacket *packet, GstRTCPPSFBType type);
+
/* helper functions */
guint64 gst_rtcp_ntp_to_unix (guint64 ntptime);
guint64 gst_rtcp_unix_to_ntp (guint64 unixtime);
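
A sketch of how the new feedback API might be used to build a PLI (key frame request) packet; the MTU and SSRC values are placeholders, and it assumes the existing gst_rtcp_buffer_new()/gst_rtcp_buffer_end() helpers.

#include <gst/rtp/gstrtcpbuffer.h>

static GstBuffer *
build_pli_packet (guint32 sender_ssrc, guint32 media_ssrc)
{
  GstBuffer *rtcp;
  GstRTCPPacket packet;

  rtcp = gst_rtcp_buffer_new (1400);
  gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_PSFB, &packet);

  gst_rtcp_packet_fb_set_sender_ssrc (&packet, sender_ssrc);
  gst_rtcp_packet_fb_set_media_ssrc (&packet, media_ssrc);
  gst_rtcp_packet_psfb_set_type (&packet, GST_RTCP_PSFB_TYPE_PLI);

  gst_rtcp_buffer_end (rtcp);

  return rtcp;
}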

File diff suppressed because it is too large

View file

@ -0,0 +1,16 @@
*************** struct _GstBaseRTPPayloadPrivate
*** 48,53 ****
guint16 next_seqnum;
gboolean perfect_rtptime;
gboolean timestamp_valid;
gint64 prop_max_ptime;
gint64 caps_max_ptime;
--- 48,54 ----
guint16 next_seqnum;
gboolean perfect_rtptime;
gboolean timestamp_valid;
+ gboolean notified_first_timestamp;
gint64 prop_max_ptime;
gint64 caps_max_ptime;

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -0,0 +1,85 @@
#include "rtsp-marshal.h"
#include <glib-object.h>
#ifdef G_ENABLE_DEBUG
#define g_marshal_value_peek_boolean(v) g_value_get_boolean (v)
#define g_marshal_value_peek_char(v) g_value_get_char (v)
#define g_marshal_value_peek_uchar(v) g_value_get_uchar (v)
#define g_marshal_value_peek_int(v) g_value_get_int (v)
#define g_marshal_value_peek_uint(v) g_value_get_uint (v)
#define g_marshal_value_peek_long(v) g_value_get_long (v)
#define g_marshal_value_peek_ulong(v) g_value_get_ulong (v)
#define g_marshal_value_peek_int64(v) g_value_get_int64 (v)
#define g_marshal_value_peek_uint64(v) g_value_get_uint64 (v)
#define g_marshal_value_peek_enum(v) g_value_get_enum (v)
#define g_marshal_value_peek_flags(v) g_value_get_flags (v)
#define g_marshal_value_peek_float(v) g_value_get_float (v)
#define g_marshal_value_peek_double(v) g_value_get_double (v)
#define g_marshal_value_peek_string(v) (char*) g_value_get_string (v)
#define g_marshal_value_peek_param(v) g_value_get_param (v)
#define g_marshal_value_peek_boxed(v) g_value_get_boxed (v)
#define g_marshal_value_peek_pointer(v) g_value_get_pointer (v)
#define g_marshal_value_peek_object(v) g_value_get_object (v)
#else /* !G_ENABLE_DEBUG */
/* WARNING: This code accesses GValues directly, which is UNSUPPORTED API.
* Do not access GValues directly in your code. Instead, use the
* g_value_get_*() functions
*/
#define g_marshal_value_peek_boolean(v) (v)->data[0].v_int
#define g_marshal_value_peek_char(v) (v)->data[0].v_int
#define g_marshal_value_peek_uchar(v) (v)->data[0].v_uint
#define g_marshal_value_peek_int(v) (v)->data[0].v_int
#define g_marshal_value_peek_uint(v) (v)->data[0].v_uint
#define g_marshal_value_peek_long(v) (v)->data[0].v_long
#define g_marshal_value_peek_ulong(v) (v)->data[0].v_ulong
#define g_marshal_value_peek_int64(v) (v)->data[0].v_int64
#define g_marshal_value_peek_uint64(v) (v)->data[0].v_uint64
#define g_marshal_value_peek_enum(v) (v)->data[0].v_long
#define g_marshal_value_peek_flags(v) (v)->data[0].v_ulong
#define g_marshal_value_peek_float(v) (v)->data[0].v_float
#define g_marshal_value_peek_double(v) (v)->data[0].v_double
#define g_marshal_value_peek_string(v) (v)->data[0].v_pointer
#define g_marshal_value_peek_param(v) (v)->data[0].v_pointer
#define g_marshal_value_peek_boxed(v) (v)->data[0].v_pointer
#define g_marshal_value_peek_pointer(v) (v)->data[0].v_pointer
#define g_marshal_value_peek_object(v) (v)->data[0].v_pointer
#endif /* !G_ENABLE_DEBUG */
/* ENUM:POINTER,POINTER (rtsp-marshal.list:1) */
void
gst_rtsp_marshal_ENUM__POINTER_POINTER (GClosure * closure,
GValue * return_value G_GNUC_UNUSED,
guint n_param_values,
const GValue * param_values,
gpointer invocation_hint G_GNUC_UNUSED, gpointer marshal_data)
{
typedef gint (*GMarshalFunc_ENUM__POINTER_POINTER) (gpointer data1,
gpointer arg_1, gpointer arg_2, gpointer data2);
register GMarshalFunc_ENUM__POINTER_POINTER callback;
register GCClosure *cc = (GCClosure *) closure;
register gpointer data1, data2;
gint v_return;
g_return_if_fail (return_value != NULL);
g_return_if_fail (n_param_values == 3);
if (G_CCLOSURE_SWAP_DATA (closure)) {
data1 = closure->data;
data2 = g_value_peek_pointer (param_values + 0);
} else {
data1 = g_value_peek_pointer (param_values + 0);
data2 = closure->data;
}
callback =
(GMarshalFunc_ENUM__POINTER_POINTER) (marshal_data ? marshal_data :
cc->callback);
v_return = callback (data1,
g_marshal_value_peek_pointer (param_values + 1),
g_marshal_value_peek_pointer (param_values + 2), data2);
g_value_set_enum (return_value, v_return);
}

View file

@ -0,0 +1,20 @@
#ifndef __gst_rtsp_marshal_MARSHAL_H__
#define __gst_rtsp_marshal_MARSHAL_H__
#include <glib-object.h>
G_BEGIN_DECLS
/* ENUM:POINTER,POINTER (rtsp-marshal.list:1) */
extern void gst_rtsp_marshal_ENUM__POINTER_POINTER (GClosure *closure,
GValue *return_value,
guint n_param_values,
const GValue *param_values,
gpointer invocation_hint,
gpointer marshal_data);
G_END_DECLS
#endif /* __gst_rtsp_marshal_MARSHAL_H__ */

View file

@ -0,0 +1,73 @@
commit bf5ef87699b8ef602548eec131312d7a733e278e
Author: Josep Torra <n770galaxy@gmail.com>
Date: Tue Apr 14 18:03:09 2009 +0200
Added RTSP headers related to Windows Media extension.
diff --git a/gst-libs/gst/rtsp/gstrtspdefs.c b/gst-libs/gst/rtsp/gstrtspdefs.c
index 0ab2b95..8b086e5 100644
--- a/gst-libs/gst/rtsp/gstrtspdefs.c
+++ b/gst-libs/gst/rtsp/gstrtspdefs.c
@@ -164,6 +164,27 @@ static const gchar *rtsp_headers[] = {
"ETag", /* ETag */
"If-Match", /* If-Match */
+ /* WM extensions [MS-RTSP] */
+ "Accept-Charset", /* Accept-Charset */
+ "Supported", /* Supported */
+ "Vary", /* Vary */
+ "X-Accelerate-Streaming", /* X-Accelerate-Streaming */
+ "X-Accept-Authentication", /* X-Accept-Authentication */
+ "X-Accept-Proxy-Authentication", /* X-Accept-Proxy-Authentication */
+ "X-Broadcast-Id", /* X-Broadcast-Id */
+ "X-Burst-Streaming", /* X-Burst-Streaming */
+ "X-Notice", /* X-Notice */
+ "X-Player-Lag-Time", /* X-Player-Lag-Time */
+ "X-Playlist", /* X-Playlist */
+ "X-Playlist-Change-Notice", /* X-Playlist-Change-Notice */
+ "X-Playlist-Gen-Id", /* X-Playlist-Gen-Id */
+ "X-Playlist-Seek-Id", /* X-Playlist-Seek-Id */
+ "X-Proxy-Client-Agent", /* X-Proxy-Client-Agent */
+ "X-Proxy-Client-Verb", /* X-Proxy-Client-Verb */
+ "X-Receding-PlaylistChange", /* X-Receding-PlaylistChange */
+ "X-RTP-Info", /* X-RTP-Info */
+ "X-StartupProfile", /* X-StartupProfile */
+
NULL
};
diff --git a/gst-libs/gst/rtsp/gstrtspdefs.h b/gst-libs/gst/rtsp/gstrtspdefs.h
index dd4b531..ae3b105 100644
--- a/gst-libs/gst/rtsp/gstrtspdefs.h
+++ b/gst-libs/gst/rtsp/gstrtspdefs.h
@@ -287,7 +287,29 @@ typedef enum {
/* Since 0.10.23 */
GST_RTSP_HDR_ETAG, /* ETag */
- GST_RTSP_HDR_IF_MATCH /* If-Match */
+ GST_RTSP_HDR_IF_MATCH, /* If-Match */
+
+ /* WM extensions [MS-RTSP] */
+ GST_RTSP_HDR_ACCEPT_CHARSET, /* Accept-Charset */
+ GST_RTSP_HDR_SUPPORTED, /* Supported */
+ GST_RTSP_HDR_VARY, /* Vary */
+ GST_RTSP_HDR_X_ACCELERATE_STREAMING, /* X-Accelerate-Streaming */
+ GST_RTSP_HDR_X_ACCEPT_AUTHENT, /* X-Accept-Authentication */
+ GST_RTSP_HDR_X_ACCEPT_PROXY_AUTHENT, /* X-Accept-Proxy-Authentication */
+ GST_RTSP_HDR_X_BROADCAST_ID, /* X-Broadcast-Id */
+ GST_RTSP_HDR_X_BURST_STREAMING, /* X-Burst-Streaming */
+ GST_RTSP_HDR_X_NOTICE, /* X-Notice */
+ GST_RTSP_HDR_X_PLAYER_LAG_TIME, /* X-Player-Lag-Time */
+ GST_RTSP_HDR_X_PLAYLIST, /* X-Playlist */
+ GST_RTSP_HDR_X_PLAYLIST_CHANGE_NOTICE, /* X-Playlist-Change-Notice */
+ GST_RTSP_HDR_X_PLAYLIST_GEN_ID, /* X-Playlist-Gen-Id */
+ GST_RTSP_HDR_X_PLAYLIST_SEEK_ID, /* X-Playlist-Seek-Id */
+ GST_RTSP_HDR_X_PROXY_CLIENT_AGENT, /* X-Proxy-Client-Agent */
+ GST_RTSP_HDR_X_PROXY_CLIENT_VERB, /* X-Proxy-Client-Verb */
+ GST_RTSP_HDR_X_RECEDING_PLAYLISTCHANGE, /* X-Receding-PlaylistChange */
+ GST_RTSP_HDR_X_RTP_INFO, /* X-RTP-Info */
+ GST_RTSP_HDR_X_STARTUPPROFILE /* X-StartupProfile */
+
} GstRTSPHeaderField;
typedef enum {
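
A sketch of attaching some of the newly added [MS-RTSP] headers to a request; the URI and header values are purely illustrative.

#include <gst/rtsp/gstrtspmessage.h>

static GstRTSPResult
make_wm_describe (GstRTSPMessage ** msg)
{
  GstRTSPResult res;

  res = gst_rtsp_message_new_request (msg, GST_RTSP_DESCRIBE,
      "rtsp://example.com/stream");
  if (res != GST_RTSP_OK)
    return res;

  /* advertise Windows Media specific capabilities */
  gst_rtsp_message_add_header (*msg, GST_RTSP_HDR_SUPPORTED,
      "com.microsoft.wm.srvppair, com.microsoft.wm.sswitch");
  gst_rtsp_message_add_header (*msg, GST_RTSP_HDR_X_ACCELERATE_STREAMING,
      "AccelDuration=10000;AccelBandwidth=5912000");

  return GST_RTSP_OK;
}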

View file

@ -0,0 +1,82 @@
/* GStreamer
* Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_META_VIDEO_H__
#define __GST_META_VIDEO_H__
#include <gst/gst.h>
#include <gst/video/video.h>
G_BEGIN_DECLS
#define GST_META_API_VIDEO "GstMetaVideo"
#define GST_META_INFO_VIDEO (gst_meta_video_get_info())
typedef struct _GstMetaVideo GstMetaVideo;
/**
* GstMetaVideo:
* @meta: parent #GstMeta
* @flags: additional video flags
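* @buffer: the buffer this metadata belongs to
* @format: the video format of the image
* @width: the width of the image
* @height: the height of the image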
* @n_planes: the number of planes in the image
* @offset: array of offsets for the planes
* @stride: array of strides for the planes
* @map: map the memory of a plane
* @unmap: unmap the memory of a plane
*
* Extra buffer metadata describing image properties
*/
struct _GstMetaVideo {
GstMeta meta;
GstBuffer *buffer;
GstVideoFlags flags;
GstVideoFormat format;
guint width;
guint height;
guint n_planes;
gsize offset[GST_VIDEO_MAX_PLANES];
gint stride[GST_VIDEO_MAX_PLANES];
gpointer (*map) (GstMetaVideo *meta, guint plane, gint *stride,
GstMapFlags flags);
gboolean (*unmap) (GstMetaVideo *meta, guint plane, gpointer data);
};
const GstMetaInfo * gst_meta_video_get_info (void);
#define gst_buffer_get_meta_video(b) ((GstMetaVideo*)gst_buffer_get_meta((b),GST_META_INFO_VIDEO))
GstMetaVideo * gst_buffer_add_meta_video (GstBuffer *buffer, GstVideoFlags flags,
GstVideoFormat format, guint width, guint height);
GstMetaVideo * gst_buffer_add_meta_video_full (GstBuffer *buffer, GstVideoFlags flags,
GstVideoFormat format, guint width, guint height,
guint n_planes, gsize offset[GST_VIDEO_MAX_PLANES],
gint stride[GST_VIDEO_MAX_PLANES]);
gpointer gst_meta_video_map (GstMetaVideo *meta, guint plane, gint *stride,
GstMapFlags flags);
gboolean gst_meta_video_unmap (GstMetaVideo *meta, guint plane, gpointer data);
G_END_DECLS
#endif /* __GST_META_VIDEO_H__ */
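
A sketch of how this experimental metadata might be attached and read back; the format and size are arbitrary, and gst_video_format_get_size() is assumed to be available for sizing the allocation.

#include <gst/video/gstmetavideo.h>

static GstBuffer *
make_i420_buffer (void)
{
  GstBuffer *buffer;
  GstMetaVideo *meta;
  gpointer data;
  gint stride;

  buffer = gst_buffer_new_and_alloc (
      gst_video_format_get_size (GST_VIDEO_FORMAT_I420, 320, 240));

  /* describe the image layout; the _full variant would take explicit
   * per-plane offsets and strides instead */
  meta = gst_buffer_add_meta_video (buffer, 0, GST_VIDEO_FORMAT_I420,
      320, 240);

  /* access the first (Y) plane through the meta's map vfunc */
  data = gst_meta_video_map (meta, 0, &stride, GST_MAP_READ);
  /* ... inspect or fill pixels, stride bytes per line ... */
  gst_meta_video_unmap (meta, 0, data);

  return buffer;
}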

View file

@ -1262,7 +1262,7 @@ _create_stream_group (GstEncodeBin * ebin, GstEncodingProfile * sprof,
/* FIXME : Once we have properties for specific converters, use those */
if (GST_IS_ENCODING_VIDEO_PROFILE (sprof)) {
const gboolean native_video =
! !(ebin->flags & GST_ENC_FLAG_NO_VIDEO_CONVERSION);
!!(ebin->flags & GST_ENC_FLAG_NO_VIDEO_CONVERSION);
GstElement *cspace = NULL, *scale, *vrate, *cspace2 = NULL;
GST_LOG ("Adding conversion elements for video stream");
@ -1488,17 +1488,59 @@ cleanup:
}
static gboolean
_factory_can_sink_caps (GstElementFactory * factory, const GstCaps * caps)
_gst_caps_match_foreach (GQuark field_id, const GValue * value, gpointer data)
{
GstStructure *structure = data;
const GValue *other_value = gst_structure_id_get_value (structure, field_id);
if (G_UNLIKELY (other_value == NULL))
return FALSE;
if (gst_value_compare (value, other_value) == GST_VALUE_EQUAL) {
return TRUE;
}
return FALSE;
}
/*
* checks that there is at least one structure on caps_a that has
* all its fields exactly the same as one structure on caps_b
*/
static gboolean
_gst_caps_match (const GstCaps * caps_a, const GstCaps * caps_b)
{
gint i, j;
gboolean res = FALSE;
for (i = 0; i < gst_caps_get_size (caps_a); i++) {
GstStructure *structure_a = gst_caps_get_structure (caps_a, i);
for (j = 0; j < gst_caps_get_size (caps_b); j++) {
GstStructure *structure_b = gst_caps_get_structure (caps_b, j);
res = gst_structure_foreach (structure_a, _gst_caps_match_foreach,
structure_b);
if (res)
goto end;
}
}
end:
return res;
}
static gboolean
_factory_can_handle_caps (GstElementFactory * factory, const GstCaps * caps,
GstPadDirection dir, gboolean exact)
{
GList *templates = factory->staticpadtemplates;
while (templates) {
GstStaticPadTemplate *template = (GstStaticPadTemplate *) templates->data;
if (template->direction == GST_PAD_SINK) {
if (template->direction == dir) {
GstCaps *tmp = gst_static_caps_get (&template->static_caps);
if (gst_caps_can_intersect (tmp, caps)) {
if ((exact && _gst_caps_match (caps, tmp)) ||
(!exact && gst_caps_can_intersect (tmp, caps))) {
gst_caps_unref (tmp);
return TRUE;
}
@ -1549,6 +1591,31 @@ beach:
return formatter;
}
static gint
compare_elements (gconstpointer a, gconstpointer b, gpointer udata)
{
GstCaps *caps = udata;
GstElementFactory *fac_a = (GstElementFactory *) a;
GstElementFactory *fac_b = (GstElementFactory *) b;
/* FIXME not quite sure this is the best algorithm to order the elements
* Some caps similarity comparison algorithm would fit better than going
* boolean (equals/not equals).
*/
gboolean equals_a = _factory_can_handle_caps (fac_a, caps, GST_PAD_SRC, TRUE);
gboolean equals_b = _factory_can_handle_caps (fac_b, caps, GST_PAD_SRC, TRUE);
if (equals_a == equals_b) {
return gst_plugin_feature_get_rank ((GstPluginFeature *) fac_b) -
gst_plugin_feature_get_rank ((GstPluginFeature *) fac_a);
} else if (equals_a) {
return -1;
} else if (equals_b) {
return 1;
}
return 0;
}
static inline GstElement *
_get_muxer (GstEncodeBin * ebin)
{
@ -1571,6 +1638,10 @@ _get_muxer (GstEncodeBin * ebin)
gst_element_factory_list_filter (ebin->formatters, format, GST_PAD_SRC,
TRUE);
muxers = g_list_sort_with_data (muxers, compare_elements, (gpointer) format);
formatters =
g_list_sort_with_data (formatters, compare_elements, (gpointer) format);
muxers = g_list_concat (muxers, formatters);
if (muxers == NULL)
@ -1591,10 +1662,10 @@ _get_muxer (GstEncodeBin * ebin)
for (tmp = profiles; tmp; tmp = tmp->next) {
GstEncodingProfile *sprof = (GstEncodingProfile *) tmp->data;
if (!_factory_can_sink_caps (muxerfact,
gst_encoding_profile_get_format (sprof))) {
GST_DEBUG ("Skipping muxer because it can't sink caps %" GST_PTR_FORMAT,
gst_encoding_profile_get_format (sprof));
if (!_factory_can_handle_caps (muxerfact,
gst_encoding_profile_get_format (sprof), GST_PAD_SINK, FALSE)) {
GST_DEBUG ("Skipping muxer because it can't sink caps %"
GST_PTR_FORMAT, gst_encoding_profile_get_format (sprof));
cansinkstreams = FALSE;
break;
}

View file

@ -8,6 +8,8 @@ built_headers = gstplay-marshal.h
plugin_LTLIBRARIES = libgstplayback.la
csp_cflags = -DCOLORSPACE=\"videoconvert\"
libgstplayback_la_SOURCES = \
gstdecodebin2.c \
gsturidecodebin.c \
@ -22,7 +24,7 @@ libgstplayback_la_SOURCES = \
gststreamsynchronizer.c
nodist_libgstplayback_la_SOURCES = $(built_sources)
libgstplayback_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS)
libgstplayback_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(csp_cflags)
libgstplayback_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstplayback_la_LIBADD = \
$(top_builddir)/gst-libs/gst/pbutils/libgstpbutils-@GST_MAJORMINOR@.la \
@ -57,7 +59,7 @@ Android.mk: Makefile.am $(BUILT_SOURCES)
-:REL_TOP $(top_srcdir) -:ABS_TOP $(abs_top_srcdir) \
-:SOURCES $(libgstplayback_la_SOURCES) \
$(nodist_libgstplayback_la_SOURCES) \
-:CFLAGS $(DEFS) $(DEFAULT_INCLUDES) $(libgstplayback_la_CFLAGS) \
-:CFLAGS $(DEFS) $(DEFAULT_INCLUDES) $(libgstplayback_la_CFLAGS) $(csp_cflags) \
-:LDFLAGS $(libgstplayback_la_LDFLAGS) \
$(libgstplayback_la_LIBADD) \
-ldl \

View file

@ -1438,7 +1438,7 @@ analyze_new_pad (GstDecodeBin * dbin, GstElement * src, GstPad * pad,
for (i = 0; i < factories->n_values && !dontuse; i++) {
GstElementFactory *factory =
g_value_get_object (g_value_array_get_nth (factories, 0));
g_value_get_object (g_value_array_get_nth (factories, i));
GstCaps *tcaps;
/* We are only interested in skipping decoders */
@ -1577,9 +1577,9 @@ setup_caps_delay:
ppad->event_probe_id =
gst_pad_add_probe (pad, GST_PROBE_TYPE_EVENT, pad_event_cb, ppad, NULL);
chain->pending_pads = g_list_prepend (chain->pending_pads, ppad);
CHAIN_MUTEX_UNLOCK (chain);
g_signal_connect (G_OBJECT (pad), "notify::caps",
G_CALLBACK (caps_notify_cb), chain);
CHAIN_MUTEX_UNLOCK (chain);
return;
}
}

View file

@ -1132,13 +1132,13 @@ gen_video_deinterlace_chain (GstPlaySink * playsink)
bin = GST_BIN_CAST (chain->chain.bin);
gst_object_ref_sink (bin);
GST_DEBUG_OBJECT (playsink, "creating videoconvert");
chain->conv = gst_element_factory_make ("videoconvert", "vdconv");
GST_DEBUG_OBJECT (playsink, "creating " COLORSPACE);
chain->conv = gst_element_factory_make (COLORSPACE, "vdconv");
if (chain->conv == NULL) {
post_missing_element_message (playsink, "videoconvert");
post_missing_element_message (playsink, COLORSPACE);
GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
(_("Missing element '%s' - check your GStreamer installation."),
"videoconvert"), ("video rendering might fail"));
COLORSPACE), ("video rendering might fail"));
} else {
gst_bin_add (bin, chain->conv);
head = chain->conv;
@ -2304,8 +2304,9 @@ gst_play_sink_reconfigure (GstPlaySink * playsink)
add_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE);
activate_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE);
gst_object_replace ((GstObject **) & playsink->videochain->ts_offset,
NULL);
if (playsink->videochain->ts_offset)
gst_object_unref (playsink->videochain->ts_offset);
playsink->videochain->ts_offset = NULL;
}
if (playsink->videodeinterlacechain) {
@ -2358,8 +2359,9 @@ gst_play_sink_reconfigure (GstPlaySink * playsink)
disconnect_chain (playsink->audiochain, playsink);
playsink->audiochain->volume = NULL;
playsink->audiochain->mute = NULL;
gst_object_replace ((GstObject **) & playsink->audiochain->ts_offset,
NULL);
if (playsink->audiochain->ts_offset)
gst_object_unref (playsink->audiochain->ts_offset);
playsink->audiochain->ts_offset = NULL;
free_chain ((GstPlayChain *) playsink->audiochain);
playsink->audiochain = NULL;
playsink->volume_changed = playsink->mute_changed = FALSE;
@ -2429,8 +2431,9 @@ gst_play_sink_reconfigure (GstPlaySink * playsink)
disconnect_chain (playsink->audiochain, playsink);
playsink->audiochain->volume = NULL;
playsink->audiochain->mute = NULL;
gst_object_replace ((GstObject **) & playsink->audiochain->ts_offset,
NULL);
if (playsink->audiochain->ts_offset)
gst_object_unref (playsink->audiochain->ts_offset);
playsink->audiochain->ts_offset = NULL;
}
add_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE);
activate_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE);
@ -3422,8 +3425,16 @@ gst_play_sink_change_state (GstElement * element, GstStateChange transition)
disconnect_chain (playsink->audiochain, playsink);
playsink->audiochain->volume = NULL;
playsink->audiochain->mute = NULL;
gst_object_replace ((GstObject **) & playsink->audiochain->ts_offset,
NULL);
}
if (playsink->audiochain && playsink->audiochain->ts_offset) {
gst_object_unref (playsink->audiochain->ts_offset);
playsink->audiochain->ts_offset = NULL;
}
if (playsink->videochain && playsink->videochain->ts_offset) {
gst_object_unref (playsink->videochain->ts_offset);
playsink->videochain->ts_offset = NULL;
}
ret = GST_STATE_CHANGE_SUCCESS;
break;

View file

@ -109,12 +109,12 @@ pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->sinkpad), NULL);
gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad), NULL);
self->conv = gst_element_factory_make ("videoconvert", "conv");
self->conv = gst_element_factory_make (COLORSPACE, "conv");
if (self->conv == NULL) {
post_missing_element_message (self, "videoconvert");
post_missing_element_message (self, COLORSPACE);
GST_ELEMENT_WARNING (self, CORE, MISSING_PLUGIN,
(_("Missing element '%s' - check your GStreamer installation."),
"videoconvert"), ("video rendering might fail"));
COLORSPACE), ("video rendering might fail"));
} else {
gst_bin_add (bin, self->conv);
gst_element_sync_state_with_parent (self->conv);
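
This file, like the playsink and subtitleoverlay hunks around it, swaps the hard-coded "videoconvert" element name for a COLORSPACE define so the converter used by the playback bins is chosen in one place. A hedged sketch of the make-and-warn pattern built on such a define; the define value and the helper name are assumptions, not the actual playback code:

#include <gst/gst.h>

/* Assumption: a single define names the colorspace converter element,
 * mirroring how the playback code uses COLORSPACE. */
#define COLORSPACE "videoconvert"

static GstElement *
my_make_converter (GstBin * bin)
{
  GstElement *conv = gst_element_factory_make (COLORSPACE, "conv");

  if (conv == NULL) {
    /* The real code also posts a missing-plugin element message here. */
    g_warning ("Missing element '%s' - check your GStreamer installation.",
        COLORSPACE);
    return NULL;
  }

  gst_bin_add (bin, conv);
  return conv;
}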

View file

@ -939,7 +939,7 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
gst_object_unref (src);
if (G_UNLIKELY (!_create_element (self, &self->post_colorspace,
"videoconvert", NULL, "post-colorspace", FALSE))) {
COLORSPACE, NULL, "post-colorspace", FALSE))) {
continue;
}
@ -951,13 +951,13 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
sink = gst_element_get_static_pad (self->post_colorspace, "sink");
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get sink pad from videoconvert");
GST_WARNING_OBJECT (self, "Can't get sink pad from " COLORSPACE);
gst_object_unref (src);
continue;
}
if (G_UNLIKELY (gst_pad_link (src, sink) != GST_PAD_LINK_OK)) {
GST_WARNING_OBJECT (self, "Can't link overlay with videoconvert");
GST_WARNING_OBJECT (self, "Can't link overlay with " COLORSPACE);
gst_object_unref (src);
gst_object_unref (sink);
continue;
@ -966,7 +966,7 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
gst_object_unref (sink);
if (G_UNLIKELY (!_create_element (self, &self->pre_colorspace,
"videoconvert", NULL, "pre-colorspace", FALSE))) {
COLORSPACE, NULL, "pre-colorspace", FALSE))) {
continue;
}
@ -978,13 +978,13 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
src = gst_element_get_static_pad (self->pre_colorspace, "src");
if (G_UNLIKELY (!src)) {
GST_WARNING_OBJECT (self, "Can't get srcpad from videoconvert");
GST_WARNING_OBJECT (self, "Can't get srcpad from " COLORSPACE);
gst_object_unref (sink);
continue;
}
if (G_UNLIKELY (gst_pad_link (src, sink) != GST_PAD_LINK_OK)) {
GST_WARNING_OBJECT (self, "Can't link videoconvert to textoverlay");
GST_WARNING_OBJECT (self, "Can't link " COLORSPACE " to textoverlay");
gst_object_unref (src);
gst_object_unref (sink);
continue;
@ -995,7 +995,7 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
/* Set src ghostpad target */
src = gst_element_get_static_pad (self->post_colorspace, "src");
if (G_UNLIKELY (!src)) {
GST_WARNING_OBJECT (self, "Can't get src pad from videoconvert");
GST_WARNING_OBJECT (self, "Can't get src pad from " COLORSPACE);
continue;
}
@ -1014,7 +1014,7 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
sink = gst_element_get_static_pad (self->pre_colorspace, "sink");
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get sink pad from videoconvert");
GST_WARNING_OBJECT (self, "Can't get sink pad from " COLORSPACE);
continue;
}
@ -1046,7 +1046,7 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
/* Set the sink ghostpad targets */
sink = gst_element_get_static_pad (self->pre_colorspace, "sink");
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get sink pad from videoconvert");
GST_WARNING_OBJECT (self, "Can't get sink pad from " COLORSPACE);
continue;
}
@ -1096,7 +1096,7 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
/* First link everything internally */
if (G_UNLIKELY (!_create_element (self, &self->post_colorspace,
"videoconvert", NULL, "post-colorspace", FALSE))) {
COLORSPACE, NULL, "post-colorspace", FALSE))) {
continue;
}
@ -1108,13 +1108,13 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
sink = gst_element_get_static_pad (self->post_colorspace, "sink");
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get sink pad from videoconvert");
GST_WARNING_OBJECT (self, "Can't get sink pad from " COLORSPACE);
gst_object_unref (src);
continue;
}
if (G_UNLIKELY (gst_pad_link (src, sink) != GST_PAD_LINK_OK)) {
GST_WARNING_OBJECT (self, "Can't link renderer with videoconvert");
GST_WARNING_OBJECT (self, "Can't link renderer with " COLORSPACE);
gst_object_unref (src);
gst_object_unref (sink);
continue;
@ -1123,7 +1123,7 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
gst_object_unref (sink);
if (G_UNLIKELY (!_create_element (self, &self->pre_colorspace,
"videoconvert", NULL, "pre-colorspace", FALSE))) {
COLORSPACE, NULL, "pre-colorspace", FALSE))) {
continue;
}
@ -1135,13 +1135,13 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
src = gst_element_get_static_pad (self->pre_colorspace, "src");
if (G_UNLIKELY (!src)) {
GST_WARNING_OBJECT (self, "Can't get srcpad from videoconvert");
GST_WARNING_OBJECT (self, "Can't get srcpad from " COLORSPACE);
gst_object_unref (sink);
continue;
}
if (G_UNLIKELY (gst_pad_link (src, sink) != GST_PAD_LINK_OK)) {
GST_WARNING_OBJECT (self, "Can't link videoconvert to renderer");
GST_WARNING_OBJECT (self, "Can't link " COLORSPACE " to renderer");
gst_object_unref (src);
gst_object_unref (sink);
continue;
@ -1152,7 +1152,7 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
/* Set src ghostpad target */
src = gst_element_get_static_pad (self->post_colorspace, "src");
if (G_UNLIKELY (!src)) {
GST_WARNING_OBJECT (self, "Can't get src pad from videoconvert");
GST_WARNING_OBJECT (self, "Can't get src pad from " COLORSPACE);
continue;
}
@ -1171,7 +1171,7 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
sink = gst_element_get_static_pad (self->pre_colorspace, "sink");
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get sink pad from videoconvert");
GST_WARNING_OBJECT (self, "Can't get sink pad from " COLORSPACE);
continue;
}
@ -1201,7 +1201,7 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
/* Set the sink ghostpad targets */
sink = gst_element_get_static_pad (self->pre_colorspace, "sink");
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get sink pad from videoconvert");
GST_WARNING_OBJECT (self, "Can't get sink pad from " COLORSPACE);
continue;
}

View file

@ -3,9 +3,9 @@ noinst_HEADERS = gstvideorate.h
plugin_LTLIBRARIES = libgstvideorate.la
libgstvideorate_la_SOURCES = gstvideorate.c
libgstvideorate_la_CFLAGS = $(GST_CFLAGS)
libgstvideorate_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS)
libgstvideorate_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvideorate_la_LIBADD = $(GST_LIBS)
libgstvideorate_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS)
libgstvideorate_la_LIBTOOLFLAGS = --tag=disable-static
Android.mk: Makefile.am $(BUILT_SOURCES)

View file

@ -120,37 +120,56 @@ static GstStaticPadTemplate gst_video_rate_sink_template =
static void gst_video_rate_swap_prev (GstVideoRate * videorate,
GstBuffer * buffer, gint64 time);
static gboolean gst_video_rate_event (GstPad * pad, GstEvent * event);
static gboolean gst_video_rate_query (GstPad * pad, GstQuery * query);
static GstFlowReturn gst_video_rate_chain (GstPad * pad, GstBuffer * buffer);
static gboolean gst_video_rate_sink_event (GstBaseTransform * trans,
GstEvent * event);
static gboolean gst_video_rate_query (GstBaseTransform * trans,
GstPadDirection direction, GstQuery * query);
static gboolean gst_video_rate_setcaps (GstBaseTransform * trans,
GstCaps * in_caps, GstCaps * out_caps);
static GstCaps *gst_video_rate_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static void gst_video_rate_fixate_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);
static GstFlowReturn gst_video_rate_transform_ip (GstBaseTransform * trans,
GstBuffer * buf);
static gboolean gst_video_rate_start (GstBaseTransform * trans);
static void gst_video_rate_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_video_rate_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_video_rate_change_state (GstElement * element,
GstStateChange transition);
/*static guint gst_video_rate_signals[LAST_SIGNAL] = { 0 }; */
static GParamSpec *pspec_drop = NULL;
static GParamSpec *pspec_duplicate = NULL;
#define gst_video_rate_parent_class parent_class
G_DEFINE_TYPE (GstVideoRate, gst_video_rate, GST_TYPE_ELEMENT);
G_DEFINE_TYPE (GstVideoRate, gst_video_rate, GST_TYPE_BASE_TRANSFORM);
static void
gst_video_rate_class_init (GstVideoRateClass * klass)
{
GObjectClass *object_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
parent_class = g_type_class_peek_parent (klass);
GstBaseTransformClass *base_class = GST_BASE_TRANSFORM_CLASS (klass);
object_class->set_property = gst_video_rate_set_property;
object_class->get_property = gst_video_rate_get_property;
base_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_rate_setcaps);
base_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_video_rate_transform_caps);
base_class->transform_ip = GST_DEBUG_FUNCPTR (gst_video_rate_transform_ip);
base_class->sink_event = GST_DEBUG_FUNCPTR (gst_video_rate_sink_event);
base_class->start = GST_DEBUG_FUNCPTR (gst_video_rate_start);
base_class->fixate_caps = GST_DEBUG_FUNCPTR (gst_video_rate_fixate_caps);
base_class->query = GST_DEBUG_FUNCPTR (gst_video_rate_query);
g_object_class_install_property (object_class, ARG_IN,
g_param_spec_uint64 ("in", "In",
"Number of input frames", 0, G_MAXUINT64, 0,
@ -186,22 +205,12 @@ gst_video_rate_class_init (GstVideoRateClass * klass)
"Don't produce buffers before the first one we receive",
DEFAULT_SKIP_TO_FIRST, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_set_details_simple (element_class,
"Video rate adjuster", "Filter/Effect/Video",
"Drops/duplicates/adjusts timestamps on video frames to make a perfect stream",
"Wim Taymans <wim@fluendo.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_video_rate_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_video_rate_src_template));
/**
* GstVideoRate:drop-only:
*
* Only drop frames, no duplicates are produced.
*
* Since: 0.10.34
* Since: 0.10.36
*/
g_object_class_install_property (object_class, ARG_DROP_ONLY,
g_param_spec_boolean ("drop-only", "Only Drop",
@ -215,7 +224,7 @@ gst_video_rate_class_init (GstVideoRateClass * klass)
* where the framerate is calculated using a moving average over the
* configured period.
*
* Since: 0.10.34
* Since: 0.10.36
*/
g_object_class_install_property (object_class, ARG_AVERAGE_PERIOD,
g_param_spec_uint64 ("average-period", "Period over which to average",
@ -223,113 +232,79 @@ gst_video_rate_class_init (GstVideoRateClass * klass)
0, G_MAXINT64, DEFAULT_AVERAGE_PERIOD,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
element_class->change_state = GST_DEBUG_FUNCPTR (gst_video_rate_change_state);
}
gst_element_class_set_details_simple (element_class,
"Video rate adjuster", "Filter/Effect/Video",
"Drops/duplicates/adjusts timestamps on video frames to make a perfect stream",
"Wim Taymans <wim@fluendo.com>");
/* return the caps that can be used on out_pad given in_caps on in_pad */
static gboolean
gst_video_rate_transformcaps (GstPad * in_pad, GstCaps * in_caps,
GstPad * out_pad, GstCaps ** out_caps, GstCaps * filter)
{
GstCaps *intersect, *in_templ;
gint i;
GSList *extra_structures = NULL;
GSList *iter;
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_video_rate_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_video_rate_src_template));
in_templ = gst_pad_get_pad_template_caps (in_pad);
intersect =
gst_caps_intersect_full (in_caps, in_templ, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref (in_templ);
/* all possible framerates are allowed */
for (i = 0; i < gst_caps_get_size (intersect); i++) {
GstStructure *structure;
structure = gst_caps_get_structure (intersect, i);
if (gst_structure_has_field (structure, "framerate")) {
GstStructure *copy_structure;
copy_structure = gst_structure_copy (structure);
gst_structure_set (copy_structure,
"framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
extra_structures = g_slist_append (extra_structures, copy_structure);
}
}
/* append the extra structures */
for (iter = extra_structures; iter != NULL; iter = g_slist_next (iter)) {
gst_caps_append_structure (intersect, (GstStructure *) iter->data);
}
g_slist_free (extra_structures);
if (filter) {
GstCaps *tmp;
tmp = gst_caps_intersect_full (filter, intersect, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref (intersect);
intersect = tmp;
}
*out_caps = intersect;
return TRUE;
}
static GstCaps *
gst_video_rate_getcaps (GstPad * pad, GstCaps * filter)
gst_video_rate_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
GstVideoRate *videorate;
GstPad *otherpad;
GstCaps *caps;
GstCaps *ret;
GstStructure *s;
videorate = GST_VIDEO_RATE (GST_PAD_PARENT (pad));
/* Should always be called with simple caps */
g_return_val_if_fail (GST_CAPS_IS_SIMPLE (caps), NULL);
otherpad = (pad == videorate->srcpad) ? videorate->sinkpad :
videorate->srcpad;
ret = gst_caps_copy (caps);
/* we can do what the peer can */
caps = gst_pad_peer_get_caps (otherpad, filter);
if (caps) {
GstCaps *transform, *intersect;
s = gst_structure_copy (gst_caps_get_structure (caps, 0));
gst_video_rate_transformcaps (otherpad, caps, pad, &transform, filter);
/* set the framerate as a range */
gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
G_MAXINT, 1, NULL);
/* Now prefer the downstream caps if possible */
intersect =
gst_caps_intersect_full (caps, transform, GST_CAPS_INTERSECT_FIRST);
if (!gst_caps_is_empty (intersect)) {
gst_caps_append (intersect, transform);
gst_caps_unref (caps);
caps = intersect;
} else {
gst_caps_unref (intersect);
caps = transform;
}
} else {
/* no peer, our padtemplate is enough then */
caps = gst_pad_get_pad_template_caps (pad);
if (filter) {
GstCaps *intersection;
intersection =
gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref (caps);
caps = intersection;
}
}
gst_caps_append_structure (ret, s);
return caps;
return ret;
}
static void
gst_video_rate_fixate_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
{
GstStructure *s;
gint num, denom;
s = gst_caps_get_structure (caps, 0);
if (G_UNLIKELY (!gst_structure_get_fraction (s, "framerate", &num, &denom)))
return;
s = gst_caps_get_structure (othercaps, 0);
gst_structure_fixate_field_nearest_fraction (s, "framerate", num, denom);
}
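
The new transform_caps/fixate_caps pair above first offers a copy of the caps with "framerate" widened to the full fraction range, then snaps the output framerate to the value closest to the input rate. A small sketch of those two caps operations in isolation; the helper names are illustrative and othercaps is assumed writable:

#include <gst/gst.h>

/* Append a copy of the first structure of @caps with any framerate allowed,
 * as the transform_caps above does. */
static GstCaps *
my_add_any_framerate (GstCaps * caps)
{
  GstCaps *ret = gst_caps_copy (caps);
  GstStructure *s = gst_structure_copy (gst_caps_get_structure (caps, 0));

  gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE,
      0, 1, G_MAXINT, 1, NULL);
  gst_caps_append_structure (ret, s);

  return ret;
}

/* Fixate the framerate of @othercaps as close as possible to @num/@denom,
 * as the fixate_caps above does. */
static void
my_fixate_framerate_near (GstCaps * othercaps, gint num, gint denom)
{
  GstStructure *s = gst_caps_get_structure (othercaps, 0);

  gst_structure_fixate_field_nearest_fraction (s, "framerate", num, denom);
}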
static gboolean
gst_video_rate_set_src_caps (GstVideoRate * videorate, GstCaps * caps)
gst_video_rate_setcaps (GstBaseTransform * trans, GstCaps * in_caps,
GstCaps * out_caps)
{
GstVideoRate *videorate = GST_VIDEO_RATE (trans);
GstStructure *structure;
gboolean ret = TRUE;
gint rate_numerator, rate_denominator;
GST_DEBUG_OBJECT (videorate, "src caps %" GST_PTR_FORMAT, caps);
videorate = GST_VIDEO_RATE (trans);
structure = gst_caps_get_structure (caps, 0);
GST_DEBUG_OBJECT (trans, "setcaps called in: %" GST_PTR_FORMAT
" out: %" GST_PTR_FORMAT, in_caps, out_caps);
structure = gst_caps_get_structure (in_caps, 0);
if (!gst_structure_get_fraction (structure, "framerate",
&rate_numerator, &rate_denominator))
goto no_framerate;
videorate->from_rate_numerator = rate_numerator;
videorate->from_rate_denominator = rate_denominator;
structure = gst_caps_get_structure (out_caps, 0);
if (!gst_structure_get_fraction (structure, "framerate",
&rate_numerator, &rate_denominator))
goto no_framerate;
@ -346,108 +321,26 @@ gst_video_rate_set_src_caps (GstVideoRate * videorate, GstCaps * caps)
videorate->out_frame_count = 0;
videorate->to_rate_numerator = rate_numerator;
videorate->to_rate_denominator = rate_denominator;
videorate->wanted_diff = gst_util_uint64_scale_int (GST_SECOND,
rate_denominator, rate_numerator);
gst_pad_push_event (videorate->srcpad, gst_event_new_caps (caps));
if (rate_numerator)
videorate->wanted_diff = gst_util_uint64_scale_int (GST_SECOND,
rate_denominator, rate_numerator);
else
videorate->wanted_diff = 0;
return TRUE;
/* ERRORS */
no_framerate:
{
GST_DEBUG_OBJECT (videorate, "no framerate specified");
return FALSE;
}
}
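
Note that the new setcaps only computes wanted_diff when the output framerate numerator is non-zero, so variable-framerate caps (0/1) no longer cause a division by zero. A tiny hedged sketch of that guard with illustrative values:

#include <gst/gst.h>

/* Frame duration for a num/denom framerate, or 0 for variable rate (0/1);
 * e.g. 1001/30000 of a second for 30000/1001 fps. */
static GstClockTime
my_frame_duration (gint num, gint denom)
{
  if (num == 0)
    return 0;

  return gst_util_uint64_scale_int (GST_SECOND, denom, num);
}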
static gboolean
gst_video_rate_set_sink_caps (GstVideoRate * videorate, GstCaps * caps)
{
GstStructure *structure;
gboolean ret = TRUE;
gint rate_numerator, rate_denominator;
GST_DEBUG_OBJECT (videorate, "sink caps %" GST_PTR_FORMAT, caps);
structure = gst_caps_get_structure (caps, 0);
if (!gst_structure_get_fraction (structure, "framerate",
&rate_numerator, &rate_denominator))
goto no_framerate;
videorate->from_rate_numerator = rate_numerator;
videorate->from_rate_denominator = rate_denominator;
/* now try to find something for the peer */
if (gst_pad_peer_accept_caps (videorate->srcpad, caps)) {
/* the peer accepts the caps as they are */
ret = gst_video_rate_set_src_caps (videorate, caps);
} else {
GstCaps *transform = NULL;
ret = FALSE;
/* see how we can transform the input caps */
if (!gst_video_rate_transformcaps (videorate->sinkpad, caps,
videorate->srcpad, &transform, NULL))
goto no_transform;
GST_DEBUG_OBJECT (videorate, "transform %" GST_PTR_FORMAT, transform);
/* see what the peer can do */
caps = gst_pad_peer_get_caps (videorate->srcpad, transform);
GST_DEBUG_OBJECT (videorate, "icaps %" GST_PTR_FORMAT, caps);
/* could turn up empty, due to e.g. colorspace etc */
if (gst_caps_get_size (caps) == 0) {
gst_caps_unref (caps);
goto no_transform;
}
/* take first possibility */
caps = gst_caps_make_writable (caps);
gst_caps_truncate (caps);
structure = gst_caps_get_structure (caps, 0);
/* and fixate */
gst_structure_fixate_field_nearest_fraction (structure, "framerate",
rate_numerator, rate_denominator);
gst_structure_get_fraction (structure, "framerate",
&rate_numerator, &rate_denominator);
videorate->to_rate_numerator = rate_numerator;
videorate->to_rate_denominator = rate_denominator;
if (gst_structure_has_field (structure, "interlaced"))
gst_structure_fixate_field_boolean (structure, "interlaced", FALSE);
if (gst_structure_has_field (structure, "color-matrix"))
gst_structure_fixate_field_string (structure, "color-matrix", "sdtv");
if (gst_structure_has_field (structure, "chroma-site"))
gst_structure_fixate_field_string (structure, "chroma-site", "mpeg2");
if (gst_structure_has_field (structure, "pixel-aspect-ratio"))
gst_structure_fixate_field_nearest_fraction (structure,
"pixel-aspect-ratio", 1, 1);
ret = gst_video_rate_set_src_caps (videorate, caps);
gst_caps_unref (caps);
}
done:
/* After a setcaps, our caps may have changed. In that case, we can't use
* the old buffer, if there was one (it might have different dimensions) */
GST_DEBUG_OBJECT (videorate, "swapping old buffers");
gst_video_rate_swap_prev (videorate, NULL, GST_CLOCK_TIME_NONE);
videorate->last_ts = GST_CLOCK_TIME_NONE;
videorate->average = 0;
return ret;
no_framerate:
{
GST_DEBUG_OBJECT (videorate, "no framerate specified");
goto done;
}
no_transform:
{
GST_DEBUG_OBJECT (videorate, "no framerate transform possible");
ret = FALSE;
goto done;
}
@ -476,34 +369,19 @@ gst_video_rate_reset (GstVideoRate * videorate)
static void
gst_video_rate_init (GstVideoRate * videorate)
{
videorate->sinkpad =
gst_pad_new_from_static_template (&gst_video_rate_sink_template, "sink");
gst_pad_set_event_function (videorate->sinkpad,
GST_DEBUG_FUNCPTR (gst_video_rate_event));
gst_pad_set_chain_function (videorate->sinkpad,
GST_DEBUG_FUNCPTR (gst_video_rate_chain));
gst_pad_set_getcaps_function (videorate->sinkpad,
GST_DEBUG_FUNCPTR (gst_video_rate_getcaps));
gst_element_add_pad (GST_ELEMENT (videorate), videorate->sinkpad);
videorate->srcpad =
gst_pad_new_from_static_template (&gst_video_rate_src_template, "src");
gst_pad_set_query_function (videorate->srcpad,
GST_DEBUG_FUNCPTR (gst_video_rate_query));
gst_pad_set_getcaps_function (videorate->srcpad,
GST_DEBUG_FUNCPTR (gst_video_rate_getcaps));
gst_element_add_pad (GST_ELEMENT (videorate), videorate->srcpad);
gst_video_rate_reset (videorate);
videorate->silent = DEFAULT_SILENT;
videorate->new_pref = DEFAULT_NEW_PREF;
videorate->drop_only = DEFAULT_DROP_ONLY;
videorate->average_period = DEFAULT_AVERAGE_PERIOD;
videorate->average_period_set = DEFAULT_AVERAGE_PERIOD;
videorate->from_rate_numerator = 0;
videorate->from_rate_denominator = 0;
videorate->to_rate_numerator = 0;
videorate->to_rate_denominator = 0;
gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (videorate), TRUE);
}
/* flush the oldest buffer */
@ -559,7 +437,7 @@ gst_video_rate_flush_prev (GstVideoRate * videorate, gboolean duplicate)
"old is best, dup, pushing buffer outgoing ts %" GST_TIME_FORMAT,
GST_TIME_ARGS (push_ts));
res = gst_pad_push (videorate->srcpad, outbuf);
res = gst_pad_push (GST_BASE_TRANSFORM_SRC_PAD (videorate), outbuf);
return res;
@ -578,7 +456,7 @@ gst_video_rate_swap_prev (GstVideoRate * videorate, GstBuffer * buffer,
GST_LOG_OBJECT (videorate, "swap_prev: storing buffer %p in prev", buffer);
if (videorate->prevbuf)
gst_buffer_unref (videorate->prevbuf);
videorate->prevbuf = buffer;
videorate->prevbuf = buffer != NULL ? gst_buffer_ref (buffer) : NULL;
videorate->prev_ts = time;
}
@ -604,25 +482,13 @@ gst_video_rate_notify_duplicate (GstVideoRate * videorate)
#define MAGIC_LIMIT 25
static gboolean
gst_video_rate_event (GstPad * pad, GstEvent * event)
gst_video_rate_sink_event (GstBaseTransform * trans, GstEvent * event)
{
GstVideoRate *videorate;
gboolean ret;
videorate = GST_VIDEO_RATE (gst_pad_get_parent (pad));
videorate = GST_VIDEO_RATE (trans);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
ret = gst_video_rate_set_sink_caps (videorate, caps);
gst_event_unref (event);
/* don't forward */
goto done;
}
case GST_EVENT_SEGMENT:
{
const GstSegment *segment;
@ -732,12 +598,7 @@ gst_video_rate_event (GstPad * pad, GstEvent * event)
break;
}
ret = gst_pad_push_event (videorate->srcpad, event);
done:
gst_object_unref (videorate);
return ret;
return TRUE;
/* ERRORS */
format_error:
@ -745,18 +606,20 @@ format_error:
GST_WARNING_OBJECT (videorate,
"Got segment but doesn't have GST_FORMAT_TIME value");
gst_event_unref (event);
ret = FALSE;
goto done;
return FALSE;
}
}
static gboolean
gst_video_rate_query (GstPad * pad, GstQuery * query)
gst_video_rate_query (GstBaseTransform * trans, GstPadDirection direction,
GstQuery * query)
{
GstVideoRate *videorate;
GstVideoRate *videorate = GST_VIDEO_RATE (trans);
gboolean res = FALSE;
GstPad *otherpad;
videorate = GST_VIDEO_RATE (gst_pad_get_parent (pad));
otherpad = (direction == GST_PAD_SRC) ?
GST_BASE_TRANSFORM_SINK_PAD (trans) : GST_BASE_TRANSFORM_SRC_PAD (trans);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_LATENCY:
@ -764,9 +627,14 @@ gst_video_rate_query (GstPad * pad, GstQuery * query)
GstClockTime min, max;
gboolean live;
guint64 latency;
guint64 avg_period;
GstPad *peer;
if ((peer = gst_pad_get_peer (videorate->sinkpad))) {
GST_OBJECT_LOCK (videorate);
avg_period = videorate->average_period_set;
GST_OBJECT_UNLOCK (videorate);
if (avg_period == 0 && (peer = gst_pad_get_peer (otherpad))) {
if ((res = gst_pad_query (peer, query))) {
gst_query_parse_latency (query, &live, &min, &max);
@ -800,20 +668,23 @@ gst_video_rate_query (GstPad * pad, GstQuery * query)
gst_query_set_latency (query, live, min, max);
}
gst_object_unref (peer);
break;
}
break;
/* Simply fall through if we add no latency of our own (averaging mode)
* or have no peer yet that we can ask about its latency. */
}
default:
res = gst_pad_query_default (pad, query);
res =
GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
query);
break;
}
gst_object_unref (videorate);
return res;
}
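
The reworked latency query above only asks the peer of the opposite pad when the element is not in averaging mode, and otherwise falls back to the base class handler. A hedged sketch of just the peer-forwarding part; the helper name is made up and a real element would add its own latency before answering:

#include <gst/gst.h>

/* Forward a latency query to the peer of @otherpad and pass the answer
 * through unchanged; returns FALSE if there is no peer or it cannot reply. */
static gboolean
my_forward_latency_query (GstPad * otherpad, GstQuery * query)
{
  GstPad *peer;
  gboolean res = FALSE;

  if ((peer = gst_pad_get_peer (otherpad))) {
    if ((res = gst_pad_query (peer, query))) {
      GstClockTime min, max;
      gboolean live;

      gst_query_parse_latency (query, &live, &min, &max);
      /* A real element would add one frame of latency to min/max here. */
      gst_query_set_latency (query, live, min, max);
    }
    gst_object_unref (peer);
  }

  return res;
}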
static GstFlowReturn
gst_video_rate_chain_max_avg (GstVideoRate * videorate, GstBuffer * buf)
gst_video_rate_trans_ip_max_avg (GstVideoRate * videorate, GstBuffer * buf)
{
GstClockTime ts = GST_BUFFER_TIMESTAMP (buf);
@ -857,26 +728,24 @@ gst_video_rate_chain_max_avg (GstVideoRate * videorate, GstBuffer * buf)
push:
videorate->out++;
return gst_pad_push (videorate->srcpad, buf);
return GST_FLOW_OK;
drop:
gst_buffer_unref (buf);
if (!videorate->silent)
gst_video_rate_notify_drop (videorate);
return GST_FLOW_OK;
return GST_BASE_TRANSFORM_FLOW_DROPPED;
}
static GstFlowReturn
gst_video_rate_chain (GstPad * pad, GstBuffer * buffer)
gst_video_rate_transform_ip (GstBaseTransform * trans, GstBuffer * buffer)
{
GstVideoRate *videorate;
GstFlowReturn res = GST_FLOW_OK;
GstFlowReturn res = GST_BASE_TRANSFORM_FLOW_DROPPED;
GstClockTime intime, in_ts, in_dur;
GstClockTime avg_period;
gboolean skip = FALSE;
videorate = GST_VIDEO_RATE (GST_PAD_PARENT (pad));
videorate = GST_VIDEO_RATE (trans);
/* make sure the denominators are not 0 */
if (videorate->from_rate_denominator == 0 ||
@ -889,22 +758,30 @@ gst_video_rate_chain (GstPad * pad, GstBuffer * buffer)
/* MT-safe switching between modes */
if (G_UNLIKELY (avg_period != videorate->average_period)) {
gboolean switch_mode = (avg_period == 0 || videorate->average_period == 0);
videorate->average_period = avg_period;
videorate->last_ts = GST_CLOCK_TIME_NONE;
if (avg_period && !videorate->average) {
/* enabling average mode */
videorate->average = 0;
} else {
/* enable regular mode */
gst_video_rate_swap_prev (videorate, NULL, 0);
/* arrange for skip-to-first behaviour */
videorate->next_ts = GST_CLOCK_TIME_NONE;
skip = TRUE;
if (switch_mode) {
if (avg_period) {
/* enabling average mode */
videorate->average = 0;
/* make sure no cached buffers from regular mode are left */
gst_video_rate_swap_prev (videorate, NULL, 0);
} else {
/* enable regular mode */
videorate->next_ts = GST_CLOCK_TIME_NONE;
skip = TRUE;
}
/* max averaging mode has no latency, normal mode does */
gst_element_post_message (GST_ELEMENT (videorate),
gst_message_new_latency (GST_OBJECT (videorate)));
}
}
if (videorate->average_period > 0)
return gst_video_rate_chain_max_avg (videorate, buffer);
return gst_video_rate_trans_ip_max_avg (videorate, buffer);
in_ts = GST_BUFFER_TIMESTAMP (buffer);
in_dur = GST_BUFFER_DURATION (buffer);
@ -967,7 +844,6 @@ gst_video_rate_chain (GstPad * pad, GstBuffer * buffer)
videorate->drop++;
if (!videorate->silent)
gst_video_rate_notify_drop (videorate);
gst_buffer_unref (buffer);
goto done;
}
@ -991,13 +867,13 @@ gst_video_rate_chain (GstPad * pad, GstBuffer * buffer)
/* output first one when its the best */
if (diff1 <= diff2) {
GstFlowReturn r;
count++;
/* on error the _flush function posted a warning already */
if ((res =
gst_video_rate_flush_prev (videorate,
if ((r = gst_video_rate_flush_prev (videorate,
count > 1)) != GST_FLOW_OK) {
gst_buffer_unref (buffer);
res = r;
goto done;
}
}
@ -1045,7 +921,6 @@ done:
not_negotiated:
{
GST_WARNING_OBJECT (videorate, "no framerate negotiated");
gst_buffer_unref (buffer);
res = GST_FLOW_NOT_NEGOTIATED;
goto done;
}
@ -1054,11 +929,18 @@ invalid_buffer:
{
GST_WARNING_OBJECT (videorate,
"Got buffer with GST_CLOCK_TIME_NONE timestamp, discarding it");
gst_buffer_unref (buffer);
res = GST_BASE_TRANSFORM_FLOW_DROPPED;
goto done;
}
}
static gboolean
gst_video_rate_start (GstBaseTransform * trans)
{
gst_video_rate_reset (GST_VIDEO_RATE (trans));
return TRUE;
}
static void
gst_video_rate_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
@ -1080,7 +962,7 @@ gst_video_rate_set_property (GObject * object,
videorate->drop_only = g_value_get_boolean (value);
break;
case ARG_AVERAGE_PERIOD:
videorate->average_period = g_value_get_uint64 (value);
videorate->average_period_set = g_value_get_uint64 (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
@ -1122,7 +1004,7 @@ gst_video_rate_get_property (GObject * object,
g_value_set_boolean (value, videorate->drop_only);
break;
case ARG_AVERAGE_PERIOD:
g_value_set_uint64 (value, videorate->average_period);
g_value_set_uint64 (value, videorate->average_period_set);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
@ -1131,36 +1013,6 @@ gst_video_rate_get_property (GObject * object,
GST_OBJECT_UNLOCK (videorate);
}
static GstStateChangeReturn
gst_video_rate_change_state (GstElement * element, GstStateChange transition)
{
GstStateChangeReturn ret;
GstVideoRate *videorate;
videorate = GST_VIDEO_RATE (element);
switch (transition) {
case GST_STATE_CHANGE_READY_TO_PAUSED:
videorate->discont = TRUE;
videorate->last_ts = -1;
break;
default:
break;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_video_rate_reset (videorate);
break;
default:
break;
}
return ret;
}
static gboolean
plugin_init (GstPlugin * plugin)
{
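
Taken together, these gstvideorate.c hunks replace the hand-rolled pads, chain, event, query and state-change code with GstBaseTransform vfuncs. A stripped-down skeleton of that wiring under the base-transform API used above; MyRate is a hypothetical element, not the real videorate, and pad templates, properties and the actual rate logic are omitted:

#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>

typedef struct
{
  GstBaseTransform parent;
} MyRate;

typedef struct
{
  GstBaseTransformClass parent_class;
} MyRateClass;

G_DEFINE_TYPE (MyRate, my_rate, GST_TYPE_BASE_TRANSFORM);

/* In-place transform. Returning GST_BASE_TRANSFORM_FLOW_DROPPED tells the
 * base class not to push the buffer and to dispose of it itself, which is
 * why the explicit gst_buffer_unref() calls disappear in the port above. */
static GstFlowReturn
my_rate_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  return GST_BASE_TRANSFORM_FLOW_DROPPED;
}

static gboolean
my_rate_start (GstBaseTransform * trans)
{
  /* Reset element state when streaming starts, like gst_video_rate_start. */
  return TRUE;
}

static void
my_rate_class_init (MyRateClass * klass)
{
  GstBaseTransformClass *base_class = GST_BASE_TRANSFORM_CLASS (klass);

  base_class->transform_ip = GST_DEBUG_FUNCPTR (my_rate_transform_ip);
  base_class->start = GST_DEBUG_FUNCPTR (my_rate_start);
}

static void
my_rate_init (MyRate * self)
{
  /* Let GAP-marked data reach transform_ip, as videorate requests above. */
  gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (self), TRUE);
}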

View file

@ -21,6 +21,7 @@
#define __GST_VIDEO_RATE_H__
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
G_BEGIN_DECLS
@ -45,9 +46,7 @@ typedef struct _GstVideoRateClass GstVideoRateClass;
*/
struct _GstVideoRate
{
GstElement element;
GstPad *sinkpad, *srcpad;
GstBaseTransform parent;
/* video state */
gint from_rate_numerator, from_rate_denominator;
@ -81,7 +80,7 @@ struct _GstVideoRate
struct _GstVideoRateClass
{
GstElementClass parent_class;
GstBaseTransformClass parent_class;
};
GType gst_video_rate_get_type (void);

View file

@ -0,0 +1,48 @@
diff --git a/gst/videorate/gstvideorate.c b/gst/videorate/gstvideorate.c
index 8d22186..7afcfdd 100644
--- a/gst/videorate/gstvideorate.c
+++ b/gst/videorate/gstvideorate.c
@@ -357,6 +357,7 @@ gst_video_rate_reset (GstVideoRate * videorate)
videorate->drop = 0;
videorate->dup = 0;
videorate->next_ts = GST_CLOCK_TIME_NONE;
+ videorate->discont = TRUE;
gst_video_rate_swap_prev (videorate, NULL, 0);
gst_segment_init (&videorate->segment, GST_FORMAT_TIME);
@@ -409,6 +410,13 @@ gst_video_rate_flush_prev (GstVideoRate * videorate)
GST_BUFFER_OFFSET (outbuf) = videorate->out;
GST_BUFFER_OFFSET_END (outbuf) = videorate->out + 1;
+ if (videorate->discont) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+ videorate->discont = FALSE;
+ }
+ else
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DISCONT);
+
/* this is the timestamp we put on the buffer */
push_ts = videorate->next_ts;
@@ -810,6 +818,9 @@ gst_video_rate_change_state (GstElement * element, GstStateChange transition)
videorate = GST_VIDEO_RATE (element);
switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ videorate->discont = TRUE;
+ break;
default:
break;
}
diff --git a/gst/videorate/gstvideorate.h b/gst/videorate/gstvideorate.h
index ea6063b..fe7feb6 100644
--- a/gst/videorate/gstvideorate.h
+++ b/gst/videorate/gstvideorate.h
@@ -56,6 +56,7 @@ struct _GstVideoRate
GstBuffer *prevbuf;
guint64 prev_ts; /* Previous buffer timestamp */
guint64 segment_out; /* in-segment counting */
+ gboolean discont;
/* segment handling */
GstSegment segment;
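
The patch carried in this new file makes videorate flag the first buffer it pushes after a reset as a discontinuity. A minimal sketch of that flagging step; the discont boolean is assumed to live in the element struct and the output buffer is assumed writable:

#include <gst/gst.h>

/* Set DISCONT on the first buffer after a reset, clear it on later ones. */
static void
my_mark_discont (GstBuffer * outbuf, gboolean * discont)
{
  if (*discont) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    *discont = FALSE;
  } else {
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DISCONT);
  }
}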

View file

@ -8,7 +8,8 @@ libgstvideoscale_la_SOURCES = \
vs_image.c \
vs_scanline.c \
vs_4tap.c \
vs_fill_borders.c
vs_fill_borders.c \
vs_lanczos.c
nodist_libgstvideoscale_la_SOURCES = $(ORC_NODIST_SOURCES)

View file

@ -90,13 +90,22 @@ GST_DEBUG_CATEGORY (video_scale_debug);
#define DEFAULT_PROP_METHOD GST_VIDEO_SCALE_BILINEAR
#define DEFAULT_PROP_ADD_BORDERS FALSE
#define DEFAULT_PROP_SHARPNESS 1.0
#define DEFAULT_PROP_SHARPEN 0.0
#define DEFAULT_PROP_DITHER FALSE
#define DEFAULT_PROP_SUBMETHOD 1
#define DEFAULT_PROP_ENVELOPE 2.0
enum
{
PROP_0,
PROP_METHOD,
PROP_ADD_BORDERS
/* FILL ME */
PROP_ADD_BORDERS,
PROP_SHARPNESS,
PROP_SHARPEN,
PROP_DITHER,
PROP_SUBMETHOD,
PROP_ENVELOPE
};
#undef GST_VIDEO_SIZE_RANGE
@ -122,6 +131,7 @@ gst_video_scale_method_get_type (void)
{GST_VIDEO_SCALE_NEAREST, "Nearest Neighbour", "nearest-neighbour"},
{GST_VIDEO_SCALE_BILINEAR, "Bilinear", "bilinear"},
{GST_VIDEO_SCALE_4TAP, "4-tap", "4-tap"},
{GST_VIDEO_SCALE_LANCZOS, "Lanczos", "lanczos"},
{0, NULL, NULL},
};
@ -217,6 +227,36 @@ gst_video_scale_class_init (GstVideoScaleClass * klass)
DEFAULT_PROP_ADD_BORDERS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_SHARPNESS,
g_param_spec_double ("sharpness", "Sharpness",
"Sharpness of filter", 0.0, 2.0, DEFAULT_PROP_SHARPNESS,
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_SHARPEN,
g_param_spec_double ("sharpen", "Sharpen",
"Sharpening", 0.0, 1.0, DEFAULT_PROP_SHARPEN,
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_DITHER,
g_param_spec_boolean ("dither", "Dither",
"Add dither (only used for Lanczos method)",
DEFAULT_PROP_DITHER,
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
#if 0
/* I am hiding submethod for now, since it's poorly named, poorly
* documented, and will probably just get people into trouble. */
g_object_class_install_property (gobject_class, PROP_SUBMETHOD,
g_param_spec_int ("submethod", "submethod",
"submethod", 0, 3, DEFAULT_PROP_SUBMETHOD,
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
#endif
g_object_class_install_property (gobject_class, PROP_ENVELOPE,
g_param_spec_double ("envelope", "Envelope",
"Size of filter envelope", 0.0, 5.0, DEFAULT_PROP_ENVELOPE,
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_set_details_simple (element_class,
"Video scaler", "Filter/Converter/Video/Scaler",
"Resizes video", "Wim Taymans <wim.taymans@chello.be>");
@ -244,6 +284,11 @@ gst_video_scale_init (GstVideoScale * videoscale)
videoscale->tmp_buf = NULL;
videoscale->method = DEFAULT_PROP_METHOD;
videoscale->add_borders = DEFAULT_PROP_ADD_BORDERS;
videoscale->submethod = DEFAULT_PROP_SUBMETHOD;
videoscale->sharpness = DEFAULT_PROP_SHARPNESS;
videoscale->sharpen = DEFAULT_PROP_SHARPEN;
videoscale->dither = DEFAULT_PROP_DITHER;
videoscale->envelope = DEFAULT_PROP_ENVELOPE;
}
static void
@ -273,6 +318,31 @@ gst_video_scale_set_property (GObject * object, guint prop_id,
GST_OBJECT_UNLOCK (vscale);
gst_base_transform_reconfigure (GST_BASE_TRANSFORM_CAST (vscale));
break;
case PROP_SHARPNESS:
GST_OBJECT_LOCK (vscale);
vscale->sharpness = g_value_get_double (value);
GST_OBJECT_UNLOCK (vscale);
break;
case PROP_SHARPEN:
GST_OBJECT_LOCK (vscale);
vscale->sharpen = g_value_get_double (value);
GST_OBJECT_UNLOCK (vscale);
break;
case PROP_DITHER:
GST_OBJECT_LOCK (vscale);
vscale->dither = g_value_get_boolean (value);
GST_OBJECT_UNLOCK (vscale);
break;
case PROP_SUBMETHOD:
GST_OBJECT_LOCK (vscale);
vscale->submethod = g_value_get_int (value);
GST_OBJECT_UNLOCK (vscale);
break;
case PROP_ENVELOPE:
GST_OBJECT_LOCK (vscale);
vscale->envelope = g_value_get_double (value);
GST_OBJECT_UNLOCK (vscale);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@ -296,6 +366,31 @@ gst_video_scale_get_property (GObject * object, guint prop_id, GValue * value,
g_value_set_boolean (value, vscale->add_borders);
GST_OBJECT_UNLOCK (vscale);
break;
case PROP_SHARPNESS:
GST_OBJECT_LOCK (vscale);
g_value_set_double (value, vscale->sharpness);
GST_OBJECT_UNLOCK (vscale);
break;
case PROP_SHARPEN:
GST_OBJECT_LOCK (vscale);
g_value_set_double (value, vscale->sharpen);
GST_OBJECT_UNLOCK (vscale);
break;
case PROP_DITHER:
GST_OBJECT_LOCK (vscale);
g_value_set_boolean (value, vscale->dither);
GST_OBJECT_UNLOCK (vscale);
break;
case PROP_SUBMETHOD:
GST_OBJECT_LOCK (vscale);
g_value_set_int (value, vscale->submethod);
GST_OBJECT_UNLOCK (vscale);
break;
case PROP_ENVELOPE:
GST_OBJECT_LOCK (vscale);
g_value_set_double (value, vscale->envelope);
GST_OBJECT_UNLOCK (vscale);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@ -1065,6 +1160,11 @@ gst_video_scale_transform (GstBaseTransform * trans, GstBuffer * in,
case GST_VIDEO_SCALE_4TAP:
vs_image_scale_4tap_RGBA (&dest[0], &src[0], videoscale->tmp_buf);
break;
case GST_VIDEO_SCALE_LANCZOS:
vs_image_scale_lanczos_AYUV (&dest[0], &src[0], videoscale->tmp_buf,
videoscale->sharpness, videoscale->dither, videoscale->submethod,
videoscale->envelope, videoscale->sharpen);
break;
default:
goto unknown_mode;
}
@ -1205,6 +1305,17 @@ gst_video_scale_transform (GstBaseTransform * trans, GstBuffer * in,
vs_image_scale_4tap_Y (&dest[1], &src[1], videoscale->tmp_buf);
vs_image_scale_4tap_Y (&dest[2], &src[2], videoscale->tmp_buf);
break;
case GST_VIDEO_SCALE_LANCZOS:
vs_image_scale_lanczos_Y (&dest[0], &src[0], videoscale->tmp_buf,
videoscale->sharpness, videoscale->dither, videoscale->submethod,
videoscale->envelope, videoscale->sharpen);
vs_image_scale_lanczos_Y (&dest[1], &src[1], videoscale->tmp_buf,
videoscale->sharpness, videoscale->dither, videoscale->submethod,
videoscale->envelope, videoscale->sharpen);
vs_image_scale_lanczos_Y (&dest[2], &src[2], videoscale->tmp_buf,
videoscale->sharpness, videoscale->dither, videoscale->submethod,
videoscale->envelope, videoscale->sharpen);
break;
default:
goto unknown_mode;
}
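
The Lanczos support above also exposes its tuning knobs (sharpness, sharpen, dither, envelope) as GObject properties read and written under the object lock. A small sketch of installing one such double property; the naming and the property id are illustrative stand-ins:

#include <gst/gst.h>

#define DEFAULT_SHARPNESS 1.0

enum
{
  PROP_0,
  PROP_SHARPNESS
};

/* Install a "sharpness" double property like the one added above. */
static void
my_scale_install_properties (GObjectClass * gobject_class)
{
  g_object_class_install_property (gobject_class, PROP_SHARPNESS,
      g_param_spec_double ("sharpness", "Sharpness",
          "Sharpness of filter", 0.0, 2.0, DEFAULT_SHARPNESS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}

The diff additionally passes G_PARAM_CONSTRUCT, which makes GObject apply the default value while the element is being constructed.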

View file

@ -47,13 +47,15 @@ GST_DEBUG_CATEGORY_EXTERN (video_scale_debug);
* @GST_VIDEO_SCALE_NEAREST: use nearest neighbour scaling (fast and ugly)
* @GST_VIDEO_SCALE_BILINEAR: use bilinear scaling (slower but prettier).
* @GST_VIDEO_SCALE_4TAP: use a 4-tap filter for scaling (slow).
* @GST_VIDEO_SCALE_LANCZOS: use a multitap Lanczos filter for scaling (slow).
*
* The videoscale method to use.
*/
typedef enum {
GST_VIDEO_SCALE_NEAREST,
GST_VIDEO_SCALE_BILINEAR,
GST_VIDEO_SCALE_4TAP
GST_VIDEO_SCALE_4TAP,
GST_VIDEO_SCALE_LANCZOS
} GstVideoScaleMethod;
typedef struct _GstVideoScale GstVideoScale;
@ -67,8 +69,14 @@ typedef struct _GstVideoScaleClass GstVideoScaleClass;
struct _GstVideoScale {
GstVideoFilter element;
/* properties */
GstVideoScaleMethod method;
gboolean add_borders;
double sharpness;
double sharpen;
gboolean dither;
int submethod;
double envelope;
/* negotiated stuff */
GstVideoInfo from_info;

View file

@ -28,6 +28,7 @@
#ifndef __VS_IMAGE_H__
#define __VS_IMAGE_H__
#include <glib.h>
#include <_stdint.h>
typedef struct _VSImage VSImage;
@ -48,6 +49,9 @@ void vs_image_scale_nearest_RGBA (const VSImage *dest, const VSImage *src,
uint8_t *tmpbuf);
void vs_image_scale_linear_RGBA (const VSImage *dest, const VSImage *src,
uint8_t *tmpbuf);
void vs_image_scale_lanczos_AYUV (const VSImage * dest, const VSImage * src,
uint8_t * tmpbuf, double sharpness, gboolean dither, int submethod,
double a, double sharpen);
void vs_image_scale_nearest_RGB (const VSImage *dest, const VSImage *src,
uint8_t *tmpbuf);
@ -68,6 +72,9 @@ void vs_image_scale_nearest_Y (const VSImage *dest, const VSImage *src,
uint8_t *tmpbuf);
void vs_image_scale_linear_Y (const VSImage *dest, const VSImage *src,
uint8_t *tmpbuf);
void vs_image_scale_lanczos_Y (const VSImage *dest, const VSImage *src,
uint8_t *tmpbuf, double sharpness, gboolean dither, int submethod,
double a, double sharpen);
void vs_image_scale_nearest_RGB565 (const VSImage *dest, const VSImage *src,
uint8_t *tmpbuf);

gst/videoscale/vs_lanczos.c: new file, 1558 lines (diff suppressed because it is too large)

View file

@ -137,6 +137,8 @@ GST_START_TEST (test_target_naming)
{
GstEncodingTarget *target;
gst_debug_set_threshold_for_name ("default", GST_LEVEL_NONE);
/* NULL values */
ASSERT_CRITICAL (target = gst_encoding_target_new (NULL, NULL, NULL, NULL));
fail_if (target != NULL);
@ -397,6 +399,8 @@ GST_START_TEST (test_loading_profile)
GValue strvalue = { 0, };
GValue objectvalue = { 0, };
gst_debug_set_threshold_for_name ("default", GST_LEVEL_NONE);
/* Test loading using short method and all arguments */
target = gst_encoding_target_load ("myponytarget", "herding", NULL);
fail_unless (target != NULL);

View file

@ -776,6 +776,8 @@ GST_START_TEST (test_license_utils)
gchar *path, *data = NULL;
gsize data_len;
gst_debug_set_threshold_for_name ("tag-licenses", GST_LEVEL_NONE);
/* test jurisdiction-specific license */
fail_unless_equals_int (gst_tag_get_license_flags (SPECIFIC_L), 0x01010703);
fail_unless_equals_string (gst_tag_get_license_nick (SPECIFIC_L),

View file

@ -672,6 +672,8 @@ GST_START_TEST (test_convert_frame)
gint i;
guint8 *data;
gst_debug_set_threshold_for_name ("default", GST_LEVEL_NONE);
from_buffer = gst_buffer_new_and_alloc (640 * 480 * 4);
data = gst_buffer_map (from_buffer, NULL, NULL, GST_MAP_WRITE);
@ -752,6 +754,8 @@ GST_START_TEST (test_convert_frame_async)
GMainLoop *loop;
ConvertFrameContext cf_data = { NULL, NULL, NULL };
gst_debug_set_threshold_for_name ("default", GST_LEVEL_NONE);
from_buffer = gst_buffer_new_and_alloc (640 * 480 * 4);
data = gst_buffer_map (from_buffer, NULL, NULL, GST_MAP_WRITE);

View file

@ -161,6 +161,16 @@ gst_element_factory_make_or_warn (const gchar * type, const gchar * name)
{
GstElement *element = gst_element_factory_make (type, name);
#ifndef GST_DISABLE_PARSE
if (!element) {
/* Try parsing it as a pipeline description */
element = gst_parse_bin_from_description (type, TRUE, NULL);
if (element) {
gst_element_set_name (element, name);
}
}
#endif
if (!element) {
g_warning ("Failed to create element %s of type %s", name, type);
}
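
With this fallback, the seek example's make-or-warn helper accepts either an element factory name or a full launch-style description. A hedged usage sketch of the description path on its own; the helper name and the "videoconvert ! videoscale" string are only examples:

#include <gst/gst.h>

/* Build a bin from a launch description, ghosting its unlinked pads so it
 * can be used like a single element. */
static GstElement *
my_make_described_bin (const gchar * description, const gchar * name)
{
  GError *err = NULL;
  GstElement *bin = gst_parse_bin_from_description (description, TRUE, &err);

  if (bin == NULL) {
    g_warning ("could not create '%s': %s", description,
        err ? err->message : "unknown error");
    g_clear_error (&err);
    return NULL;
  }

  gst_element_set_name (bin, name);
  return bin;
}

/* Example: my_make_described_bin ("videoconvert ! videoscale", "vbin"); */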

View file

@ -1,5 +1,5 @@
EXPORTS
_gst_base_audio_decoder_error
_gst_audio_decoder_error
gst_audio_buffer_clip
gst_audio_channel_position_get_type
gst_audio_check_channel_positions
@ -10,7 +10,47 @@ EXPORTS
gst_audio_clock_new
gst_audio_clock_new_full
gst_audio_clock_reset
gst_audio_decoder_finish_frame
gst_audio_decoder_get_audio_info
gst_audio_decoder_get_byte_time
gst_audio_decoder_get_delay
gst_audio_decoder_get_latency
gst_audio_decoder_get_max_errors
gst_audio_decoder_get_min_latency
gst_audio_decoder_get_parse_state
gst_audio_decoder_get_plc
gst_audio_decoder_get_plc_aware
gst_audio_decoder_get_tolerance
gst_audio_decoder_get_type
gst_audio_decoder_set_byte_time
gst_audio_decoder_set_latency
gst_audio_decoder_set_max_errors
gst_audio_decoder_set_min_latency
gst_audio_decoder_set_plc
gst_audio_decoder_set_plc_aware
gst_audio_decoder_set_tolerance
gst_audio_default_registry_mixer_filter
gst_audio_duration_from_pad_buffer
gst_audio_encoder_finish_frame
gst_audio_encoder_get_audio_info
gst_audio_encoder_get_frame_max
gst_audio_encoder_get_frame_samples
gst_audio_encoder_get_hard_resync
gst_audio_encoder_get_latency
gst_audio_encoder_get_lookahead
gst_audio_encoder_get_mark_granule
gst_audio_encoder_get_perfect_timestamp
gst_audio_encoder_get_tolerance
gst_audio_encoder_get_type
gst_audio_encoder_proxy_getcaps
gst_audio_encoder_set_frame_max
gst_audio_encoder_set_frame_samples
gst_audio_encoder_set_hard_resync
gst_audio_encoder_set_latency
gst_audio_encoder_set_lookahead
gst_audio_encoder_set_mark_granule
gst_audio_encoder_set_perfect_timestamp
gst_audio_encoder_set_tolerance
gst_audio_filter_class_add_pad_templates
gst_audio_filter_get_type
gst_audio_fixate_channel_positions
@ -33,46 +73,6 @@ EXPORTS
gst_audio_sink_get_type
gst_audio_src_get_type
gst_audio_structure_set_int
gst_base_audio_decoder_finish_frame
gst_base_audio_decoder_get_audio_info
gst_base_audio_decoder_get_byte_time
gst_base_audio_decoder_get_delay
gst_base_audio_decoder_get_latency
gst_base_audio_decoder_get_max_errors
gst_base_audio_decoder_get_min_latency
gst_base_audio_decoder_get_parse_state
gst_base_audio_decoder_get_plc
gst_base_audio_decoder_get_plc_aware
gst_base_audio_decoder_get_tolerance
gst_base_audio_decoder_get_type
gst_base_audio_decoder_set_byte_time
gst_base_audio_decoder_set_latency
gst_base_audio_decoder_set_max_errors
gst_base_audio_decoder_set_min_latency
gst_base_audio_decoder_set_plc
gst_base_audio_decoder_set_plc_aware
gst_base_audio_decoder_set_tolerance
gst_base_audio_encoder_finish_frame
gst_base_audio_encoder_get_audio_info
gst_base_audio_encoder_get_frame_max
gst_base_audio_encoder_get_frame_samples
gst_base_audio_encoder_get_hard_resync
gst_base_audio_encoder_get_latency
gst_base_audio_encoder_get_lookahead
gst_base_audio_encoder_get_mark_granule
gst_base_audio_encoder_get_perfect_timestamp
gst_base_audio_encoder_get_tolerance
gst_base_audio_encoder_get_type
gst_base_audio_encoder_proxy_getcaps
gst_base_audio_encoder_set_frame_max
gst_base_audio_encoder_set_frame_samples
gst_base_audio_encoder_set_hard_resync
gst_base_audio_encoder_set_latency
gst_base_audio_encoder_set_lookahead
gst_base_audio_encoder_set_mark_granule
gst_base_audio_encoder_set_perfect_timestamp
gst_base_audio_encoder_set_tolerance
gst_base_audio_sink_create_ringbuffer
gst_base_audio_sink_get_drift_tolerance
gst_base_audio_sink_get_provide_clock