Merge branch 'master' into 0.11

This commit is contained in:
Wim Taymans 2011-09-26 22:31:17 +02:00
commit 7f4cf50496
67 changed files with 5193 additions and 1554 deletions

2
common

@ -1 +1 @@
Subproject commit a39eb835fb3be2a4c5a6a89b5ca5cc064e79b2e2
Subproject commit 11f0cd5a3fba36f85cf3e434150bfe66b1bf08d4

View file

@ -1735,6 +1735,17 @@ AG_GST_CHECK_FEATURE(RTMP, [rtmp library], rtmp, [
AG_GST_PKG_CHECK_MODULES(RTMP, librtmp)
])
dnl *** spandsp ***
translit(dnm, m, l) AM_CONDITIONAL(USE_SPANDSP, true)
AG_GST_CHECK_FEATURE(SPANDSP, [Spandsp], spandsp, [
PKG_CHECK_MODULES(SPANDSP, spandsp >= 0.0.6, [
HAVE_SPANDSP="yes" ], [
HAVE_SPANDSP="no"
])
])
AC_SUBST(SPANDSP_CFLAGS)
AC_SUBST(SPANDSP_LIBS)
dnl *** GSettings ***
translit(dnm, m, l) AM_CONDITIONAL(USE_GSETTINGS, true)
AG_GST_CHECK_FEATURE(GSETTINGS, [GSettings plugin], gsettings, [
@ -1797,6 +1808,7 @@ AM_CONDITIONAL(USE_WILDMIDI, false)
AM_CONDITIONAL(USE_SDL, false)
AM_CONDITIONAL(USE_SNDFILE, false)
AM_CONDITIONAL(USE_SOUNDTOUCH, false)
AM_CONDITIONAL(USE_SPANDSP, false)
AM_CONDITIONAL(USE_SPC, false)
AM_CONDITIONAL(USE_GME, false)
AM_CONDITIONAL(USE_GSETTINGS, false)
@ -2046,6 +2058,7 @@ ext/schroedinger/Makefile
ext/sdl/Makefile
ext/sndfile/Makefile
ext/soundtouch/Makefile
ext/spandsp/Makefile
ext/teletextdec/Makefile
ext/gme/Makefile
ext/gsettings/Makefile

28
docs/libs/.gitignore vendored Normal file
View file

@ -0,0 +1,28 @@
Makefile
Makefile.in
*-decl.txt
*-decl-list.txt
*-undocumented.txt
*-undeclared.txt
*-unused.txt
*.bak
*.args
*.hierarchy
*.interfaces
*.prerequisites
*.signals
gst-plugins-bad-libs.args.new
gst-plugins-bad-libs.signals.new
tmpl
xml
html
gst-plugins-bad-libs-scan
gst-plugins-bad-libs-scan.c
*-registry.*
*.stamp

View file

@ -13,43 +13,17 @@ FORMATS=html
html: html-build.stamp
include $(top_srcdir)/common/upload-doc.mak
# generated basefiles
#basefiles = \
## $(DOC_MODULE).types \
# $(DOC_MODULE)-sections.txt \
# $(DOC_MODULE)-docs.sgml
# ugly hack to make -unused.sgml work
#unused-build.stamp:
# BUILDDIR=`pwd` && \
# cd $(srcdir)/tmpl && \
# ln -sf gstreamer-libs-unused.sgml \
# $$BUILDDIR/tmpl/gstreamer-libs-@GST_MAJORMINOR@-unused.sgml
# touch unused-build.stamp
# these rules are added to create parallel docs using GST_MAJORMINOR
#$(basefiles): gstreamer-libs-@GST_MAJORMINOR@%: gstreamer-libs%
# cp $< $@
#CLEANFILES = $(basefiles)
# The top-level SGML file. Change it if you want.
DOC_MAIN_SGML_FILE=$(DOC_MODULE)-docs.sgml
# The directory containing the source code. Relative to $(top_srcdir).
# The directory containing the source code.
# gtk-doc will search all .c & .h files beneath here for inline comments
# documenting functions and macros.
DOC_SOURCE_DIR=$(top_srcdir)/gst-libs/gst
DOC_BUILD_DIR=$(top_builddir)/gst-libs/gst
# Extra options to supply to gtkdoc-scan.
SCAN_OPTIONS=--deprecated-guards="GST_DISABLE_DEPRECATED"
# FIXME :
# there's something wrong with gstreamer-sections.txt not being in the dist
# maybe it doesn't resolve; we're adding it below for now
#EXTRA_DIST = gstreamer.types.in gstreamer.hierarchy $(DOC_MODULE)-sections.txt gstreamer-sections.txt $(DOC_MAIN_SGML_FILE)
# Extra options to supply to gtkdoc-mkdb.
MKDB_OPTIONS=--sgml-mode --output-format=xml
@ -58,20 +32,8 @@ FIXXREF_OPTIONS=--extra-dir=$(GLIB_PREFIX)/share/gtk-doc/html \
--extra-dir=$(GST_PREFIX)/share/gtk-doc/html
# Used for dependencies.
HFILE_GLOB=$(DOC_SOURCE_DIR)/*/*.h
CFILE_GLOB=$(DOC_SOURCE_DIR)/*/*.c
# this is a wingo addition
# thomasvs: another nice wingo addition would be an explanation on why
# this is useful ;)
SCANOBJ_DEPS = \
$(top_builddir)/gst-libs/gst/codecparsers/libgstcodecparsers-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/basecamerabinsrc/libgstbasecamerabinsrc-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/interfaces/libgstphotography-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/signalprocessor/libgstsignalprocessor-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/video/libgstbasevideo-@GST_MAJORMINOR@.la
HFILE_GLOB=$(top_srcdir)/gst-libs/gst/*/*.h
CFILE_GLOB=$(top_srcdir)/gst-libs/gst/*/*.c
# Header files to ignore when scanning.
IGNORE_HFILES =
@ -88,7 +50,13 @@ extra_files =
# CFLAGS and LDFLAGS for compiling scan program. Only needed if your app/lib
# contains GtkObjects/GObjects and you want to document signals and properties.
GTKDOC_CFLAGS = $(GST_PLUGINS_BAD_CFLAGS) $(GST_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS)
GTKDOC_LIBS = $(SCANOBJ_DEPS) $(GST_BASE_LIBS) $(GST_BAD_LIBS)
GTKDOC_LIBS = \
$(top_builddir)/gst-libs/gst/codecparsers/libgstcodecparsers-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/basecamerabinsrc/libgstbasecamerabinsrc-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/interfaces/libgstphotography-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/signalprocessor/libgstsignalprocessor-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/video/libgstbasevideo-@GST_MAJORMINOR@.la \
$(GST_BASE_LIBS) $(GST_BAD_LIBS)
GTKDOC_CC=$(LIBTOOL) --tag=CC --mode=compile $(CC)
GTKDOC_LD=$(LIBTOOL) --tag=CC --mode=link $(CC)

View file

@ -30,6 +30,7 @@
</para>
<xi:include href="xml/gsth264parser.xml" />
<xi:include href="xml/gstmpegvideoparser.xml" />
<xi:include href="xml/gstvc1parser.xml" />
</chapter>
<chapter id="video">

View file

@ -44,6 +44,40 @@ gst_h264_parse_pps
<SUBSECTION Private>
</SECTION>
<SECTION>
<FILE>gstvc1parser</FILE>
<TITLE>vc1parser</TITLE>
<INCLUDE>gst/codecparsers/gstvc1parser.h</INCLUDE>
MAX_HRD_NUM_LEAKY_BUCKETS
GST_VC1_BFRACTION_BASIS
GstVC1StartCode
GstVC1Profile
GstVC1ParseResult
GstVC1PictureType
GstVC1Level
GstVC1QuantizerSpec
GstVC1DQProfile
GstVC1Condover
GstVC1MvMode
GstVC1SeqHdr
GstVC1AdvancedSeqHdr
GstVC1SimpleMainSeqHdr
GstVC1HrdParam
GstVC1EntryPointHdr
GstVC1FrameHdr
GstVC1PicAdvanced
GstVC1PicSimpleMain
GstVC1Picture
GstVC1VopDquant
GstVC1BDU
gst_vc1_identify_next_bdu
gst_vc1_parse_sequence_header
gst_vc1_parse_entry_point_header
gst_vc1_parse_frame_header
<SUBSECTION Standard>
<SUBSECTION Private>
</SECTION>
<SECTION>
<FILE>gstmpegvideoparser</FILE>
<TITLE>mpegvideoparser</TITLE>

View file

@ -13,33 +13,13 @@ FORMATS=html
html: html-build.stamp
include $(top_srcdir)/common/upload-doc.mak
# generated basefiles
#basefiles = \
## $(DOC_MODULE).types \
# $(DOC_MODULE)-sections.txt \
# $(DOC_MODULE)-docs.sgml
# ugly hack to make -unused.sgml work
#unused-build.stamp:
# BUILDDIR=`pwd` && \
# cd $(srcdir)/tmpl && \
# ln -sf gstreamer-libs-unused.sgml \
# $$BUILDDIR/tmpl/gstreamer-libs-@GST_MAJORMINOR@-unused.sgml
# touch unused-build.stamp
# these rules are added to create parallel docs using GST_MAJORMINOR
#$(basefiles): gstreamer-libs-@GST_MAJORMINOR@%: gstreamer-libs%
# cp $< $@
#CLEANFILES = $(basefiles)
# The top-level SGML file. Change it if you want.
DOC_MAIN_SGML_FILE=$(DOC_MODULE)-docs.sgml
# The directory containing the source code. Relative to $(top_srcdir).
# The directory containing the source code.
# gtk-doc will search all .c & .h files beneath here for inline comments
# documenting functions and macros.
DOC_SOURCE_DIR = $(top_srcdir)
DOC_SOURCE_DIR = $(top_srcdir)/gst $(top_srcdir)/ext $(top_srcdir)/sys
# Extra options to supply to gtkdoc-scan.
SCAN_OPTIONS=
@ -53,15 +33,12 @@ FIXXREF_OPTIONS=--extra-dir=$(GLIB_PREFIX)/share/gtk-doc/html \
--extra-dir=$(GSTPB_PREFIX)/share/gtk-doc/html
# Used for dependencies.
HFILE_GLOB=$(DOC_SOURCE_DIR)/*/*/*.h $(DOC_SOURCE_DIR)/*/*/*.hh
CFILE_GLOB=$(DOC_SOURCE_DIR)/*/*/*.c $(DOC_SOURCE_DIR)/*/*/*.cc
# this is a wingo addition
# thomasvs: another nice wingo addition would be an explanation on why
# this is useful ;)
SCANOBJ_DEPS = \
$(top_builddir)/gst-libs/gst/interfaces/libgstphotography-@GST_MAJORMINOR@.la
HFILE_GLOB= \
$(top_srcdir)/gst/*/*.h $(top_srcdir)/ext/*/*.h $(top_srcdir)/sys/*/*.h \
$(top_srcdir)/ext/*/*.hh
CFILE_GLOB= \
$(top_srcdir)/gst/*/*.c $(top_srcdir)/ext/*/*.c $(top_srcdir)/sys/*/*.c \
$(top_srcdir)/ext/*/*.cc $(top_srcdir)/sys/*/*.m
# Header files to ignore when scanning.
IGNORE_HFILES =
@ -214,7 +191,9 @@ extra_files =
# CFLAGS and LDFLAGS for compiling scan program. Only needed if your app/lib
# contains GtkObjects/GObjects and you want to document signals and properties.
GTKDOC_CFLAGS = -DGST_USE_UNSTABLE_API $(GST_PLUGINS_BAD_CFLAGS) $(GST_BASE_CFLAGS) -I$(top_builddir)
GTKDOC_LIBS = $(SCANOBJ_DEPS) $(GST_BASE_LIBS)
GTKDOC_LIBS = \
$(top_builddir)/gst-libs/gst/interfaces/libgstphotography-@GST_MAJORMINOR@.la \
$(GST_BASE_LIBS)
GTKDOC_CC=$(LIBTOOL) --tag=CC --mode=compile $(CC)
GTKDOC_LD=$(LIBTOOL) --tag=CC --mode=link $(CC)

View file

@ -121,12 +121,12 @@ gst_camerabin_get_type
GstCameraBin2
<SUBSECTION Standard>
GstCameraBin2Class
GST_CAMERABIN2
GST_IS_CAMERABIN2
GST_TYPE_CAMERABIN2
GST_CAMERABIN2_CLASS
GST_IS_CAMERABIN2_CLASS
gst_camerabin2_get_type
GST_CAMERA_BIN2
GST_IS_CAMERA_BIN2
GST_TYPE_CAMERA_BIN2
GST_CAMERA_BIN2_CLASS
GST_IS_CAMERA_BIN2_CLASS
gst_camera_bin2_get_type
</SECTION>
<SECTION>

View file

@ -324,6 +324,12 @@ else
SOUNDTOUCH_DIR=
endif
if USE_SPANDSP
SPANDSP_DIR = spandsp
else
SPANDSP_DIR =
endif
if USE_SPC
SPC_DIR=spc
else
@ -433,6 +439,7 @@ SUBDIRS=\
$(SMOOTHWAVE_DIR) \
$(SNDFILE_DIR) \
$(SOUNDTOUCH_DIR) \
$(SPANDSP_DIR) \
$(GME_DIR) \
$(SPC_DIR) \
$(SWFDEC_DIR) \
@ -485,6 +492,7 @@ DIST_SUBDIRS = \
sdl \
sndfile \
soundtouch \
spandsp \
spc \
gme \
swfdec \

View file

@ -560,8 +560,11 @@ celt_dec_chain_parse_header (GstCeltDec * dec, GstBuffer * buf)
gint error = CELT_OK;
/* get the header */
celt_header_from_packet ((const unsigned char *) GST_BUFFER_DATA (buf),
error =
celt_header_from_packet ((const unsigned char *) GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf), &dec->header);
if (error < 0)
goto invalid_header;
if (memcmp (dec->header.codec_id, "CELT ", 8) != 0)
goto invalid_header;
@ -760,7 +763,11 @@ celt_dec_chain_parse_data (GstCeltDec * dec, GstBuffer * buf,
#else
error = celt_decode (dec->state, data, size, out_data);
#endif
#ifdef HAVE_CELT_0_11
if (error < 0) {
#else
if (error != CELT_OK) {
#endif
GST_WARNING_OBJECT (dec, "Decoding error: %d", error);
return GST_FLOW_ERROR;
}

View file

@ -253,6 +253,9 @@ gst_celt_enc_sink_setcaps (GstPad * pad, GstCaps * caps)
gst_caps_unref (otherpadcaps);
}
if (enc->requested_frame_size > 0)
enc->frame_size = enc->requested_frame_size;
GST_DEBUG_OBJECT (pad, "channels=%d rate=%d frame-size=%d",
enc->channels, enc->rate, enc->frame_size);
@ -573,6 +576,7 @@ gst_celt_enc_init (GstCeltEnc * enc, GstCeltEncClass * klass)
enc->bitrate = DEFAULT_BITRATE;
enc->frame_size = DEFAULT_FRAMESIZE;
enc->requested_frame_size = -1;
enc->cbr = DEFAULT_CBR;
enc->complexity = DEFAULT_COMPLEXITY;
enc->max_bitrate = DEFAULT_MAX_BITRATE;
@ -695,16 +699,18 @@ encoder_creation_failed:
/* prepare a buffer for transmission */
static GstBuffer *
gst_celt_enc_buffer_from_data (GstCeltEnc * enc, guchar * data,
gint data_len, guint64 granulepos)
guint data_len, gint64 granulepos)
{
GstBuffer *outbuf;
outbuf = gst_buffer_new_and_alloc (data_len);
memcpy (GST_BUFFER_DATA (outbuf), data, data_len);
outbuf = gst_buffer_new ();
GST_BUFFER_DATA (outbuf) = data;
GST_BUFFER_MALLOCDATA (outbuf) = data;
GST_BUFFER_SIZE (outbuf) = data_len;
GST_BUFFER_OFFSET (outbuf) = enc->bytes_out;
GST_BUFFER_OFFSET_END (outbuf) = granulepos;
GST_LOG_OBJECT (enc, "encoded buffer of %d bytes", GST_BUFFER_SIZE (outbuf));
GST_LOG_OBJECT (enc, "encoded buffer of %u bytes", GST_BUFFER_SIZE (outbuf));
return outbuf;
}
@ -903,11 +909,17 @@ gst_celt_enc_chain (GstPad * pad, GstBuffer * buf)
constraints */
GstBuffer *buf1, *buf2;
GstCaps *caps;
guchar data[100];
/* libcelt has a bug which underestimates header size by 4... */
unsigned int header_size = enc->header.header_size + 4;
unsigned char *data = g_malloc (header_size);
/* create header buffer */
celt_header_to_packet (&enc->header, data, 100);
buf1 = gst_celt_enc_buffer_from_data (enc, data, 100, 0);
int error = celt_header_to_packet (&enc->header, data, header_size);
if (error < 0) {
g_free (data);
goto no_header;
}
buf1 = gst_celt_enc_buffer_from_data (enc, data, header_size, 0);
/* create comment buffer */
buf2 = gst_celt_enc_create_metadata_buffer (enc);
@ -966,6 +978,7 @@ gst_celt_enc_chain (GstPad * pad, GstBuffer * buf)
/* Check if we have a continous stream, if not drop some samples or the buffer or
* insert some silence samples */
if (enc->next_ts != GST_CLOCK_TIME_NONE &&
GST_BUFFER_TIMESTAMP_IS_VALID (buf) &&
GST_BUFFER_TIMESTAMP (buf) < enc->next_ts) {
guint64 diff = enc->next_ts - GST_BUFFER_TIMESTAMP (buf);
guint64 diff_bytes;
@ -1038,6 +1051,13 @@ not_setup:
goto done;
}
no_header:
{
GST_ELEMENT_ERROR (enc, STREAM, ENCODE, (NULL),
("Failed to encode header"));
ret = GST_FLOW_ERROR;
goto done;
}
}
@ -1090,7 +1110,8 @@ gst_celt_enc_set_property (GObject * object, guint prop_id,
enc->bitrate = g_value_get_int (value);
break;
case PROP_FRAMESIZE:
enc->frame_size = g_value_get_int (value);
enc->requested_frame_size = g_value_get_int (value);
enc->frame_size = enc->requested_frame_size;
break;
case PROP_CBR:
enc->cbr = g_value_get_boolean (value);

View file

@ -62,6 +62,7 @@ struct _GstCeltEnc {
gint bitrate;
gint frame_size;
gint requested_frame_size;
gboolean cbr;
gint complexity;
gint max_bitrate;

View file

@ -1,7 +1,8 @@
plugin_LTLIBRARIES = libgstfaac.la
libgstfaac_la_SOURCES = gstfaac.c
libgstfaac_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) \
libgstfaac_la_CFLAGS = -DGST_USE_UNSTABLE_API \
$(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) \
$(GST_CFLAGS) $(FAAC_CFLAGS)
libgstfaac_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
-lgstaudio-@GST_MAJORMINOR@ -lgstpbutils-@GST_MAJORMINOR@ \

View file

@ -91,12 +91,6 @@
"rate = (int) {" SAMPLE_RATES "}, " \
"stream-format = (string) { adts, raw }, " \
"profile = (string) { main, lc }"
enum
{
VBR = 1,
ABR
};
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
@ -119,74 +113,41 @@ enum
PROP_SHORTCTL
};
static void gst_faac_base_init (GstFaacClass * klass);
static void gst_faac_class_init (GstFaacClass * klass);
static void gst_faac_init (GstFaac * faac);
static void gst_faac_finalize (GObject * object);
static void gst_faac_reset (GstFaac * faac);
enum
{
VBR = 1,
ABR
};
static void gst_faac_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_faac_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static gboolean gst_faac_sink_event (GstPad * pad, GstEvent * event);
static gboolean gst_faac_configure_source_pad (GstFaac * faac);
static gboolean gst_faac_sink_setcaps (GstPad * pad, GstCaps * caps);
static GstCaps *gst_faac_sink_getcaps (GstPad * pad);
static GstFlowReturn gst_faac_push_buffers (GstFaac * faac, gboolean force);
static GstFlowReturn gst_faac_chain (GstPad * pad, GstBuffer * data);
static GstStateChangeReturn gst_faac_change_state (GstElement * element,
GstStateChange transition);
static GstCaps *gst_faac_getcaps (GstAudioEncoder * enc);
static GstElementClass *parent_class = NULL;
static gboolean gst_faac_start (GstAudioEncoder * enc);
static gboolean gst_faac_stop (GstAudioEncoder * enc);
static gboolean gst_faac_set_format (GstAudioEncoder * enc,
GstAudioInfo * info);
static GstFlowReturn gst_faac_handle_frame (GstAudioEncoder * enc,
GstBuffer * in_buf);
GST_DEBUG_CATEGORY_STATIC (faac_debug);
#define GST_CAT_DEFAULT faac_debug
#define FAAC_DEFAULT_OUTPUTFORMAT 0 /* RAW */
#define FAAC_DEFAULT_QUALITY 100
#define FAAC_DEFAULT_BITRATE 128 * 1000
#define FAAC_DEFAULT_RATE_CONTROL VBR
#define FAAC_DEFAULT_RATE_CONTROL VBR
#define FAAC_DEFAULT_TNS FALSE
#define FAAC_DEFAULT_MIDSIDE TRUE
#define FAAC_DEFAULT_SHORTCTL SHORTCTL_NORMAL
GType
gst_faac_get_type (void)
{
static GType gst_faac_type = 0;
if (!gst_faac_type) {
static const GTypeInfo gst_faac_info = {
sizeof (GstFaacClass),
(GBaseInitFunc) gst_faac_base_init,
NULL,
(GClassInitFunc) gst_faac_class_init,
NULL,
NULL,
sizeof (GstFaac),
0,
(GInstanceInitFunc) gst_faac_init,
};
const GInterfaceInfo preset_interface_info = {
NULL, /* interface_init */
NULL, /* interface_finalize */
NULL /* interface_data */
};
gst_faac_type = g_type_register_static (GST_TYPE_ELEMENT,
"GstFaac", &gst_faac_info, 0);
g_type_add_interface_static (gst_faac_type, GST_TYPE_PRESET,
&preset_interface_info);
}
return gst_faac_type;
}
GST_BOILERPLATE (GstFaac, gst_faac, GstAudioEncoder, GST_TYPE_AUDIO_ENCODER);
static void
gst_faac_base_init (GstFaacClass * klass)
gst_faac_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
@ -248,96 +209,53 @@ static void
gst_faac_class_init (GstFaacClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GstAudioEncoderClass *base_class = GST_AUDIO_ENCODER_CLASS (klass);
parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = gst_faac_set_property;
gobject_class->get_property = gst_faac_get_property;
gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_faac_finalize);
base_class->start = GST_DEBUG_FUNCPTR (gst_faac_start);
base_class->stop = GST_DEBUG_FUNCPTR (gst_faac_stop);
base_class->set_format = GST_DEBUG_FUNCPTR (gst_faac_set_format);
base_class->handle_frame = GST_DEBUG_FUNCPTR (gst_faac_handle_frame);
base_class->getcaps = GST_DEBUG_FUNCPTR (gst_faac_getcaps);
/* properties */
g_object_class_install_property (gobject_class, PROP_QUALITY,
g_param_spec_int ("quality", "Quality (%)",
"Variable bitrate (VBR) quantizer quality in %", 1, 1000,
FAAC_DEFAULT_QUALITY, G_PARAM_READWRITE));
FAAC_DEFAULT_QUALITY,
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_BITRATE,
g_param_spec_int ("bitrate", "Bitrate (bps)",
"Average bitrate (ABR) in bits/sec", 8 * 1000, 320 * 1000,
FAAC_DEFAULT_BITRATE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
"Average Bitrate (ABR) in bits/sec", 8 * 1000, 320 * 1000,
FAAC_DEFAULT_BITRATE,
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_RATE_CONTROL,
g_param_spec_enum ("rate-control", "Rate Control (ABR/VBR)",
"Encoding bitrate type (VBR/ABR)", GST_TYPE_FAAC_RATE_CONTROL,
FAAC_DEFAULT_RATE_CONTROL, G_PARAM_READWRITE));
FAAC_DEFAULT_RATE_CONTROL,
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_TNS,
g_param_spec_boolean ("tns", "TNS", "Use temporal noise shaping",
FAAC_DEFAULT_TNS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
FAAC_DEFAULT_TNS,
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_MIDSIDE,
g_param_spec_boolean ("midside", "Midside", "Allow mid/side encoding",
FAAC_DEFAULT_MIDSIDE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
FAAC_DEFAULT_MIDSIDE,
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_SHORTCTL,
g_param_spec_enum ("shortctl", "Block type",
"Block type encorcing",
GST_TYPE_FAAC_SHORTCTL, FAAC_DEFAULT_SHORTCTL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/* virtual functions */
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_faac_change_state);
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
static void
gst_faac_init (GstFaac * faac)
gst_faac_init (GstFaac * faac, GstFaacClass * klass)
{
faac->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
gst_pad_set_chain_function (faac->sinkpad,
GST_DEBUG_FUNCPTR (gst_faac_chain));
gst_pad_set_setcaps_function (faac->sinkpad,
GST_DEBUG_FUNCPTR (gst_faac_sink_setcaps));
gst_pad_set_getcaps_function (faac->sinkpad,
GST_DEBUG_FUNCPTR (gst_faac_sink_getcaps));
gst_pad_set_event_function (faac->sinkpad,
GST_DEBUG_FUNCPTR (gst_faac_sink_event));
gst_element_add_pad (GST_ELEMENT (faac), faac->sinkpad);
faac->srcpad = gst_pad_new_from_static_template (&src_template, "src");
gst_pad_use_fixed_caps (faac->srcpad);
gst_element_add_pad (GST_ELEMENT (faac), faac->srcpad);
faac->adapter = gst_adapter_new ();
faac->profile = LOW;
faac->mpegversion = 4;
/* default properties */
faac->quality = FAAC_DEFAULT_QUALITY;
faac->bitrate = FAAC_DEFAULT_BITRATE;
faac->brtype = FAAC_DEFAULT_RATE_CONTROL;
faac->shortctl = FAAC_DEFAULT_SHORTCTL;
faac->outputformat = FAAC_DEFAULT_OUTPUTFORMAT;
faac->tns = FAAC_DEFAULT_TNS;
faac->midside = FAAC_DEFAULT_MIDSIDE;
gst_faac_reset (faac);
}
static void
gst_faac_reset (GstFaac * faac)
{
faac->handle = NULL;
faac->samplerate = -1;
faac->channels = -1;
faac->offset = 0;
gst_adapter_clear (faac->adapter);
}
static void
gst_faac_finalize (GObject * object)
{
GstFaac *faac = (GstFaac *) object;
g_object_unref (faac->adapter);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
@ -346,8 +264,25 @@ gst_faac_close_encoder (GstFaac * faac)
if (faac->handle)
faacEncClose (faac->handle);
faac->handle = NULL;
gst_adapter_clear (faac->adapter);
faac->offset = 0;
}
static gboolean
gst_faac_start (GstAudioEncoder * enc)
{
GstFaac *faac = GST_FAAC (enc);
GST_DEBUG_OBJECT (faac, "start");
return TRUE;
}
static gboolean
gst_faac_stop (GstAudioEncoder * enc)
{
GstFaac *faac = GST_FAAC (enc);
GST_DEBUG_OBJECT (faac, "stop");
gst_faac_close_encoder (faac);
return TRUE;
}
static const GstAudioChannelPosition aac_channel_positions[][8] = {
@ -380,7 +315,7 @@ static const GstAudioChannelPosition aac_channel_positions[][8] = {
};
static GstCaps *
gst_faac_sink_getcaps (GstPad * pad)
gst_faac_getcaps (GstAudioEncoder * enc)
{
static volatile gsize sinkcaps = 0;
@ -433,12 +368,82 @@ gst_faac_sink_getcaps (GstPad * pad)
gst_structure_free (s);
g_value_unset (&rates_arr);
GST_DEBUG_OBJECT (pad, "Generated sinkcaps: %" GST_PTR_FORMAT, tmp);
GST_DEBUG_OBJECT (enc, "Generated sinkcaps: %" GST_PTR_FORMAT, tmp);
g_once_init_leave (&sinkcaps, (gsize) tmp);
}
return gst_caps_ref ((GstCaps *) sinkcaps);
return gst_audio_encoder_proxy_getcaps (enc, (GstCaps *) sinkcaps);
}
static gboolean
gst_faac_set_format (GstAudioEncoder * enc, GstAudioInfo * info)
{
GstFaac *faac = GST_FAAC (enc);
faacEncHandle *handle;
gint channels, samplerate, width;
gulong samples, bytes, fmt = 0, bps = 0;
gboolean result = FALSE;
/* base class takes care */
channels = GST_AUDIO_INFO_CHANNELS (info);
samplerate = GST_AUDIO_INFO_RATE (info);
width = GST_AUDIO_INFO_WIDTH (info);
if (GST_AUDIO_INFO_IS_INTEGER (info)) {
switch (width) {
case 16:
fmt = FAAC_INPUT_16BIT;
bps = 2;
break;
case 24:
case 32:
fmt = FAAC_INPUT_32BIT;
bps = 4;
break;
default:
g_return_val_if_reached (FALSE);
}
} else {
fmt = FAAC_INPUT_FLOAT;
bps = 4;
}
/* clean up in case of re-configure */
gst_faac_close_encoder (faac);
if (!(handle = faacEncOpen (samplerate, channels, &samples, &bytes)))
goto setup_failed;
/* mind channel count */
samples /= channels;
/* ok, record and set up */
faac->format = fmt;
faac->bps = bps;
faac->handle = handle;
faac->bytes = bytes;
faac->samples = samples;
faac->channels = channels;
faac->samplerate = samplerate;
/* finish up */
result = gst_faac_configure_source_pad (faac);
/* report needs to base class */
gst_audio_encoder_set_frame_samples_min (enc, samples);
gst_audio_encoder_set_frame_samples_max (enc, samples);
gst_audio_encoder_set_frame_max (enc, 1);
done:
return result;
/* ERRORS */
setup_failed:
{
GST_ELEMENT_ERROR (faac, LIBRARY, SETTINGS, (NULL), (NULL));
goto done;
}
}
/* check downstream caps to configure format */
@ -447,7 +452,12 @@ gst_faac_negotiate (GstFaac * faac)
{
GstCaps *caps;
caps = gst_pad_get_allowed_caps (faac->srcpad);
/* default setup */
faac->profile = LOW;
faac->mpegversion = 4;
faac->outputformat = 0;
caps = gst_pad_get_allowed_caps (GST_AUDIO_ENCODER_SRC_PAD (faac));
GST_DEBUG_OBJECT (faac, "allowed caps: %" GST_PTR_FORMAT, caps);
@ -494,94 +504,6 @@ gst_faac_negotiate (GstFaac * faac)
gst_caps_unref (caps);
}
static gboolean
gst_faac_sink_setcaps (GstPad * pad, GstCaps * caps)
{
GstFaac *faac = GST_FAAC (gst_pad_get_parent (pad));
GstStructure *structure = gst_caps_get_structure (caps, 0);
faacEncHandle *handle;
gint channels, samplerate, width;
gulong samples, bytes, fmt = 0, bps = 0;
gboolean result = FALSE;
if (!gst_caps_is_fixed (caps))
goto refuse_caps;
if (!gst_structure_get_int (structure, "channels", &channels) ||
!gst_structure_get_int (structure, "rate", &samplerate)) {
goto refuse_caps;
}
if (gst_structure_has_name (structure, "audio/x-raw-int")) {
gst_structure_get_int (structure, "width", &width);
switch (width) {
case 16:
fmt = FAAC_INPUT_16BIT;
bps = 2;
break;
case 24:
case 32:
fmt = FAAC_INPUT_32BIT;
bps = 4;
break;
default:
g_return_val_if_reached (FALSE);
}
} else if (gst_structure_has_name (structure, "audio/x-raw-float")) {
fmt = FAAC_INPUT_FLOAT;
bps = 4;
}
if (!fmt)
goto refuse_caps;
/* If the encoder is initialized, do not
reinitialize it again if not necessary */
if (faac->handle) {
if (samplerate == faac->samplerate && channels == faac->channels &&
fmt == faac->format)
return TRUE;
/* clear out pending frames */
gst_faac_push_buffers (faac, TRUE);
gst_faac_close_encoder (faac);
}
if (!(handle = faacEncOpen (samplerate, channels, &samples, &bytes)))
goto setup_failed;
/* ok, record and set up */
faac->format = fmt;
faac->bps = bps;
faac->handle = handle;
faac->bytes = bytes;
faac->samples = samples;
faac->channels = channels;
faac->samplerate = samplerate;
gst_faac_negotiate (faac);
/* finish up */
result = gst_faac_configure_source_pad (faac);
done:
gst_object_unref (faac);
return result;
/* ERRORS */
setup_failed:
{
GST_ELEMENT_ERROR (faac, LIBRARY, SETTINGS, (NULL), (NULL));
goto done;
}
refuse_caps:
{
GST_WARNING_OBJECT (faac, "refused caps %" GST_PTR_FORMAT, caps);
goto done;
}
}
static gboolean
gst_faac_configure_source_pad (GstFaac * faac)
{
@ -590,6 +512,9 @@ gst_faac_configure_source_pad (GstFaac * faac)
faacEncConfiguration *conf;
guint maxbitrate;
/* negotiate stream format */
gst_faac_negotiate (faac);
/* we negotiated caps update current configuration */
conf = faacEncGetCurrentConfiguration (faac->handle);
conf->mpegVersion = (faac->mpegversion == 4) ? MPEG4 : MPEG2;
@ -698,7 +623,7 @@ gst_faac_configure_source_pad (GstFaac * faac)
GST_DEBUG_OBJECT (faac, "src pad caps: %" GST_PTR_FORMAT, srccaps);
ret = gst_pad_set_caps (faac->srcpad, srccaps);
ret = gst_pad_set_caps (GST_AUDIO_ENCODER_SRC_PAD (faac), srccaps);
gst_caps_unref (srccaps);
return ret;
@ -717,127 +642,33 @@ invalid_codec_data:
}
static GstFlowReturn
gst_faac_push_buffers (GstFaac * faac, gboolean force)
gst_faac_handle_frame (GstAudioEncoder * enc, GstBuffer * in_buf)
{
GstFaac *faac = GST_FAAC (enc);
GstFlowReturn ret = GST_FLOW_OK;
gint av, frame_size, size, ret_size;
GstBuffer *outbuf;
guint64 timestamp, distance;
GstBuffer *out_buf;
gint size, ret_size;
const guint8 *data;
/* samples already considers channel count */
frame_size = faac->samples * faac->bps;
out_buf = gst_buffer_new_and_alloc (faac->bytes);
while (G_LIKELY (ret == GST_FLOW_OK)) {
av = gst_adapter_available (faac->adapter);
GST_LOG_OBJECT (faac, "pushing: force: %d, frame_size: %d, av: %d, "
"offset: %d", force, frame_size, av, faac->offset);
/* idea:
* - start of adapter corresponds with what has already been encoded
* (i.e. really returned by faac)
* - start + offset is what needs to be fed to faac next
* That way we can timestamp the output based
* on adapter provided timestamp (and duration is a fixed frame duration) */
/* not enough data for one frame and no flush forcing */
if (!force && (av < frame_size + faac->offset))
break;
if (G_LIKELY (av - faac->offset >= frame_size)) {
GST_LOG_OBJECT (faac, "encoding a frame");
data = gst_adapter_peek (faac->adapter, faac->offset + frame_size);
data += faac->offset;
size = frame_size;
} else if (av - faac->offset > 0) {
GST_LOG_OBJECT (faac, "encoding leftover");
data = gst_adapter_peek (faac->adapter, av);
data += faac->offset;
size = av - faac->offset;
} else {
GST_LOG_OBJECT (faac, "emptying encoder");
data = NULL;
size = 0;
}
outbuf = gst_buffer_new_and_alloc (faac->bytes);
if (G_UNLIKELY ((ret_size = faacEncEncode (faac->handle, (gint32 *) data,
size / faac->bps, GST_BUFFER_DATA (outbuf),
faac->bytes)) < 0)) {
gst_buffer_unref (outbuf);
goto encode_failed;
}
GST_LOG_OBJECT (faac, "encoder return: %d", ret_size);
/* consumed, advanced view */
faac->offset += size;
g_assert (faac->offset <= av);
if (G_UNLIKELY (!ret_size)) {
gst_buffer_unref (outbuf);
if (size)
continue;
else
break;
}
/* deal with encoder lead-out */
if (G_UNLIKELY (av == 0 && faac->offset == 0)) {
GST_DEBUG_OBJECT (faac, "encoder returned additional data");
/* continuous with previous output, ok to have 0 duration */
timestamp = faac->next_ts;
} else {
/* after some caching, finally some data */
/* adapter gives time */
timestamp = gst_adapter_prev_timestamp (faac->adapter, &distance);
}
if (G_LIKELY ((av = gst_adapter_available (faac->adapter)) >= frame_size)) {
/* must have then come from a complete frame */
gst_adapter_flush (faac->adapter, frame_size);
faac->offset -= frame_size;
size = frame_size;
} else {
/* otherwise leftover */
gst_adapter_clear (faac->adapter);
faac->offset = 0;
size = av;
}
GST_BUFFER_SIZE (outbuf) = ret_size;
if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp)))
GST_BUFFER_TIMESTAMP (outbuf) = timestamp +
GST_FRAMES_TO_CLOCK_TIME (distance / faac->channels / faac->bps,
faac->samplerate);
GST_BUFFER_DURATION (outbuf) =
GST_FRAMES_TO_CLOCK_TIME (size / faac->channels / faac->bps,
faac->samplerate);
faac->next_ts =
GST_BUFFER_TIMESTAMP (outbuf) + GST_BUFFER_DURATION (outbuf);
/* perhaps check/set DISCONT based on timestamps ? */
GST_LOG_OBJECT (faac, "Pushing out buffer time: %" GST_TIME_FORMAT
" duration: %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));
gst_buffer_set_caps (outbuf, GST_PAD_CAPS (faac->srcpad));
ret = gst_pad_push (faac->srcpad, outbuf);
if (G_LIKELY (in_buf)) {
data = GST_BUFFER_DATA (in_buf);
size = GST_BUFFER_SIZE (in_buf);
} else {
data = NULL;
size = 0;
}
/* in case encoder returns less than expected, clear our view as well */
if (G_UNLIKELY (force)) {
#ifndef GST_DISABLE_GST_DEBUG
if ((av = gst_adapter_available (faac->adapter)))
GST_WARNING_OBJECT (faac, "encoder left %d bytes; discarding", av);
#endif
gst_adapter_clear (faac->adapter);
faac->offset = 0;
if (G_UNLIKELY ((ret_size = faacEncEncode (faac->handle, (gint32 *) data,
size / faac->bps, GST_BUFFER_DATA (out_buf),
GST_BUFFER_SIZE (out_buf))) < 0))
goto encode_failed;
GST_LOG_OBJECT (faac, "encoder return: %d", ret_size);
if (ret_size > 0) {
GST_BUFFER_SIZE (out_buf) = ret_size;
ret = gst_audio_encoder_finish_frame (enc, out_buf, faac->samples);
}
return ret;
@ -850,72 +681,6 @@ encode_failed:
}
}
static gboolean
gst_faac_sink_event (GstPad * pad, GstEvent * event)
{
GstFaac *faac;
gboolean ret;
faac = GST_FAAC (gst_pad_get_parent (pad));
GST_LOG_OBJECT (faac, "received %s", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
{
if (faac->handle) {
/* flush first */
GST_DEBUG_OBJECT (faac, "Pushing out remaining buffers because of EOS");
gst_faac_push_buffers (faac, TRUE);
}
ret = gst_pad_event_default (pad, event);
break;
}
default:
ret = gst_pad_event_default (pad, event);
break;
}
gst_object_unref (faac);
return ret;
}
static GstFlowReturn
gst_faac_chain (GstPad * pad, GstBuffer * inbuf)
{
GstFlowReturn result = GST_FLOW_OK;
GstFaac *faac;
faac = GST_FAAC (gst_pad_get_parent (pad));
if (!faac->handle)
goto no_handle;
GST_LOG_OBJECT (faac, "Got buffer time: %" GST_TIME_FORMAT " duration: %"
GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (inbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (inbuf)));
gst_adapter_push (faac->adapter, inbuf);
result = gst_faac_push_buffers (faac, FALSE);
done:
gst_object_unref (faac);
return result;
/* ERRORS */
no_handle:
{
GST_ELEMENT_ERROR (faac, CORE, NEGOTIATION, (NULL),
("format wasn't negotiated before chain function"));
gst_buffer_unref (inbuf);
result = GST_FLOW_ERROR;
goto done;
}
}
static void
gst_faac_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
@ -986,35 +751,6 @@ gst_faac_get_property (GObject * object,
GST_OBJECT_UNLOCK (faac);
}
/* GstElement state change handler: chains up to the parent class, then
 * closes the encoder and resets internal state on PAUSED -> READY. */
static GstStateChangeReturn
gst_faac_change_state (GstElement * element, GstStateChange transition)
{
  GstFaac *faac = GST_FAAC (element);
  GstStateChangeReturn ret;

  /* nothing special to do on upward transitions; chain up directly */
  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  /* downwards state changes */
  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
    gst_faac_close_encoder (faac);
    gst_faac_reset (faac);
  }

  return ret;
}
static gboolean
plugin_init (GstPlugin * plugin)
{

View file

@ -21,8 +21,8 @@
#define __GST_FAAC_H__
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudioencoder.h>
#include <faac.h>
G_BEGIN_DECLS
@ -42,41 +42,37 @@ typedef struct _GstFaac GstFaac;
typedef struct _GstFaacClass GstFaacClass;
struct _GstFaac {
GstElement element;
/* pads */
GstPad *srcpad, *sinkpad;
GstAudioEncoder element;
/* stream properties */
gint samplerate,
channels,
format,
bps,
quality,
bitrate,
brtype,
bps;
/* input frame size */
gulong samples;
/* required output buffer size */
gulong bytes;
/* negotiated */
gint mpegversion, outputformat;
/* properties */
gint bitrate,
profile,
mpegversion,
shortctl,
outputformat;
quality,
brtype,
shortctl;
gboolean tns,
midside;
gulong bytes,
samples;
/* FAAC object */
faacEncHandle handle;
/* cache of the input */
GstAdapter *adapter;
/* offset of data to be encoded next */
guint offset;
/* ts for last buffer */
GstClockTime next_ts;
};
struct _GstFaacClass {
GstElementClass parent_class;
GstAudioEncoderClass parent_class;
};
GType gst_faac_get_type (void);

View file

@ -1,7 +1,8 @@
plugin_LTLIBRARIES = libgstfaad.la
libgstfaad_la_SOURCES = gstfaad.c
libgstfaad_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) \
libgstfaad_la_CFLAGS = -DGST_USE_UNSTABLE_API \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) $(GST_CFLAGS) $(FAAD_CFLAGS)
libgstfaad_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-@GST_MAJORMINOR@ \
$(GST_BASE_LIBS) $(GST_LIBS) $(FAAD_LIBS)

View file

@ -140,55 +140,24 @@ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_STATIC_CAPS (STATIC_CAPS)
);
static void gst_faad_base_init (GstFaadClass * klass);
static void gst_faad_class_init (GstFaadClass * klass);
static void gst_faad_init (GstFaad * faad);
static void gst_faad_reset (GstFaad * faad);
static void gst_faad_finalize (GObject * object);
static void clear_queued (GstFaad * faad);
static gboolean gst_faad_start (GstAudioDecoder * dec);
static gboolean gst_faad_stop (GstAudioDecoder * dec);
static gboolean gst_faad_set_format (GstAudioDecoder * dec, GstCaps * caps);
static gboolean gst_faad_parse (GstAudioDecoder * dec, GstAdapter * adapter,
gint * offset, gint * length);
static GstFlowReturn gst_faad_handle_frame (GstAudioDecoder * dec,
GstBuffer * buffer);
static void gst_faad_flush (GstAudioDecoder * dec, gboolean hard);
static gboolean gst_faad_setcaps (GstPad * pad, GstCaps * caps);
static gboolean gst_faad_src_event (GstPad * pad, GstEvent * event);
static gboolean gst_faad_sink_event (GstPad * pad, GstEvent * event);
static gboolean gst_faad_src_query (GstPad * pad, GstQuery * query);
static GstFlowReturn gst_faad_chain (GstPad * pad, GstBuffer * buffer);
static GstStateChangeReturn gst_faad_change_state (GstElement * element,
GstStateChange transition);
static gboolean gst_faad_src_convert (GstFaad * faad, GstFormat src_format,
gint64 src_val, GstFormat dest_format, gint64 * dest_val);
static gboolean gst_faad_open_decoder (GstFaad * faad);
static void gst_faad_close_decoder (GstFaad * faad);
static GstElementClass *parent_class; /* NULL */
GType
gst_faad_get_type (void)
{
static GType gst_faad_type = 0;
if (!gst_faad_type) {
static const GTypeInfo gst_faad_info = {
sizeof (GstFaadClass),
(GBaseInitFunc) gst_faad_base_init,
NULL,
(GClassInitFunc) gst_faad_class_init,
NULL,
NULL,
sizeof (GstFaad),
0,
(GInstanceInitFunc) gst_faad_init,
};
gst_faad_type = g_type_register_static (GST_TYPE_ELEMENT,
"GstFaad", &gst_faad_info, 0);
}
return gst_faad_type;
}
GST_BOILERPLATE (GstFaad, gst_faad, GstAudioDecoder, GST_TYPE_AUDIO_DECODER);
static void
gst_faad_base_init (GstFaadClass * klass)
gst_faad_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
@ -208,47 +177,27 @@ gst_faad_base_init (GstFaadClass * klass)
static void
gst_faad_class_init (GstFaadClass * klass)
{
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstAudioDecoderClass *base_class = GST_AUDIO_DECODER_CLASS (klass);
parent_class = g_type_class_peek_parent (klass);
gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_faad_finalize);
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_faad_change_state);
base_class->start = GST_DEBUG_FUNCPTR (gst_faad_start);
base_class->stop = GST_DEBUG_FUNCPTR (gst_faad_stop);
base_class->set_format = GST_DEBUG_FUNCPTR (gst_faad_set_format);
base_class->parse = GST_DEBUG_FUNCPTR (gst_faad_parse);
base_class->handle_frame = GST_DEBUG_FUNCPTR (gst_faad_handle_frame);
base_class->flush = GST_DEBUG_FUNCPTR (gst_faad_flush);
}
static void
gst_faad_init (GstFaad * faad)
gst_faad_init (GstFaad * faad, GstFaadClass * klass)
{
faad->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
gst_element_add_pad (GST_ELEMENT (faad), faad->sinkpad);
gst_pad_set_event_function (faad->sinkpad,
GST_DEBUG_FUNCPTR (gst_faad_sink_event));
gst_pad_set_setcaps_function (faad->sinkpad,
GST_DEBUG_FUNCPTR (gst_faad_setcaps));
gst_pad_set_chain_function (faad->sinkpad,
GST_DEBUG_FUNCPTR (gst_faad_chain));
faad->srcpad = gst_pad_new_from_static_template (&src_template, "src");
gst_pad_use_fixed_caps (faad->srcpad);
gst_pad_set_query_function (faad->srcpad,
GST_DEBUG_FUNCPTR (gst_faad_src_query));
gst_pad_set_event_function (faad->srcpad,
GST_DEBUG_FUNCPTR (gst_faad_src_event));
gst_element_add_pad (GST_ELEMENT (faad), faad->srcpad);
faad->adapter = gst_adapter_new ();
gst_faad_reset (faad);
}
static void
gst_faad_reset_stream_state (GstFaad * faad)
{
faad->sync_flush = 0;
gst_adapter_clear (faad->adapter);
clear_queued (faad);
if (faad->handle)
faacDecPostSeekReset (faad->handle, 0);
}
@ -256,45 +205,43 @@ gst_faad_reset_stream_state (GstFaad * faad)
static void
gst_faad_reset (GstFaad * faad)
{
gst_segment_init (&faad->segment, GST_FORMAT_TIME);
faad->samplerate = -1;
faad->channels = -1;
faad->init = FALSE;
faad->packetised = FALSE;
g_free (faad->channel_positions);
faad->channel_positions = NULL;
faad->next_ts = GST_CLOCK_TIME_NONE;
faad->prev_ts = 0;
faad->bytes_in = 0;
faad->sum_dur_out = 0;
faad->error_count = 0;
faad->last_header = 0;
gst_faad_reset_stream_state (faad);
}
static void
gst_faad_finalize (GObject * object)
static gboolean
gst_faad_start (GstAudioDecoder * dec)
{
GstFaad *faad = GST_FAAD (object);
GstFaad *faad = GST_FAAD (dec);
g_object_unref (faad->adapter);
GST_DEBUG_OBJECT (dec, "start");
gst_faad_reset (faad);
G_OBJECT_CLASS (parent_class)->finalize (object);
/* call upon legacy upstream byte support (e.g. seeking) */
gst_audio_decoder_set_byte_time (dec, TRUE);
/* never mind a few errors */
gst_audio_decoder_set_max_errors (dec, 10);
return TRUE;
}
static void
gst_faad_send_tags (GstFaad * faad)
static gboolean
gst_faad_stop (GstAudioDecoder * dec)
{
GstTagList *tags;
GstFaad *faad = GST_FAAD (dec);
tags = gst_tag_list_new ();
GST_DEBUG_OBJECT (dec, "stop");
gst_faad_reset (faad);
gst_faad_close_decoder (faad);
gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
GST_TAG_AUDIO_CODEC, "MPEG-4 AAC audio", NULL);
gst_element_found_tags (GST_ELEMENT (faad), tags);
return TRUE;
}
static gint
@ -327,9 +274,9 @@ aac_rate_idx (gint rate)
}
static gboolean
gst_faad_setcaps (GstPad * pad, GstCaps * caps)
gst_faad_set_format (GstAudioDecoder * dec, GstCaps * caps)
{
GstFaad *faad = GST_FAAD (gst_pad_get_parent (pad));
GstFaad *faad = GST_FAAD (dec);
GstStructure *str = gst_caps_get_structure (caps, 0);
GstBuffer *buf;
const GValue *value;
@ -352,8 +299,8 @@ gst_faad_setcaps (GstPad * pad, GstCaps * caps)
/* We have codec data, means packetised stream */
faad->packetised = TRUE;
buf = gst_value_get_buffer (value);
buf = gst_value_get_buffer (value);
g_return_val_if_fail (buf != NULL, FALSE);
cdata = GST_BUFFER_DATA (buf);
@ -391,9 +338,6 @@ gst_faad_setcaps (GstPad * pad, GstCaps * caps)
faad->channels = 0;
faad->init = TRUE;
gst_faad_send_tags (faad);
gst_adapter_clear (faad->adapter);
} else if ((value = gst_structure_get_value (str, "framed")) &&
g_value_get_boolean (value) == TRUE) {
faad->packetised = TRUE;
@ -424,7 +368,6 @@ gst_faad_setcaps (GstPad * pad, GstCaps * caps)
}
}
gst_object_unref (faad);
return TRUE;
/* ERRORS */
@ -535,349 +478,6 @@ gst_faad_chanpos_to_gst (GstFaad * faad, guchar * fpos, guint num,
return pos;
}
static void
clear_queued (GstFaad * faad)
{
g_list_foreach (faad->queued, (GFunc) gst_mini_object_unref, NULL);
g_list_free (faad->queued);
faad->queued = NULL;
g_list_foreach (faad->gather, (GFunc) gst_mini_object_unref, NULL);
g_list_free (faad->gather);
faad->gather = NULL;
g_list_foreach (faad->decode, (GFunc) gst_mini_object_unref, NULL);
g_list_free (faad->decode);
faad->decode = NULL;
}
/* flush_queued:
 * Pushes every buffer collected on the reverse-playback output queue
 * downstream in list order, emptying the queue as it goes.
 * Returns the flow return of the last push, or GST_FLOW_OK when the
 * queue was already empty.  NOTE(review): pushing continues even after
 * a non-OK return — presumably intentional to drain the queue fully. */
static GstFlowReturn
flush_queued (GstFaad * faad)
{
  GstFlowReturn ret = GST_FLOW_OK;

  while (faad->queued) {
    GstBuffer *buf = GST_BUFFER_CAST (faad->queued->data);

    GST_LOG_OBJECT (faad, "pushing buffer %p, timestamp %"
        GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT, buf,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

    /* iterate output queue and push downstream */
    ret = gst_pad_push (faad->srcpad, buf);

    faad->queued = g_list_delete_link (faad->queued, faad->queued);
  }

  return ret;
}
/* gst_faad_drain:
 * Flushes decoder state at a discontinuity (EOS, new segment, discont).
 * Forward playback: feeds a NULL buffer through the chain function to
 * squeeze out frames still pending sync.  Reverse playback: decodes the
 * delayed buffers queued on @decode, pushes the accumulated output
 * queue, then promotes the @gather list (reversed) to next run's
 * @decode list. */
static GstFlowReturn
gst_faad_drain (GstFaad * faad)
{
  GstFlowReturn ret = GST_FLOW_OK;

  GST_DEBUG_OBJECT (faad, "draining");

  if (faad->segment.rate < 0.0) {
    /* also decode tail = head of previous fragment to fill this one */
    while (faad->decode) {
      GstBuffer *buf = GST_BUFFER_CAST (faad->decode->data);

      GST_DEBUG_OBJECT (faad, "processing delayed decode buffer");
      /* chain takes ownership of the buffer */
      gst_faad_chain (faad->sinkpad, buf);
      faad->decode = g_list_delete_link (faad->decode, faad->decode);
    }
    /* if we have some queued frames for reverse playback, flush
     * them now */
    ret = flush_queued (faad);
    /* move non-decoded leading buffers gathered in previous run
     * to decode queue for this run */
    faad->decode = g_list_reverse (faad->gather);
    faad->gather = NULL;
  } else {
    /* squeeze any possible remaining frames that are pending sync */
    gst_faad_chain (faad->sinkpad, NULL);
  }

  return ret;
}
/* Attempts a "raw" seek for streams without a demuxer: the requested
 * TIME position is mapped to a byte offset via the average bitrate and
 * re-issued upstream as a BYTES seek.  Only simple rate-1.0 TIME seeks
 * with a SET start and no stop position are supported. */
static gboolean
gst_faad_do_raw_seek (GstFaad * faad, GstEvent * event)
{
  GstSeekFlags seek_flags;
  GstSeekType stype, etype;
  GstFormat fmt;
  gdouble seek_rate;
  gint64 byte_pos, start_time;

  gst_event_parse_seek (event, &seek_rate, &fmt, &seek_flags, &stype,
      &start_time, &etype, NULL);

  /* reject anything but a plain forward TIME seek */
  if (seek_rate != 1.0)
    return FALSE;
  if (fmt != GST_FORMAT_TIME)
    return FALSE;
  if (stype != GST_SEEK_TYPE_SET || etype != GST_SEEK_TYPE_NONE)
    return FALSE;

  /* need an average-bitrate estimate to map time onto bytes */
  if (!gst_faad_src_convert (faad, GST_FORMAT_TIME, start_time,
          GST_FORMAT_BYTES, &byte_pos))
    return FALSE;

  event = gst_event_new_seek (1.0, GST_FORMAT_BYTES, seek_flags,
      GST_SEEK_TYPE_SET, byte_pos, GST_SEEK_TYPE_NONE, -1);

  GST_DEBUG_OBJECT (faad, "seeking to %" GST_TIME_FORMAT " at byte offset %"
      G_GINT64_FORMAT, GST_TIME_ARGS (start_time), byte_pos);

  return gst_pad_push_event (faad->sinkpad, event);
}
/* Source pad event handler.  SEEK events are offered upstream first (a
 * demuxer may be present); when refused, a local TIME -> BYTES raw seek
 * is attempted.  All other events go straight upstream. */
static gboolean
gst_faad_src_event (GstPad * pad, GstEvent * event)
{
  GstFaad *faad = GST_FAAD (gst_pad_get_parent (pad));
  gboolean handled;

  GST_LOG_OBJECT (faad, "Handling %s event", GST_EVENT_TYPE_NAME (event));

  if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK) {
    /* try upstream first, there might be a demuxer */
    gst_event_ref (event);
    handled = gst_pad_push_event (faad->sinkpad, event);
    if (!handled)
      handled = gst_faad_do_raw_seek (faad, event);
    gst_event_unref (event);
  } else {
    handled = gst_pad_push_event (faad->sinkpad, event);
  }

  gst_object_unref (faad);
  return handled;
}
/* Sink pad event handler.
 * FLUSH_STOP resets stream parsing state; EOS drains pending data
 * first; NEWSEGMENT in BYTES is translated to TIME (via the average
 * bitrate) and replaced with a new event before forwarding.  Other
 * events take the default route. */
static gboolean
gst_faad_sink_event (GstPad * pad, GstEvent * event)
{
  GstFaad *faad;
  gboolean res = TRUE;

  faad = GST_FAAD (gst_pad_get_parent (pad));

  GST_LOG_OBJECT (faad, "Handling %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      gst_faad_reset_stream_state (faad);
      res = gst_pad_push_event (faad->srcpad, event);
      break;
    case GST_EVENT_EOS:
      /* push out whatever is still buffered before forwarding EOS */
      gst_faad_drain (faad);
      gst_faad_reset_stream_state (faad);
      res = gst_pad_push_event (faad->srcpad, event);
      break;
    case GST_EVENT_NEWSEGMENT:
    {
      GstFormat fmt;
      gboolean is_update;
      gint64 start, end, base;
      gdouble rate;

      gst_event_parse_new_segment (event, &is_update, &rate, &fmt, &start,
          &end, &base);

      /* drain queued buffers before we activate the new segment */
      gst_faad_drain (faad);

      if (fmt == GST_FORMAT_TIME) {
        /* TIME segments can be forwarded unchanged */
        GST_DEBUG_OBJECT (faad,
            "Got NEWSEGMENT event in GST_FORMAT_TIME, passing on (%"
            GST_TIME_FORMAT " - %" GST_TIME_FORMAT ")", GST_TIME_ARGS (start),
            GST_TIME_ARGS (end));
        gst_segment_set_newsegment (&faad->segment, is_update, rate, fmt, start,
            end, base);
      } else if (fmt == GST_FORMAT_BYTES) {
        gint64 new_start = 0;
        gint64 new_end = -1;

        GST_DEBUG_OBJECT (faad, "Got NEWSEGMENT event in GST_FORMAT_BYTES (%"
            G_GUINT64_FORMAT " - %" G_GUINT64_FORMAT ")", start, end);

        /* map byte positions to time via the average bitrate so far;
         * falls back to start = 0 when no data has been decoded yet */
        if (gst_faad_src_convert (faad, GST_FORMAT_BYTES, start,
                GST_FORMAT_TIME, &new_start)) {
          if (end != -1) {
            gst_faad_src_convert (faad, GST_FORMAT_BYTES, end,
                GST_FORMAT_TIME, &new_end);
          }
        } else {
          GST_DEBUG_OBJECT (faad,
              "no average bitrate yet, sending newsegment with start at 0");
        }

        /* replace the BYTES event with an equivalent TIME event */
        gst_event_unref (event);
        event = gst_event_new_new_segment (is_update, rate,
            GST_FORMAT_TIME, new_start, new_end, new_start);
        gst_segment_set_newsegment (&faad->segment, is_update, rate,
            GST_FORMAT_TIME, new_start, new_end, new_start);

        GST_DEBUG_OBJECT (faad,
            "Sending new NEWSEGMENT event, time %" GST_TIME_FORMAT
            " - %" GST_TIME_FORMAT, GST_TIME_ARGS (new_start),
            GST_TIME_ARGS (new_end));

        /* restart timestamp tracking from the new segment start */
        faad->next_ts = GST_CLOCK_TIME_NONE;
        faad->prev_ts = new_start;
      }

      res = gst_pad_push_event (faad->srcpad, event);
      break;
    }
    default:
      res = gst_pad_event_default (pad, event);
      break;
  }

  gst_object_unref (faad);

  return res;
}
/* Converts between BYTES and TIME using the average bitrate observed so
 * far (bytes received vs. total decoded duration sent out).  Returns
 * FALSE when no data has flowed yet or the format pair is unsupported;
 * @dest_val may be NULL. */
static gboolean
gst_faad_src_convert (GstFaad * faad, GstFormat src_format, gint64 src_val,
    GstFormat dest_format, gint64 * dest_val)
{
  guint64 total_bytes, total_time, scaled;

  /* identity conversion needs no statistics */
  if (src_format == dest_format) {
    if (dest_val)
      *dest_val = src_val;
    return TRUE;
  }

  /* snapshot the running counters under the object lock */
  GST_OBJECT_LOCK (faad);
  total_bytes = faad->bytes_in;
  total_time = faad->sum_dur_out;
  GST_OBJECT_UNLOCK (faad);

  /* no usable average bitrate yet */
  if (total_bytes == 0 || total_time == 0)
    return FALSE;

  /* convert based on the average bitrate so far */
  if (src_format == GST_FORMAT_BYTES && dest_format == GST_FORMAT_TIME)
    scaled = gst_util_uint64_scale (src_val, total_time, total_bytes);
  else if (src_format == GST_FORMAT_TIME && dest_format == GST_FORMAT_BYTES)
    scaled = gst_util_uint64_scale (src_val, total_bytes, total_time);
  else
    return FALSE;

  if (dest_val)
    *dest_val = (gint64) scaled;

  return TRUE;
}
/* Source pad query handler.
 * DURATION and POSITION queries in TIME are answered by converting the
 * upstream BYTES length/position through the average-bitrate estimate
 * (gst_faad_src_convert), trying upstream first in case a demuxer can
 * answer directly.  Other queries use the default handler. */
static gboolean
gst_faad_src_query (GstPad * pad, GstQuery * query)
{
  gboolean res = FALSE;
  GstFaad *faad;
  GstPad *peer = NULL;

  faad = GST_FAAD (gst_pad_get_parent (pad));

  GST_LOG_OBJECT (faad, "processing %s query", GST_QUERY_TYPE_NAME (query));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_DURATION:{
      GstFormat format;
      gint64 len_bytes, duration;

      /* try upstream first, in case there's a demuxer */
      if ((res = gst_pad_query_default (pad, query)))
        break;

      gst_query_parse_duration (query, &format, NULL);
      /* only TIME durations can be estimated here */
      if (format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (faad, "query failed: can't handle format %s",
            gst_format_get_name (format));
        break;
      }

      peer = gst_pad_get_peer (faad->sinkpad);
      if (peer == NULL)
        break;

      /* ask upstream for its byte length, then scale to time */
      format = GST_FORMAT_BYTES;
      if (!gst_pad_query_duration (peer, &format, &len_bytes)) {
        GST_DEBUG_OBJECT (faad, "query failed: failed to get upstream length");
        break;
      }

      res = gst_faad_src_convert (faad, GST_FORMAT_BYTES, len_bytes,
          GST_FORMAT_TIME, &duration);

      if (res) {
        gst_query_set_duration (query, GST_FORMAT_TIME, duration);
        GST_LOG_OBJECT (faad, "duration estimate: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (duration));
      }

      break;
    }
    case GST_QUERY_POSITION:{
      GstFormat format;
      gint64 pos_bytes, pos;

      /* try upstream first, in case there's a demuxer */
      if ((res = gst_pad_query_default (pad, query)))
        break;

      gst_query_parse_position (query, &format, NULL);
      if (format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (faad, "query failed: can't handle format %s",
            gst_format_get_name (format));
        break;
      }

      peer = gst_pad_get_peer (faad->sinkpad);
      if (peer == NULL)
        break;

      format = GST_FORMAT_BYTES;
      if (!gst_pad_query_position (peer, &format, &pos_bytes)) {
        /* upstream has no byte position: fall back to our running
         * output timestamp (read under the object lock) */
        GST_OBJECT_LOCK (faad);
        pos = faad->next_ts;
        GST_OBJECT_UNLOCK (faad);
        res = TRUE;
      } else {
        /* scale the byte position to time via the average bitrate */
        res = gst_faad_src_convert (faad, GST_FORMAT_BYTES, pos_bytes,
            GST_FORMAT_TIME, &pos);
      }

      if (res) {
        gst_query_set_position (query, GST_FORMAT_TIME, pos);
      }

      break;
    }
    default:
      res = gst_pad_query_default (pad, query);
      break;
  }

  if (peer)
    gst_object_unref (peer);
  gst_object_unref (faad);

  return res;
}
static gboolean
gst_faad_update_caps (GstFaad * faad, faacDecFrameInfo * info)
{
@ -935,7 +535,7 @@ gst_faad_update_caps (GstFaad * faad, faacDecFrameInfo * info)
GST_DEBUG_OBJECT (faad, "New output caps: %" GST_PTR_FORMAT, caps);
ret = gst_pad_set_caps (faad->srcpad, caps);
ret = gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (faad), caps);
gst_caps_unref (caps);
return ret;
@ -950,12 +550,13 @@ gst_faad_update_caps (GstFaad * faad, faacDecFrameInfo * info)
* gst/typefind/) for ADTS because 12 bits isn't very reliable.
*/
static gboolean
gst_faad_sync (GstFaad * faad, guint8 * data, guint size, gboolean next,
guint * off)
gst_faad_sync (GstFaad * faad, const guint8 * data, guint size, gboolean next,
gint * off, gint * length)
{
guint n = 0;
gint snc;
gboolean ret = FALSE;
guint len = 0;
GST_LOG_OBJECT (faad, "Finding syncpoint");
@ -968,8 +569,6 @@ gst_faad_sync (GstFaad * faad, guint8 * data, guint size, gboolean next,
if ((snc & 0xfff6) == 0xfff0) {
/* we have an ADTS syncpoint. Parse length and find
* next syncpoint. */
guint len;
GST_LOG_OBJECT (faad,
"Found one ADTS syncpoint at offset 0x%x, tracing next...", n);
@ -1012,10 +611,14 @@ gst_faad_sync (GstFaad * faad, guint8 * data, guint size, gboolean next,
}
exit:
*off = n;
if (!ret)
if (ret) {
*length = len;
} else {
GST_LOG_OBJECT (faad, "Found no syncpoint");
}
return ret;
}
@ -1038,78 +641,52 @@ looks_like_valid_header (guint8 * input_data, guint input_size)
return FALSE;
}
#define FAAD_MAX_ERROR 10
#define FAAD_MAX_SYNC 10 * 8 * 1024
static GstFlowReturn
gst_faad_parse (GstAudioDecoder * dec, GstAdapter * adapter,
gint * offset, gint * length)
{
GstFaad *faad;
const guint8 *data;
guint size;
gboolean sync, eos;
faad = GST_FAAD (dec);
size = gst_adapter_available (adapter);
g_return_val_if_fail (size > 0, GST_FLOW_ERROR);
gst_audio_decoder_get_parse_state (dec, &sync, &eos);
if (faad->packetised) {
*offset = 0;
*length = size;
return GST_FLOW_OK;
} else {
data = gst_adapter_peek (adapter, size);
return gst_faad_sync (faad, data, size, !eos, offset, length) ?
GST_FLOW_OK : GST_FLOW_UNEXPECTED;
}
}
static GstFlowReturn
gst_faad_chain (GstPad * pad, GstBuffer * buffer)
gst_faad_handle_frame (GstAudioDecoder * dec, GstBuffer * buffer)
{
GstFaad *faad;
GstFlowReturn ret = GST_FLOW_OK;
guint input_size;
guint available;
guchar *input_data;
GstFaad *faad;
GstBuffer *outbuf;
faacDecFrameInfo info;
void *out;
gboolean run_loop = TRUE;
guint sync_off;
GstClockTime ts;
gboolean next;
faad = GST_FAAD (gst_pad_get_parent (pad));
faad = GST_FAAD (dec);
if (G_LIKELY (buffer)) {
GST_LOG_OBJECT (faad, "buffer of size %d with ts: %" GST_TIME_FORMAT
", duration %" GST_TIME_FORMAT, GST_BUFFER_SIZE (buffer),
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));
/* no fancy draining */
if (G_UNLIKELY (!buffer))
return GST_FLOW_OK;
if (GST_BUFFER_IS_DISCONT (buffer)) {
gst_faad_drain (faad);
gst_faad_reset_stream_state (faad);
faad->discont = TRUE;
}
gst_adapter_push (faad->adapter, buffer);
buffer = NULL;
next = TRUE;
} else {
next = FALSE;
}
available = gst_adapter_available (faad->adapter);
input_size = available;
if (G_UNLIKELY (!available))
goto out;
ts = gst_adapter_prev_timestamp (faad->adapter, NULL);
if (GST_CLOCK_TIME_IS_VALID (ts) && (ts != faad->prev_ts)) {
faad->prev_ts = ts;
} else {
/* nothing new */
ts = GST_CLOCK_TIME_NONE;
}
if (!GST_CLOCK_TIME_IS_VALID (faad->next_ts))
faad->next_ts = faad->prev_ts;
input_data = (guchar *) gst_adapter_peek (faad->adapter, available);
if (!faad->packetised) {
if (!gst_faad_sync (faad, input_data, input_size, next, &sync_off)) {
faad->sync_flush += sync_off;
input_size -= sync_off;
if (faad->sync_flush > FAAD_MAX_SYNC)
goto parse_failed;
else
goto out;
} else {
faad->sync_flush = 0;
input_data += sync_off;
input_size -= sync_off;
}
}
input_data = GST_BUFFER_DATA (buffer);
input_size = GST_BUFFER_SIZE (buffer);
init:
/* init if not already done during capsnego */
@ -1143,7 +720,6 @@ init:
}
faad->init = TRUE;
gst_faad_send_tags (faad);
/* make sure we create new caps below */
faad->samplerate = 0;
@ -1151,18 +727,11 @@ init:
}
/* decode cycle */
info.bytesconsumed = input_size;
info.error = 0;
while ((input_size > 0) && run_loop) {
do {
if (faad->packetised) {
/* Only one packet per buffer, no matter how much is really consumed */
run_loop = FALSE;
} else {
if (input_size < FAAD_MIN_STREAMSIZE || info.bytesconsumed <= 0) {
break;
}
if (!faad->packetised) {
/* faad only really parses ADTS header at Init time, not when decoding,
* so monitor for changes and kick faad when needed */
if (GST_READ_UINT32_BE (input_data) >> 4 != faad->last_header >> 4) {
@ -1178,33 +747,14 @@ init:
out = faacDecDecode (faad->handle, &info, input_data, input_size);
if (info.error > 0) {
/* mark discont for the next buffer */
faad->discont = TRUE;
/* flush a bit, arranges for resync next time */
input_size--;
faad->error_count++;
/* do not bail out at once, but know when to stop */
if (faad->error_count > FAAD_MAX_ERROR)
goto decode_failed;
else {
GST_WARNING_OBJECT (faad, "decoding error: %s",
faacDecGetErrorMessage (info.error));
goto out;
}
/* give up on frame and bail out */
gst_audio_decoder_finish_frame (dec, NULL, 1);
goto decode_failed;
}
/* ok again */
faad->error_count = 0;
GST_LOG_OBJECT (faad, "%d bytes consumed, %d samples decoded",
(guint) info.bytesconsumed, (guint) info.samples);
if (info.bytesconsumed > input_size)
info.bytesconsumed = input_size;
input_size -= info.bytesconsumed;
input_data += info.bytesconsumed;
if (out && info.samples > 0) {
if (!gst_faad_update_caps (faad, &info))
goto negotiation_failed;
@ -1213,82 +763,21 @@ init:
if (info.samples > G_MAXUINT / faad->bps)
goto sample_overflow;
/* play decoded data */
if (info.samples > 0) {
guint bufsize = info.samples * faad->bps;
guint num_samples = info.samples / faad->channels;
/* note: info.samples is total samples, not per channel */
ret =
gst_pad_alloc_buffer_and_set_caps (GST_AUDIO_DECODER_SRC_PAD
(faad), 0, info.samples * faad->bps,
GST_PAD_CAPS (GST_AUDIO_DECODER_SRC_PAD (faad)), &outbuf);
if (ret != GST_FLOW_OK)
goto out;
/* note: info.samples is total samples, not per channel */
ret =
gst_pad_alloc_buffer_and_set_caps (faad->srcpad, 0, bufsize,
GST_PAD_CAPS (faad->srcpad), &outbuf);
if (ret != GST_FLOW_OK)
goto out;
memcpy (GST_BUFFER_DATA (outbuf), out, GST_BUFFER_SIZE (outbuf));
memcpy (GST_BUFFER_DATA (outbuf), out, GST_BUFFER_SIZE (outbuf));
GST_BUFFER_OFFSET (outbuf) =
GST_CLOCK_TIME_TO_FRAMES (faad->next_ts, faad->samplerate);
GST_BUFFER_TIMESTAMP (outbuf) = faad->next_ts;
GST_BUFFER_DURATION (outbuf) =
GST_FRAMES_TO_CLOCK_TIME (num_samples, faad->samplerate);
GST_OBJECT_LOCK (faad);
faad->next_ts += GST_BUFFER_DURATION (outbuf);
faad->sum_dur_out += GST_BUFFER_DURATION (outbuf);
faad->bytes_in += info.bytesconsumed;
GST_OBJECT_UNLOCK (faad);
if ((outbuf = gst_audio_buffer_clip (outbuf, &faad->segment,
faad->samplerate, faad->bps * faad->channels))) {
GST_LOG_OBJECT (faad,
"pushing buffer, off=%" G_GUINT64_FORMAT ", ts=%" GST_TIME_FORMAT,
GST_BUFFER_OFFSET (outbuf),
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
if (faad->discont) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
faad->discont = FALSE;
}
if (faad->segment.rate > 0.0) {
ret = gst_pad_push (faad->srcpad, outbuf);
} else {
/* reverse playback, queue frame till later when we get a discont. */
GST_LOG_OBJECT (faad, "queued frame");
faad->queued = g_list_prepend (faad->queued, outbuf);
ret = GST_FLOW_OK;
}
if (ret != GST_FLOW_OK)
goto out;
}
}
} else {
if (faad->packetised && faad->segment.rate < 0.0) {
/* leading non-decoded frames used as tail
* for next preceding fragment */
outbuf = gst_adapter_take_buffer (faad->adapter, available);
available = 0;
outbuf = gst_buffer_make_metadata_writable (outbuf);
GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DISCONT);
faad->gather = g_list_prepend (faad->gather, outbuf);
}
ret = gst_audio_decoder_finish_frame (dec, outbuf, 1);
}
/* adjust to incoming new timestamp, if any, after decoder delay */
if (GST_CLOCK_TIME_IS_VALID (ts)) {
faad->next_ts = ts;
ts = GST_CLOCK_TIME_NONE;
}
}
} while (FALSE);
out:
/* in raw case: (pretend) all consumed */
if (faad->packetised)
input_size = 0;
gst_adapter_flush (faad->adapter, available - input_size);
gst_object_unref (faad);
return ret;
/* ERRORS */
@ -1315,9 +804,8 @@ init2_failed:
}
decode_failed:
{
GST_ELEMENT_ERROR (faad, STREAM, DECODE, (NULL),
("decoding error: %s", faacDecGetErrorMessage (info.error)));
ret = GST_FLOW_ERROR;
GST_AUDIO_DECODER_ERROR (faad, 1, STREAM, DECODE, (NULL),
("decoding error: %s", faacDecGetErrorMessage (info.error)), ret);
goto out;
}
negotiation_failed:
@ -1334,13 +822,12 @@ sample_overflow:
ret = GST_FLOW_ERROR;
goto out;
}
parse_failed:
{
GST_ELEMENT_ERROR (faad, STREAM, DECODE, (NULL),
("failed to parse non-packetized stream"));
ret = GST_FLOW_ERROR;
goto out;
}
}
static void
gst_faad_flush (GstAudioDecoder * dec, gboolean hard)
{
gst_faad_reset_stream_state (GST_FAAD (dec));
}
static gboolean
@ -1377,38 +864,6 @@ gst_faad_close_decoder (GstFaad * faad)
}
}
/* GstElement state change handler: chains up to the parent class and
 * releases the faad decoder plus internal state when leaving PAUSED
 * for READY.  No work is needed on upward transitions. */
static GstStateChangeReturn
gst_faad_change_state (GstElement * element, GstStateChange transition)
{
  GstFaad *faad = GST_FAAD (element);
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  /* chain up first (guard against a parent class without the vfunc) */
  if (GST_ELEMENT_CLASS (parent_class)->change_state)
    ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  /* downward transitions: tear down decoder resources */
  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
    gst_faad_reset (faad);
    gst_faad_close_decoder (faad);
  }

  return ret;
}
static gboolean
plugin_init (GstPlugin * plugin)
{

View file

@ -21,7 +21,8 @@
#define __GST_FAAD_H__
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/audio/gstaudiodecoder.h>
#ifdef FAAD_IS_NEAAC
#include <neaacdec.h>
#else
@ -42,10 +43,7 @@ G_BEGIN_DECLS
(G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_FAAD))
typedef struct _GstFaad {
GstElement element;
GstPad *srcpad;
GstPad *sinkpad;
GstAudioDecoder element;
guint samplerate; /* sample rate of the last MPEG frame */
guint channels; /* number of channels of the last frame */
@ -55,34 +53,16 @@ typedef struct _GstFaad {
guint8 fake_codec_data[2];
guint32 last_header;
GstAdapter *adapter;
/* FAAD object */
faacDecHandle handle;
gboolean init;
gboolean packetised; /* We must differentiate between raw and packetised streams */
gint64 prev_ts; /* timestamp of previous buffer */
gint64 next_ts; /* timestamp of next buffer */
guint64 bytes_in; /* bytes received */
guint64 sum_dur_out; /* sum of durations of decoded buffers we sent out */
gint error_count;
gboolean discont;
gint sync_flush;
/* segment handling */
GstSegment segment;
/* list of raw output buffers for reverse playback */
GList *queued;
/* gather/decode queues for reverse playback */
GList *gather;
GList *decode;
} GstFaad;
typedef struct _GstFaadClass {
GstElementClass parent_class;
GstAudioDecoderClass parent_class;
} GstFaadClass;
GType gst_faad_get_type (void);

View file

@ -36,7 +36,7 @@ G_BEGIN_DECLS
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_KATE_PARSE,GstKateParseClass))
#define GST_IS_KATE_PARSE(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_KATE_PARSE))
#define GST_IS_KATKATEE_CLASS(klass) \
#define GST_IS_KATE_PARSE_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_KATE_PARSE))
typedef struct _GstKateParse GstKateParse;
typedef struct _GstKateParseClass GstKateParseClass;

View file

@ -677,6 +677,7 @@ gst_kate_tiger_video_set_caps (GstPad * pad, GstCaps * caps)
GstKateTiger *tiger = GST_KATE_TIGER (gst_pad_get_parent (pad));
GstVideoFormat format;
gint w, h;
gboolean ret;
GST_KATE_TIGER_MUTEX_LOCK (tiger);
@ -692,10 +693,10 @@ gst_kate_tiger_video_set_caps (GstPad * pad, GstCaps * caps)
GST_KATE_TIGER_MUTEX_UNLOCK (tiger);
gst_pad_set_caps (tiger->srcpad, caps);
ret = gst_pad_set_caps (tiger->srcpad, caps);
gst_object_unref (tiger);
return TRUE;
return ret;
}
static gdouble
@ -858,9 +859,13 @@ gst_kate_tiger_seek (GstKateTiger * tiger, GstPad * pad, GstEvent * event)
gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
&stop_type, &stop);
if (flags & GST_SEEK_FLAG_FLUSH)
gst_pad_push_event (tiger->srcpad, gst_event_new_flush_start ());
GST_KATE_TIGER_MUTEX_LOCK (tiger);
tiger->video_flushing = TRUE;
gst_kate_util_decoder_base_set_flushing (&tiger->decoder, TRUE);
g_cond_broadcast (tiger->cond);
GST_KATE_TIGER_MUTEX_UNLOCK (tiger);
if (format == GST_FORMAT_TIME) {

View file

@ -164,20 +164,6 @@ gst_rtmp_sink_start (GstBaseSink * basesink)
/* Mark this as an output connection */
RTMP_EnableWrite (sink->rtmp);
/* open the connection */
if (!RTMP_IsConnected (sink->rtmp)) {
if (!RTMP_Connect (sink->rtmp, NULL) || !RTMP_ConnectStream (sink->rtmp, 0)) {
GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE, (NULL),
("Could not connect to RTMP stream \"%s\" for writing", sink->uri));
RTMP_Free (sink->rtmp);
sink->rtmp = NULL;
g_free (sink->rtmp_uri);
sink->rtmp_uri = NULL;
return FALSE;
}
GST_DEBUG_OBJECT (sink, "Opened connection to %s", sink->rtmp_uri);
}
sink->first = TRUE;
return TRUE;
@ -210,6 +196,21 @@ gst_rtmp_sink_render (GstBaseSink * bsink, GstBuffer * buf)
GstBuffer *reffed_buf = NULL;
if (sink->first) {
/* open the connection */
if (!RTMP_IsConnected (sink->rtmp)) {
if (!RTMP_Connect (sink->rtmp, NULL)
|| !RTMP_ConnectStream (sink->rtmp, 0)) {
GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE, (NULL),
("Could not connect to RTMP stream \"%s\" for writing", sink->uri));
RTMP_Free (sink->rtmp);
sink->rtmp = NULL;
g_free (sink->rtmp_uri);
sink->rtmp_uri = NULL;
return GST_FLOW_ERROR;
}
GST_DEBUG_OBJECT (sink, "Opened connection to %s", sink->rtmp_uri);
}
/* FIXME: Parse the first buffer and see if it contains a header plus a packet instead
* of just assuming it's only the header */
GST_LOG_OBJECT (sink, "Caching first buffer of size %d for concatenation",

9
ext/spandsp/Makefile.am Normal file
View file

@ -0,0 +1,9 @@
plugin_LTLIBRARIES = libgstspandsp.la
libgstspandsp_la_SOURCES = gstspandsp.c gstspanplc.c
libgstspandsp_la_CFLAGS = $(GST_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) $(SPANDSP_CFLAGS)
libgstspandsp_la_LIBADD = $(SPANDSP_LIBS) $(GST_PLUGINS_BASE_LIBS) $(GST_LIBS)
libgstspandsp_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstspandsp_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = gstspanplc.h

40
ext/spandsp/gstspandsp.c Normal file
View file

@ -0,0 +1,40 @@
/*
* (C) 2011 Collabora Ltd.
* Contact: Youness Alaoui <youness.alaoui@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "gstspanplc.h"
/* Plugin entry point: registers the spanplc element with the GStreamer
 * registry under the "spandsp" plugin.
 *
 * Returns: TRUE if the element could be registered. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  return gst_element_register (plugin, "spanplc",
      GST_RANK_PRIMARY, GST_TYPE_SPAN_PLC);
}

/* Standard GStreamer plugin descriptor for the "spandsp" plugin. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    "spandsp",
    "libspandsp plugin",
    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

300
ext/spandsp/gstspanplc.c Normal file
View file

@ -0,0 +1,300 @@
/*
* (C) 2011 Collabora Ltd.
* Contact: Youness Alaoui <youness.alaoui@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/**
* SECTION:element-spanplc
*
* The spanplc (Packet Loss Concealment) element provides a synthetic
* fill-in signal, to minimise the audible effect of lost packets in
* VoIP applications
*
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "gstspanplc.h"
/* GStreamer 0.10 boilerplate: declares gst_span_plc_get_type () and the
 * parent_class pointer, with GstElement as the base class. */
GST_BOILERPLATE (GstSpanPlc, gst_span_plc, GstElement, GST_TYPE_ELEMENT);

GST_DEBUG_CATEGORY_STATIC (gst_span_plc_debug);
#define GST_CAT_DEFAULT gst_span_plc_debug

/* Both pads accept the same format: native-endian, signed 16-bit mono
 * raw audio at any sample rate (the format spandsp's PLC operates on). */
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw-int, "
        "endianness = (int) BYTE_ORDER, signed = (bool) TRUE, "
        "width = (int) 16, depth = (int) 16, "
        "rate = (int) [ 1, MAX ], channels = (int) 1")
    );

static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw-int, "
        "endianness = (int) BYTE_ORDER, signed = (bool) TRUE, "
        "width = (int) 16, depth = (int) 16, "
        "rate = (int) [ 1, MAX ], channels = (int) 1")
    );

/* Forward declarations for the vfuncs and pad callbacks defined below. */
static void gst_span_plc_dispose (GObject * object);
static GstStateChangeReturn gst_span_plc_change_state (GstElement * element,
    GstStateChange transition);
static gboolean gst_span_plc_setcaps_sink (GstPad * pad, GstCaps * caps);
static GstFlowReturn gst_span_plc_chain (GstPad * pad, GstBuffer * buf);
static gboolean gst_span_plc_event_sink (GstPad * pad, GstEvent * event);
/* Registers the static pad templates and the element metadata on the
 * element class (GStreamer 0.10 base_init). */
static void
gst_span_plc_base_init (gpointer gclass)
{
  GstElementClass *klass = (GstElementClass *) gclass;

  gst_element_class_add_pad_template (klass,
      gst_static_pad_template_get (&src_factory));
  gst_element_class_add_pad_template (klass,
      gst_static_pad_template_get (&sink_factory));

  gst_element_class_set_details_simple (klass, "SpanDSP PLC",
      "Filter/Effect/Audio",
      "Adds packet loss concealment to audio",
      "Youness Alaoui <youness.alaoui@collabora.co.uk>");
}
/* Class init: wires up the GObject dispose handler and the element
 * state-change handler, and initializes the element's debug category. */
static void
gst_span_plc_class_init (GstSpanPlcClass * klass)
{
  GObjectClass *object_class;
  GstElementClass *element_class;

  object_class = (GObjectClass *) klass;
  element_class = (GstElementClass *) klass;

  object_class->dispose = gst_span_plc_dispose;
  element_class->change_state = GST_DEBUG_FUNCPTR (gst_span_plc_change_state);

  GST_DEBUG_CATEGORY_INIT (gst_span_plc_debug, "spanplc",
      0, "spanDSP's packet loss concealment");
}
/* Instance init: creates the two always pads from the static templates,
 * installs the sink-pad callbacks (setcaps, chain, event), sets up
 * getcaps proxying between the pads, and adds the pads to the element. */
static void
gst_span_plc_init (GstSpanPlc * plc, GstSpanPlcClass * gclass)
{
  GST_DEBUG_OBJECT (plc, "init");

  plc->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
  plc->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");

  /* Sink caps drive the src caps (see gst_span_plc_setcaps_sink); each
   * pad advertises its peer's caps via the proxy getcaps function. */
  gst_pad_set_setcaps_function (plc->sinkpad,
      GST_DEBUG_FUNCPTR (gst_span_plc_setcaps_sink));
  gst_pad_set_getcaps_function (plc->srcpad,
      GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
  gst_pad_set_getcaps_function (plc->sinkpad,
      GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
  gst_pad_set_chain_function (plc->sinkpad,
      GST_DEBUG_FUNCPTR (gst_span_plc_chain));
  gst_pad_set_event_function (plc->sinkpad,
      GST_DEBUG_FUNCPTR (gst_span_plc_event_sink));
  gst_element_add_pad (GST_ELEMENT (plc), plc->srcpad);
  gst_element_add_pad (GST_ELEMENT (plc), plc->sinkpad);

  /* The spandsp PLC state is only allocated on NULL->READY
   * (see gst_span_plc_change_state). */
  plc->plc_state = NULL;
  plc->last_stop = GST_CLOCK_TIME_NONE;

  GST_DEBUG_OBJECT (plc, "init complete");
}
/* GObject dispose: releases the spandsp PLC state, if any, then chains
 * up to the parent class.  Safe to run more than once. */
static void
gst_span_plc_dispose (GObject * object)
{
  GstSpanPlc *self = GST_SPAN_PLC (object);

  if (self->plc_state != NULL) {
    plc_free (self->plc_state);
  }
  self->plc_state = NULL;

  G_OBJECT_CLASS (parent_class)->dispose (object);
}
/* Drops the current spandsp PLC state and, when @renew is TRUE,
 * allocates a fresh one.  Also resets the tracked stream position. */
static void
gst_span_plc_flush (GstSpanPlc * plc, gboolean renew)
{
  if (plc->plc_state != NULL)
    plc_free (plc->plc_state);

  plc->plc_state = renew ? plc_init (NULL) : NULL;
  plc->last_stop = GST_CLOCK_TIME_NONE;
}
/* GstElement::change_state handler.
 *
 * Allocates the spandsp PLC state when going NULL->READY and releases
 * it again on READY->NULL, so the element can be reused after shutdown.
 *
 * Returns: the result of the parent class's change_state. */
static GstStateChangeReturn
gst_span_plc_change_state (GstElement * element, GstStateChange transition)
{
  GstSpanPlc *plc = GST_SPAN_PLC (element);
  GstStateChangeReturn ret;

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      /* (re)create the PLC state before any data can flow */
      gst_span_plc_flush (plc, TRUE);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_NULL:
      /* drop the PLC state; re-created on the next NULL->READY */
      gst_span_plc_flush (plc, FALSE);
      /* explicit break: the original fell through to the empty default,
       * which is harmless but trips -Wimplicit-fallthrough */
      break;
    default:
      break;
  }

  return ret;
}
/* Setcaps function on the sink pad.
 *
 * Proxies the negotiated caps to the source pad, caches the sample rate
 * (used for duration/size computations), and resets the PLC state since
 * the accumulated history is meaningless for a new format.
 *
 * NOTE(review): if the caps carry no "rate" field the previous
 * sample_rate value is silently kept -- presumably the caps are always
 * fully fixed here; confirm against negotiation.
 *
 * Returns: result of setting the caps on the source pad. */
static gboolean
gst_span_plc_setcaps_sink (GstPad * pad, GstCaps * caps)
{
  GstSpanPlc *plc = GST_SPAN_PLC (gst_pad_get_parent (pad));
  GstStructure *s = NULL;
  gboolean ret = FALSE;

  ret = gst_pad_set_caps (plc->srcpad, caps);
  s = gst_caps_get_structure (caps, 0);
  if (s) {
    gst_structure_get_int (s, "rate", &plc->sample_rate);
    GST_DEBUG_OBJECT (plc, "setcaps: got sample rate : %d", plc->sample_rate);
  }
  /* new caps invalidate any accumulated PLC history */
  gst_span_plc_flush (plc, TRUE);

  gst_object_unref (plc);
  return ret;
}
/* Chain function on the sink pad.
 *
 * Feeds every incoming buffer into the spandsp PLC history (plc_rx) so
 * concealment data can be synthesized later, tracks the expected end
 * position of the stream in last_stop, and pushes the buffer out on the
 * source pad.
 *
 * Returns: the flow return of the downstream push. */
static GstFlowReturn
gst_span_plc_chain (GstPad * pad, GstBuffer * buffer)
{
  GstSpanPlc *plc = GST_SPAN_PLC (GST_PAD_PARENT (pad));
  GstClockTime buffer_duration;

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    plc->last_stop = GST_BUFFER_TIMESTAMP (buffer);
  else
    GST_WARNING_OBJECT (plc, "Buffer has no timestamp!");

  if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
    buffer_duration = GST_BUFFER_DURATION (buffer);
  } else {
    GST_WARNING_OBJECT (plc, "Buffer has no duration!");
    /* Derive the duration from the number of 16-bit mono samples.
     * The original expression divided before scaling
     * ((size / (rate * 2)) * GST_SECOND), which truncates to whole
     * seconds and yields 0 for any buffer shorter than one second;
     * scale the sample count to nanoseconds instead. */
    buffer_duration =
        gst_util_uint64_scale (GST_BUFFER_SIZE (buffer) / sizeof (guint16),
        GST_SECOND, plc->sample_rate);
    GST_DEBUG_OBJECT (plc, "Buffer duration : %" GST_TIME_FORMAT,
        GST_TIME_ARGS (buffer_duration));
  }

  /* Only advance a valid position: adding to GST_CLOCK_TIME_NONE
   * (G_MAXUINT64) would wrap around to a bogus timestamp. */
  if (GST_CLOCK_TIME_IS_VALID (plc->last_stop))
    plc->last_stop += buffer_duration;

  /* plc_rx () modifies the data in place when smoothing the transition
   * after a concealed gap, so the buffer must be writable then. */
  if (plc->plc_state->missing_samples != 0)
    buffer = gst_buffer_make_writable (buffer);
  plc_rx (plc->plc_state, (int16_t *) GST_BUFFER_DATA (buffer),
      GST_BUFFER_SIZE (buffer) / 2);

  return gst_pad_push (plc->srcpad, buffer);
}
/* Pushes @duration worth of synthetic concealment audio, generated by
 * plc_fillin (), on the source pad starting at the current last_stop. */
static void
gst_span_plc_send_fillin (GstSpanPlc * plc, GstClockTime duration)
{
  guint buf_size;
  GstBuffer *buffer = NULL;

  /* Exact integer sample count.  The original used a 32-bit float
   * intermediate (((float) duration / GST_SECOND) * rate), which loses
   * precision for long durations; integer scaling is exact. */
  buf_size = gst_util_uint64_scale (duration, plc->sample_rate, GST_SECOND);
  buf_size *= sizeof (guint16);

  buffer = gst_buffer_new_and_alloc (buf_size);
  GST_DEBUG_OBJECT (plc, "Missing packet of %" GST_TIME_FORMAT
      " == %d bytes", GST_TIME_ARGS (duration), buf_size);
  plc_fillin (plc->plc_state, (int16_t *) GST_BUFFER_DATA (buffer),
      GST_BUFFER_SIZE (buffer) / 2);
  GST_BUFFER_TIMESTAMP (buffer) = plc->last_stop;
  GST_BUFFER_DURATION (buffer) = duration;
  gst_buffer_set_caps (buffer, GST_PAD_CAPS (plc->srcpad));
  gst_pad_push (plc->srcpad, buffer);
}
/* Sink pad event handler.
 *
 * For a TIME newsegment update, synthesizes concealment audio covering
 * the gap between the tracked last_stop and the new segment start, then
 * records the new position.  FLUSH_START resets the PLC state.  All
 * events except a non-TIME newsegment are forwarded downstream.
 *
 * Returns: result of pushing the event, or FALSE for a non-TIME
 * newsegment (which is dropped). */
static gboolean
gst_span_plc_event_sink (GstPad * pad, GstEvent * event)
{
  gboolean ret = FALSE;
  GstSpanPlc *plc = GST_SPAN_PLC (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (plc, "received event %s", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NEWSEGMENT:
    {
      GstFormat format;
      gdouble rate;
      gint64 start, stop, time;
      gboolean update;

      gst_event_parse_new_segment (event, &update, &rate, &format, &start,
          &stop, &time);
      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;
      if (update) {
        /* time progressed without data, see if we can fill the gap with
         * some concealment data */
        if (plc->last_stop < start)
          gst_span_plc_send_fillin (plc, start - plc->last_stop);
      }
      plc->last_stop = start;
      break;
    }
    case GST_EVENT_FLUSH_START:
      gst_span_plc_flush (plc, TRUE);
      break;
    default:
      break;
  }

  ret = gst_pad_push_event (plc->srcpad, event);
  gst_object_unref (plc);
  return ret;

newseg_wrong_format:
  {
    GST_DEBUG_OBJECT (plc, "received non TIME newsegment");
    /* We own the event and are dropping it instead of pushing it, so it
     * must be unreffed here (the original leaked it). */
    gst_event_unref (event);
    gst_object_unref (plc);
    return FALSE;
  }
}

60
ext/spandsp/gstspanplc.h Normal file
View file

@ -0,0 +1,60 @@
/*
* (C) 2011 Collabora Ltd.
* Contact: Youness Alaoui <youness.alaoui@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
#ifndef __GST_SPANDSP_H__
#define __GST_SPANDSP_H__

#include <gst/gst.h>
#include <spandsp.h>

G_BEGIN_DECLS

/* Standard GObject type and cast/check macros for the spanplc element. */
#define GST_TYPE_SPAN_PLC (gst_span_plc_get_type())
#define GST_SPAN_PLC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SPAN_PLC,GstSpanPlc))
#define GST_SPAN_PLC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SPAN_PLC,GstSpanPlcClass))
#define GST_IS_SPAN_PLC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SPAN_PLC))
#define GST_IS_SPAN_PLC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SPAN_PLC))

typedef struct _GstSpanPlc GstSpanPlc;
typedef struct _GstSpanPlcClass GstSpanPlcClass;

/* Instance structure of the spanplc element. */
struct _GstSpanPlc
{
  GstElement element;

  /* always pads created from the static templates */
  GstPad *sinkpad;
  GstPad *srcpad;

  /* <private> */
  /* spandsp PLC context; NULL while the element is in NULL state */
  plc_state_t *plc_state;
  /* expected end position of the last buffer seen, or GST_CLOCK_TIME_NONE */
  GstClockTime last_stop;
  /* sample rate taken from the sink caps */
  gint sample_rate;
};

struct _GstSpanPlcClass
{
  GstElementClass parent_class;
};

GType gst_span_plc_get_type (void);

G_END_DECLS

#endif

View file

@ -445,7 +445,11 @@ gst_base_camera_src_change_state (GstElement * element,
gst_camerabin_create_preview_pipeline (GST_ELEMENT_CAST (self),
self->preview_filter);
g_assert (self->preview_pipeline != NULL);
if (self->preview_pipeline == NULL) {
/* failed to create preview pipeline, fail state change */
return GST_STATE_CHANGE_FAILURE;
}
self->preview_filter_changed = FALSE;
if (self->preview_caps) {
GST_DEBUG_OBJECT (self,

View file

@ -146,7 +146,8 @@ gst_camerabin_create_preview_pipeline (GstElement * element,
}
g_object_set (data->appsrc, "emit-signals", FALSE, NULL);
g_object_set (data->appsink, "sync", FALSE, NULL);
g_object_set (data->appsink, "sync", FALSE, "enable-last-buffer",
FALSE, NULL);
gst_bin_add_many (GST_BIN (data->pipeline), data->appsrc, data->capsfilter,
data->appsink, csp, vscale, NULL);
@ -230,6 +231,8 @@ void
gst_camerabin_destroy_preview_pipeline (GstCameraBinPreviewPipelineData *
preview)
{
g_return_if_fail (preview != NULL);
if (preview->processing_lock) {
g_mutex_free (preview->processing_lock);
preview->processing_lock = NULL;

View file

@ -1,17 +1,27 @@
lib_LTLIBRARIES = libgstcodecparsers-@GST_MAJORMINOR@.la
libgstcodecparsers_@GST_MAJORMINOR@_la_SOURCES = \
gstmpegvideoparser.c gsth264parser.c
gstmpegvideoparser.c gsth264parser.c gstvc1parser.c
libgstcodecparsers_@GST_MAJORMINOR@includedir = \
$(includedir)/gstreamer-@GST_MAJORMINOR@/gst/codecparsers
libgstcodecparsers_@GST_MAJORMINOR@include_HEADERS = \
gstmpegvideoparser.h gsth264parser.h
gstmpegvideoparser.h gsth264parser.h gstvc1parser.h
libgstcodecparsers_@GST_MAJORMINOR@_la_CFLAGS = $(GST_PLUGINS_BAD_CFLAGS) $(GST_CFLAGS)
libgstcodecparsers_@GST_MAJORMINOR@_la_LIBADD = $(GST_BASE_LIBS) $(GST_LIBS)
libgstcodecparsers_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_ALL_LDFLAGS) $(GST_LT_LDFLAGS)
libgstcodecparsers_@GST_MAJORMINOR@_la_CFLAGS = \
$(GST_PLUGINS_BAD_CFLAGS) \
-DGST_USE_UNSTABLE_API \
$(GST_CFLAGS)
libgstcodecparsers_@GST_MAJORMINOR@_la_LIBADD = \
$(GST_BASE_LIBS) \
$(GST_LIBS)
libgstcodecparsers_@GST_MAJORMINOR@_la_LDFLAGS = \
$(GST_LIB_LDFLAGS) \
$(GST_ALL_LDFLAGS) \
$(GST_LT_LDFLAGS)
Android.mk: $(BUILT_SOURCES) Makefile.am
androgenizer -:PROJECT libgstcodecparsers -:STATIC libgstcodecparsers-@GST_MAJORMINOR@ \

View file

@ -32,25 +32,52 @@
* @short_description: Convenience library for h264 video
* bitstream parsing.
*
* It offers you basic parsing in AVC mode or not. Tp identify Nals in a bitstream and
* parse its basic headers, you should call:
* It offers you bitstream parsing in AVC mode or not. To identify Nals in a bitstream and
* parse its headers, you should call:
* <itemizedlist>
* <listitem>
* gst_h264_parser_identify_nalu to identify the following nalu in not AVC bitstreams
* #gst_h264_parser_identify_nalu to identify the following nalu in not AVC bitstreams
* </listitem>
* <listitem>
* gst_h264_parser_identify_nalu_avc to identify the following nalu in AVC bitstreams
* #gst_h264_parser_identify_nalu_avc to identify the nalu in AVC bitstreams
* </listitem>
* </itemizedlist>
*
* Then, depending on the #GstH264NalUnitType of the newly parsed #GstH264NalUnit, you should
* call the differents functions to parse the struct.
* call the different functions to parse the structure:
* <itemizedlist>
* <listitem>
* From #GST_H264_NAL_SLICE to #GST_H264_NAL_SLICE_IDR: #gst_h264_parser_parse_slice_hdr
* </listitem>
* <listitem>
* #GST_H264_NAL_SEI: #gst_h264_parser_parse_sei
* </listitem>
* <listitem>
* #GST_H264_NAL_SPS: #gst_h264_parser_parse_sps
* </listitem>
* <listitem>
* #GST_H264_NAL_PPS: #gst_h264_parser_parse_pps
* </listitem>
* <listitem>
* Any other: #gst_h264_parser_parse_nal
* </listitem>
* </itemizedlist>
*
* Note: You should always call gst_h264_parser_parse_nal if you don't actually need
* #GstH264NalUnitType to be parsed for your personnal use. This, to guarantee that the
* #GstH264NalUnitType to be parsed for your personal use, in order to guarantee that the
* #GstH264NalParser is always up to date.
*
* For more details about the structures, look at the ISO specifications.
* For more details about the structures, look at the ITU-T H.264 and ISO/IEC 14496-10 MPEG-4
* Part 10 specifications, you can download them from:
*
* <itemizedlist>
* <listitem>
* ITU-T H.264: http://www.itu.int/rec/T-REC-H.264
* </listitem>
* <listitem>
* ISO/IEC 14496-10: http://www.iso.org/iso/iso_catalogue/catalogue_tc/catalogue_detail.htm?csnumber=56538
* </listitem>
* </itemizedlist>
*/
#ifdef HAVE_CONFIG_H
@ -1064,7 +1091,7 @@ gst_h264_parser_parse_pic_timing (GstH264NalParser * nalparser,
guint i;
READ_UINT8 (nr, tim->pic_struct, 4);
CHECK_ALLOWED (tim->pic_struct, 0, 8);
CHECK_ALLOWED ((gint8) tim->pic_struct, 0, 8);
num_clock_num_ts = num_clock_ts_table[tim->pic_struct];
for (i = 0; i < num_clock_num_ts; i++) {
@ -1090,7 +1117,8 @@ error:
/**
* gst_h264_nal_parser_new:
*
* Creates a nez #GstH264NalParser
* Creates a new #GstH264NalParser. It should be freed with
* gst_h264_nal_parser_free after use.
*
* Returns: a new #GstH264NalParser
*/
@ -1099,22 +1127,36 @@ gst_h264_nal_parser_new (void)
{
GstH264NalParser *nalparser;
nalparser = g_malloc0 (sizeof (GstH264NalParser));
nalparser = g_slice_new0 (GstH264NalParser);
GST_DEBUG_CATEGORY_INIT (h264_parser_debug, "codecparsers_h264", 0,
"h264 parser library");
return nalparser;
}
/**
* gst_h264_nal_parser_free:
* @nalparser: the #GstH264NalParser to free
*
* Frees @nalparser and sets it to %NULL
*/
void
gst_h264_nal_parser_free (GstH264NalParser * nalparser)
{
g_slice_free (GstH264NalParser, nalparser);
nalparser = NULL;
}
/**
* gst_h264_parser_identify_nalu:
* @nalparser: a #GstH264NalParser
* @data: The data to parse
* @offset: the offset from which to parse @data
* @offset: the offset from which to parse @data
* @size: the size of @data
* @nalu: The #GstH264NalUnit where to store parsed nal headers
*
* Parses the buffer and set @nalu from the next nalu data from @data
* Parses @data and fills @nalu from the next nalu data from @data
*
* Returns: a #GstH264ParserResult
*/
@ -1125,8 +1167,8 @@ gst_h264_parser_identify_nalu (GstH264NalParser * nalparser,
gint off1, off2;
if (size - offset < 4) {
GST_DEBUG ("Can't parse, buffer has too small size %u, offset %u", size,
offset);
GST_DEBUG ("Can't parse, buffer has too small size %" G_GSSIZE_FORMAT
", offset %u", size, offset);
return GST_H264_PARSER_ERROR;
}
@ -1180,12 +1222,14 @@ gst_h264_parser_identify_nalu (GstH264NalParser * nalparser,
/**
* gst_h264_parser_identify_nalu_avc:
* @nalparser: a #GstH264NalParser
* @data: The data to parse, must be the beging of the Nal unit
* @offset: the offset from which to parse @data
* @size: the size of @data
* @nal_length_size: the size in bytes of the AVC nal length prefix.
* @nalu: The #GstH264NalUnit where to store parsed nal headers
*
* Parses the data and sets @nalu from @data.
* Parses @data and sets @nalu.
*
* Returns: a #GstH264ParserResult
*/
@ -1221,6 +1265,17 @@ gst_h264_parser_identify_nalu_avc (GstH264NalParser * nalparser,
return GST_H264_PARSER_OK;
}
/**
* gst_h264_parser_parse_nal:
* @nalparser: a #GstH264NalParser
* @nalu: The #GstH264NalUnit to parse
*
* This function should be called in the case one doesn't need to
* parse a specific structure. It is necessary to do so to make
* sure @nalparser is up to date.
*
* Returns: a #GstH264ParserResult
*/
GstH264ParserResult
gst_h264_parser_parse_nal (GstH264NalParser * nalparser, GstH264NalUnit * nalu)
{
@ -1241,11 +1296,11 @@ gst_h264_parser_parse_nal (GstH264NalParser * nalparser, GstH264NalUnit * nalu)
/**
* gst_h264_parser_parse_sps:
* @nalparser: a #GstH264NalParser
* @nalu: The #GST_H264_NAL_SPS #GstH264NalUnit you want to parse
* @slice: The #GstH264SPS to set.
* @nalu: The #GST_H264_NAL_SPS #GstH264NalUnit to parse
* @sps: The #GstH264SPS to fill.
* @parse_vui_params: Whether to parse the vui_params or not
*
* Parses the @data, and sets the @sps.
* Parses @data, and fills the @sps structure.
*
* Returns: a #GstH264ParserResult
*/
@ -1269,11 +1324,11 @@ gst_h264_parser_parse_sps (GstH264NalParser * nalparser, GstH264NalUnit * nalu,
/**
* gst_h264_parse_sps:
* @nalu: The #GST_H264_NAL_SPS #GstH264NalUnit you want to parse
* @slice: The #GstH264SPS to set.
* @nalu: The #GST_H264_NAL_SPS #GstH264NalUnit to parse
* @sps: The #GstH264SPS to fill.
* @parse_vui_params: Whether to parse the vui_params or not
*
* Parses the @data, and sets the @sps.
* Parses @data, and fills the @sps structure.
*
* Returns: a #GstH264ParserResult
*/
@ -1413,19 +1468,25 @@ gst_h264_parse_sps (GstH264NalUnit * nalu, GstH264SPS * sps,
sps->width = width;
sps->height = height;
/* derive framerate */
/* FIXME verify / also handle other cases */
GST_LOG ("Framerate: %u %u %u %u", parse_vui_params,
vui->fixed_frame_rate_flag, sps->frame_mbs_only_flag,
vui->pic_struct_present_flag);
if (vui) {
/* derive framerate */
/* FIXME verify / also handle other cases */
GST_LOG ("Framerate: %u %u %u %u", parse_vui_params,
vui->fixed_frame_rate_flag, sps->frame_mbs_only_flag,
vui->pic_struct_present_flag);
if (parse_vui_params && vui->fixed_frame_rate_flag &&
sps->frame_mbs_only_flag && !vui->pic_struct_present_flag) {
sps->fps_num = vui->time_scale;
sps->fps_den = vui->num_units_in_tick;
/* picture is a frame = 2 fields */
sps->fps_den *= 2;
GST_LOG ("framerate %d/%d", sps->fps_num, sps->fps_den);
if (parse_vui_params && vui->fixed_frame_rate_flag &&
sps->frame_mbs_only_flag && !vui->pic_struct_present_flag) {
sps->fps_num = vui->time_scale;
sps->fps_den = vui->num_units_in_tick;
/* picture is a frame = 2 fields */
sps->fps_den *= 2;
GST_LOG ("framerate %d/%d", sps->fps_num, sps->fps_den);
}
} else {
sps->fps_num = 0;
sps->fps_den = 1;
GST_LOG ("No VUI, unknown framerate");
}
sps->valid = TRUE;
@ -1441,12 +1502,10 @@ error:
/**
* gst_h264_parse_pps:
* @nalparser: a #GstH264NalParser
* @data: the data to parse
* @size: the size of @data
* @nalu: The #GST_H264_NAL_PPS #GstH264NalUnit you want to parse
* @slice: The #GstH264PPS to set.
* @nalu: The #GST_H264_NAL_PPS #GstH264NalUnit to parse
* @pps: The #GstH264PPS to fill.
*
* Parses the @data, and sets the @pps.
* Parses @data, and fills the @pps structure.
*
* Returns: a #GstH264ParserResult
*/
@ -1572,12 +1631,10 @@ error:
/**
* gst_h264_parser_parse_pps:
* @nalparser: a #GstH264NalParser
* @data: the data to parse
* @size: the size of @data
* @nalu: The #GST_H264_NAL_PPS #GstH264NalUnit you want to parse
* @slice: The #GstH264PPS to set.
* @nalu: The #GST_H264_NAL_PPS #GstH264NalUnit to parse
* @pps: The #GstH264PPS to fill.
*
* Parses the @data, and sets the @pps.
* Parses @data, and fills the @pps structure.
*
* Returns: a #GstH264ParserResult
*/
@ -1599,12 +1656,13 @@ gst_h264_parser_parse_pps (GstH264NalParser * nalparser,
/**
* gst_h264_parser_parse_slice_hdr:
* @nalu: The #GST_H264_NAL_SLICE #GstH264NalUnit you want to parse
* @slice: The #GstH264SliceHdr to set.
* @nalparser: a #GstH264NalParser
* @nalu: The #GST_H264_NAL_SLICE #GstH264NalUnit to parse
* @slice: The #GstH264SliceHdr to fill.
* @parse_pred_weight_table: Whether to parse the pred_weight_table or not
* @parse_dec_ref_pic_marking: Whether to parse the dec_ref_pic_marking or not
*
* Parses the @data, and sets the @slice.
* Parses @data, and fills the @slice structure.
*
* Returns: a #GstH264ParserResult
*/
@ -1776,10 +1834,10 @@ error:
/**
* gst_h264_parser_parse_sei:
* @nalparser: a #GstH264NalParser
* @nalu: The #GST_H264_NAL_SEI #GstH264NalUnit you want to parse
* @slice: The #GstH264SEIMessage to set.
* @nalu: The #GST_H264_NAL_SEI #GstH264NalUnit to parse
* @sei: The #GstH264SEIMessage to fill.
*
* Parses the @data, and sets the @pps.
* Parses @data, and fills the @sei structures.
*
* Returns: a #GstH264ParserResult
*/

View file

@ -30,6 +30,11 @@
#ifndef __GST_H264_PARSER_H__
#define __GST_H264_PARSER_H__
#ifndef GST_USE_UNSTABLE_API
#warning "The H.264 parsing library is unstable API and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif
#include <gst/gst.h>
G_BEGIN_DECLS
@ -45,19 +50,19 @@ G_BEGIN_DECLS
/**
* GstH264NalUnitType:
* @GST_H264_NAL_UNKNOWN: Unkonw nal type
* @GST_H264_NAL_UNKNOWN: Unknown nal type
* @GST_H264_NAL_SLICE: Slice nal
* @GST_H264_NAL_SLICE_DPA: DPA slice nal
* @GST_H264_NAL_SLICE_DPB: DPB slice nal
* @GST_H264_NAL_SLICE_DPC: DPC slice nal
* @GST_H264_NAL_SLICE_IDR: DPR slice nal
* @GST_H264_NAL_SEI: Supplemental enhancement information nal unit
* @GST_H264_NAL_SPS: Sequence parameter set nal unit
* @GST_H264_NAL_PPS: Picture parameter set nal unit
* @GST_H264_NAL_AU_DELIMITER: Access unit delimiter nal unit
* @GST_H264_NAL_SEI: Supplemental enhancement information (SEI) nal unit
* @GST_H264_NAL_SPS: Sequence parameter set (SPS) nal unit
* @GST_H264_NAL_PPS: Picture parameter set (PPS) nal unit
* @GST_H264_NAL_AU_DELIMITER: Access unit (AU) delimiter nal unit
* @GST_H264_NAL_SEQ_END: End of sequence nal unit
* @GST_H264_NAL_STREAM_END: End of stream nal unit
* @GST_H264_NAL_FILLER_DATA: Filler data na lunit
* @GST_H264_NAL_FILLER_DATA: Filler data nal unit
*
* Indicates the type of H264 Nal Units
*/
@ -81,13 +86,13 @@ typedef enum
/**
* GstH264ParserResult:
* @GST_H264_PARSER_OK: The parsing succeded
* @GST_H264_PARSER_BROKEN_DATA: The data we parsed where broken
* @GST_H264_PARSER_BROKEN_LINK: The link to a needed struct for the parsing couldn't be found
* @GST_H264_PARSER_BROKEN_DATA: The data to parse is broken
* @GST_H264_PARSER_BROKEN_LINK: The link to structure needed for the parsing couldn't be found
* @GST_H264_PARSER_ERROR: An error occurred when parsing
* @GST_H264_PARSER_NO_NAL: No nal found during the parsing
* @GST_H264_PARSER_NO_NAL_END: Start of the nal found, not the end.
* @GST_H264_PARSER_NO_NAL_END: Start of the nal found, but not the end.
*
* Information about how the parsing of a H264 elements went.
* The result of parsing H264 data.
*/
typedef enum
{
@ -101,11 +106,11 @@ typedef enum
/**
* GstH264SEIPayloadType:
* @GST_H264_SEI_BUF_PERIOD: The Sei Message contains a buffering period message
* @GST_H264_SEI_PIC_TIMING: The Sei Message contains a picture timing message
* @GST_H264_SEI_BUF_PERIOD: Buffering Period SEI Message
* @GST_H264_SEI_PIC_TIMING: Picture Timing SEI Message
* ...
*
* The type of the SEI message information
* The type of SEI message.
*/
typedef enum
{
@ -294,7 +299,7 @@ struct _GstH264HRDParams
* samples outside picture boundaries may be used in inter prediction
* @max_bytes_per_pic_denom: indicates a number of bytes not exceeded by the sum of the sizes of
* the VCL NAL units associated with any coded picture in the coded video sequence.
* @max_bits_per_mb_denom: indicates the maximum number of coded bits of macroblock_layer()
* @max_bits_per_mb_denom: indicates the maximum number of coded bits of macroblock_layer
* @log2_max_mv_length_horizontal: indicate the maximum absolute value of a decoded horizontal
* motion vector component
* @log2_max_mv_length_vertical: indicate the maximum absolute value of a decoded vertical
@ -363,7 +368,7 @@ struct _GstH264VUIParams
* @id: The ID of the sequence parameter set
* @profile_idc: indicate the profile to which the coded video sequence conforms
*
*
* H264 Sequence Parameter Set (SPS)
*/
struct _GstH264SPS
{
@ -429,6 +434,11 @@ struct _GstH264SPS
gboolean valid;
};
/**
* GstH264PPS:
*
* H264 Picture Parameter Set
*/
struct _GstH264PPS
{
gint id;

View file

@ -309,12 +309,12 @@ scan_for_start_codes (const GstByteReader * reader, guint offset, guint size)
/**
* gst_mpeg_video_parse:
* @data: The datas from which to parse
* @data: The data to parse
* @size: The size of @data
* @offset: The offset from which to start the parsing
* @offset: The offset from which to start parsing
*
* Parses @data, and detects the different packets types, offset,
* and size, starting from @offset
* Parses the MPEG 1/2 video bitstream contained in @data , and returns the
* detect packets as a list of #GstMpegVideoTypeOffsetSize.
*
* Returns: a #GList of #GstMpegVideoTypeOffsetSize
*/
@ -324,7 +324,8 @@ gst_mpeg_video_parse (guint8 * data, gsize size, guint offset)
gint off, rsize;
GstByteReader br;
GList *ret = NULL;
size = size - offset;
size -= offset;
if (!initialized) {
GST_DEBUG_CATEGORY_INIT (mpegvideo_parser_debug, "codecparsers_mpegvideo",
@ -373,12 +374,12 @@ gst_mpeg_video_parse (guint8 * data, gsize size, guint offset)
/**
* gst_mpeg_video_parse_sequence_header:
* @seqhdr: The #GstMpegVideoSequenceHdr to set
* @data: The datas from which to parse the seqhdr
* @seqhdr: (out): The #GstMpegVideoSequenceHdr structure to fill
* @data: The data from which to parse the sequence header
* @size: The size of @data
* @offset: The offset in byte from which to start parsing @data
*
* Sets the @seqhdr Mpeg Video Sequence Header structure members from @data
* Parses the @seqhdr Mpeg Video Sequence Header structure members from @data
*
* Returns: %TRUE if the seqhdr could be parsed correctly, %FALSE otherwize.
*/
@ -388,9 +389,11 @@ gst_mpeg_video_parse_sequence_header (GstMpegVideoSequenceHdr * seqhdr,
{
GstBitReader br;
size = size - offset;
g_return_val_if_fail (seqhdr != NULL, FALSE);
if (size - offset < 4)
size -= offset;
if (size < 4)
return FALSE;
gst_bit_reader_init (&br, &data[offset], size);
@ -400,12 +403,12 @@ gst_mpeg_video_parse_sequence_header (GstMpegVideoSequenceHdr * seqhdr,
/**
* gst_mpeg_video_parse_sequence_extension:
* @seqhdr: The #GstMpegVideoSequenceExt to set
* @data: The datas from which to parse the seqext
* @seqext: (out): The #GstMpegVideoSequenceExt structure to fill
* @data: The data from which to parse the sequence extension
* @size: The size of @data
* @offset: The offset in byte from which to start parsing @data
*
* Sets the @seqext Mpeg Video Sequence Extension structure members from @data
* Parses the @seqext Mpeg Video Sequence Extension structure members from @data
*
* Returns: %TRUE if the seqext could be parsed correctly, %FALSE otherwize.
*/
@ -415,7 +418,9 @@ gst_mpeg_video_parse_sequence_extension (GstMpegVideoSequenceExt * seqext,
{
GstBitReader br;
size = size - offset;
g_return_val_if_fail (seqext != NULL, FALSE);
size -= offset;
if (size < 6) {
GST_DEBUG ("not enough bytes to parse the extension");
@ -464,12 +469,12 @@ gst_mpeg_video_parse_sequence_extension (GstMpegVideoSequenceExt * seqext,
/**
* gst_mpeg_video_parse_quant_matrix_extension:
* @ext: The #GstMpegVideoQuantMatrixExt to set
* @data: The datas from which to parse @quant
* @quant: (out): The #GstMpegVideoQuantMatrixExt structure to fill
* @data: The data from which to parse the Quantization Matrix extension
* @size: The size of @data
* @offset: The offset in byte from which to start the parsing
*
* Sets the @quant Mpeg Video Quant Matrix Extension structure members from
* Parses the @quant Mpeg Video Quant Matrix Extension structure members from
* @data
*
* Returns: %TRUE if the quant matrix extension could be parsed correctly,
@ -482,7 +487,9 @@ gst_mpeg_video_parse_quant_matrix_extension (GstMpegVideoQuantMatrixExt * quant,
guint8 i;
GstBitReader br;
size = size - offset;
g_return_val_if_fail (quant != NULL, FALSE);
size -= offset;
if (size < 1) {
GST_DEBUG ("not enough bytes to parse the extension");
@ -537,12 +544,12 @@ failed:
/**
* gst_mpeg_video_parse_picture_extension:
* @ext: The #GstMpegVideoPictureExt to set
* @data: The datas from which to parse the ext
* @ext: (out): The #GstMpegVideoPictureExt structure to fill
* @data: The data from which to parse the picture extension
* @size: The size of @data
* @offset: The offset in byte from which to start the parsing
*
* Sets the @ext Mpeg Video Picture Extension structure members from @data
* Parse the @ext Mpeg Video Picture Extension structure members from @data
*
* Returns: %TRUE if the picture extension could be parsed correctly,
* %FALSE otherwize.
@ -553,7 +560,9 @@ gst_mpeg_video_parse_picture_extension (GstMpegVideoPictureExt * ext,
{
GstBitReader br;
size = size - offset;
g_return_val_if_fail (ext != NULL, FALSE);
size -= offset;
if (size < 4)
return FALSE;
@ -636,14 +645,15 @@ failed:
/**
* gst_mpeg_video_parse_picture_header:
* @hdr: The #GstMpegVideoPictureHdr to set
* @data: The datas from which to parse the hdr
* @hdr: (out): The #GstMpegVideoPictureHdr structure to fill
* @data: The data from which to parse the picture header
* @size: The size of @data
* @offset: The offset in byte from which to start the parsing
*
* Sets the @hdr Mpeg Video Picture Header structure members from @data
* Parsers the @hdr Mpeg Video Picture Header structure members from @data
*
* Returns: %TRUE if the picture sequence could be parsed correctly, %FALSE otherwize.
* Returns: %TRUE if the picture sequence could be parsed correctly, %FALSE
* otherwize.
*/
gboolean
gst_mpeg_video_parse_picture_header (GstMpegVideoPictureHdr * hdr,
@ -654,24 +664,26 @@ gst_mpeg_video_parse_picture_header (GstMpegVideoPictureHdr * hdr,
size = size - offset;
if (size < 4)
return FALSE;
goto failed;
gst_bit_reader_init (&br, &data[offset], size);
/* temperal sequence number */
if (!gst_bit_reader_get_bits_uint16 (&br, &hdr->tsn, 10))
return FALSE;
goto failed;
/* frame type */
if (!gst_bit_reader_get_bits_uint8 (&br, (guint8 *) & hdr->pic_type, 3))
return FALSE;
goto failed;
if (hdr->pic_type == 0 || hdr->pic_type > 4)
return FALSE; /* Corrupted picture packet */
goto failed; /* Corrupted picture packet */
/* skype VBV delay */
if (!gst_bit_reader_skip (&br, 8))
return FALSE;
goto failed;
if (hdr->pic_type == GST_MPEG_VIDEO_PICTURE_TYPE_P
|| hdr->pic_type == GST_MPEG_VIDEO_PICTURE_TYPE_B) {
@ -699,20 +711,19 @@ gst_mpeg_video_parse_picture_header (GstMpegVideoPictureHdr * hdr,
failed:
{
GST_WARNING ("Failed to parse sequence extension");
GST_WARNING ("Failed to parse picture header");
return FALSE;
}
}
/**
* gst_mpeg_video_parse_gop:
* @gop: The #GstMpegVideoGop to set
* @data: The datas from which to parse the gop
* @gop: (out): The #GstMpegVideoGop structure to fill
* @data: The data from which to parse the gop
* @size: The size of @data
* @offset: The offset in byte from which to start the parsing
*
*
* Sets the @gop Mpeg Video Group of Picture structure members from @data
* Parses the @gop Mpeg Video Group of Picture structure members from @data
*
* Returns: %TRUE if the gop could be parsed correctly, %FALSE otherwize.
*/
@ -722,7 +733,9 @@ gst_mpeg_video_parse_gop (GstMpegVideoGop * gop, guint8 * data,
{
GstBitReader br;
size = size - offset;
g_return_val_if_fail (gop != NULL, FALSE);
size -= offset;
if (size < 4)
return FALSE;
@ -745,7 +758,7 @@ gst_mpeg_video_parse_gop (GstMpegVideoGop * gop, guint8 * data,
READ_UINT8 (&br, gop->closed_gop, 1);
READ_UINT8 (&br, gop->broken_gop, 1);
READ_UINT8 (&br, gop->broken_link, 1);
return TRUE;

View file

@ -26,6 +26,11 @@
#ifndef __GST_MPEG_VIDEO_UTILS_H__
#define __GST_MPEG_VIDEO_UTILS_H__
#ifndef GST_USE_UNSTABLE_API
#warning "The Mpeg video parsing library is unstable API and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif
#include <gst/gst.h>
G_BEGIN_DECLS
@ -56,33 +61,44 @@ typedef enum {
GST_MPEG_VIDEO_PACKET_NONE = 0xff
} GstMpegVideoPacketTypeCode;
/**
* GST_MPEG_VIDEO_PACKET_IS_SLICE:
* @typecode: The MPEG video packet type code
*
* Checks whether a packet type code is a slice.
*
* Returns: %TRUE if the packet type code corresponds to a slice,
* else %FALSE.
*/
#define GST_MPEG_VIDEO_PACKET_IS_SLICE(typecode) ((typecode) >= GST_MPEG_VIDEO_PACKET_SLICE_MIN && \
(typecode) <= GST_MPEG_VIDEO_PACKET_SLICE_MAX)
/**
* GstMpegVideoPacketExtensionCode:
* @GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE: Sequence extension code
* @GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_DISPLAY: Display extension code
* @GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX: Quantizer extension code
* @GST_MPEG_VIDEO_PACKET_EXT_GOP: Group Of Picture extension code
* @GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_DISPLAY: Sequence Display extension code
* @GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX: Quantization Matrix extension code
* @GST_MPEG_VIDEO_PACKET_EXT_PICTURE: Picture coding extension
*
* Indicates what type of packets are in this
* block, some are mutually * exclusive though - ie, sequence packs are
* accumulated separately. GOP & Picture may occur together or separately
* Indicates what type of packets are in this block, some are mutually
* exclusive though - ie, sequence packs are accumulated separately. GOP &
* Picture may occur together or separately.
*/
typedef enum {
GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE = 0x01,
GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_DISPLAY = 0x02,
GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX = 0x03,
GST_MPEG_VIDEO_PACKET_EXT_GOP = 0x04,
GST_MPEG_VIDEO_PACKET_EXT_PICTURE = 0x08
} GstMpegVideoPacketExtensionCode;
/**
* GstMpegVideoLevel:
* @GST_MPEG_VIDEO_LEVEL_LOW: Level Low
* @GST_MPEG_VIDEO_LEVEL_MAIN: Level Main
* @GST_MPEG_VIDEO_LEVEL_HIGH_1440: Level High 1440
* @GST_MPEG_VIDEO_LEVEL_HIGH: Level High
* @GST_MPEG_VIDEO_LEVEL_LOW: Low level (LL)
* @GST_MPEG_VIDEO_LEVEL_MAIN: Main level (ML)
* @GST_MPEG_VIDEO_LEVEL_HIGH_1440: High 1440 level (H-14)
* @GST_MPEG_VIDEO_LEVEL_HIGH: High level (HL)
*
* Indicates the level in use
* Mpeg-2 Levels.
**/
typedef enum {
GST_MPEG_VIDEO_LEVEL_HIGH = 0x04,
@ -93,14 +109,14 @@ typedef enum {
/**
* GstMpegVideoProfile:
* @GST_MPEG_VIDEO_PROFILE_422,
* @GST_MPEG_VIDEO_PROFILE_HIGH,
* @GST_MPEG_VIDEO_PROFILE_SPATIALLY_SCALABLE,
* @GST_MPEG_VIDEO_PROFILE_SNR_SCALABLE,
* @GST_MPEG_VIDEO_PROFILE_MAIN,
* @GST_MPEG_VIDEO_PROFILE_SIMPLE,
* @GST_MPEG_VIDEO_PROFILE_422: 4:2:2 profile (422)
* @GST_MPEG_VIDEO_PROFILE_HIGH: High profile (HP)
* @GST_MPEG_VIDEO_PROFILE_SPATIALLY_SCALABLE: Spatially Scalable profile (Spatial)
* @GST_MPEG_VIDEO_PROFILE_SNR_SCALABLE: SNR Scalable profile (SNR)
* @GST_MPEG_VIDEO_PROFILE_MAIN: Main profile (MP)
* @GST_MPEG_VIDEO_PROFILE_SIMPLE: Simple profile (SP)
*
* Indicates the profile type in use
* Mpeg-2 Profiles.
**/
typedef enum {
GST_MPEG_VIDEO_PROFILE_422 = 0x00,
@ -112,13 +128,29 @@ typedef enum {
} GstMpegVideoProfile;
/**
* GstMpegVideoPictureType:
* @GST_MPEG_VIDEO_PICTURE_TYPE_I: Type I
* @GST_MPEG_VIDEO_PICTURE_TYPE_P: Type P
* @GST_MPEG_VIDEO_PICTURE_TYPE_B: Type B
* @GST_MPEG_VIDEO_PICTURE_TYPE_D: Type D
* GstMpegVideoChromaFormat:
* @GST_MPEG_VIDEO_CHROMA_RES: Invalid (reserved for future use)
* @GST_MPEG_VIDEO_CHROMA_420: 4:2:0 subsampling
* @GST_MPEG_VIDEO_CHROMA_422: 4:2:2 subsampling
* @GST_MPEG_VIDEO_CHROMA_444: 4:4:4 (non-subsampled)
*
* Indicates the type of picture
* Chroma subsampling type.
*/
typedef enum {
GST_MPEG_VIDEO_CHROMA_RES = 0x00,
GST_MPEG_VIDEO_CHROMA_420 = 0x01,
GST_MPEG_VIDEO_CHROMA_422 = 0x02,
GST_MPEG_VIDEO_CHROMA_444 = 0x03,
} GstMpegVideoChromaFormat;
/**
* GstMpegVideoPictureType:
* @GST_MPEG_VIDEO_PICTURE_TYPE_I: Intra-coded (I) frame
* @GST_MPEG_VIDEO_PICTURE_TYPE_P: Predictive-codec (P) frame
* @GST_MPEG_VIDEO_PICTURE_TYPE_B: Bidirectionally predictive-coded (B) frame
* @GST_MPEG_VIDEO_PICTURE_TYPE_D: D frame
*
* Picture type.
*/
typedef enum {
GST_MPEG_VIDEO_PICTURE_TYPE_I = 0x01,
@ -131,9 +163,9 @@ typedef enum {
* GstMpegVideoPictureStructure:
* @GST_MPEG_VIDEO_PICTURE_STRUCTURE_TOP_FIELD: Top field
* @GST_MPEG_VIDEO_PICTURE_STRUCTURE_BOTTOM_FIELD: Bottom field
* @GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME: Frame
* @GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME: Frame picture
*
* Indicates the structure of picture
* Picture structure type.
*/
typedef enum {
GST_MPEG_VIDEO_PICTURE_STRUCTURE_TOP_FIELD = 0x01,
@ -168,20 +200,20 @@ typedef struct _GstMpegVideoTypeOffsetSize GstMpegVideoTypeOffsetSize;
struct _GstMpegVideoSequenceHdr
{
guint16 width, height;
guint8 aspect_ratio_info;
guint8 frame_rate_code;
guint8 aspect_ratio_info;
guint8 frame_rate_code;
guint32 bitrate_value;
guint16 vbv_buffer_size_value;
guint8 constrained_parameters_flag;
guint8 constrained_parameters_flag;
guint8 intra_quantizer_matrix[64];
guint8 non_intra_quantizer_matrix[64];
guint8 intra_quantizer_matrix[64];
guint8 non_intra_quantizer_matrix[64];
/* Calculated values */
guint par_w, par_h;
guint fps_n, fps_d;
guint bitrate;
guint par_w, par_h;
guint fps_n, fps_d;
guint bitrate;
};
/**
@ -194,7 +226,7 @@ struct _GstMpegVideoSequenceHdr
* @vert_size_ext: Vertical size
* @bitrate_ext: The bitrate
* @vbv_buffer_size_extension: Vbv vuffer size
* @low_delay: %TRUE if the sequence doesn't contain any B-pitcture, %FALSE
* @low_delay: %TRUE if the sequence doesn't contain any B-pictures, %FALSE
* otherwize
* @fps_n_ext: Framerate nominator code
* @fps_d_ext: Framerate denominator code
@ -222,14 +254,14 @@ struct _GstMpegVideoSequenceExt
/**
* GstMpegVideoQuantMatrixExt:
* @load_intra_quantiser_matrix
* @intra_quantiser_matrix
* @load_non_intra_quantiser_matrix
* @load_intra_quantiser_matrix:
* @intra_quantiser_matrix:
* @load_non_intra_quantiser_matrix:
* @non_intra_quantiser_matrix:
* @load_chroma_intra_quantiser_matrix
* @chroma_intra_quantiser_matrix
* @load_chroma_non_intra_quantiser_matrix
* @chroma_non_intra_quantiser_matrix
* @load_chroma_intra_quantiser_matrix:
* @chroma_intra_quantiser_matrix:
* @load_chroma_non_intra_quantiser_matrix:
* @chroma_non_intra_quantiser_matrix:
*
* The Quant Matrix Extension structure
*/
@ -314,7 +346,7 @@ struct _GstMpegVideoPictureExt
* @second: Second (0-59)
* @frame: Frame (0-59)
* @closed_gop: Closed Gop
* @broken_gop: Broken Gop
* @broken_link: Broken link
*
* The Mpeg Video Group of Picture structure.
*/
@ -325,7 +357,7 @@ struct _GstMpegVideoGop
guint8 hour, minute, second, frame;
guint8 closed_gop;
guint8 broken_gop;
guint8 broken_link;
};
/**
@ -339,11 +371,11 @@ struct _GstMpegVideoGop
struct _GstMpegVideoTypeOffsetSize
{
guint8 type;
guint offset;
gint size;
guint offset;
gint size;
};
GList * gst_mpeg_video_parse (guint8 * data, gsize size, guint offset);
GList *gst_mpeg_video_parse (guint8 * data, gsize size, guint offset);
gboolean gst_mpeg_video_parse_sequence_header (GstMpegVideoSequenceHdr * params,
guint8 * data, gsize size, guint offset);

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,468 @@
/* Gstreamer
* Copyright (C) <2011> Intel
* Copyright (C) <2011> Collabora Ltd.
* Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_VC1_PARSER_H__
#define __GST_VC1_PARSER_H__
#ifndef GST_USE_UNSTABLE_API
#warning "The VC1 parsing library is unstable API and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif
#include <gst/gst.h>
G_BEGIN_DECLS
#define MAX_HRD_NUM_LEAKY_BUCKETS 31
/**
* @GST_VC1_BFRACTION_BASIS: The @bfraction variable should be divided
* by this constant to have the actual value.
*/
#define GST_VC1_BFRACTION_BASIS 256
typedef enum {
GST_VC1_END_OF_SEQ = 0x0A,
GST_VC1_SLICE = 0x0B,
GST_VC1_FIELD = 0x0C,
GST_VC1_FRAME = 0x0D,
GST_VC1_ENTRYPOINT = 0x0E,
GST_VC1_SEQUENCE = 0x0F,
GST_VC1_SLICE_USER = 0x1B,
GST_VC1_FIELD_USER = 0x1C,
GST_VC1_FRAME_USER = 0x1D,
GST_VC1_ENTRY_POINT_USER = 0x1E,
GST_VC1_SEQUENCE_USER = 0x1F
} GstVC1StartCode;
typedef enum {
GST_VC1_PROFILE_SIMPLE,
GST_VC1_PROFILE_MAIN,
GST_VC1_PROFILE_RESERVED,
GST_VC1_PROFILE_ADVANCED
} GstVC1Profile;
typedef enum {
GST_VC1_PARSER_OK,
GST_VC1_PARSER_BROKEN_DATA,
GST_VC1_PARSER_NO_BDU,
GST_VC1_PARSER_NO_BDU_END,
GST_VC1_PARSER_ERROR,
} GstVC1ParseResult;
typedef enum
{
GST_VC1_PICTURE_TYPE_P,
GST_VC1_PICTURE_TYPE_B,
GST_VC1_PICTURE_TYPE_I,
GST_VC1_PICTURE_TYPE_BI,
GST_VC1_PICTURE_TYPE_SKIPPED
} GstVC1PictureType;
typedef enum
{
GST_VC1_LEVEL_LOW = 0, /* Simple/Main profile low level */
GST_VC1_LEVELMEDIUM = 1, /* Simple/Main profile medium level */
GST_VC1_LEVELHIGH = 2, /* Main profile high level */
GST_VC1_LEVEL_L0 = 0, /* Advanced profile level 0 */
GST_VC1_LEVEL_L1 = 1, /* Advanced profile level 1 */
GST_VC1_LEVEL_L2 = 2, /* Advanced profile level 2 */
GST_VC1_LEVEL_L3 = 3, /* Advanced profile level 3 */
GST_VC1_LEVEL_L4 = 4, /* Advanced profile level 4 */
/* 5 to 7 reserved */
GST_VC1_LEVEL_UNKNOWN = 255 /* Unknown profile */
} GstVC1Level;
typedef enum
{
GST_VC1_QUANTIZER_IMPLICITLY,
GST_VC1_QUANTIZER_EXPLICITLY,
GST_VC1_QUANTIZER_NON_UNIFORM,
GST_VC1_QUANTIZER_UNIFORM
} GstVC1QuantizerSpec;
typedef enum {
GST_VC1_DQPROFILE_FOUR_EDGES,
GST_VC1_DQPROFILE_DOUBLE_EDGES,
GST_VC1_DQPROFILE_SINGLE_EDGE,
GST_VC1_DQPROFILE_ALL_MBS
} GstVC1DQProfile;
typedef enum {
GST_VC1_CONDOVER_NONE,
GST_VC1_CONDOVER_ALL,
GST_VC1_CONDOVER_SELECT
} GstVC1Condover;
/**
* GstVC1MvMode:
*
*/
typedef enum
{
GST_VC1_MVMODE_1MV_HPEL_BILINEAR,
GST_VC1_MVMODE_1MV,
GST_VC1_MVMODE_1MV_HPEL,
GST_VC1_MVMODE_MIXED_MV,
GST_VC1_MVMODE_INTENSITY_COMP
} GstVC1MvMode;
typedef struct _GstVC1SeqHdr GstVC1SeqHdr;
typedef struct _GstVC1AdvancedSeqHdr GstVC1AdvancedSeqHdr;
typedef struct _GstVC1SimpleMainSeqHdr GstVC1SimpleMainSeqHdr;
typedef struct _GstVC1HrdParam GstVC1HrdParam;
typedef struct _GstVC1EntryPointHdr GstVC1EntryPointHdr;
/* Pictures Structures */
typedef struct _GstVC1FrameHdr GstVC1FrameHdr;
typedef struct _GstVC1PicAdvanced GstVC1PicAdvanced;
typedef struct _GstVC1PicSimpleMain GstVC1PicSimpleMain;
typedef struct _GstVC1Picture GstVC1Picture;
typedef struct _GstVC1VopDquant GstVC1VopDquant;
typedef struct _GstVC1BDU GstVC1BDU;
struct _GstVC1HrdParam
{
guint8 hrd_num_leaky_buckets;
guint8 bit_rate_exponent;
guint8 buffer_size_exponent;
guint16 hrd_rate[MAX_HRD_NUM_LEAKY_BUCKETS];
guint16 hrd_buffer[MAX_HRD_NUM_LEAKY_BUCKETS];
};
/**
* GstVC1SimpleMainSeqHdr:
*
* Structure for simple and main profile sequence headers specific parameters.
*/
struct _GstVC1SimpleMainSeqHdr
{
guint8 res_sprite;
guint8 loop_filter;
guint8 multires;
guint8 fastuvmc;
guint8 extended_mv;
guint8 dquant;
guint8 vstransform;
guint8 overlap;
guint8 syncmarker;
guint8 rangered;
guint8 maxbframes;
guint8 quantizer;
/* This should be filled by user if previously known */
guint16 coded_width;
/* This should be filled by user if previously known */
guint16 coded_height;
/* Wmvp specific */
guint8 wmvp; /* Specify if the stream is wmp or not */
guint8 framerate;
guint8 slice_code;
};
/**
* GstVC1EntryPointHdr:
*
* Structure for entrypoint header, this will be used only in advanced profiles
*/
struct _GstVC1EntryPointHdr
{
guint8 broken_link;
guint8 closed_entry;
guint8 panscan_flag;
guint8 refdist_flag;
guint8 loopfilter;
guint8 fastuvmc;
guint8 extended_mv;
guint8 dquant;
guint8 vstransform;
guint8 overlap;
guint8 quantizer;
guint8 coded_size_flag;
guint16 coded_width;
guint16 coded_height;
guint8 extended_dmv;
guint8 range_mapy_flag;
guint8 range_mapy;
guint8 range_mapuv_flag;
guint8 range_mapuv;
guint8 hrd_full[MAX_HRD_NUM_LEAKY_BUCKETS];
};
/**
* GstVC1AdvancedSeqHdr:
*
* Structure for the advanced profile sequence headers specific parameters.
*/
struct _GstVC1AdvancedSeqHdr
{
guint8 level;
guint8 postprocflag;
guint16 max_coded_width;
guint16 max_coded_height;
guint8 pulldown;
guint8 interlace;
guint8 tfcntrflag;
guint8 psf;
guint8 display_ext;
guint16 disp_horiz_size;
guint16 disp_vert_size;
guint8 aspect_ratio_flag;
guint8 aspect_ratio;
guint8 aspect_horiz_size;
guint8 aspect_vert_size;
guint8 framerate_flag;
guint8 framerateind;
guint8 frameratenr;
guint8 frameratedr;
guint16 framerateexp;
guint8 color_format_flag;
guint8 color_prim;
guint8 transfer_char;
guint8 matrix_coef;
guint8 hrd_param_flag;
GstVC1HrdParam hrd_param;
/* The last parsed entry point */
GstVC1EntryPointHdr entrypoint;
};
/**
* GstVC1SeqHdr:
*
* Structure for sequence headers in any profile.
*/
struct _GstVC1SeqHdr
{
guint8 profiletype;
guint8 colordiff_format;
guint8 frmrtq_postproc;
guint8 bitrtq_postproc;
guint8 finterpflag;
/* calculated */
guint framerate; /* Around in fps, 0 if unknown*/
guint bitrate; /* Around in kpbs, 0 if unknown*/
union {
GstVC1AdvancedSeqHdr advanced;
GstVC1SimpleMainSeqHdr simplemain;
} profile;
};
/**
* GstVC1PicSimpleMain:
* @bfaction: Should be divided by #GST_VC1_BFRACTION_BASIS
* to get the real value.
*/
struct _GstVC1PicSimpleMain
{
guint8 frmcnt;
guint8 mvrange;
guint8 rangeredfrm;
/* I and P pic simple and main profiles only */
guint8 respic;
/* I and BI pic simple and main profiles only */
guint8 transacfrm2;
guint8 bf;
/* B and P pic simple and main profiles only */
guint8 mvmode;
guint8 mvtab;
guint8 ttmbf;
/* P pic simple and main profiles only */
guint8 mvmode2;
guint8 lumscale;
guint8 lumshift;
guint8 cbptab;
guint8 ttfrm;
/* B and BI picture only
* Should be divided by #GST_VC1_BFRACTION_BASIS
* to get the real value. */
guint8 bfraction;
/* Biplane value, those fields only mention the fact
* that the bitplane is in raw mode or not */
guint8 mvtypemb;
guint8 skipmb;
guint8 directmb; /* B pic main profile only */
};
/**
* GstVC1PicAdvanced:
* @bfaction: Should be divided by #GST_VC1_BFRACTION_BASIS
* to get the real value.
*/
struct _GstVC1PicAdvanced
{
guint8 fcm;
guint8 tfcntr;
guint8 rptfrm;
guint8 tff;
guint8 rff;
guint8 ps_present;
guint32 ps_hoffset;
guint32 ps_voffset;
guint16 ps_width;
guint16 ps_height;
guint8 rndctrl;
guint8 uvsamp;
guint8 postproc;
/* B and P picture specific */
guint8 mvrange;
guint8 mvmode;
guint8 mvtab;
guint8 cbptab;
guint8 ttmbf;
guint8 ttfrm;
/* B and BI picture only
* Should be divided by #GST_VC1_BFRACTION_BASIS
* to get the real value. */
guint8 bfraction;
/* ppic */
guint8 mvmode2;
guint8 lumscale;
guint8 lumshift;
/* bipic */
guint8 bf;
guint8 condover;
guint8 transacfrm2;
/* Biplane value, those fields only mention the fact
* that the bitplane is in raw mode or not */
guint8 acpred;
guint8 overflags;
guint8 mvtypemb;
guint8 skipmb;
guint8 directmb;
};
struct _GstVC1VopDquant
{
guint8 pqdiff;
guint8 abspq;
/* if dqant != 2*/
guint8 dquantfrm;
guint8 dqprofile;
/* if dqprofile == GST_VC1_DQPROFILE_SINGLE_EDGE
* or GST_VC1_DQPROFILE_DOUBLE_EDGE:*/
guint8 dqsbedge;
/* if dqprofile == GST_VC1_DQPROFILE_SINGLE_EDGE
* or GST_VC1_DQPROFILE_DOUBLE_EDGE:*/
guint8 dqbedge;
/* if dqprofile == GST_VC1_DQPROFILE_ALL_MBS */
guint8 dqbilevel;
};
/**
* GstVC1FrameHdr:
*
* Structure that represent picture in any profile or mode.
* You should look at @ptype and @profile to know what is currently
* in use.
*/
struct _GstVC1FrameHdr
{
/* common fields */
guint8 ptype;
guint8 interpfrm;
guint8 halfqp;
guint8 transacfrm;
guint8 transdctab;
guint8 pqindex;
guint8 pquantizer;
/* Computed */
guint8 pquant;
/* Convenience fields */
guint8 profile;
guint8 dquant;
/* If dquant */
GstVC1VopDquant vopdquant;
union {
GstVC1PicSimpleMain simple;
GstVC1PicAdvanced advanced;
} pic;
};
/**
* GstVC1BDU:
*
* Structure that represents a Bitstream Data Unit.
*/
struct _GstVC1BDU
{
GstVC1StartCode type;
guint size;
guint sc_offset;
guint offset;
guint8 * data;
};
GstVC1ParseResult gst_vc1_identify_next_bdu (const guint8 *data,
gsize size,
GstVC1BDU *bdu);
GstVC1ParseResult gst_vc1_parse_sequence_header (const guint8 *data,
gsize size,
GstVC1SeqHdr * seqhdr);
GstVC1ParseResult gst_vc1_parse_entry_point_header (const guint8 *data,
gsize size,
GstVC1EntryPointHdr * entrypoint,
GstVC1SeqHdr *seqhdr);
GstVC1ParseResult gst_vc1_parse_frame_header (const guint8 *data,
gsize size,
GstVC1FrameHdr * framehdr,
GstVC1SeqHdr *seqhdr);
G_END_DECLS
#endif

View file

@ -227,7 +227,6 @@ make ERROR_CFLAGS='' ERROR_CXXFLAGS=''
%{_libdir}/gstreamer-%{majorminor}/libgsth264parse.so
%{_libdir}/gstreamer-%{majorminor}/libgsthdvparse.so
%{_libdir}/gstreamer-%{majorminor}/libgstid3tag.so
%{_libdir}/gstreamer-%{majorminor}/libgstinvtelecine.so
%{_libdir}/gstreamer-%{majorminor}/libgstivfparse.so
%{_libdir}/gstreamer-%{majorminor}/libgstjpegformat.so
%{_libdir}/gstreamer-%{majorminor}/libgstlegacyresample.so
@ -274,9 +273,13 @@ make ERROR_CFLAGS='' ERROR_CXXFLAGS=''
%{_libdir}/gstreamer-%{majorminor}/libgstvideofiltersbad.so
%{_libdir}/gstreamer-%{majorminor}/libgstvideoparsersbad.so
%{_libdir}/gstreamer-%{majorminor}/libgsty4mdec.so
%{_libdir}/gstreamer-%{majorminor}//libgstopenal.so
%{_libdir}/gstreamer-%{majorminor}/libgstopenal.so
%{_libdir}/libgstbasecamerabinsrc-%{majorminor}.so.0
%{_libdir}/libgstbasecamerabinsrc-%{majorminor}.so.0.0.0
%{_libdir}/gstreamer-%{majorminor}/libgstaudiovisualizers.so
%{_libdir}/gstreamer-%{majorminor}/libgstfaceoverlay.so
%{_libdir}/gstreamer-%{majorminor}/libgstinter.so
%{_libdir}/gstreamer-%{majorminor}/libgstremovesilence.so
# System (Linux) specific plugins
%{_libdir}/gstreamer-%{majorminor}/libgstdvb.so
@ -343,10 +346,12 @@ make ERROR_CFLAGS='' ERROR_CXXFLAGS=''
# pkg-config files
%{_libdir}/pkgconfig/gstreamer-plugins-bad-%{majorminor}.pc
%{_libdir}/pkgconfig/gstreamer-codecparsers-0.10.pc
%files devel-docs
%defattr(-,root,root,-)
%doc %{_datadir}/gtk-doc/html/gst-plugins-bad-plugins-%{majorminor}
%doc %{_datadir}/gtk-doc/html/gst-plugins-bad-libs-%{majorminor}
%changelog
* Thu May 19 2011 Christian Schaller <christian.schaller@collabora.co.uk>

View file

@ -166,9 +166,15 @@
#include <gst/gst-i18n-plugin.h>
#include <gst/pbutils/pbutils.h>
#if GLIB_CHECK_VERSION(2,29,6)
#define gst_camerabin2_atomic_int_add g_atomic_int_add
#else
#define gst_camerabin2_atomic_int_add g_atomic_int_exchange_and_add
#endif
#define GST_CAMERA_BIN2_PROCESSING_INC(c) \
{ \
gint bef = g_atomic_int_exchange_and_add (&c->processing_counter, 1); \
gint bef = gst_camerabin2_atomic_int_add (&c->processing_counter, 1); \
if (bef == 0) \
g_object_notify (G_OBJECT (c), "idle"); \
GST_DEBUG_OBJECT ((c), "Processing counter incremented to: %d", \
@ -374,11 +380,6 @@ gst_camera_bin_start_capture (GstCameraBin2 * camerabin)
if (camerabin->audio_src) {
GstClock *clock = gst_pipeline_get_clock (GST_PIPELINE_CAST (camerabin));
/* FIXME We need to set audiosrc to null to make it resync the ringbuffer
* while bug https://bugzilla.gnome.org/show_bug.cgi?id=648359 isn't
* fixed */
gst_element_set_state (camerabin->audio_src, GST_STATE_NULL);
/* need to reset eos status (pads could be flushing) */
gst_element_set_state (camerabin->audio_capsfilter, GST_STATE_READY);
gst_element_set_state (camerabin->audio_volume, GST_STATE_READY);
@ -446,6 +447,14 @@ gst_camera_bin_stop_capture (GstCameraBin2 * camerabin)
if (camerabin->mode == MODE_VIDEO && camerabin->audio_src) {
camerabin->audio_drop_eos = FALSE;
gst_element_send_event (camerabin->audio_src, gst_event_new_eos ());
/* FIXME We need to set audiosrc to null to make it resync the ringbuffer
* while bug https://bugzilla.gnome.org/show_bug.cgi?id=648359 isn't
* fixed.
*
* Also, we set to NULL here to stop capturing audio through to the next
* video mode start capture. */
gst_element_set_state (camerabin->audio_src, GST_STATE_NULL);
}
}
@ -485,7 +494,12 @@ gst_camera_bin_src_notify_readyforcapture (GObject * obj, GParamSpec * pspec,
GST_DEBUG_OBJECT (camera, "Switching videobin location to %s", location);
g_object_set (camera->videosink, "location", location, NULL);
g_free (location);
gst_element_set_state (camera->videosink, GST_STATE_PLAYING);
if (gst_element_set_state (camera->videosink, GST_STATE_PLAYING) ==
GST_STATE_CHANGE_FAILURE) {
/* Resets the latest state change return, that would be a failure
* and could cause problems in a camerabin2 state change */
gst_element_set_state (camera->videosink, GST_STATE_NULL);
}
gst_element_set_state (camera->video_encodebin, GST_STATE_PLAYING);
gst_element_set_state (camera->videobin_capsfilter, GST_STATE_PLAYING);
}
@ -916,6 +930,8 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message)
if (gst_structure_has_name (structure, "GstMultiFileSink")) {
GST_CAMERA_BIN2_PROCESSING_DEC (GST_CAMERA_BIN2_CAST (bin));
filename = gst_structure_get_string (structure, "filename");
GST_DEBUG_OBJECT (bin, "Got file save message from multifilesink, "
"image %s has been saved", filename);
if (filename) {
gst_image_capture_bin_post_image_done (GST_CAMERA_BIN2_CAST (bin),
filename);
@ -930,6 +946,8 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message)
gst_message_parse_warning (message, &err, &debug);
if (err->domain == GST_RESOURCE_ERROR) {
/* some capturing failed */
GST_WARNING_OBJECT (bin, "Capture failed, reason: %s - %s",
err->message, debug);
GST_CAMERA_BIN2_PROCESSING_DEC (GST_CAMERA_BIN2_CAST (bin));
}
}
@ -1152,6 +1170,17 @@ gst_camera_bin_src_notify_max_zoom_cb (GObject * self, GParamSpec * pspec,
g_object_notify (G_OBJECT (camera), "max-zoom");
}
static void
gst_camera_bin_src_notify_zoom_cb (GObject * self, GParamSpec * pspec,
gpointer user_data)
{
GstCameraBin2 *camera = (GstCameraBin2 *) user_data;
g_object_get (self, "zoom", &camera->zoom, NULL);
GST_DEBUG_OBJECT (camera, "Zoom updated to %f", camera->zoom);
g_object_notify (G_OBJECT (camera), "zoom");
}
static gboolean
gst_camera_bin_image_src_buffer_probe (GstPad * pad, GstBuffer * buf,
gpointer data)
@ -1208,7 +1237,12 @@ gst_camera_bin_image_sink_event_probe (GstPad * pad, GstEvent * event,
GST_DEBUG_OBJECT (camerabin, "Setting filename to imagesink: %s",
filename);
g_object_set (camerabin->imagesink, "location", filename, NULL);
gst_element_set_state (camerabin->imagesink, GST_STATE_PLAYING);
if (gst_element_set_state (camerabin->imagesink, GST_STATE_PLAYING) ==
GST_STATE_CHANGE_FAILURE) {
/* Resets the latest state change return, that would be a failure
* and could cause problems in a camerabin2 state change */
gst_element_set_state (camerabin->imagesink, GST_STATE_NULL);
}
}
}
break;
@ -1495,6 +1529,8 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
"preview-caps", camera->preview_caps, "preview-filter",
camera->preview_filter, NULL);
}
g_signal_connect (G_OBJECT (camera->src), "notify::zoom",
(GCallback) gst_camera_bin_src_notify_zoom_cb, camera);
g_object_set (camera->src, "zoom", camera->zoom, NULL);
g_signal_connect (G_OBJECT (camera->src), "notify::max-zoom",
(GCallback) gst_camera_bin_src_notify_max_zoom_cb, camera);

View file

@ -1,6 +1,9 @@
/* GStreamer
* Copyright (C) 2010 Marc-Andre Lureau <marcandre.lureau@gmail.com>
* Copyright (C) 2010 Andoni Morales Alastruey <ylatuya@gmail.com>
* Copyright (C) 2011, Hewlett-Packard Development Company, L.P.
* Author: Youness Alaoui <youness.alaoui@collabora.co.uk>, Collabora Ltd.
* Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>, Collabora Ltd.
*
* Gsthlsdemux.c:
*
@ -310,6 +313,7 @@ gst_hls_demux_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_PAUSED_TO_READY:
demux->cancelled = TRUE;
gst_hls_demux_stop (demux);
gst_task_join (demux->task);
gst_hls_demux_reset (demux, FALSE);
break;
default:
@ -406,7 +410,8 @@ gst_hls_demux_src_event (GstPad * pad, GstEvent * event)
GST_M3U8_CLIENT_LOCK (demux->client);
GST_DEBUG_OBJECT (demux, "seeking to sequence %d", current_sequence);
demux->client->sequence = current_sequence;
demux->position = start;
gst_m3u8_client_get_current_position (demux->client, &demux->position);
demux->position_shift = start - demux->position;
demux->need_segment = TRUE;
GST_M3U8_CLIENT_UNLOCK (demux->client);
@ -462,6 +467,7 @@ gst_hls_demux_sink_event (GstPad * pad, GstEvent * event)
playlist = gst_hls_src_buf_to_utf8_playlist ((gchar *)
GST_BUFFER_DATA (demux->playlist), GST_BUFFER_SIZE (demux->playlist));
gst_buffer_unref (demux->playlist);
demux->playlist = NULL;
if (playlist == NULL) {
GST_WARNING_OBJECT (demux, "Error validating first playlist.");
} else if (!gst_m3u8_client_update (demux->client, playlist)) {
@ -573,9 +579,7 @@ gst_hls_demux_fetcher_sink_event (GstPad * pad, GstEvent * event)
GST_DEBUG_OBJECT (demux, "Got EOS on the fetcher pad");
/* signal we have fetched the URI */
if (!demux->cancelled) {
g_mutex_lock (demux->fetcher_lock);
g_cond_broadcast (demux->fetcher_cond);
g_mutex_unlock (demux->fetcher_lock);
}
}
default:
@ -665,7 +669,7 @@ gst_hls_demux_stop (GstHLSDemux * demux)
g_mutex_lock (demux->fetcher_lock);
gst_hls_demux_stop_fetcher_locked (demux, TRUE);
g_mutex_unlock (demux->fetcher_lock);
gst_task_join (demux->task);
gst_task_stop (demux->task);
gst_hls_demux_stop_update (demux);
}
@ -748,13 +752,15 @@ gst_hls_demux_loop (GstHLSDemux * demux)
demux->need_segment = TRUE;
}
if (demux->need_segment) {
GstClockTime start = demux->position + demux->position_shift;
/* And send a newsegment */
GST_DEBUG_OBJECT (demux, "Sending new-segment. Segment start:%"
GST_TIME_FORMAT, GST_TIME_ARGS (demux->position));
GST_DEBUG_OBJECT (demux, "Sending new-segment. segment start:%"
GST_TIME_FORMAT, GST_TIME_ARGS (start));
gst_pad_push_event (demux->srcpad,
gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, demux->position,
GST_CLOCK_TIME_NONE, demux->position));
gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
start, GST_CLOCK_TIME_NONE, start));
demux->need_segment = FALSE;
demux->position_shift = 0;
}
if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)))
@ -883,6 +889,7 @@ gst_hls_demux_reset (GstHLSDemux * demux, gboolean dispose)
g_queue_clear (demux->queue);
demux->position = 0;
demux->position_shift = 0;
demux->need_segment = TRUE;
}
@ -1034,6 +1041,7 @@ gst_hls_demux_cache_fragments (GstHLSDemux * demux)
demux->client->sequence -= demux->fragments_cache;
else
demux->client->sequence = 0;
gst_m3u8_client_get_current_position (demux->client, &demux->position);
GST_M3U8_CLIENT_UNLOCK (demux->client);
} else {
GstClockTime duration = gst_m3u8_client_get_duration (demux->client);

View file

@ -89,6 +89,7 @@ struct _GstHLSDemux
/* Position in the stream */
GstClockTime position;
GstClockTime position_shift;
gboolean need_segment;
};

View file

@ -468,13 +468,31 @@ _find_next (GstM3U8MediaFile * file, GstM3U8Client * client)
return TRUE;
}
void
gst_m3u8_client_get_current_position (GstM3U8Client * client,
GstClockTime * timestamp)
{
GList *l;
GList *walk;
l = g_list_find_custom (client->current->files, client,
(GCompareFunc) _find_next);
*timestamp = 0;
for (walk = client->current->files; walk; walk = walk->next) {
if (walk == l)
break;
*timestamp += GST_M3U8_MEDIA_FILE (walk->data)->duration;
}
*timestamp *= GST_SECOND;
}
gboolean
gst_m3u8_client_get_next_fragment (GstM3U8Client * client,
gboolean * discontinuity, const gchar ** uri, GstClockTime * duration,
GstClockTime * timestamp)
{
GList *l;
GList *walk;
GstM3U8MediaFile *file;
g_return_val_if_fail (client != NULL, FALSE);
@ -490,6 +508,8 @@ gst_m3u8_client_get_next_fragment (GstM3U8Client * client,
return FALSE;
}
gst_m3u8_client_get_current_position (client, timestamp);
file = GST_M3U8_MEDIA_FILE (l->data);
*discontinuity = client->sequence != file->sequence;
@ -498,14 +518,6 @@ gst_m3u8_client_get_next_fragment (GstM3U8Client * client,
*uri = file->uri;
*duration = file->duration * GST_SECOND;
*timestamp = 0;
for (walk = client->current->files; walk; walk = walk->next) {
if (walk == l)
break;
*timestamp += GST_M3U8_MEDIA_FILE (walk->data)->duration;
}
*timestamp *= GST_SECOND;
GST_M3U8_CLIENT_UNLOCK (client);
return TRUE;
}

View file

@ -84,6 +84,8 @@ void gst_m3u8_client_set_current (GstM3U8Client * client, GstM3U8 * m3u8);
gboolean gst_m3u8_client_get_next_fragment (GstM3U8Client * client,
gboolean * discontinuity, const gchar ** uri, GstClockTime * duration,
GstClockTime * timestamp);
void gst_m3u8_client_get_current_position (GstM3U8Client * client,
GstClockTime * timestamp);
GstClockTime gst_m3u8_client_get_duration (GstM3U8Client * client);
GstClockTime gst_m3u8_client_get_target_duration (GstM3U8Client * client);
const gchar *gst_m3u8_client_get_uri(GstM3U8Client * client);

View file

@ -417,7 +417,7 @@ gst_mpegts_demux_remove_pads (GstMpegTSDemux * demux)
#endif
static guint32 crc_tab[256] = {
static const guint32 crc_tab[256] = {
0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, 0x130476dc, 0x17c56b6b,
0x1a864db2, 0x1e475005, 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, 0x4c11db70, 0x48d0c6c7,

View file

@ -348,7 +348,7 @@ psmux_write_system_header (PsMux * mux)
bits_write (&bw, 24, PSMUX_START_CODE_PREFIX);
bits_write (&bw, 8, PSMUX_SYSTEM_HEADER);
bits_write (&bw, 16, len); /* header_length */
bits_write (&bw, 16, len - 6); /* header_length (bytes after this field) */
bits_write (&bw, 1, 1); /* marker */
bits_write (&bw, 22, mux->rate_bound); /* rate_bound */
bits_write (&bw, 1, 1); /* marker */

View file

@ -1,7 +1,11 @@
/*
* mpegtsbase.c -
* mpegtsbase.c -
* Copyright (C) 2007 Alessandro Decina
* 2010 Edward Hervey
* Copyright (C) 2011, Hewlett-Packard Development Company, L.P.
* Author: Youness Alaoui <youness.alaoui@collabora.co.uk>, Collabora Ltd.
* Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>, Collabora Ltd.
* Author: Edward Hervey <bilboed@bilboed.com>, Collabora Ltd.
*
* Authors:
* Alessandro Decina <alessandro@nnva.org>

View file

@ -2,6 +2,9 @@
* mpegtsbase.h - GStreamer MPEG transport stream base class
* Copyright (C) 2009 Edward Hervey <edward.hervey@collabora.co.uk>
* 2007 Alessandro Decina
* Copyright (C) 2011, Hewlett-Packard Development Company, L.P.
* Author: Youness Alaoui <youness.alaoui@collabora.co.uk>, Collabora Ltd.
* Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>, Collabora Ltd.
*
* Authors:
* Alessandro Decina <alessandro@nnva.org>

View file

@ -2,6 +2,10 @@
* tsdemux.c
* Copyright (C) 2009 Zaheer Abbas Merali
* 2010 Edward Hervey
* Copyright (C) 2011, Hewlett-Packard Development Company, L.P.
* Author: Youness Alaoui <youness.alaoui@collabora.co.uk>, Collabora Ltd.
* Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>, Collabora Ltd.
* Author: Edward Hervey <bilboed@bilboed.com>, Collabora Ltd.
*
* Authors:
* Zaheer Abbas Merali <zaheerabbas at merali dot org>

View file

@ -1027,7 +1027,7 @@ plugin_init (GstPlugin * plugin)
"MPEG Video Parser");
return gst_element_register (plugin, "legacympegvideoparse",
GST_RANK_PRIMARY, GST_TYPE_MPEGVIDEOPARSE);
GST_RANK_NONE, GST_TYPE_MPEGVIDEOPARSE);
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,

View file

@ -8,10 +8,10 @@ else
endif
libgstpcapparse_la_SOURCES = \
gstpcapparse.c
gstpcapparse.c gstirtspparse.c plugin.c
noinst_HEADERS = \
gstpcapparse.h
gstpcapparse.h gstirtspparse.h
libgstpcapparse_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS)
libgstpcapparse_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) $(WINSOCK2_LIBS)

View file

@ -0,0 +1,255 @@
/* GStreamer Interleaved RTSP parser
* Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
* Copyright (C) 2011 Nokia Corporation. All rights reserved.
* Contact: Stefan Kost <stefan.kost@nokia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/**
* SECTION:element-irtspparse
* @short_description: Interleaved RTSP parser
* @see_also: #GstPcapParse
*
* This is an interleaved RTSP parser that allows extracting specific
* so-called "channels" from received interleaved (TCP) RTSP data
* (typically extracted from some network capture).
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch-0.10 filesrc location=h264crasher.pcap ! pcapparse ! irtspparse
* ! rtph264depay ! ffdec_h264 ! fakesink
* ]| Read from a pcap dump file using filesrc, extract the raw TCP packets,
* depayload and decode them.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <string.h>
#include "gstirtspparse.h"
#include <gst/base/gstbytereader.h>
GST_DEBUG_CATEGORY_STATIC (irtsp_parse_debug);
#define GST_CAT_DEFAULT irtsp_parse_debug
enum
{
PROP_0,
PROP_CHANNEL_ID
};
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("application/x-rtp ; application/x-rtcp"));
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
static void gst_irtsp_parse_finalize (GObject * object);
static gboolean gst_irtsp_parse_start (GstBaseParse * parse);
static gboolean gst_irtsp_parse_stop (GstBaseParse * parse);
static gboolean gst_irtsp_parse_check_valid_frame (GstBaseParse * parse,
GstBaseParseFrame * frame, guint * size, gint * skipsize);
static GstFlowReturn gst_irtsp_parse_parse_frame (GstBaseParse * parse,
GstBaseParseFrame * frame);
static void gst_irtsp_parse_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_irtsp_parse_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
GST_BOILERPLATE (GstIRTSPParse, gst_irtsp_parse, GstBaseParse,
GST_TYPE_BASE_PARSE);
/* GstElement base_init: registers the static sink/src pad templates and
 * the element's human-readable details with the element class. */
static void
gst_irtsp_parse_base_init (gpointer klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&sink_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&src_template));

  gst_element_class_set_details_simple (element_class, "IRTSPParse",
      "Raw/Parser",
      "Parses a raw interleaved RTSP stream",
      "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
}
/* Class init: installs the "channel-id" property and wires up the
 * GObject property/finalize handlers and the GstBaseParse virtual
 * methods implemented by this element. */
static void
gst_irtsp_parse_class_init (GstIRTSPParseClass * klass)
{
  GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  GST_DEBUG_CATEGORY_INIT (irtsp_parse_debug, "irtspparse", 0,
      "Interleaved RTSP stream parser");

  object_class->finalize = gst_irtsp_parse_finalize;
  object_class->set_property = gst_irtsp_parse_set_property;
  object_class->get_property = gst_irtsp_parse_get_property;

  /* channel-id selects which interleaved channel (0-255) to extract */
  g_object_class_install_property (object_class, PROP_CHANNEL_ID,
      g_param_spec_int ("channel-id", "channel-id",
          "Channel Identifier", 0, 255,
          0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  parse_class->start = GST_DEBUG_FUNCPTR (gst_irtsp_parse_start);
  parse_class->stop = GST_DEBUG_FUNCPTR (gst_irtsp_parse_stop);
  parse_class->check_valid_frame =
      GST_DEBUG_FUNCPTR (gst_irtsp_parse_check_valid_frame);
  parse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_irtsp_parse_parse_frame);
}
/* Reset per-stream parser state.  Currently there is nothing to reset;
 * kept as a hook, called from instance init and from start. */
static void
gst_irtsp_parse_reset (GstIRTSPParse * IRTSPParse)
{
}
/* Instance init: ask baseparse for at least 4 bytes (the interleave
 * header size checked in check_valid_frame) and reset parser state. */
static void
gst_irtsp_parse_init (GstIRTSPParse * IRTSPParse, GstIRTSPParseClass * klass)
{
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (IRTSPParse), 4);
  gst_irtsp_parse_reset (IRTSPParse);
}
/* GObject finalize: no instance resources to release; chain up only. */
static void
gst_irtsp_parse_finalize (GObject * object)
{
  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* GstBaseParse::start: called when parsing begins; resets state.
 * Always succeeds. */
static gboolean
gst_irtsp_parse_start (GstBaseParse * parse)
{
  GstIRTSPParse *IRTSPParse = GST_IRTSP_PARSE (parse);

  GST_DEBUG_OBJECT (parse, "starting");

  gst_irtsp_parse_reset (IRTSPParse);

  return TRUE;
}
/* GstBaseParse::stop: called when parsing ends; nothing to clean up.
 * Always succeeds. */
static gboolean
gst_irtsp_parse_stop (GstBaseParse * parse)
{
  GST_DEBUG_OBJECT (parse, "stopping");

  return TRUE;
}
/* GstBaseParse::check_valid_frame implementation.
 *
 * An interleaved RTSP frame starts with a 4-byte header: the magic byte
 * 0x24 ('$'), the channel id, and a 16-bit big-endian payload length.
 * Scans the buffer for that sync pattern (masked to the first two bytes,
 * using the configured channel-id) and on a match at offset 0 reports the
 * full frame size (payload length + 4-byte header) via @framesize.
 * Otherwise sets @skipsize and returns FALSE so baseparse discards the
 * unusable bytes. */
static gboolean
gst_irtsp_parse_check_valid_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
{
  GstIRTSPParse *IRTSPParse = GST_IRTSP_PARSE (parse);
  GstBuffer *buf = frame->buffer;
  GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (buf);
  gint off;

  /* need at least the 4-byte interleave header */
  if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < 4))
    return FALSE;

  /* match 0x24 followed by our channel id in the top 16 bits of a word */
  off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffff0000,
      0x24000000 + (IRTSPParse->channel_id << 16), 0, GST_BUFFER_SIZE (buf));

  GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);

  /* didn't find anything that looks like a sync word, skip */
  if (off < 0) {
    /* keep the last 3 bytes: a sync word may straddle buffer boundaries */
    *skipsize = GST_BUFFER_SIZE (buf) - 3;
    return FALSE;
  }

  /* possible frame header, but not at offset 0? skip bytes before sync */
  if (off > 0) {
    *skipsize = off;
    return FALSE;
  }

  /* payload length is the big-endian 16-bit field at offset 2 */
  *framesize = GST_READ_UINT16_BE (GST_BUFFER_DATA (frame->buffer) + 2) + 4;
  GST_LOG_OBJECT (parse, "got frame size %d", *framesize);

  return TRUE;
}
/* GstBaseParse::parse_frame implementation: strips the 4-byte interleave
 * header in place and lets the remaining payload through as
 * application/x-rtp. */
static GstFlowReturn
gst_irtsp_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  /* HACK HACK skip header.
   * could also ask baseparse to skip this,
   * but that would give us a discontinuity for free
   * which is a bit too much to have on all our packets */
  GST_BUFFER_DATA (frame->buffer) += 4;
  GST_BUFFER_SIZE (frame->buffer) -= 4;

  /* set fixed output caps once, on the first outgoing frame */
  if (!GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (parse))) {
    GstCaps *caps;

    caps = gst_caps_new_simple ("application/x-rtp", NULL);
    gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
    gst_caps_unref (caps);
  }

  /* avoid flagging every header-stripped packet as a discontinuity
   * (see HACK note above) */
  GST_BUFFER_FLAG_UNSET (frame->buffer, GST_BUFFER_FLAG_DISCONT);

  return GST_FLOW_OK;
}
/* GObject set_property handler: stores the "channel-id" value on the
 * instance; warns on any unknown property id. */
static void
gst_irtsp_parse_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstIRTSPParse *self = GST_IRTSP_PARSE (object);

  if (prop_id == PROP_CHANNEL_ID)
    self->channel_id = g_value_get_int (value);
  else
    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
}
/* GObject get_property handler: reads back the "channel-id" value from
 * the instance; warns on any unknown property id. */
static void
gst_irtsp_parse_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstIRTSPParse *self = GST_IRTSP_PARSE (object);

  if (prop_id == PROP_CHANNEL_ID)
    g_value_set_int (value, self->channel_id);
  else
    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
}

View file

@ -0,0 +1,70 @@
/* GStreamer Interleaved RTSP parser
* Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
* Copyright (C) 2011 Nokia Corporation. All rights reserved.
* Contact: Stefan Kost <stefan.kost@nokia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_IRTSP_PARSE_H__
#define __GST_IRTSP_PARSE_H__
#include <gst/gst.h>
#include <gst/base/gstbaseparse.h>
G_BEGIN_DECLS
#define GST_TYPE_IRTSP_PARSE \
(gst_irtsp_parse_get_type())
#define GST_IRTSP_PARSE(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_IRTSP_PARSE, GstIRTSPParse))
#define GST_IRTSP_PARSE_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_IRTSP_PARSE, GstIRTSPParseClass))
#define GST_IS_IRTSP_PARSE(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_IRTSP_PARSE))
#define GST_IS_IRTSP_PARSE_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_IRTSP_PARSE))
typedef struct _GstIRTSPParse GstIRTSPParse;
typedef struct _GstIRTSPParseClass GstIRTSPParseClass;
/**
 * GstIRTSPParse:
 *
 * The opaque GstIRTSPParse object
 */
struct _GstIRTSPParse {
  GstBaseParse baseparse;

  /* interleaved RTSP channel to extract; exposed as the "channel-id"
   * property (valid range 0-255) */
  guint8 channel_id;

  /*< private >*/
};
/**
* GstIRTSPParseClass:
* @parent_class: Element parent class.
*
* The opaque GstIRTSPParseClass data structure.
*/
struct _GstIRTSPParseClass {
GstBaseParseClass baseparse_class;
};
GType gst_irtsp_parse_get_type (void);
G_END_DECLS
#endif /* __GST_IRTSP_PARSE_H__ */

View file

@ -607,17 +607,3 @@ gst_pcap_sink_event (GstPad * pad, GstEvent * event)
return ret;
}
static gboolean
plugin_init (GstPlugin * plugin)
{
return gst_element_register (plugin, "pcapparse",
GST_RANK_NONE, GST_TYPE_PCAP_PARSE);
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"pcapparse",
"Element parsing raw pcap streams",
plugin_init, VERSION, "LGPL", "GStreamer", "http://gstreamer.net/")

44
gst/pcapparse/plugin.c Normal file
View file

@ -0,0 +1,44 @@
/*
* Copyright 2007 Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstpcapparse.h"
#include "gstirtspparse.h"
/* Plugin entry point: registers both parser elements.  Each registration
 * is attempted regardless of the other's outcome (matching the original
 * behavior); the plugin loads successfully only if both succeeded. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  gboolean pcap_registered;
  gboolean irtsp_registered;

  pcap_registered = gst_element_register (plugin, "pcapparse",
      GST_RANK_NONE, GST_TYPE_PCAP_PARSE);
  irtsp_registered = gst_element_register (plugin, "irtspparse",
      GST_RANK_NONE, GST_TYPE_IRTSP_PARSE);

  return pcap_registered && irtsp_registered;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"pcapparse",
"Element parsing raw pcap streams",
plugin_init, VERSION, "LGPL", "GStreamer", "http://gstreamer.net/")

View file

@ -2,10 +2,12 @@ plugin_LTLIBRARIES = libgstrtpvp8.la
libgstrtpvp8_la_SOURCES = gstrtpvp8.c \
gstrtpvp8depay.c \
gstrtpvp8pay.c
gstrtpvp8pay.c \
dboolhuff.c
noinst_HEADERS = gstrtpvp8depay.h \
gstrtpvp8pay.h
gstrtpvp8pay.h \
dboolhuff.h
libgstrtpvp8_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) $(GST_CFLAGS)

View file

@ -0,0 +1,30 @@
Copyright (c) 2010, Google Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
* Neither the name of Google nor the names of its contributors may
be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

68
gst/rtpvp8/dboolhuff.c Normal file
View file

@ -0,0 +1,68 @@
/*
* Copyright (c) 2010 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the dboolhuff.LICENSE file in this directory.
* See the libvpx original distribution for more information,
* including patent information, and author information.
*/
#include "dboolhuff.h"
/* Renormalization lookup table: for a byte b >= 1, vp8_norm[b] is the
 * number of leading zero bits of b, i.e. the left-shift needed to bring
 * its top set bit to bit 7 during range renormalization; vp8_norm[0] is 0.
 * 16-byte aligned for efficient access. */
const unsigned char vp8_norm[256] __attribute__ ((aligned (16))) = {
  0, 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4,
  3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
/* Initialize the boolean (arithmetic) decoder over the byte range
 * [source, source + source_sz).  Returns 0 on success, 1 when a non-zero
 * size is given with a NULL source pointer. */
int
vp8dx_start_decode (BOOL_DECODER * br,
    const unsigned char *source, unsigned int source_sz)
{
  br->user_buffer_end = source + source_sz;
  br->user_buffer = source;
  br->value = 0;
  br->count = -8;               /* negative count forces a fill before any decode */
  br->range = 255;

  if (source_sz && !source)
    return 1;

  /* Populate the buffer */
  vp8dx_bool_decoder_fill (br);

  return 0;
}
/* Refill the decoder's bit buffer from the user's byte stream via the
 * shared VP8DX_BOOL_DECODER_FILL macro, then write the updated cursor,
 * value and count back into @br. */
void
vp8dx_bool_decoder_fill (BOOL_DECODER * br)
{
  const unsigned char *bufptr;
  const unsigned char *bufend;
  VP8_BD_VALUE value;
  int count;

  /* the macro operates on locals, updating them in place */
  bufend = br->user_buffer_end;
  bufptr = br->user_buffer;
  value = br->value;
  count = br->count;

  VP8DX_BOOL_DECODER_FILL (count, value, bufptr, bufend);

  br->user_buffer = bufptr;
  br->value = value;
  br->count = count;
}

151
gst/rtpvp8/dboolhuff.h Normal file
View file

@ -0,0 +1,151 @@
/*
* Copyright (c) 2010 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the dboolhuff.LICENSE file in this directory.
* See the libvpx original distribution for more information,
* including patent information, and author information.
*/
#ifndef DBOOLHUFF_H
#define DBOOLHUFF_H
#include <stddef.h>
#include <limits.h>
#include <glib.h>
typedef size_t VP8_BD_VALUE;
# define VP8_BD_VALUE_SIZE ((int)sizeof(VP8_BD_VALUE)*CHAR_BIT)
/*This is meant to be a large, positive constant that can still be efficiently
loaded as an immediate (on platforms like ARM, for example).
Even relatively modest values like 100 would work fine.*/
# define VP8_LOTS_OF_BITS (0x40000000)
typedef struct
{
const unsigned char *user_buffer_end;
const unsigned char *user_buffer;
VP8_BD_VALUE value;
int count;
unsigned int range;
} BOOL_DECODER;
extern const unsigned char vp8_norm[256] __attribute__((aligned(16)));
int vp8dx_start_decode(BOOL_DECODER *br,
const unsigned char *source,
unsigned int source_sz);
void vp8dx_bool_decoder_fill(BOOL_DECODER *br);
/*The refill loop is used in several places, so define it in a macro to make
sure they're all consistent.
An inline function would be cleaner, but has a significant penalty, because
multiple BOOL_DECODER fields must be modified, and the compiler is not smart
enough to eliminate the stores to those fields and the subsequent reloads
from them when inlining the function.*/
#define VP8DX_BOOL_DECODER_FILL(_count,_value,_bufptr,_bufend) \
do \
{ \
int shift = VP8_BD_VALUE_SIZE - 8 - ((_count) + 8); \
int loop_end, x; \
size_t bits_left = ((_bufend)-(_bufptr))*CHAR_BIT; \
\
x = shift + CHAR_BIT - bits_left; \
loop_end = 0; \
if(x >= 0) \
{ \
(_count) += VP8_LOTS_OF_BITS; \
loop_end = x; \
if(!bits_left) break; \
} \
while(shift >= loop_end) \
{ \
(_count) += CHAR_BIT; \
(_value) |= (VP8_BD_VALUE)*(_bufptr)++ << shift; \
shift -= CHAR_BIT; \
} \
} \
while(0) \
/* Decode one boolean from the arithmetic-coded stream.  'probability'
 * (scaled by 256) sizes the zero branch of the range split: the smaller
 * sub-range [0, split) decodes to 0, the remainder to 1. */
static int vp8dx_decode_bool(BOOL_DECODER *br, int probability) {
    unsigned int bit = 0;
    VP8_BD_VALUE value;
    unsigned int split;
    VP8_BD_VALUE bigsplit;
    int count;
    unsigned int range;

    /* split the current range proportionally to 'probability' */
    split = 1 + (((br->range - 1) * probability) >> 8);

    if(br->count < 0)
        vp8dx_bool_decoder_fill(br);

    value = br->value;
    count = br->count;

    /* align split with the top byte of the value register */
    bigsplit = (VP8_BD_VALUE)split << (VP8_BD_VALUE_SIZE - 8);

    range = split;

    if (value >= bigsplit)
    {
        /* value fell in the upper sub-range: decode a 1 */
        range = br->range - split;
        value = value - bigsplit;
        bit = 1;
    }

    {
        /* renormalize: shift until the range's top bit is set again */
        register unsigned int shift = vp8_norm[range];
        range <<= shift;
        value <<= shift;
        count -= shift;
    }

    br->value = value;
    br->count = count;
    br->range = range;

    return bit;
}
/* Decode an unsigned literal of 'bits' bits, MSB first, each bit read
 * with an even (0x80/256) probability split. */
static G_GNUC_UNUSED int vp8_decode_value(BOOL_DECODER *br, int bits)
{
    int z = 0;
    int bit;

    for (bit = bits - 1; bit >= 0; bit--)
    {
        z |= (vp8dx_decode_bool(br, 0x80) << bit);
    }

    return z;
}
/* Returns nonzero once bits have been requested past the end of the
 * input stream, 0 while decoding is still within valid data. */
static G_GNUC_UNUSED int vp8dx_bool_error(BOOL_DECODER *br)
{
    /* Check if we have reached the end of the buffer.
     *
     * Variable 'count' stores the number of bits in the 'value' buffer, minus
     * 8. The top byte is part of the algorithm, and the remainder is buffered
     * to be shifted into it. So if count == 8, the top 16 bits of 'value' are
     * occupied, 8 for the algorithm and 8 in the buffer.
     *
     * When reading a byte from the user's buffer, count is filled with 8 and
     * one byte is filled into the value buffer. When we reach the end of the
     * data, count is additionally filled with VP8_LOTS_OF_BITS. So when
     * count == VP8_LOTS_OF_BITS - 1, the user's data has been exhausted.
     */
    if ((br->count > VP8_BD_VALUE_SIZE) && (br->count < VP8_LOTS_OF_BITS))
    {
        /* We have tried to decode bits after the end of
         * stream was encountered.
         */
        return 1;
    }

    /* No error. */
    return 0;
}
#endif

View file

@ -25,6 +25,7 @@
#include <gst/base/gstbitreader.h>
#include <gst/rtp/gstrtppayloads.h>
#include <gst/rtp/gstrtpbuffer.h>
#include "dboolhuff.h"
#include "gstrtpvp8pay.h"
#define FI_FRAG_UNFRAGMENTED 0x0
@ -130,6 +131,8 @@ gst_rtp_vp8_pay_parse_frame (GstRtpVP8Pay * self, GstBuffer * buffer)
guint8 tmp8 = 0;
guint8 *data;
guint8 partitions;
guint offset;
BOOL_DECODER bc;
reader = gst_bit_reader_new_from_buffer (buffer);
@ -150,7 +153,8 @@ gst_rtp_vp8_pay_parse_frame (GstRtpVP8Pay * self, GstBuffer * buffer)
header_size = data[2] << 11 | data[1] << 3 | (data[0] >> 5);
/* Include the uncompressed data blob in the header */
header_size += keyframe ? 10 : 3;
offset = keyframe ? 10 : 3;
header_size += offset;
if (!gst_bit_reader_skip (reader, 24))
goto error;
@ -166,109 +170,81 @@ gst_rtp_vp8_pay_parse_frame (GstRtpVP8Pay * self, GstBuffer * buffer)
if (!gst_bit_reader_get_bits_uint8 (reader, &tmp8, 8) || tmp8 != 0x2a)
goto error;
/* Skip horizontal size code (16 bits) vertical size code (16 bits),
* color space (1 bit) and clamping type (1 bit) */
if (!gst_bit_reader_skip (reader, 34))
/* Skip horizontal size code (16 bits) vertical size code (16 bits) */
if (!gst_bit_reader_skip (reader, 32))
goto error;
}
offset = keyframe ? 10 : 3;
vp8dx_start_decode (&bc, GST_BUFFER_DATA (buffer) + offset,
GST_BUFFER_SIZE (buffer) - offset);
if (keyframe) {
/* color space (1 bit) and clamping type (1 bit) */
vp8dx_decode_bool (&bc, 0x80);
vp8dx_decode_bool (&bc, 0x80);
}
/* segmentation_enabled */
if (!gst_bit_reader_get_bits_uint8 (reader, &tmp8, 1))
goto error;
if (tmp8 != 0) {
gboolean update_mb_segmentation_map;
gboolean update_segment_feature_data;
if (!gst_bit_reader_get_bits_uint8 (reader, &tmp8, 2))
goto error;
update_mb_segmentation_map = (tmp8 & 0x2) != 0;
update_segment_feature_data = (tmp8 & 0x1) != 0;
if (vp8dx_decode_bool (&bc, 0x80)) {
guint8 update_mb_segmentation_map = vp8dx_decode_bool (&bc, 0x80);
guint8 update_segment_feature_data = vp8dx_decode_bool (&bc, 0x80);
if (update_segment_feature_data) {
/* skip segment feature mode */
if (!gst_bit_reader_skip (reader, 1))
goto error;
vp8dx_decode_bool (&bc, 0x80);
/* quantizer update */
for (i = 0; i < 4; i++) {
/* quantizer update */
if (!gst_bit_reader_get_bits_uint8 (reader, &tmp8, 1))
goto error;
if (tmp8 != 0) {
/* skip quantizer value (7 bits) and sign (1 bit) */
if (!gst_bit_reader_skip (reader, 8))
goto error;
}
/* skip flagged quantizer value (7 bits) and sign (1 bit) */
if (vp8dx_decode_bool (&bc, 0x80))
vp8_decode_value (&bc, 8);
}
/* loop filter update */
for (i = 0; i < 4; i++) {
/* loop filter update */
if (!gst_bit_reader_get_bits_uint8 (reader, &tmp8, 1))
goto error;
if (tmp8 != 0) {
/* skip lf update value (6 bits) and sign (1 bit) */
if (!gst_bit_reader_skip (reader, 7))
goto error;
}
/* skip flagged lf update value (6 bits) and sign (1 bit) */
if (vp8dx_decode_bool (&bc, 0x80))
vp8_decode_value (&bc, 7);
}
}
if (update_mb_segmentation_map) {
/* segment prob update */
for (i = 0; i < 3; i++) {
/* segment prob update */
if (!gst_bit_reader_get_bits_uint8 (reader, &tmp8, 1))
goto error;
if (tmp8 != 0) {
/* skip segment prob */
if (!gst_bit_reader_skip (reader, 8))
goto error;
}
/* skip flagged segment prob */
if (vp8dx_decode_bool (&bc, 0x80))
vp8_decode_value (&bc, 8);
}
}
}
/* skip filter type (1 bit), loop filter level (6 bits) and
* sharpness level (3 bits) */
if (!gst_bit_reader_skip (reader, 10))
goto error;
vp8_decode_value (&bc, 1);
vp8_decode_value (&bc, 6);
vp8_decode_value (&bc, 3);
/* loop_filter_adj_enabled */
if (!gst_bit_reader_get_bits_uint8 (reader, &tmp8, 1))
goto error;
if (vp8dx_decode_bool (&bc, 0x80)) {
if (tmp8 != 0) {
/* loop filter adj enabled */
/* mode_ref_lf_delta_update */
if (!gst_bit_reader_get_bits_uint8 (reader, &tmp8, 1))
goto error;
if (tmp8 != 0) {
/* mode_ref_lf_data_update */
int i;
/* delta update */
if (vp8dx_decode_bool (&bc, 0x80)) {
for (i = 0; i < 8; i++) {
/* 8 updates, 1 bit indicate whether there is one and if follow by a
* 7 bit update */
if (!gst_bit_reader_get_bits_uint8 (reader, &tmp8, 1))
goto error;
if (tmp8 != 0) {
/* skip delta magnitude (6 bits) and sign (1 bit) */
if (!gst_bit_reader_skip (reader, 7))
goto error;
}
if (vp8dx_decode_bool (&bc, 0x80))
vp8_decode_value (&bc, 7);
}
}
}
if (!gst_bit_reader_get_bits_uint8 (reader, &tmp8, 2))
if (vp8dx_bool_error (&bc))
goto error;
tmp8 = vp8_decode_value (&bc, 2);
partitions = 1 << tmp8;
/* Check if things are still sensible */

View file

@ -7,6 +7,7 @@ libgstvideoparsersbad_la_SOURCES = plugin.c \
libgstvideoparsersbad_la_CFLAGS = \
$(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) \
-DGST_USE_UNSTABLE_API \
$(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstvideoparsersbad_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
$(top_builddir)/gst-libs/gst/codecparsers/libgstcodecparsers-$(GST_MAJORMINOR).la \

View file

@ -165,7 +165,7 @@ dirac_sequence_header_parse (DiracSequenceHeader * header,
/* standard stuff */
static DiracSequenceHeader schro_video_formats[] = {
static const DiracSequenceHeader schro_video_formats[] = {
{0, 0, 0, 0,
0, /* custom */
640, 480, SCHRO_CHROMA_420,
@ -323,7 +323,7 @@ struct _SchroFrameRate
int denominator;
};
static SchroFrameRate schro_frame_rates[] = {
static const SchroFrameRate schro_frame_rates[] = {
{0, 0},
{24000, 1001},
{24, 1},

View file

@ -78,14 +78,19 @@ static GstStaticPadTemplate gst_dirac_parse_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-dirac, parsed=(boolean)FALSE")
GST_STATIC_CAPS ("video/x-dirac")
);
static GstStaticPadTemplate gst_dirac_parse_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-dirac, parsed=(boolean)TRUE")
GST_STATIC_CAPS ("video/x-dirac, parsed=(boolean)TRUE, "
"width=(int)[1,MAX], height=(int)[1,MAX], "
"framerate=(fraction)[0/1,MAX], "
"pixel-aspect-ratio=(fraction)[0/1,MAX], "
"interlaced=(boolean){TRUE,FALSE}, "
"profile=(int)[0,MAX], level=(int)[0,MAX]")
);
/* class initialization */

View file

@ -39,14 +39,13 @@ static GstStaticPadTemplate srctemplate =
GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-h263, variant = (string) itu, "
"parsed = (boolean) true")
"parsed = (boolean) true, framerate=(fraction)[0/1,MAX]")
);
static GstStaticPadTemplate sinktemplate =
GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-h263, variant = (string) itu, "
"parsed = (boolean) false")
GST_STATIC_CAPS ("video/x-h263, variant = (string) itu")
);
GST_BOILERPLATE (GstH263Parse, gst_h263_parse, GstElement, GST_TYPE_BASE_PARSE);
@ -102,7 +101,7 @@ gst_h263_parse_start (GstBaseParse * parse)
{
GstH263Parse *h263parse = GST_H263_PARSE (parse);
GST_DEBUG ("Start");
GST_DEBUG_OBJECT (h263parse, "start");
h263parse->bitrate = 0;
h263parse->profile = -1;
@ -118,7 +117,7 @@ gst_h263_parse_start (GstBaseParse * parse)
static gboolean
gst_h263_parse_stop (GstBaseParse * parse)
{
GST_DEBUG ("Stop");
GST_DEBUG_OBJECT (parse, "stop");
return TRUE;
}
@ -139,7 +138,7 @@ gst_h263_parse_sink_event (GstBaseParse * parse, GstEvent * event)
gst_event_parse_tag (event, &taglist);
if (gst_tag_list_get_uint (taglist, GST_TAG_BITRATE, &h263parse->bitrate))
GST_DEBUG ("Got bitrate tag: %u", h263parse->bitrate);
GST_DEBUG_OBJECT (h263parse, "got bitrate tag: %u", h263parse->bitrate);
break;
}
@ -201,7 +200,7 @@ gst_h263_parse_set_src_caps (GstH263Parse * h263parse,
if (sink_caps && (st = gst_caps_get_structure (sink_caps, 0)) &&
gst_structure_get_fraction (st, "framerate", &fr_num, &fr_denom)) {
/* Got it in caps - nothing more to do */
GST_DEBUG ("Sink caps override framerate from headers");
GST_DEBUG_OBJECT (h263parse, "sink caps override framerate from headers");
} else {
/* Caps didn't have the framerate - get it from params */
gst_h263_parse_get_framerate (params, &fr_num, &fr_denom);
@ -308,7 +307,8 @@ gst_h263_parse_check_valid_frame (GstBaseParse * parse,
/* XXX: After getting a keyframe, should we adjust min_frame_size to
* something smaller so we don't end up collecting too many non-keyframes? */
GST_DEBUG ("Found a frame of size %d at pos %d", *framesize, *skipsize);
GST_DEBUG_OBJECT (h263parse, "found a frame of size %d at pos %d",
*framesize, *skipsize);
return TRUE;

View file

@ -62,12 +62,14 @@ enum
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-h264, parsed = (boolean) false"));
GST_STATIC_CAPS ("video/x-h264"));
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-h264, parsed = (boolean) true"));
GST_STATIC_CAPS ("video/x-h264, parsed = (boolean) true, "
"stream-format=(string) { avc, byte-stream }, "
"alignment=(string) { au, nal }"));
GST_BOILERPLATE (GstH264Parse, gst_h264_parse, GstBaseParse,
GST_TYPE_BASE_PARSE);
@ -237,8 +239,7 @@ gst_h264_parse_stop (GstBaseParse * parse)
for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++)
gst_buffer_replace (&h264parse->pps_nals[i], NULL);
g_free (h264parse->nalparser);
h264parse->nalparser = NULL;
gst_h264_nal_parser_free (h264parse->nalparser);
return TRUE;
}
@ -338,7 +339,8 @@ gst_h264_parse_wrap_nal (GstH264Parse * h264parse, guint format, guint8 * data,
GstBuffer *buf;
const guint nl = h264parse->nal_length_size;
GST_DEBUG ("Nal length %d %d", size, h264parse->nal_length_size);
GST_DEBUG_OBJECT (h264parse, "nal length %d %d", size,
h264parse->nal_length_size);
buf = gst_buffer_new_and_alloc (size + nl + 4);
if (format == GST_H264_PARSE_FORMAT_AVC) {
@ -364,11 +366,11 @@ gst_h264_parser_store_nal (GstH264Parse * h264parse, guint id,
if (naltype == GST_H264_NAL_SPS) {
store_size = GST_H264_MAX_SPS_COUNT;
store = h264parse->sps_nals;
GST_DEBUG ("Storing sps %u", id);
GST_DEBUG_OBJECT (h264parse, "storing sps %u", id);
} else if (naltype == GST_H264_NAL_PPS) {
store_size = GST_H264_MAX_PPS_COUNT;
store = h264parse->pps_nals;
GST_DEBUG ("Storing pps %u", id);
GST_DEBUG_OBJECT (h264parse, "storing pps %u", id);
} else
return;
@ -476,8 +478,8 @@ gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
/* if we need to sneak codec NALs into the stream,
* this is a good place, so fake it as IDR
* (which should be at start anyway) */
GST_DEBUG ("Frame start: %i first_mb_in_slice %i", h264parse->frame_start,
slice.first_mb_in_slice);
GST_DEBUG_OBJECT (h264parse, "frame start: %i first_mb_in_slice %i",
h264parse->frame_start, slice.first_mb_in_slice);
if (G_LIKELY (!h264parse->push_codec))
break;
/* fall-through */
@ -486,7 +488,7 @@ gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
if (gst_h264_parser_parse_slice_hdr (nalparser, nalu,
&slice, FALSE, FALSE) == GST_H264_PARSER_ERROR)
return;
GST_DEBUG ("Frame start: %i first_mb_in_slice %i",
GST_DEBUG_OBJECT (h264parse, "frame start: %i first_mb_in_slice %i",
h264parse->frame_start, slice.first_mb_in_slice);
}
/* real frame data */
@ -494,15 +496,17 @@ gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
/* mark where config needs to go if interval expired */
/* mind replacement buffer if applicable */
if (h264parse->format == GST_H264_PARSE_FORMAT_AVC)
h264parse->idr_pos = gst_adapter_available (h264parse->frame_out);
else
h264parse->idr_pos = nalu->offset - 4;
GST_DEBUG_OBJECT (h264parse, "marking IDR in frame at offset %d",
h264parse->idr_pos);
if (h264parse->idr_pos == -1) {
if (h264parse->format == GST_H264_PARSE_FORMAT_AVC)
h264parse->idr_pos = gst_adapter_available (h264parse->frame_out);
else
h264parse->idr_pos = nalu->offset - 4;
GST_DEBUG_OBJECT (h264parse, "marking IDR in frame at offset %d",
h264parse->idr_pos);
}
GST_DEBUG ("first MB: %u, slice type: %u", slice.first_mb_in_slice,
slice.type);
GST_DEBUG_OBJECT (h264parse, "first MB: %u, slice type: %u",
slice.first_mb_in_slice, slice.type);
break;
default:
gst_h264_parser_parse_nal (nalparser, nalu);
@ -531,7 +535,7 @@ gst_h264_parse_collect_nal (GstH264Parse * h264parse, const guint8 * data,
GstH264NalUnitType nal_type = nalu->type;
GstH264NalUnit nnalu;
GST_DEBUG ("Parsing collecte nal");
GST_DEBUG_OBJECT (h264parse, "parsing collected nal");
parse_res = gst_h264_parser_identify_nalu (h264parse->nalparser, data,
nalu->offset + nalu->size, size, &nnalu);
@ -612,16 +616,18 @@ gst_h264_parse_check_valid_frame (GstBaseParse * parse,
drain = GST_BASE_PARSE_DRAINING (parse);
current_off = h264parse->current_off;
GST_DEBUG ("Last parse position %u", current_off);
GST_DEBUG_OBJECT (h264parse, "last parse position %u", current_off);
while (TRUE) {
switch (gst_h264_parser_identify_nalu (nalparser, data, current_off,
size, &nalu)) {
case GST_H264_PARSER_OK:
GST_DEBUG ("Complete nal found. %u Off: %u, Size: %u",
GST_DEBUG_OBJECT (h264parse, "complete nal found. "
"current offset: %u, Nal offset: %u, Nal Size: %u",
current_off, nalu.offset, nalu.size);
current_off = nalu.offset + nalu.size;
GST_DEBUG ("CURENT OFF. %u, %u", current_off, nalu.offset + nalu.size);
GST_DEBUG_OBJECT (h264parse, "current off. %u, %u", current_off,
nalu.offset + nalu.size);
if (!h264parse->nalu.size && !h264parse->nalu.valid)
h264parse->nalu = nalu;
break;
@ -643,15 +649,15 @@ gst_h264_parse_check_valid_frame (GstBaseParse * parse,
if (nalu.sc_offset == h264parse->nalu.sc_offset) {
*skipsize = nalu.offset;
GST_DEBUG ("Skiping broken nal");
GST_DEBUG_OBJECT (h264parse, "skipping broken nal");
return FALSE;
} else {
nalu.size = 0;
goto end;
}
case GST_H264_PARSER_NO_NAL_END:
GST_DEBUG ("Not a complete nal found at offset %u", nalu.offset);
GST_DEBUG_OBJECT (h264parse, "not a complete nal found at offset %u",
nalu.offset);
current_off = nalu.sc_offset;
/* We keep the reference to this nal so we start over the parsing
@ -660,8 +666,8 @@ gst_h264_parse_check_valid_frame (GstBaseParse * parse,
h264parse->nalu = nalu;
if (drain) {
GST_DEBUG ("Drainning NAL %u %u %u", size, h264parse->nalu.offset,
h264parse->nalu.size);
GST_DEBUG_OBJECT (h264parse, "drainning NAL %u %u %u", size,
h264parse->nalu.offset, h264parse->nalu.size);
/* Can't parse the nalu */
if (size - h264parse->nalu.offset < 2) {
*skipsize = nalu.offset;
@ -677,8 +683,8 @@ gst_h264_parse_check_valid_frame (GstBaseParse * parse,
current_off = nalu.offset + nalu.size;
GST_DEBUG ("%p Complete nal found. Off: %u, Size: %u", data, nalu.offset,
nalu.size);
GST_DEBUG_OBJECT (h264parse, "%p complete nal found. Off: %u, Size: %u",
data, nalu.offset, nalu.size);
gst_h264_parse_process_nal (h264parse, &nalu);
if (gst_h264_parse_collect_nal (h264parse, data, size, &nalu) || drain)
@ -697,7 +703,7 @@ end:
return TRUE;
parsing_error:
GST_DEBUG ("Error parsing Nal Unit");
GST_DEBUG_OBJECT (h264parse, "error parsing Nal Unit");
more:
/* ask for best next available */

View file

@ -46,8 +46,7 @@ static GstStaticPadTemplate sink_template =
GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/mpeg, "
"mpegversion = (int) [1, 2], "
"parsed = (boolean) false, " "systemstream = (boolean) false")
"mpegversion = (int) [1, 2], " "systemstream = (boolean) false")
);
/* Properties */
@ -72,6 +71,8 @@ static gboolean gst_mpegv_parse_check_valid_frame (GstBaseParse * parse,
static GstFlowReturn gst_mpegv_parse_parse_frame (GstBaseParse * parse,
GstBaseParseFrame * frame);
static gboolean gst_mpegv_parse_set_caps (GstBaseParse * parse, GstCaps * caps);
static GstFlowReturn gst_mpegv_parse_pre_push_frame (GstBaseParse * parse,
GstBaseParseFrame * frame);
static void gst_mpegv_parse_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
@ -165,6 +166,8 @@ gst_mpegv_parse_class_init (GstMpegvParseClass * klass)
GST_DEBUG_FUNCPTR (gst_mpegv_parse_check_valid_frame);
parse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_mpegv_parse_parse_frame);
parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_mpegv_parse_set_caps);
parse_class->pre_push_frame =
GST_DEBUG_FUNCPTR (gst_mpegv_parse_pre_push_frame);
}
static void
@ -188,6 +191,7 @@ gst_mpegv_parse_reset (GstMpegvParse * mpvparse)
gst_mpegv_parse_reset_frame (mpvparse);
mpvparse->profile = 0;
mpvparse->update_caps = TRUE;
mpvparse->send_codec_tag = TRUE;
gst_buffer_replace (&mpvparse->config, NULL);
memset (&mpvparse->sequencehdr, 0, sizeof (mpvparse->sequencehdr));
@ -237,13 +241,12 @@ gst_mpegv_parse_process_config (GstMpegvParse * mpvparse, GstBuffer * buf,
if (!gst_mpeg_video_parse_sequence_header (&mpvparse->sequencehdr, data,
GST_BUFFER_SIZE (buf) - mpvparse->seq_offset, 0)) {
GST_DEBUG_OBJECT (mpvparse,
"failed to parse config data (size %" G_GSSIZE_FORMAT ") at offset %d",
"failed to parse config data (size %d) at offset %d",
size, mpvparse->seq_offset);
return FALSE;
}
GST_LOG_OBJECT (mpvparse, "accepting parsed config size %" G_GSSIZE_FORMAT,
size);
GST_LOG_OBJECT (mpvparse, "accepting parsed config size %d", size);
/* Set mpeg version, and parse sequence extension */
if (mpvparse->mpeg_version <= 0) {
@ -490,15 +493,23 @@ end:
} else if (GST_BASE_PARSE_DRAINING (parse)) {
*framesize = GST_BUFFER_SIZE (buf);
ret = TRUE;
} else {
/* resume scan where we left it */
mpvparse->last_sc = GST_BUFFER_SIZE (buf);
if (!mpvparse->last_sc)
*skipsize = mpvparse->last_sc = GST_BUFFER_SIZE (buf) - 3;
else if (mpvparse->typeoffsize)
mpvparse->last_sc = GST_BUFFER_SIZE (buf) - 3;
else
*skipsize = 0;
/* request best next available */
*framesize = G_MAXUINT;
ret = FALSE;
}
g_list_free_full (mpvparse->typeoffsize, (GDestroyNotify) g_free);
g_list_foreach (mpvparse->typeoffsize, (GFunc) g_free, NULL);
g_list_free (mpvparse->typeoffsize);
mpvparse->typeoffsize = NULL;
return ret;
@ -654,6 +665,37 @@ gst_mpegv_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
return GST_FLOW_OK;
}
static GstFlowReturn
gst_mpegv_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);
GstTagList *taglist;
/* tag sending done late enough in hook to ensure pending events
* have already been sent */
if (G_UNLIKELY (mpvparse->send_codec_tag)) {
gchar *codec;
/* codec tag */
codec = g_strdup_printf ("MPEG %d Video", mpvparse->mpeg_version);
taglist = gst_tag_list_new ();
gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
GST_TAG_VIDEO_CODEC, codec, NULL);
g_free (codec);
gst_element_found_tags_for_pad (GST_ELEMENT (mpvparse),
GST_BASE_PARSE_SRC_PAD (mpvparse), taglist);
mpvparse->send_codec_tag = FALSE;
}
/* usual clipping applies */
frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;
return GST_FLOW_OK;
}
static gboolean
gst_mpegv_parse_set_caps (GstBaseParse * parse, GstCaps * caps)
{

View file

@ -56,6 +56,7 @@ struct _GstMpegvParse {
gint seq_offset;
gint pic_offset;
gboolean update_caps;
gboolean send_codec_tag;
GstBuffer *config;
guint8 profile;

View file

@ -33,13 +33,13 @@ plugin_init (GstPlugin * plugin)
gboolean ret;
ret = gst_element_register (plugin, "h263parse",
GST_RANK_NONE, GST_TYPE_H263_PARSE);
GST_RANK_PRIMARY + 1, GST_TYPE_H263_PARSE);
ret = gst_element_register (plugin, "h264parse",
GST_RANK_NONE, GST_TYPE_H264_PARSE);
GST_RANK_PRIMARY + 1, GST_TYPE_H264_PARSE);
ret = gst_element_register (plugin, "diracparse",
GST_RANK_NONE, GST_TYPE_DIRAC_PARSE);
ret = gst_element_register (plugin, "mpegvideoparse",
GST_RANK_NONE, GST_TYPE_MPEGVIDEO_PARSE);
GST_RANK_PRIMARY + 1, GST_TYPE_MPEGVIDEO_PARSE);
return ret;
}

View file

@ -185,6 +185,7 @@ check_PROGRAMS = \
elements/rtpmux \
libs/mpegvideoparser \
libs/h264parser \
libs/vc1parser \
$(check_schro) \
$(check_vp8) \
elements/viewfinderbin \
@ -218,6 +219,7 @@ elements_h264parse_LDADD = libparser.la $(LDADD)
libs_mpegvideoparser_CFLAGS = \
$(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) \
-DGST_USE_UNSTABLE_API \
$(GST_BASE_CFLAGS) $(GST_CFLAGS) $(AM_CFLAGS)
libs_mpegvideoparser_LDADD = \
@ -227,6 +229,7 @@ libs_mpegvideoparser_LDADD = \
libs_h264parser_CFLAGS = \
$(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) \
-DGST_USE_UNSTABLE_API \
$(GST_BASE_CFLAGS) $(GST_CFLAGS) $(AM_CFLAGS)
libs_h264parser_LDADD = \
@ -234,6 +237,16 @@ libs_h264parser_LDADD = \
$(GST_PLUGINS_BAD_LIBS) -lgstcodecparsers-@GST_MAJORMINOR@ \
$(GST_BASE_LIBS) $(GST_LIBS) $(LDADD)
libs_vc1parser_CFLAGS = \
$(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) \
-DGST_USE_UNSTABLE_API \
$(GST_BASE_CFLAGS) $(GST_CFLAGS) $(AM_CFLAGS)
libs_vc1parser_LDADD = \
$(top_builddir)/gst-libs/gst/codecparsers/libgstcodecparsers-@GST_MAJORMINOR@.la \
$(GST_PLUGINS_BAD_LIBS) -lgstcodecparsers-@GST_MAJORMINOR@ \
$(GST_BASE_LIBS) $(GST_LIBS) $(LDADD)
elements_voaacenc_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) $(GST_CFLAGS) $(AM_CFLAGS)

View file

@ -145,7 +145,7 @@ GST_START_TEST (test_h264_parse_slice_dpa)
assert_equals_int (res, GST_H264_PARSER_OK);
assert_equals_int (nalu.type, GST_H264_NAL_SLICE_DPA);
g_free (parser);
gst_h264_nal_parser_free (parser);
}
GST_END_TEST;

1230
tests/check/libs/vc1parser.c Normal file

File diff suppressed because it is too large Load diff