Merge branch 'master' into 0.11

Conflicts:
	configure.ac
	ext/kate/gstkateenc.c
	gst/colorspace/colorspace.c
	gst/mpegvideoparse/mpegvideoparse.c
commit a2a4300241
Author: Sebastian Dröge
Date:   2012-01-25 13:22:43 +01:00

62 changed files with 2074 additions and 776 deletions

View file

@ -63,7 +63,8 @@ libamrwb (for AMR-WB support)
(http://www.penguin.cz/~utx/amr)
libkate (for Kate support)
(http://libkate.googlecode.com/)
librtmp (for RTMP support)
(http://rtmpdump.mplayerhq.hu/)
Optional (debian) packages:
===========================

View file

@ -71,6 +71,8 @@ AG_GST_GETTEXT([gst-plugins-bad-$GST_MAJORMINOR])
dnl *** check for arguments to configure ***
AG_GST_ARG_DISABLE_FATAL_WARNINGS
AG_GST_ARG_DEBUG
AG_GST_ARG_PROFILING
AG_GST_ARG_VALGRIND
@ -208,9 +210,11 @@ AM_CONDITIONAL(HAVE_GST_CHECK, test "x$HAVE_GST_CHECK" = "xyes")
AG_GST_CHECK_GST_PLUGINS_BASE($GST_MAJORMINOR, [$GSTPB_REQ], yes)
dnl check for uninstalled plugin directories for unit tests
AG_GST_CHECK_GST_PLUGINS_GOOD($GST_MAJORMINOR, [0.11.0])
AG_GST_CHECK_GST_PLUGINS_UGLY($GST_MAJORMINOR, [0.11.0])
AG_GST_CHECK_GST_PLUGINS_FFMPEG($GST_MAJORMINOR, [0.11.0])
AG_GST_CHECK_UNINSTALLED_SETUP([
AG_GST_CHECK_GST_PLUGINS_GOOD($GST_MAJORMINOR, [0.11.0])
AG_GST_CHECK_GST_PLUGINS_UGLY($GST_MAJORMINOR, [0.11.0])
AG_GST_CHECK_GST_PLUGINS_FFMPEG($GST_MAJORMINOR, [0.11.0])
])
dnl Check for documentation xrefs
GLIB_PREFIX="`$PKG_CONFIG --variable=prefix glib-2.0`"
@ -288,14 +292,14 @@ AG_GST_SET_PACKAGE_RELEASE_DATETIME_WITH_NANO([$PACKAGE_VERSION_NANO],
dnl define an ERROR_CFLAGS Makefile variable
dnl -Waggregate-return - libexif returns aggregates
dnl -Wundef - Windows headers check _MSC_VER unconditionally
AG_GST_SET_ERROR_CFLAGS($GST_GIT, [
AG_GST_SET_ERROR_CFLAGS($FATAL_WARNINGS, [
-Wmissing-declarations -Wmissing-prototypes -Wredundant-decls
-Wwrite-strings -Wformat-security -Wold-style-definition
-Winit-self -Wmissing-include-dirs -Waddress -Wno-multichar
-Wnested-externs])
dnl define an ERROR_CXXFLAGS Makefile variable
AG_GST_SET_ERROR_CXXFLAGS($GST_GIT, [
AG_GST_SET_ERROR_CXXFLAGS($FATAL_WARNINGS, [
-Wmissing-declarations -Wredundant-decls
-Wwrite-strings -Wformat-nonliteral -Wformat-security
-Winit-self -Wmissing-include-dirs -Waddress -Wno-multichar ])
@ -755,6 +759,16 @@ AG_GST_CHECK_FEATURE(CELT, [celt], celt, [
AC_SUBST(CELT_LIBS)
])
dnl *** chromaprint ***
translit(dnm, m, l) AM_CONDITIONAL(USE_CHROMAPRINT, true)
AG_GST_CHECK_FEATURE(CHROMAPRINT, [chromaprint], chromaprint, [
PKG_CHECK_MODULES(CHROMAPRINT, libchromaprint, HAVE_CHROMAPRINT="yes", [
HAVE_CHROMAPRINT="no"
])
AC_SUBST(CHROMAPRINT_CFLAGS)
AC_SUBST(CHROMAPRINT_LIBS)
])
dnl *** Cog ***
translit(dnm, m, l) AM_CONDITIONAL(USE_COG, true)
AG_GST_CHECK_FEATURE(COG, [Cog plugin], cog, [
@ -1712,7 +1726,7 @@ AG_GST_CHECK_FEATURE(VDPAU, [VDPAU], vdpau, [
dnl *** schroedinger ***
translit(dnm, m, l) AM_CONDITIONAL(USE_SCHRO, true)
AG_GST_CHECK_FEATURE(SCHRO, [Schroedinger video codec], schro, [
AG_GST_PKG_CHECK_MODULES(SCHRO, schroedinger-1.0 >= 1.0.7)
AG_GST_PKG_CHECK_MODULES(SCHRO, schroedinger-1.0 >= 1.0.10)
])
dnl *** zbar ***
@ -1800,6 +1814,7 @@ AM_CONDITIONAL(USE_APEXSINK, false)
AM_CONDITIONAL(USE_BZ2, false)
AM_CONDITIONAL(USE_CDAUDIO, false)
AM_CONDITIONAL(USE_CELT, false)
AM_CONDITIONAL(USE_CHROMAPRINT, false)
AM_CONDITIONAL(USE_COG, false)
AM_CONDITIONAL(USE_CURL, false)
AM_CONDITIONAL(USE_DC1394, false)
@ -2056,6 +2071,7 @@ ext/apexsink/Makefile
ext/bz2/Makefile
ext/cdaudio/Makefile
ext/celt/Makefile
ext/chromaprint/Makefile
ext/cog/Makefile
ext/curl/Makefile
ext/dc1394/Makefile

View file

@ -58,6 +58,12 @@ else
CELT_DIR=
endif
if USE_CHROMAPRINT
CHROMAPRINT_DIR=chromaprint
else
CHROMAPRINT_DIR=
endif
if USE_COG
COG_DIR=cog
else
@ -397,6 +403,7 @@ SUBDIRS=\
$(BZ2_DIR) \
$(CDAUDIO_DIR) \
$(CELT_DIR) \
$(CHROMAPRINT_DIR) \
$(COG_DIR) \
$(CURL_DIR) \
$(DC1394_DIR) \
@ -456,6 +463,7 @@ DIST_SUBDIRS = \
bz2 \
cdaudio \
celt \
chromaprint \
cog \
curl \
dc1394 \

View file

@ -0,0 +1,14 @@
plugin_LTLIBRARIES = libgstchromaprint.la
libgstchromaprint_la_SOURCES = gstchromaprint.c gstchromaprint.h
libgstchromaprint_la_CFLAGS = $(GST_PLUGINS_BAD_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) \
$(CHROMAPRINT_CFLAGS)
libgstchromaprint_la_LIBADD = \
$(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(GST_LIBS) \
$(CHROMAPRINT_LIBS)
libgstchromaprint_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstchromaprint_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = gstchromaprint.h

View file

@ -0,0 +1,317 @@
/* GStreamer chromaprint audio fingerprinting element
* Copyright (C) 2006 M. Derezynski
* Copyright (C) 2008 Eric Buehl
* Copyright (C) 2008 Sebastian Dröge <slomo@circular-chaos.org>
* Copyright (C) 2011 Lukáš Lalinský <lalinsky@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/**
* SECTION:element-chromaprint
*
* The chromaprint element calculates an acoustic fingerprint for an
* audio stream which can be used to identify a song and look up
* further metadata from the <ulink url="http://acoustid.org/">Acoustid</ulink>
* and Musicbrainz databases.
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch -m uridecodebin uri=file:///path/to/song.ogg ! audioconvert ! chromaprint ! fakesink
* ]|
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "gstchromaprint.h"
#define DEFAULT_MAX_DURATION 120
#define PAD_CAPS \
"audio/x-raw-int, " \
"rate = (int) [ 1, MAX ], " \
"channels = (int) [ 1, 2 ], " \
"endianness = (int) { BYTE_ORDER }, " \
"width = (int) { 16 }, " \
"depth = (int) { 16 }, " \
"signed = (boolean) true"
GST_DEBUG_CATEGORY_STATIC (gst_chromaprint_debug);
#define GST_CAT_DEFAULT gst_chromaprint_debug
enum
{
PROP_0,
PROP_FINGERPRINT,
PROP_MAX_DURATION
};
GST_BOILERPLATE (GstChromaprint, gst_chromaprint, GstElement,
GST_TYPE_AUDIO_FILTER);
static void gst_chromaprint_finalize (GObject * object);
static void gst_chromaprint_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_chromaprint_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstFlowReturn gst_chromaprint_transform_ip (GstBaseTransform * trans,
GstBuffer * buf);
static gboolean gst_chromaprint_event (GstBaseTransform * trans,
GstEvent * event);
static void
gst_chromaprint_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstAudioFilterClass *audio_filter_class = (GstAudioFilterClass *) g_class;
GstCaps *caps;
gst_element_class_set_details_simple (element_class,
"Chromaprint fingerprinting element",
"Filter/Analyzer/Audio",
"Find an audio fingerprint using the Chromaprint library",
"Lukáš Lalinský <lalinsky@gmail.com>");
caps = gst_caps_from_string (PAD_CAPS);
gst_audio_filter_class_add_pad_templates (audio_filter_class, caps);
gst_caps_unref (caps);
}
static void
gst_chromaprint_class_init (GstChromaprintClass * klass)
{
GObjectClass *gobject_class;
GstBaseTransformClass *gstbasetrans_class;
gobject_class = G_OBJECT_CLASS (klass);
gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (klass);
gobject_class->set_property = gst_chromaprint_set_property;
gobject_class->get_property = gst_chromaprint_get_property;
/* FIXME: do we need this in addition to the tag message ? */
g_object_class_install_property (gobject_class, PROP_FINGERPRINT,
g_param_spec_string ("fingerprint", "Resulting fingerprint",
"Resulting fingerprint", NULL, G_PARAM_READABLE));
g_object_class_install_property (gobject_class, PROP_MAX_DURATION,
g_param_spec_uint ("duration", "Duration limit",
"Number of seconds of audio to use for fingerprinting",
0, G_MAXUINT, DEFAULT_MAX_DURATION,
G_PARAM_READABLE | G_PARAM_WRITABLE));
gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_chromaprint_finalize);
gstbasetrans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_chromaprint_transform_ip);
gstbasetrans_class->event = GST_DEBUG_FUNCPTR (gst_chromaprint_event);
gstbasetrans_class->passthrough_on_same_caps = TRUE;
}
static void
gst_chromaprint_reset (GstChromaprint * chromaprint)
{
if (chromaprint->fingerprint) {
chromaprint_dealloc (chromaprint->fingerprint);
chromaprint->fingerprint = NULL;
}
chromaprint->nsamples = 0;
chromaprint->duration = 0;
chromaprint->record = TRUE;
}
static void
gst_chromaprint_create_fingerprint (GstChromaprint * chromaprint)
{
GstTagList *tags;
if (chromaprint->duration <= 3)
return;
GST_DEBUG_OBJECT (chromaprint,
"Generating fingerprint based on %d seconds of audio",
chromaprint->duration);
chromaprint_finish (chromaprint->context);
chromaprint_get_fingerprint (chromaprint->context, &chromaprint->fingerprint);
chromaprint->record = FALSE;
tags = gst_tag_list_new_full (GST_TAG_CHROMAPRINT_FINGERPRINT,
chromaprint->fingerprint, NULL);
gst_element_found_tags (GST_ELEMENT (chromaprint), tags);
}
static void
gst_chromaprint_init (GstChromaprint * chromaprint,
GstChromaprintClass * gclass)
{
gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (chromaprint), TRUE);
chromaprint->context = chromaprint_new (CHROMAPRINT_ALGORITHM_DEFAULT);
chromaprint->fingerprint = NULL;
chromaprint->max_duration = DEFAULT_MAX_DURATION;
gst_chromaprint_reset (chromaprint);
}
static void
gst_chromaprint_finalize (GObject * object)
{
GstChromaprint *chromaprint = GST_CHROMAPRINT (object);
chromaprint->record = FALSE;
if (chromaprint->context) {
chromaprint_free (chromaprint->context);
chromaprint->context = NULL;
}
if (chromaprint->fingerprint) {
chromaprint_dealloc (chromaprint->fingerprint);
chromaprint->fingerprint = NULL;
}
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static GstFlowReturn
gst_chromaprint_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
GstChromaprint *chromaprint = GST_CHROMAPRINT (trans);
gint rate = GST_AUDIO_FILTER (chromaprint)->format.rate;
gint channels = GST_AUDIO_FILTER (chromaprint)->format.channels;
guint nsamples;
if (G_UNLIKELY (rate <= 0 || channels <= 0))
return GST_FLOW_NOT_NEGOTIATED;
if (!chromaprint->record)
return GST_FLOW_OK;
nsamples = GST_BUFFER_SIZE (buf) / (channels * 2);
if (nsamples == 0)
return GST_FLOW_OK;
if (chromaprint->nsamples == 0) {
chromaprint_start (chromaprint->context, rate, channels);
}
chromaprint->nsamples += nsamples;
chromaprint->duration = chromaprint->nsamples / rate;
chromaprint_feed (chromaprint->context, GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf) / 2);
if (chromaprint->duration >= chromaprint->max_duration
&& !chromaprint->fingerprint) {
gst_chromaprint_create_fingerprint (chromaprint);
}
return GST_FLOW_OK;
}
static gboolean
gst_chromaprint_event (GstBaseTransform * trans, GstEvent * event)
{
GstChromaprint *chromaprint = GST_CHROMAPRINT (trans);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
case GST_EVENT_NEWSEGMENT:
GST_DEBUG_OBJECT (trans, "Got %s event, clearing buffer",
GST_EVENT_TYPE_NAME (event));
gst_chromaprint_reset (chromaprint);
break;
case GST_EVENT_EOS:
if (!chromaprint->fingerprint) {
gst_chromaprint_create_fingerprint (chromaprint);
}
break;
default:
break;
}
return TRUE;
}
static void
gst_chromaprint_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstChromaprint *chromaprint = GST_CHROMAPRINT (object);
switch (prop_id) {
case PROP_MAX_DURATION:
chromaprint->max_duration = g_value_get_uint (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_chromaprint_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstChromaprint *chromaprint = GST_CHROMAPRINT (object);
switch (prop_id) {
case PROP_FINGERPRINT:
g_value_set_string (value, chromaprint->fingerprint);
break;
case PROP_MAX_DURATION:
g_value_set_uint (value, chromaprint->max_duration);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static gboolean
plugin_init (GstPlugin * plugin)
{
gboolean ret;
GST_DEBUG_CATEGORY_INIT (gst_chromaprint_debug, "chromaprint",
0, "chromaprint element");
GST_INFO ("libchromaprint %s", chromaprint_get_version ());
ret = gst_element_register (plugin, "chromaprint", GST_RANK_NONE,
GST_TYPE_CHROMAPRINT);
if (ret) {
gst_tag_register (GST_TAG_CHROMAPRINT_FINGERPRINT, GST_TAG_FLAG_META,
G_TYPE_STRING, "chromaprint fingerprint", "Chromaprint fingerprint",
NULL);
}
return ret;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"chromaprint",
"Calculate Chromaprint fingerprint from audio files",
plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
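
The element reports its result as a tag, so applications normally read the fingerprint from the pipeline bus rather than polling the "fingerprint" property. A minimal sketch of such a bus watch follows, assuming the 0.10-style tag API that the element itself uses; the callback name and the gst_bus_add_watch() attachment are illustrative, not part of this commit:

#include <gst/gst.h>

/* Bus watch: prints the fingerprint once chromaprint posts its tag.
 * "chromaprint-fingerprint" is the tag string registered in plugin_init. */
static gboolean
on_bus_message (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_TAG) {
    GstTagList *tags = NULL;
    gchar *fp = NULL;

    gst_message_parse_tag (msg, &tags);
    if (gst_tag_list_get_string (tags, "chromaprint-fingerprint", &fp)) {
      g_print ("fingerprint: %s\n", fp);
      g_free (fp);
    }
    gst_tag_list_free (tags);
  }
  return TRUE;                  /* keep the watch installed */
}

/* typically installed with:
 *   gst_bus_add_watch (gst_pipeline_get_bus (GST_PIPELINE (pipeline)),
 *       on_bus_message, NULL);
 */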

View file

@ -0,0 +1,77 @@
/* GStreamer chromaprint audio fingerprinting element
* Copyright (C) 2006 M. Derezynski
* Copyright (C) 2008 Eric Buehl
* Copyright (C) 2008 Sebastian Dröge <slomo@circular-chaos.org>
* Copyright (C) 2011 Lukáš Lalinský <lalinsky@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_CHROMAPRINT_H__
#define __GST_CHROMAPRINT_H__
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/audio/gstaudiofilter.h>
#include <gst/audio/audio.h>
#include <chromaprint.h>
G_BEGIN_DECLS
#define GST_TYPE_CHROMAPRINT \
(gst_chromaprint_get_type())
#define GST_CHROMAPRINT(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CHROMAPRINT,GstChromaprint))
#define GST_CHROMAPRINT_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CHROMAPRINT,GstChromaprintClass))
#define GST_IS_CHROMAPRINT(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CHROMAPRINT))
#define GST_IS_CHROMAPRINT_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CHROMAPRINT))
#define GST_TAG_CHROMAPRINT_FINGERPRINT "chromaprint-fingerprint"
typedef struct _GstChromaprint GstChromaprint;
typedef struct _GstChromaprintClass GstChromaprintClass;
/**
* GstChromaprint:
*
* Opaque #GstChromaprint element structure
*/
struct _GstChromaprint
{
GstAudioFilter element;
/*< private >*/
ChromaprintContext * context;
char * fingerprint;
gboolean record;
guint64 nsamples;
guint duration;
guint max_duration;
};
struct _GstChromaprintClass
{
GstAudioFilterClass parent_class;
};
GType gst_chromaprint_get_type (void);
G_END_DECLS
#endif /* __GST_CHROMAPRINT_H__ */

View file

@ -208,6 +208,9 @@ gst_mse_finalize (GObject * object)
gst_object_unref (fs->sinkpad_test);
g_mutex_free (fs->lock);
g_cond_free (fs->cond);
gst_buffer_replace (&fs->buffer_ref, NULL);
GST_CALL_PARENT (G_OBJECT_CLASS, finalize, (object));
}
static GstCaps *
@ -243,7 +246,7 @@ gst_mse_getcaps (GstPad * pad)
}
if (pad != fs->sinkpad_test) {
peercaps = gst_pad_peer_get_caps (fs->sinkpad_ref);
peercaps = gst_pad_peer_get_caps (fs->sinkpad_test);
if (peercaps) {
icaps = gst_caps_intersect (caps, peercaps);
gst_caps_unref (caps);
@ -310,6 +313,7 @@ gst_mse_reset (GstMSE * fs)
fs->luma_mse_sum = 0;
fs->chroma_mse_sum = 0;
fs->n_frames = 0;
fs->cancel = FALSE;
if (fs->buffer_ref) {
gst_buffer_unref (fs->buffer_ref);
@ -435,9 +439,11 @@ gst_mse_sink_event (GstPad * pad, GstEvent * event)
break;
case GST_EVENT_FLUSH_START:
GST_DEBUG ("flush start");
fs->cancel = TRUE;
break;
case GST_EVENT_FLUSH_STOP:
GST_DEBUG ("flush stop");
fs->cancel = FALSE;
break;
default:
break;

View file

@ -161,6 +161,8 @@ gst_gme_dec_dispose (GObject * object)
g_object_unref (gme->adapter);
gme->adapter = NULL;
}
GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
}
static GstFlowReturn

View file

@ -43,43 +43,16 @@ enum
ARG_0
};
static void gst_gsmdec_base_init (gpointer g_class);
static void gst_gsmdec_class_init (GstGSMDec * klass);
static void gst_gsmdec_init (GstGSMDec * gsmdec);
static void gst_gsmdec_finalize (GObject * object);
static gboolean gst_gsmdec_sink_setcaps (GstPad * pad, GstCaps * caps);
static gboolean gst_gsmdec_sink_event (GstPad * pad, GstEvent * event);
static GstFlowReturn gst_gsmdec_chain (GstPad * pad, GstBuffer * buf);
static GstElementClass *parent_class = NULL;
static gboolean gst_gsmdec_start (GstAudioDecoder * dec);
static gboolean gst_gsmdec_stop (GstAudioDecoder * dec);
static gboolean gst_gsmdec_set_format (GstAudioDecoder * dec, GstCaps * caps);
static GstFlowReturn gst_gsmdec_parse (GstAudioDecoder * dec,
GstAdapter * adapter, gint * offset, gint * length);
static GstFlowReturn gst_gsmdec_handle_frame (GstAudioDecoder * dec,
GstBuffer * in_buf);
/*static guint gst_gsmdec_signals[LAST_SIGNAL] = { 0 }; */
GType
gst_gsmdec_get_type (void)
{
static GType gsmdec_type = 0;
if (!gsmdec_type) {
static const GTypeInfo gsmdec_info = {
sizeof (GstGSMDecClass),
gst_gsmdec_base_init,
NULL,
(GClassInitFunc) gst_gsmdec_class_init,
NULL,
NULL,
sizeof (GstGSMDec),
0,
(GInstanceInitFunc) gst_gsmdec_init,
};
gsmdec_type =
g_type_register_static (GST_TYPE_ELEMENT, "GstGSMDec", &gsmdec_info, 0);
}
return gsmdec_type;
}
#define ENCODED_SAMPLES 160
static GstStaticPadTemplate gsmdec_sink_template =
@ -101,6 +74,9 @@ GST_STATIC_PAD_TEMPLATE ("src",
"depth = (int) 16, " "rate = (int) [1, MAX], " "channels = (int) 1")
);
GST_BOILERPLATE (GstGSMDec, gst_gsmdec, GstAudioDecoder,
GST_TYPE_AUDIO_DECODER);
static void
gst_gsmdec_base_init (gpointer g_class)
{
@ -116,63 +92,60 @@ gst_gsmdec_base_init (gpointer g_class)
}
static void
gst_gsmdec_class_init (GstGSMDec * klass)
gst_gsmdec_class_init (GstGSMDecClass * klass)
{
GObjectClass *gobject_class;
GstAudioDecoderClass *base_class;
gobject_class = (GObjectClass *) klass;
base_class = (GstAudioDecoderClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->finalize = gst_gsmdec_finalize;
base_class->start = GST_DEBUG_FUNCPTR (gst_gsmdec_start);
base_class->stop = GST_DEBUG_FUNCPTR (gst_gsmdec_stop);
base_class->set_format = GST_DEBUG_FUNCPTR (gst_gsmdec_set_format);
base_class->parse = GST_DEBUG_FUNCPTR (gst_gsmdec_parse);
base_class->handle_frame = GST_DEBUG_FUNCPTR (gst_gsmdec_handle_frame);
GST_DEBUG_CATEGORY_INIT (gsmdec_debug, "gsmdec", 0, "GSM Decoder");
}
static void
gst_gsmdec_init (GstGSMDec * gsmdec)
gst_gsmdec_init (GstGSMDec * gsmdec, GstGSMDecClass * klass)
{
/* create the sink and src pads */
gsmdec->sinkpad =
gst_pad_new_from_static_template (&gsmdec_sink_template, "sink");
gst_pad_set_setcaps_function (gsmdec->sinkpad, gst_gsmdec_sink_setcaps);
gst_pad_set_event_function (gsmdec->sinkpad, gst_gsmdec_sink_event);
gst_pad_set_chain_function (gsmdec->sinkpad, gst_gsmdec_chain);
gst_element_add_pad (GST_ELEMENT (gsmdec), gsmdec->sinkpad);
gsmdec->srcpad =
gst_pad_new_from_static_template (&gsmdec_src_template, "src");
gst_element_add_pad (GST_ELEMENT (gsmdec), gsmdec->srcpad);
gsmdec->state = gsm_create ();
gsmdec->adapter = gst_adapter_new ();
gsmdec->next_of = 0;
gsmdec->next_ts = 0;
}
static void
gst_gsmdec_finalize (GObject * object)
{
GstGSMDec *gsmdec;
gsmdec = GST_GSMDEC (object);
g_object_unref (gsmdec->adapter);
gsm_destroy (gsmdec->state);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static gboolean
gst_gsmdec_sink_setcaps (GstPad * pad, GstCaps * caps)
gst_gsmdec_start (GstAudioDecoder * dec)
{
GstGSMDec *gsmdec = GST_GSMDEC (dec);
GST_DEBUG_OBJECT (dec, "start");
gsmdec->state = gsm_create ();
return TRUE;
}
static gboolean
gst_gsmdec_stop (GstAudioDecoder * dec)
{
GstGSMDec *gsmdec = GST_GSMDEC (dec);
GST_DEBUG_OBJECT (dec, "stop");
gsm_destroy (gsmdec->state);
return TRUE;
}
static gboolean
gst_gsmdec_set_format (GstAudioDecoder * dec, GstCaps * caps)
{
GstGSMDec *gsmdec;
GstCaps *srccaps;
GstStructure *s;
gboolean ret = FALSE;
gint rate;
gsmdec = GST_GSMDEC (gst_pad_get_parent (pad));
gsmdec = GST_GSMDEC (dec);
s = gst_caps_get_structure (caps, 0);
if (s == NULL)
@ -186,7 +159,9 @@ gst_gsmdec_sink_setcaps (GstPad * pad, GstCaps * caps)
else
goto wrong_caps;
if (!gst_structure_get_int (s, "rate", &gsmdec->rate)) {
gsmdec->needed = 33;
if (!gst_structure_get_int (s, "rate", &rate)) {
GST_WARNING_OBJECT (gsmdec, "missing sample rate parameter from sink caps");
goto beach;
}
@ -194,21 +169,16 @@ gst_gsmdec_sink_setcaps (GstPad * pad, GstCaps * caps)
/* MSGSM needs different framing */
gsm_option (gsmdec->state, GSM_OPT_WAV49, &gsmdec->use_wav49);
gsmdec->duration = gst_util_uint64_scale (ENCODED_SAMPLES,
GST_SECOND, gsmdec->rate);
/* Setting up src caps based on the input sample rate. */
srccaps = gst_caps_new_simple ("audio/x-raw-int",
"endianness", G_TYPE_INT, G_BYTE_ORDER,
"signed", G_TYPE_BOOLEAN, TRUE,
"width", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"rate", G_TYPE_INT, gsmdec->rate, "channels", G_TYPE_INT, 1, NULL);
ret = gst_pad_set_caps (gsmdec->srcpad, srccaps);
"rate", G_TYPE_INT, rate, "channels", G_TYPE_INT, 1, NULL);
ret = gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec), srccaps);
gst_caps_unref (srccaps);
gst_object_unref (gsmdec);
return ret;
@ -218,127 +188,66 @@ wrong_caps:
GST_ERROR_OBJECT (gsmdec, "invalid caps received");
beach:
gst_object_unref (gsmdec);
return ret;
}
static gboolean
gst_gsmdec_sink_event (GstPad * pad, GstEvent * event)
static GstFlowReturn
gst_gsmdec_parse (GstAudioDecoder * dec, GstAdapter * adapter,
gint * offset, gint * length)
{
gboolean res;
GstGSMDec *gsmdec;
GstGSMDec *gsmdec = GST_GSMDEC (dec);
guint size;
gsmdec = GST_GSMDEC (gst_pad_get_parent (pad));
size = gst_adapter_available (adapter);
g_return_val_if_fail (size > 0, GST_FLOW_ERROR);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
res = gst_pad_push_event (gsmdec->srcpad, event);
break;
case GST_EVENT_FLUSH_STOP:
gst_segment_init (&gsmdec->segment, GST_FORMAT_UNDEFINED);
res = gst_pad_push_event (gsmdec->srcpad, event);
break;
case GST_EVENT_NEWSEGMENT:
{
gboolean update;
GstFormat format;
gdouble rate, arate;
gint64 start, stop, time;
gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
&start, &stop, &time);
/* now configure the values */
gst_segment_set_newsegment_full (&gsmdec->segment, update,
rate, arate, format, start, stop, time);
/* and forward */
res = gst_pad_push_event (gsmdec->srcpad, event);
break;
}
case GST_EVENT_EOS:
default:
res = gst_pad_push_event (gsmdec->srcpad, event);
break;
/* WAV49 requires alternating 33 and 32 bytes of input */
if (gsmdec->use_wav49) {
gsmdec->needed = (gsmdec->needed == 33 ? 32 : 33);
}
gst_object_unref (gsmdec);
if (size < gsmdec->needed)
return GST_FLOW_UNEXPECTED;
return res;
*offset = 0;
*length = gsmdec->needed;
return GST_FLOW_OK;
}
static GstFlowReturn
gst_gsmdec_chain (GstPad * pad, GstBuffer * buf)
gst_gsmdec_handle_frame (GstAudioDecoder * dec, GstBuffer * buffer)
{
GstGSMDec *gsmdec;
gsm_byte *data;
GstFlowReturn ret = GST_FLOW_OK;
GstClockTime timestamp;
gint needed;
GstBuffer *outbuf;
gsmdec = GST_GSMDEC (gst_pad_get_parent (pad));
/* no fancy draining */
if (G_UNLIKELY (!buffer))
return GST_FLOW_OK;
timestamp = GST_BUFFER_TIMESTAMP (buf);
gsmdec = GST_GSMDEC (dec);
if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
gst_adapter_clear (gsmdec->adapter);
gsmdec->next_ts = GST_CLOCK_TIME_NONE;
/* FIXME, do some good offset */
gsmdec->next_of = 0;
}
gst_adapter_push (gsmdec->adapter, buf);
/* always the same amount of output samples */
outbuf = gst_buffer_new_and_alloc (ENCODED_SAMPLES * sizeof (gsm_signal));
needed = 33;
/* do we have enough bytes to read a frame */
while (gst_adapter_available (gsmdec->adapter) >= needed) {
GstBuffer *outbuf;
/* always the same amount of output samples */
outbuf = gst_buffer_new_and_alloc (ENCODED_SAMPLES * sizeof (gsm_signal));
/* If we are not given any timestamp, interpolate from last seen
* timestamp (if any). */
if (timestamp == GST_CLOCK_TIME_NONE)
timestamp = gsmdec->next_ts;
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
/* interpolate in the next run */
if (timestamp != GST_CLOCK_TIME_NONE)
gsmdec->next_ts = timestamp + gsmdec->duration;
timestamp = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (outbuf) = gsmdec->duration;
GST_BUFFER_OFFSET (outbuf) = gsmdec->next_of;
if (gsmdec->next_of != -1)
gsmdec->next_of += ENCODED_SAMPLES;
GST_BUFFER_OFFSET_END (outbuf) = gsmdec->next_of;
gst_buffer_set_caps (outbuf, GST_PAD_CAPS (gsmdec->srcpad));
/* now encode frame into the output buffer */
data = (gsm_byte *) gst_adapter_peek (gsmdec->adapter, needed);
if (gsm_decode (gsmdec->state, data,
(gsm_signal *) GST_BUFFER_DATA (outbuf)) < 0) {
/* invalid frame */
GST_WARNING_OBJECT (gsmdec, "tried to decode an invalid frame, skipping");
}
gst_adapter_flush (gsmdec->adapter, needed);
/* WAV49 requires alternating 33 and 32 bytes of input */
if (gsmdec->use_wav49)
needed = (needed == 33 ? 32 : 33);
GST_DEBUG_OBJECT (gsmdec, "Pushing buffer of size %d ts %" GST_TIME_FORMAT,
GST_BUFFER_SIZE (outbuf),
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
/* push */
ret = gst_pad_push (gsmdec->srcpad, outbuf);
/* now encode frame into the output buffer */
data = (gsm_byte *) GST_BUFFER_DATA (buffer);
if (gsm_decode (gsmdec->state, data,
(gsm_signal *) GST_BUFFER_DATA (outbuf)) < 0) {
/* invalid frame */
GST_AUDIO_DECODER_ERROR (gsmdec, 1, STREAM, DECODE, (NULL),
("tried to decode an invalid frame"), ret);
if (ret != GST_FLOW_OK)
goto exit;
gst_buffer_unref (outbuf);
outbuf = NULL;
}
gst_object_unref (gsmdec);
gst_audio_decoder_finish_frame (dec, outbuf, 1);
exit:
return ret;
}

View file

@ -21,7 +21,7 @@
#define __GST_GSMDEC_H__
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/audio/gstaudiodecoder.h>
#ifdef GSM_HEADER_IN_SUBDIR
#include <gsm/gsm.h>
@ -47,28 +47,16 @@ typedef struct _GstGSMDecClass GstGSMDecClass;
struct _GstGSMDec
{
GstElement element;
/* pads */
GstPad *sinkpad, *srcpad;
GstAudioDecoder element;
gsm state;
gint use_wav49;
gint64 next_of;
GstClockTime next_ts;
GstAdapter *adapter;
GstSegment segment;
gint rate;
GstClockTime duration;
gint needed;
};
struct _GstGSMDecClass
{
GstElementClass parent_class;
GstAudioDecoderClass parent_class;
};
GType gst_gsmdec_get_type (void);

View file

@ -43,39 +43,12 @@ enum
ARG_0
};
static void gst_gsmenc_base_init (gpointer g_class);
static void gst_gsmenc_class_init (GstGSMEnc * klass);
static void gst_gsmenc_init (GstGSMEnc * gsmenc);
static void gst_gsmenc_finalize (GObject * object);
static gboolean gst_gsmenc_setcaps (GstPad * pad, GstCaps * caps);
static GstFlowReturn gst_gsmenc_chain (GstPad * pad, GstBuffer * buf);
static GstElementClass *parent_class = NULL;
GType
gst_gsmenc_get_type (void)
{
static GType gsmenc_type = 0;
if (!gsmenc_type) {
static const GTypeInfo gsmenc_info = {
sizeof (GstGSMEncClass),
gst_gsmenc_base_init,
NULL,
(GClassInitFunc) gst_gsmenc_class_init,
NULL,
NULL,
sizeof (GstGSMEnc),
0,
(GInstanceInitFunc) gst_gsmenc_init,
};
gsmenc_type =
g_type_register_static (GST_TYPE_ELEMENT, "GstGSMEnc", &gsmenc_info, 0);
}
return gsmenc_type;
}
static gboolean gst_gsmenc_start (GstAudioEncoder * enc);
static gboolean gst_gsmenc_stop (GstAudioEncoder * enc);
static gboolean gst_gsmenc_set_format (GstAudioEncoder * enc,
GstAudioInfo * info);
static GstFlowReturn gst_gsmenc_handle_frame (GstAudioEncoder * enc,
GstBuffer * in_buf);
static GstStaticPadTemplate gsmenc_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
@ -95,6 +68,9 @@ GST_STATIC_PAD_TEMPLATE ("sink",
"depth = (int) 16, " "rate = (int) 8000, " "channels = (int) 1")
);
GST_BOILERPLATE (GstGSMEnc, gst_gsmenc, GstAudioEncoder,
GST_TYPE_AUDIO_ENCODER);
static void
gst_gsmenc_base_init (gpointer g_class)
{
@ -110,34 +86,32 @@ gst_gsmenc_base_init (gpointer g_class)
}
static void
gst_gsmenc_class_init (GstGSMEnc * klass)
gst_gsmenc_class_init (GstGSMEncClass * klass)
{
GObjectClass *gobject_class;
GstAudioEncoderClass *base_class;
gobject_class = (GObjectClass *) klass;
base_class = (GstAudioEncoderClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->finalize = gst_gsmenc_finalize;
base_class->start = GST_DEBUG_FUNCPTR (gst_gsmenc_start);
base_class->stop = GST_DEBUG_FUNCPTR (gst_gsmenc_stop);
base_class->set_format = GST_DEBUG_FUNCPTR (gst_gsmenc_set_format);
base_class->handle_frame = GST_DEBUG_FUNCPTR (gst_gsmenc_handle_frame);
GST_DEBUG_CATEGORY_INIT (gsmenc_debug, "gsmenc", 0, "GSM Encoder");
}
static void
gst_gsmenc_init (GstGSMEnc * gsmenc)
gst_gsmenc_init (GstGSMEnc * gsmenc, GstGSMEncClass * klass)
{
}
static gboolean
gst_gsmenc_start (GstAudioEncoder * enc)
{
GstGSMEnc *gsmenc = GST_GSMENC (enc);
gint use_wav49;
/* create the sink and src pads */
gsmenc->sinkpad =
gst_pad_new_from_static_template (&gsmenc_sink_template, "sink");
gst_pad_set_chain_function (gsmenc->sinkpad, gst_gsmenc_chain);
gst_pad_set_setcaps_function (gsmenc->sinkpad, gst_gsmenc_setcaps);
gst_element_add_pad (GST_ELEMENT (gsmenc), gsmenc->sinkpad);
gsmenc->srcpad =
gst_pad_new_from_static_template (&gsmenc_src_template, "src");
gst_element_add_pad (GST_ELEMENT (gsmenc), gsmenc->srcpad);
GST_DEBUG_OBJECT (enc, "start");
gsmenc->state = gsm_create ();
@ -145,78 +119,69 @@ gst_gsmenc_init (GstGSMEnc * gsmenc)
use_wav49 = 0;
gsm_option (gsmenc->state, GSM_OPT_WAV49, &use_wav49);
gsmenc->adapter = gst_adapter_new ();
gsmenc->next_ts = 0;
}
static void
gst_gsmenc_finalize (GObject * object)
{
GstGSMEnc *gsmenc;
gsmenc = GST_GSMENC (object);
g_object_unref (gsmenc->adapter);
gsm_destroy (gsmenc->state);
G_OBJECT_CLASS (parent_class)->finalize (object);
return TRUE;
}
static gboolean
gst_gsmenc_setcaps (GstPad * pad, GstCaps * caps)
gst_gsmenc_stop (GstAudioEncoder * enc)
{
GstGSMEnc *gsmenc;
GstCaps *srccaps;
GstGSMEnc *gsmenc = GST_GSMENC (enc);
gsmenc = GST_GSMENC (gst_pad_get_parent (pad));
srccaps = gst_static_pad_template_get_caps (&gsmenc_src_template);
gst_pad_set_caps (gsmenc->srcpad, srccaps);
gst_object_unref (gsmenc);
GST_DEBUG_OBJECT (enc, "stop");
gsm_destroy (gsmenc->state);
return TRUE;
}
static gboolean
gst_gsmenc_set_format (GstAudioEncoder * benc, GstAudioInfo * info)
{
GstCaps *srccaps;
srccaps = gst_static_pad_template_get_caps (&gsmenc_src_template);
gst_pad_set_caps (GST_AUDIO_ENCODER_SRC_PAD (benc), srccaps);
/* report needs to base class */
gst_audio_encoder_set_frame_samples_min (benc, 160);
gst_audio_encoder_set_frame_samples_max (benc, 160);
gst_audio_encoder_set_frame_max (benc, 1);
return TRUE;
}
static GstFlowReturn
gst_gsmenc_chain (GstPad * pad, GstBuffer * buf)
gst_gsmenc_handle_frame (GstAudioEncoder * benc, GstBuffer * buffer)
{
GstGSMEnc *gsmenc;
gsm_signal *data;
GstFlowReturn ret = GST_FLOW_OK;
GstBuffer *outbuf;
gsmenc = GST_GSMENC (gst_pad_get_parent (pad));
gsmenc = GST_GSMENC (benc);
if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
gst_adapter_clear (gsmenc->adapter);
}
gst_adapter_push (gsmenc->adapter, buf);
while (gst_adapter_available (gsmenc->adapter) >= 320) {
GstBuffer *outbuf;
outbuf = gst_buffer_new_and_alloc (33 * sizeof (gsm_byte));
GST_BUFFER_TIMESTAMP (outbuf) = gsmenc->next_ts;
GST_BUFFER_DURATION (outbuf) = 20 * GST_MSECOND;
gsmenc->next_ts += 20 * GST_MSECOND;
/* encode 160 16-bit samples into 33 bytes */
data = (gsm_signal *) gst_adapter_peek (gsmenc->adapter, 320);
gsm_encode (gsmenc->state, data, (gsm_byte *) GST_BUFFER_DATA (outbuf));
gst_adapter_flush (gsmenc->adapter, 320);
gst_buffer_set_caps (outbuf, GST_PAD_CAPS (gsmenc->srcpad));
GST_DEBUG_OBJECT (gsmenc, "Pushing buffer of size %d",
GST_BUFFER_SIZE (outbuf));
ret = gst_pad_push (gsmenc->srcpad, outbuf);
/* we don't deal with squeezing remnants, so simply discard those */
if (G_UNLIKELY (buffer == NULL)) {
GST_DEBUG_OBJECT (gsmenc, "no data");
goto done;
}
gst_object_unref (gsmenc);
if (G_UNLIKELY (GST_BUFFER_SIZE (buffer) < 320)) {
GST_DEBUG_OBJECT (gsmenc, "discarding trailing data %d",
GST_BUFFER_SIZE (buffer));
ret = gst_audio_encoder_finish_frame (benc, NULL, -1);
goto done;
}
outbuf = gst_buffer_new_and_alloc (33 * sizeof (gsm_byte));
/* encode 160 16-bit samples into 33 bytes */
data = (gsm_signal *) GST_BUFFER_DATA (buffer);
gsm_encode (gsmenc->state, data, (gsm_byte *) GST_BUFFER_DATA (outbuf));
GST_LOG_OBJECT (gsmenc, "encoded to %d bytes", GST_BUFFER_SIZE (outbuf));
ret = gst_audio_encoder_finish_frame (benc, outbuf, 160);
done:
return ret;
}
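
As a quick sanity check on the frame sizes used above: 160 samples / 8000 Hz = 20 ms of audio per frame, and each frame encodes to 33 bytes = 264 bits, so 264 bits / 20 ms is about 13.2 kbit/s as stored, which matches the usual libgsm full-rate framing.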

View file

@ -21,7 +21,7 @@
#define __GST_GSMENC_H__
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/audio/gstaudioencoder.h>
#ifdef GSM_HEADER_IN_SUBDIR
#include <gsm/gsm.h>
@ -47,20 +47,14 @@ typedef struct _GstGSMEncClass GstGSMEncClass;
struct _GstGSMEnc
{
GstElement element;
/* pads */
GstPad *sinkpad, *srcpad;
GstAdapter *adapter;
GstAudioEncoder element;
gsm state;
GstClockTime next_ts;
gboolean firstBuf;
};
struct _GstGSMEncClass
{
GstElementClass parent_class;
GstAudioEncoderClass parent_class;
};
GType gst_gsmenc_get_type (void);

View file

@ -924,33 +924,32 @@ gst_kate_enc_chain_text (GstKateEnc * ke, GstBuffer * buf,
("kate_encode_set_markup_type: %d", ret));
rflow = GST_FLOW_ERROR;
} else {
char *text;
gsize text_len;
const char *text;
size_t text_len;
gboolean need_unmap = TRUE;
kate_float t0 = start / (double) GST_SECOND;
kate_float t1 = stop / (double) GST_SECOND;
text = gst_buffer_map (buf, &text_len, NULL, GST_MAP_READ);
if (text) {
kate_float t0 = start / (double) GST_SECOND;
kate_float t1 = stop / (double) GST_SECOND;
GST_LOG_OBJECT (ke, "Encoding text: %*.*s (%u bytes) from %f to %f",
(int) text_len, (int) text_len, text, text_len, t0, t1);
ret = kate_encode_text (&ke->k, t0, t1, text, text_len, &kp);
if (G_UNLIKELY (ret < 0)) {
GST_ELEMENT_ERROR (ke, STREAM, ENCODE, (NULL),
("Failed to encode text: %d", ret));
rflow = GST_FLOW_ERROR;
} else {
rflow =
gst_kate_enc_chain_push_packet (ke, &kp, start, stop - start + 1);
}
} else {
/* FIXME: this should not be an error, we should ignore it and move on */
GST_ELEMENT_ERROR (ke, STREAM, ENCODE, (NULL),
("no text in text packet"));
rflow = GST_FLOW_ERROR;
if (text == NULL) {
text = "";
text_len = 0;
need_unmap = FALSE;
}
gst_buffer_unmap (buf, text, text_len);
GST_LOG_OBJECT (ke, "Encoding text: %*.*s (%u bytes) from %f to %f",
(int) text_len, (int) text_len, GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf), t0, t1);
ret = kate_encode_text (&ke->k, t0, t1, text, text_len, &kp);
if (G_UNLIKELY (ret < 0)) {
GST_ELEMENT_ERROR (ke, STREAM, ENCODE, (NULL),
("Failed to encode text: %d", ret));
rflow = GST_FLOW_ERROR;
} else {
rflow = gst_kate_enc_chain_push_packet (ke, &kp, start, stop - start + 1);
}
if (need_unmap)
gst_buffer_unmap (buf, text, text_len);
}
return rflow;

View file

@ -296,6 +296,8 @@ gst_template_match_finalize (GObject * object)
if (filter->cvTemplateImage) {
cvReleaseImage (&filter->cvTemplateImage);
}
GST_CALL_PARENT (G_OBJECT_CLASS, finalize, (object));
}
/* chain function

View file

@ -470,16 +470,25 @@ create_elements (RsnDvdBin * dvdbin)
RSN_TYPE_STREAM_SELECTOR, "audioselect", "Audio stream selector"))
return FALSE;
if (!try_create_piece (dvdbin, DVD_ELEM_AUD_MUNGE, NULL,
RSN_TYPE_AUDIOMUNGE, "audioearlymunge", "Audio output filter"))
return FALSE;
if (!try_create_piece (dvdbin, DVD_ELEM_AUDDEC, NULL,
RSN_TYPE_AUDIODEC, "auddec", "audio decoder"))
return FALSE;
/* rsnaudiomunge goes after the audio decoding to regulate the stream */
if (!try_create_piece (dvdbin, DVD_ELEM_AUD_MUNGE, NULL,
RSN_TYPE_AUDIOMUNGE, "audiomunge", "Audio output filter"))
return FALSE;
src = gst_element_get_static_pad (dvdbin->pieces[DVD_ELEM_AUD_MUNGE], "src");
sink = gst_element_get_static_pad (dvdbin->pieces[DVD_ELEM_AUDDEC], "sink");
if (src == NULL || sink == NULL)
goto failed_aud_connect;
if (GST_PAD_LINK_FAILED (gst_pad_link (src, sink)))
goto failed_aud_connect;
gst_object_unref (sink);
gst_object_unref (src);
src = sink = NULL;
src = gst_element_get_static_pad (dvdbin->pieces[DVD_ELEM_AUDDEC], "src");
src = gst_element_get_static_pad (dvdbin->pieces[DVD_ELEM_AUD_SELECT], "src");
sink =
gst_element_get_static_pad (dvdbin->pieces[DVD_ELEM_AUD_MUNGE], "sink");
if (src == NULL || sink == NULL)
@ -490,18 +499,8 @@ create_elements (RsnDvdBin * dvdbin)
gst_object_unref (src);
src = sink = NULL;
src = gst_element_get_static_pad (dvdbin->pieces[DVD_ELEM_AUD_SELECT], "src");
sink = gst_element_get_static_pad (dvdbin->pieces[DVD_ELEM_AUDDEC], "sink");
if (src == NULL || sink == NULL)
goto failed_aud_connect;
if (GST_PAD_LINK_FAILED (gst_pad_link (src, sink)))
goto failed_aud_connect;
gst_object_unref (sink);
gst_object_unref (src);
src = sink = NULL;
/* ghost audio munge output pad onto bin */
src = gst_element_get_static_pad (dvdbin->pieces[DVD_ELEM_AUD_MUNGE], "src");
src = gst_element_get_static_pad (dvdbin->pieces[DVD_ELEM_AUDDEC], "src");
if (src == NULL)
goto failed_aud_ghost;
src_templ = gst_static_pad_template_get (&audio_src_template);
@ -701,7 +700,7 @@ demux_pad_added (GstElement * element, GstPad * pad, RsnDvdBin * dvdbin)
gst_element_get_request_pad (dvdbin->pieces[DVD_ELEM_SPU_SELECT],
"sink_%u");
skip_mq = TRUE;
} else if (can_sink_caps (dvdbin->pieces[DVD_ELEM_AUDDEC], caps)) {
} else if (can_sink_caps (dvdbin->pieces[DVD_ELEM_AUD_MUNGE], caps)) {
GST_LOG_OBJECT (dvdbin, "Found audio pad w/ caps %" GST_PTR_FORMAT, caps);
dest_pad =
gst_element_get_request_pad (dvdbin->pieces[DVD_ELEM_AUD_SELECT],
@ -720,7 +719,7 @@ demux_pad_added (GstElement * element, GstPad * pad, RsnDvdBin * dvdbin)
("No MPEG video decoder found"));
} else {
GST_ELEMENT_WARNING (dvdbin, STREAM, CODEC_NOT_FOUND, (NULL),
("No MPEG video decoder found"));
("No MPEG audio decoder found"));
}
}

View file

@ -269,6 +269,7 @@ rsn_dvdsrc_finalize (GObject * object)
g_mutex_free (src->dvd_lock);
g_mutex_free (src->branch_lock);
g_cond_free (src->still_cond);
g_free (src->device);
gst_buffer_replace (&src->alloc_buf, NULL);
gst_buffer_replace (&src->next_buf, NULL);

View file

@ -155,9 +155,9 @@ rsn_audiomunge_set_caps (GstPad * pad, GstCaps * caps)
g_return_val_if_fail (munge != NULL, FALSE);
otherpad = (pad == munge->srcpad) ? munge->sinkpad : munge->srcpad;
gst_object_unref (munge);
ret = gst_pad_set_caps (otherpad, caps);
gst_object_unref (munge);
return ret;
}

View file

@ -247,18 +247,50 @@ _get_decoder_factories (gpointer arg)
GstPadTemplate *templ = gst_element_class_get_pad_template (klass,
"sink");
RsnDecFactoryFilterCtx ctx = { NULL, };
GstCaps *raw;
gboolean raw_audio;
ctx.desired_caps = gst_pad_template_get_caps (templ);
raw = gst_caps_from_string ("audio/x-raw-float");
raw_audio = gst_caps_can_intersect (raw, ctx.desired_caps);
if (raw_audio) {
GstCaps *sub = gst_caps_subtract (ctx.desired_caps, raw);
ctx.desired_caps = sub;
} else {
gst_caps_ref (ctx.desired_caps);
}
gst_caps_unref (raw);
/* Set decoder caps to empty. Will be filled by the factory_filter */
ctx.decoder_caps = gst_caps_new_empty ();
GST_DEBUG ("Finding factories for caps: %" GST_PTR_FORMAT, ctx.desired_caps);
factories = gst_default_registry_feature_filter (
(GstPluginFeatureFilter) rsndec_factory_filter, FALSE, &ctx);
/* If these are audio caps, we add audioconvert, which is not a decoder,
but allows raw audio to go through relatively unmolested - this will
come handy when we have to send placeholder silence to allow preroll
for those DVDs which have titles with no audio track. */
if (raw_audio) {
GstPluginFeature *feature;
GST_DEBUG ("These are audio caps, adding audioconvert");
feature =
gst_default_registry_find_feature ("audioconvert",
GST_TYPE_ELEMENT_FACTORY);
if (feature) {
factories = g_list_append (factories, feature);
} else {
GST_WARNING ("Could not find feature audioconvert");
}
}
factories = g_list_sort (factories, (GCompareFunc) sort_by_ranks);
GST_DEBUG ("Available decoder caps %" GST_PTR_FORMAT, ctx.decoder_caps);
gst_caps_unref (ctx.decoder_caps);
gst_caps_unref (ctx.desired_caps);
return factories;
}
@ -343,7 +375,7 @@ static GstStaticPadTemplate audio_sink_template =
GST_STATIC_CAPS ("audio/mpeg,mpegversion=(int)1;"
"audio/x-private1-lpcm;"
"audio/x-private1-ac3;" "audio/ac3;" "audio/x-ac3;"
"audio/x-private1-dts;")
"audio/x-private1-dts; audio/x-raw-float")
);
static GstStaticPadTemplate audio_src_template = GST_STATIC_PAD_TEMPLATE ("src",

View file

@ -123,6 +123,8 @@ static GstStaticPadTemplate data_sink_template =
GST_BOILERPLATE (GstRsvgOverlay, gst_rsvg_overlay, GstVideoFilter,
GST_TYPE_VIDEO_FILTER);
static void gst_rsvg_overlay_finalize (GObject * object);
static void
gst_rsvg_overlay_set_svg_data (GstRsvgOverlay * overlay, const gchar * data,
gboolean consider_as_filename)
@ -467,6 +469,7 @@ gst_rsvg_overlay_class_init (GstRsvgOverlayClass * klass)
gobject_class->set_property = gst_rsvg_overlay_set_property;
gobject_class->get_property = gst_rsvg_overlay_get_property;
gobject_class->finalize = gst_rsvg_overlay_finalize;
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DATA,
g_param_spec_string ("data", "data", "SVG data.", "",
@ -542,3 +545,13 @@ gst_rsvg_overlay_init (GstRsvgOverlay * overlay, GstRsvgOverlayClass * klass)
GST_DEBUG_FUNCPTR (gst_rsvg_overlay_data_sink_event));
gst_element_add_pad (GST_ELEMENT (overlay), overlay->data_sinkpad);
}
static void
gst_rsvg_overlay_finalize (GObject * object)
{
GstRsvgOverlay *overlay = GST_RSVG_OVERLAY (object);
g_object_unref (overlay->adapter);
G_OBJECT_CLASS (parent_class)->finalize (object);
}

View file

@ -102,7 +102,7 @@ static GstStaticPadTemplate gst_schro_dec_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ I420, YUY2, AYUV }"))
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV (GST_SCHRO_YUV_LIST))
);
GST_BOILERPLATE (GstSchroDec, gst_schro_dec, GstBaseVideoDecoder,
@ -313,12 +313,25 @@ parse_sequence_header (GstSchroDec * schro_dec, guint8 * data, int size)
ret = schro_parse_decode_sequence_header (data + 13, size - 13,
&video_format);
if (ret) {
if (video_format.chroma_format == SCHRO_CHROMA_444) {
state->format = GST_VIDEO_FORMAT_AYUV;
} else if (video_format.chroma_format == SCHRO_CHROMA_422) {
state->format = GST_VIDEO_FORMAT_YUY2;
} else if (video_format.chroma_format == SCHRO_CHROMA_420) {
state->format = GST_VIDEO_FORMAT_I420;
int bit_depth;
bit_depth = schro_video_format_get_bit_depth (&video_format);
if (bit_depth == 8) {
if (video_format.chroma_format == SCHRO_CHROMA_444) {
state->format = GST_VIDEO_FORMAT_AYUV;
} else if (video_format.chroma_format == SCHRO_CHROMA_422) {
state->format = GST_VIDEO_FORMAT_UYVY;
} else if (video_format.chroma_format == SCHRO_CHROMA_420) {
state->format = GST_VIDEO_FORMAT_I420;
}
} else if (bit_depth <= 10) {
state->format = GST_VIDEO_FORMAT_v210;
} else if (bit_depth <= 16) {
state->format = GST_VIDEO_FORMAT_AYUV64;
} else {
GST_ERROR ("bit depth too large (%d > 16)", bit_depth);
state->format = GST_VIDEO_FORMAT_AYUV64;
}
state->fps_n = video_format.frame_rate_numerator;
state->fps_d = video_format.frame_rate_denominator;

View file

@ -107,7 +107,7 @@ static GstStaticPadTemplate gst_schro_enc_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ I420, YV12, YUY2, UYVY, AYUV }"))
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV (GST_SCHRO_YUV_LIST))
);
static GstStaticPadTemplate gst_schro_enc_src_template =
@ -271,13 +271,18 @@ gst_schro_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
switch (state->format) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
case GST_VIDEO_FORMAT_Y42B:
schro_enc->video_format->chroma_format = SCHRO_CHROMA_420;
break;
case GST_VIDEO_FORMAT_YUY2:
case GST_VIDEO_FORMAT_UYVY:
case GST_VIDEO_FORMAT_v216:
case GST_VIDEO_FORMAT_v210:
schro_enc->video_format->chroma_format = SCHRO_CHROMA_422;
break;
case GST_VIDEO_FORMAT_AYUV:
case GST_VIDEO_FORMAT_Y444:
case GST_VIDEO_FORMAT_AYUV64:
schro_enc->video_format->chroma_format = SCHRO_CHROMA_444;
break;
case GST_VIDEO_FORMAT_ARGB:
@ -300,8 +305,24 @@ gst_schro_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
schro_enc->video_format->aspect_ratio_numerator = state->par_n;
schro_enc->video_format->aspect_ratio_denominator = state->par_d;
schro_video_format_set_std_signal_range (schro_enc->video_format,
SCHRO_SIGNAL_RANGE_8BIT_VIDEO);
switch (state->format) {
default:
schro_video_format_set_std_signal_range (schro_enc->video_format,
SCHRO_SIGNAL_RANGE_8BIT_VIDEO);
break;
case GST_VIDEO_FORMAT_v210:
schro_video_format_set_std_signal_range (schro_enc->video_format,
SCHRO_SIGNAL_RANGE_10BIT_VIDEO);
break;
case GST_VIDEO_FORMAT_v216:
case GST_VIDEO_FORMAT_AYUV64:
schro_enc->video_format->luma_offset = 64 << 8;
schro_enc->video_format->luma_excursion = 219 << 8;
schro_enc->video_format->chroma_offset = 128 << 8;
schro_enc->video_format->chroma_excursion = 224 << 8;
break;
}
schro_video_format_set_std_colour_spec (schro_enc->video_format,
SCHRO_COLOUR_SPEC_HDTV);

View file

@ -72,6 +72,29 @@ gst_schro_buffer_wrap (GstBuffer * buf, GstVideoFormat format, int width,
frame =
schro_frame_new_from_data_AYUV (GST_BUFFER_DATA (buf), width, height);
break;
case GST_VIDEO_FORMAT_Y42B:
frame =
schro_frame_new_from_data_Y42B (GST_BUFFER_DATA (buf), width, height);
break;
case GST_VIDEO_FORMAT_Y444:
frame =
schro_frame_new_from_data_Y444 (GST_BUFFER_DATA (buf), width, height);
break;
case GST_VIDEO_FORMAT_v210:
frame =
schro_frame_new_from_data_v210 (GST_BUFFER_DATA (buf), width, height);
break;
case GST_VIDEO_FORMAT_v216:
frame =
schro_frame_new_from_data_v216 (GST_BUFFER_DATA (buf), width, height);
break;
#ifdef SCHRO_FRAME_FORMAT_AY64
/* Added in 1.0.11 */
case GST_VIDEO_FORMAT_AYUV64:
frame =
schro_frame_new_from_data_AY64 (GST_BUFFER_DATA (buf), width, height);
break;
#endif
#if 0
case GST_VIDEO_FORMAT_ARGB:
{

View file

@ -24,6 +24,12 @@
#include <gst/video/video.h>
#include <schroedinger/schro.h>
#ifdef SCHRO_FRAME_FORMAT_AY64
#define GST_SCHRO_YUV_LIST "{ I420, YV12, YUY2, UYVY, AYUV, Y42B, Y444, v216, v210, AY64 }"
#else
#define GST_SCHRO_YUV_LIST "{ I420, YV12, YUY2, UYVY, AYUV, Y42B, Y444 }"
#endif
SchroFrame *
gst_schro_buffer_wrap (GstBuffer *buf, GstVideoFormat format, int width,
int height);

View file

@ -169,6 +169,8 @@ gst_spc_dec_dispose (GObject * object)
}
spc_tag_free (&spc->tag_info);
GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
}
static GstFlowReturn

View file

@ -1174,6 +1174,9 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
gst_util_uint64_scale (frame->presentation_frame_number + 1,
GST_SECOND * state->fps_d, state->fps_n);
GST_LOG_OBJECT (base_video_encoder, "src ts: %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
if (ret != GST_FLOW_OK) {
GST_WARNING_OBJECT (encoder, "flow error %d", ret);

View file

@ -518,6 +518,7 @@ gst_h264_parse_hrd_parameters (GstH264HRDParams * hrd, NalReader * nr)
for (sched_sel_idx = 0; sched_sel_idx <= hrd->cpb_cnt_minus1; sched_sel_idx++) {
READ_UE (nr, hrd->bit_rate_value_minus1[sched_sel_idx]);
READ_UE (nr, hrd->cpb_size_value_minus1[sched_sel_idx]);
READ_UINT8 (nr, hrd->cbr_flag[sched_sel_idx], 1);
}
READ_UINT8 (nr, hrd->initial_cpb_removal_delay_length_minus1, 5);
@ -747,22 +748,26 @@ slice_parse_ref_pic_list_modification_1 (GstH264SliceHdr * slice,
NalReader * nr, guint list)
{
GstH264RefPicListModification *entries;
guint8 *ref_pic_list_modification_flag;
guint8 *ref_pic_list_modification_flag, *n_ref_pic_list_modification;
guint32 modification_of_pic_nums_idc;
guint i = 0;
if (list == 0) {
entries = slice->ref_pic_list_modification_l0;
ref_pic_list_modification_flag = &slice->ref_pic_list_modification_flag_l0;
n_ref_pic_list_modification = &slice->n_ref_pic_list_modification_l0;
} else {
entries = slice->ref_pic_list_modification_l1;
ref_pic_list_modification_flag = &slice->ref_pic_list_modification_flag_l1;
n_ref_pic_list_modification = &slice->n_ref_pic_list_modification_l1;
}
READ_UINT8 (nr, *ref_pic_list_modification_flag, 1);
if (*ref_pic_list_modification_flag) {
do {
while (1) {
READ_UE (nr, modification_of_pic_nums_idc);
if (modification_of_pic_nums_idc == 3)
break;
if (modification_of_pic_nums_idc == 0 ||
modification_of_pic_nums_idc == 1) {
READ_UE_ALLOWED (nr, entries[i].value.abs_diff_pic_num_minus1, 0,
@ -770,9 +775,10 @@ slice_parse_ref_pic_list_modification_1 (GstH264SliceHdr * slice,
} else if (modification_of_pic_nums_idc == 2) {
READ_UE (nr, entries[i].value.long_term_pic_num);
}
} while (modification_of_pic_nums_idc != 3);
entries[i++].modification_of_pic_nums_idc = modification_of_pic_nums_idc;
}
}
*n_ref_pic_list_modification = i;
return TRUE;
error:
@ -1050,6 +1056,8 @@ gst_h264_parse_clock_timestamp (GstH264ClockTimestamp * tim,
if (time_offset_length > 0)
READ_UINT32 (nr, tim->time_offset, time_offset_length);
return TRUE;
error:
GST_WARNING ("error parsing \"Clock timestamp\"");
return FALSE;

View file

@ -573,8 +573,10 @@ struct _GstH264SliceHdr
guint8 num_ref_idx_l1_active_minus1;
guint8 ref_pic_list_modification_flag_l0;
guint8 n_ref_pic_list_modification_l0;
GstH264RefPicListModification ref_pic_list_modification_l0[32];
guint8 ref_pic_list_modification_flag_l1;
guint8 n_ref_pic_list_modification_l1;
GstH264RefPicListModification ref_pic_list_modification_l1[32];
GstH264PredWeightTable pred_weight_table;

View file

@ -1132,7 +1132,7 @@ void
gst_base_video_encoder_set_latency (GstBaseVideoEncoder * base_video_encoder,
GstClockTime min_latency, GstClockTime max_latency)
{
g_return_if_fail (min_latency >= 0);
g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency));
g_return_if_fail (max_latency >= min_latency);
GST_OBJECT_LOCK (base_video_encoder);

View file

@ -5,8 +5,9 @@ libgstadpcmdec_la_SOURCES = adpcmdec.c
# flags used to compile this plugin
# add other _CFLAGS and _LIBS as needed
libgstadpcmdec_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS)
libgstadpcmdec_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS)
libgstadpcmdec_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS)
libgstadpcmdec_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-@GST_MAJORMINOR@ \
$(GST_LIBS)
libgstadpcmdec_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstadpcmdec_la_LIBTOOLFLAGS = --tag=disable-static

View file

@ -28,7 +28,7 @@
#endif
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/audio/gstaudiodecoder.h>
#define GST_TYPE_ADPCM_DEC \
(adpcmdec_get_type ())
@ -69,80 +69,29 @@ enum adpcm_layout
typedef struct _ADPCMDecClass
{
GstElementClass parent_class;
GstAudioDecoderClass parent_class;
} ADPCMDecClass;
typedef struct _ADPCMDec
{
GstElement parent;
GstPad *sinkpad;
GstPad *srcpad;
GstCaps *output_caps;
GstAudioDecoder parent;
enum adpcm_layout layout;
int rate;
int channels;
int blocksize;
gboolean is_setup;
GstClockTime timestamp;
GstClockTime base_timestamp;
guint64 out_samples;
GstAdapter *adapter;
} ADPCMDec;
GType adpcmdec_get_type (void);
GST_BOILERPLATE (ADPCMDec, adpcmdec, GstElement, GST_TYPE_ELEMENT);
static gboolean
adpcmdec_setup (ADPCMDec * dec)
{
dec->output_caps = gst_caps_new_simple ("audio/x-raw-int",
"rate", G_TYPE_INT, dec->rate,
"channels", G_TYPE_INT, dec->channels,
"width", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, G_BYTE_ORDER,
"signed", G_TYPE_BOOLEAN, TRUE, NULL);
if (dec->output_caps) {
gst_pad_set_caps (dec->srcpad, dec->output_caps);
}
dec->is_setup = TRUE;
dec->timestamp = GST_CLOCK_TIME_NONE;
dec->base_timestamp = GST_CLOCK_TIME_NONE;
dec->adapter = gst_adapter_new ();
dec->out_samples = 0;
return TRUE;
}
static void
adpcmdec_teardown (ADPCMDec * dec)
{
if (dec->output_caps) {
gst_caps_unref (dec->output_caps);
dec->output_caps = NULL;
}
if (dec->adapter) {
g_object_unref (dec->adapter);
dec->adapter = NULL;
}
dec->is_setup = FALSE;
}
GST_BOILERPLATE (ADPCMDec, adpcmdec, GstAudioDecoder, GST_TYPE_AUDIO_DECODER);
static gboolean
adpcmdec_sink_setcaps (GstPad * pad, GstCaps * caps)
adpcmdec_set_format (GstAudioDecoder * bdec, GstCaps * in_caps)
{
ADPCMDec *dec = (ADPCMDec *) gst_pad_get_parent (pad);
GstStructure *structure = gst_caps_get_structure (caps, 0);
ADPCMDec *dec = (ADPCMDec *) (bdec);
GstStructure *structure = gst_caps_get_structure (in_caps, 0);
const gchar *layout;
GstCaps *caps;
layout = gst_structure_get_string (structure, "layout");
if (!layout)
@ -163,9 +112,16 @@ adpcmdec_sink_setcaps (GstPad * pad, GstCaps * caps)
if (!gst_structure_get_int (structure, "channels", &dec->channels))
return FALSE;
if (dec->is_setup)
adpcmdec_teardown (dec);
gst_object_unref (dec);
caps = gst_caps_new_simple ("audio/x-raw-int",
"rate", G_TYPE_INT, dec->rate,
"channels", G_TYPE_INT, dec->channels,
"width", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, G_BYTE_ORDER,
"signed", G_TYPE_BOOLEAN, TRUE, NULL);
gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (bdec), caps);
gst_caps_unref (caps);
return TRUE;
}
@ -377,10 +333,10 @@ adpcmdec_decode_ima_block (ADPCMDec * dec, int n_samples, const guint8 * data,
return TRUE;
}
static GstFlowReturn
static GstBuffer *
adpcmdec_decode_block (ADPCMDec * dec, const guint8 * data, int blocksize)
{
gboolean res;
gboolean res = FALSE;
GstBuffer *outbuf = NULL;
int outsize;
int samples;
@ -390,7 +346,7 @@ adpcmdec_decode_block (ADPCMDec * dec, const guint8 * data, int blocksize)
give two initial sample values per channel. Then the remainder gives
two samples per byte */
if (blocksize < 7 * dec->channels)
return GST_FLOW_ERROR;
goto exit;
samples = (blocksize - 7 * dec->channels) * 2 + 2 * dec->channels;
outsize = 2 * samples;
outbuf = gst_buffer_new_and_alloc (outsize);
@ -401,7 +357,7 @@ adpcmdec_decode_block (ADPCMDec * dec, const guint8 * data, int blocksize)
/* Each block has a 4 byte header per channel, include an initial sample.
Then the remainder gives two samples per byte */
if (blocksize < 4 * dec->channels)
return GST_FLOW_ERROR;
goto exit;
samples = (blocksize - 4 * dec->channels) * 2 + dec->channels;
outsize = 2 * samples;
outbuf = gst_buffer_new_and_alloc (outsize);
@ -410,155 +366,114 @@ adpcmdec_decode_block (ADPCMDec * dec, const guint8 * data, int blocksize)
(gint16 *) (GST_BUFFER_DATA (outbuf)));
} else {
GST_WARNING_OBJECT (dec, "Unknown layout");
return GST_FLOW_ERROR;
}
if (!res) {
gst_buffer_unref (outbuf);
if (outbuf)
gst_buffer_unref (outbuf);
outbuf = NULL;
GST_WARNING_OBJECT (dec, "Decode of block failed");
return GST_FLOW_ERROR;
}
gst_buffer_set_caps (outbuf, dec->output_caps);
GST_BUFFER_TIMESTAMP (outbuf) = dec->timestamp;
dec->out_samples += samples / dec->channels;
dec->timestamp = dec->base_timestamp +
gst_util_uint64_scale_int (dec->out_samples, GST_SECOND, dec->rate);
GST_BUFFER_DURATION (outbuf) = dec->timestamp - GST_BUFFER_TIMESTAMP (outbuf);
return gst_pad_push (dec->srcpad, outbuf);
exit:
return outbuf;
}
static GstFlowReturn
adpcmdec_chain (GstPad * pad, GstBuffer * buf)
adpcmdec_parse (GstAudioDecoder * bdec, GstAdapter * adapter,
gint * offset, gint * length)
{
ADPCMDec *dec = (ADPCMDec *) gst_pad_get_parent (pad);
ADPCMDec *dec = (ADPCMDec *) (bdec);
guint size;
size = gst_adapter_available (adapter);
g_return_val_if_fail (size > 0, GST_FLOW_ERROR);
if (dec->blocksize < 0) {
/* No explicit blocksize; we just process one input buffer at a time */
*offset = 0;
*length = size;
} else {
if (size >= dec->blocksize) {
*offset = 0;
*length = dec->blocksize;
} else {
return GST_FLOW_UNEXPECTED;
}
}
return GST_FLOW_OK;
}
static GstFlowReturn
adpcmdec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buffer)
{
ADPCMDec *dec = (ADPCMDec *) (bdec);
GstFlowReturn ret = GST_FLOW_OK;
guint8 *data;
GstBuffer *databuf = NULL;
GstBuffer *outbuf = NULL;
if (!dec->is_setup)
adpcmdec_setup (dec);
/* no fancy draining */
if (G_UNLIKELY (!buffer))
return GST_FLOW_OK;
if (dec->base_timestamp == GST_CLOCK_TIME_NONE) {
dec->base_timestamp = GST_BUFFER_TIMESTAMP (buf);
if (dec->base_timestamp == GST_CLOCK_TIME_NONE)
dec->base_timestamp = 0;
dec->timestamp = dec->base_timestamp;
if (!dec->blocksize)
return GST_FLOW_NOT_NEGOTIATED;
data = GST_BUFFER_DATA (buffer);
outbuf = adpcmdec_decode_block (dec, data, dec->blocksize);
if (outbuf == NULL) {
GST_AUDIO_DECODER_ERROR (bdec, 1, STREAM, DECODE, (NULL),
("frame decode failed"), ret);
}
if (dec->blocksize > 0) {
gst_adapter_push (dec->adapter, buf);
while (gst_adapter_available (dec->adapter) >= dec->blocksize) {
databuf = gst_adapter_take_buffer (dec->adapter, dec->blocksize);
data = GST_BUFFER_DATA (databuf);
ret = adpcmdec_decode_block (dec, data, dec->blocksize);
/* Done with input data, free it */
gst_buffer_unref (databuf);
if (ret != GST_FLOW_OK)
goto done;
}
} else {
/* No explicit blocksize; we just process one input buffer at a time */
ret = adpcmdec_decode_block (dec, GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf));
gst_buffer_unref (buf);
}
done:
gst_object_unref (dec);
if (ret == GST_FLOW_OK)
ret = gst_audio_decoder_finish_frame (bdec, outbuf, 1);
return ret;
}
static gboolean
adpcmdec_sink_event (GstPad * pad, GstEvent * event)
adpcmdec_start (GstAudioDecoder * bdec)
{
ADPCMDec *dec = (ADPCMDec *) gst_pad_get_parent (pad);
gboolean res;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
dec->out_samples = 0;
dec->timestamp = GST_CLOCK_TIME_NONE;
dec->base_timestamp = GST_CLOCK_TIME_NONE;
gst_adapter_clear (dec->adapter);
/* Fall through */
default:
res = gst_pad_push_event (dec->srcpad, event);
break;
}
gst_object_unref (dec);
return res;
ADPCMDec *dec = (ADPCMDec *) bdec;
GST_DEBUG_OBJECT (dec, "start");
dec->blocksize = 0;
dec->rate = 0;
dec->channels = 0;
return TRUE;
}
static GstStateChangeReturn
adpcmdec_change_state (GstElement * element, GstStateChange transition)
static gboolean
adpcmdec_stop (GstAudioDecoder * dec)
{
GstStateChangeReturn ret;
ADPCMDec *dec = (ADPCMDec *) element;
GST_DEBUG_OBJECT (dec, "stop");
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
default:
break;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
adpcmdec_teardown (dec);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
break;
default:
break;
}
return ret;
}
static void
adpcmdec_dispose (GObject * obj)
{
G_OBJECT_CLASS (parent_class)->dispose (obj);
return TRUE;
}
static void
adpcmdec_init (ADPCMDec * dec, ADPCMDecClass * klass)
{
dec->sinkpad =
gst_pad_new_from_static_template (&adpcmdec_sink_template, "sink");
gst_pad_set_setcaps_function (dec->sinkpad,
GST_DEBUG_FUNCPTR (adpcmdec_sink_setcaps));
gst_pad_set_chain_function (dec->sinkpad, GST_DEBUG_FUNCPTR (adpcmdec_chain));
gst_pad_set_event_function (dec->sinkpad,
GST_DEBUG_FUNCPTR (adpcmdec_sink_event));
gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad);
dec->srcpad =
gst_pad_new_from_static_template (&adpcmdec_src_template, "src");
gst_element_add_pad (GST_ELEMENT (dec), dec->srcpad);
}
static void
adpcmdec_class_init (ADPCMDecClass * klass)
{
GObjectClass *gobjectclass = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass;
gobjectclass->dispose = adpcmdec_dispose;
gstelement_class->change_state = adpcmdec_change_state;
}
static void
GstAudioDecoderClass *base_class = (GstAudioDecoderClass *) klass;
base_class->start = GST_DEBUG_FUNCPTR (adpcmdec_start);
base_class->stop = GST_DEBUG_FUNCPTR (adpcmdec_stop);
base_class->set_format = GST_DEBUG_FUNCPTR (adpcmdec_set_format);
base_class->parse = GST_DEBUG_FUNCPTR (adpcmdec_parse);
base_class->handle_frame = GST_DEBUG_FUNCPTR (adpcmdec_handle_frame);
}
static void
adpcmdec_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

View file

@ -5,8 +5,9 @@ libgstadpcmenc_la_SOURCES = adpcmenc.c
# flags used to compile this plugin
# add other _CFLAGS and _LIBS as needed
libgstadpcmenc_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS)
libgstadpcmenc_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS)
libgstadpcmenc_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS)
libgstadpcmenc_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-@GST_MAJORMINOR@ \
$(GST_LIBS)
libgstadpcmenc_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstadpcmenc_la_LIBTOOLFLAGS = --tag=disable-static

View file

@ -28,7 +28,7 @@
#endif
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/audio/gstaudioencoder.h>
#define GST_TYPE_ADPCM_ENC \
(adpcmenc_get_type ())
@ -113,17 +113,12 @@ adpcmenc_layout_get_type (void)
typedef struct _ADPCMEncClass
{
GstElementClass parent_class;
GstAudioEncoderClass parent_class;
} ADPCMEncClass;
typedef struct _ADPCMEnc
{
GstElement parent;
GstPad *sinkpad;
GstPad *srcpad;
GstCaps *output_caps;
GstAudioEncoder parent;
enum adpcm_layout layout;
int rate;
@ -133,19 +128,11 @@ typedef struct _ADPCMEnc
guint8 step_index[2];
gboolean is_setup;
GstClockTime timestamp;
GstClockTime base_timestamp;
guint64 out_samples;
GstAdapter *adapter;
} ADPCMEnc;
GType adpcmenc_get_type (void);
GST_BOILERPLATE (ADPCMEnc, adpcmenc, GstElement, GST_TYPE_ELEMENT);
GST_BOILERPLATE (ADPCMEnc, adpcmenc, GstAudioEncoder, GST_TYPE_AUDIO_ENCODER);
static gboolean
adpcmenc_setup (ADPCMEnc * enc)
{
@ -153,6 +140,7 @@ adpcmenc_setup (ADPCMEnc * enc)
const int ADPCM_SAMPLES_PER_BYTE = 2;
guint64 sample_bytes;
const char *layout;
GstCaps *caps;
switch (enc->layout) {
case LAYOUT_ADPCM_DVI:
@ -168,21 +156,14 @@ adpcmenc_setup (ADPCMEnc * enc)
return FALSE;
}
enc->output_caps = gst_caps_new_simple ("audio/x-adpcm",
caps = gst_caps_new_simple ("audio/x-adpcm",
"rate", G_TYPE_INT, enc->rate,
"channels", G_TYPE_INT, enc->channels,
"layout", G_TYPE_STRING, layout,
"block_align", G_TYPE_INT, enc->blocksize, NULL);
if (enc->output_caps) {
gst_pad_set_caps (enc->srcpad, enc->output_caps);
}
enc->is_setup = TRUE;
enc->timestamp = GST_CLOCK_TIME_NONE;
enc->base_timestamp = GST_CLOCK_TIME_NONE;
enc->adapter = gst_adapter_new ();
enc->out_samples = 0;
gst_pad_set_caps (GST_AUDIO_ENCODER_SRC_PAD (enc), caps);
gst_caps_unref (caps);
/* Step index state is carried between blocks. */
enc->step_index[0] = 0;
@ -191,37 +172,21 @@ adpcmenc_setup (ADPCMEnc * enc)
return TRUE;
}
static void
adpcmenc_teardown (ADPCMEnc * enc)
{
if (enc->output_caps) {
gst_caps_unref (enc->output_caps);
enc->output_caps = NULL;
}
if (enc->adapter) {
g_object_unref (enc->adapter);
enc->adapter = NULL;
}
enc->is_setup = FALSE;
}
static gboolean
adpcmenc_sink_setcaps (GstPad * pad, GstCaps * caps)
adpcmenc_set_format (GstAudioEncoder * benc, GstAudioInfo * info)
{
ADPCMEnc *enc = (ADPCMEnc *) gst_pad_get_parent (pad);
GstStructure *structure = gst_caps_get_structure (caps, 0);
ADPCMEnc *enc = (ADPCMEnc *) (benc);
if (!gst_structure_get_int (structure, "rate", &enc->rate))
return FALSE;
if (!gst_structure_get_int (structure, "channels", &enc->channels))
enc->rate = GST_AUDIO_INFO_RATE (info);
enc->channels = GST_AUDIO_INFO_CHANNELS (info);
if (!adpcmenc_setup (enc))
return FALSE;
if (enc->is_setup) {
adpcmenc_teardown (enc);
}
adpcmenc_setup (enc);
gst_object_unref (enc);
/* report our needs to the base class */
gst_audio_encoder_set_frame_samples_min (benc, enc->samples_per_block);
gst_audio_encoder_set_frame_samples_max (benc, enc->samples_per_block);
gst_audio_encoder_set_frame_max (benc, 1);
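  /* Added note (not in the original source): with min == max ==
     samples_per_block and frame_max of 1, the GstAudioEncoder base class
     hands handle_frame() exactly one full block worth of input samples at a
     time; any smaller trailing remnant is dealt with in handle_frame(). */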
return TRUE;
}
@ -368,148 +333,86 @@ adpcmenc_encode_ima_block (ADPCMEnc * enc, const gint16 * samples,
return TRUE;
}
static GstFlowReturn
static GstBuffer *
adpcmenc_encode_block (ADPCMEnc * enc, const gint16 * samples, int blocksize)
{
gboolean res;
gboolean res = FALSE;
GstBuffer *outbuf = NULL;
if (enc->layout == LAYOUT_ADPCM_DVI) {
outbuf = gst_buffer_new_and_alloc (enc->blocksize);
res = adpcmenc_encode_ima_block (enc, samples, GST_BUFFER_DATA (outbuf));
} else {
/* should not happen afaics */
g_assert_not_reached ();
GST_WARNING_OBJECT (enc, "Unknown layout");
return GST_FLOW_ERROR;
res = FALSE;
}
if (!res) {
gst_buffer_unref (outbuf);
if (outbuf)
gst_buffer_unref (outbuf);
outbuf = NULL;
GST_WARNING_OBJECT (enc, "Encode of block failed");
return GST_FLOW_ERROR;
}
gst_buffer_set_caps (outbuf, enc->output_caps);
GST_BUFFER_TIMESTAMP (outbuf) = enc->timestamp;
enc->out_samples += enc->samples_per_block;
enc->timestamp = enc->base_timestamp +
gst_util_uint64_scale_int (enc->out_samples, GST_SECOND, enc->rate);
GST_BUFFER_DURATION (outbuf) = enc->timestamp - GST_BUFFER_TIMESTAMP (outbuf);
return gst_pad_push (enc->srcpad, outbuf);
return outbuf;
}
static GstFlowReturn
adpcmenc_chain (GstPad * pad, GstBuffer * buf)
adpcmenc_handle_frame (GstAudioEncoder * benc, GstBuffer * buffer)
{
ADPCMEnc *enc = (ADPCMEnc *) gst_pad_get_parent (pad);
ADPCMEnc *enc = (ADPCMEnc *) (benc);
GstFlowReturn ret = GST_FLOW_OK;
gint16 *samples;
GstBuffer *databuf = NULL;
GstBuffer *outbuf;
int input_bytes_per_block;
const int BYTES_PER_SAMPLE = 2;
if (enc->base_timestamp == GST_CLOCK_TIME_NONE) {
enc->base_timestamp = GST_BUFFER_TIMESTAMP (buf);
if (enc->base_timestamp == GST_CLOCK_TIME_NONE)
enc->base_timestamp = 0;
enc->timestamp = enc->base_timestamp;
/* we don't deal with squeezing remnants, so simply discard those */
if (G_UNLIKELY (buffer == NULL)) {
GST_DEBUG_OBJECT (benc, "no data");
goto done;
}
gst_adapter_push (enc->adapter, buf);
input_bytes_per_block =
enc->samples_per_block * BYTES_PER_SAMPLE * enc->channels;
while (gst_adapter_available (enc->adapter) >= input_bytes_per_block) {
databuf = gst_adapter_take_buffer (enc->adapter, input_bytes_per_block);
samples = (gint16 *) GST_BUFFER_DATA (databuf);
ret = adpcmenc_encode_block (enc, samples, enc->blocksize);
gst_buffer_unref (databuf);
if (ret != GST_FLOW_OK)
goto done;
if (G_UNLIKELY (GST_BUFFER_SIZE (buffer) < input_bytes_per_block)) {
GST_DEBUG_OBJECT (enc, "discarding trailing data %d",
GST_BUFFER_SIZE (buffer));
ret = gst_audio_encoder_finish_frame (benc, NULL, -1);
goto done;
}
samples = (gint16 *) GST_BUFFER_DATA (buffer);
outbuf = adpcmenc_encode_block (enc, samples, enc->blocksize);
ret = gst_audio_encoder_finish_frame (benc, outbuf, enc->samples_per_block);
done:
gst_object_unref (enc);
return ret;
}
static gboolean
adpcmenc_sink_event (GstPad * pad, GstEvent * event)
adpcmenc_start (GstAudioEncoder * enc)
{
ADPCMEnc *enc = (ADPCMEnc *) gst_pad_get_parent (pad);
gboolean res;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
enc->out_samples = 0;
enc->timestamp = GST_CLOCK_TIME_NONE;
enc->base_timestamp = GST_CLOCK_TIME_NONE;
gst_adapter_clear (enc->adapter);
/* Fall through */
default:
res = gst_pad_push_event (enc->srcpad, event);
break;
}
gst_object_unref (enc);
return res;
GST_DEBUG_OBJECT (enc, "start");
return TRUE;
}
static GstStateChangeReturn
adpcmenc_change_state (GstElement * element, GstStateChange transition)
static gboolean
adpcmenc_stop (GstAudioEncoder * enc)
{
GstStateChangeReturn ret;
ADPCMEnc *enc = (ADPCMEnc *) element;
GST_DEBUG_OBJECT (enc, "stop");
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
default:
break;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
adpcmenc_teardown (enc);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
break;
default:
break;
}
return ret;
}
static void
adpcmenc_dispose (GObject * obj)
{
G_OBJECT_CLASS (parent_class)->dispose (obj);
return TRUE;
}
static void
adpcmenc_init (ADPCMEnc * enc, ADPCMEncClass * klass)
{
enc->sinkpad =
gst_pad_new_from_static_template (&adpcmenc_sink_template, "sink");
gst_pad_set_setcaps_function (enc->sinkpad,
GST_DEBUG_FUNCPTR (adpcmenc_sink_setcaps));
gst_pad_set_chain_function (enc->sinkpad, GST_DEBUG_FUNCPTR (adpcmenc_chain));
gst_pad_set_event_function (enc->sinkpad,
GST_DEBUG_FUNCPTR (adpcmenc_sink_event));
gst_element_add_pad (GST_ELEMENT (enc), enc->sinkpad);
enc->srcpad =
gst_pad_new_from_static_template (&adpcmenc_src_template, "src");
gst_element_add_pad (GST_ELEMENT (enc), enc->srcpad);
/* Set defaults. */
enc->blocksize = DEFAULT_ADPCM_BLOCK_SIZE;
enc->layout = DEFAULT_ADPCM_LAYOUT;
@ -519,11 +422,16 @@ static void
adpcmenc_class_init (ADPCMEncClass * klass)
{
GObjectClass *gobjectclass = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioEncoderClass *base_class = (GstAudioEncoderClass *) klass;
gobjectclass->set_property = adpcmenc_set_property;
gobjectclass->get_property = adpcmenc_get_property;
base_class->start = GST_DEBUG_FUNCPTR (adpcmenc_start);
base_class->stop = GST_DEBUG_FUNCPTR (adpcmenc_stop);
base_class->set_format = GST_DEBUG_FUNCPTR (adpcmenc_set_format);
base_class->handle_frame = GST_DEBUG_FUNCPTR (adpcmenc_handle_frame);
g_object_class_install_property (gobjectclass, ARG_LAYOUT,
g_param_spec_enum ("layout", "Layout",
"Layout for output stream",
@ -537,10 +445,9 @@ adpcmenc_class_init (ADPCMEncClass * klass)
DEFAULT_ADPCM_BLOCK_SIZE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gobjectclass->dispose = adpcmenc_dispose;
gstelement_class->change_state = adpcmenc_change_state;
}
static void
}
static void
adpcmenc_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

View file

@ -227,6 +227,8 @@ gst_debug_spy_transform_ip (GstBaseTransform * transform, GstBuffer * buf)
"size", G_TYPE_UINT, GST_BUFFER_SIZE (buf),
"caps", GST_TYPE_CAPS, GST_BUFFER_CAPS (buf), NULL);
g_free (checksum);
message =
gst_message_new_element (GST_OBJECT (transform), message_structure);

View file

@ -297,22 +297,29 @@ gst_festival_chain (GstPad * pad, GstBuffer * buf)
GstFlowReturn ret = GST_FLOW_OK;
GstFestival *festival;
guint8 *p, *ep;
gint f;
FILE *fd;
festival = GST_FESTIVAL (GST_PAD_PARENT (pad));
GST_LOG_OBJECT (festival, "Got text buffer, %u bytes", GST_BUFFER_SIZE (buf));
fd = fdopen (dup (festival->info->server_fd), "wb");
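  /* Added note (not in the original source): the server fd is dup()ed so the
     stdio stream opened on it can be closed later without tearing down the
     persistent connection to the festival server. */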
f = dup (festival->info->server_fd);
if (f < 0)
goto fail_open;
fd = fdopen (f, "wb");
if (fd == NULL) {
close (f);
goto fail_open;
}
/* Copy text over to server, escaping any quotes */
fprintf (fd, "(Parameter.set 'Audio_Required_Rate 16000)\n");
fflush (fd);
GST_DEBUG_OBJECT (festival, "issued Parameter.set command");
if (read_response (festival) == FALSE) {
ret = GST_FLOW_ERROR;
fclose (fd);
goto out;
goto fail_read;
}
fprintf (fd, "(tts_textall \"");
@ -332,11 +339,25 @@ gst_festival_chain (GstPad * pad, GstBuffer * buf)
/* Read back info from server */
if (read_response (festival) == FALSE)
ret = GST_FLOW_ERROR;
goto fail_read;
out:
gst_buffer_unref (buf);
return ret;
/* ERRORS */
fail_open:
{
GST_ELEMENT_ERROR (festival, RESOURCE, OPEN_WRITE, (NULL), (NULL));
ret = GST_FLOW_ERROR;
goto out;
}
fail_read:
{
GST_ELEMENT_ERROR (festival, RESOURCE, READ, (NULL), (NULL));
ret = GST_FLOW_ERROR;
goto out;
}
}
static FT_Info *

View file

@ -5,6 +5,8 @@ noinst_PROGRAMS = gstintertest
libgstinter_la_SOURCES = \
gstinteraudiosink.c \
gstinteraudiosrc.c \
gstintersubsink.c \
gstintersubsrc.c \
gstintervideosink.c \
gstintervideosrc.c \
gstinter.c \
@ -13,6 +15,8 @@ libgstinter_la_SOURCES = \
noinst_HEADERS = \
gstinteraudiosink.h \
gstinteraudiosrc.h \
gstintersubsink.h \
gstintersubsrc.h \
gstintervideosink.h \
gstintervideosrc.h \
gstintersurface.h

View file

@ -1,5 +1,5 @@
/* GStreamer
* Copyright (C) 2011 David A. Schleef <ds@schleef.org>
* Copyright (C) 2011 David Schleef <ds@entropywave.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@ -23,6 +23,8 @@
#include "gstinteraudiosrc.h"
#include "gstinteraudiosink.h"
#include "gstintersubsrc.h"
#include "gstintersubsink.h"
#include "gstintervideosrc.h"
#include "gstintervideosink.h"
#include "gstintersurface.h"
@ -34,13 +36,15 @@ plugin_init (GstPlugin * plugin)
GST_TYPE_INTER_AUDIO_SRC);
gst_element_register (plugin, "interaudiosink", GST_RANK_NONE,
GST_TYPE_INTER_AUDIO_SINK);
gst_element_register (plugin, "intersubsrc", GST_RANK_NONE,
GST_TYPE_INTER_SUB_SRC);
gst_element_register (plugin, "intersubsink", GST_RANK_NONE,
GST_TYPE_INTER_SUB_SINK);
gst_element_register (plugin, "intervideosrc", GST_RANK_NONE,
GST_TYPE_INTER_VIDEO_SRC);
gst_element_register (plugin, "intervideosink", GST_RANK_NONE,
GST_TYPE_INTER_VIDEO_SINK);
gst_inter_surface_init ();
return TRUE;
}

View file

@ -77,7 +77,8 @@ static gboolean gst_inter_audio_sink_unlock_stop (GstBaseSink * sink);
enum
{
PROP_0
PROP_0,
PROP_CHANNEL
};
/* pad templates */
@ -150,6 +151,10 @@ gst_inter_audio_sink_class_init (GstInterAudioSinkClass * klass)
base_sink_class->unlock_stop =
GST_DEBUG_FUNCPTR (gst_inter_audio_sink_unlock_stop);
g_object_class_install_property (gobject_class, PROP_CHANNEL,
g_param_spec_string ("channel", "Channel",
"Channel name to match inter src and sink elements",
"default", G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
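/* Usage sketch (added; element names assumed, not from this commit): matching
 * channel names pair a sink and a src across two pipelines in the same
 * process, e.g.
 *   sender:   audiotestsrc ! interaudiosink channel=music
 *   receiver: interaudiosrc channel=music ! autoaudiosink
 */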
static void

View file

@ -79,7 +79,8 @@ gst_inter_audio_src_prepare_seek_segment (GstBaseSrc * src, GstEvent * seek,
enum
{
PROP_0
PROP_0,
PROP_CHANNEL
};
/* pad templates */
@ -158,6 +159,10 @@ gst_inter_audio_src_class_init (GstInterAudioSrcClass * klass)
base_src_class->prepare_seek_segment =
GST_DEBUG_FUNCPTR (gst_inter_audio_src_prepare_seek_segment);
g_object_class_install_property (gobject_class, PROP_CHANNEL,
g_param_spec_string ("channel", "Channel",
"Channel name to match inter src and sink elements",
"default", G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}

gst/inter/gstintersubsink.c Normal file
View file

@ -0,0 +1,325 @@
/* GStreamer
* Copyright (C) 2011 David Schleef <ds@entropywave.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
* Boston, MA 02110-1335, USA.
*/
/**
* SECTION:element-gstintersubsink
*
* The intersubsink element is a subtitle sink element. It is used
* in connection with an intersubsrc element in a different pipeline,
* similar to interaudiosink and interaudiosrc.
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch -v fakesrc ! intersubsink ! FIXME ! fakesink
* ]|
* FIXME Describe what the pipeline does.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstbasesink.h>
#include "gstintersubsink.h"
GST_DEBUG_CATEGORY_STATIC (gst_inter_sub_sink_debug_category);
#define GST_CAT_DEFAULT gst_inter_sub_sink_debug_category
/* prototypes */
static void gst_inter_sub_sink_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_inter_sub_sink_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_inter_sub_sink_dispose (GObject * object);
static void gst_inter_sub_sink_finalize (GObject * object);
static GstCaps *gst_inter_sub_sink_get_caps (GstBaseSink * sink);
static gboolean gst_inter_sub_sink_set_caps (GstBaseSink * sink,
GstCaps * caps);
static GstFlowReturn gst_inter_sub_sink_buffer_alloc (GstBaseSink * sink,
guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf);
static void gst_inter_sub_sink_get_times (GstBaseSink * sink,
GstBuffer * buffer, GstClockTime * start, GstClockTime * end);
static gboolean gst_inter_sub_sink_start (GstBaseSink * sink);
static gboolean gst_inter_sub_sink_stop (GstBaseSink * sink);
static gboolean gst_inter_sub_sink_unlock (GstBaseSink * sink);
static gboolean gst_inter_sub_sink_event (GstBaseSink * sink, GstEvent * event);
static GstFlowReturn
gst_inter_sub_sink_preroll (GstBaseSink * sink, GstBuffer * buffer);
static GstFlowReturn
gst_inter_sub_sink_render (GstBaseSink * sink, GstBuffer * buffer);
static GstStateChangeReturn gst_inter_sub_sink_async_play (GstBaseSink * sink);
static gboolean gst_inter_sub_sink_activate_pull (GstBaseSink * sink,
gboolean active);
static gboolean gst_inter_sub_sink_unlock_stop (GstBaseSink * sink);
enum
{
PROP_0
};
/* pad templates */
static GstStaticPadTemplate gst_inter_sub_sink_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("text/plain")
);
/* class initialization */
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_inter_sub_sink_debug_category, "intersubsink", 0, \
"debug category for intersubsink element");
GST_BOILERPLATE_FULL (GstInterSubSink, gst_inter_sub_sink, GstBaseSink,
GST_TYPE_BASE_SINK, DEBUG_INIT);
static void
gst_inter_sub_sink_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_inter_sub_sink_sink_template));
gst_element_class_set_details_simple (element_class, "FIXME Long name",
"Generic", "FIXME Description", "FIXME <fixme@example.com>");
}
static void
gst_inter_sub_sink_class_init (GstInterSubSinkClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstBaseSinkClass *base_sink_class = GST_BASE_SINK_CLASS (klass);
gobject_class->set_property = gst_inter_sub_sink_set_property;
gobject_class->get_property = gst_inter_sub_sink_get_property;
gobject_class->dispose = gst_inter_sub_sink_dispose;
gobject_class->finalize = gst_inter_sub_sink_finalize;
base_sink_class->get_caps = GST_DEBUG_FUNCPTR (gst_inter_sub_sink_get_caps);
base_sink_class->set_caps = GST_DEBUG_FUNCPTR (gst_inter_sub_sink_set_caps);
if (0)
base_sink_class->buffer_alloc =
GST_DEBUG_FUNCPTR (gst_inter_sub_sink_buffer_alloc);
base_sink_class->get_times = GST_DEBUG_FUNCPTR (gst_inter_sub_sink_get_times);
base_sink_class->start = GST_DEBUG_FUNCPTR (gst_inter_sub_sink_start);
base_sink_class->stop = GST_DEBUG_FUNCPTR (gst_inter_sub_sink_stop);
base_sink_class->unlock = GST_DEBUG_FUNCPTR (gst_inter_sub_sink_unlock);
if (0)
base_sink_class->event = GST_DEBUG_FUNCPTR (gst_inter_sub_sink_event);
base_sink_class->preroll = GST_DEBUG_FUNCPTR (gst_inter_sub_sink_preroll);
base_sink_class->render = GST_DEBUG_FUNCPTR (gst_inter_sub_sink_render);
if (0)
base_sink_class->async_play =
GST_DEBUG_FUNCPTR (gst_inter_sub_sink_async_play);
if (0)
base_sink_class->activate_pull =
GST_DEBUG_FUNCPTR (gst_inter_sub_sink_activate_pull);
base_sink_class->unlock_stop =
GST_DEBUG_FUNCPTR (gst_inter_sub_sink_unlock_stop);
}
static void
gst_inter_sub_sink_init (GstInterSubSink * intersubsink,
GstInterSubSinkClass * intersubsink_class)
{
intersubsink->surface = gst_inter_surface_get ("default");
intersubsink->fps_n = 1;
intersubsink->fps_d = 1;
}
void
gst_inter_sub_sink_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
/* GstInterSubSink *intersubsink = GST_INTER_SUB_SINK (object); */
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_inter_sub_sink_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
/* GstInterSubSink *intersubsink = GST_INTER_SUB_SINK (object); */
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_inter_sub_sink_dispose (GObject * object)
{
/* GstInterSubSink *intersubsink = GST_INTER_SUB_SINK (object); */
/* clean up as possible. may be called multiple times */
G_OBJECT_CLASS (parent_class)->dispose (object);
}
void
gst_inter_sub_sink_finalize (GObject * object)
{
/* GstInterSubSink *intersubsink = GST_INTER_SUB_SINK (object); */
/* clean up object here */
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static GstCaps *
gst_inter_sub_sink_get_caps (GstBaseSink * sink)
{
return NULL;
}
static gboolean
gst_inter_sub_sink_set_caps (GstBaseSink * sink, GstCaps * caps)
{
return FALSE;
}
static GstFlowReturn
gst_inter_sub_sink_buffer_alloc (GstBaseSink * sink, guint64 offset, guint size,
GstCaps * caps, GstBuffer ** buf)
{
return GST_FLOW_ERROR;
}
static void
gst_inter_sub_sink_get_times (GstBaseSink * sink, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end)
{
GstInterSubSink *intersubsink = GST_INTER_SUB_SINK (sink);
if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
*start = GST_BUFFER_TIMESTAMP (buffer);
if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
*end = *start + GST_BUFFER_DURATION (buffer);
} else {
if (intersubsink->fps_n > 0) {
*end = *start +
gst_util_uint64_scale_int (GST_SECOND, intersubsink->fps_d,
intersubsink->fps_n);
}
}
}
}
static gboolean
gst_inter_sub_sink_start (GstBaseSink * sink)
{
return TRUE;
}
static gboolean
gst_inter_sub_sink_stop (GstBaseSink * sink)
{
GstInterSubSink *intersubsink = GST_INTER_SUB_SINK (sink);
g_mutex_lock (intersubsink->surface->mutex);
if (intersubsink->surface->sub_buffer) {
gst_buffer_unref (intersubsink->surface->sub_buffer);
}
intersubsink->surface->sub_buffer = NULL;
g_mutex_unlock (intersubsink->surface->mutex);
return TRUE;
}
static gboolean
gst_inter_sub_sink_unlock (GstBaseSink * sink)
{
return TRUE;
}
static gboolean
gst_inter_sub_sink_event (GstBaseSink * sink, GstEvent * event)
{
return TRUE;
}
static GstFlowReturn
gst_inter_sub_sink_preroll (GstBaseSink * sink, GstBuffer * buffer)
{
return GST_FLOW_OK;
}
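/* Added note (not in the original source): render only stashes a ref to the
   most recent subtitle buffer in the shared surface; an intersubsrc in
   another pipeline then picks it up from there. */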
static GstFlowReturn
gst_inter_sub_sink_render (GstBaseSink * sink, GstBuffer * buffer)
{
GstInterSubSink *intersubsink = GST_INTER_SUB_SINK (sink);
g_mutex_lock (intersubsink->surface->mutex);
if (intersubsink->surface->sub_buffer) {
gst_buffer_unref (intersubsink->surface->sub_buffer);
}
intersubsink->surface->sub_buffer = gst_buffer_ref (buffer);
//intersubsink->surface->sub_buffer_count = 0;
g_mutex_unlock (intersubsink->surface->mutex);
return GST_FLOW_OK;
}
static GstStateChangeReturn
gst_inter_sub_sink_async_play (GstBaseSink * sink)
{
return GST_STATE_CHANGE_SUCCESS;
}
static gboolean
gst_inter_sub_sink_activate_pull (GstBaseSink * sink, gboolean active)
{
return TRUE;
}
static gboolean
gst_inter_sub_sink_unlock_stop (GstBaseSink * sink)
{
return TRUE;
}

View file

@ -0,0 +1,57 @@
/* GStreamer
* Copyright (C) 2011 David Schleef <ds@entropywave.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_INTER_SUB_SINK_H_
#define _GST_INTER_SUB_SINK_H_
#include <gst/base/gstbasesink.h>
#include "gstintersurface.h"
G_BEGIN_DECLS
#define GST_TYPE_INTER_SUB_SINK (gst_inter_sub_sink_get_type())
#define GST_INTER_SUB_SINK(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_INTER_SUB_SINK,GstInterSubSink))
#define GST_INTER_SUB_SINK_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_INTER_SUB_SINK,GstInterSubSinkClass))
#define GST_IS_INTER_SUB_SINK(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_INTER_SUB_SINK))
#define GST_IS_INTER_SUB_SINK_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_INTER_SUB_SINK))
typedef struct _GstInterSubSink GstInterSubSink;
typedef struct _GstInterSubSinkClass GstInterSubSinkClass;
struct _GstInterSubSink
{
GstBaseSink base_intersubsink;
GstPad *sinkpad;
GstInterSurface *surface;
int fps_n;
int fps_d;
};
struct _GstInterSubSinkClass
{
GstBaseSinkClass base_intersubsink_class;
};
GType gst_inter_sub_sink_get_type (void);
G_END_DECLS
#endif

gst/inter/gstintersubsrc.c Normal file
View file

@ -0,0 +1,455 @@
/* GStreamer
* Copyright (C) 2011 David Schleef <ds@entropywave.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
* Boston, MA 02110-1335, USA.
*/
/**
* SECTION:element-gstintersubsrc
*
* The intersubsrc element is a subtitle source element. It is used
* in connection with a intersubsink element in a different pipeline,
* similar to interaudiosink and interaudiosrc.
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch -v intersubsrc ! kateenc ! oggmux ! filesink location=out.ogv
* ]|
*
* The intersubsrc element cannot be used effectively with gst-launch,
* as it requires a second pipeline in the application to send subtitles.
* </refsect2>
*/
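/* Sketch (added; assumed usage, not from this commit): the sending side runs
 * a second pipeline in the same process that ends in intersubsink, e.g.
 *   gst_parse_launch ("appsrc caps=text/plain ! intersubsink", NULL);
 * In the code below both elements share the hard-coded "default" surface. */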
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstbasesrc.h>
#include "gstintersubsrc.h"
GST_DEBUG_CATEGORY_STATIC (gst_inter_sub_src_debug_category);
#define GST_CAT_DEFAULT gst_inter_sub_src_debug_category
/* prototypes */
static void gst_inter_sub_src_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_inter_sub_src_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_inter_sub_src_dispose (GObject * object);
static void gst_inter_sub_src_finalize (GObject * object);
static GstCaps *gst_inter_sub_src_get_caps (GstBaseSrc * src);
static gboolean gst_inter_sub_src_set_caps (GstBaseSrc * src, GstCaps * caps);
static gboolean gst_inter_sub_src_negotiate (GstBaseSrc * src);
static gboolean gst_inter_sub_src_newsegment (GstBaseSrc * src);
static gboolean gst_inter_sub_src_start (GstBaseSrc * src);
static gboolean gst_inter_sub_src_stop (GstBaseSrc * src);
static void
gst_inter_sub_src_get_times (GstBaseSrc * src, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end);
static gboolean gst_inter_sub_src_is_seekable (GstBaseSrc * src);
static gboolean gst_inter_sub_src_unlock (GstBaseSrc * src);
static gboolean gst_inter_sub_src_event (GstBaseSrc * src, GstEvent * event);
static GstFlowReturn
gst_inter_sub_src_create (GstBaseSrc * src, guint64 offset, guint size,
GstBuffer ** buf);
static gboolean gst_inter_sub_src_do_seek (GstBaseSrc * src,
GstSegment * segment);
static gboolean gst_inter_sub_src_query (GstBaseSrc * src, GstQuery * query);
static gboolean gst_inter_sub_src_check_get_range (GstBaseSrc * src);
static void gst_inter_sub_src_fixate (GstBaseSrc * src, GstCaps * caps);
static gboolean gst_inter_sub_src_unlock_stop (GstBaseSrc * src);
static gboolean
gst_inter_sub_src_prepare_seek_segment (GstBaseSrc * src, GstEvent * seek,
GstSegment * segment);
enum
{
PROP_0
};
/* pad templates */
static GstStaticPadTemplate gst_inter_sub_src_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("application/unknown")
);
/* class initialization */
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_inter_sub_src_debug_category, "intersubsrc", 0, \
"debug category for intersubsrc element");
GST_BOILERPLATE_FULL (GstInterSubSrc, gst_inter_sub_src, GstBaseSrc,
GST_TYPE_BASE_SRC, DEBUG_INIT);
static void
gst_inter_sub_src_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_inter_sub_src_src_template));
gst_element_class_set_details_simple (element_class,
"Inter-pipeline subtitle source",
"Source/Subtitle", "Inter-pipeline subtitle source",
"David Schleef <ds@entropywave.com>");
}
static void
gst_inter_sub_src_class_init (GstInterSubSrcClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstBaseSrcClass *base_src_class = GST_BASE_SRC_CLASS (klass);
gobject_class->set_property = gst_inter_sub_src_set_property;
gobject_class->get_property = gst_inter_sub_src_get_property;
gobject_class->dispose = gst_inter_sub_src_dispose;
gobject_class->finalize = gst_inter_sub_src_finalize;
if (0)
base_src_class->get_caps = GST_DEBUG_FUNCPTR (gst_inter_sub_src_get_caps);
base_src_class->set_caps = GST_DEBUG_FUNCPTR (gst_inter_sub_src_set_caps);
if (0)
base_src_class->negotiate = GST_DEBUG_FUNCPTR (gst_inter_sub_src_negotiate);
if (0)
base_src_class->newsegment =
GST_DEBUG_FUNCPTR (gst_inter_sub_src_newsegment);
base_src_class->start = GST_DEBUG_FUNCPTR (gst_inter_sub_src_start);
base_src_class->stop = GST_DEBUG_FUNCPTR (gst_inter_sub_src_stop);
base_src_class->get_times = GST_DEBUG_FUNCPTR (gst_inter_sub_src_get_times);
if (0)
base_src_class->is_seekable =
GST_DEBUG_FUNCPTR (gst_inter_sub_src_is_seekable);
base_src_class->unlock = GST_DEBUG_FUNCPTR (gst_inter_sub_src_unlock);
base_src_class->event = GST_DEBUG_FUNCPTR (gst_inter_sub_src_event);
base_src_class->create = GST_DEBUG_FUNCPTR (gst_inter_sub_src_create);
if (0)
base_src_class->do_seek = GST_DEBUG_FUNCPTR (gst_inter_sub_src_do_seek);
base_src_class->query = GST_DEBUG_FUNCPTR (gst_inter_sub_src_query);
if (0)
base_src_class->check_get_range =
GST_DEBUG_FUNCPTR (gst_inter_sub_src_check_get_range);
base_src_class->fixate = GST_DEBUG_FUNCPTR (gst_inter_sub_src_fixate);
if (0)
base_src_class->unlock_stop =
GST_DEBUG_FUNCPTR (gst_inter_sub_src_unlock_stop);
if (0)
base_src_class->prepare_seek_segment =
GST_DEBUG_FUNCPTR (gst_inter_sub_src_prepare_seek_segment);
}
static void
gst_inter_sub_src_init (GstInterSubSrc * intersubsrc,
GstInterSubSrcClass * intersubsrc_class)
{
intersubsrc->srcpad =
gst_pad_new_from_static_template (&gst_inter_sub_src_src_template, "src");
gst_base_src_set_format (GST_BASE_SRC (intersubsrc), GST_FORMAT_TIME);
gst_base_src_set_live (GST_BASE_SRC (intersubsrc), TRUE);
intersubsrc->surface = gst_inter_surface_get ("default");
}
void
gst_inter_sub_src_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
/* GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (object); */
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_inter_sub_src_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
/* GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (object); */
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_inter_sub_src_dispose (GObject * object)
{
/* GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (object); */
/* clean up as possible. may be called multiple times */
G_OBJECT_CLASS (parent_class)->dispose (object);
}
void
gst_inter_sub_src_finalize (GObject * object)
{
/* GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (object); */
/* clean up object here */
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static GstCaps *
gst_inter_sub_src_get_caps (GstBaseSrc * src)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "get_caps");
return NULL;
}
static gboolean
gst_inter_sub_src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "set_caps");
return TRUE;
}
static gboolean
gst_inter_sub_src_negotiate (GstBaseSrc * src)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "negotiate");
return TRUE;
}
static gboolean
gst_inter_sub_src_newsegment (GstBaseSrc * src)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "newsegment");
return TRUE;
}
static gboolean
gst_inter_sub_src_start (GstBaseSrc * src)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "start");
return TRUE;
}
static gboolean
gst_inter_sub_src_stop (GstBaseSrc * src)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "stop");
return TRUE;
}
static void
gst_inter_sub_src_get_times (GstBaseSrc * src, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "get_times");
/* for live sources, sync on the timestamp of the buffer */
if (gst_base_src_is_live (src)) {
GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
/* get duration to calculate end time */
GstClockTime duration = GST_BUFFER_DURATION (buffer);
if (GST_CLOCK_TIME_IS_VALID (duration)) {
*end = timestamp + duration;
}
*start = timestamp;
}
} else {
*start = -1;
*end = -1;
}
}
static gboolean
gst_inter_sub_src_is_seekable (GstBaseSrc * src)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "is_seekable");
return FALSE;
}
static gboolean
gst_inter_sub_src_unlock (GstBaseSrc * src)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "unlock");
return TRUE;
}
static gboolean
gst_inter_sub_src_event (GstBaseSrc * src, GstEvent * event)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "event");
return TRUE;
}
static GstFlowReturn
gst_inter_sub_src_create (GstBaseSrc * src, guint64 offset, guint size,
GstBuffer ** buf)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GstBuffer *buffer;
GST_DEBUG_OBJECT (intersubsrc, "create");
buffer = NULL;
g_mutex_lock (intersubsrc->surface->mutex);
if (intersubsrc->surface->sub_buffer) {
buffer = gst_buffer_ref (intersubsrc->surface->sub_buffer);
//intersubsrc->surface->sub_buffer_count++;
//if (intersubsrc->surface->sub_buffer_count >= 30) {
gst_buffer_unref (intersubsrc->surface->sub_buffer);
intersubsrc->surface->sub_buffer = NULL;
//}
}
g_mutex_unlock (intersubsrc->surface->mutex);
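  /* Added note (not in the original source): if no subtitle text is pending,
     push a 1-byte placeholder buffer so the source keeps producing
     timestamped output at its configured rate. */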
if (buffer == NULL) {
guint8 *data;
buffer = gst_buffer_new_and_alloc (1);
data = GST_BUFFER_DATA (buffer);
data[0] = 0;
}
buffer = gst_buffer_make_metadata_writable (buffer);
GST_BUFFER_TIMESTAMP (buffer) =
gst_util_uint64_scale_int (GST_SECOND, intersubsrc->n_frames,
intersubsrc->rate);
GST_DEBUG_OBJECT (intersubsrc, "create ts %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
GST_BUFFER_DURATION (buffer) =
gst_util_uint64_scale_int (GST_SECOND, (intersubsrc->n_frames + 1),
intersubsrc->rate) - GST_BUFFER_TIMESTAMP (buffer);
GST_BUFFER_OFFSET (buffer) = intersubsrc->n_frames;
GST_BUFFER_OFFSET_END (buffer) = -1;
GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
if (intersubsrc->n_frames == 0) {
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
}
gst_buffer_set_caps (buffer, GST_PAD_CAPS (GST_BASE_SRC_PAD (intersubsrc)));
intersubsrc->n_frames++;
*buf = buffer;
return GST_FLOW_OK;
}
static gboolean
gst_inter_sub_src_do_seek (GstBaseSrc * src, GstSegment * segment)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "do_seek");
return FALSE;
}
static gboolean
gst_inter_sub_src_query (GstBaseSrc * src, GstQuery * query)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "query");
return TRUE;
}
static gboolean
gst_inter_sub_src_check_get_range (GstBaseSrc * src)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "get_range");
return FALSE;
}
static void
gst_inter_sub_src_fixate (GstBaseSrc * src, GstCaps * caps)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "fixate");
}
static gboolean
gst_inter_sub_src_unlock_stop (GstBaseSrc * src)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "stop");
return TRUE;
}
static gboolean
gst_inter_sub_src_prepare_seek_segment (GstBaseSrc * src, GstEvent * seek,
GstSegment * segment)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "seek_segment");
return FALSE;
}

View file

@ -0,0 +1,57 @@
/* GStreamer
* Copyright (C) 2011 David Schleef <ds@entropywave.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_INTER_SUB_SRC_H_
#define _GST_INTER_SUB_SRC_H_
#include <gst/base/gstbasesrc.h>
#include "gstintersurface.h"
G_BEGIN_DECLS
#define GST_TYPE_INTER_SUB_SRC (gst_inter_sub_src_get_type())
#define GST_INTER_SUB_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_INTER_SUB_SRC,GstInterSubSrc))
#define GST_INTER_SUB_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_INTER_SUB_SRC,GstInterSubSrcClass))
#define GST_IS_INTER_SUB_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_INTER_SUB_SRC))
#define GST_IS_INTER_SUB_SRC_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_INTER_SUB_SRC))
typedef struct _GstInterSubSrc GstInterSubSrc;
typedef struct _GstInterSubSrcClass GstInterSubSrcClass;
struct _GstInterSubSrc
{
GstBaseSrc base_intersubsrc;
GstPad *srcpad;
GstInterSurface *surface;
int rate;
int n_frames;
};
struct _GstInterSubSrcClass
{
GstBaseSrcClass base_intersubsrc_class;
};
GType gst_inter_sub_src_get_type (void);
G_END_DECLS
#endif

View file

@ -21,22 +21,43 @@
#include "config.h"
#endif
#include <string.h>
#include "gstintersurface.h"
static GstInterSurface *surface;
static GList *list;
static GStaticMutex mutex = G_STATIC_MUTEX_INIT;
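/* Added note (not in the original source): returns the surface registered
   under the given name, creating and registering a new one when none exists
   yet; the static mutex guards the global list. */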
GstInterSurface *
gst_inter_surface_get (const char *name)
{
return surface;
GList *g;
GstInterSurface *surface;
g_static_mutex_lock (&mutex);
for (g = list; g; g = g_list_next (g)) {
surface = (GstInterSurface *) g->data;
if (strcmp (name, surface->name) == 0) {
g_static_mutex_unlock (&mutex);
return surface;
}
}
surface = g_malloc0 (sizeof (GstInterSurface));
surface->name = g_strdup (name);
surface->mutex = g_mutex_new ();
surface->audio_adapter = gst_adapter_new ();
list = g_list_append (list, surface);
g_static_mutex_unlock (&mutex);
return surface;
}
void
gst_inter_surface_init (void)
gst_inter_surface_unref (GstInterSurface * surface)
{
surface = g_malloc0 (sizeof (GstInterSurface));
surface->mutex = g_mutex_new ();
surface->audio_adapter = gst_adapter_new ();
}

View file

@ -30,6 +30,7 @@ typedef struct _GstInterSurface GstInterSurface;
struct _GstInterSurface
{
GMutex *mutex;
char *name;
/* video */
GstVideoFormat format;
@ -45,12 +46,13 @@ struct _GstInterSurface
int n_channels;
GstBuffer *video_buffer;
GstBuffer *sub_buffer;
GstAdapter *audio_adapter;
};
GstInterSurface * gst_inter_surface_get (const char *name);
void gst_inter_surface_init (void);
void gst_inter_surface_unref (GstInterSurface *surface);
G_END_DECLS

View file

@ -76,7 +76,8 @@ static gboolean gst_inter_video_sink_unlock_stop (GstBaseSink * sink);
enum
{
PROP_0
PROP_0,
PROP_CHANNEL
};
/* pad templates */
@ -144,6 +145,10 @@ gst_inter_video_sink_class_init (GstInterVideoSinkClass * klass)
base_sink_class->unlock_stop =
GST_DEBUG_FUNCPTR (gst_inter_video_sink_unlock_stop);
g_object_class_install_property (gobject_class, PROP_CHANNEL,
g_param_spec_string ("channel", "Channel",
"Channel name to match inter src and sink elements",
"default", G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
static void
@ -151,15 +156,21 @@ gst_inter_video_sink_init (GstInterVideoSink * intervideosink,
GstInterVideoSinkClass * intervideosink_class)
{
intervideosink->surface = gst_inter_surface_get ("default");
intervideosink->channel = g_strdup ("default");
}
void
gst_inter_video_sink_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
/* GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (object); */
GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (object);
switch (property_id) {
case PROP_CHANNEL:
g_free (intervideosink->channel);
intervideosink->channel = g_value_dup_string (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
@ -170,9 +181,12 @@ void
gst_inter_video_sink_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
/* GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (object); */
GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (object);
switch (property_id) {
case PROP_CHANNEL:
g_value_set_string (value, intervideosink->channel);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
@ -192,9 +206,10 @@ gst_inter_video_sink_dispose (GObject * object)
void
gst_inter_video_sink_finalize (GObject * object)
{
/* GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (object); */
GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (object);
/* clean up object here */
g_free (intervideosink->channel);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
@ -248,6 +263,9 @@ gst_inter_video_sink_get_times (GstBaseSink * sink, GstBuffer * buffer,
static gboolean
gst_inter_video_sink_start (GstBaseSink * sink)
{
GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (sink);
intervideosink->surface = gst_inter_surface_get (intervideosink->channel);
return TRUE;
}
@ -264,6 +282,9 @@ gst_inter_video_sink_stop (GstBaseSink * sink)
intervideosink->surface->video_buffer = NULL;
g_mutex_unlock (intervideosink->surface->mutex);
gst_inter_surface_unref (intervideosink->surface);
intervideosink->surface = NULL;
return TRUE;
}

View file

@ -39,6 +39,7 @@ struct _GstInterVideoSink
GstBaseSink base_intervideosink;
GstInterSurface *surface;
char *channel;
int fps_n;
int fps_d;

View file

@ -80,7 +80,8 @@ gst_inter_video_src_prepare_seek_segment (GstBaseSrc * src, GstEvent * seek,
enum
{
PROP_0
PROP_0,
PROP_CHANNEL
};
/* pad templates */
@ -156,6 +157,10 @@ gst_inter_video_src_class_init (GstInterVideoSrcClass * klass)
base_src_class->prepare_seek_segment =
GST_DEBUG_FUNCPTR (gst_inter_video_src_prepare_seek_segment);
g_object_class_install_property (gobject_class, PROP_CHANNEL,
g_param_spec_string ("channel", "Channel",
"Channel name to match inter src and sink elements",
"default", G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
@ -166,16 +171,20 @@ gst_inter_video_src_init (GstInterVideoSrc * intervideosrc,
gst_base_src_set_format (GST_BASE_SRC (intervideosrc), GST_FORMAT_TIME);
gst_base_src_set_live (GST_BASE_SRC (intervideosrc), TRUE);
intervideosrc->surface = gst_inter_surface_get ("default");
intervideosrc->channel = g_strdup ("default");
}
void
gst_inter_video_src_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
/* GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (object); */
GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (object);
switch (property_id) {
case PROP_CHANNEL:
g_free (intervideosrc->channel);
intervideosrc->channel = g_value_dup_string (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
@ -186,9 +195,12 @@ void
gst_inter_video_src_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
/* GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (object); */
GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (object);
switch (property_id) {
case PROP_CHANNEL:
g_value_set_string (value, intervideosrc->channel);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
@ -208,9 +220,10 @@ gst_inter_video_src_dispose (GObject * object)
void
gst_inter_video_src_finalize (GObject * object)
{
/* GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (object); */
GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (object);
/* clean up object here */
g_free (intervideosrc->channel);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
@ -279,6 +292,8 @@ gst_inter_video_src_start (GstBaseSrc * src)
GST_DEBUG_OBJECT (intervideosrc, "start");
intervideosrc->surface = gst_inter_surface_get (intervideosrc->channel);
return TRUE;
}
@ -289,6 +304,9 @@ gst_inter_video_src_stop (GstBaseSrc * src)
GST_DEBUG_OBJECT (intervideosrc, "stop");
gst_inter_surface_unref (intervideosrc->surface);
intervideosrc->surface = NULL;
return TRUE;
}
@ -391,15 +409,6 @@ gst_inter_video_src_create (GstBaseSrc * src, guint64 offset, guint size,
intervideosrc->width) *
gst_video_format_get_component_height (intervideosrc->format, 1,
intervideosrc->height));
#if 0
{
int i;
for (i = 0; i < 10000; i++) {
data[i] = g_random_int () & 0xff;
}
}
#endif
}
buffer = gst_buffer_make_metadata_writable (buffer);

View file

@ -41,6 +41,8 @@ struct _GstInterVideoSrc
GstInterSurface *surface;
char *channel;
GstVideoFormat format;
int fps_n;
int fps_d;

View file

@ -122,6 +122,8 @@ mpegts_pmt_stream_info_finalize (GObject * object)
g_value_array_free (info->languages);
g_value_array_free (info->descriptors);
GST_CALL_PARENT (G_OBJECT_CLASS, finalize, (object));
}
MpegTsPmtStreamInfo *

View file

@ -60,6 +60,8 @@
#define SEGMENT_THRESHOLD (300*GST_MSECOND)
#define VIDEO_SEGMENT_THRESHOLD (500*GST_MSECOND)
#define DURATION_SCAN_LIMIT 4 * 1024 * 1024
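/* Added note (not in the original source): forward/backward timestamp scans
   used for duration estimation stop after this many bytes (4 MiB here) so
   that a scan cannot end up walking the whole stream. */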
typedef enum
{
SCAN_SCR,
@ -154,9 +156,9 @@ static GstStateChangeReturn gst_flups_demux_change_state (GstElement * element,
GstStateChange transition);
static inline gboolean gst_flups_demux_scan_forward_ts (GstFluPSDemux * demux,
guint64 * pos, SCAN_MODE mode, guint64 * rts);
guint64 * pos, SCAN_MODE mode, guint64 * rts, gint limit);
static inline gboolean gst_flups_demux_scan_backward_ts (GstFluPSDemux * demux,
guint64 * pos, SCAN_MODE mode, guint64 * rts);
guint64 * pos, SCAN_MODE mode, guint64 * rts, gint limit);
static inline void gst_flups_demux_send_segment_updates (GstFluPSDemux * demux,
GstClockTime new_time);
@ -399,8 +401,13 @@ gst_flups_demux_create_stream (GstFluPSDemux * demux, gint id, gint stream_type)
break;
}
if (name == NULL || template == NULL || caps == NULL)
return NULL;
if (name == NULL || template == NULL || caps == NULL) {
if (name)
g_free (name);
if (caps)
gst_caps_unref (caps);
return FALSE;
}
stream = g_new0 (GstFluPSStream, 1);
stream->id = id;
@ -1046,19 +1053,22 @@ gst_flups_demux_do_seek (GstFluPSDemux * demux, GstSegment * seeksegment)
MIN (gst_util_uint64_scale (scr - demux->first_scr, scr_rate_n,
scr_rate_d), demux->sink_segment.stop);
found = gst_flups_demux_scan_forward_ts (demux, &offset, SCAN_SCR, &fscr);
found = gst_flups_demux_scan_forward_ts (demux, &offset, SCAN_SCR, &fscr, 0);
if (!found) {
found = gst_flups_demux_scan_backward_ts (demux, &offset, SCAN_SCR, &fscr);
found =
gst_flups_demux_scan_backward_ts (demux, &offset, SCAN_SCR, &fscr, 0);
}
while (found && fscr < scr) {
offset++;
found = gst_flups_demux_scan_forward_ts (demux, &offset, SCAN_SCR, &fscr);
found =
gst_flups_demux_scan_forward_ts (demux, &offset, SCAN_SCR, &fscr, 0);
}
while (found && fscr > scr && offset > 0) {
offset--;
found = gst_flups_demux_scan_backward_ts (demux, &offset, SCAN_SCR, &fscr);
found =
gst_flups_demux_scan_backward_ts (demux, &offset, SCAN_SCR, &fscr, 0);
}
GST_INFO_OBJECT (demux, "doing seek at offset %" G_GUINT64_FORMAT
@ -2377,7 +2387,7 @@ beach:
static inline gboolean
gst_flups_demux_scan_forward_ts (GstFluPSDemux * demux, guint64 * pos,
SCAN_MODE mode, guint64 * rts)
SCAN_MODE mode, guint64 * rts, gint limit)
{
GstFlowReturn ret = GST_FLOW_OK;
GstBuffer *buffer = NULL;
@ -2387,12 +2397,15 @@ gst_flups_demux_scan_forward_ts (GstFluPSDemux * demux, guint64 * pos,
guint scan_sz = (mode == SCAN_SCR ? SCAN_SCR_SZ : SCAN_PTS_SZ);
guint cursor, to_read = BLOCK_SZ;
guint8 *data;
guint end_scan;
guint end_scan, data_size;
do {
if (offset + scan_sz > demux->sink_segment.stop)
return FALSE;
if (limit && offset > *pos + limit)
return FALSE;
if (offset + to_read > demux->sink_segment.stop)
to_read = demux->sink_segment.stop - offset;
@ -2401,8 +2414,14 @@ gst_flups_demux_scan_forward_ts (GstFluPSDemux * demux, guint64 * pos,
if (G_UNLIKELY (ret != GST_FLOW_OK))
return FALSE;
/* may get a short buffer at the end of the file */
data_size = GST_BUFFER_SIZE (buffer);
if (G_UNLIKELY (data_size <= scan_sz))
return FALSE;
data = GST_BUFFER_DATA (buffer);
end_scan = GST_BUFFER_SIZE (buffer) - scan_sz;
end_scan = data_size - scan_sz;
/* scan the block */
for (cursor = 0; !found && cursor <= end_scan; cursor++) {
found = gst_flups_demux_scan_ts (demux, data++, mode, &ts);
@ -2424,7 +2443,7 @@ gst_flups_demux_scan_forward_ts (GstFluPSDemux * demux, guint64 * pos,
static inline gboolean
gst_flups_demux_scan_backward_ts (GstFluPSDemux * demux, guint64 * pos,
SCAN_MODE mode, guint64 * rts)
SCAN_MODE mode, guint64 * rts, gint limit)
{
GstFlowReturn ret = GST_FLOW_OK;
GstBuffer *buffer = NULL;
@ -2433,13 +2452,16 @@ gst_flups_demux_scan_backward_ts (GstFluPSDemux * demux, guint64 * pos,
guint64 ts = 0;
guint scan_sz = (mode == SCAN_SCR ? SCAN_SCR_SZ : SCAN_PTS_SZ);
guint cursor, to_read = BLOCK_SZ;
guint start_scan;
guint start_scan, data_size;
guint8 *data;
do {
if (offset < scan_sz - 1)
return FALSE;
if (limit && offset < *pos - limit)
return FALSE;
if (offset > BLOCK_SZ)
offset -= BLOCK_SZ;
else {
@ -2451,8 +2473,14 @@ gst_flups_demux_scan_backward_ts (GstFluPSDemux * demux, guint64 * pos,
if (G_UNLIKELY (ret != GST_FLOW_OK))
return FALSE;
start_scan = GST_BUFFER_SIZE (buffer) - scan_sz;
/* may get a short buffer at the end of the file */
data_size = GST_BUFFER_SIZE (buffer);
if (G_UNLIKELY (data_size <= scan_sz))
return FALSE;
start_scan = data_size - scan_sz;
data = GST_BUFFER_DATA (buffer) + start_scan;
/* scan the block */
for (cursor = (start_scan + 1); !found && cursor > 0; cursor--) {
found = gst_flups_demux_scan_ts (demux, data--, mode, &ts);
@ -2505,7 +2533,8 @@ gst_flups_sink_get_duration (GstFluPSDemux * demux)
/* Scan for the first and last SCR and PTS to calculate the duration */
/* scan for first SCR in the stream */
offset = demux->sink_segment.start;
gst_flups_demux_scan_forward_ts (demux, &offset, SCAN_SCR, &demux->first_scr);
gst_flups_demux_scan_forward_ts (demux, &offset, SCAN_SCR, &demux->first_scr,
DURATION_SCAN_LIMIT);
GST_DEBUG_OBJECT (demux, "First SCR: %" G_GINT64_FORMAT " %" GST_TIME_FORMAT
" in packet starting at %" G_GUINT64_FORMAT,
demux->first_scr, GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (demux->first_scr)),
@ -2513,7 +2542,8 @@ gst_flups_sink_get_duration (GstFluPSDemux * demux)
demux->first_scr_offset = offset;
/* scan for last SCR in the stream */
offset = demux->sink_segment.stop;
gst_flups_demux_scan_backward_ts (demux, &offset, SCAN_SCR, &demux->last_scr);
gst_flups_demux_scan_backward_ts (demux, &offset, SCAN_SCR,
&demux->last_scr, 0);
GST_DEBUG_OBJECT (demux, "Last SCR: %" G_GINT64_FORMAT " %" GST_TIME_FORMAT
" in packet starting at %" G_GUINT64_FORMAT,
demux->last_scr, GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (demux->last_scr)),
@ -2521,18 +2551,22 @@ gst_flups_sink_get_duration (GstFluPSDemux * demux)
demux->last_scr_offset = offset;
/* scan for first PTS in the stream */
offset = demux->sink_segment.start;
gst_flups_demux_scan_forward_ts (demux, &offset, SCAN_PTS, &demux->first_pts);
gst_flups_demux_scan_forward_ts (demux, &offset, SCAN_PTS, &demux->first_pts,
DURATION_SCAN_LIMIT);
GST_DEBUG_OBJECT (demux, "First PTS: %" G_GINT64_FORMAT " %" GST_TIME_FORMAT
" in packet starting at %" G_GUINT64_FORMAT,
demux->first_pts, GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (demux->first_pts)),
offset);
/* scan for last PTS in the stream */
offset = demux->sink_segment.stop;
gst_flups_demux_scan_backward_ts (demux, &offset, SCAN_PTS, &demux->last_pts);
GST_DEBUG_OBJECT (demux, "Last PTS: %" G_GINT64_FORMAT " %" GST_TIME_FORMAT
" in packet starting at %" G_GUINT64_FORMAT,
demux->last_pts, GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (demux->last_pts)),
offset);
if (demux->first_pts != G_MAXUINT64) {
/* scan for last PTS in the stream */
offset = demux->sink_segment.stop;
gst_flups_demux_scan_backward_ts (demux, &offset, SCAN_PTS,
&demux->last_pts, DURATION_SCAN_LIMIT);
GST_DEBUG_OBJECT (demux,
"Last PTS: %" G_GINT64_FORMAT " %" GST_TIME_FORMAT
" in packet starting at %" G_GUINT64_FORMAT, demux->last_pts,
GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (demux->last_pts)), offset);
}
/* Detect wrong SCR values */
if (demux->first_scr > demux->last_scr) {
GST_DEBUG_OBJECT (demux, "Wrong SCR values detected, searching for "
@ -2540,7 +2574,7 @@ gst_flups_sink_get_duration (GstFluPSDemux * demux)
offset = demux->first_scr_offset;
for (i = 0; i < 10; i++) {
offset++;
gst_flups_demux_scan_forward_ts (demux, &offset, SCAN_SCR, &scr);
gst_flups_demux_scan_forward_ts (demux, &offset, SCAN_SCR, &scr, 0);
if (scr < demux->last_scr) {
demux->first_scr = scr;
demux->first_scr_offset = offset;
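
The extra argument threaded through these calls caps how far the duration probe walks before giving up, and the backward PTS scan now only runs once a first PTS was actually found. A rough, self-contained sketch of a limit-bounded forward scan; the names, the limit value and the match condition are made up for illustration:

#include <stdio.h>
#include <stdint.h>
#include <stdbool.h>

#define DURATION_SCAN_LIMIT (4 * 1024 * 1024)   /* illustrative value only */

/* Scan forward from *pos for a marker byte; stop early once 'limit'
 * bytes have been examined (limit == 0 means no limit). */
static bool
scan_forward (const uint8_t * data, uint64_t size, uint64_t * pos,
    uint64_t limit)
{
  uint64_t offset;

  for (offset = *pos; offset < size; offset++) {
    if (limit && offset - *pos > limit)
      return false;                 /* bounded: do not walk the whole file */
    if (data[offset] == 0xBA) {     /* placeholder for "found a timestamp" */
      *pos = offset;
      return true;
    }
  }
  return false;
}

int
main (void)
{
  uint8_t file[16] = { 0 };
  uint64_t pos = 0;

  file[10] = 0xBA;
  printf ("found: %d at %llu\n",
      scan_forward (file, sizeof (file), &pos, DURATION_SCAN_LIMIT),
      (unsigned long long) pos);
  return 0;
}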

View file

@ -829,8 +829,13 @@ gst_mpegts_demux_fill_stream (GstMpegTSStream * stream, guint8 id,
default:
break;
}
if (name == NULL || template == NULL || caps == NULL)
if (name == NULL || template == NULL || caps == NULL) {
if (name)
g_free (name);
if (caps)
gst_caps_unref (caps);
return FALSE;
}
stream->stream_type = stream_type;
stream->id = id;
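
Previously the early return leaked whichever of name and caps had already been created. A generic sketch of the release-on-partial-failure pattern in plain C, with illustrative names:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct stream
{
  char *name;
  void *caps;
};

/* Fill in a stream description; on failure release whatever was already
 * allocated so the caller does not leak. */
static int
fill_stream (struct stream *s, const char *base, void *caps)
{
  char *name = NULL;

  if (base != NULL) {
    name = malloc (strlen (base) + 1);
    if (name != NULL)
      strcpy (name, base);
  }

  if (name == NULL || caps == NULL) {
    free (name);               /* free (NULL) is a no-op */
    return 0;
  }

  s->name = name;
  s->caps = caps;
  return 1;
}

int
main (void)
{
  struct stream s = { 0, 0 };

  printf ("%d\n", fill_stream (&s, "video", NULL));   /* failure path: 0 */
  return 0;
}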
@ -1105,6 +1110,10 @@ gst_mpegts_demux_add_all_streams (GstMpegTSDemux * demux, GstClockTime pts)
GstPad *srcpad;
gboolean all_added = TRUE;
GST_DEBUG_OBJECT (demux, "Adding streams early fixes a wedge in some low "
"bitrate streams, but causes deadlocks - disabled for now");
return FALSE;
/* When adding a stream, require either a valid base PCR, or a valid PTS */
if (!gst_mpegts_demux_setup_base_pts (demux, pts)) {
GST_ERROR ("Can't set base pts");

View file

@ -1275,6 +1275,8 @@ mpegts_parse_get_tags_from_sdt (MpegTSParse * parse, GstStructure * sdt_info)
* which looks like service-%d */
sid_str = gst_structure_get_name (service);
tmp = g_strstr_len (sid_str, -1, "-");
if (!tmp)
continue;
program_number = atoi (++tmp);
program = mpegts_parse_get_program (parse, program_number);
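
Without the added check, a service name lacking a '-' would leave tmp NULL and the code would increment a NULL pointer before passing it to atoi. A small stand-alone illustration of the guarded parse (the function name and test strings are invented):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Parse the numeric suffix of a "service-%d" style name.
 * Returns -1 when the separator is missing instead of crashing. */
static int
parse_service_number (const char *sid_str)
{
  const char *tmp = strchr (sid_str, '-');

  if (!tmp)                 /* no separator: nothing to parse */
    return -1;
  return atoi (tmp + 1);
}

int
main (void)
{
  printf ("%d\n", parse_service_number ("service-4103"));  /* 4103 */
  printf ("%d\n", parse_service_number ("service"));       /* -1   */
  return 0;
}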

View file

@ -1097,6 +1097,8 @@ mpegts_base_get_tags_from_sdt (MpegTSBase * base, GstStructure * sdt_info)
* which looks like service-%d */
sid_str = gst_structure_get_name (service);
tmp = g_strstr_len (sid_str, -1, "-");
if (!tmp)
continue;
program_number = atoi (++tmp);
program = mpegts_base_get_program (base, program_number);

View file

@ -1050,6 +1050,7 @@ create_pad_for_stream (MpegTSBase * base, MpegTSBaseStream * bstream,
name = g_strdup_printf ("private_%04x", bstream->pid);
caps = gst_caps_new_empty_simple ("subpicture/x-dvb");
g_free (desc);
break;
}
/* hack for itv hd (sid 10510, video pid 3401) */
if (program->program_number == 10510 && bstream->pid == 3401) {
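
The break added above stops the private-stream case from falling through into the code that follows it. A tiny illustration of how a missing break silently executes the next case, unrelated to the demuxer's actual stream types:

#include <stdio.h>

static const char *
describe (int stream_type)
{
  const char *desc = "unknown";

  switch (stream_type) {
    case 1:
      desc = "private section";
      break;              /* without this, execution falls into 'case 2' */
    case 2:
      desc = "video";
      break;
    default:
      break;
  }
  return desc;
}

int
main (void)
{
  printf ("%s\n", describe (1));   /* prints "private section" */
  return 0;
}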

View file

@ -337,7 +337,7 @@ static void
gst_mve_mux_palette_analyze (GstMveMux * mvemux, const GstBuffer * pal,
guint16 * first, guint16 * last)
{
guint i;
gint i;
guint32 *col1;
col1 = (guint32 *) GST_BUFFER_DATA (pal);
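
Switching the counter from guint to gint presumably matters for a loop that counts downwards, where a test like i >= 0 can never become false with an unsigned index and the value wraps past zero. That motivation is an assumption; the illustration below is not taken from the muxer:

#include <stdio.h>

int
main (void)
{
  int used[4] = { 0, 1, 0, 0 };
  int last = -1;
  int i;   /* with 'unsigned int i', 'i >= 0' is always true and i wraps */

  for (i = 3; i >= 0; i--) {
    if (used[i]) {
      last = i;            /* highest used entry */
      break;
    }
  }
  printf ("last used entry: %d\n", last);
  return 0;
}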

View file

@ -285,6 +285,9 @@ mve_quantize (const GstMveMux * mve, const guint16 * src,
}
}
if (G_UNLIKELY (!best))
continue;
++best->hits;
best->r_total += r;
best->g_total += g;

View file

@ -488,7 +488,7 @@ gst_nuv_demux_stream_data (GstNuvDemux * nuv)
switch (h->i_type) {
case 'V':
{
if (h->i_length == 0)
if (!buf)
break;
GST_BUFFER_OFFSET (buf) = nuv->video_offset;
@ -499,7 +499,7 @@ gst_nuv_demux_stream_data (GstNuvDemux * nuv)
}
case 'A':
{
if (h->i_length == 0)
if (!buf)
break;
GST_BUFFER_OFFSET (buf) = nuv->audio_offset;

View file

@ -158,7 +158,7 @@ gst_siren_enc_finalize (GObject * object)
Siren7_CloseEncoder (enc->encoder);
g_object_unref (enc->adapter);
G_OBJECT_CLASS (parent_class)->dispose (object);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static gboolean
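
The one-line fix above makes finalize chain up to the parent's finalize instead of its dispose. For reference, a minimal GObject subclass showing the usual dispose/finalize chain-up convention; all names here are invented and g_type_init() reflects the GLib of this era:

#include <glib-object.h>

typedef struct _MyEnc MyEnc;
typedef struct _MyEncClass MyEncClass;

struct _MyEnc
{
  GObject parent;
  gchar *name;       /* owned memory, released in finalize */
  GObject *helper;   /* owned reference, released in dispose */
};

struct _MyEncClass
{
  GObjectClass parent_class;
};

G_DEFINE_TYPE (MyEnc, my_enc, G_TYPE_OBJECT)

static void
my_enc_dispose (GObject * object)
{
  MyEnc *self = (MyEnc *) object;

  if (self->helper) {              /* dispose may run more than once */
    g_object_unref (self->helper);
    self->helper = NULL;
  }

  G_OBJECT_CLASS (my_enc_parent_class)->dispose (object);
}

static void
my_enc_finalize (GObject * object)
{
  MyEnc *self = (MyEnc *) object;

  g_free (self->name);

  /* chain up to the parent's *finalize*, not its dispose */
  G_OBJECT_CLASS (my_enc_parent_class)->finalize (object);
}

static void
my_enc_class_init (MyEncClass * klass)
{
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  object_class->dispose = my_enc_dispose;
  object_class->finalize = my_enc_finalize;
}

static void
my_enc_init (MyEnc * self)
{
  self->name = g_strdup ("example");
  self->helper = g_object_new (G_TYPE_OBJECT, NULL);
}

int
main (void)
{
  MyEnc *enc;

  g_type_init ();                 /* required on the GLib of this era */
  enc = g_object_new (my_enc_get_type (), NULL);
  g_object_unref (enc);           /* runs dispose, then finalize */
  return 0;
}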

View file

@ -33,6 +33,7 @@ Android.mk: Makefile.am $(BUILT_SOURCES)
$(libgstvideoparsersbad_la_LIBADD) \
-ldl \
-:LIBFILTER_STATIC gstbaseparse-@GST_MAJORMINOR@ \
gstcodecparsers-@GST_MAJORMINOR@ \
-:PASSTHROUGH LOCAL_ARM_MODE:=arm \
LOCAL_MODULE_PATH:='$$(TARGET_OUT)/lib/gstreamer-0.10' \
> $@

View file

@ -6,7 +6,7 @@ libgstavc_la_CPPFLAGS = \
$(GST_PLUGINS_BAD_CXXFLAGS) \
$(GST_PLUGINS_BASE_CXXFLAGS) \
$(GST_CXXFLAGS) \
-framework AVCVideoServices
-framework AVCVideoServices \
-Wno-deprecated-declarations
libgstavc_la_LIBADD = \
$(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_MAJORMINOR) \

View file

@ -196,9 +196,14 @@ gst_linsys_sdi_sink_get_property (GObject * object, guint property_id,
void
gst_linsys_sdi_sink_dispose (GObject * object)
{
GstLinsysSdiSink *linsyssdisink;
g_return_if_fail (GST_IS_LINSYS_SDI_SINK (object));
linsyssdisink = GST_LINSYS_SDI_SINK (object);
/* clean up as possible. may be called multiple times */
g_free (linsyssdisink->device);
linsyssdisink->device = NULL;
G_OBJECT_CLASS (parent_class)->dispose (object);
}
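
The pattern in this hunk, freeing the string and clearing the pointer, keeps dispose safe to run more than once. A tiny stand-alone sketch of the same idea; the struct, function and device path are made up:

#include <glib.h>

typedef struct
{
  gchar *device;
} SdiSink;

/* Safe to call any number of times: the pointer is cleared after the
 * first free, and g_free (NULL) is a no-op. */
static void
sdi_sink_release (SdiSink * sink)
{
  g_free (sink->device);
  sink->device = NULL;
}

int
main (void)
{
  SdiSink sink = { g_strdup ("/dev/sdirx0") };

  sdi_sink_release (&sink);
  sdi_sink_release (&sink);   /* second call must be harmless */
  return 0;
}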

View file

@ -212,9 +212,12 @@ gst_linsys_sdi_src_get_property (GObject * object, guint property_id,
void
gst_linsys_sdi_src_dispose (GObject * object)
{
g_return_if_fail (GST_IS_LINSYS_SDI_SRC (object));
GstLinsysSdiSrc *linsyssdisrc = GST_LINSYS_SDI_SRC (object);
g_return_if_fail (linsyssdisrc != NULL);
/* clean up as possible. may be called multiple times */
g_free (linsyssdisrc->device);
linsyssdisrc->device = NULL;
G_OBJECT_CLASS (parent_class)->dispose (object);
}