Merge remote-tracking branch 'origin/master' into 0.11

Conflicts:
	ext/pulse/pulseaudiosink.c
	ext/pulse/pulsesrc.c
	gst/audioparsers/gstaacparse.c
	gst/audioparsers/gstamrparse.c
	gst/audioparsers/gstdcaparse.c
	gst/audioparsers/gstflacparse.c
	gst/effectv/gstradioac.c
	gst/effectv/gstradioac.h
	gst/effectv/gstripple.c

Some possible FIXMEs remaining in the audio parser getcaps functions.
Author: Tim-Philipp Müller
Date:   2011-11-26 13:34:10 +00:00
Commit: be0d6baac5

22 changed files with 750 additions and 117 deletions
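The FIXMEs mentioned in the commit message are the "/* FIXME: handle filter caps */" markers in the new getcaps/get_sink_caps functions further down. A minimal sketch of what honouring the filter argument could eventually look like (a hypothetical helper, not part of this commit; the name and placement are assumptions):

    /* Hypothetical helper (not in this commit): constrain already-computed
     * result caps with the filter caps passed to the getcaps function. */
    static GstCaps *
    parse_apply_filter_caps (GstCaps * res, GstCaps * filter)
    {
      if (filter) {
        GstCaps *tmp;

        /* keep only structures compatible with the filter, in filter order */
        tmp = gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref (res);
        res = tmp;
      }

      return res;
    }

Each of the parser getcaps implementations below could run its res caps through such a helper just before returning, instead of ignoring the filter.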

ext/pulse/pulseaudiosink.c

@@ -535,7 +535,7 @@ dbin_event_probe (GstPad * pad, GstPadProbeInfo * info, gpointer data)
 static void
 pad_added_cb (GstElement * dbin, GstPad * pad, gpointer * data)
 {
-  GstPulseAudioSink *pbin = GST_PULSE_AUDIO_SINK (data);
+  GstPulseAudioSink *pbin;
   GstPad *sinkpad = NULL;

   pbin = GST_PULSE_AUDIO_SINK (data);
@@ -584,11 +584,13 @@ gst_pulse_audio_sink_add_dbin (GstPulseAudioSink * pbin)
   /* Trap the newsegment events that we feed the decodebin and discard them */
   sinkpad = gst_element_get_static_pad (GST_ELEMENT (pbin->psink), "sink");
   if (pbin->event_probe_id == 0)
     pbin->event_probe_id =
         gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
         dbin_event_probe, gst_object_ref (pbin),
         (GDestroyNotify) gst_object_unref);
   gst_object_unref (sinkpad);
   sinkpad = NULL;

ext/pulse/pulsesrc.c

@@ -43,6 +43,9 @@
 #include <gst/base/gstbasesrc.h>
 #include <gst/gsttaglist.h>
+#ifdef HAVE_PULSE_1_0
+#include <gst/interfaces/streamvolume.h>
+#endif

 #include "pulsesrc.h"
 #include "pulseutil.h"
@@ -55,6 +58,12 @@ GST_DEBUG_CATEGORY_EXTERN (pulse_debug);
 #define DEFAULT_DEVICE NULL
 #define DEFAULT_DEVICE_NAME NULL

+#ifdef HAVE_PULSE_1_0
+#define DEFAULT_VOLUME 1.0
+#define DEFAULT_MUTE FALSE
+#define MAX_VOLUME 10.0
+#endif
+
 enum
 {
   PROP_0,
@@ -64,6 +73,10 @@ enum
   PROP_CLIENT,
   PROP_STREAM_PROPERTIES,
   PROP_SOURCE_OUTPUT_INDEX,
+#ifdef HAVE_PULSE_1_0
+  PROP_VOLUME,
+  PROP_MUTE,
+#endif
   PROP_LAST
 };
@@ -126,7 +139,8 @@ GST_IMPLEMENT_PULSEPROBE_METHODS (GstPulseSrc, gst_pulsesrc);
 G_DEFINE_TYPE_WITH_CODE (GstPulseSrc, gst_pulsesrc, GST_TYPE_AUDIO_SRC,
     G_IMPLEMENT_INTERFACE (GST_TYPE_MIXER, gst_pulsesrc_mixer_interface_init);
     G_IMPLEMENT_INTERFACE (GST_TYPE_PROPERTY_PROBE,
-        gst_pulsesrc_property_probe_interface_init));
+        gst_pulsesrc_property_probe_interface_init);
+    G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL));

 static void
 gst_pulsesrc_class_init (GstPulseSrcClass * klass)
@@ -174,7 +188,7 @@ gst_pulsesrc_class_init (GstPulseSrcClass * klass)
   clientname = gst_pulse_client_name ();
   /**
-   * GstPulseSink:client
+   * GstPulseSrc:client
    *
    * The PulseAudio client name to use.
    *
@@ -230,6 +244,35 @@ gst_pulsesrc_class_init (GstPulseSrcClass * klass)
       "Captures audio from a PulseAudio server", "Lennart Poettering");

   gst_element_class_add_pad_template (gstelement_class,
       gst_static_pad_template_get (&pad_template));
+
+#ifdef HAVE_PULSE_1_0
+  /**
+   * GstPulseSrc:volume
+   *
+   * The volume of the record stream. Only works when using PulseAudio 1.0 or
+   * later.
+   *
+   * Since: 0.10.36
+   */
+  g_object_class_install_property (gobject_class,
+      PROP_VOLUME, g_param_spec_double ("volume", "Volume",
+          "Linear volume of this stream, 1.0=100%",
+          0.0, MAX_VOLUME, DEFAULT_VOLUME,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * GstPulseSrc:mute
+   *
+   * Whether the stream is muted or not. Only works when using PulseAudio 1.0
+   * or later.
+   *
+   * Since: 0.10.36
+   */
+  g_object_class_install_property (gobject_class,
+      PROP_MUTE, g_param_spec_boolean ("mute", "Mute",
+          "Mute state of this stream",
+          DEFAULT_MUTE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif
 }

 static void
@@ -254,6 +297,16 @@ gst_pulsesrc_init (GstPulseSrc * pulsesrc)
   pulsesrc->paused = TRUE;
   pulsesrc->in_read = FALSE;

+#ifdef HAVE_PULSE_1_0
+  pulsesrc->volume = DEFAULT_VOLUME;
+  pulsesrc->volume_set = FALSE;
+
+  pulsesrc->mute = DEFAULT_MUTE;
+  pulsesrc->mute_set = FALSE;
+
+  pulsesrc->notify = 0;
+#endif
+
   pulsesrc->mixer = NULL;

   pulsesrc->properties = NULL;
@@ -290,7 +343,15 @@ gst_pulsesrc_destroy_context (GstPulseSrc * pulsesrc)
   if (pulsesrc->context) {
     pa_context_disconnect (pulsesrc->context);

+    /* Make sure we don't get any further callbacks */
+    pa_context_set_state_callback (pulsesrc->context, NULL, NULL);
+#ifdef HAVE_PULSE_1_0
+    pa_context_set_subscribe_callback (pulsesrc->context, NULL, NULL);
+#endif
+
     pa_context_unref (pulsesrc->context);
     pulsesrc->context = NULL;
   }
 }
@@ -408,6 +469,260 @@ no_mainloop:
   }
 }

+#ifdef HAVE_PULSE_1_0
+static void
+gst_pulsesrc_source_output_info_cb (pa_context * c,
+    const pa_source_output_info * i, int eol, void *userdata)
+{
+  GstPulseSrc *psrc;
+
+  psrc = GST_PULSESRC_CAST (userdata);
+
+  if (!i)
+    goto done;
+
+  /* If the index doesn't match our current stream,
+   * it implies we just recreated the stream (caps change)
+   */
+  if (i->index == psrc->source_output_idx) {
+    psrc->volume = pa_sw_volume_to_linear (pa_cvolume_max (&i->volume));
+    psrc->mute = i->mute;
+  }
+
+done:
+  pa_threaded_mainloop_signal (psrc->mainloop, 0);
+}
+
+static gdouble
+gst_pulsesrc_get_stream_volume (GstPulseSrc * pulsesrc)
+{
+  pa_operation *o = NULL;
+  gdouble v;
+
+  if (!pulsesrc->mainloop)
+    goto no_mainloop;
+
+  if (pulsesrc->source_output_idx == PA_INVALID_INDEX)
+    goto no_index;
+
+  pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+  if (!(o = pa_context_get_source_output_info (pulsesrc->context,
+              pulsesrc->source_output_idx, gst_pulsesrc_source_output_info_cb,
+              pulsesrc)))
+    goto info_failed;
+
+  while (pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+    pa_threaded_mainloop_wait (pulsesrc->mainloop);
+    if (gst_pulsesrc_is_dead (pulsesrc, TRUE))
+      goto unlock;
+  }
+
+unlock:
+  v = pulsesrc->volume;
+
+  if (o)
+    pa_operation_unref (o);
+
+  pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+  if (v > MAX_VOLUME) {
+    GST_WARNING_OBJECT (pulsesrc, "Clipped volume from %f to %f", v,
+        MAX_VOLUME);
+    v = MAX_VOLUME;
+  }
+
+  return v;
+
+  /* ERRORS */
+no_mainloop:
+  {
+    v = pulsesrc->volume;
+    GST_DEBUG_OBJECT (pulsesrc, "we have no mainloop");
+    return v;
+  }
+no_index:
+  {
+    v = pulsesrc->volume;
+    GST_DEBUG_OBJECT (pulsesrc, "we don't have a stream index");
+    return v;
+  }
+info_failed:
+  {
+    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+        ("pa_context_get_source_output_info() failed: %s",
+            pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+    goto unlock;
+  }
+}
+
+static gboolean
+gst_pulsesrc_get_stream_mute (GstPulseSrc * pulsesrc)
+{
+  pa_operation *o = NULL;
+  gboolean mute;
+
+  if (!pulsesrc->mainloop)
+    goto no_mainloop;
+
+  if (pulsesrc->source_output_idx == PA_INVALID_INDEX)
+    goto no_index;
+
+  pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+  if (!(o = pa_context_get_source_output_info (pulsesrc->context,
+              pulsesrc->source_output_idx, gst_pulsesrc_source_output_info_cb,
+              pulsesrc)))
+    goto info_failed;
+
+  while (pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+    pa_threaded_mainloop_wait (pulsesrc->mainloop);
+    if (gst_pulsesrc_is_dead (pulsesrc, TRUE))
+      goto unlock;
+  }
+
+unlock:
+  mute = pulsesrc->mute;
+
+  if (o)
+    pa_operation_unref (o);
+
+  pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+  return mute;
+
+  /* ERRORS */
+no_mainloop:
+  {
+    mute = pulsesrc->mute;
+    GST_DEBUG_OBJECT (pulsesrc, "we have no mainloop");
+    return mute;
+  }
+no_index:
+  {
+    mute = pulsesrc->mute;
+    GST_DEBUG_OBJECT (pulsesrc, "we don't have a stream index");
+    return mute;
+  }
+info_failed:
+  {
+    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+        ("pa_context_get_source_output_info() failed: %s",
+            pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+    goto unlock;
+  }
+}
+
+static void
+gst_pulsesrc_set_stream_volume (GstPulseSrc * pulsesrc, gdouble volume)
+{
+  pa_cvolume v;
+  pa_operation *o = NULL;
+
+  if (!pulsesrc->mainloop)
+    goto no_mainloop;
+
+  if (!pulsesrc->source_output_idx)
+    goto no_index;
+
+  pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+  GST_DEBUG_OBJECT (pulsesrc, "setting volume to %f", volume);
+
+  gst_pulse_cvolume_from_linear (&v, pulsesrc->sample_spec.channels, volume);
+
+  if (!(o = pa_context_set_source_output_volume (pulsesrc->context,
+              pulsesrc->source_output_idx, &v, NULL, NULL)))
+    goto volume_failed;
+
+  /* We don't really care about the result of this call */
+unlock:
+
+  if (o)
+    pa_operation_unref (o);
+
+  pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+  return;
+
+  /* ERRORS */
+no_mainloop:
+  {
+    pulsesrc->volume = volume;
+    pulsesrc->volume_set = TRUE;
+    GST_DEBUG_OBJECT (pulsesrc, "we have no mainloop");
+    return;
+  }
+no_index:
+  {
+    pulsesrc->volume = volume;
+    pulsesrc->volume_set = TRUE;
+    GST_DEBUG_OBJECT (pulsesrc, "we don't have a stream index");
+    return;
+  }
+volume_failed:
+  {
+    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+        ("pa_stream_set_source_output_volume() failed: %s",
+            pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+    goto unlock;
+  }
+}
+
+static void
+gst_pulsesrc_set_stream_mute (GstPulseSrc * pulsesrc, gboolean mute)
+{
+  pa_operation *o = NULL;
+
+  if (!pulsesrc->mainloop)
+    goto no_mainloop;
+
+  if (!pulsesrc->source_output_idx)
+    goto no_index;
+
+  pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+  GST_DEBUG_OBJECT (pulsesrc, "setting mute state to %d", mute);
+
+  if (!(o = pa_context_set_source_output_mute (pulsesrc->context,
+              pulsesrc->source_output_idx, mute, NULL, NULL)))
+    goto mute_failed;
+
+  /* We don't really care about the result of this call */
+unlock:
+
+  if (o)
+    pa_operation_unref (o);
+
+  pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+  return;
+
+  /* ERRORS */
+no_mainloop:
+  {
+    pulsesrc->mute = mute;
+    pulsesrc->mute_set = TRUE;
+    GST_DEBUG_OBJECT (pulsesrc, "we have no mainloop");
+    return;
+  }
+no_index:
+  {
+    pulsesrc->mute = mute;
+    pulsesrc->mute_set = TRUE;
+    GST_DEBUG_OBJECT (pulsesrc, "we don't have a stream index");
+    return;
+  }
+mute_failed:
+  {
+    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+        ("pa_stream_set_source_output_mute() failed: %s",
+            pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+    goto unlock;
+  }
+}
+#endif
+
 static void
 gst_pulsesrc_set_property (GObject * object,
     guint prop_id, const GValue * value, GParamSpec * pspec)
@@ -444,6 +759,14 @@ gst_pulsesrc_set_property (GObject * object,
         pa_proplist_free (pulsesrc->proplist);
       pulsesrc->proplist = gst_pulse_make_proplist (pulsesrc->properties);
       break;
+#ifdef HAVE_PULSE_1_0
+    case PROP_VOLUME:
+      gst_pulsesrc_set_stream_volume (pulsesrc, g_value_get_double (value));
+      break;
+    case PROP_MUTE:
+      gst_pulsesrc_set_stream_mute (pulsesrc, g_value_get_boolean (value));
+      break;
+#endif
     default:
       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
@@ -476,6 +799,14 @@ gst_pulsesrc_get_property (GObject * object,
     case PROP_SOURCE_OUTPUT_INDEX:
       g_value_set_uint (value, pulsesrc->source_output_idx);
       break;
+#ifdef HAVE_PULSE_1_0
+    case PROP_VOLUME:
+      g_value_set_double (value, gst_pulsesrc_get_stream_volume (pulsesrc));
+      break;
+    case PROP_MUTE:
+      g_value_set_boolean (value, gst_pulsesrc_get_stream_mute (pulsesrc));
+      break;
+#endif
     default:
       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
       break;
@@ -569,6 +900,30 @@ gst_pulsesrc_stream_overflow_cb (pa_stream * s, void *userdata)
   GST_WARNING_OBJECT (GST_PULSESRC_CAST (userdata), "Got overflow");
 }

+#ifdef HAVE_PULSE_1_0
+static void
+gst_pulsesrc_context_subscribe_cb (pa_context * c,
+    pa_subscription_event_type_t t, uint32_t idx, void *userdata)
+{
+  GstPulseSrc *psrc = GST_PULSESRC (userdata);
+
+  if (t != (PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT | PA_SUBSCRIPTION_EVENT_CHANGE)
+      && t != (PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT | PA_SUBSCRIPTION_EVENT_NEW))
+    return;
+
+  if (idx != psrc->source_output_idx)
+    return;
+
+  /* Actually this event is also triggered when other properties of the stream
+   * change that are unrelated to the volume. However it is probably cheaper to
+   * signal the change here and check for the volume when the GObject property
+   * is read instead of querying it always. */
+
+  /* inform streaming thread to notify */
+  g_atomic_int_compare_and_exchange (&psrc->notify, 0, 1);
+}
+#endif
+
 static gboolean
 gst_pulsesrc_open (GstAudioSrc * asrc)
 {
@@ -591,6 +946,10 @@ gst_pulsesrc_open (GstAudioSrc * asrc)
   pa_context_set_state_callback (pulsesrc->context,
       gst_pulsesrc_context_state_cb, pulsesrc);
+#ifdef HAVE_PULSE_1_0
+  pa_context_set_subscribe_callback (pulsesrc->context,
+      gst_pulsesrc_context_subscribe_cb, pulsesrc);
+#endif

   GST_DEBUG_OBJECT (pulsesrc, "connect to server %s",
       GST_STR_NULL (pulsesrc->server));
@@ -672,6 +1031,13 @@ gst_pulsesrc_read (GstAudioSrc * asrc, gpointer data, guint length)
   pa_threaded_mainloop_lock (pulsesrc->mainloop);
   pulsesrc->in_read = TRUE;

+#ifdef HAVE_PULSE_1_0
+  if (g_atomic_int_compare_and_exchange (&pulsesrc->notify, 1, 0)) {
+    g_object_notify (G_OBJECT (pulsesrc), "volume");
+    g_object_notify (G_OBJECT (pulsesrc), "mute");
+  }
+#endif
+
   if (pulsesrc->paused)
     goto was_paused;
@@ -985,9 +1351,27 @@ gst_pulsesrc_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
   pa_buffer_attr wanted;
   const pa_buffer_attr *actual;
   GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (asrc);
+  pa_stream_flags_t flags;
+#ifdef HAVE_PULSE_1_0
+  pa_operation *o;
+#endif

   pa_threaded_mainloop_lock (pulsesrc->mainloop);

+#ifdef HAVE_PULSE_1_0
+  /* enable event notifications */
+  GST_LOG_OBJECT (pulsesrc, "subscribing to context events");
+  if (!(o = pa_context_subscribe (pulsesrc->context,
+              PA_SUBSCRIPTION_MASK_SINK_INPUT, NULL, NULL))) {
+    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+        ("pa_context_subscribe() failed: %s",
+            pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+    goto unlock_and_fail;
+  }
+
+  pa_operation_unref (o);
+#endif
+
   wanted.maxlength = -1;
   wanted.tlength = -1;
   wanted.prebuf = 0;
@@ -1000,11 +1384,19 @@ gst_pulsesrc_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
   GST_INFO_OBJECT (pulsesrc, "minreq: %d", wanted.minreq);
   GST_INFO_OBJECT (pulsesrc, "fragsize: %d", wanted.fragsize);

+  flags = PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE |
+      PA_STREAM_NOT_MONOTONIC | PA_STREAM_ADJUST_LATENCY |
+      PA_STREAM_START_CORKED;
+
+#ifdef HAVE_PULSE_1_0
+  if (pulsesrc->mute_set && pulsesrc->mute)
+    flags |= PA_STREAM_START_MUTED;
+#endif
+
   if (pa_stream_connect_record (pulsesrc->stream, pulsesrc->device, &wanted,
-          PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE |
-          PA_STREAM_NOT_MONOTONIC | PA_STREAM_ADJUST_LATENCY |
-          PA_STREAM_START_CORKED) < 0)
+          flags) < 0) {
     goto connect_failed;
+  }

   pulsesrc->corked = TRUE;
@@ -1028,6 +1420,13 @@ gst_pulsesrc_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
   pulsesrc->source_output_idx = pa_stream_get_index (pulsesrc->stream);
   g_object_notify (G_OBJECT (pulsesrc), "source-output-index");

+#ifdef HAVE_PULSE_1_0
+  if (pulsesrc->volume_set) {
+    gst_pulsesrc_set_stream_volume (pulsesrc, pulsesrc->volume);
+    pulsesrc->volume_set = FALSE;
+  }
+#endif
+
   /* get the actual buffering properties now */
   actual = pa_stream_get_buffer_attr (pulsesrc->stream);

ext/pulse/pulsesrc.h

@@ -72,6 +72,15 @@ struct _GstPulseSrc
   GstPulseMixerCtrl *mixer;
   GstPulseProbe *probe;

+#ifdef HAVE_PULSE_1_0
+  gdouble volume;
+  gboolean volume_set:1;
+  gboolean mute:1;
+  gboolean mute_set:1;
+  gint notify;                  /* atomic */
+#endif
+
   gboolean corked:1;
   gboolean stream_connected:1;
   gboolean operation_success:1;
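Together, the pulsesrc.c and pulsesrc.h hunks above expose the record-stream volume and mute state as regular GObject properties (only effective when built against PulseAudio 1.0 or later; the element also advertises the GstStreamVolume interface). A minimal usage sketch from application code, not part of this commit:

    /* Hypothetical application-side usage of the new properties */
    GstElement *src;

    src = gst_element_factory_make ("pulsesrc", NULL);

    /* "volume" is linear (1.0 = 100%, clamped to 10.0), "mute" is a boolean;
     * both can also be read back with g_object_get () once the stream runs */
    g_object_set (src, "volume", 0.5, "mute", FALSE, NULL);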

gst/audioparsers/gstaacparse.c

@@ -69,16 +69,18 @@ GST_DEBUG_CATEGORY_STATIC (aacparse_debug);
 #define AAC_FRAME_DURATION(parse) (GST_SECOND/parse->frames_per_sec)

-gboolean gst_aac_parse_start (GstBaseParse * parse);
-gboolean gst_aac_parse_stop (GstBaseParse * parse);
+static gboolean gst_aac_parse_start (GstBaseParse * parse);
+static gboolean gst_aac_parse_stop (GstBaseParse * parse);

 static gboolean gst_aac_parse_sink_setcaps (GstBaseParse * parse,
     GstCaps * caps);
+static GstCaps *gst_aac_parse_sink_getcaps (GstBaseParse * parse,
+    GstCaps * filter);

-gboolean gst_aac_parse_check_valid_frame (GstBaseParse * parse,
+static gboolean gst_aac_parse_check_valid_frame (GstBaseParse * parse,
     GstBaseParseFrame * frame, guint * size, gint * skipsize);

-GstFlowReturn gst_aac_parse_parse_frame (GstBaseParse * parse,
+static GstFlowReturn gst_aac_parse_parse_frame (GstBaseParse * parse,
     GstBaseParseFrame * frame);

 gboolean gst_aac_parse_convert (GstBaseParse * parse,
@@ -131,6 +133,7 @@ gst_aac_parse_class_init (GstAacParseClass * klass)
   parse_class->start = GST_DEBUG_FUNCPTR (gst_aac_parse_start);
   parse_class->stop = GST_DEBUG_FUNCPTR (gst_aac_parse_stop);
   parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_aac_parse_sink_setcaps);
+  parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_aac_parse_sink_getcaps);
   parse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_aac_parse_parse_frame);
   parse_class->check_valid_frame =
       GST_DEBUG_FUNCPTR (gst_aac_parse_check_valid_frame);
@@ -568,7 +571,7 @@ gst_aac_parse_detect_stream (GstAacParse * aacparse,
  *
  * Returns: TRUE if buffer contains a valid frame.
  */
-gboolean
+static gboolean
 gst_aac_parse_check_valid_frame (GstBaseParse * parse,
     GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
 {
@@ -643,7 +646,7 @@ gst_aac_parse_check_valid_frame (GstBaseParse * parse,
  * Returns: GST_FLOW_OK if frame was successfully parsed and can be pushed
  * forward. Otherwise appropriate error is returned.
  */
-GstFlowReturn
+static GstFlowReturn
 gst_aac_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
 {
   GstAacParse *aacparse;
@@ -700,7 +703,7 @@ gst_aac_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
  *
  * Returns: TRUE if startup succeeded.
  */
-gboolean
+static gboolean
 gst_aac_parse_start (GstBaseParse * parse)
 {
   GstAacParse *aacparse;
@@ -721,9 +724,44 @@ gst_aac_parse_start (GstBaseParse * parse)
  *
  * Returns: TRUE is stopping succeeded.
  */
-gboolean
+static gboolean
 gst_aac_parse_stop (GstBaseParse * parse)
 {
   GST_DEBUG ("stop");
   return TRUE;
 }
+
+static GstCaps *
+gst_aac_parse_sink_getcaps (GstBaseParse * parse, GstCaps * filter)
+{
+  GstCaps *peercaps;
+  GstCaps *res;
+
+  /* FIXME: handle filter caps */
+
+  peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
+  if (peercaps) {
+    guint i, n;
+
+    /* Remove the framed field */
+    peercaps = gst_caps_make_writable (peercaps);
+    n = gst_caps_get_size (peercaps);
+    for (i = 0; i < n; i++) {
+      GstStructure *s = gst_caps_get_structure (peercaps, i);
+
+      gst_structure_remove_field (s, "framed");
+    }
+
+    res =
+        gst_caps_intersect_full (peercaps,
+        gst_pad_get_pad_template_caps (GST_BASE_PARSE_SRC_PAD (parse)),
+        GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (peercaps);
+  } else {
+    res =
+        gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD
+            (parse)));
+  }
+
+  return res;
+}

gst/audioparsers/gstac3parse.c

@@ -165,6 +165,8 @@ static GstFlowReturn gst_ac3_parse_parse_frame (GstBaseParse * parse,
     GstBaseParseFrame * frame);
 static gboolean gst_ac3_parse_src_event (GstBaseParse * parse,
     GstEvent * event);
+static GstCaps *gst_ac3_parse_get_sink_caps (GstBaseParse * parse,
+    GstCaps * filter);

 #define gst_ac3_parse_parent_class parent_class
 G_DEFINE_TYPE (GstAc3Parse, gst_ac3_parse, GST_TYPE_BASE_PARSE);
@@ -195,8 +197,8 @@ gst_ac3_parse_class_init (GstAc3ParseClass * klass)
   parse_class->check_valid_frame =
       GST_DEBUG_FUNCPTR (gst_ac3_parse_check_valid_frame);
   parse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_ac3_parse_parse_frame);
-
   parse_class->src_event = GST_DEBUG_FUNCPTR (gst_ac3_parse_src_event);
+  parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_ac3_parse_get_sink_caps);
 }

 static void
@@ -702,3 +704,40 @@ gst_ac3_parse_src_event (GstBaseParse * parse, GstEvent * event)
   return GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
 }
+
+static GstCaps *
+gst_ac3_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
+{
+  GstCaps *peercaps;
+  GstCaps *res;
+
+  /* FIXME: handle filter */
+
+  peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
+  if (peercaps) {
+    guint i, n;
+
+    /* Remove the framed and alignment field. We can convert
+     * between different alignments. */
+    peercaps = gst_caps_make_writable (peercaps);
+    n = gst_caps_get_size (peercaps);
+    for (i = 0; i < n; i++) {
+      GstStructure *s = gst_caps_get_structure (peercaps, i);
+
+      gst_structure_remove_field (s, "framed");
+      gst_structure_remove_field (s, "alignment");
+    }
+
+    res =
+        gst_caps_intersect_full (peercaps,
+        gst_pad_get_pad_template_caps (GST_BASE_PARSE_SRC_PAD (parse)),
+        GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (peercaps);
+  } else {
+    res =
+        gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD
+            (parse)));
+  }
+
+  return res;
+}

gst/audioparsers/gstamrparse.c

@@ -71,16 +71,18 @@ static const gint block_size_wb[16] =
 #define AMR_FRAME_DURATION (GST_SECOND/AMR_FRAMES_PER_SECOND)
 #define AMR_MIME_HEADER_SIZE 9

-gboolean gst_amr_parse_start (GstBaseParse * parse);
-gboolean gst_amr_parse_stop (GstBaseParse * parse);
+static gboolean gst_amr_parse_start (GstBaseParse * parse);
+static gboolean gst_amr_parse_stop (GstBaseParse * parse);

 static gboolean gst_amr_parse_sink_setcaps (GstBaseParse * parse,
     GstCaps * caps);
+static GstCaps *gst_amr_parse_sink_getcaps (GstBaseParse * parse,
+    GstCaps * filter);

-gboolean gst_amr_parse_check_valid_frame (GstBaseParse * parse,
+static gboolean gst_amr_parse_check_valid_frame (GstBaseParse * parse,
     GstBaseParseFrame * frame, guint * framesize, gint * skipsize);

-GstFlowReturn gst_amr_parse_parse_frame (GstBaseParse * parse,
+static GstFlowReturn gst_amr_parse_parse_frame (GstBaseParse * parse,
     GstBaseParseFrame * frame);

 G_DEFINE_TYPE (GstAmrParse, gst_amr_parse, GST_TYPE_BASE_PARSE);
@@ -112,6 +114,7 @@ gst_amr_parse_class_init (GstAmrParseClass * klass)
   parse_class->start = GST_DEBUG_FUNCPTR (gst_amr_parse_start);
   parse_class->stop = GST_DEBUG_FUNCPTR (gst_amr_parse_stop);
   parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_amr_parse_sink_setcaps);
+  parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_amr_parse_sink_getcaps);
   parse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_amr_parse_parse_frame);
   parse_class->check_valid_frame =
       GST_DEBUG_FUNCPTR (gst_amr_parse_check_valid_frame);
@@ -251,7 +254,7 @@ gst_amr_parse_parse_header (GstAmrParse * amrparse,
  *
  * Returns: TRUE if the given data contains valid frame.
  */
-gboolean
+static gboolean
 gst_amr_parse_check_valid_frame (GstBaseParse * parse,
     GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
 {
@@ -338,7 +341,7 @@ done:
  *
  * Returns: #GstFlowReturn defining the parsing status.
  */
-GstFlowReturn
+static GstFlowReturn
 gst_amr_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
 {
   return GST_FLOW_OK;
@@ -353,7 +356,7 @@ gst_amr_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
  *
  * Returns: TRUE on success.
  */
-gboolean
+static gboolean
 gst_amr_parse_start (GstBaseParse * parse)
 {
   GstAmrParse *amrparse;
@@ -374,7 +377,7 @@ gst_amr_parse_start (GstBaseParse * parse)
  *
  * Returns: TRUE on success.
  */
-gboolean
+static gboolean
 gst_amr_parse_stop (GstBaseParse * parse)
 {
   GstAmrParse *amrparse;
@@ -385,3 +388,41 @@ gst_amr_parse_stop (GstBaseParse * parse)
   amrparse->header = 0;
   return TRUE;
 }
+
+static GstCaps *
+gst_amr_parse_sink_getcaps (GstBaseParse * parse, GstCaps * filter)
+{
+  GstCaps *peercaps;
+  GstCaps *res;
+
+  /* FIXME: handle filter caps */
+
+  peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
+  if (peercaps) {
+    guint i, n;
+
+    /* Rename structure names */
+    peercaps = gst_caps_make_writable (peercaps);
+    n = gst_caps_get_size (peercaps);
+    for (i = 0; i < n; i++) {
+      GstStructure *s = gst_caps_get_structure (peercaps, i);
+
+      if (gst_structure_has_name (s, "audio/AMR"))
+        gst_structure_set_name (s, "audio/x-amr-nb-sh");
+      else
+        gst_structure_set_name (s, "audio/x-amr-wb-sh");
+    }
+
+    res =
+        gst_caps_intersect_full (peercaps,
+        gst_pad_get_pad_template_caps (GST_BASE_PARSE_SRC_PAD (parse)),
+        GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (peercaps);
+  } else {
+    res =
+        gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD
+            (parse)));
+  }
+
+  return res;
+}

gst/audioparsers/gstdcaparse.c

@@ -78,6 +78,8 @@ static gboolean gst_dca_parse_check_valid_frame (GstBaseParse * parse,
     GstBaseParseFrame * frame, guint * size, gint * skipsize);
 static GstFlowReturn gst_dca_parse_parse_frame (GstBaseParse * parse,
     GstBaseParseFrame * frame);
+static GstCaps *gst_dca_parse_get_sink_caps (GstBaseParse * parse,
+    GstCaps * filter);

 #define gst_dca_parse_parent_class parent_class
 G_DEFINE_TYPE (GstDcaParse, gst_dca_parse, GST_TYPE_BASE_PARSE);
@@ -99,6 +101,7 @@ gst_dca_parse_class_init (GstDcaParseClass * klass)
   parse_class->check_valid_frame =
       GST_DEBUG_FUNCPTR (gst_dca_parse_check_valid_frame);
   parse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_dca_parse_parse_frame);
+  parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_dca_parse_get_sink_caps);

   gst_element_class_add_pad_template (element_class,
       gst_static_pad_template_get (&sink_template));
@@ -464,3 +467,38 @@ broken_header:
     return GST_FLOW_ERROR;
   }
 }
+
+static GstCaps *
+gst_dca_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
+{
+  GstCaps *peercaps;
+  GstCaps *res;
+
+  /* FIXME: handle filter caps */
+
+  peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
+  if (peercaps) {
+    guint i, n;
+
+    /* Remove the framed field */
+    peercaps = gst_caps_make_writable (peercaps);
+    n = gst_caps_get_size (peercaps);
+    for (i = 0; i < n; i++) {
+      GstStructure *s = gst_caps_get_structure (peercaps, i);
+
+      gst_structure_remove_field (s, "framed");
+    }
+
+    res =
+        gst_caps_intersect_full (peercaps,
+        gst_pad_get_pad_template_caps (GST_BASE_PARSE_SRC_PAD (parse)),
+        GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (peercaps);
+  } else {
+    res =
+        gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD
+            (parse)));
+  }
+
+  return res;
+}

gst/audioparsers/gstflacparse.c

@@ -201,6 +201,8 @@ static GstFlowReturn gst_flac_parse_pre_push_frame (GstBaseParse * parse,
 static gboolean gst_flac_parse_convert (GstBaseParse * parse,
     GstFormat src_format, gint64 src_value, GstFormat dest_format,
     gint64 * dest_value);
+static GstCaps *gst_flac_parse_get_sink_caps (GstBaseParse * parse,
+    GstCaps * filter);

 #define gst_flac_parse_parent_class parent_class
 G_DEFINE_TYPE (GstFlacParse, gst_flac_parse, GST_TYPE_BASE_PARSE);
@@ -233,6 +235,8 @@ gst_flac_parse_class_init (GstFlacParseClass * klass)
   baseparse_class->pre_push_frame =
       GST_DEBUG_FUNCPTR (gst_flac_parse_pre_push_frame);
   baseparse_class->convert = GST_DEBUG_FUNCPTR (gst_flac_parse_convert);
+  baseparse_class->get_sink_caps =
+      GST_DEBUG_FUNCPTR (gst_flac_parse_get_sink_caps);

   gst_element_class_add_pad_template (element_class,
       gst_static_pad_template_get (&src_factory));
@@ -1502,3 +1506,38 @@ gst_flac_parse_convert (GstBaseParse * parse,
   return GST_BASE_PARSE_CLASS (parent_class)->convert (parse, src_format,
       src_value, dest_format, dest_value);
 }
+
+static GstCaps *
+gst_flac_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
+{
+  GstCaps *peercaps;
+  GstCaps *res;
+
+  /* FIXME: handle filter caps */
+
+  peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
+  if (peercaps) {
+    guint i, n;
+
+    /* Remove the framed field */
+    peercaps = gst_caps_make_writable (peercaps);
+    n = gst_caps_get_size (peercaps);
+    for (i = 0; i < n; i++) {
+      GstStructure *s = gst_caps_get_structure (peercaps, i);
+
+      gst_structure_remove_field (s, "framed");
+    }
+
+    res =
+        gst_caps_intersect_full (peercaps,
+        gst_pad_get_pad_template_caps (GST_BASE_PARSE_SRC_PAD (parse)),
+        GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (peercaps);
+  } else {
+    res =
+        gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD
+            (parse)));
+  }
+
+  return res;
+}

gst/audioparsers/gstmpegaudioparse.c

@@ -100,6 +100,8 @@ static GstFlowReturn gst_mpeg_audio_parse_pre_push_frame (GstBaseParse * parse,
 static gboolean gst_mpeg_audio_parse_convert (GstBaseParse * parse,
     GstFormat src_format, gint64 src_value,
     GstFormat dest_format, gint64 * dest_value);
+static GstCaps *gst_mpeg_audio_parse_get_sink_caps (GstBaseParse * parse,
+    GstCaps * filter);

 #define gst_mpeg_audio_parse_parent_class parent_class
 G_DEFINE_TYPE (GstMpegAudioParse, gst_mpeg_audio_parse, GST_TYPE_BASE_PARSE);
@@ -161,6 +163,8 @@ gst_mpeg_audio_parse_class_init (GstMpegAudioParseClass * klass)
   parse_class->pre_push_frame =
       GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_pre_push_frame);
   parse_class->convert = GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_convert);
+  parse_class->get_sink_caps =
+      GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_get_sink_caps);

   /* register tags */
 #define GST_TAG_CRC "has-crc"
@@ -1288,3 +1292,38 @@ gst_mpeg_audio_parse_pre_push_frame (GstBaseParse * parse,
   return GST_FLOW_OK;
 }
+
+static GstCaps *
+gst_mpeg_audio_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
+{
+  GstCaps *peercaps;
+  GstCaps *res;
+
+  /* FIXME: handle filter caps */
+
+  peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
+  if (peercaps) {
+    guint i, n;
+
+    /* Remove the parsed field */
+    peercaps = gst_caps_make_writable (peercaps);
+    n = gst_caps_get_size (peercaps);
+    for (i = 0; i < n; i++) {
+      GstStructure *s = gst_caps_get_structure (peercaps, i);
+
+      gst_structure_remove_field (s, "parsed");
+    }
+
+    res =
+        gst_caps_intersect_full (peercaps,
+        gst_pad_get_pad_template_caps (GST_BASE_PARSE_SRC_PAD (parse)),
+        GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (peercaps);
+  } else {
+    res =
+        gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD
+            (parse)));
+  }
+
+  return res;
+}

gst/effectv/gstedge.c

@@ -140,7 +140,7 @@ gst_edgetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
       p = *src;
       q = *(src - 4);

-      /* difference between the current pixel and right neighbor. */
+      /* difference between the current pixel and left neighbor. */
       r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
       g = ((p & 0xff00) - (q & 0xff00)) >> 8;
       b = (p & 0xff) - (q & 0xff);
@@ -199,8 +199,12 @@ gst_edgetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
       dest[width + 3] = v3;
       dest[width * 2] = v2;
       dest[width * 2 + 1] = v2;
+      dest[width * 2 + 2] = 0;
+      dest[width * 2 + 3] = 0;
       dest[width * 3] = v2;
       dest[width * 3 + 1] = v2;
+      dest[width * 3 + 2] = 0;
+      dest[width * 3 + 3] = 0;

       src += 4;
       dest += 4;

gst/effectv/gstquark.c

@@ -328,7 +328,4 @@ gst_quarktv_init (GstQuarkTV * filter)
 {
   filter->planes = PLANES;
   filter->current_plane = filter->planes - 1;
-
-  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
-  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
 }

gst/effectv/gstradioac.c

@@ -131,14 +131,15 @@
 #define RATIO 0.95

 static guint32 palettes[COLORS * PATTERN];
+static gint swap_tab[] = { 2, 1, 0, 3 };

 #define gst_radioactv_parent_class parent_class
 G_DEFINE_TYPE (GstRadioacTV, gst_radioactv, GST_TYPE_VIDEO_FILTER);

 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
-#define CAPS_STR GST_VIDEO_CAPS_MAKE ("RGBx")
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ RGBx, BGRx }")
 #else
-#define CAPS_STR GST_VIDEO_CAPS_MAKE ("xBGR")
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xBGR, xRGB }")
 #endif

 static GstStaticPadTemplate gst_radioactv_src_template =
@@ -162,18 +163,20 @@ makePalette (void)
 #define DELTA (255/(COLORS/2-1))

+  /* red, gree, blue */
   for (i = 0; i < COLORS / 2; i++) {
     palettes[i] = i * DELTA;
     palettes[COLORS + i] = (i * DELTA) << 8;
     palettes[COLORS * 2 + i] = (i * DELTA) << 16;
   }
   for (i = 0; i < COLORS / 2; i++) {
-    palettes[+i + COLORS / 2] = 255 | (i * DELTA) << 16 | (i * DELTA) << 8;
+    palettes[i + COLORS / 2] = 255 | (i * DELTA) << 16 | (i * DELTA) << 8;
     palettes[COLORS + i + COLORS / 2] =
         (255 << 8) | (i * DELTA) << 16 | i * DELTA;
     palettes[COLORS * 2 + i + COLORS / 2] =
         (255 << 16) | (i * DELTA) << 8 | i * DELTA;
   }
+  /* white */
   for (i = 0; i < COLORS; i++) {
     palettes[COLORS * 3 + i] = (255 * i / COLORS) * 0x10101;
   }
@@ -347,7 +350,19 @@ gst_radioactv_transform (GstBaseTransform * trans, GstBuffer * in,
   height = GST_VIDEO_FRAME_HEIGHT (&in_frame);

   GST_OBJECT_LOCK (filter);
-  palette = &palettes[COLORS * filter->color];
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+  if (GST_VIDEO_FRAME_FORMAT (&in_frame) == GST_VIDEO_FORMAT_RGBx) {
+    palette = &palettes[COLORS * filter->color];
+  } else {
+    palette = &palettes[COLORS * swap_tab[filter->color]];
+  }
+#else
+  if (GST_VIDEO_FRAME_FORMAT (&in_frame) == GST_VIDEO_FORMAT_xBGR) {
+    palette = &palettes[COLORS * filter->color];
+  } else {
+    palette = &palettes[COLORS * swap_tab[filter->color]];
+  }
+#endif
   diff = filter->diff;

   if (filter->mode == 3 && filter->trigger)
@@ -643,7 +658,4 @@ gst_radioactv_init (GstRadioacTV * filter)
   filter->color = DEFAULT_COLOR;
   filter->interval = DEFAULT_INTERVAL;
   filter->trigger = DEFAULT_TRIGGER;
-
-  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
-  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
 }

gst/effectv/gstripple.c

@@ -311,9 +311,9 @@ gst_rippletv_transform (GstBaseTransform * trans, GstBuffer * in,
   guint32 *src, *dest;
   GstVideoFrame in_frame, out_frame;
   gint x, y, i;
-  gint dx, dy;
+  gint dx, dy, o_dx;
   gint h, v;
-  gint width, height;
+  gint m_w, m_h, v_w, v_h;
   gint *p, *q, *r;
   gint8 *vp;
   GstClockTime timestamp, stream_time;
@@ -344,22 +344,24 @@ gst_rippletv_transform (GstBaseTransform * trans, GstBuffer * in,
   else
     motiondetect (filter, src);

+  m_w = filter->map_w;
+  m_h = filter->map_h;
+  v_w = GST_VIDEO_FRAME_WIDTH (&in_frame);
+  v_h = GST_VIDEO_FRAME_HEIGHT (&in_frame);
+
   /* simulate surface wave */
-  width = filter->map_w;
-  height = filter->map_h;

   /* This function is called only 30 times per second. To increase a speed
    * of wave, iterates this loop several times. */
   for (i = loopnum; i > 0; i--) {
     /* wave simulation */
-    p = filter->map1 + width + 1;
-    q = filter->map2 + width + 1;
-    r = filter->map3 + width + 1;
-    for (y = height - 2; y > 0; y--) {
-      for (x = width - 2; x > 0; x--) {
-        h = *(p - width - 1) + *(p - width + 1) + *(p + width - 1) + *(p +
-            width + 1)
-            + *(p - width) + *(p - 1) + *(p + 1) + *(p + width) - (*p) * 9;
+    p = filter->map1 + m_w + 1;
+    q = filter->map2 + m_w + 1;
+    r = filter->map3 + m_w + 1;
+    for (y = m_h - 2; y > 0; y--) {
+      for (x = m_w - 2; x > 0; x--) {
+        h = *(p - m_w - 1) + *(p - m_w + 1) + *(p + m_w - 1) + *(p + m_w + 1)
+            + *(p - m_w) + *(p - 1) + *(p + 1) + *(p + m_w) - (*p) * 9;
         h = h >> 3;
         v = *p - *q;
         v += h - (v >> decay);
@@ -374,11 +376,11 @@ gst_rippletv_transform (GstBaseTransform * trans, GstBuffer * in,
     }

     /* low pass filter */
-    p = filter->map3 + width + 1;
-    q = filter->map2 + width + 1;
-    for (y = height - 2; y > 0; y--) {
-      for (x = width - 2; x > 0; x--) {
-        h = *(p - width) + *(p - 1) + *(p + 1) + *(p + width) + (*p) * 60;
+    p = filter->map3 + m_w + 1;
+    q = filter->map2 + m_w + 1;
+    for (y = m_h - 2; y > 0; y--) {
+      for (x = m_w - 2; x > 0; x--) {
+        h = *(p - m_w) + *(p - 1) + *(p + 1) + *(p + m_w) + (*p) * 60;
         *q = h >> 6;
         p++;
         q++;
@@ -394,12 +396,12 @@ gst_rippletv_transform (GstBaseTransform * trans, GstBuffer * in,
   vp = filter->vtable;
   p = filter->map1;
-  for (y = height - 1; y > 0; y--) {
-    for (x = width - 1; x > 0; x--) {
+  for (y = m_h - 1; y > 0; y--) {
+    for (x = m_w - 1; x > 0; x--) {
       /* difference of the height between two voxel. They are twiced to
        * emphasise the wave. */
       vp[0] = sqrtable[((p[0] - p[1]) >> (point - 1)) & 0xff];
-      vp[1] = sqrtable[((p[0] - p[width]) >> (point - 1)) & 0xff];
+      vp[1] = sqrtable[((p[0] - p[m_w]) >> (point - 1)) & 0xff];
       p++;
       vp += 2;
     }
@@ -407,48 +409,34 @@ gst_rippletv_transform (GstBaseTransform * trans, GstBuffer * in,
     vp += 2;
   }

-  width = GST_VIDEO_FRAME_WIDTH (&in_frame);
-  height = GST_VIDEO_FRAME_HEIGHT (&in_frame);
-
   vp = filter->vtable;

   /* draw refracted image. The vector table is stretched. */
-  for (y = 0; y < height; y += 2) {
-    for (x = 0; x < width; x += 2) {
+  for (y = 0; y < v_h; y += 2) {
+    for (x = 0; x < v_w; x += 2) {
       h = (gint) vp[0];
       v = (gint) vp[1];
       dx = x + h;
       dy = y + v;
-      if (dx < 0)
-        dx = 0;
-      if (dy < 0)
-        dy = 0;
-      if (dx >= width)
-        dx = width - 1;
-      if (dy >= height)
-        dy = height - 1;
-      dest[0] = src[dy * width + dx];
+      dx = CLAMP (dx, 0, (v_w - 1));
+      dy = CLAMP (dy, 0, (v_h - 1));
+      dest[0] = src[dy * v_w + dx];

-      i = dx;
+      o_dx = dx;
       dx = x + 1 + (h + (gint) vp[2]) / 2;
-      if (dx < 0)
-        dx = 0;
-      if (dx >= width)
-        dx = width - 1;
-      dest[1] = src[dy * width + dx];
+      dx = CLAMP (dx, 0, (v_w - 1));
+      dest[1] = src[dy * v_w + dx];

-      dy = y + 1 + (v + (gint) vp[filter->map_w * 2 + 1]) / 2;
-      if (dy < 0)
-        dy = 0;
-      if (dy >= height)
-        dy = height - 1;
-      dest[width] = src[dy * width + i];
+      dy = y + 1 + (v + (gint) vp[m_w * 2 + 1]) / 2;
+      dy = CLAMP (dy, 0, (v_h - 1));
+      dest[v_w] = src[dy * v_w + o_dx];

-      dest[width + 1] = src[dy * width + dx];
+      dest[v_w + 1] = src[dy * v_w + dx];

       dest += 2;
       vp += 2;
     }
-    dest += width;
+    dest += v_w;
     vp += 2;
   }
   GST_OBJECT_UNLOCK (filter);
@@ -493,9 +481,11 @@ gst_rippletv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
   filter->map_h = height / 2 + 1;
   filter->map_w = width / 2 + 1;

+  /* we over allocate the buffers, as the render code does not handle clipping
+   * very well */
   if (filter->map)
     g_free (filter->map);
-  filter->map = g_new0 (gint, filter->map_h * filter->map_w * 3);
+  filter->map = g_new0 (gint, (1 + filter->map_h) * filter->map_w * 3);

   filter->map1 = filter->map;
   filter->map2 = filter->map + filter->map_w * filter->map_h;
@@ -503,15 +493,15 @@ gst_rippletv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
   if (filter->vtable)
     g_free (filter->vtable);
-  filter->vtable = g_new0 (gint8, filter->map_h * filter->map_w * 2);
+  filter->vtable = g_new0 (gint8, (1 + filter->map_h) * filter->map_w * 2);

   if (filter->background)
     g_free (filter->background);
-  filter->background = g_new0 (gint16, width * height);
+  filter->background = g_new0 (gint16, width * (height + 1));

   if (filter->diff)
     g_free (filter->diff);
-  filter->diff = g_new0 (guint8, width * height);
+  filter->diff = g_new0 (guint8, width * (height + 1));
   GST_OBJECT_UNLOCK (filter);

   return TRUE;
@@ -650,6 +640,7 @@ gst_rippletv_init (GstRippleTV * filter)
 {
   filter->mode = DEFAULT_MODE;

+  /* FIXME: remove this when memory corruption after resizes are fixed */
   gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
   gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
 }

gst/effectv/gstshagadelic.c

@@ -289,7 +289,4 @@ gst_shagadelictv_init (GstShagadelicTV * filter)
 {
   filter->ripple = NULL;
   filter->spiral = NULL;
-
-  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
-  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
 }

gst/effectv/gststreak.c

@@ -296,7 +296,4 @@ static void
 gst_streaktv_init (GstStreakTV * filter)
 {
   filter->feedback = DEFAULT_FEEDBACK;
-
-  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
-  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
 }

gst/effectv/gstvertigo.c

@@ -351,7 +351,4 @@ gst_vertigotv_init (GstVertigoTV * filter)
   filter->phase = 0.0;
   filter->phase_increment = 0.02;
   filter->zoomrate = 1.01;
-
-  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
-  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
 }

gst/effectv/gstwarp.c

@@ -133,11 +133,7 @@ initDistTable (GstWarpTV * filter, gint width, gint height)
 {
   gint32 halfw, halfh, *distptr;
   gint x, y;
-#ifdef PS2
   float m;
-#else
-  float m;
-#endif

   halfw = width >> 1;
   halfh = height >> 1;
@@ -304,6 +300,5 @@ gst_warptv_class_init (GstWarpTVClass * klass)
 static void
 gst_warptv_init (GstWarpTV * warptv)
 {
-  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (warptv));
-  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (warptv));
+  /* nothing to do */
 }

gst/flv/gstflvdemux.c

@@ -781,10 +781,10 @@ gst_flv_demux_push_tags (GstFlvDemux * demux)
 }

 static void
-gst_flv_demux_update_resync (GstFlvDemux * demux, guint32 pts, guint32 * last,
-    GstClockTime * offset)
+gst_flv_demux_update_resync (GstFlvDemux * demux, guint32 pts, gboolean discont,
+    guint32 * last, GstClockTime * offset)
 {
-  if (ABS (pts - *last) >= RESYNC_THRESHOLD) {
+  if (!discont && ABS (pts - *last) >= RESYNC_THRESHOLD) {
     /* Theoretically, we should use substract the duration of the last buffer,
        but this demuxer sends no durations on buffers, not sure if it cannot
        know, or just does not care to calculate. */
@@ -992,8 +992,8 @@ gst_flv_demux_parse_tag_audio (GstFlvDemux * demux, GstBuffer * buffer)
   }

   /* detect (and deem to be resyncs) large pts gaps */
-  gst_flv_demux_update_resync (demux, pts, &demux->last_audio_pts,
-      &demux->audio_time_offset);
+  gst_flv_demux_update_resync (demux, pts, demux->audio_need_discont,
+      &demux->last_audio_pts, &demux->audio_time_offset);

   /* Fill buffer with data */
   GST_BUFFER_TIMESTAMP (outbuf) = pts * GST_MSECOND + demux->audio_time_offset;
@@ -1369,8 +1369,8 @@ gst_flv_demux_parse_tag_video (GstFlvDemux * demux, GstBuffer * buffer)
   }

   /* detect (and deem to be resyncs) large pts gaps */
-  gst_flv_demux_update_resync (demux, pts, &demux->last_video_pts,
-      &demux->video_time_offset);
+  gst_flv_demux_update_resync (demux, pts, demux->video_need_discont,
+      &demux->last_video_pts, &demux->video_time_offset);

   /* Fill buffer with data */
   GST_BUFFER_TIMESTAMP (outbuf) = pts * GST_MSECOND + demux->video_time_offset;

gst/isomp4/gstqtmuxmap.c

@@ -199,7 +199,7 @@ GstQTMuxFormatProp gst_qt_mux_format_list[] = {
     "ismlmux",
     "ISML",
     "GstISMLMux",
-    GST_STATIC_CAPS ("video/quicktime, variant = (string) iso"),
+    GST_STATIC_CAPS ("video/quicktime, variant = (string) iso-fragmented"),
     GST_STATIC_CAPS (MPEG4V_CAPS "; " H264_CAPS),
     GST_STATIC_CAPS (MP3_CAPS "; " AAC_CAPS)
   }

gst/multifile/gstmultifilesink.c

@@ -594,6 +594,7 @@ gst_multi_file_sink_ensure_max_files (GstMultiFileSink * multifilesink)
       multifilesink->n_files >= multifilesink->max_files) {
     filename = multifilesink->files->data;
     g_remove (filename);
+    g_free (filename);
     multifilesink->files = g_slist_delete_link (multifilesink->files,
         multifilesink->files);
     multifilesink->n_files -= 1;

gst/wavparse/gstwavparse.c

@@ -1164,7 +1164,8 @@ gst_wavparse_stream_headers (GstWavParse * wav)
     if (tag == GST_RIFF_TAG_JUNK || tag == GST_RIFF_TAG_JUNQ ||
         tag == GST_RIFF_TAG_bext || tag == GST_RIFF_TAG_BEXT ||
-        tag == GST_RIFF_TAG_LIST) {
+        tag == GST_RIFF_TAG_LIST || tag == GST_RIFF_TAG_ID32 ||
+        tag == GST_RIFF_TAG_IDVX) {
       GST_DEBUG_OBJECT (wav, "skipping %" GST_FOURCC_FORMAT " chunk",
           GST_FOURCC_ARGS (tag));
       gst_buffer_unref (buf);

tests/check/elements/multifile.c

@@ -40,19 +40,19 @@ run_pipeline (GstElement * pipeline)
   gst_element_set_state (pipeline, GST_STATE_NULL);
 }

+#if !GLIB_CHECK_VERSION(2,26,0)
 static gchar *
-g_mkdtemp (const gchar * template)
+g_mkdtemp (gchar * template)
 {
-  gchar *s;
   gchar *tmpdir;

-  s = g_strdup (template);
-  tmpdir = mkdtemp (s);
+  tmpdir = mkdtemp (template);
   if (tmpdir == NULL) {
-    g_free (s);
+    g_free (template);
   }

   return tmpdir;
 }
+#endif

 GST_START_TEST (test_multifilesink_key_frame)
 {
@@ -93,7 +93,6 @@ GST_START_TEST (test_multifilesink_key_frame)
   g_free (mfs_pattern);
   g_free (my_tmpdir);
-  g_free (template);
 }

 GST_END_TEST;
@@ -144,7 +143,6 @@ GST_START_TEST (test_multifilesink_max_files)
   g_free (mfs_pattern);
   g_free (my_tmpdir);
-  g_free (template);
 }

 GST_END_TEST;
@@ -202,7 +200,6 @@ GST_START_TEST (test_multifilesrc)
   g_free (mfs_pattern);
   g_free (my_tmpdir);
-  g_free (template);
 }

 GST_END_TEST;