Merge branch 'master' into 0.11

Conflicts:
	ext/speex/gstspeexdec.c
	ext/speex/gstspeexenc.c
	gst/isomp4/atoms.c
	gst/isomp4/gstqtmux.c
Wim Taymans 2011-10-06 12:23:39 +02:00
commit 586ef0babd
15 changed files with 670 additions and 1213 deletions


@ -1,13 +1,14 @@
plugin_LTLIBRARIES = libgstspeex.la
libgstspeex_la_SOURCES = gstspeex.c gstspeexdec.c gstspeexenc.c
libgstspeex_la_CFLAGS = \
libgstspeex_la_CFLAGS = -DGST_USE_UNSTABLE_API \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) \
$(GST_CFLAGS) \
$(SPEEX_CFLAGS)
libgstspeex_la_LIBADD = \
$(GST_PLUGINS_BASE_LIBS) -lgsttag-$(GST_MAJORMINOR) \
$(GST_PLUGINS_BASE_LIBS) \
-lgsttag-$(GST_MAJORMINOR) -lgstaudio-$(GST_MAJORMINOR) \
$(GST_BASE_LIBS) \
$(GST_LIBS) \
$(SPEEX_LIBS)


@ -78,53 +78,43 @@ GST_STATIC_PAD_TEMPLATE ("sink",
);
#define gst_speex_dec_parent_class parent_class
G_DEFINE_TYPE (GstSpeexDec, gst_speex_dec, GST_TYPE_ELEMENT);
G_DEFINE_TYPE (GstSpeexDec, gst_speex_dec, GST_TYPE_AUDIO_DECODER);
static gboolean speex_dec_sink_event (GstPad * pad, GstEvent * event);
static GstFlowReturn speex_dec_chain (GstPad * pad, GstBuffer * buf);
static GstStateChangeReturn speex_dec_change_state (GstElement * element,
GstStateChange transition);
static gboolean speex_dec_src_event (GstPad * pad, GstEvent * event);
static gboolean speex_dec_src_query (GstPad * pad, GstQuery * query);
static gboolean speex_dec_sink_query (GstPad * pad, GstQuery * query);
static const GstQueryType *speex_get_src_query_types (GstPad * pad);
static const GstQueryType *speex_get_sink_query_types (GstPad * pad);
static gboolean speex_dec_convert (GstPad * pad,
GstFormat src_format, gint64 src_value,
GstFormat * dest_format, gint64 * dest_value);
static gboolean gst_speex_dec_start (GstAudioDecoder * dec);
static gboolean gst_speex_dec_stop (GstAudioDecoder * dec);
static gboolean gst_speex_dec_set_format (GstAudioDecoder * bdec,
GstCaps * caps);
static GstFlowReturn gst_speex_dec_handle_frame (GstAudioDecoder * dec,
GstBuffer * buffer);
static void gst_speex_dec_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_speex_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static GstFlowReturn speex_dec_chain_parse_data (GstSpeexDec * dec,
GstBuffer * buf, GstClockTime timestamp, GstClockTime duration);
static GstFlowReturn speex_dec_chain_parse_header (GstSpeexDec * dec,
GstBuffer * buf);
static GstFlowReturn speex_dec_chain_parse_comments (GstSpeexDec * dec,
GstBuffer * buf);
static void
gst_speex_dec_class_init (GstSpeexDecClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstAudioDecoderClass *base_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
base_class = (GstAudioDecoderClass *) klass;
gobject_class->set_property = gst_speex_dec_set_property;
gobject_class->get_property = gst_speex_dec_get_property;
base_class->start = GST_DEBUG_FUNCPTR (gst_speex_dec_start);
base_class->stop = GST_DEBUG_FUNCPTR (gst_speex_dec_stop);
base_class->set_format = GST_DEBUG_FUNCPTR (gst_speex_dec_set_format);
base_class->handle_frame = GST_DEBUG_FUNCPTR (gst_speex_dec_handle_frame);
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ENH,
g_param_spec_boolean ("enh", "Enh", "Enable perceptual enhancement",
DEFAULT_ENH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gstelement_class->change_state = GST_DEBUG_FUNCPTR (speex_dec_change_state);
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&speex_dec_src_factory));
gst_element_class_add_pad_template (gstelement_class,
@ -140,7 +130,6 @@ gst_speex_dec_class_init (GstSpeexDecClass * klass)
static void
gst_speex_dec_reset (GstSpeexDec * dec)
{
gst_segment_init (&dec->segment, GST_FORMAT_UNDEFINED);
dec->packetno = 0;
dec->frame_size = 0;
dec->frame_duration = 0;
@ -166,396 +155,38 @@ gst_speex_dec_reset (GstSpeexDec * dec)
static void
gst_speex_dec_init (GstSpeexDec * dec)
{
dec->sinkpad =
gst_pad_new_from_static_template (&speex_dec_sink_factory, "sink");
gst_pad_set_chain_function (dec->sinkpad,
GST_DEBUG_FUNCPTR (speex_dec_chain));
gst_pad_set_event_function (dec->sinkpad,
GST_DEBUG_FUNCPTR (speex_dec_sink_event));
gst_pad_set_query_type_function (dec->sinkpad,
GST_DEBUG_FUNCPTR (speex_get_sink_query_types));
gst_pad_set_query_function (dec->sinkpad,
GST_DEBUG_FUNCPTR (speex_dec_sink_query));
gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad);
dec->srcpad =
gst_pad_new_from_static_template (&speex_dec_src_factory, "src");
gst_pad_use_fixed_caps (dec->srcpad);
gst_pad_set_event_function (dec->srcpad,
GST_DEBUG_FUNCPTR (speex_dec_src_event));
gst_pad_set_query_type_function (dec->srcpad,
GST_DEBUG_FUNCPTR (speex_get_src_query_types));
gst_pad_set_query_function (dec->srcpad,
GST_DEBUG_FUNCPTR (speex_dec_src_query));
gst_element_add_pad (GST_ELEMENT (dec), dec->srcpad);
dec->enh = DEFAULT_ENH;
gst_speex_dec_reset (dec);
}
static gboolean
speex_dec_sink_setcaps (GstPad * pad, GstCaps * caps)
gst_speex_dec_start (GstAudioDecoder * dec)
{
GstSpeexDec *dec = GST_SPEEX_DEC (gst_pad_get_parent (pad));
gboolean ret = TRUE;
GstStructure *s;
const GValue *streamheader;
GstSpeexDec *sd = GST_SPEEX_DEC (dec);
s = gst_caps_get_structure (caps, 0);
if ((streamheader = gst_structure_get_value (s, "streamheader")) &&
G_VALUE_HOLDS (streamheader, GST_TYPE_ARRAY) &&
gst_value_array_get_size (streamheader) >= 2) {
const GValue *header, *vorbiscomment;
GstBuffer *buf;
GstFlowReturn res = GST_FLOW_OK;
GST_DEBUG_OBJECT (dec, "start");
gst_speex_dec_reset (sd);
header = gst_value_array_get_value (streamheader, 0);
if (header && G_VALUE_HOLDS (header, GST_TYPE_BUFFER)) {
buf = gst_value_get_buffer (header);
res = speex_dec_chain_parse_header (dec, buf);
if (res != GST_FLOW_OK)
goto done;
gst_buffer_replace (&dec->streamheader, buf);
}
/* we know about concealment */
gst_audio_decoder_set_plc_aware (dec, TRUE);
vorbiscomment = gst_value_array_get_value (streamheader, 1);
if (vorbiscomment && G_VALUE_HOLDS (vorbiscomment, GST_TYPE_BUFFER)) {
buf = gst_value_get_buffer (vorbiscomment);
res = speex_dec_chain_parse_comments (dec, buf);
if (res != GST_FLOW_OK)
goto done;
gst_buffer_replace (&dec->vorbiscomment, buf);
}
}
done:
gst_object_unref (dec);
return ret;
return TRUE;
}
static gboolean
speex_dec_convert (GstPad * pad,
GstFormat src_format, gint64 src_value,
GstFormat * dest_format, gint64 * dest_value)
gst_speex_dec_stop (GstAudioDecoder * dec)
{
gboolean res = TRUE;
GstSpeexDec *dec;
guint64 scale = 1;
GstSpeexDec *sd = GST_SPEEX_DEC (dec);
dec = GST_SPEEX_DEC (gst_pad_get_parent (pad));
GST_DEBUG_OBJECT (dec, "stop");
gst_speex_dec_reset (sd);
if (src_format == *dest_format) {
*dest_value = src_value;
res = TRUE;
goto cleanup;
}
if (dec->packetno < 1) {
res = FALSE;
goto cleanup;
}
if (pad == dec->sinkpad &&
(src_format == GST_FORMAT_BYTES || *dest_format == GST_FORMAT_BYTES)) {
res = FALSE;
goto cleanup;
}
switch (src_format) {
case GST_FORMAT_TIME:
switch (*dest_format) {
case GST_FORMAT_BYTES:
scale = 2 * dec->header->nb_channels;
case GST_FORMAT_DEFAULT:
*dest_value =
gst_util_uint64_scale_int (scale * src_value, dec->header->rate,
GST_SECOND);
break;
default:
res = FALSE;
break;
}
break;
case GST_FORMAT_DEFAULT:
switch (*dest_format) {
case GST_FORMAT_BYTES:
*dest_value = src_value * 2 * dec->header->nb_channels;
break;
case GST_FORMAT_TIME:
*dest_value =
gst_util_uint64_scale_int (src_value, GST_SECOND,
dec->header->rate);
break;
default:
res = FALSE;
break;
}
break;
case GST_FORMAT_BYTES:
switch (*dest_format) {
case GST_FORMAT_DEFAULT:
*dest_value = src_value / (2 * dec->header->nb_channels);
break;
case GST_FORMAT_TIME:
*dest_value = gst_util_uint64_scale_int (src_value, GST_SECOND,
dec->header->rate * 2 * dec->header->nb_channels);
break;
default:
res = FALSE;
break;
}
break;
default:
res = FALSE;
break;
}
cleanup:
gst_object_unref (dec);
return res;
}
static const GstQueryType *
speex_get_sink_query_types (GstPad * pad)
{
static const GstQueryType speex_dec_sink_query_types[] = {
GST_QUERY_CONVERT,
0
};
return speex_dec_sink_query_types;
}
static gboolean
speex_dec_sink_query (GstPad * pad, GstQuery * query)
{
GstSpeexDec *dec;
gboolean res;
dec = GST_SPEEX_DEC (gst_pad_get_parent (pad));
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONVERT:
{
GstFormat src_fmt, dest_fmt;
gint64 src_val, dest_val;
gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
res = speex_dec_convert (pad, src_fmt, src_val, &dest_fmt, &dest_val);
if (res) {
gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
}
break;
}
default:
res = gst_pad_query_default (pad, query);
break;
}
gst_object_unref (dec);
return res;
}
static const GstQueryType *
speex_get_src_query_types (GstPad * pad)
{
static const GstQueryType speex_dec_src_query_types[] = {
GST_QUERY_POSITION,
GST_QUERY_DURATION,
0
};
return speex_dec_src_query_types;
}
static gboolean
speex_dec_src_query (GstPad * pad, GstQuery * query)
{
GstSpeexDec *dec;
gboolean res = FALSE;
dec = GST_SPEEX_DEC (gst_pad_get_parent (pad));
/* FIXME: why not just pass position/duration queries upstream to demuxer? */
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_POSITION:{
GstSegment segment;
GstFormat format;
gint64 cur;
gst_query_parse_position (query, &format, NULL);
GST_PAD_STREAM_LOCK (dec->sinkpad);
segment = dec->segment;
GST_PAD_STREAM_UNLOCK (dec->sinkpad);
if (segment.format != GST_FORMAT_TIME) {
GST_DEBUG_OBJECT (dec, "segment not initialised yet");
break;
}
if ((res = speex_dec_convert (dec->srcpad, GST_FORMAT_TIME,
segment.position, &format, &cur))) {
gst_query_set_position (query, format, cur);
}
break;
}
case GST_QUERY_DURATION:{
GstFormat format;
gint64 dur;
/* get duration from demuxer */
if (!gst_pad_query_peer_duration (dec->sinkpad, GST_FORMAT_TIME, &dur))
break;
gst_query_parse_duration (query, &format, NULL);
/* and convert it into the requested format */
if ((res = speex_dec_convert (dec->srcpad, GST_FORMAT_TIME,
dur, &format, &dur))) {
gst_query_set_duration (query, format, dur);
}
break;
}
default:
res = gst_pad_query_default (pad, query);
break;
}
gst_object_unref (dec);
return res;
}
static gboolean
speex_dec_src_event (GstPad * pad, GstEvent * event)
{
gboolean res = FALSE;
GstSpeexDec *dec = GST_SPEEX_DEC (gst_pad_get_parent (pad));
GST_LOG_OBJECT (dec, "handling %s event", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK:{
GstFormat format, tformat;
gdouble rate;
GstEvent *real_seek;
GstSeekFlags flags;
GstSeekType cur_type, stop_type;
gint64 cur, stop;
gint64 tcur, tstop;
gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
&stop_type, &stop);
/* we have to ask our peer to seek to time here as we know
* nothing about how to generate a granulepos from the src
* formats or anything.
*
* First bring the requested format to time
*/
tformat = GST_FORMAT_TIME;
if (!(res = speex_dec_convert (pad, format, cur, &tformat, &tcur)))
break;
if (!(res = speex_dec_convert (pad, format, stop, &tformat, &tstop)))
break;
/* then seek with time on the peer */
real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
flags, cur_type, tcur, stop_type, tstop);
GST_LOG_OBJECT (dec, "seek to %" GST_TIME_FORMAT, GST_TIME_ARGS (tcur));
res = gst_pad_push_event (dec->sinkpad, real_seek);
gst_event_unref (event);
break;
}
default:
res = gst_pad_event_default (pad, event);
break;
}
gst_object_unref (dec);
return res;
}
static gboolean
speex_dec_sink_event (GstPad * pad, GstEvent * event)
{
GstSpeexDec *dec;
gboolean ret = FALSE;
dec = GST_SPEEX_DEC (gst_pad_get_parent (pad));
GST_LOG_OBJECT (dec, "handling %s event", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
ret = speex_dec_sink_setcaps (pad, caps);
gst_event_unref (event);
break;
}
case GST_EVENT_SEGMENT:{
GstSegment segment;
gst_event_copy_segment (event, &segment);
if (segment.format != GST_FORMAT_TIME)
goto newseg_wrong_format;
if (segment.rate <= 0.0)
goto newseg_wrong_rate;
#if 0
if (update) {
/* time progressed without data, see if we can fill the gap with
* some concealment data */
if (dec->segment.position < start) {
GstClockTime duration;
duration = start - dec->segment.position;
speex_dec_chain_parse_data (dec, NULL, dec->segment.position,
duration);
}
}
#endif
/* now configure the values */
dec->segment = segment;
GST_DEBUG_OBJECT (dec, "segment now: %" GST_SEGMENT_FORMAT, &segment);
ret = gst_pad_push_event (dec->srcpad, event);
break;
}
default:
ret = gst_pad_event_default (pad, event);
break;
}
gst_object_unref (dec);
return ret;
/* ERRORS */
newseg_wrong_format:
{
GST_DEBUG_OBJECT (dec, "received non TIME newsegment");
gst_object_unref (dec);
return FALSE;
}
newseg_wrong_rate:
{
GST_DEBUG_OBJECT (dec, "negative rates not supported yet");
gst_object_unref (dec);
return FALSE;
}
return TRUE;
}
static GstFlowReturn
speex_dec_chain_parse_header (GstSpeexDec * dec, GstBuffer * buf)
gst_speex_dec_parse_header (GstSpeexDec * dec, GstBuffer * buf)
{
GstCaps *caps;
char *data;
@ -603,7 +234,7 @@ speex_dec_chain_parse_header (GstSpeexDec * dec, GstBuffer * buf)
"rate", G_TYPE_INT, dec->header->rate,
"channels", G_TYPE_INT, dec->header->nb_channels, NULL);
if (!gst_pad_set_caps (dec->srcpad, caps))
if (!gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec), caps))
goto nego_failed;
gst_caps_unref (caps);
@ -640,7 +271,7 @@ nego_failed:
}
static GstFlowReturn
speex_dec_chain_parse_comments (GstSpeexDec * dec, GstBuffer * buf)
gst_speex_dec_parse_comments (GstSpeexDec * dec, GstBuffer * buf)
{
GstTagList *list;
gchar *ver, *encoder = NULL;
@ -675,7 +306,8 @@ speex_dec_chain_parse_comments (GstSpeexDec * dec, GstBuffer * buf)
GST_INFO_OBJECT (dec, "tags: %" GST_PTR_FORMAT, list);
gst_element_found_tags_for_pad (GST_ELEMENT (dec), dec->srcpad, list);
gst_element_found_tags_for_pad (GST_ELEMENT (dec),
GST_AUDIO_DECODER_SRC_PAD (dec), list);
g_free (encoder);
g_free (ver);
@ -683,9 +315,47 @@ speex_dec_chain_parse_comments (GstSpeexDec * dec, GstBuffer * buf)
return GST_FLOW_OK;
}
static gboolean
gst_speex_dec_set_format (GstAudioDecoder * bdec, GstCaps * caps)
{
GstSpeexDec *dec = GST_SPEEX_DEC (bdec);
gboolean ret = TRUE;
GstStructure *s;
const GValue *streamheader;
s = gst_caps_get_structure (caps, 0);
if ((streamheader = gst_structure_get_value (s, "streamheader")) &&
G_VALUE_HOLDS (streamheader, GST_TYPE_ARRAY) &&
gst_value_array_get_size (streamheader) >= 2) {
const GValue *header, *vorbiscomment;
GstBuffer *buf;
GstFlowReturn res = GST_FLOW_OK;
header = gst_value_array_get_value (streamheader, 0);
if (header && G_VALUE_HOLDS (header, GST_TYPE_BUFFER)) {
buf = gst_value_get_buffer (header);
res = gst_speex_dec_parse_header (dec, buf);
if (res != GST_FLOW_OK)
goto done;
gst_buffer_replace (&dec->streamheader, buf);
}
vorbiscomment = gst_value_array_get_value (streamheader, 1);
if (vorbiscomment && G_VALUE_HOLDS (vorbiscomment, GST_TYPE_BUFFER)) {
buf = gst_value_get_buffer (vorbiscomment);
res = gst_speex_dec_parse_comments (dec, buf);
if (res != GST_FLOW_OK)
goto done;
gst_buffer_replace (&dec->vorbiscomment, buf);
}
}
done:
return ret;
}
static GstFlowReturn
speex_dec_chain_parse_data (GstSpeexDec * dec, GstBuffer * buf,
GstClockTime timestamp, GstClockTime duration)
gst_speex_dec_parse_data (GstSpeexDec * dec, GstBuffer * buf)
{
GstFlowReturn res = GST_FLOW_OK;
gint i, fpp;
@ -696,13 +366,7 @@ speex_dec_chain_parse_data (GstSpeexDec * dec, GstBuffer * buf,
if (!dec->frame_duration)
goto not_negotiated;
if (timestamp != -1) {
dec->segment.position = timestamp;
} else {
timestamp = dec->segment.position;
}
if (buf) {
if (G_LIKELY (gst_buffer_get_size (buf))) {
/* send data to the bitstream */
data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
speex_bits_read_from (&dec->bits, data, size);
@ -711,16 +375,16 @@ speex_dec_chain_parse_data (GstSpeexDec * dec, GstBuffer * buf,
fpp = dec->header->frames_per_packet;
bits = &dec->bits;
GST_DEBUG_OBJECT (dec, "received buffer of size %u, fpp %d, %d bits", size,
fpp, speex_bits_remaining (bits));
GST_DEBUG_OBJECT (dec, "received buffer of size %u, fpp %d, %d bits",
size, fpp, speex_bits_remaining (bits));
} else {
/* FIXME ? actually consider how much concealment is needed */
/* concealment data, pass NULL as the bits parameters */
GST_DEBUG_OBJECT (dec, "creating concealment data");
fpp = dec->header->frames_per_packet;
bits = NULL;
}
/* now decode each frame, catering for unknown number of them (e.g. rtp) */
for (i = 0; i < fpp; i++) {
GstBuffer *outbuf;
@ -730,9 +394,10 @@ speex_dec_chain_parse_data (GstSpeexDec * dec, GstBuffer * buf,
GST_LOG_OBJECT (dec, "decoding frame %d/%d, %d bits remaining", i, fpp,
bits ? speex_bits_remaining (bits) : -1);
#if 0
res = gst_pad_alloc_buffer_and_set_caps (dec->srcpad,
res =
gst_pad_alloc_buffer_and_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec),
GST_BUFFER_OFFSET_NONE, dec->frame_size * dec->header->nb_channels * 2,
GST_PAD_CAPS (dec->srcpad), &outbuf);
GST_PAD_CAPS (GST_AUDIO_DECODER_SRC_PAD (dec)), &outbuf);
if (res != GST_FLOW_OK) {
GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));
@ -754,39 +419,27 @@ speex_dec_chain_parse_data (GstSpeexDec * dec, GstBuffer * buf,
if (fpp == 0 && speex_bits_remaining (bits) < 8) {
/* if we did not know how many frames to expect, then we get this
at the end if there are leftover bits to pad to the next byte */
GST_DEBUG_OBJECT (dec, "Discarding leftover bits");
} else {
GST_WARNING_OBJECT (dec, "Unexpected end of stream found");
}
gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), NULL, 1);
gst_buffer_unref (outbuf);
outbuf = NULL;
break;
} else if (ret == -2) {
GST_WARNING_OBJECT (dec, "Decoding error: corrupted stream?");
gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), NULL, 1);
gst_buffer_unref (outbuf);
outbuf = NULL;
break;
}
if (bits && speex_bits_remaining (bits) < 0) {
GST_WARNING_OBJECT (dec, "Decoding overflow: corrupted stream?");
gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), NULL, 1);
gst_buffer_unref (outbuf);
outbuf = NULL;
break;
}
if (dec->header->nb_channels == 2)
speex_decode_stereo_int (out_data, dec->frame_size, dec->stereo);
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
GST_BUFFER_DURATION (outbuf) = dec->frame_duration;
dec->segment.position += dec->frame_duration;
timestamp = dec->segment.position;
GST_LOG_OBJECT (dec, "pushing buffer with ts=%" GST_TIME_FORMAT ", dur=%"
GST_TIME_FORMAT, GST_TIME_ARGS (timestamp),
GST_TIME_ARGS (dec->frame_duration));
res = gst_pad_push (dec->srcpad, outbuf);
res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), outbuf, 1);
if (res != GST_FLOW_OK) {
GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));
@ -826,48 +479,55 @@ memcmp_buffers (GstBuffer * buf1, GstBuffer * buf2)
}
static GstFlowReturn
speex_dec_chain (GstPad * pad, GstBuffer * buf)
gst_speex_dec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buf)
{
GstFlowReturn res;
GstSpeexDec *dec;
dec = GST_SPEEX_DEC (gst_pad_get_parent (pad));
/* no fancy draining */
if (G_UNLIKELY (!buf))
return GST_FLOW_OK;
dec = GST_SPEEX_DEC (bdec);
/* If we have the streamheader and vorbiscomment from the caps already
* ignore them here */
if (dec->streamheader && dec->vorbiscomment) {
if (memcmp_buffers (dec->streamheader, buf)) {
GST_DEBUG_OBJECT (dec, "found streamheader");
gst_audio_decoder_finish_frame (bdec, NULL, 1);
res = GST_FLOW_OK;
} else if (memcmp_buffers (dec->vorbiscomment, buf)) {
GST_DEBUG_OBJECT (dec, "found vorbiscomments");
gst_audio_decoder_finish_frame (bdec, NULL, 1);
res = GST_FLOW_OK;
} else {
res =
speex_dec_chain_parse_data (dec, buf, GST_BUFFER_TIMESTAMP (buf),
GST_BUFFER_DURATION (buf));
res = gst_speex_dec_parse_data (dec, buf);
}
} else {
/* Otherwise fall back to packet counting and assume that the
* first two packets are the headers. */
switch (dec->packetno) {
case 0:
res = speex_dec_chain_parse_header (dec, buf);
GST_DEBUG_OBJECT (dec, "counted streamheader");
res = gst_speex_dec_parse_header (dec, buf);
gst_audio_decoder_finish_frame (bdec, NULL, 1);
break;
case 1:
res = speex_dec_chain_parse_comments (dec, buf);
GST_DEBUG_OBJECT (dec, "counted vorbiscomments");
res = gst_speex_dec_parse_comments (dec, buf);
gst_audio_decoder_finish_frame (bdec, NULL, 1);
break;
default:
res =
speex_dec_chain_parse_data (dec, buf, GST_BUFFER_TIMESTAMP (buf),
GST_BUFFER_DURATION (buf));
{
res = gst_speex_dec_parse_data (dec, buf);
break;
}
}
}
dec->packetno++;
gst_buffer_unref (buf);
gst_object_unref (dec);
return res;
}
@ -906,37 +566,3 @@ gst_speex_dec_set_property (GObject * object, guint prop_id,
break;
}
}
static GstStateChangeReturn
speex_dec_change_state (GstElement * element, GstStateChange transition)
{
GstStateChangeReturn ret;
GstSpeexDec *dec = GST_SPEEX_DEC (element);
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
case GST_STATE_CHANGE_READY_TO_PAUSED:
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
default:
break;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
if (ret != GST_STATE_CHANGE_SUCCESS)
return ret;
switch (transition) {
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_speex_dec_reset (dec);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
break;
default:
break;
}
return ret;
}
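
To make the shape of this port easier to follow, here is a minimal sketch of how an audio decoder hooks into the GstAudioDecoder base class that the diff above switches to. It is not part of the commit; MyDec and the my_dec_* names are placeholders, and only the vfuncs that appear in the diff are wired up.

#include <gst/gst.h>
#include <gst/audio/gstaudiodecoder.h>

typedef struct _MyDec { GstAudioDecoder parent; } MyDec;
typedef struct _MyDecClass { GstAudioDecoderClass parent_class; } MyDecClass;

G_DEFINE_TYPE (MyDec, my_dec, GST_TYPE_AUDIO_DECODER);

static gboolean
my_dec_start (GstAudioDecoder * dec)
{
  /* allocate decoder state, reset packet counters */
  return TRUE;
}

static gboolean
my_dec_stop (GstAudioDecoder * dec)
{
  /* free decoder state */
  return TRUE;
}

static gboolean
my_dec_set_format (GstAudioDecoder * dec, GstCaps * caps)
{
  /* parse streamheader buffers from the caps and set the output caps,
   * as gst_speex_dec_set_format() does above */
  return TRUE;
}

static GstFlowReturn
my_dec_handle_frame (GstAudioDecoder * dec, GstBuffer * buf)
{
  /* NULL buffer means the base class asks for draining; nothing fancy here */
  if (buf == NULL)
    return GST_FLOW_OK;

  /* header packets produce no audio: report one consumed frame, no output,
   * i.e. gst_audio_decoder_finish_frame (dec, NULL, 1);
   * decoded audio goes back through the base class, which timestamps and
   * pushes it, i.e. gst_audio_decoder_finish_frame (dec, outbuf, 1); */
  return GST_FLOW_OK;
}

static void
my_dec_class_init (MyDecClass * klass)
{
  GstAudioDecoderClass *base_class = (GstAudioDecoderClass *) klass;

  base_class->start = GST_DEBUG_FUNCPTR (my_dec_start);
  base_class->stop = GST_DEBUG_FUNCPTR (my_dec_stop);
  base_class->set_format = GST_DEBUG_FUNCPTR (my_dec_set_format);
  base_class->handle_frame = GST_DEBUG_FUNCPTR (my_dec_handle_frame);
}

static void
my_dec_init (MyDec * dec)
{
  /* the speex decoder also declares itself packet-loss-concealment aware */
  gst_audio_decoder_set_plc_aware (GST_AUDIO_DECODER (dec), TRUE);
}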


@ -22,6 +22,8 @@
#define __GST_SPEEX_DEC_H__
#include <gst/gst.h>
#include <gst/audio/gstaudiodecoder.h>
#include <speex/speex.h>
#include <speex/speex_callbacks.h>
#include <speex/speex_header.h>
@ -44,11 +46,7 @@ typedef struct _GstSpeexDec GstSpeexDec;
typedef struct _GstSpeexDecClass GstSpeexDecClass;
struct _GstSpeexDec {
GstElement element;
/* pads */
GstPad *sinkpad;
GstPad *srcpad;
GstAudioDecoder element;
void *state;
SpeexStereoState *stereo;
@ -67,14 +65,12 @@ struct _GstSpeexDec {
GstClockTime frame_duration;
guint64 packetno;
GstSegment segment; /* STREAM LOCK */
GstBuffer *streamheader;
GstBuffer *vorbiscomment;
};
struct _GstSpeexDecClass {
GstElementClass parent_class;
GstAudioDecoderClass parent_class;
};
GType gst_speex_dec_get_type (void);

File diff suppressed because it is too large.


@ -23,7 +23,7 @@
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/audio/gstaudioencoder.h>
#include <speex/speex.h>
#include <speex/speex_header.h>
@ -53,14 +53,7 @@ typedef struct _GstSpeexEnc GstSpeexEnc;
typedef struct _GstSpeexEncClass GstSpeexEncClass;
struct _GstSpeexEnc {
GstElement element;
/* pads */
GstPad *sinkpad,
*srcpad;
gint packet_count;
gint n_packets;
GstAudioEncoder element;
SpeexBits bits;
SpeexHeader header;
@ -70,9 +63,9 @@ struct _GstSpeexEnc {
const SpeexMode *speex_mode;
#endif
void *state;
GstSpeexMode mode;
GstAdapter *adapter;
/* properties */
GstSpeexMode mode;
gfloat quality;
gint bitrate;
gboolean vbr;
@ -81,40 +74,24 @@ struct _GstSpeexEnc {
gboolean dtx;
gint complexity;
gint nframes;
gint lookahead;
gchar *last_message;
gint channels;
gint rate;
gboolean setup;
gboolean header_sent;
guint64 samples_in;
guint64 bytes_out;
GstTagList *tags;
gchar *last_message;
gint frame_size;
guint64 frameno;
guint64 frameno_out;
gint lookahead;
guint8 *comments;
gint comment_len;
/* Timestamp and granulepos tracking */
GstClockTime start_ts;
GstClockTime next_ts;
guint64 granulepos_offset;
};
struct _GstSpeexEncClass {
GstElementClass parent_class;
/* signals */
void (*frame_encoded) (GstElement *element);
GstAudioEncoderClass parent_class;
};
GType gst_speex_enc_get_type (void);


@ -2743,6 +2743,81 @@ atom_moov_chunks_add_offset (AtomMOOV * moov, guint32 offset)
}
}
void
atom_trak_update_bitrates (AtomTRAK * trak, guint32 avg_bitrate,
guint32 max_bitrate)
{
AtomESDS *esds = NULL;
AtomData *btrt = NULL;
AtomWAVE *wave = NULL;
AtomSTSD *stsd;
GList *iter;
GList *extensioniter = NULL;
g_return_if_fail (trak != NULL);
if (avg_bitrate == 0 && max_bitrate == 0)
return;
stsd = &trak->mdia.minf.stbl.stsd;
for (iter = stsd->entries; iter; iter = g_list_next (iter)) {
SampleTableEntry *entry = iter->data;
switch (entry->kind) {
case AUDIO:{
SampleTableEntryMP4A *audioentry = (SampleTableEntryMP4A *) entry;
extensioniter = audioentry->extension_atoms;
break;
}
case VIDEO:{
SampleTableEntryMP4V *videoentry = (SampleTableEntryMP4V *) entry;
extensioniter = videoentry->extension_atoms;
break;
}
default:
break;
}
}
for (; extensioniter; extensioniter = g_list_next (extensioniter)) {
AtomInfo *atominfo = extensioniter->data;
if (atominfo->atom->type == FOURCC_esds) {
esds = (AtomESDS *) atominfo->atom;
} else if (atominfo->atom->type == FOURCC_btrt) {
btrt = (AtomData *) atominfo->atom;
} else if (atominfo->atom->type == FOURCC_wave) {
wave = (AtomWAVE *) atominfo->atom;
}
}
/* wave might have an esds internally */
if (wave) {
for (extensioniter = wave->extension_atoms; extensioniter;
extensioniter = g_list_next (extensioniter)) {
AtomInfo *atominfo = extensioniter->data;
if (atominfo->atom->type == FOURCC_esds) {
esds = (AtomESDS *) atominfo->atom;
break;
}
}
}
if (esds) {
if (avg_bitrate && esds->es.dec_conf_desc.avg_bitrate == 0)
esds->es.dec_conf_desc.avg_bitrate = avg_bitrate;
if (max_bitrate && esds->es.dec_conf_desc.max_bitrate == 0)
esds->es.dec_conf_desc.max_bitrate = max_bitrate;
}
if (btrt) {
/* type(4bytes) + size(4bytes) + buffersize(4bytes) +
* maxbitrate(bytes) + avgbitrate(bytes) */
if (max_bitrate && GST_READ_UINT32_BE (btrt->data + 4) == 0)
GST_WRITE_UINT32_BE (btrt->data + 4, max_bitrate);
if (avg_bitrate && GST_READ_UINT32_BE (btrt->data + 8) == 0)
GST_WRITE_UINT32_BE (btrt->data + 8, avg_bitrate);
}
}
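
To relate the patching in atom_trak_update_bitrates() above to the writer in build_btrt_extension() further down: judging from that code, the btrt payload is a plain 12-byte big-endian blob (the atom header is not part of AtomData->data), which is why max and average bitrate sit at offsets 4 and 8. A small sketch of that layout; fill_btrt_payload is a hypothetical helper, not anything in the tree:

#include <gst/gst.h>

static void
fill_btrt_payload (guint8 payload[12], guint32 buffer_size_db,
    guint32 max_bitrate, guint32 avg_bitrate)
{
  GST_WRITE_UINT32_BE (payload + 0, buffer_size_db);  /* decoding buffer size */
  GST_WRITE_UINT32_BE (payload + 4, max_bitrate);     /* read back at data + 4 */
  GST_WRITE_UINT32_BE (payload + 8, avg_bitrate);     /* read back at data + 8 */
}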
/*
* Meta tags functions
*/
@ -3973,17 +4048,13 @@ build_btrt_extension (guint32 buffer_size_db, guint32 avg_bitrate,
GstBuffer *buf;
guint8 *data;
if (buffer_size_db == 0 && avg_bitrate == 0 && max_bitrate == 0)
return 0;
data = g_malloc (12);
GST_WRITE_UINT32_BE (data, buffer_size_db);
GST_WRITE_UINT32_BE (data + 4, max_bitrate);
GST_WRITE_UINT32_BE (data + 8, avg_bitrate);
buf = _gst_buffer_new_wrapped (data, 12, g_free);
atom_data =
atom_data_new_from_gst_buffer (GST_MAKE_FOURCC ('b', 't', 'r', 't'), buf);
atom_data = atom_data_new_from_gst_buffer (FOURCC_btrt, buf);
gst_buffer_unref (buf);
return build_atom_info_wrapper ((Atom *) atom_data, atom_data_copy_data,


@ -906,6 +906,9 @@ void atom_trak_set_video_type (AtomTRAK * trak, AtomsContext * context,
VisualSampleEntry * entry, guint32 rate,
GList * ext_atoms_list);
void atom_trak_update_bitrates (AtomTRAK * trak, guint32 avg_bitrate,
guint32 max_bitrate);
AtomInfo * build_codec_data_extension (guint32 fourcc, const GstBuffer * codec_data);
AtomInfo * build_mov_aac_extension (AtomTRAK * trak, const GstBuffer * codec_data,
guint32 avg_bitrate, guint32 max_bitrate);


@ -208,6 +208,7 @@ G_BEGIN_DECLS
#define FOURCC_mfhd GST_MAKE_FOURCC('m','f','h','d')
#define FOURCC_mvhd GST_MAKE_FOURCC('m','v','h','d')
#define FOURCC_traf GST_MAKE_FOURCC('t','r','a','f')
#define FOURCC_btrt GST_MAKE_FOURCC('b','t','r','t')
/* Xiph fourcc */
#define FOURCC_XiTh GST_MAKE_FOURCC('X','i','T','h')


@ -368,6 +368,8 @@ gst_qt_mux_pad_reset (GstQTPad * qtpad)
qtpad->avg_bitrate = 0;
qtpad->max_bitrate = 0;
qtpad->ts_n_entries = 0;
qtpad->total_duration = 0;
qtpad->total_bytes = 0;
qtpad->buf_head = 0;
qtpad->buf_tail = 0;
@ -1788,6 +1790,20 @@ gst_qt_mux_stop_file (GstQTMux * qtmux)
qtpad->last_dts > first_ts)) {
first_ts = qtpad->last_dts;
}
/* update average bitrate of streams if needed */
{
guint32 avgbitrate = 0;
guint32 maxbitrate = qtpad->max_bitrate;
if (qtpad->avg_bitrate)
avgbitrate = qtpad->avg_bitrate;
else if (qtpad->total_duration > 0)
avgbitrate = (guint32) gst_util_uint64_scale_round (qtpad->total_bytes,
8 * GST_SECOND, qtpad->total_duration);
atom_trak_update_bitrates (qtpad->trak, avgbitrate, maxbitrate);
}
}
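
The scaling in the block above is simply bits divided by seconds: gst_util_uint64_scale_round (total_bytes, 8 * GST_SECOND, total_duration). As a stand-alone sketch of the same computation (not code from the commit, with made-up totals): 1 500 000 bytes over 60 seconds gives 1 500 000 * 8 / 60 = 200 000 bits per second.

#include <gst/gst.h>

static guint32
compute_avg_bitrate (guint64 total_bytes, GstClockTime total_duration)
{
  if (total_duration == 0)
    return 0;
  /* bits = bytes * 8; dividing by the duration in seconds gives bits/sec */
  return (guint32) gst_util_uint64_scale_round (total_bytes,
      8 * GST_SECOND, total_duration);
}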
if (qtmux->fragment_sequence) {
@ -2147,7 +2163,8 @@ gst_qt_mux_add_buffer (GstQTMux * qtmux, GstQTPad * pad, GstBuffer * buf)
buf = pad->prepare_buf_func (pad, buf, qtmux);
}
if (G_LIKELY (buf != NULL && GST_CLOCK_TIME_IS_VALID (pad->first_ts))) {
if (G_LIKELY (buf != NULL && GST_CLOCK_TIME_IS_VALID (pad->first_ts) &&
pad->first_ts != 0)) {
buf = gst_buffer_make_writable (buf);
check_and_subtract_ts (qtmux, &GST_BUFFER_TIMESTAMP (buf), pad->first_ts);
}
@ -2309,6 +2326,12 @@ again:
duration = MAX (duration, ts);
}
/* for computing the avg bitrate */
if (G_LIKELY (last_buf)) {
pad->total_bytes += GST_BUFFER_SIZE (last_buf);
pad->total_duration += duration;
}
gst_buffer_replace (&pad->last_buf, buf);
last_dts = gst_util_uint64_scale_round (pad->last_dts,


@ -97,6 +97,10 @@ struct _GstQTPad
/* bitrates */
guint32 avg_bitrate, max_bitrate;
/* for avg bitrate calculation */
guint64 total_bytes;
guint64 total_duration;
GstBuffer *last_buf;
/* dts of last_buf */
GstClockTime last_dts;


@ -436,6 +436,7 @@ gst_matroska_demux_reset (GstElement * element)
gst_segment_init (&demux->common.segment, GST_FORMAT_TIME);
demux->last_stop_end = GST_CLOCK_TIME_NONE;
demux->seek_block = 0;
demux->stream_start_time = GST_CLOCK_TIME_NONE;
demux->common.offset = 0;
demux->cluster_time = GST_CLOCK_TIME_NONE;
@ -1370,10 +1371,11 @@ gst_matroska_demux_query (GstMatroskaDemux * demux, GstPad * pad,
if (format == GST_FORMAT_TIME) {
GST_OBJECT_LOCK (demux);
if (context)
gst_query_set_position (query, GST_FORMAT_TIME, context->pos);
gst_query_set_position (query, GST_FORMAT_TIME,
context->pos - demux->stream_start_time);
else
gst_query_set_position (query, GST_FORMAT_TIME,
demux->common.segment.last_stop);
demux->common.segment.last_stop - demux->stream_start_time);
GST_OBJECT_UNLOCK (demux);
} else if (format == GST_FORMAT_DEFAULT && context
&& context->default_duration) {
@ -1748,9 +1750,15 @@ gst_matroska_demux_search_pos (GstMatroskaDemux * demux, GstClockTime time)
retry:
GST_LOG_OBJECT (demux,
"opos: %" G_GUINT64_FORMAT ", otime: %" GST_TIME_FORMAT, opos,
GST_TIME_ARGS (otime));
newpos = gst_util_uint64_scale (opos, time, otime) - chunk;
"opos: %" G_GUINT64_FORMAT ", otime: %" GST_TIME_FORMAT ", %"
GST_TIME_FORMAT " in stream time (start %" GST_TIME_FORMAT "), time %"
GST_TIME_FORMAT, opos, GST_TIME_ARGS (otime),
GST_TIME_ARGS (otime - demux->stream_start_time),
GST_TIME_ARGS (demux->stream_start_time), GST_TIME_ARGS (time));
newpos =
gst_util_uint64_scale (opos - demux->common.ebml_segment_start,
time - demux->stream_start_time,
otime - demux->stream_start_time) - chunk;
if (newpos < 0)
newpos = 0;
/* favour undershoot */
@ -1918,6 +1926,14 @@ gst_matroska_demux_handle_seek_event (GstMatroskaDemux * demux,
GST_DEBUG_OBJECT (demux, "configuring seek");
gst_segment_set_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
/* compensate for clip start time */
if (GST_CLOCK_TIME_IS_VALID (demux->stream_start_time)) {
seeksegment.last_stop += demux->stream_start_time;
seeksegment.start += demux->stream_start_time;
if (GST_CLOCK_TIME_IS_VALID (seeksegment.stop))
seeksegment.stop += demux->stream_start_time;
/* note that time should stay at indicated position */
}
}
GST_DEBUG_OBJECT (demux, "New segment %" GST_SEGMENT_FORMAT, &seeksegment);
@ -1988,7 +2004,7 @@ gst_matroska_demux_handle_seek_event (GstMatroskaDemux * demux,
GST_TIME_FORMAT, GST_TIME_ARGS (entry->time));
seeksegment.start = entry->time;
seeksegment.last_stop = entry->time;
seeksegment.time = entry->time;
seeksegment.time = entry->time - demux->stream_start_time;
}
if (flush) {
@ -3259,10 +3275,15 @@ gst_matroska_demux_parse_blockgroup_or_simpleblock (GstMatroskaDemux * demux,
GST_DEBUG_OBJECT (demux,
"generating segment starting at %" GST_TIME_FORMAT,
GST_TIME_ARGS (lace_time));
/* pretend we seeked here */
gst_segment_set_seek (&demux->common.segment, demux->common.segment.rate,
GST_FORMAT_TIME, 0, GST_SEEK_TYPE_SET, lace_time,
GST_SEEK_TYPE_SET, GST_CLOCK_TIME_NONE, NULL);
if (!GST_CLOCK_TIME_IS_VALID (demux->stream_start_time)) {
demux->stream_start_time = lace_time;
GST_DEBUG_OBJECT (demux,
"Setting stream start time to %" GST_TIME_FORMAT,
GST_TIME_ARGS (lace_time));
}
gst_segment_set_newsegment (&demux->common.segment, FALSE,
demux->common.segment.rate, GST_FORMAT_TIME, lace_time,
GST_CLOCK_TIME_NONE, lace_time - demux->stream_start_time);
/* now convey our segment notion downstream */
gst_matroska_demux_send_event (demux, gst_event_new_new_segment (FALSE,
demux->common.segment.rate, demux->common.segment.format,
@ -3449,9 +3470,10 @@ gst_matroska_demux_parse_blockgroup_or_simpleblock (GstMatroskaDemux * demux,
GST_OBJECT_LOCK (demux);
if (demux->common.segment.duration == -1 ||
demux->common.segment.duration < lace_time) {
demux->common.segment.duration <
lace_time - demux->stream_start_time) {
gst_segment_set_duration (&demux->common.segment, GST_FORMAT_TIME,
last_stop_end);
last_stop_end - demux->stream_start_time);
GST_OBJECT_UNLOCK (demux);
gst_element_post_message (GST_ELEMENT_CAST (demux),
gst_message_new_duration (GST_OBJECT_CAST (demux),
@ -4106,12 +4128,10 @@ gst_matroska_demux_parse_id (GstMatroskaDemux * demux, guint32 id,
demux->first_cluster_offset = demux->common.offset;
GST_DEBUG_OBJECT (demux, "signaling no more pads");
gst_element_no_more_pads (GST_ELEMENT (demux));
/* send initial newsegment */
gst_matroska_demux_send_event (demux,
gst_event_new_new_segment (FALSE, 1.0,
GST_FORMAT_TIME, 0,
(demux->common.segment.duration >
0) ? demux->common.segment.duration : -1, 0));
/* send initial newsegment - we wait till we know the first
incoming timestamp, so we can properly set the start of
the segment. */
demux->need_newsegment = TRUE;
}
demux->cluster_time = GST_CLOCK_TIME_NONE;
demux->cluster_offset = demux->common.offset;
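
The stream_start_time changes above all implement one offset: positions and durations are reported relative to the first cluster timestamp, while seek targets coming from the application are shifted back into absolute Matroska timestamps. A small stand-alone sketch of that bookkeeping (illustrative only; the values are made up):

#include <gst/gst.h>

static void
stream_start_time_example (void)
{
  GstClockTime stream_start_time = 10 * GST_SECOND;  /* first lace_time seen */
  GstClockTime context_pos = 15 * GST_SECOND;        /* absolute stream position */

  /* position query: report 5s of media time, not 15s */
  GstClockTime reported = context_pos - stream_start_time;

  /* seek to 5s of media: look for absolute timestamp 15s */
  GstClockTime requested = 5 * GST_SECOND;
  GstClockTime absolute_target = requested + stream_start_time;

  g_print ("position %" GST_TIME_FORMAT ", seek target %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (reported), GST_TIME_ARGS (absolute_target));
}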


@ -71,6 +71,7 @@ typedef struct _GstMatroskaDemux {
/* keeping track of playback position */
gboolean segment_running;
GstClockTime last_stop_end;
GstClockTime stream_start_time;
GstEvent *close_segment;
GstEvent *new_segment;


@ -744,8 +744,8 @@ gst_v4l2_set_defaults (GstV4l2Object * v4l2object)
if (v4l2object->tv_norm)
norm = gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm);
GST_DEBUG_OBJECT (v4l2object->element, "tv_norm=%d, norm=%p",
v4l2object->tv_norm, norm);
GST_DEBUG_OBJECT (v4l2object->element, "tv_norm=0x%" G_GINT64_MODIFIER "x, "
"norm=%p", (guint64) v4l2object->tv_norm, norm);
if (norm) {
gst_tuner_set_norm (tuner, norm);
} else {


@ -165,11 +165,12 @@ gst_ximage_src_open_display (GstXImageSrc * s, const gchar * name)
if (s->xid != 0) {
status = XGetWindowAttributes (s->xcontext->disp, s->xid, &attrs);
if (status) {
GST_DEBUG_OBJECT (s, "Found window XID %p", s->xid);
GST_DEBUG_OBJECT (s, "Found window XID %" G_GUINT64_FORMAT, s->xid);
s->xwindow = s->xid;
goto window_found;
} else {
GST_WARNING_OBJECT (s, "Failed to get window %p attributes", s->xid);
GST_WARNING_OBJECT (s, "Failed to get window %" G_GUINT64_FORMAT
" attributes", s->xid);
}
}
@ -177,13 +178,14 @@ gst_ximage_src_open_display (GstXImageSrc * s, const gchar * name)
GST_DEBUG_OBJECT (s, "Looking for window %s", s->xname);
window = gst_ximage_src_find_window (s, s->xcontext->root, s->xname);
if (window != 0) {
GST_DEBUG_OBJECT (s, "Found window named %s as %p, ", s->xname, window);
GST_DEBUG_OBJECT (s, "Found window named %s, ", s->xname);
status = XGetWindowAttributes (s->xcontext->disp, window, &attrs);
if (status) {
s->xwindow = window;
goto window_found;
} else {
GST_WARNING_OBJECT (s, "Failed to get window %p attributes", window);
GST_WARNING_OBJECT (s, "Failed to get window attributes for "
"window named %s", s->xname);
}
}
}
@ -195,7 +197,7 @@ gst_ximage_src_open_display (GstXImageSrc * s, const gchar * name)
g_assert (s->xwindow != 0);
s->width = attrs.width;
s->height = attrs.height;
GST_INFO_OBJECT (s, "Using default window %p, size of %dx%d", s->xwindow,
GST_INFO_OBJECT (s, "Using default window size of %dx%d",
s->width, s->height);
}
use_root_window:
@ -675,8 +677,7 @@ gst_ximage_src_ximage_get (GstXImageSrc * ximagesrc)
} else
#endif /* HAVE_XSHM */
{
GST_DEBUG_OBJECT (ximagesrc,
"Retrieving screen using XGetImage, window %p", ximagesrc->xwindow);
GST_DEBUG_OBJECT (ximagesrc, "Retrieving screen using XGetImage");
if (ximagesrc->remote) {
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
ximagesrc->startx, ximagesrc->starty, ximagesrc->width,


@ -41,6 +41,17 @@ static GstPad *mysrcpad, *mysinkpad;
"layer = (int) 3, " \
"channels = (int) 2, " \
"rate = (int) 48000"
#define AUDIO_AAC_CAPS_STRING "audio/mpeg, " \
"mpegversion=(int)4, " \
"channels=(int)1, " \
"rate=(int)44100, " \
"stream-format=(string)raw, " \
"level=(string)2, " \
"base-profile=(string)lc, " \
"profile=(string)lc, " \
"codec_data=(buffer)1208"
#define VIDEO_CAPS_STRING "video/mpeg, " \
"mpegversion = (int) 4, " \
"systemstream = (boolean) false, " \
@ -48,6 +59,19 @@ static GstPad *mysrcpad, *mysinkpad;
"height = (int) 288, " \
"framerate = (fraction) 25/1"
#define VIDEO_CAPS_H264_STRING "video/x-h264, " \
"width=(int)320, " \
"height=(int)240, " \
"framerate=(fraction)30/1, " \
"pixel-aspect-ratio=(fraction)1/1, " \
"codec_data=(buffer)01640014ffe1001867640014a" \
"cd94141fb0110000003001773594000f14299600" \
"1000568ebecb22c, " \
"stream-format=(string)avc, " \
"alignment=(string)au, " \
"level=(string)2, " \
"profile=(string)high"
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
@ -57,11 +81,22 @@ static GstStaticPadTemplate srcvideotemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_ALWAYS,
GST_STATIC_CAPS (VIDEO_CAPS_STRING));
static GstStaticPadTemplate srcvideoh264template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (VIDEO_CAPS_H264_STRING));
static GstStaticPadTemplate srcaudiotemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (AUDIO_CAPS_STRING));
static GstStaticPadTemplate srcaudioaactemplate =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (AUDIO_AAC_CAPS_STRING));
/* setup and teardown needs some special handling for muxer */
static GstPad *
@ -684,6 +719,141 @@ GST_START_TEST (test_encodebin_mp4mux)
GST_END_TEST;
static gboolean
extract_tags (const gchar * location, GstTagList ** taglist)
{
gboolean ret = TRUE;
GstElement *src;
GstBus *bus;
GstElement *pipeline =
gst_parse_launch ("filesrc name=src ! qtdemux ! fakesink", NULL);
src = gst_bin_get_by_name (GST_BIN (pipeline), "src");
g_object_set (src, "location", location, NULL);
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
fail_unless (gst_element_set_state (pipeline, GST_STATE_PLAYING)
!= GST_STATE_CHANGE_FAILURE);
if (*taglist == NULL) {
*taglist = gst_tag_list_new ();
}
while (1) {
GstMessage *msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_TAG | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_EOS) {
gst_message_unref (msg);
break;
} else if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
ret = FALSE;
gst_message_unref (msg);
break;
} else if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_TAG) {
GstTagList *tags;
gst_message_parse_tag (msg, &tags);
gst_tag_list_insert (*taglist, tags, GST_TAG_MERGE_REPLACE);
gst_tag_list_free (tags);
}
gst_message_unref (msg);
}
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (src);
gst_object_unref (pipeline);
return ret;
}
static void
test_average_bitrate_custom (const gchar * elementname,
GstStaticPadTemplate * tmpl, const gchar * sinkpadname)
{
gchar *location;
GstElement *qtmux;
GstElement *filesink;
GstBuffer *inbuffer;
GstCaps *caps;
int i;
gint bytes[] = { 16, 22, 12 };
gint64 durations[] = { GST_SECOND * 3, GST_SECOND * 5, GST_SECOND * 2 };
gint64 total_bytes = 0;
GstClockTime total_duration = 0;
location = g_strdup_printf ("%s/%s-%d", g_get_tmp_dir (), "qtmuxtest",
g_random_int ());
GST_INFO ("Using location %s for bitrate test", location);
qtmux = gst_check_setup_element (elementname);
filesink = gst_element_factory_make ("filesink", NULL);
g_object_set (filesink, "location", location, NULL);
gst_element_link (qtmux, filesink);
mysrcpad = setup_src_pad (qtmux, tmpl, NULL, sinkpadname);
fail_unless (mysrcpad != NULL);
gst_pad_set_active (mysrcpad, TRUE);
fail_unless (gst_element_set_state (filesink,
GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE,
"could not set filesink to playing");
fail_unless (gst_element_set_state (qtmux,
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
for (i = 0; i < 3; i++) {
inbuffer = gst_buffer_new_and_alloc (bytes[i]);
caps = gst_caps_copy (gst_pad_get_pad_template_caps (mysrcpad));
gst_buffer_set_caps (inbuffer, caps);
gst_caps_unref (caps);
GST_BUFFER_TIMESTAMP (inbuffer) = total_duration;
GST_BUFFER_DURATION (inbuffer) = (GstClockTime) durations[i];
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
total_bytes += GST_BUFFER_SIZE (inbuffer);
total_duration += GST_BUFFER_DURATION (inbuffer);
fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);
}
/* send eos to have moov written */
fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()) == TRUE);
gst_element_set_state (qtmux, GST_STATE_NULL);
gst_element_set_state (filesink, GST_STATE_NULL);
gst_pad_set_active (mysrcpad, FALSE);
teardown_src_pad (mysrcpad);
gst_object_unref (filesink);
gst_check_teardown_element (qtmux);
/* check the bitrate tag */
{
GstTagList *taglist = NULL;
guint bitrate = 0;
guint expected;
fail_unless (extract_tags (location, &taglist));
fail_unless (gst_tag_list_get_uint (taglist, GST_TAG_BITRATE, &bitrate));
expected =
(guint) gst_util_uint64_scale_round ((guint64) total_bytes,
(guint64) 8 * GST_SECOND, (guint64) total_duration);
fail_unless (bitrate == expected);
}
}
GST_START_TEST (test_average_bitrate)
{
test_average_bitrate_custom ("mp4mux", &srcaudioaactemplate, "audio_%d");
test_average_bitrate_custom ("mp4mux", &srcvideoh264template, "video_%d");
test_average_bitrate_custom ("qtmux", &srcaudioaactemplate, "audio_%d");
test_average_bitrate_custom ("qtmux", &srcvideoh264template, "video_%d");
}
GST_END_TEST;
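
As a quick sanity check on test_average_bitrate_custom above (not additional test code): the three buffers total 16 + 22 + 12 = 50 bytes over 3 + 5 + 2 = 10 seconds, so the BITRATE tag the test expects works out to 50 * 8 / 10 = 40 bits per second.

#include <gst/gst.h>

static void
check_expected_bitrate (void)
{
  guint expected = (guint) gst_util_uint64_scale_round (50,
      8 * GST_SECOND, 10 * GST_SECOND);

  g_assert_cmpuint (expected, ==, 40);
}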
static Suite *
qtmux_suite (void)
{
@ -712,6 +882,8 @@ qtmux_suite (void)
tcase_add_test (tc_chain, test_video_pad_frag_asc_streamable);
tcase_add_test (tc_chain, test_audio_pad_frag_asc_streamable);
tcase_add_test (tc_chain, test_average_bitrate);
tcase_add_test (tc_chain, test_reuse);
tcase_add_test (tc_chain, test_encodebin_qtmux);
tcase_add_test (tc_chain, test_encodebin_mp4mux);