Merge remote-tracking branch 'origin/master' into 0.11

Conflicts:
	ext/opus/gstopusdec.c
	ext/opus/gstopusparse.c
	gst-libs/gst/video/gstbasevideodecoder.c
	gst-libs/gst/video/gstbasevideodecoder.h
This commit is contained in:
Tim-Philipp Müller 2011-11-26 15:37:25 +00:00
commit 77361e2919
11 changed files with 347 additions and 48 deletions

View file

@@ -70,3 +70,19 @@ const GstAudioChannelPosition gst_opus_channel_positions[][8] = {
GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
GST_AUDIO_CHANNEL_POSITION_LFE},
};
/* Human-readable channel names for debug/log output.  The order appears
 * to follow the GstAudioChannelPosition enum (FRONT_MONO, FRONT_LEFT,
 * FRONT_RIGHT, REAR_CENTER, ..., NONE) so it can be indexed directly by a
 * position value -- TODO confirm against gstaudio.h before relying on it. */
const char *gst_opus_channel_names[] = {
"mono",
"front left",
"front right",
"rear center",
"rear left",
"rear right",
"lfe",
"front center",
"front left of center",
"front right of center",
"side left",
"side right",
"none"
};

View file

@ -27,6 +27,7 @@
G_BEGIN_DECLS
extern const GstAudioChannelPosition gst_opus_channel_positions[][8];
extern const char *gst_opus_channel_names[];
G_END_DECLS

View file

@ -219,7 +219,8 @@ gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
data = gst_buffer_map (buf, NULL, NULL, GST_MAP_READ);
g_return_val_if_fail (dec->n_channels != data[9], GST_FLOW_ERROR);
g_return_val_if_fail (dec->n_channels == 0
|| dec->n_channels == data[9], GST_FLOW_ERROR);
dec->n_channels = data[9];
dec->pre_skip = GST_READ_UINT16_LE (data + 10);
@ -288,6 +289,7 @@ gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
dec->sample_rate);
if (pos) {
GST_DEBUG_OBJECT (dec, "Setting channel positions on caps");
gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
}

View file

@ -187,11 +187,9 @@ static void
gst_opus_enc_class_init (GstOpusEncClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstAudioEncoderClass *base_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
base_class = (GstAudioEncoderClass *) klass;
gobject_class->set_property = gst_opus_enc_set_property;
@ -446,7 +444,8 @@ gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
GstAudioChannelPosition pos = GST_AUDIO_INFO_POSITION (info, n);
int c;
GST_DEBUG_OBJECT (enc, "Channel %d has position %d", n, pos);
GST_DEBUG_OBJECT (enc, "Channel %d has position %d (%s)", n, pos,
gst_opus_channel_names[pos]);
for (c = 0; c < enc->n_channels; ++c) {
if (gst_opus_channel_positions[enc->n_channels - 1][c] == pos) {
GST_DEBUG_OBJECT (enc, "Found in Vorbis mapping as channel %d", c);
@ -456,12 +455,13 @@ gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
if (c == enc->n_channels) {
/* We did not find that position, so use undefined */
GST_WARNING_OBJECT (enc,
"Position %d not found in Vorbis mapping, using unknown mapping",
pos);
"Position %d (%s) not found in Vorbis mapping, using unknown mapping",
pos, gst_opus_channel_positions[pos]);
enc->channel_mapping_family = 255;
return;
}
GST_DEBUG_OBJECT (enc, "Mapping output channel %d to %d", c, n);
GST_DEBUG_OBJECT (enc, "Mapping output channel %d to %d (%s)", c, n,
gst_opus_channel_names[pos]);
enc->channel_mapping[c] = n;
}
GST_INFO_OBJECT (enc, "Permutation found, using Vorbis mapping");
@ -512,13 +512,17 @@ gst_opus_enc_set_format (GstAudioEncoder * benc, GstAudioInfo * info)
static gboolean
gst_opus_enc_setup (GstOpusEnc * enc)
{
int error = OPUS_OK;
int error = OPUS_OK, n;
guint8 trivial_mapping[256];
GST_DEBUG_OBJECT (enc, "setup");
for (n = 0; n < 256; ++n)
trivial_mapping[n] = n;
enc->state =
opus_multistream_encoder_create (enc->sample_rate, enc->n_channels,
(enc->n_channels + 1) / 2, enc->n_channels / 2, enc->channel_mapping,
enc->n_channels, 0, trivial_mapping,
enc->audio_or_voip ? OPUS_APPLICATION_AUDIO : OPUS_APPLICATION_VOIP,
&error);
if (!enc->state || error != OPUS_OK)

View file

@ -44,8 +44,8 @@ gst_opus_enc_create_id_buffer (gint nchannels, gint sample_rate,
gst_byte_writer_put_uint16_le (&bw, 0); /* output gain */
gst_byte_writer_put_uint8 (&bw, channel_mapping_family);
if (channel_mapping_family > 0) {
gst_byte_writer_put_uint8 (&bw, (nchannels + 1) / 2);
gst_byte_writer_put_uint8 (&bw, nchannels / 2);
gst_byte_writer_put_uint8 (&bw, nchannels);
gst_byte_writer_put_uint8 (&bw, 0);
gst_byte_writer_put_data (&bw, channel_mapping, nchannels);
}

View file

@ -286,20 +286,11 @@ gst_opus_parse_parse_frame (GstBaseParse * base, GstBaseParseFrame * frame)
if (!parse->header_sent) {
GstCaps *caps;
guint8 channels, channel_mapping_family, channel_mapping[256];
data = gst_buffer_map (frame->buffer, &size, NULL, GST_MAP_READ);
/* FIXME : Check available size ? */
guint8 channels;
/* Opus streams can decode to 1 or 2 channels, so use the header
value if we have one, or 2 otherwise */
if (is_idheader) {
channels = data[9];
channel_mapping_family = data[18];
/* header probing will already have done the size check */
memcpy (channel_mapping, data + 21, channels);
gst_buffer_unmap (frame->buffer, data, size);
gst_buffer_replace (&parse->id_header, frame->buffer);
GST_DEBUG_OBJECT (parse, "Found ID header, keeping");
return GST_BASE_PARSE_FLOW_DROPPED;
@ -318,6 +309,7 @@ gst_opus_parse_parse_frame (GstBaseParse * base, GstBaseParseFrame * frame)
gst_opus_header_create_caps_from_headers (&caps, &parse->headers,
parse->id_header, parse->comment_header);
} else {
guint8 channel_mapping_family, channel_mapping[256];
GST_INFO_OBJECT (parse,
"No headers, blindly setting up canonical stereo");
channels = 2;

View file

@ -473,7 +473,7 @@ gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder,
if (deadline < 0) {
GST_LOG_OBJECT (dec, "Skipping late frame (%f s past deadline)",
(double) -deadline / GST_SECOND);
gst_base_video_decoder_finish_frame (decoder, frame);
gst_base_video_decoder_drop_frame (decoder, frame);
} else {
ret = gst_base_video_decoder_alloc_src_frame (decoder, frame);

View file

@ -954,6 +954,9 @@ gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder,
base_video_decoder->current_frame = NULL;
}
base_video_decoder->dropped = 0;
base_video_decoder->processed = 0;
GST_BASE_VIDEO_CODEC (base_video_decoder)->system_frame_number = 0;
base_video_decoder->base_picture_number = 0;
@ -1349,30 +1352,12 @@ gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder)
return frame;
}
/**
* gst_base_video_decoder_finish_frame:
* @base_video_decoder: a #GstBaseVideoDecoder
* @frame: a decoded #GstVideoFrameState
*
* @frame should have a valid decoded data buffer, whose metadata fields
* are then appropriately set according to frame data and pushed downstream.
* If no output data is provided, @frame is considered skipped.
* In any case, the frame is considered finished and released.
*
* Returns: a #GstFlowReturn resulting from sending data downstream
*/
GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrameState * frame)
static void
gst_base_video_decoder_prepare_finish_frame (GstBaseVideoDecoder *
base_video_decoder, GstVideoFrameState * frame)
{
GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state;
GstBuffer *src_buffer;
GstFlowReturn ret = GST_FLOW_OK;
GList *l, *events = NULL;
GST_LOG_OBJECT (base_video_decoder, "finish frame");
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
#ifndef GST_DISABLE_GST_DEBUG
GST_LOG_OBJECT (base_video_decoder,
"n %d in %" G_GSIZE_FORMAT " out %" G_GSIZE_FORMAT,
@ -1398,9 +1383,12 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
break;
}
for (l = g_list_last (events); l; l = l->prev)
for (l = g_list_last (events); l; l = l->prev) {
GST_LOG_OBJECT (base_video_decoder, "pushing %s event",
GST_EVENT_TYPE_NAME (l->data));
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
l->data);
}
g_list_free (events);
if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
@ -1459,8 +1447,106 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
}
}
base_video_decoder->last_timestamp = frame->presentation_timestamp;
}
/* no buffer data means this frame is skipped/dropped */
/* Release a frame that is fully handled (pushed, skipped or dropped):
 * unlink it from the codec's pending-frames list, drop the ref on its
 * output buffer if one was allocated, and free the frame state.
 * NOTE(review): both visible callers hold the codec stream lock when
 * calling this -- callers should do the same. */
static void
gst_base_video_decoder_do_finish_frame (GstBaseVideoDecoder * dec,
GstVideoFrameState * frame)
{
GST_BASE_VIDEO_CODEC (dec)->frames =
g_list_remove (GST_BASE_VIDEO_CODEC (dec)->frames, frame);
if (frame->src_buffer)
gst_buffer_unref (frame->src_buffer);
gst_base_video_codec_free_frame (frame);
}
/**
 * gst_base_video_decoder_drop_frame:
 * @dec: a #GstBaseVideoDecoder
 * @frame: the #GstVideoFrame to drop
 *
 * Similar to gst_base_video_decoder_finish_frame(), but drops @frame in any
 * case and posts a QoS message with the frame's details on the bus.
 * In any case, the frame is considered finished and released.
 *
 * Returns: a #GstFlowReturn, usually GST_FLOW_OK.
 *
 * Since: 0.10.23
 */
GstFlowReturn
gst_base_video_decoder_drop_frame (GstBaseVideoDecoder * dec,
GstVideoFrameState * frame)
{
GstClockTime stream_time, jitter, earliest_time, qostime, timestamp;
GstSegment *segment;
GstMessage *qos_msg;
gdouble proportion;
GST_LOG_OBJECT (dec, "drop frame");
GST_BASE_VIDEO_CODEC_STREAM_LOCK (dec);
/* do the shared finish-frame bookkeeping (pending serialized events,
 * timestamp tracking) without pushing any buffer downstream */
gst_base_video_decoder_prepare_finish_frame (dec, frame);
GST_DEBUG_OBJECT (dec, "dropping frame %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->presentation_timestamp));
dec->dropped++;
/* post QoS message */
timestamp = frame->presentation_timestamp;
proportion = GST_BASE_VIDEO_CODEC (dec)->proportion;
segment = &GST_BASE_VIDEO_CODEC (dec)->segment;
stream_time =
gst_segment_to_stream_time (segment, GST_FORMAT_TIME, timestamp);
qostime = gst_segment_to_running_time (segment, GST_FORMAT_TIME, timestamp);
earliest_time = GST_BASE_VIDEO_CODEC (dec)->earliest_time;
/* positive jitter means the frame was already past the earliest
 * acceptable render time when it was dropped */
jitter = GST_CLOCK_DIFF (qostime, earliest_time);
/* NOTE(review): the FALSE argument presumably marks this as a non-live
 * QoS message; duration is unknown (GST_CLOCK_TIME_NONE) -- confirm
 * against the gst_message_new_qos() docs */
qos_msg = gst_message_new_qos (GST_OBJECT_CAST (dec), FALSE,
qostime, stream_time, timestamp, GST_CLOCK_TIME_NONE);
gst_message_set_qos_values (qos_msg, jitter, proportion, 1000000);
gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS,
dec->processed, dec->dropped);
gst_element_post_message (GST_ELEMENT_CAST (dec), qos_msg);
/* now free the frame */
gst_base_video_decoder_do_finish_frame (dec, frame);
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (dec);
return GST_FLOW_OK;
}
/**
* gst_base_video_decoder_finish_frame:
* @base_video_decoder: a #GstBaseVideoDecoder
* @frame: a decoded #GstVideoFrameState
*
* @frame should have a valid decoded data buffer, whose metadata fields
* are then appropriately set according to frame data and pushed downstream.
* If no output data is provided, @frame is considered skipped.
* In any case, the frame is considered finished and released.
*
* Returns: a #GstFlowReturn resulting from sending data downstream
*/
GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrameState * frame)
{
GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state;
GstBuffer *src_buffer;
GstFlowReturn ret = GST_FLOW_OK;
GST_LOG_OBJECT (base_video_decoder, "finish frame");
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
gst_base_video_decoder_prepare_finish_frame (base_video_decoder, frame);
base_video_decoder->processed++;
/* no buffer data means this frame is skipped */
if (!frame->src_buffer) {
GST_DEBUG_OBJECT (base_video_decoder, "skipping frame %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->presentation_timestamp));
@ -1566,9 +1652,8 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
}
done:
GST_BASE_VIDEO_CODEC (base_video_decoder)->frames =
g_list_remove (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames, frame);
gst_base_video_codec_free_frame (frame);
gst_base_video_decoder_do_finish_frame (base_video_decoder, frame);
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);

View file

@ -188,6 +188,10 @@ struct _GstBaseVideoDecoder
* GST_META_API_VIDEO_CROP */
gboolean use_cropping;
/* qos messages: frames dropped/processed */
guint dropped;
guint processed;
/* FIXME before moving to base */
void *padding[GST_PADDING_LARGE];
};
@ -273,6 +277,8 @@ GstClockTimeDiff gst_base_video_decoder_get_max_decode_time (
GstVideoFrameState *frame);
GstFlowReturn gst_base_video_decoder_finish_frame (GstBaseVideoDecoder *base_video_decoder,
GstVideoFrameState *frame);
GstFlowReturn gst_base_video_decoder_drop_frame (GstBaseVideoDecoder *dec,
GstVideoFrameState *frame);
GType gst_base_video_decoder_get_type (void);

View file

@ -57,6 +57,12 @@ else
check_voaacenc =
endif
if USE_VOAMRWBENC
check_voamrwbenc = elements/voamrwbenc
else
check_voamrwbenc =
endif
if USE_EXIF
check_jifmux = elements/jifmux
else
@ -162,6 +168,7 @@ check_PROGRAMS = \
$(check_faac) \
$(check_faad) \
$(check_voaacenc) \
$(check_voamrwbenc) \
$(check_mpeg2enc) \
$(check_mplex) \
$(check_ofa) \

View file

@ -0,0 +1,186 @@
/* GStreamer
*
* unit test for voamrwbenc
*
* Copyright (C) <2011> Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include <unistd.h>
#include <gst/check/gstcheck.h>
/* For ease of programming we use globals to keep refs for our floating
 * src and sink pads we create; otherwise we always have to do get_pad,
 * get_peer, and then remove references in every test function */
static GstPad *mysrcpad, *mysinkpad;
/* Raw PCM input the encoder consumes: 16 kHz mono signed 16-bit,
 * native endianness (matches the buffers pushed in do_test()) */
#define AUDIO_CAPS_STRING "audio/x-raw-int, " \
"rate = (int) 16000, " \
"channels = (int) 1, " \
"width = (int) 16, " \
"depth = (int) 16, " \
"signed = (boolean) true, " \
"endianness = (int) BYTE_ORDER "
/* Encoded output caps produced by voamrwbenc */
#define AMRWB_CAPS_STRING "audio/AMR-WB"
/* Our fake sink pad: accepts the encoded AMR-WB stream */
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (AMRWB_CAPS_STRING));
/* Our fake src pad: feeds raw PCM into the encoder */
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (AUDIO_CAPS_STRING));
/* Create a voamrwbenc element wired to test src/sink pads (stored in the
 * mysrcpad/mysinkpad globals), force band-mode 0 so output frames have a
 * known size, and activate both pads.  Returns the element, owned by the
 * caller (released via cleanup_voamrwbenc()). */
static GstElement *
setup_voamrwbenc (void)
{
  GstElement *enc;

  GST_DEBUG ("setup_voamrwbenc");
  enc = gst_check_setup_element ("voamrwbenc");
  /* ensure mode as expected */
  g_object_set (enc, "band-mode", 0, NULL);
  mysrcpad = gst_check_setup_src_pad (enc, &srctemplate, NULL);
  mysinkpad = gst_check_setup_sink_pad (enc, &sinktemplate, NULL);
  gst_pad_set_active (mysrcpad, TRUE);
  gst_pad_set_active (mysinkpad, TRUE);
  return enc;
}
/* Tear down the element and the src/sink pads created by
 * setup_voamrwbenc(), releasing all references. */
static void
cleanup_voamrwbenc (GstElement * voamrwbenc)
{
  /* fixed copy-paste typo: debug string said "cleanup_aacenc" */
  GST_DEBUG ("cleanup_voamrwbenc");
  gst_element_set_state (voamrwbenc, GST_STATE_NULL);
  gst_pad_set_active (mysrcpad, FALSE);
  gst_pad_set_active (mysinkpad, FALSE);
  gst_check_teardown_src_pad (voamrwbenc);
  gst_check_teardown_sink_pad (voamrwbenc);
  gst_check_teardown_element (voamrwbenc);
}
/* End-to-end encode check: push nbuffers * 20 ms of silence (16 kHz mono
 * S16) into voamrwbenc in one buffer, send EOS, then verify we got exactly
 * nbuffers encoded frames with the expected size, header bits, timestamps
 * and durations.  Uses the gst_check global `buffers` list, which the sink
 * pad fills with the encoder's output. */
static void
do_test (void)
{
GstElement *voamrwbenc;
GstBuffer *inbuffer, *outbuffer;
GstCaps *caps;
gint i, num_buffers;
const gint nbuffers = 10;
voamrwbenc = setup_voamrwbenc ();
fail_unless (gst_element_set_state (voamrwbenc,
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
/* corresponds to audio buffer mentioned in the caps:
 * 320 samples (20 ms at 16 kHz) * 2 bytes per sample, per frame */
inbuffer = gst_buffer_new_and_alloc (320 * nbuffers * 2);
/* makes valgrind's memcheck happier */
memset (GST_BUFFER_DATA (inbuffer), 0, GST_BUFFER_SIZE (inbuffer));
caps = gst_caps_from_string (AUDIO_CAPS_STRING);
gst_buffer_set_caps (inbuffer, caps);
gst_caps_unref (caps);
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* gst_pad_push takes ownership of inbuffer */
fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);
/* send eos to have all flushed if needed */
fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()) == TRUE);
num_buffers = g_list_length (buffers);
fail_unless_equals_int (num_buffers, nbuffers);
/* clean up buffers */
for (i = 0; i < num_buffers; ++i) {
gint size;
guint8 *data;
GstClockTime time, dur;
outbuffer = GST_BUFFER (buffers->data);
fail_if (outbuffer == NULL);
data = GST_BUFFER_DATA (outbuffer);
size = GST_BUFFER_SIZE (outbuffer);
/* at least for mode 0 */
fail_unless (size == 18);
/* header byte checks: padding bits clear and frame-type field zero --
 * presumably the RFC 4867 AMR-WB ToC layout for band-mode 0; confirm */
fail_unless ((data[0] & 0x83) == 0);
fail_unless (((data[0] >> 3) & 0xF) == 0);
time = GST_BUFFER_TIMESTAMP (outbuffer);
dur = GST_BUFFER_DURATION (outbuffer);
/* each encoded frame covers exactly 20 ms of input */
fail_unless (time == 20 * GST_MSECOND * i);
fail_unless (dur == 20 * GST_MSECOND);
buffers = g_list_remove (buffers, outbuffer);
ASSERT_BUFFER_REFCOUNT (outbuffer, "outbuffer", 1);
gst_buffer_unref (outbuffer);
outbuffer = NULL;
}
cleanup_voamrwbenc (voamrwbenc);
g_list_free (buffers);
buffers = NULL;
}
/* Single check test case: run the full encode round-trip once. */
GST_START_TEST (test_enc)
{
do_test ();
}
GST_END_TEST;
/* Assemble the check suite: one "general" tcase containing test_enc. */
static Suite *
voamrwbenc_suite (void)
{
  Suite *suite;
  TCase *tc;

  suite = suite_create ("voamrwbenc");
  tc = tcase_create ("general");
  tcase_add_test (tc, test_enc);
  suite_add_tcase (suite, tc);
  return suite;
}
/* Test entry point: initialize gst-check, run the suite and return the
 * number of failed tests as the process exit code. */
int
main (int argc, char **argv)
{
  int nf;
  Suite *s;
  SRunner *sr;

  /* gst_check_init() initializes GStreamer and consumes check-specific
   * command-line options; call it before building the suite, matching
   * the standard gst-check main() pattern (original called it after
   * srunner_create()). */
  gst_check_init (&argc, &argv);

  s = voamrwbenc_suite ();
  sr = srunner_create (s);
  srunner_run_all (sr, CK_NORMAL);
  nf = srunner_ntests_failed (sr);
  srunner_free (sr);
  return nf;
}