Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git
osxaudiosrc: iOS resampling causes stuttering
Fixes stuttering audio when the iOS AudioUnit is resampling. To make the AU resample, one has to request a rate that differs from AVAudioSession's sampleRate. The resampling itself is not the culprit, but rather our API misuse: AudioUnitRender overwrites the mDataByteSize members with the number of bytes it actually read, so they must be reinitialized before each AudioUnitRender call. (The buffers themselves can be preallocated.) The "stutter" was caused by one AudioUnitRender call leaving the buffer too small for subsequent AudioUnitRender invocations, which then failed with -50 (paramErr). As luck would have it, when the AU was not resampling, every AudioUnitRender invocation read the same number of bytes, so the problem never surfaced.

(This patch addresses some non-interleaved audio concerns, but at the moment the elements do not support non-interleaved audio and that path is untested.)

https://bugzilla.gnome.org/show_bug.cgi?id=744922
parent e3ca4d1c86
commit 8ca40fa86f
5 changed files with 38 additions and 23 deletions
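To make the failure mode concrete, here is a minimal sketch of the pattern the fix restores (illustrative only: the RenderCtx struct, the render_cb callback and the BUF_BYTES constant are invented, not the element's actual code). The capture callback must restore every buffer's mDataByteSize to its full capacity before each AudioUnitRender call, because the previous call overwrote it with the number of bytes actually delivered.

/* Illustrative sketch, not GStreamer code: why mDataByteSize must be reset
 * before every AudioUnitRender call. */
#include <AudioUnit/AudioUnit.h>

#define BUF_BYTES 4096          /* assumed capacity of each preallocated buffer */

typedef struct
{
  AudioUnit unit;               /* the capturing audio unit */
  AudioBufferList *list;        /* preallocated once, reused on every callback */
} RenderCtx;

static OSStatus
render_cb (void *inRefCon, AudioUnitRenderActionFlags * ioActionFlags,
    const AudioTimeStamp * inTimeStamp, UInt32 inBusNumber,
    UInt32 inNumberFrames, AudioBufferList * ioData)
{
  RenderCtx *ctx = (RenderCtx *) inRefCon;
  UInt32 i;

  /* The previous AudioUnitRender wrote the byte count it actually produced
   * into mDataByteSize. Without this reset, a later call that needs more
   * room than the last one delivered fails with -50 (paramErr). */
  for (i = 0; i < ctx->list->mNumberBuffers; i++)
    ctx->list->mBuffers[i].mDataByteSize = BUF_BYTES;

  return AudioUnitRender (ctx->unit, ioActionFlags, inTimeStamp,
      inBusNumber, inNumberFrames, ctx->list);
}

Such a callback would be installed with kAudioOutputUnitProperty_SetInputCallback; the key point here is only the reset loop that runs before the render call.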
@@ -384,8 +384,16 @@ gst_osx_audio_src_io_proc (GstOsxAudioRingBuffer * buf,
   gint writeseg;
   gint len;
   gint remaining;
+  UInt32 n;
   gint offset = 0;
 
+  /* Previous invoke of AudioUnitRender changed mDataByteSize into
+   * number of bytes actually read. Reset the members. */
+  for (n = 0; n < buf->core_audio->recBufferList->mNumberBuffers; ++n) {
+    buf->core_audio->recBufferList->mBuffers[n].mDataByteSize =
+        buf->core_audio->recBufferSize;
+  }
+
   status = AudioUnitRender (buf->core_audio->audiounit, ioActionFlags,
       inTimeStamp, inBusNumber, inNumberFrames, buf->core_audio->recBufferList);
@@ -394,6 +402,9 @@ gst_osx_audio_src_io_proc (GstOsxAudioRingBuffer * buf,
     return status;
   }
 
+  /* TODO: To support non-interleaved audio, go over all mBuffers,
+   * not just the first one. */
   remaining = buf->core_audio->recBufferList->mBuffers[0].mDataByteSize;
 
   while (remaining) {
@@ -126,9 +126,11 @@ gst_core_audio_initialize (GstCoreAudio * core_audio,
 
   if (core_audio->is_src) {
     /* create AudioBufferList needed for recording */
+    core_audio->recBufferSize = frame_size * format.mBytesPerFrame;
     core_audio->recBufferList =
-        buffer_list_alloc (format.mChannelsPerFrame,
-        frame_size * format.mBytesPerFrame);
+        buffer_list_alloc (format.mChannelsPerFrame, core_audio->recBufferSize,
+        /* Currently always TRUE (i.e. interleaved) */
+        !(format.mFormatFlags & kAudioFormatFlagIsNonInterleaved));
   }
 
   /* Initialize the AudioUnit */
@@ -141,10 +143,8 @@ gst_core_audio_initialize (GstCoreAudio * core_audio,
   return TRUE;
 
 error:
   if (core_audio->is_src && core_audio->recBufferList) {
     buffer_list_free (core_audio->recBufferList);
     core_audio->recBufferList = NULL;
   }
   return FALSE;
 }
@@ -153,11 +153,9 @@ gst_core_audio_unitialize (GstCoreAudio * core_audio)
 {
   AudioUnitUninitialize (core_audio->audiounit);
 
   if (core_audio->recBufferList) {
     buffer_list_free (core_audio->recBufferList);
     core_audio->recBufferList = NULL;
   }
 }
 
 void
 gst_core_audio_set_volume (GstCoreAudio * core_audio, gfloat volume)
@@ -90,6 +90,7 @@ struct _GstCoreAudio
 
   /* For LPCM in/out */
   AudioUnit audiounit;
+  UInt32 recBufferSize;         /* AudioUnitRender clobbers mDataByteSize */
   AudioBufferList *recBufferList;
 
 #ifndef HAVE_IOS
@@ -150,18 +150,21 @@ gst_core_audio_io_proc_stop (GstCoreAudio * core_audio)
 }
 
 AudioBufferList *
-buffer_list_alloc (int channels, int size)
+buffer_list_alloc (UInt32 channels, UInt32 size, gboolean interleaved)
 {
   AudioBufferList *list;
-  int total_size;
-  int n;
+  gsize list_size;
+  UInt32 num_buffers, n;
 
-  total_size = sizeof (AudioBufferList) + 1 * sizeof (AudioBuffer);
-  list = (AudioBufferList *) g_malloc (total_size);
+  num_buffers = interleaved ? 1 : channels;
+  list_size = G_STRUCT_OFFSET (AudioBufferList, mBuffers[num_buffers]);
+  list = (AudioBufferList *) g_malloc (list_size);
 
-  list->mNumberBuffers = 1;
-  for (n = 0; n < (int) list->mNumberBuffers; ++n) {
-    list->mBuffers[n].mNumberChannels = channels;
+  list->mNumberBuffers = num_buffers;
+  for (n = 0; n < num_buffers; ++n) {
+    /* See http://lists.apple.com/archives/coreaudio-api/2015/Feb/msg00027.html */
+    list->mBuffers[n].mNumberChannels = interleaved ? channels : 1;
+    /* AudioUnitRender will keep overwriting mDataByteSize */
     list->mBuffers[n].mDataByteSize = size;
     list->mBuffers[n].mData = g_malloc (size);
   }
@@ -172,10 +175,12 @@ buffer_list_alloc (int channels, int size)
 void
 buffer_list_free (AudioBufferList * list)
 {
-  int n;
+  UInt32 n;
 
-  for (n = 0; n < (int) list->mNumberBuffers; ++n) {
-    if (list->mBuffers[n].mData)
-      g_free (list->mBuffers[n].mData);
+  if (list == NULL)
+    return;
+
+  for (n = 0; n < list->mNumberBuffers; ++n) {
+    g_free (list->mBuffers[n].mData);
   }
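For reference, a self-contained sketch of the allocation scheme the reworked helpers follow (the names alloc_buffer_list and free_buffer_list are invented for illustration): interleaved data travels in one AudioBuffer carrying all channels, non-interleaved data needs one single-channel AudioBuffer per channel, and G_STRUCT_OFFSET sizes the variable-length mBuffers array for exactly that many entries.

/* Illustrative sketch, not the patched GStreamer helpers. */
#include <glib.h>
#include <AudioUnit/AudioUnit.h>

static AudioBufferList *
alloc_buffer_list (UInt32 channels, UInt32 size, gboolean interleaved)
{
  /* One buffer holding all channels when interleaved, one per channel otherwise. */
  UInt32 num_buffers = interleaved ? 1 : channels;
  AudioBufferList *list =
      g_malloc (G_STRUCT_OFFSET (AudioBufferList, mBuffers[num_buffers]));
  UInt32 n;

  list->mNumberBuffers = num_buffers;
  for (n = 0; n < num_buffers; n++) {
    list->mBuffers[n].mNumberChannels = interleaved ? channels : 1;
    list->mBuffers[n].mDataByteSize = size;     /* reset again before each render */
    list->mBuffers[n].mData = g_malloc (size);
  }
  return list;
}

static void
free_buffer_list (AudioBufferList * list)
{
  UInt32 n;

  if (list == NULL)
    return;
  for (n = 0; n < list->mNumberBuffers; n++)
    g_free (list->mBuffers[n].mData);
  g_free (list);
}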
@@ -40,7 +40,7 @@ gboolean gst_core_audio_io_proc_start (GstCoreAudio * core_a
 
 gboolean gst_core_audio_io_proc_stop (GstCoreAudio * core_audio);
 
-AudioBufferList * buffer_list_alloc (int channels, int size);
+AudioBufferList * buffer_list_alloc (UInt32 channels, UInt32 size, gboolean interleaved);
 
 void buffer_list_free (AudioBufferList * list);