wasapi: Allow opening devices in exclusive mode

This provides much lower latency compared to opening in shared mode, but it
also means that the device cannot be opened by any other application.

In shared mode, WASAPI's engine period is 10ms, so that is the lowest
achievable latency.

In exclusive mode, the limit is the device period itself, which in my
testing with USB DACs, on-board PCI sound-cards, and HDMI cards is
between 2ms and 3.33ms.

We set our audio ringbuffer limits to match the device, so the achievable
sink latency is 6-9ms. Further improvements can be made if needed.

https://bugzilla.gnome.org/show_bug.cgi?id=793289
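As a usage sketch (not part of this commit): the new boolean property is set
like any other GObject property. Only the element name wasapisink and the
property name "exclusive" come from the diff below; audiotestsrc and the rest
of the pipeline are hypothetical. The same property is also added to
wasapisrc for capture.

/* Hypothetical pipeline that requests exclusive-mode playback. */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline, *src, *sink;

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new ("play");
  src = gst_element_factory_make ("audiotestsrc", NULL);
  sink = gst_element_factory_make ("wasapisink", NULL);

  /* Default is FALSE, i.e. the existing shared-mode behaviour */
  g_object_set (sink, "exclusive", TRUE, NULL);

  gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
  gst_element_link (src, sink);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_usleep (5 * G_USEC_PER_SEC);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}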
Author: Nirbheek Chauhan
Date:   2018-02-06 23:40:49 +05:30
Parent: 4b388814af
Commit: 6ecbb7556a
6 changed files with 232 additions and 68 deletions

sys/wasapi/gstwasapisink.c

@@ -40,8 +40,6 @@
#include "gstwasapisink.h"
#include <mmdeviceapi.h>
GST_DEBUG_CATEGORY_STATIC (gst_wasapi_sink_debug);
#define GST_CAT_DEFAULT gst_wasapi_sink_debug
@@ -50,15 +48,17 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_WASAPI_STATIC_CAPS));
#define DEFAULT_ROLE GST_WASAPI_DEVICE_ROLE_CONSOLE
#define DEFAULT_MUTE FALSE
#define DEFAULT_ROLE GST_WASAPI_DEVICE_ROLE_CONSOLE
#define DEFAULT_MUTE FALSE
#define DEFAULT_EXCLUSIVE FALSE
enum
{
PROP_0,
PROP_ROLE,
PROP_MUTE,
PROP_DEVICE
PROP_DEVICE,
PROP_EXCLUSIVE
};
static void gst_wasapi_sink_dispose (GObject * object);
@@ -116,6 +116,12 @@ gst_wasapi_sink_class_init (GstWasapiSinkClass * klass)
"WASAPI playback device as a GUID string",
NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class,
PROP_EXCLUSIVE,
g_param_spec_boolean ("exclusive", "Exclusive mode",
"Open the device in exclusive mode",
DEFAULT_EXCLUSIVE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
gst_element_class_set_static_metadata (gstelement_class, "WasapiSrc",
"Sink/Audio",
@@ -209,6 +215,10 @@ gst_wasapi_sink_set_property (GObject * object, guint prop_id,
device ? g_utf8_to_utf16 (device, -1, NULL, NULL, NULL) : NULL;
break;
}
case PROP_EXCLUSIVE:
self->sharemode = g_value_get_boolean (value)
? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED;
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@@ -232,6 +242,10 @@ gst_wasapi_sink_get_property (GObject * object, guint prop_id,
g_value_take_string (value, self->device_strid ?
g_utf16_to_utf8 (self->device_strid, -1, NULL, NULL, NULL) : NULL);
break;
case PROP_EXCLUSIVE:
g_value_set_boolean (value,
self->sharemode == AUDCLNT_SHAREMODE_EXCLUSIVE);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@@ -244,7 +258,6 @@ gst_wasapi_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
GstWasapiSink *self = GST_WASAPI_SINK (bsink);
WAVEFORMATEX *format = NULL;
GstCaps *caps = NULL;
HRESULT hr;
GST_DEBUG_OBJECT (self, "entering get caps");
@@ -252,16 +265,18 @@ gst_wasapi_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
caps = gst_caps_ref (self->cached_caps);
} else {
GstCaps *template_caps;
gboolean ret;
template_caps = gst_pad_get_pad_template_caps (bsink->sinkpad);
if (!self->client)
gst_wasapi_sink_open (GST_AUDIO_SINK (bsink));
hr = IAudioClient_GetMixFormat (self->client, &format);
if (hr != S_OK || format == NULL) {
ret = gst_wasapi_util_get_device_format (GST_ELEMENT (self),
self->sharemode, self->device, self->client, &format);
if (!ret) {
GST_ELEMENT_ERROR (self, STREAM, FORMAT, (NULL),
("GetMixFormat failed: %s", gst_wasapi_util_hresult_to_string (hr)));
("failed to detect format"));
goto out;
}
@@ -302,6 +317,7 @@ gst_wasapi_sink_open (GstAudioSink * asink)
{
GstWasapiSink *self = GST_WASAPI_SINK (asink);
gboolean res = FALSE;
IMMDevice *device = NULL;
IAudioClient *client = NULL;
GST_DEBUG_OBJECT (self, "opening device");
@@ -314,7 +330,7 @@ gst_wasapi_sink_open (GstAudioSink * asink)
* For example, perhaps we should automatically switch to the new device if
* the default device is changed and a device isn't explicitly selected. */
if (!gst_wasapi_util_get_device_client (GST_ELEMENT (self), FALSE,
self->role, self->device_strid, &client)) {
self->role, self->device_strid, &device, &client)) {
if (!self->device_strid)
GST_ELEMENT_ERROR (self, RESOURCE, OPEN_WRITE, (NULL),
("Failed to get default device"));
@@ -325,6 +341,7 @@ gst_wasapi_sink_open (GstAudioSink * asink)
}
self->client = client;
self->device = device;
res = TRUE;
beach:
@@ -337,6 +354,11 @@ gst_wasapi_sink_close (GstAudioSink * asink)
{
GstWasapiSink *self = GST_WASAPI_SINK (asink);
if (self->device != NULL) {
IUnknown_Release (self->device);
self->device = NULL;
}
if (self->client != NULL) {
IUnknown_Release (self->client);
self->client = NULL;
@@ -352,7 +374,7 @@ gst_wasapi_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
gboolean res = FALSE;
REFERENCE_TIME latency_rt;
IAudioRenderClient *render_client = NULL;
gint64 default_period, min_period;
gint64 default_period, min_period, use_period;
guint bpf, rate;
HRESULT hr;
@@ -364,15 +386,27 @@ gst_wasapi_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
GST_INFO_OBJECT (self, "wasapi default period: %" G_GINT64_FORMAT
", min period: %" G_GINT64_FORMAT, default_period, min_period);
/* Set hnsBufferDuration to 0, which should, in theory, tell the device to
* create a buffer with the smallest latency possible. In practice, this is
* usually 2 * default_period. See:
* https://msdn.microsoft.com/en-us/library/windows/desktop/dd370871(v=vs.85).aspx
*
* NOTE: min_period is a lie, and I have never seen WASAPI use it as the
* current period */
hr = IAudioClient_Initialize (self->client, AUDCLNT_SHAREMODE_SHARED,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK, 0, 0, self->mix_format, NULL);
if (self->sharemode == AUDCLNT_SHAREMODE_SHARED) {
use_period = default_period;
/* Set hnsBufferDuration to 0, which should, in theory, tell the device to
* create a buffer with the smallest latency possible. In practice, this is
* usually 2 * default_period. See:
* https://msdn.microsoft.com/en-us/library/windows/desktop/dd370871(v=vs.85).aspx
*
* NOTE: min_period is a lie, and I have never seen WASAPI use it as the
* current period */
hr = IAudioClient_Initialize (self->client, AUDCLNT_SHAREMODE_SHARED,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK, 0, 0, self->mix_format, NULL);
} else {
use_period = min_period;
/* For some reason, we need to call this another time for exclusive mode */
CoInitialize (NULL);
/* FIXME: We should be able to use min_period as the device buffer size,
* but I'm hitting a problem in GStreamer. */
hr = IAudioClient_Initialize (self->client, AUDCLNT_SHAREMODE_EXCLUSIVE,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK, use_period, use_period,
self->mix_format, NULL);
}
if (hr != S_OK) {
GST_ELEMENT_ERROR (self, RESOURCE, OPEN_READ, (NULL),
("IAudioClient::Initialize () failed: %s",
@@ -394,8 +428,10 @@ gst_wasapi_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
/* Actual latency-time/buffer-time are different now */
spec->segsize = gst_util_uint64_scale_int_round (rate * bpf,
default_period * 100, GST_SECOND);
spec->segtotal = (self->buffer_frame_count * bpf) / spec->segsize;
use_period * 100, GST_SECOND);
/* We need a minimum of 2 segments to ensure glitch-free playback */
spec->segtotal = MAX (self->buffer_frame_count * bpf / spec->segsize, 2);
GST_INFO_OBJECT (self, "segsize is %i, segtotal is %i", spec->segsize,
spec->segtotal);
@@ -450,6 +486,9 @@ gst_wasapi_sink_unprepare (GstAudioSink * asink)
{
GstWasapiSink *self = GST_WASAPI_SINK (asink);
if (self->sharemode == AUDCLNT_SHAREMODE_EXCLUSIVE)
CoUninitialize ();
if (self->client != NULL) {
IAudioClient_Stop (self->client);
}
@@ -471,33 +510,37 @@ gst_wasapi_sink_write (GstAudioSink * asink, gpointer data, guint length)
guint pending = length;
while (pending > 0) {
guint have_frames, can_frames, n_frames, n_frames_padding, write_len;
/* We have N frames to be written out */
have_frames = pending / (self->mix_format->nBlockAlign);
guint n_frames, write_len;
WaitForSingleObject (self->event_handle, INFINITE);
/* Frames the card hasn't rendered yet */
hr = IAudioClient_GetCurrentPadding (self->client, &n_frames_padding);
if (hr != S_OK) {
GST_ERROR_OBJECT (self, "IAudioClient::GetCurrentPadding failed: %s",
gst_wasapi_util_hresult_to_string (hr));
length = 0;
goto beach;
if (self->sharemode == AUDCLNT_SHAREMODE_SHARED) {
guint have_frames, can_frames, n_frames_padding;
/* Frames the card hasn't rendered yet */
hr = IAudioClient_GetCurrentPadding (self->client, &n_frames_padding);
if (hr != S_OK) {
GST_ERROR_OBJECT (self, "IAudioClient::GetCurrentPadding failed: %s",
gst_wasapi_util_hresult_to_string (hr));
length = 0;
goto beach;
}
/* We have N frames to be written out */
have_frames = pending / (self->mix_format->nBlockAlign);
/* We can write out these many frames */
can_frames = self->buffer_frame_count - n_frames_padding;
/* We will write out these many frames, and this much length */
n_frames = MIN (can_frames, have_frames);
GST_TRACE_OBJECT (self, "total: %i, unread: %i, have: %i (%i bytes), "
"will write: %i", self->buffer_frame_count, n_frames_padding,
have_frames, pending, n_frames);
} else {
n_frames = self->buffer_frame_count;
}
/* We can write out these many frames */
can_frames = self->buffer_frame_count - n_frames_padding;
/* We will write out these many frames, and this much length */
n_frames = MIN (can_frames, have_frames);
write_len = n_frames * self->mix_format->nBlockAlign;
GST_TRACE_OBJECT (self, "total: %i, unread: %i, have: %i (%i bytes), "
"will write: %i (%i bytes)", self->buffer_frame_count, n_frames_padding,
have_frames, pending, n_frames, write_len);
hr = IAudioRenderClient_GetBuffer (self->render_client, n_frames,
(BYTE **) & dst);
if (hr != S_OK) {

sys/wasapi/gstwasapisink.h

@@ -40,6 +40,7 @@ struct _GstWasapiSink
{
GstAudioSink parent;
IMMDevice *device;
IAudioClient *client;
IAudioRenderClient *render_client;
HANDLE event_handle;
@@ -57,6 +58,7 @@ struct _GstWasapiSink
/* properties */
gint role;
gint sharemode;
gboolean mute;
wchar_t *device_strid;
};

sys/wasapi/gstwasapisrc.c

@@ -38,8 +38,6 @@
#include "gstwasapisrc.h"
#include <mmdeviceapi.h>
GST_DEBUG_CATEGORY_STATIC (gst_wasapi_src_debug);
#define GST_CAT_DEFAULT gst_wasapi_src_debug
@@ -48,13 +46,15 @@ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_WASAPI_STATIC_CAPS));
#define DEFAULT_ROLE GST_WASAPI_DEVICE_ROLE_CONSOLE
#define DEFAULT_ROLE GST_WASAPI_DEVICE_ROLE_CONSOLE
#define DEFAULT_EXCLUSIVE FALSE
enum
{
PROP_0,
PROP_ROLE,
PROP_DEVICE
PROP_DEVICE,
PROP_EXCLUSIVE
};
static void gst_wasapi_src_dispose (GObject * object);
@@ -108,6 +108,12 @@ gst_wasapi_src_class_init (GstWasapiSrcClass * klass)
"WASAPI playback device as a GUID string",
NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class,
PROP_EXCLUSIVE,
g_param_spec_boolean ("exclusive", "Exclusive mode",
"Open the device in exclusive mode",
DEFAULT_EXCLUSIVE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_add_static_pad_template (gstelement_class, &src_template);
gst_element_class_set_static_metadata (gstelement_class, "WasapiSrc",
"Source/Audio",
@@ -206,6 +212,10 @@ gst_wasapi_src_set_property (GObject * object, guint prop_id,
device ? g_utf8_to_utf16 (device, -1, NULL, NULL, NULL) : NULL;
break;
}
case PROP_EXCLUSIVE:
self->sharemode = g_value_get_boolean (value)
? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED;
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@@ -226,6 +236,10 @@ gst_wasapi_src_get_property (GObject * object, guint prop_id,
g_value_take_string (value, self->device_strid ?
g_utf16_to_utf8 (self->device_strid, -1, NULL, NULL, NULL) : NULL);
break;
case PROP_EXCLUSIVE:
g_value_set_boolean (value,
self->sharemode == AUDCLNT_SHAREMODE_EXCLUSIVE);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@@ -238,7 +252,6 @@ gst_wasapi_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
GstWasapiSrc *self = GST_WASAPI_SRC (bsrc);
WAVEFORMATEX *format = NULL;
GstCaps *caps = NULL;
HRESULT hr;
GST_DEBUG_OBJECT (self, "entering get caps");
@@ -246,16 +259,18 @@ gst_wasapi_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
caps = gst_caps_ref (self->cached_caps);
} else {
GstCaps *template_caps;
gboolean ret;
template_caps = gst_pad_get_pad_template_caps (bsrc->srcpad);
if (!self->client)
gst_wasapi_src_open (GST_AUDIO_SRC (bsrc));
hr = IAudioClient_GetMixFormat (self->client, &format);
if (hr != S_OK || format == NULL) {
ret = gst_wasapi_util_get_device_format (GST_ELEMENT (self),
self->sharemode, self->device, self->client, &format);
if (!ret) {
GST_ELEMENT_ERROR (self, STREAM, FORMAT, (NULL),
("GetMixFormat failed: %s", gst_wasapi_util_hresult_to_string (hr)));
("failed to detect format"));
goto out;
}
@@ -297,6 +312,7 @@ gst_wasapi_src_open (GstAudioSrc * asrc)
GstWasapiSrc *self = GST_WASAPI_SRC (asrc);
gboolean res = FALSE;
IAudioClient *client = NULL;
IMMDevice *device = NULL;
if (self->client)
return TRUE;
@@ -306,7 +322,7 @@ gst_wasapi_src_open (GstAudioSrc * asrc)
* For example, perhaps we should automatically switch to the new device if
* the default device is changed and a device isn't explicitly selected. */
if (!gst_wasapi_util_get_device_client (GST_ELEMENT (self), TRUE,
self->role, self->device_strid, &client)) {
self->role, self->device_strid, &device, &client)) {
if (!self->device_strid)
GST_ELEMENT_ERROR (self, RESOURCE, OPEN_READ, (NULL),
("Failed to get default device"));
@@ -317,6 +333,7 @@ gst_wasapi_src_open (GstAudioSrc * asrc)
}
self->client = client;
self->device = device;
res = TRUE;
beach:
@@ -329,6 +346,11 @@ gst_wasapi_src_close (GstAudioSrc * asrc)
{
GstWasapiSrc *self = GST_WASAPI_SRC (asrc);
if (self->device != NULL) {
IUnknown_Release (self->device);
self->device = NULL;
}
if (self->client != NULL) {
IUnknown_Release (self->client);
self->client = NULL;
@@ -346,7 +368,7 @@ gst_wasapi_src_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
guint64 client_clock_freq = 0;
IAudioCaptureClient *capture_client = NULL;
REFERENCE_TIME latency_rt;
gint64 default_period, min_period;
gint64 default_period, min_period, use_period;
guint bpf, rate, buffer_frames;
HRESULT hr;
@@ -359,18 +381,30 @@ gst_wasapi_src_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
GST_INFO_OBJECT (self, "wasapi default period: %" G_GINT64_FORMAT
", min period: %" G_GINT64_FORMAT, default_period, min_period);
/* Set hnsBufferDuration to 0, which should, in theory, tell the device to
* create a buffer with the smallest latency possible. In practice, this is
* usually 2 * default_period. See:
* https://msdn.microsoft.com/en-us/library/windows/desktop/dd370871(v=vs.85).aspx
*
* NOTE: min_period is a lie, and I have never seen WASAPI use it as the
* current period */
hr = IAudioClient_Initialize (self->client, AUDCLNT_SHAREMODE_SHARED,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK, 0, 0, self->mix_format, NULL);
if (self->sharemode == AUDCLNT_SHAREMODE_SHARED) {
use_period = default_period;
/* Set hnsBufferDuration to 0, which should, in theory, tell the device to
* create a buffer with the smallest latency possible. In practice, this is
* usually 2 * default_period. See:
* https://msdn.microsoft.com/en-us/library/windows/desktop/dd370871(v=vs.85).aspx
*
* NOTE: min_period is a lie, and I have never seen WASAPI use it as the
* current period */
hr = IAudioClient_Initialize (self->client, AUDCLNT_SHAREMODE_SHARED,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK, 0, 0, self->mix_format, NULL);
} else {
use_period = default_period;
/* For some reason, we need to call this another time for exclusive mode */
CoInitialize (NULL);
/* FIXME: We should be able to use min_period as the device buffer size,
* but I'm hitting a problem in GStreamer. */
hr = IAudioClient_Initialize (self->client, AUDCLNT_SHAREMODE_EXCLUSIVE,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK, use_period, use_period,
self->mix_format, NULL);
}
if (hr != S_OK) {
GST_ELEMENT_ERROR (self, RESOURCE, OPEN_READ, (NULL),
("IAudioClient::Initialize failed: %s",
("IAudioClient::Initialize () failed: %s",
gst_wasapi_util_hresult_to_string (hr)));
goto beach;
}
@@ -388,8 +422,10 @@ gst_wasapi_src_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
"rate is %i Hz", buffer_frames, bpf, rate);
spec->segsize = gst_util_uint64_scale_int_round (rate * bpf,
default_period * 100, GST_SECOND);
spec->segtotal = (buffer_frames * bpf) / spec->segsize;
use_period * 100, GST_SECOND);
/* We need a minimum of 2 segments to ensure glitch-free playback */
spec->segtotal = MAX (self->buffer_frame_count * bpf / spec->segsize, 2);
GST_INFO_OBJECT (self, "segsize is %i, segtotal is %i", spec->segsize,
spec->segtotal);
@@ -460,6 +496,9 @@ gst_wasapi_src_unprepare (GstAudioSrc * asrc)
{
GstWasapiSrc *self = GST_WASAPI_SRC (asrc);
if (self->sharemode == AUDCLNT_SHAREMODE_EXCLUSIVE)
CoUninitialize ();
if (self->client != NULL) {
IAudioClient_Stop (self->client);
}

sys/wasapi/gstwasapisrc.h

@@ -40,6 +40,7 @@ struct _GstWasapiSrc
{
GstAudioSrc parent;
IMMDevice *device;
IAudioClient *client;
IAudioClock *client_clock;
guint64 client_clock_freq;
@@ -59,6 +60,7 @@ struct _GstWasapiSrc
/* properties */
gint role;
gint sharemode;
wchar_t *device_strid;
};

sys/wasapi/gstwasapiutil.c

@@ -25,8 +25,6 @@
#include "gstwasapiutil.h"
#include "gstwasapidevice.h"
#include <mmdeviceapi.h>
/* This was only added to MinGW in ~2015 and our Cerbero toolchain is too old */
#if defined(_MSC_VER)
#include <functiondiscoverykeys_devpkey.h>
@@ -427,10 +425,83 @@ err:
return ret;
}
gboolean
gst_wasapi_util_get_device_format (GstElement * element,
gint device_mode, IMMDevice * device, IAudioClient * client,
WAVEFORMATEX ** ret_format)
{
WAVEFORMATEX *format;
HRESULT hr;
*ret_format = NULL;
hr = IAudioClient_GetMixFormat (client, &format);
if (hr != S_OK || format == NULL) {
GST_ERROR_OBJECT (element, "GetMixFormat failed: %s",
gst_wasapi_util_hresult_to_string (hr));
return FALSE;
}
/* WASAPI always accepts the format returned by GetMixFormat in shared mode */
if (device_mode == AUDCLNT_SHAREMODE_SHARED)
goto out;
/* WASAPI may or may not support this format in exclusive mode */
hr = IAudioClient_IsFormatSupported (client, AUDCLNT_SHAREMODE_EXCLUSIVE,
format, NULL);
if (hr == S_OK)
goto out;
CoTaskMemFree (format);
/* Open the device property store, and get the format that WASAPI has been
* using for sending data to the device */
{
PROPVARIANT var;
IPropertyStore *prop_store = NULL;
hr = IMMDevice_OpenPropertyStore (device, STGM_READ, &prop_store);
if (hr != S_OK) {
GST_ERROR_OBJECT (element, "OpenPropertyStore failed: %s",
gst_wasapi_util_hresult_to_string (hr));
return FALSE;
}
hr = IPropertyStore_GetValue (prop_store, &PKEY_AudioEngine_DeviceFormat,
&var);
if (hr != S_OK) {
GST_ERROR_OBJECT (element, "GetValue failed: %s",
gst_wasapi_util_hresult_to_string (hr));
IUnknown_Release (prop_store);
return FALSE;
}
format = malloc (var.blob.cbSize);
memcpy (format, var.blob.pBlobData, var.blob.cbSize);
PropVariantClear (&var);
IUnknown_Release (prop_store);
}
/* WASAPI may or may not support this format in exclusive mode */
hr = IAudioClient_IsFormatSupported (client, AUDCLNT_SHAREMODE_EXCLUSIVE,
format, NULL);
if (hr == S_OK)
goto out;
GST_ERROR_OBJECT (element, "AudioEngine DeviceFormat not supported");
free (format);
return FALSE;
out:
*ret_format = format;
return TRUE;
}
gboolean
gst_wasapi_util_get_device_client (GstElement * element,
gboolean capture, gint role, const wchar_t * device_strid,
IAudioClient ** ret_client)
IMMDevice ** ret_device, IAudioClient ** ret_client)
{
gboolean res = FALSE;
HRESULT hr;
@@ -468,7 +539,9 @@ gst_wasapi_util_get_device_client (GstElement * element,
}
IUnknown_AddRef (client);
IUnknown_AddRef (device);
*ret_client = client;
*ret_device = device;
res = TRUE;

sys/wasapi/gstwasapiutil.h

@@ -25,6 +25,7 @@
#include <gst/audio/gstaudiosrc.h>
#include <gst/audio/gstaudiosink.h>
#include <mmdeviceapi.h>
#include <audioclient.h>
/* Static Caps shared between source, sink, and device provider */
@@ -57,7 +58,11 @@ gboolean gst_wasapi_util_get_devices (GstElement * element, gboolean active,
gboolean gst_wasapi_util_get_device_client (GstElement * element,
gboolean capture, gint role, const wchar_t * device_strid,
IAudioClient ** ret_client);
IMMDevice ** ret_device, IAudioClient ** ret_client);
gboolean gst_wasapi_util_get_device_format (GstElement * element,
gint device_mode, IMMDevice * device, IAudioClient * client,
WAVEFORMATEX ** ret_format);
gboolean gst_wasapi_util_get_render_client (GstElement * element,
IAudioClient * client, IAudioRenderClient ** ret_render_client);