mirror of
https://gitlab.freedesktop.org/gstreamer/gstreamer.git
synced 2024-11-27 04:01:08 +00:00
wasapi: Use a macro for HRESULT failure paths
Saves a lot of boilerplate across all files.
parent 751e85fa45
commit 14b2d6b27a
4 changed files with 85 additions and 246 deletions
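
To make the pattern easier to follow before reading the hunks, here is a small, self-contained sketch of the macro technique this commit introduces. It deliberately uses plain-C stand-ins (a typedef'd HRESULT, fprintf logging, and the hypothetical helpers fake_call and prepare) rather than the real GStreamer/WASAPI API; the actual macros, defined in the header hunk at the end of this diff, log through GST_ERROR_OBJECT and gst_wasapi_util_hresult_to_string, and HR_FAILED_GOTO additionally sets a local res = FALSE before jumping.

/* Illustrative stand-in only, not the GStreamer code: shows how one macro
 * family replaces the repeated "if (hr != S_OK) { log; cleanup; }" blocks. */
#include <stdio.h>

typedef long HRESULT;
#define S_OK 0

/* Log the failed call, then run the caller-supplied statement(s),
 * e.g. "return -1" or "length = 0; goto beach". */
#define HR_FAILED_AND(hr,func,and) \
  do { \
    if ((hr) != S_OK) { \
      fprintf (stderr, #func " failed (hr %ld)\n", (long) (hr)); \
      and; \
    } \
  } while (0)

#define HR_FAILED_RET(hr,func,ret) HR_FAILED_AND (hr, func, return ret)
#define HR_FAILED_GOTO(hr,func,where) HR_FAILED_AND (hr, func, res = 0; goto where)

/* Hypothetical helper standing in for an IAudioClient_* call. */
static HRESULT
fake_call (int ok)
{
  return ok ? S_OK : (HRESULT) -1;
}

static int
prepare (void)
{
  int res = 1;                  /* HR_FAILED_GOTO expects a local `res` */
  HRESULT hr;

  hr = fake_call (1);
  HR_FAILED_RET (hr, fake_call, 0);       /* early return on failure */

  hr = fake_call (0);
  HR_FAILED_GOTO (hr, fake_call, beach);  /* log, res = 0, jump to cleanup */

beach:
  return res;
}

int
main (void)
{
  printf ("prepare () -> %d\n", prepare ());
  return 0;
}

Every call site in the hunks below collapses the same way: the multi-line check after each IAudioClient/IMMDevice call becomes a single HR_FAILED_RET, HR_FAILED_GOTO, or HR_FAILED_AND line.
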
@@ -399,12 +399,7 @@ gst_wasapi_sink_get_can_frames (GstWasapiSink * self)
 
   /* Frames the card hasn't rendered yet */
   hr = IAudioClient_GetCurrentPadding (self->client, &n_frames_padding);
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ERROR_OBJECT (self, "IAudioClient::GetCurrentPadding failed: %s", msg);
-    g_free (msg);
-    return -1;
-  }
+  HR_FAILED_RET (hr, IAudioClient::GetCurrentPadding, -1);
 
   GST_DEBUG_OBJECT (self, "%i unread frames (padding)", n_frames_padding);
 
@@ -425,10 +420,7 @@ gst_wasapi_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
 
   hr = IAudioClient_GetDevicePeriod (self->client, &default_period,
       &min_period);
-  if (hr != S_OK) {
-    GST_ERROR_OBJECT (self, "IAudioClient::GetDevicePeriod failed");
-    return FALSE;
-  }
+  HR_FAILED_RET (hr, IAudioClient::GetDevicePeriod, FALSE);
 
   GST_INFO_OBJECT (self, "wasapi default period: %" G_GINT64_FORMAT
       ", min period: %" G_GINT64_FORMAT, default_period, min_period);
@@ -470,13 +462,7 @@ gst_wasapi_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
 
     /* Calculate a new aligned period. First get the aligned buffer size. */
     hr = IAudioClient_GetBufferSize (self->client, &n_frames);
-    if (hr != S_OK) {
-      gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ELEMENT_ERROR (self, RESOURCE, OPEN_WRITE, (NULL),
-          ("IAudioClient::GetBufferSize() failed: %s", msg));
-      g_free (msg);
-      goto beach;
-    }
+    HR_FAILED_GOTO (hr, IAudioClient::GetBufferSize, beach);
 
     device_period = (GST_SECOND / 100) * n_frames / rate;
 
@@ -487,20 +473,12 @@ gst_wasapi_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
         AUDCLNT_STREAMFLAGS_EVENTCALLBACK, device_period,
         device_period, self->mix_format, NULL);
   }
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ELEMENT_ERROR (self, RESOURCE, OPEN_WRITE, (NULL),
-        ("IAudioClient::Initialize () failed: %s", msg));
-    g_free (msg);
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClient::Initialize, beach);
 
   /* Total size of the allocated buffer that we will write to */
   hr = IAudioClient_GetBufferSize (self->client, &self->buffer_frame_count);
-  if (hr != S_OK) {
-    GST_ERROR_OBJECT (self, "IAudioClient::GetBufferSize failed");
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClient::GetBufferSize, beach);
+
   GST_INFO_OBJECT (self, "buffer size is %i frames, bpf is %i bytes, "
       "rate is %i Hz", self->buffer_frame_count, bpf, rate);
 
@@ -516,19 +494,14 @@ gst_wasapi_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
 
   /* Get latency for logging */
   hr = IAudioClient_GetStreamLatency (self->client, &latency_rt);
-  if (hr != S_OK) {
-    GST_ERROR_OBJECT (self, "IAudioClient::GetStreamLatency failed");
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClient::GetStreamLatency, beach);
+
   GST_INFO_OBJECT (self, "wasapi stream latency: %" G_GINT64_FORMAT " (%"
       G_GINT64_FORMAT "ms)", latency_rt, latency_rt / 10000);
 
   /* Set the event handler which will trigger writes */
   hr = IAudioClient_SetEventHandle (self->client, self->event_handle);
-  if (hr != S_OK) {
-    GST_ERROR_OBJECT (self, "IAudioClient::SetEventHandle failed");
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClient::SetEventHandle, beach);
 
   /* Get render sink client and start it up */
   if (!gst_wasapi_util_get_render_client (GST_ELEMENT (self), self->client,
@@ -556,32 +529,17 @@ gst_wasapi_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
 
     hr = IAudioRenderClient_GetBuffer (self->render_client, n_frames,
         (BYTE **) & dst);
-    if (hr != S_OK) {
-      gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ELEMENT_ERROR (self, RESOURCE, WRITE, (NULL),
-          ("IAudioRenderClient::GetBuffer failed: %s", msg));
-      g_free (msg);
-      goto beach;
-    }
+    HR_FAILED_GOTO (hr, IAudioRenderClient::GetBuffer, beach);
 
     GST_DEBUG_OBJECT (self, "pre-wrote %i bytes of silence", len);
 
    hr = IAudioRenderClient_ReleaseBuffer (self->render_client, n_frames,
         AUDCLNT_BUFFERFLAGS_SILENT);
-    if (hr != S_OK) {
-      gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ERROR_OBJECT (self, "IAudioRenderClient::ReleaseBuffer failed: %s",
-          msg);
-      g_free (msg);
-      goto beach;
-    }
+    HR_FAILED_GOTO (hr, IAudioRenderClient::ReleaseBuffer, beach);
   }
 
   hr = IAudioClient_Start (self->client);
-  if (hr != S_OK) {
-    GST_ERROR_OBJECT (self, "IAudioClient::Start failed");
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClient::Start, beach);
 
   gst_audio_ring_buffer_set_channel_positions (GST_AUDIO_BASE_SINK
       (self)->ringbuffer, self->positions);
@@ -660,27 +618,14 @@ gst_wasapi_sink_write (GstAudioSink * asink, gpointer data, guint length)
 
     hr = IAudioRenderClient_GetBuffer (self->render_client, n_frames,
         (BYTE **) & dst);
-    if (hr != S_OK) {
-      gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ELEMENT_ERROR (self, RESOURCE, WRITE, (NULL),
-          ("IAudioRenderClient::GetBuffer failed: %s", msg));
-      g_free (msg);
-      length = 0;
-      goto beach;
-    }
+    HR_FAILED_AND (hr, IAudioRenderClient::GetBuffer, length = 0; goto beach);
 
     memcpy (dst, data, write_len);
 
     hr = IAudioRenderClient_ReleaseBuffer (self->render_client, n_frames,
         self->mute ? AUDCLNT_BUFFERFLAGS_SILENT : 0);
-    if (hr != S_OK) {
-      gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ERROR_OBJECT (self, "IAudioRenderClient::ReleaseBuffer failed: %s",
-          msg);
-      g_free (msg);
-      length = 0;
-      goto beach;
-    }
+    HR_FAILED_AND (hr, IAudioRenderClient::ReleaseBuffer, length = 0;
+        goto beach);
 
     pending -= write_len;
   }
@@ -698,12 +643,7 @@ gst_wasapi_sink_delay (GstAudioSink * asink)
   HRESULT hr;
 
   hr = IAudioClient_GetCurrentPadding (self->client, &delay);
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ELEMENT_ERROR (self, RESOURCE, READ, (NULL),
-        ("IAudioClient::GetCurrentPadding failed %s", msg));
-    g_free (msg);
-  }
+  HR_FAILED_RET (hr, IAudioClient::GetCurrentPadding, 0);
 
   return delay;
 }
@@ -714,21 +654,12 @@ gst_wasapi_sink_reset (GstAudioSink * asink)
   GstWasapiSink *self = GST_WASAPI_SINK (asink);
   HRESULT hr;
 
-  if (self->client) {
-    hr = IAudioClient_Stop (self->client);
-    if (hr != S_OK) {
-      gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ERROR_OBJECT (self, "IAudioClient::Stop () failed: %s", msg);
-      g_free (msg);
+  if (!self->client)
     return;
-    }
+
+  hr = IAudioClient_Stop (self->client);
+  HR_FAILED_RET (hr, IAudioClient::Stop,);
 
   hr = IAudioClient_Reset (self->client);
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ERROR_OBJECT (self, "IAudioClient::Reset () failed: %s", msg);
-    g_free (msg);
-    return;
-  }
-  }
+  HR_FAILED_RET (hr, IAudioClient::Reset,);
 }
@@ -387,10 +387,8 @@ gst_wasapi_src_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
 
   hr = IAudioClient_GetDevicePeriod (self->client, &default_period,
       &min_period);
-  if (hr != S_OK) {
-    GST_ERROR_OBJECT (self, "IAudioClient::GetDevicePeriod failed");
-    return FALSE;
-  }
+  HR_FAILED_RET (hr, IAudioClient::GetDevicePeriod, FALSE);
+
   GST_INFO_OBJECT (self, "wasapi default period: %" G_GINT64_FORMAT
       ", min period: %" G_GINT64_FORMAT, default_period, min_period);
 
@@ -431,13 +429,7 @@ gst_wasapi_src_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
 
     /* Calculate a new aligned period. First get the aligned buffer size. */
     hr = IAudioClient_GetBufferSize (self->client, &n_frames);
-    if (hr != S_OK) {
-      gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ELEMENT_ERROR (self, RESOURCE, OPEN_WRITE, (NULL),
-          ("IAudioClient::GetBufferSize() failed: %s", msg));
-      g_free (msg);
-      goto beach;
-    }
+    HR_FAILED_GOTO (hr, IAudioClient::GetBufferSize, beach);
 
     device_period = (GST_SECOND / 100) * n_frames / rate;
 
@@ -448,20 +440,11 @@ gst_wasapi_src_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
         AUDCLNT_STREAMFLAGS_EVENTCALLBACK, device_period,
         device_period, self->mix_format, NULL);
   }
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ELEMENT_ERROR (self, RESOURCE, OPEN_READ, (NULL),
-        ("IAudioClient::Initialize () failed: %s", msg));
-    g_free (msg);
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClient::Initialize, beach);
 
   /* Total size in frames of the allocated buffer that we will read from */
   hr = IAudioClient_GetBufferSize (self->client, &buffer_frames);
-  if (hr != S_OK) {
-    GST_ERROR_OBJECT (self, "IAudioClient::GetBufferSize failed");
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClient::GetBufferSize, beach);
 
   GST_INFO_OBJECT (self, "buffer size is %i frames, bpf is %i bytes, "
       "rate is %i Hz", buffer_frames, bpf, rate);
@@ -477,31 +460,22 @@ gst_wasapi_src_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
 
   /* Get WASAPI latency for logging */
   hr = IAudioClient_GetStreamLatency (self->client, &latency_rt);
-  if (hr != S_OK) {
-    GST_ERROR_OBJECT (self, "IAudioClient::GetStreamLatency failed");
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClient::GetStreamLatency, beach);
+
   GST_INFO_OBJECT (self, "wasapi stream latency: %" G_GINT64_FORMAT " (%"
       G_GINT64_FORMAT " ms)", latency_rt, latency_rt / 10000);
 
   /* Set the event handler which will trigger reads */
   hr = IAudioClient_SetEventHandle (self->client, self->event_handle);
-  if (hr != S_OK) {
-    GST_ERROR_OBJECT (self, "IAudioClient::SetEventHandle failed");
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClient::SetEventHandle, beach);
 
   /* Get the clock and the clock freq */
   if (!gst_wasapi_util_get_clock (GST_ELEMENT (self), self->client,
-          &self->client_clock)) {
+          &self->client_clock))
     goto beach;
-  }
 
   hr = IAudioClock_GetFrequency (self->client_clock, &self->client_clock_freq);
-  if (hr != S_OK) {
-    GST_ERROR_OBJECT (self, "IAudioClock::GetFrequency failed");
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClock::GetFrequency, beach);
 
   /* Get capture source client and start it up */
   if (!gst_wasapi_util_get_capture_client (GST_ELEMENT (self), self->client,
@@ -510,10 +484,7 @@ gst_wasapi_src_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
   }
 
   hr = IAudioClient_Start (self->client);
-  if (hr != S_OK) {
-    GST_ERROR_OBJECT (self, "IAudioClient::Start failed");
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClock::Start, beach);
 
   gst_audio_ring_buffer_set_channel_positions (GST_AUDIO_BASE_SRC
       (self)->ringbuffer, self->positions);
@@ -636,13 +607,7 @@ gst_wasapi_src_read (GstAudioSrc * asrc, gpointer data, guint length,
 
     /* Always release all captured buffers if we've captured any at all */
    hr = IAudioCaptureClient_ReleaseBuffer (self->capture_client, have_frames);
-    if (hr != S_OK) {
-      gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ERROR_OBJECT (self,
-          "IAudioCaptureClient::ReleaseBuffer () failed: %s", msg);
-      g_free (msg);
-      goto beach;
-    }
+    HR_FAILED_AND (hr, IAudioClock::ReleaseBuffer, goto beach);
   }
 
 
@@ -659,12 +624,7 @@ gst_wasapi_src_delay (GstAudioSrc * asrc)
   HRESULT hr;
 
   hr = IAudioClient_GetCurrentPadding (self->client, &delay);
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ELEMENT_ERROR (self, RESOURCE, READ, (NULL),
-        ("IAudioClient::GetCurrentPadding failed %s", msg));
-    g_free (msg);
-  }
+  HR_FAILED_RET (hr, IAudioClock::GetCurrentPadding, 0);
 
   return delay;
 }
@@ -675,23 +635,14 @@ gst_wasapi_src_reset (GstAudioSrc * asrc)
   GstWasapiSrc *self = GST_WASAPI_SRC (asrc);
   HRESULT hr;
 
-  if (self->client) {
-    hr = IAudioClient_Stop (self->client);
-    if (hr != S_OK) {
-      gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ERROR_OBJECT (self, "IAudioClient::Stop () failed: %s", msg);
-      g_free (msg);
+  if (!self->client)
     return;
-    }
+
+  hr = IAudioClient_Stop (self->client);
+  HR_FAILED_RET (hr, IAudioClock::Stop,);
 
   hr = IAudioClient_Reset (self->client);
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ERROR_OBJECT (self, "IAudioClient::Reset () failed: %s", msg);
-    g_free (msg);
-    return;
-  }
-  }
+  HR_FAILED_RET (hr, IAudioClock::Reset,);
 }
 
 static GstClockTime
@@ -706,8 +657,7 @@ gst_wasapi_src_get_time (GstClock * clock, gpointer user_data)
     return GST_CLOCK_TIME_NONE;
 
   hr = IAudioClock_GetPosition (self->client_clock, &devpos, NULL);
-  if (G_UNLIKELY (hr != S_OK))
-    return GST_CLOCK_TIME_NONE;
+  HR_FAILED_RET (hr, IAudioClock::GetPosition, GST_CLOCK_TIME_NONE);
 
   result = gst_util_uint64_scale_int (devpos, GST_SECOND,
       self->client_clock_freq);
@@ -286,29 +286,23 @@ gst_wasapi_util_hresult_to_string (HRESULT hr)
 }
 
 static IMMDeviceEnumerator *
-gst_wasapi_util_get_device_enumerator (GstElement * element)
+gst_wasapi_util_get_device_enumerator (GstElement * self)
 {
   HRESULT hr;
   IMMDeviceEnumerator *enumerator = NULL;
 
   hr = CoCreateInstance (&CLSID_MMDeviceEnumerator, NULL, CLSCTX_ALL,
       &IID_IMMDeviceEnumerator, (void **) &enumerator);
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ERROR_OBJECT (element, "CoCreateInstance (MMDeviceEnumerator) failed"
-        ": %s", msg);
-    g_free (msg);
-    return NULL;
-  }
+  HR_FAILED_RET (hr, CoCreateInstance (MMDeviceEnumerator), NULL);
 
   return enumerator;
 }
 
 gboolean
-gst_wasapi_util_get_devices (GstElement * element, gboolean active,
+gst_wasapi_util_get_devices (GstElement * self, gboolean active,
     GList ** devices)
 {
-  gboolean ret = FALSE;
+  gboolean res = FALSE;
   static GstStaticCaps scaps = GST_STATIC_CAPS (GST_WASAPI_STATIC_CAPS);
   DWORD dwStateMask = active ? DEVICE_STATE_ACTIVE : DEVICE_STATEMASK_ALL;
   IMMDeviceCollection *device_collection = NULL;
@@ -319,27 +313,16 @@ gst_wasapi_util_get_devices (GstElement * element, gboolean active,
 
   *devices = NULL;
 
-  enumerator = gst_wasapi_util_get_device_enumerator (element);
+  enumerator = gst_wasapi_util_get_device_enumerator (self);
   if (!enumerator)
     return FALSE;
 
   hr = IMMDeviceEnumerator_EnumAudioEndpoints (enumerator, eAll, dwStateMask,
       &device_collection);
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ERROR_OBJECT (element, "IMMDeviceEnumerator::EnumAudioEndpoints "
-        "failed: %s", msg);
-    g_free (msg);
-    goto err;
-  }
+  HR_FAILED_GOTO (hr, IMMDeviceEnumerator::EnumAudioEndpoints, err);
 
   hr = IMMDeviceCollection_GetCount (device_collection, &count);
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ERROR_OBJECT (element, "Failed to count devices: %s", msg);
-    g_free (msg);
-    goto err;
-  }
+  HR_FAILED_GOTO (hr, IMMDeviceCollection::GetCount, err);
 
   /* Create a GList of GstDevices* to return */
   for (ii = 0; ii < count; ii++) {
@@ -404,7 +387,7 @@ gst_wasapi_util_get_devices (GstElement * element, gboolean active,
         (void **) &client);
     if (hr != S_OK) {
       gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ERROR_OBJECT (element, "IMMDevice::Activate (IID_IAudioClient) failed"
+      GST_ERROR_OBJECT (self, "IMMDevice::Activate (IID_IAudioClient) failed"
          "on %s: %s", strid, msg);
       g_free (msg);
       goto next;
@@ -453,18 +436,18 @@ gst_wasapi_util_get_devices (GstElement * element, gboolean active,
     g_free (strid);
   }
 
-  ret = TRUE;
+  res = TRUE;
 
 err:
   if (enumerator)
     IUnknown_Release (enumerator);
   if (device_collection)
     IUnknown_Release (device_collection);
-  return ret;
+  return res;
 }
 
 gboolean
-gst_wasapi_util_get_device_format (GstElement * element,
+gst_wasapi_util_get_device_format (GstElement * self,
     gint device_mode, IMMDevice * device, IAudioClient * client,
     WAVEFORMATEX ** ret_format)
 {
@@ -474,12 +457,7 @@ gst_wasapi_util_get_device_format (GstElement * element,
   *ret_format = NULL;
 
   hr = IAudioClient_GetMixFormat (client, &format);
-  if (hr != S_OK || format == NULL) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ERROR_OBJECT (element, "GetMixFormat failed: %s", msg);
-    g_free (msg);
-    return FALSE;
-  }
+  HR_FAILED_RET (hr, IAudioClient::GetMixFormat, FALSE);
 
   /* WASAPI always accepts the format returned by GetMixFormat in shared mode */
   if (device_mode == AUDCLNT_SHAREMODE_SHARED)
@@ -500,18 +478,13 @@ gst_wasapi_util_get_device_format (GstElement * element,
     IPropertyStore *prop_store = NULL;
 
     hr = IMMDevice_OpenPropertyStore (device, STGM_READ, &prop_store);
-    if (hr != S_OK) {
-      gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ERROR_OBJECT (element, "OpenPropertyStore failed: %s", msg);
-      g_free (msg);
-      return FALSE;
-    }
+    HR_FAILED_RET (hr, IMMDevice::OpenPropertyStore, FALSE);
 
     hr = IPropertyStore_GetValue (prop_store, &PKEY_AudioEngine_DeviceFormat,
         &var);
     if (hr != S_OK) {
       gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ERROR_OBJECT (element, "GetValue failed: %s", msg);
+      GST_ERROR_OBJECT (self, "GetValue failed: %s", msg);
       g_free (msg);
       IUnknown_Release (prop_store);
       return FALSE;
@@ -530,7 +503,7 @@ gst_wasapi_util_get_device_format (GstElement * element,
   if (hr == S_OK)
     goto out;
 
-  GST_ERROR_OBJECT (element, "AudioEngine DeviceFormat not supported");
+  GST_ERROR_OBJECT (self, "AudioEngine DeviceFormat not supported");
   free (format);
   return FALSE;
 
@@ -540,7 +513,7 @@ out:
 }
 
 gboolean
-gst_wasapi_util_get_device_client (GstElement * element,
+gst_wasapi_util_get_device_client (GstElement * self,
     gboolean capture, gint role, const wchar_t * device_strid,
     IMMDevice ** ret_device, IAudioClient ** ret_client)
 {
@@ -550,24 +523,18 @@ gst_wasapi_util_get_device_client (GstElement * element,
   IMMDevice *device = NULL;
   IAudioClient *client = NULL;
 
-  if (!(enumerator = gst_wasapi_util_get_device_enumerator (element)))
+  if (!(enumerator = gst_wasapi_util_get_device_enumerator (self)))
     goto beach;
 
   if (!device_strid) {
     hr = IMMDeviceEnumerator_GetDefaultAudioEndpoint (enumerator,
         capture ? eCapture : eRender, role, &device);
-    if (hr != S_OK) {
-      gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ERROR_OBJECT (element,
-          "IMMDeviceEnumerator::GetDefaultAudioEndpoint failed: %s", msg);
-      g_free (msg);
-      goto beach;
-    }
+    HR_FAILED_GOTO (hr, IMMDeviceEnumerator::GetDefaultAudioEndpoint, beach);
   } else {
     hr = IMMDeviceEnumerator_GetDevice (enumerator, device_strid, &device);
     if (hr != S_OK) {
       gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-      GST_ERROR_OBJECT (element, "IMMDeviceEnumerator::GetDevice (%S) failed"
+      GST_ERROR_OBJECT (self, "IMMDeviceEnumerator::GetDevice (%S) failed"
          ": %s", device_strid, msg);
       g_free (msg);
       goto beach;
@@ -576,13 +543,7 @@ gst_wasapi_util_get_device_client (GstElement * element,
 
   hr = IMMDevice_Activate (device, &IID_IAudioClient, CLSCTX_ALL, NULL,
       (void **) &client);
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ERROR_OBJECT (element, "IMMDevice::Activate (IID_IAudioClient) failed"
-        ": %s", msg);
-    g_free (msg);
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IMMDevice::Activate (IID_IAudioClient), beach);
 
   IUnknown_AddRef (client);
   IUnknown_AddRef (device);
@@ -605,7 +566,7 @@ beach:
 }
 
 gboolean
-gst_wasapi_util_get_render_client (GstElement * element, IAudioClient * client,
+gst_wasapi_util_get_render_client (GstElement * self, IAudioClient * client,
     IAudioRenderClient ** ret_render_client)
 {
   gboolean res = FALSE;
@@ -614,13 +575,7 @@ gst_wasapi_util_get_render_client (GstElement * element, IAudioClient * client,
 
   hr = IAudioClient_GetService (client, &IID_IAudioRenderClient,
      (void **) &render_client);
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ERROR_OBJECT (element,
-        "IAudioClient::GetService (IID_IAudioRenderClient) failed: %s", msg);
-    g_free (msg);
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClient::GetService, beach);
 
   *ret_render_client = render_client;
   res = TRUE;
@@ -630,7 +585,7 @@ beach:
 }
 
 gboolean
-gst_wasapi_util_get_capture_client (GstElement * element, IAudioClient * client,
+gst_wasapi_util_get_capture_client (GstElement * self, IAudioClient * client,
     IAudioCaptureClient ** ret_capture_client)
 {
   gboolean res = FALSE;
@@ -639,13 +594,7 @@ gst_wasapi_util_get_capture_client (GstElement * element, IAudioClient * client,
 
   hr = IAudioClient_GetService (client, &IID_IAudioCaptureClient,
      (void **) &capture_client);
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ERROR_OBJECT (element,
-        "IAudioClient::GetService (IID_IAudioCaptureClient) failed: %s", msg);
-    g_free (msg);
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClient::GetService, beach);
 
   *ret_capture_client = capture_client;
   res = TRUE;
@@ -655,7 +604,7 @@ beach:
 }
 
 gboolean
-gst_wasapi_util_get_clock (GstElement * element, IAudioClient * client,
+gst_wasapi_util_get_clock (GstElement * self, IAudioClient * client,
     IAudioClock ** ret_clock)
 {
   gboolean res = FALSE;
@@ -663,13 +612,7 @@ gst_wasapi_util_get_clock (GstElement * element, IAudioClient * client,
   IAudioClock *clock = NULL;
 
   hr = IAudioClient_GetService (client, &IID_IAudioClock, (void **) &clock);
-  if (hr != S_OK) {
-    gchar *msg = gst_wasapi_util_hresult_to_string (hr);
-    GST_ERROR_OBJECT (element,
-        "IAudioClient::GetService (IID_IAudioClock) failed: %s", msg);
-    g_free (msg);
-    goto beach;
-  }
+  HR_FAILED_GOTO (hr, IAudioClient::GetService, beach);
 
   *ret_clock = clock;
   res = TRUE;
@@ -35,6 +35,21 @@
     "rate = " GST_AUDIO_RATE_RANGE ", " \
     "channels = " GST_AUDIO_CHANNELS_RANGE
 
+/* Standard error path */
+#define HR_FAILED_AND(hr,func,and) \
+  do { \
+    if (hr != S_OK) { \
+      gchar *msg = gst_wasapi_util_hresult_to_string (hr); \
+      GST_ERROR_OBJECT (self, #func " failed: %s", msg); \
+      g_free (msg); \
+      and; \
+    } \
+  } while (0)
+
+#define HR_FAILED_RET(hr,func,ret) HR_FAILED_AND(hr,func,return ret)
+
+#define HR_FAILED_GOTO(hr,func,where) HR_FAILED_AND(hr,func,res = FALSE; goto where)
+
 /* Device role enum property */
 typedef enum
 {