gstreamer/sys/wasapi2/gstwasapi2src.c
Seungha Yang b7abd34285 wasapi2src: Add support for loopback recording
... and add various device error handling.

This loopback implementation is functionally identical to that of wasapisrc.
When it is enabled, wasapi2src will read data from the render device instead of
the capture device.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/2311>
2021-06-09 22:15:06 +09:00

/*
 * Copyright (C) 2008 Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>
 * Copyright (C) 2018 Centricular Ltd.
 *   Author: Nirbheek Chauhan <nirbheek@centricular.com>
 * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
/**
 * SECTION:element-wasapi2src
 * @title: wasapi2src
 *
 * Provides audio capture from the Windows Audio Session API available with
 * Windows 10.
 *
 * ## Example pipelines
 * |[
 * gst-launch-1.0 -v wasapi2src ! fakesink
 * ]| Capture from the default audio device and render to fakesink.
 *
 * |[
 * gst-launch-1.0 -v wasapi2src low-latency=true ! fakesink
 * ]| Capture from the default audio device with the minimum possible latency and render to fakesink.
 *
 */
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "gstwasapi2src.h"
#include "gstwasapi2util.h"
#include "gstwasapi2ringbuffer.h"
GST_DEBUG_CATEGORY_STATIC (gst_wasapi2_src_debug);
#define GST_CAT_DEFAULT gst_wasapi2_src_debug
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_WASAPI2_STATIC_CAPS));
#define DEFAULT_LOW_LATENCY FALSE
#define DEFAULT_MUTE FALSE
#define DEFAULT_VOLUME 1.0
#define DEFAULT_LOOPBACK FALSE
enum
{
PROP_0,
PROP_DEVICE,
PROP_LOW_LATENCY,
PROP_MUTE,
PROP_VOLUME,
PROP_DISPATCHER,
PROP_LOOPBACK,
};
struct _GstWasapi2Src
{
  GstAudioBaseSrc parent;

  /* properties */
  gchar *device_id;
  gboolean low_latency;
  gboolean mute;
  gdouble volume;
  gpointer dispatcher;
  gboolean loopback;

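  /* volume/mute requests which could not be applied to the ringbuffer yet */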
  gboolean mute_changed;
  gboolean volume_changed;
};

static void gst_wasapi2_src_finalize (GObject * object);
static void gst_wasapi2_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_wasapi2_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static GstStateChangeReturn gst_wasapi2_src_change_state (GstElement *
    element, GstStateChange transition);

static GstCaps *gst_wasapi2_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter);

static GstAudioRingBuffer *gst_wasapi2_src_create_ringbuffer (GstAudioBaseSrc *
    src);

static void gst_wasapi2_src_set_mute (GstWasapi2Src * self, gboolean mute);
static gboolean gst_wasapi2_src_get_mute (GstWasapi2Src * self);
static void gst_wasapi2_src_set_volume (GstWasapi2Src * self, gdouble volume);
static gdouble gst_wasapi2_src_get_volume (GstWasapi2Src * self);

#define gst_wasapi2_src_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstWasapi2Src, gst_wasapi2_src,
    GST_TYPE_AUDIO_BASE_SRC,
    G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL));

static void
gst_wasapi2_src_class_init (GstWasapi2SrcClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseSrcClass *basesrc_class = GST_BASE_SRC_CLASS (klass);
  GstAudioBaseSrcClass *audiobasesrc_class = GST_AUDIO_BASE_SRC_CLASS (klass);

  gobject_class->finalize = gst_wasapi2_src_finalize;
  gobject_class->set_property = gst_wasapi2_src_set_property;
  gobject_class->get_property = gst_wasapi2_src_get_property;

  g_object_class_install_property (gobject_class, PROP_DEVICE,
      g_param_spec_string ("device", "Device",
          "WASAPI capture device as a GUID string (a render device when "
          "loopback recording is enabled)",
          NULL, GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
          G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_LOW_LATENCY,
      g_param_spec_boolean ("low-latency", "Low latency",
          "Optimize all settings for lowest latency. Always safe to enable.",
          DEFAULT_LOW_LATENCY, GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
          G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_MUTE,
      g_param_spec_boolean ("mute", "Mute", "Mute state of this stream",
          DEFAULT_MUTE, GST_PARAM_MUTABLE_PLAYING | G_PARAM_READWRITE |
          G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_VOLUME,
      g_param_spec_double ("volume", "Volume", "Volume of this stream",
          0.0, 1.0, DEFAULT_VOLUME,
          GST_PARAM_MUTABLE_PLAYING | G_PARAM_READWRITE |
          G_PARAM_STATIC_STRINGS));

  /**
   * GstWasapi2Src:dispatcher:
   *
   * ICoreDispatcher COM object used for activating device from UI thread.
   *
   * Since: 1.18
   */
  g_object_class_install_property (gobject_class, PROP_DISPATCHER,
      g_param_spec_pointer ("dispatcher", "Dispatcher",
          "ICoreDispatcher COM object to use. In order for the application "
          "to ask for audio device permission, device activation should run "
          "on the UI thread via ICoreDispatcher. This element will increase "
          "the reference count of the given ICoreDispatcher and release it "
          "after use, so the caller does not need to perform any additional "
          "reference count management",
          GST_PARAM_MUTABLE_READY | G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
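
  /* A minimal usage sketch for the "dispatcher" property; "ui_dispatcher"
   * below is assumed to be an ICoreDispatcher pointer obtained from the
   * application's UI thread:
   *
   *   GstElement *src = gst_element_factory_make ("wasapi2src", NULL);
   *   g_object_set (src, "dispatcher", (gpointer) ui_dispatcher, NULL);
   */
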
  /**
   * GstWasapi2Src:loopback:
   *
   * Open render device for loopback recording
   *
   * Since: 1.20
   */
  g_object_class_install_property (gobject_class, PROP_LOOPBACK,
      g_param_spec_boolean ("loopback", "Loopback recording",
          "Open render device for loopback recording", DEFAULT_LOOPBACK,
          GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
          G_PARAM_STATIC_STRINGS));

  gst_element_class_add_static_pad_template (element_class, &src_template);

  gst_element_class_set_static_metadata (element_class, "Wasapi2Src",
      "Source/Audio/Hardware",
      "Stream audio from an audio capture device through WASAPI",
      "Nirbheek Chauhan <nirbheek@centricular.com>, "
      "Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>, "
      "Seungha Yang <seungha@centricular.com>");

  element_class->change_state =
      GST_DEBUG_FUNCPTR (gst_wasapi2_src_change_state);

  basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_wasapi2_src_get_caps);

  audiobasesrc_class->create_ringbuffer =
      GST_DEBUG_FUNCPTR (gst_wasapi2_src_create_ringbuffer);

  GST_DEBUG_CATEGORY_INIT (gst_wasapi2_src_debug, "wasapi2src",
      0, "Windows audio session API source");
}

static void
gst_wasapi2_src_init (GstWasapi2Src * self)
{
  self->mute = DEFAULT_MUTE;
  self->volume = DEFAULT_VOLUME;
  self->low_latency = DEFAULT_LOW_LATENCY;
  self->loopback = DEFAULT_LOOPBACK;
}

static void
gst_wasapi2_src_finalize (GObject * object)
{
  GstWasapi2Src *self = GST_WASAPI2_SRC (object);

  g_free (self->device_id);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

static void
gst_wasapi2_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstWasapi2Src *self = GST_WASAPI2_SRC (object);

  switch (prop_id) {
    case PROP_DEVICE:
      g_free (self->device_id);
      self->device_id = g_value_dup_string (value);
      break;
    case PROP_LOW_LATENCY:
      self->low_latency = g_value_get_boolean (value);
      break;
    case PROP_MUTE:
      gst_wasapi2_src_set_mute (self, g_value_get_boolean (value));
      break;
    case PROP_VOLUME:
      gst_wasapi2_src_set_volume (self, g_value_get_double (value));
      break;
    case PROP_DISPATCHER:
      self->dispatcher = g_value_get_pointer (value);
      break;
    case PROP_LOOPBACK:
      self->loopback = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_wasapi2_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstWasapi2Src *self = GST_WASAPI2_SRC (object);

  switch (prop_id) {
    case PROP_DEVICE:
      g_value_set_string (value, self->device_id);
      break;
    case PROP_LOW_LATENCY:
      g_value_set_boolean (value, self->low_latency);
      break;
    case PROP_MUTE:
      g_value_set_boolean (value, gst_wasapi2_src_get_mute (self));
      break;
    case PROP_VOLUME:
      g_value_set_double (value, gst_wasapi2_src_get_volume (self));
      break;
    case PROP_LOOPBACK:
      g_value_set_boolean (value, self->loopback);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static GstStateChangeReturn
gst_wasapi2_src_change_state (GstElement * element, GstStateChange transition)
{
  GstWasapi2Src *self = GST_WASAPI2_SRC (element);
  GstAudioBaseSrc *asrc = GST_AUDIO_BASE_SRC_CAST (element);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* If we have pending volume/mute values to set, do so here */
      GST_OBJECT_LOCK (self);
      if (asrc->ringbuffer) {
        GstWasapi2RingBuffer *ringbuffer =
            GST_WASAPI2_RING_BUFFER (asrc->ringbuffer);

        if (self->volume_changed) {
          gst_wasapi2_ring_buffer_set_volume (ringbuffer, self->volume);
          self->volume_changed = FALSE;
        }

        if (self->mute_changed) {
          gst_wasapi2_ring_buffer_set_mute (ringbuffer, self->mute);
          self->mute_changed = FALSE;
        }
      }
      GST_OBJECT_UNLOCK (self);
      break;
    default:
      break;
  }

  return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
}

static GstCaps *
gst_wasapi2_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
{
  GstAudioBaseSrc *asrc = GST_AUDIO_BASE_SRC_CAST (bsrc);
  GstCaps *caps = NULL;

  GST_OBJECT_LOCK (bsrc);
  if (asrc->ringbuffer) {
    GstWasapi2RingBuffer *ringbuffer =
        GST_WASAPI2_RING_BUFFER (asrc->ringbuffer);

    gst_object_ref (ringbuffer);
    GST_OBJECT_UNLOCK (bsrc);

    /* Getting caps might block if the device is not activated yet */
    caps = gst_wasapi2_ring_buffer_get_caps (ringbuffer);
    gst_object_unref (ringbuffer);
  } else {
    GST_OBJECT_UNLOCK (bsrc);
  }

  if (!caps)
    caps = gst_pad_get_pad_template_caps (bsrc->srcpad);

  if (filter) {
    GstCaps *filtered =
        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = filtered;
  }

  GST_DEBUG_OBJECT (bsrc, "returning caps %" GST_PTR_FORMAT, caps);

  return caps;
}

static GstAudioRingBuffer *
gst_wasapi2_src_create_ringbuffer (GstAudioBaseSrc * src)
{
  GstWasapi2Src *self = GST_WASAPI2_SRC (src);
  GstAudioRingBuffer *ringbuffer;
  gchar *name;
  GstWasapi2ClientDeviceClass device_class =
      GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE;

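  /* Loopback recording opens a render (playback) device and captures the
   * audio that is being played back, instead of reading from a capture
   * device */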
  if (self->loopback)
    device_class = GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE;

  name = g_strdup_printf ("%s-ringbuffer", GST_OBJECT_NAME (src));

  ringbuffer =
      gst_wasapi2_ring_buffer_new (device_class,
      self->low_latency, self->device_id, self->dispatcher, name);

  g_free (name);

  return ringbuffer;
}

static void
gst_wasapi2_src_set_mute (GstWasapi2Src * self, gboolean mute)
{
  GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC_CAST (self);
  HRESULT hr;

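  /* Cache the requested value and mark it as pending; if the ringbuffer is
   * not available yet, it will be applied during the READY->PAUSED
   * transition */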
  GST_OBJECT_LOCK (self);

  self->mute = mute;
  self->mute_changed = TRUE;

  if (bsrc->ringbuffer) {
    GstWasapi2RingBuffer *ringbuffer =
        GST_WASAPI2_RING_BUFFER (bsrc->ringbuffer);

    hr = gst_wasapi2_ring_buffer_set_mute (ringbuffer, mute);
    if (FAILED (hr)) {
      GST_INFO_OBJECT (self, "Couldn't set mute");
    } else {
      self->mute_changed = FALSE;
    }
  }

  GST_OBJECT_UNLOCK (self);
}

static gboolean
gst_wasapi2_src_get_mute (GstWasapi2Src * self)
{
  GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC_CAST (self);
  gboolean mute;
  HRESULT hr;

  GST_OBJECT_LOCK (self);

  mute = self->mute;

  if (bsrc->ringbuffer) {
    GstWasapi2RingBuffer *ringbuffer =
        GST_WASAPI2_RING_BUFFER (bsrc->ringbuffer);

    hr = gst_wasapi2_ring_buffer_get_mute (ringbuffer, &mute);
    if (FAILED (hr)) {
      GST_INFO_OBJECT (self, "Couldn't get mute");
    } else {
      self->mute = mute;
    }
  }

  GST_OBJECT_UNLOCK (self);

  return mute;
}

static void
gst_wasapi2_src_set_volume (GstWasapi2Src * self, gdouble volume)
{
  GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC_CAST (self);
  HRESULT hr;

  GST_OBJECT_LOCK (self);

  self->volume = volume;
  /* clip volume value */
  self->volume = MAX (0.0, self->volume);
  self->volume = MIN (1.0, self->volume);
  self->volume_changed = TRUE;

  if (bsrc->ringbuffer) {
    GstWasapi2RingBuffer *ringbuffer =
        GST_WASAPI2_RING_BUFFER (bsrc->ringbuffer);

    hr = gst_wasapi2_ring_buffer_set_volume (ringbuffer, (gfloat) self->volume);
    if (FAILED (hr)) {
      GST_INFO_OBJECT (self, "Couldn't set volume");
    } else {
      self->volume_changed = FALSE;
    }
  }

  GST_OBJECT_UNLOCK (self);
}

static gdouble
gst_wasapi2_src_get_volume (GstWasapi2Src * self)
{
  GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC_CAST (self);
  gfloat volume;
  HRESULT hr;

  GST_OBJECT_LOCK (self);

  volume = (gfloat) self->volume;

  if (bsrc->ringbuffer) {
    GstWasapi2RingBuffer *ringbuffer =
        GST_WASAPI2_RING_BUFFER (bsrc->ringbuffer);

    hr = gst_wasapi2_ring_buffer_get_volume (ringbuffer, &volume);
    if (FAILED (hr)) {
      GST_INFO_OBJECT (self, "Couldn't get volume");
    } else {
      self->volume = volume;
    }
  }

  GST_OBJECT_UNLOCK (self);

  volume = MAX (0.0, volume);
  volume = MIN (1.0, volume);

  return volume;
}