wasapi2: Rewrite plugin and implement audioringbuffer subclass

... based on MediaFoundation work queue API.

By this commit, wasapi2 plugin will make use of pull mode scheduling
with audioringbuffer subclass.
There are several drawbacks of audiosrc/audiosink subclassing
(not audiobasesrc/audiobasesink) for WASAPI API, which are:
* audiosrc/audiosink classes try to set high priority to
  read/write thread via MMCSS (Multimedia Class Scheduler Service)
  but it's not allowed in case of UWP application.
  In order to use MMCSS in UWP, application should use MediaFoundation
  work queue indirectly.
  Since audiosrc/audiosink scheduling model is not compatible with
  MediaFoundation's work queue model, audioringbuffer subclassing
  is required.
* WASAPI capture device might report larger packet size than expected
  (i.e., larger frames we can read than expected frame size per period).
  In any case, the application should drain all such packets at that moment.
  In order to handle the case, wasapi/wasapi2 plugins were making use of
  GstAdapter which is obviously sub-optimal because it requires additional
  memory allocation and copy.
  By implementing audioringbuffer subclassing, we can avoid such inefficiency.

In this commit, all the device read/write operations will be moved
to newly implemented wasapi2ringbuffer class and
existing wasapi2client class will take care of device enumeration
and activation parts only.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/2306>
This commit is contained in:
Seungha Yang 2021-05-10 20:45:28 +09:00
parent 4b42671c99
commit a8ec40c850
13 changed files with 1631 additions and 1686 deletions

File diff suppressed because it is too large Load diff

View file

@ -39,50 +39,16 @@ GType gst_wasapi2_client_device_class_get_type (void);
G_DECLARE_FINAL_TYPE (GstWasapi2Client,
gst_wasapi2_client, GST, WASAPI2_CLIENT, GstObject);
GstCaps * gst_wasapi2_client_get_caps (GstWasapi2Client * client);
HRESULT gst_wasapi2_client_open (GstWasapi2Client * client,
GstAudioRingBufferSpec * spec,
GstAudioRingBuffer * buf);
HRESULT gst_wasapi2_client_start (GstWasapi2Client * client);
HRESULT gst_wasapi2_client_stop (GstWasapi2Client * client);
HRESULT gst_wasapi2_client_read (GstWasapi2Client * client,
gpointer data,
guint length,
guint * read_length);
HRESULT gst_wasapi2_client_write (GstWasapi2Client * client,
gpointer data,
guint length,
guint * write_length);
HRESULT gst_wasapi2_client_delay (GstWasapi2Client * client,
guint32 * delay);
HRESULT gst_wasapi2_client_set_mute (GstWasapi2Client * client,
gboolean mute);
HRESULT gst_wasapi2_client_get_mute (GstWasapi2Client * client,
gboolean * mute);
HRESULT gst_wasapi2_client_set_volume (GstWasapi2Client * client,
gfloat volume);
HRESULT gst_wasapi2_client_get_volume (GstWasapi2Client * client,
gfloat * volume);
gboolean gst_wasapi2_client_ensure_activation (GstWasapi2Client * client);
GstWasapi2Client * gst_wasapi2_client_new (GstWasapi2ClientDeviceClass device_class,
gboolean low_latency,
gint device_index,
const gchar * device_id,
gpointer dispatcher);
G_DEFINE_AUTOPTR_CLEANUP_FUNC (GstWasapi2Client, gst_object_unref)
gboolean gst_wasapi2_client_ensure_activation (GstWasapi2Client * client);
IAudioClient * gst_wasapi2_client_get_handle (GstWasapi2Client * client);
GstCaps * gst_wasapi2_client_get_caps (GstWasapi2Client * client);
G_END_DECLS

View file

@ -183,7 +183,7 @@ gst_wasapi2_device_provider_probe_internal (GstWasapi2DeviceProvider * self,
gchar *device_id = NULL;
gchar *device_name = NULL;
client = gst_wasapi2_client_new (client_class, FALSE, i, NULL, NULL);
client = gst_wasapi2_client_new (client_class, i, NULL, NULL);
if (!client)
return;

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,55 @@
/* GStreamer
* Copyright (C) 2021 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_WASAPI2_RING_BUFFER_H__
#define __GST_WASAPI2_RING_BUFFER_H__
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include "gstwasapi2client.h"
G_BEGIN_DECLS
#define GST_TYPE_WASAPI2_RING_BUFFER (gst_wasapi2_ring_buffer_get_type())
/* GstWasapi2RingBuffer: GstAudioRingBuffer subclass that performs the
 * WASAPI device read/write operations (pull-mode scheduling) */
G_DECLARE_FINAL_TYPE (GstWasapi2RingBuffer, gst_wasapi2_ring_buffer,
GST, WASAPI2_RING_BUFFER, GstAudioRingBuffer);
/* Creates a new ring buffer for the given device class (render or capture).
 * @device_class: render (sink) or capture (src) device selection
 * @low_latency: request the minimum possible device period
 * @device_id: target device identifier, or NULL for the default device
 * @dispatcher: UWP core dispatcher handle, or NULL for desktop apps
 * @name: GstObject name to assign to the new ring buffer
 * Returns: (transfer full): a newly created #GstAudioRingBuffer */
GstAudioRingBuffer * gst_wasapi2_ring_buffer_new (GstWasapi2ClientDeviceClass device_class,
gboolean low_latency,
const gchar *device_id,
gpointer dispatcher,
const gchar * name);
/* Returns the device caps; may block until device activation has finished
 * (UWP async activation) -- see callers' get_caps implementations.
 * Returns: (transfer full) (nullable): caps of the underlying device */
GstCaps * gst_wasapi2_ring_buffer_get_caps (GstWasapi2RingBuffer * buf);
/* Mute/volume accessors operate on the underlying audio session.
 * Each returns an HRESULT; callers check with FAILED()/gst_wasapi2_result() */
HRESULT gst_wasapi2_ring_buffer_set_mute (GstWasapi2RingBuffer * buf,
gboolean mute);
/* Out parameter @mute receives the current mute state on success */
HRESULT gst_wasapi2_ring_buffer_get_mute (GstWasapi2RingBuffer * buf,
gboolean * mute);
/* @volume is a linear scale; callers clamp to [0.0, 1.0] before calling */
HRESULT gst_wasapi2_ring_buffer_set_volume (GstWasapi2RingBuffer * buf,
gfloat volume);
/* Out parameter @volume receives the current volume on success */
HRESULT gst_wasapi2_ring_buffer_get_volume (GstWasapi2RingBuffer * buf,
gfloat * volume);
G_END_DECLS
#endif /* __GST_WASAPI2_RING_BUFFER_H__ */

View file

@ -31,11 +31,11 @@
*
* ## Example pipelines
* |[
* gst-launch-1.0 -v audiotestsrc samplesperbuffer=160 ! wasapi2sink
* gst-launch-1.0 -v audiotestsrc samplesperbuffer=160 ! wasapi2sink
* ]| Generate 20 ms buffers and render to the default audio device.
*
* |[
* gst-launch-1.0 -v audiotestsrc samplesperbuffer=160 ! wasapi2sink low-latency=true
* gst-launch-1.0 -v audiotestsrc samplesperbuffer=160 ! wasapi2sink low-latency=true
* ]| Same as above, but with the minimum possible latency
*
*/
@ -45,7 +45,7 @@
#include "gstwasapi2sink.h"
#include "gstwasapi2util.h"
#include "gstwasapi2client.h"
#include "gstwasapi2ringbuffer.h"
GST_DEBUG_CATEGORY_STATIC (gst_wasapi2_sink_debug);
#define GST_CAT_DEFAULT gst_wasapi2_sink_debug
@ -59,9 +59,6 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
#define DEFAULT_MUTE FALSE
#define DEFAULT_VOLUME 1.0
#define GST_WASAPI2_SINK_LOCK(s) g_mutex_lock(&(s)->lock)
#define GST_WASAPI2_SINK_UNLOCK(s) g_mutex_unlock(&(s)->lock)
enum
{
PROP_0,
@ -74,11 +71,7 @@ enum
struct _GstWasapi2Sink
{
GstAudioSink parent;
GstWasapi2Client *client;
GstCaps *cached_caps;
gboolean started;
GstAudioBaseSink parent;
/* properties */
gchar *device_id;
@ -89,30 +82,21 @@ struct _GstWasapi2Sink
gboolean mute_changed;
gboolean volume_changed;
/* to protect audioclient from set/get property */
GMutex lock;
};
static void gst_wasapi2_sink_dispose (GObject * object);
static void gst_wasapi2_sink_finalize (GObject * object);
static void gst_wasapi2_sink_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_wasapi2_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_wasapi2_sink_change_state (GstElement *
element, GstStateChange transition);
static GstCaps *gst_wasapi2_sink_get_caps (GstBaseSink * bsink,
GstCaps * filter);
static gboolean gst_wasapi2_sink_prepare (GstAudioSink * asink,
GstAudioRingBufferSpec * spec);
static gboolean gst_wasapi2_sink_unprepare (GstAudioSink * asink);
static gboolean gst_wasapi2_sink_open (GstAudioSink * asink);
static gboolean gst_wasapi2_sink_close (GstAudioSink * asink);
static gint gst_wasapi2_sink_write (GstAudioSink * asink,
gpointer data, guint length);
static guint gst_wasapi2_sink_delay (GstAudioSink * asink);
static void gst_wasapi2_sink_reset (GstAudioSink * asink);
static GstAudioRingBuffer *gst_wasapi2_sink_create_ringbuffer (GstAudioBaseSink
* sink);
static void gst_wasapi2_sink_set_mute (GstWasapi2Sink * self, gboolean mute);
static gboolean gst_wasapi2_sink_get_mute (GstWasapi2Sink * self);
@ -120,7 +104,8 @@ static void gst_wasapi2_sink_set_volume (GstWasapi2Sink * self, gdouble volume);
static gdouble gst_wasapi2_sink_get_volume (GstWasapi2Sink * self);
#define gst_wasapi2_sink_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstWasapi2Sink, gst_wasapi2_sink, GST_TYPE_AUDIO_SINK,
G_DEFINE_TYPE_WITH_CODE (GstWasapi2Sink, gst_wasapi2_sink,
GST_TYPE_AUDIO_BASE_SINK,
G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL));
static void
@ -129,9 +114,9 @@ gst_wasapi2_sink_class_init (GstWasapi2SinkClass * klass)
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseSinkClass *basesink_class = GST_BASE_SINK_CLASS (klass);
GstAudioSinkClass *audiosink_class = GST_AUDIO_SINK_CLASS (klass);
GstAudioBaseSinkClass *audiobasesink_class =
GST_AUDIO_BASE_SINK_CLASS (klass);
gobject_class->dispose = gst_wasapi2_sink_dispose;
gobject_class->finalize = gst_wasapi2_sink_finalize;
gobject_class->set_property = gst_wasapi2_sink_set_property;
gobject_class->get_property = gst_wasapi2_sink_get_property;
@ -184,15 +169,13 @@ gst_wasapi2_sink_class_init (GstWasapi2SinkClass * klass)
"Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>, "
"Seungha Yang <seungha@centricular.com>");
element_class->change_state =
GST_DEBUG_FUNCPTR (gst_wasapi2_sink_change_state);
basesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_get_caps);
audiosink_class->prepare = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_prepare);
audiosink_class->unprepare = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_unprepare);
audiosink_class->open = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_open);
audiosink_class->close = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_close);
audiosink_class->write = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_write);
audiosink_class->delay = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_delay);
audiosink_class->reset = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_reset);
audiobasesink_class->create_ringbuffer =
GST_DEBUG_FUNCPTR (gst_wasapi2_sink_create_ringbuffer);
GST_DEBUG_CATEGORY_INIT (gst_wasapi2_sink_debug, "wasapi2sink",
0, "Windows audio session API sink");
@ -204,21 +187,6 @@ gst_wasapi2_sink_init (GstWasapi2Sink * self)
self->low_latency = DEFAULT_LOW_LATENCY;
self->mute = DEFAULT_MUTE;
self->volume = DEFAULT_VOLUME;
g_mutex_init (&self->lock);
}
static void
gst_wasapi2_sink_dispose (GObject * object)
{
GstWasapi2Sink *self = GST_WASAPI2_SINK (object);
GST_WASAPI2_SINK_LOCK (self);
gst_clear_object (&self->client);
gst_clear_caps (&self->cached_caps);
GST_WASAPI2_SINK_UNLOCK (self);
G_OBJECT_CLASS (parent_class)->dispose (object);
}
static void
@ -227,7 +195,6 @@ gst_wasapi2_sink_finalize (GObject * object)
GstWasapi2Sink *self = GST_WASAPI2_SINK (object);
g_free (self->device_id);
g_mutex_clear (&self->lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
@ -286,30 +253,59 @@ gst_wasapi2_sink_get_property (GObject * object, guint prop_id,
}
}
static GstStateChangeReturn
gst_wasapi2_sink_change_state (GstElement * element, GstStateChange transition)
{
GstWasapi2Sink *self = GST_WASAPI2_SINK (element);
GstAudioBaseSink *asink = GST_AUDIO_BASE_SINK_CAST (element);
switch (transition) {
case GST_STATE_CHANGE_READY_TO_PAUSED:
/* If we have pending volume/mute values to set, do here */
GST_OBJECT_LOCK (self);
if (asink->ringbuffer) {
GstWasapi2RingBuffer *ringbuffer =
GST_WASAPI2_RING_BUFFER (asink->ringbuffer);
if (self->volume_changed) {
gst_wasapi2_ring_buffer_set_volume (ringbuffer, self->volume);
self->volume_changed = FALSE;
}
if (self->mute_changed) {
gst_wasapi2_ring_buffer_set_mute (ringbuffer, self->mute);
self->mute_changed = FALSE;
}
}
GST_OBJECT_UNLOCK (self);
break;
default:
break;
}
return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
}
static GstCaps *
gst_wasapi2_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
{
GstWasapi2Sink *self = GST_WASAPI2_SINK (bsink);
GstAudioBaseSink *asink = GST_AUDIO_BASE_SINK_CAST (bsink);
GstCaps *caps = NULL;
/* In case of UWP, device activation might not be finished yet */
if (self->client && !gst_wasapi2_client_ensure_activation (self->client)) {
GST_ELEMENT_ERROR (self, RESOURCE, OPEN_WRITE, (NULL),
("Failed to activate device"));
return NULL;
GST_OBJECT_LOCK (bsink);
if (asink->ringbuffer) {
GstWasapi2RingBuffer *ringbuffer =
GST_WASAPI2_RING_BUFFER (asink->ringbuffer);
gst_object_ref (ringbuffer);
GST_OBJECT_UNLOCK (bsink);
/* Get caps might be able to block if device is not activated yet */
caps = gst_wasapi2_ring_buffer_get_caps (ringbuffer);
} else {
GST_OBJECT_UNLOCK (bsink);
}
if (self->client)
caps = gst_wasapi2_client_get_caps (self->client);
/* store one caps here so that we can return device caps even if
* audioclient was closed due to unprepare() */
if (!self->cached_caps && caps)
self->cached_caps = gst_caps_ref (caps);
if (!caps && self->cached_caps)
caps = gst_caps_ref (self->cached_caps);
if (!caps)
caps = gst_pad_get_pad_template_caps (bsink->sinkpad);
@ -320,238 +316,81 @@ gst_wasapi2_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
caps = filtered;
}
GST_DEBUG_OBJECT (self, "returning caps %" GST_PTR_FORMAT, caps);
GST_DEBUG_OBJECT (bsink, "returning caps %" GST_PTR_FORMAT, caps);
return caps;
}
static gboolean
gst_wasapi2_sink_open_unlocked (GstAudioSink * asink)
static GstAudioRingBuffer *
gst_wasapi2_sink_create_ringbuffer (GstAudioBaseSink * sink)
{
GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
GstWasapi2Sink *self = GST_WASAPI2_SINK (sink);
GstAudioRingBuffer *ringbuffer;
gchar *name;
gst_clear_object (&self->client);
name = g_strdup_printf ("%s-ringbuffer", GST_OBJECT_NAME (sink));
self->client =
gst_wasapi2_client_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER,
self->low_latency, -1, self->device_id, self->dispatcher);
ringbuffer =
gst_wasapi2_ring_buffer_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER,
self->low_latency, self->device_id, self->dispatcher, name);
if (!self->client)
return FALSE;
g_free (name);
return TRUE;
}
static gboolean
gst_wasapi2_sink_open (GstAudioSink * asink)
{
GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
gboolean ret;
GST_DEBUG_OBJECT (self, "Opening device");
GST_WASAPI2_SINK_LOCK (self);
ret = gst_wasapi2_sink_open_unlocked (asink);
GST_WASAPI2_SINK_UNLOCK (self);
if (!ret) {
GST_ELEMENT_ERROR (self, RESOURCE, OPEN_WRITE, (NULL),
("Failed to open device"));
return FALSE;
}
return TRUE;
}
static gboolean
gst_wasapi2_sink_close (GstAudioSink * asink)
{
GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
GST_WASAPI2_SINK_LOCK (self);
gst_clear_object (&self->client);
gst_clear_caps (&self->cached_caps);
self->started = FALSE;
GST_WASAPI2_SINK_UNLOCK (self);
return TRUE;
}
static gboolean
gst_wasapi2_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
{
GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
GstAudioBaseSink *bsink = GST_AUDIO_BASE_SINK (asink);
gboolean ret = FALSE;
HRESULT hr;
GST_WASAPI2_SINK_LOCK (self);
if (!self->client && !gst_wasapi2_sink_open_unlocked (asink)) {
GST_ERROR_OBJECT (self, "No audio client was configured");
goto done;
}
if (!gst_wasapi2_client_ensure_activation (self->client)) {
GST_ERROR_OBJECT (self, "Couldn't activate audio device");
goto done;
}
hr = gst_wasapi2_client_open (self->client, spec, bsink->ringbuffer);
if (!gst_wasapi2_result (hr)) {
GST_ERROR_OBJECT (self, "Couldn't open audio client");
goto done;
}
/* Set mute and volume here again, maybe when "mute" property was set, audioclient
* might not be configured at that moment */
if (self->mute_changed) {
gst_wasapi2_client_set_mute (self->client, self->mute);
self->mute_changed = FALSE;
}
if (self->volume_changed) {
gst_wasapi2_client_set_volume (self->client, self->volume);
self->volume_changed = FALSE;
}
/* Will start IAudioClient on the first write request */
self->started = FALSE;
ret = TRUE;
done:
GST_WASAPI2_SINK_UNLOCK (self);
return ret;
}
static gboolean
gst_wasapi2_sink_unprepare (GstAudioSink * asink)
{
GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
self->started = FALSE;
/* Will reopen device later prepare() */
GST_WASAPI2_SINK_LOCK (self);
if (self->client) {
gst_wasapi2_client_stop (self->client);
gst_clear_object (&self->client);
}
GST_WASAPI2_SINK_UNLOCK (self);
return TRUE;
}
static gint
gst_wasapi2_sink_write (GstAudioSink * asink, gpointer data, guint length)
{
GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
HRESULT hr;
guint write_len = 0;
if (!self->client) {
GST_ERROR_OBJECT (self, "No audio client was configured");
return -1;
}
if (!self->started) {
HRESULT hr = gst_wasapi2_client_start (self->client);
if (!gst_wasapi2_result (hr)) {
GST_ERROR_OBJECT (self, "Failed to re-start client");
return -1;
}
self->started = TRUE;
}
hr = gst_wasapi2_client_write (self->client, data, length, &write_len);
if (!gst_wasapi2_result (hr)) {
GST_ERROR_OBJECT (self, "Failed to write");
return -1;
}
return (gint) write_len;
}
static guint
gst_wasapi2_sink_delay (GstAudioSink * asink)
{
GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
guint32 delay;
HRESULT hr;
if (!self->client)
return 0;
hr = gst_wasapi2_client_delay (self->client, &delay);
if (!gst_wasapi2_result (hr)) {
GST_WARNING_OBJECT (self, "Failed to get delay");
return 0;
}
return delay;
}
static void
gst_wasapi2_sink_reset (GstAudioSink * asink)
{
GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
GST_INFO_OBJECT (self, "reset called");
self->started = FALSE;
if (!self->client)
return;
gst_wasapi2_client_stop (self->client);
return ringbuffer;
}
static void
gst_wasapi2_sink_set_mute (GstWasapi2Sink * self, gboolean mute)
{
GST_WASAPI2_SINK_LOCK (self);
GstAudioBaseSink *bsink = GST_AUDIO_BASE_SINK_CAST (self);
HRESULT hr;
GST_OBJECT_LOCK (self);
self->mute = mute;
self->mute_changed = TRUE;
if (self->client) {
HRESULT hr = gst_wasapi2_client_set_mute (self->client, mute);
if (bsink->ringbuffer) {
GstWasapi2RingBuffer *ringbuffer =
GST_WASAPI2_RING_BUFFER (bsink->ringbuffer);
hr = gst_wasapi2_ring_buffer_set_mute (ringbuffer, mute);
if (FAILED (hr)) {
GST_INFO_OBJECT (self, "Couldn't set mute");
} else {
self->mute_changed = FALSE;
}
} else {
GST_DEBUG_OBJECT (self, "audio client is not configured yet");
}
GST_WASAPI2_SINK_UNLOCK (self);
GST_OBJECT_UNLOCK (self);
}
static gboolean
gst_wasapi2_sink_get_mute (GstWasapi2Sink * self)
{
GstAudioBaseSink *bsink = GST_AUDIO_BASE_SINK_CAST (self);
gboolean mute;
HRESULT hr;
GST_WASAPI2_SINK_LOCK (self);
GST_OBJECT_LOCK (self);
mute = self->mute;
if (self->client) {
HRESULT hr = gst_wasapi2_client_get_mute (self->client, &mute);
if (bsink->ringbuffer) {
GstWasapi2RingBuffer *ringbuffer =
GST_WASAPI2_RING_BUFFER (bsink->ringbuffer);
hr = gst_wasapi2_ring_buffer_get_mute (ringbuffer, &mute);
if (FAILED (hr)) {
GST_INFO_OBJECT (self, "Couldn't get mute state");
GST_INFO_OBJECT (self, "Couldn't get mute");
} else {
self->mute = mute;
}
} else {
GST_DEBUG_OBJECT (self, "audio client is not configured yet");
}
GST_WASAPI2_SINK_UNLOCK (self);
GST_OBJECT_UNLOCK (self);
return mute;
}
@ -559,7 +398,10 @@ gst_wasapi2_sink_get_mute (GstWasapi2Sink * self)
static void
gst_wasapi2_sink_set_volume (GstWasapi2Sink * self, gdouble volume)
{
GST_WASAPI2_SINK_LOCK (self);
GstAudioBaseSink *bsink = GST_AUDIO_BASE_SINK_CAST (self);
HRESULT hr;
GST_OBJECT_LOCK (self);
self->volume = volume;
/* clip volume value */
@ -567,42 +409,47 @@ gst_wasapi2_sink_set_volume (GstWasapi2Sink * self, gdouble volume)
self->volume = MIN (1.0, self->volume);
self->volume_changed = TRUE;
if (self->client) {
HRESULT hr =
gst_wasapi2_client_set_volume (self->client, (gfloat) self->volume);
if (bsink->ringbuffer) {
GstWasapi2RingBuffer *ringbuffer =
GST_WASAPI2_RING_BUFFER (bsink->ringbuffer);
hr = gst_wasapi2_ring_buffer_set_volume (ringbuffer, (gfloat) self->volume);
if (FAILED (hr)) {
GST_INFO_OBJECT (self, "Couldn't set volume");
} else {
self->volume_changed = FALSE;
}
} else {
GST_DEBUG_OBJECT (self, "audio client is not configured yet");
}
GST_WASAPI2_SINK_UNLOCK (self);
GST_OBJECT_UNLOCK (self);
}
static gdouble
gst_wasapi2_sink_get_volume (GstWasapi2Sink * self)
{
GstAudioBaseSink *bsink = GST_AUDIO_BASE_SINK_CAST (self);
gfloat volume;
HRESULT hr;
GST_WASAPI2_SINK_LOCK (self);
GST_OBJECT_LOCK (self);
volume = (gfloat) self->volume;
if (self->client) {
HRESULT hr = gst_wasapi2_client_get_volume (self->client, &volume);
if (bsink->ringbuffer) {
GstWasapi2RingBuffer *ringbuffer =
GST_WASAPI2_RING_BUFFER (bsink->ringbuffer);
hr = gst_wasapi2_ring_buffer_get_volume (ringbuffer, &volume);
if (FAILED (hr)) {
GST_INFO_OBJECT (self, "Couldn't get volume");
GST_INFO_OBJECT (self, "Couldn't set volume");
} else {
self->volume = volume;
}
} else {
GST_DEBUG_OBJECT (self, "audio client is not configured yet");
}
GST_WASAPI2_SINK_UNLOCK (self);
GST_OBJECT_UNLOCK (self);
volume = MAX (0.0, volume);
volume = MIN (1.0, volume);

View file

@ -27,7 +27,7 @@ G_BEGIN_DECLS
#define GST_TYPE_WASAPI2_SINK (gst_wasapi2_sink_get_type ())
G_DECLARE_FINAL_TYPE (GstWasapi2Sink,
gst_wasapi2_sink, GST, WASAPI2_SINK, GstAudioSink);
gst_wasapi2_sink, GST, WASAPI2_SINK, GstAudioBaseSink);
G_END_DECLS

View file

@ -43,7 +43,7 @@
#include "gstwasapi2src.h"
#include "gstwasapi2util.h"
#include "gstwasapi2client.h"
#include "gstwasapi2ringbuffer.h"
GST_DEBUG_CATEGORY_STATIC (gst_wasapi2_src_debug);
#define GST_CAT_DEFAULT gst_wasapi2_src_debug
@ -57,9 +57,6 @@ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
#define DEFAULT_MUTE FALSE
#define DEFAULT_VOLUME 1.0
#define GST_WASAPI2_SRC_LOCK(s) g_mutex_lock(&(s)->lock)
#define GST_WASAPI2_SRC_UNLOCK(s) g_mutex_unlock(&(s)->lock)
enum
{
PROP_0,
@ -72,11 +69,7 @@ enum
struct _GstWasapi2Src
{
GstAudioSrc parent;
GstWasapi2Client *client;
GstCaps *cached_caps;
gboolean started;
GstAudioBaseSrc parent;
/* properties */
gchar *device_id;
@ -87,29 +80,20 @@ struct _GstWasapi2Src
gboolean mute_changed;
gboolean volume_changed;
/* to protect audioclient from set/get property */
GMutex lock;
};
static void gst_wasapi2_src_dispose (GObject * object);
static void gst_wasapi2_src_finalize (GObject * object);
static void gst_wasapi2_src_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_wasapi2_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstCaps *gst_wasapi2_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter);
static GstStateChangeReturn gst_wasapi2_src_change_state (GstElement *
element, GstStateChange transition);
static gboolean gst_wasapi2_src_open (GstAudioSrc * asrc);
static gboolean gst_wasapi2_src_close (GstAudioSrc * asrc);
static gboolean gst_wasapi2_src_prepare (GstAudioSrc * asrc,
GstAudioRingBufferSpec * spec);
static gboolean gst_wasapi2_src_unprepare (GstAudioSrc * asrc);
static guint gst_wasapi2_src_read (GstAudioSrc * asrc, gpointer data,
guint length, GstClockTime * timestamp);
static guint gst_wasapi2_src_delay (GstAudioSrc * asrc);
static void gst_wasapi2_src_reset (GstAudioSrc * asrc);
static GstCaps *gst_wasapi2_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter);
static GstAudioRingBuffer *gst_wasapi2_src_create_ringbuffer (GstAudioBaseSrc *
src);
static void gst_wasapi2_src_set_mute (GstWasapi2Src * self, gboolean mute);
static gboolean gst_wasapi2_src_get_mute (GstWasapi2Src * self);
@ -117,7 +101,8 @@ static void gst_wasapi2_src_set_volume (GstWasapi2Src * self, gdouble volume);
static gdouble gst_wasapi2_src_get_volume (GstWasapi2Src * self);
#define gst_wasapi2_src_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstWasapi2Src, gst_wasapi2_src, GST_TYPE_AUDIO_SRC,
G_DEFINE_TYPE_WITH_CODE (GstWasapi2Src, gst_wasapi2_src,
GST_TYPE_AUDIO_BASE_SRC,
G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL));
static void
@ -126,9 +111,8 @@ gst_wasapi2_src_class_init (GstWasapi2SrcClass * klass)
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseSrcClass *basesrc_class = GST_BASE_SRC_CLASS (klass);
GstAudioSrcClass *audiosrc_class = GST_AUDIO_SRC_CLASS (klass);
GstAudioBaseSrcClass *audiobasesrc_class = GST_AUDIO_BASE_SRC_CLASS (klass);
gobject_class->dispose = gst_wasapi2_src_dispose;
gobject_class->finalize = gst_wasapi2_src_finalize;
gobject_class->set_property = gst_wasapi2_src_set_property;
gobject_class->get_property = gst_wasapi2_src_get_property;
@ -181,15 +165,13 @@ gst_wasapi2_src_class_init (GstWasapi2SrcClass * klass)
"Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>, "
"Seungha Yang <seungha@centricular.com>");
element_class->change_state =
GST_DEBUG_FUNCPTR (gst_wasapi2_src_change_state);
basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_wasapi2_src_get_caps);
audiosrc_class->open = GST_DEBUG_FUNCPTR (gst_wasapi2_src_open);
audiosrc_class->close = GST_DEBUG_FUNCPTR (gst_wasapi2_src_close);
audiosrc_class->read = GST_DEBUG_FUNCPTR (gst_wasapi2_src_read);
audiosrc_class->prepare = GST_DEBUG_FUNCPTR (gst_wasapi2_src_prepare);
audiosrc_class->unprepare = GST_DEBUG_FUNCPTR (gst_wasapi2_src_unprepare);
audiosrc_class->delay = GST_DEBUG_FUNCPTR (gst_wasapi2_src_delay);
audiosrc_class->reset = GST_DEBUG_FUNCPTR (gst_wasapi2_src_reset);
audiobasesrc_class->create_ringbuffer =
GST_DEBUG_FUNCPTR (gst_wasapi2_src_create_ringbuffer);
GST_DEBUG_CATEGORY_INIT (gst_wasapi2_src_debug, "wasapi2src",
0, "Windows audio session API source");
@ -201,21 +183,6 @@ gst_wasapi2_src_init (GstWasapi2Src * self)
self->mute = DEFAULT_MUTE;
self->volume = DEFAULT_VOLUME;
self->low_latency = DEFAULT_LOW_LATENCY;
g_mutex_init (&self->lock);
}
static void
gst_wasapi2_src_dispose (GObject * object)
{
GstWasapi2Src *self = GST_WASAPI2_SRC (object);
GST_WASAPI2_SRC_LOCK (self);
gst_clear_object (&self->client);
gst_clear_caps (&self->cached_caps);
GST_WASAPI2_SRC_UNLOCK (self);
G_OBJECT_CLASS (parent_class)->dispose (object);
}
static void
@ -224,7 +191,6 @@ gst_wasapi2_src_finalize (GObject * object)
GstWasapi2Src *self = GST_WASAPI2_SRC (object);
g_free (self->device_id);
g_mutex_clear (&self->lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
@ -283,30 +249,60 @@ gst_wasapi2_src_get_property (GObject * object, guint prop_id,
}
}
static GstStateChangeReturn
gst_wasapi2_src_change_state (GstElement * element, GstStateChange transition)
{
GstWasapi2Src *self = GST_WASAPI2_SRC (element);
GstAudioBaseSrc *asrc = GST_AUDIO_BASE_SRC_CAST (element);
switch (transition) {
case GST_STATE_CHANGE_READY_TO_PAUSED:
/* If we have pending volume/mute values to set, do here */
GST_OBJECT_LOCK (self);
if (asrc->ringbuffer) {
GstWasapi2RingBuffer *ringbuffer =
GST_WASAPI2_RING_BUFFER (asrc->ringbuffer);
if (self->volume_changed) {
gst_wasapi2_ring_buffer_set_volume (ringbuffer, self->volume);
self->volume_changed = FALSE;
}
if (self->mute_changed) {
gst_wasapi2_ring_buffer_set_mute (ringbuffer, self->mute);
self->mute_changed = FALSE;
}
}
GST_OBJECT_UNLOCK (self);
break;
default:
break;
}
return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
}
static GstCaps *
gst_wasapi2_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
{
GstWasapi2Src *self = GST_WASAPI2_SRC (bsrc);
GstAudioBaseSrc *asrc = GST_AUDIO_BASE_SRC_CAST (bsrc);
GstCaps *caps = NULL;
/* In case of UWP, device activation might not be finished yet */
if (self->client && !gst_wasapi2_client_ensure_activation (self->client)) {
GST_ELEMENT_ERROR (self, RESOURCE, OPEN_WRITE, (NULL),
("Failed to activate device"));
return NULL;
GST_OBJECT_LOCK (bsrc);
if (asrc->ringbuffer) {
GstWasapi2RingBuffer *ringbuffer =
GST_WASAPI2_RING_BUFFER (asrc->ringbuffer);
gst_object_ref (ringbuffer);
GST_OBJECT_UNLOCK (bsrc);
/* Get caps might be able to block if device is not activated yet */
caps = gst_wasapi2_ring_buffer_get_caps (ringbuffer);
gst_object_unref (ringbuffer);
} else {
GST_OBJECT_UNLOCK (bsrc);
}
if (self->client)
caps = gst_wasapi2_client_get_caps (self->client);
/* store one caps here so that we can return device caps even if
* audioclient was closed due to unprepare() */
if (!self->cached_caps && caps)
self->cached_caps = gst_caps_ref (caps);
if (!caps && self->cached_caps)
caps = gst_caps_ref (self->cached_caps);
if (!caps)
caps = gst_pad_get_pad_template_caps (bsrc->srcpad);
@ -317,239 +313,79 @@ gst_wasapi2_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
caps = filtered;
}
GST_DEBUG_OBJECT (self, "returning caps %" GST_PTR_FORMAT, caps);
GST_DEBUG_OBJECT (bsrc, "returning caps %" GST_PTR_FORMAT, caps);
return caps;
}
static gboolean
gst_wasapi2_src_open_unlocked (GstAudioSrc * asrc)
static GstAudioRingBuffer *
gst_wasapi2_src_create_ringbuffer (GstAudioBaseSrc * src)
{
GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
GstWasapi2Src *self = GST_WASAPI2_SRC (src);
GstAudioRingBuffer *ringbuffer;
gchar *name;
gst_clear_object (&self->client);
name = g_strdup_printf ("%s-ringbuffer", GST_OBJECT_NAME (src));
self->client =
gst_wasapi2_client_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE,
self->low_latency, -1, self->device_id, self->dispatcher);
ringbuffer =
gst_wasapi2_ring_buffer_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE,
self->low_latency, self->device_id, self->dispatcher, name);
g_free (name);
if (!self->client)
return FALSE;
return TRUE;
}
static gboolean
gst_wasapi2_src_open (GstAudioSrc * asrc)
{
GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
gboolean ret;
GST_DEBUG_OBJECT (self, "Opening device");
GST_WASAPI2_SRC_LOCK (self);
ret = gst_wasapi2_src_open_unlocked (asrc);
GST_WASAPI2_SRC_UNLOCK (self);
if (!ret) {
GST_ELEMENT_ERROR (self, RESOURCE, OPEN_READ, (NULL),
("Failed to open device"));
return FALSE;
}
return TRUE;
}
static gboolean
gst_wasapi2_src_close (GstAudioSrc * asrc)
{
GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
GST_WASAPI2_SRC_LOCK (self);
gst_clear_object (&self->client);
gst_clear_caps (&self->cached_caps);
self->started = FALSE;
GST_WASAPI2_SRC_UNLOCK (self);
return TRUE;
}
static gboolean
gst_wasapi2_src_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
{
GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC (asrc);
gboolean ret = FALSE;
HRESULT hr;
GST_WASAPI2_SRC_LOCK (self);
if (!self->client && !gst_wasapi2_src_open_unlocked (asrc)) {
GST_ERROR_OBJECT (self, "No audio client was configured");
goto done;
}
if (!gst_wasapi2_client_ensure_activation (self->client)) {
GST_ERROR_OBJECT (self, "Couldn't activate audio device");
goto done;
}
hr = gst_wasapi2_client_open (self->client, spec, bsrc->ringbuffer);
if (!gst_wasapi2_result (hr)) {
GST_ERROR_OBJECT (self, "Couldn't open audio client");
goto done;
}
/* Set mute and volume here again, maybe when "mute" property was set, audioclient
* might not be configured at that moment */
if (self->mute_changed) {
gst_wasapi2_client_set_mute (self->client, self->mute);
self->mute_changed = FALSE;
}
if (self->volume_changed) {
gst_wasapi2_client_set_volume (self->client, self->volume);
self->volume_changed = FALSE;
}
/* Will start IAudioClient on the first read request */
self->started = FALSE;
ret = TRUE;
done:
GST_WASAPI2_SRC_UNLOCK (self);
return ret;
}
static gboolean
gst_wasapi2_src_unprepare (GstAudioSrc * asrc)
{
GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
self->started = FALSE;
/* Will reopen device later prepare() */
GST_WASAPI2_SRC_LOCK (self);
if (self->client) {
gst_wasapi2_client_stop (self->client);
gst_clear_object (&self->client);
}
GST_WASAPI2_SRC_UNLOCK (self);
return TRUE;
}
static guint
gst_wasapi2_src_read (GstAudioSrc * asrc, gpointer data, guint length,
GstClockTime * timestamp)
{
GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
HRESULT hr;
guint read_len = 0;
if (!self->client) {
GST_ERROR_OBJECT (self, "No audio client was configured");
return -1;
}
if (!self->started) {
HRESULT hr = gst_wasapi2_client_start (self->client);
if (!gst_wasapi2_result (hr)) {
GST_ERROR_OBJECT (self, "Failed to re-start client");
return -1;
}
self->started = TRUE;
}
hr = gst_wasapi2_client_read (self->client, data, length, &read_len);
if (!gst_wasapi2_result (hr)) {
GST_WARNING_OBJECT (self, "Failed to read data");
return -1;
}
return read_len;
}
/* Report the current device delay in frames, or 0 when unavailable */
static guint
gst_wasapi2_src_delay (GstAudioSrc * asrc)
{
  GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
  guint32 delay_frames;
  HRESULT hr;

  if (self->client == NULL)
    return 0;

  hr = gst_wasapi2_client_delay (self->client, &delay_frames);
  if (!gst_wasapi2_result (hr)) {
    GST_WARNING_OBJECT (self, "Failed to get delay");
    return 0;
  }

  return delay_frames;
}
/* Stop the device and mark the stream as not-started so that the next
 * read request restarts it.
 *
 * Fix: removed a stray `return ringbuffer;` statement (diff artifact) —
 * this function is void and `ringbuffer` is not declared here. */
static void
gst_wasapi2_src_reset (GstAudioSrc * asrc)
{
  GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);

  GST_DEBUG_OBJECT (self, "reset called");

  self->started = FALSE;

  if (!self->client)
    return;

  gst_wasapi2_client_stop (self->client);
}
/* Cache the "mute" property and apply it to the ring buffer when one is
 * configured. If the audio client isn't configured yet, mute_changed stays
 * TRUE so the cached value is re-applied once the device is opened.
 *
 * Fix: the block contained both the old client-based path and the new
 * ringbuffer-based path merged together by the diff (two different locks
 * taken, unbalanced braces, unused client call). Keep only the
 * ringbuffer-based version guarded by the object lock. */
static void
gst_wasapi2_src_set_mute (GstWasapi2Src * self, gboolean mute)
{
  GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC_CAST (self);
  HRESULT hr;

  GST_OBJECT_LOCK (self);

  self->mute = mute;
  self->mute_changed = TRUE;

  if (bsrc->ringbuffer) {
    GstWasapi2RingBuffer *ringbuffer =
        GST_WASAPI2_RING_BUFFER (bsrc->ringbuffer);

    hr = gst_wasapi2_ring_buffer_set_mute (ringbuffer, mute);
    if (FAILED (hr)) {
      GST_INFO_OBJECT (self, "Couldn't set mute");
    } else {
      /* applied to the device, no need to re-apply later */
      self->mute_changed = FALSE;
    }
  } else {
    GST_DEBUG_OBJECT (self, "audio client is not configured yet");
  }

  GST_OBJECT_UNLOCK (self);
}
/* Return the current mute state, querying the ring buffer when configured
 * and falling back to the cached property value otherwise.
 *
 * Fix: the block contained both the old client-based path and the new
 * ringbuffer-based path merged together by the diff (double locking,
 * duplicated log statements, missing brace). Keep only the ringbuffer
 * version guarded by the object lock. */
static gboolean
gst_wasapi2_src_get_mute (GstWasapi2Src * self)
{
  GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC_CAST (self);
  gboolean mute;
  HRESULT hr;

  GST_OBJECT_LOCK (self);

  mute = self->mute;

  if (bsrc->ringbuffer) {
    GstWasapi2RingBuffer *ringbuffer =
        GST_WASAPI2_RING_BUFFER (bsrc->ringbuffer);

    hr = gst_wasapi2_ring_buffer_get_mute (ringbuffer, &mute);
    if (FAILED (hr)) {
      GST_INFO_OBJECT (self, "Couldn't get mute");
    } else {
      /* keep the cached value in sync with the device */
      self->mute = mute;
    }
  } else {
    GST_DEBUG_OBJECT (self, "audio client is not configured yet");
  }

  GST_OBJECT_UNLOCK (self);

  return mute;
}
@ -557,7 +393,10 @@ gst_wasapi2_src_get_mute (GstWasapi2Src * self)
/* Cache the "volume" property (clipped to [0.0, 1.0]) and apply it to the
 * ring buffer when one is configured. If the audio client isn't configured
 * yet, volume_changed stays TRUE so the value is re-applied on open.
 *
 * Fix: the block contained both the old client-based path and the new
 * ringbuffer-based path merged together by the diff, plus an embedded
 * diff hunk-header line. Keep only the ringbuffer version guarded by the
 * object lock. */
static void
gst_wasapi2_src_set_volume (GstWasapi2Src * self, gdouble volume)
{
  GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC_CAST (self);
  HRESULT hr;

  GST_OBJECT_LOCK (self);

  self->volume = volume;
  /* clip volume value */
  self->volume = MAX (0.0, self->volume);
  self->volume = MIN (1.0, self->volume);
  self->volume_changed = TRUE;

  if (bsrc->ringbuffer) {
    GstWasapi2RingBuffer *ringbuffer =
        GST_WASAPI2_RING_BUFFER (bsrc->ringbuffer);

    hr = gst_wasapi2_ring_buffer_set_volume (ringbuffer,
        (gfloat) self->volume);
    if (FAILED (hr)) {
      GST_INFO_OBJECT (self, "Couldn't set volume");
    } else {
      /* applied to the device, no need to re-apply later */
      self->volume_changed = FALSE;
    }
  } else {
    GST_DEBUG_OBJECT (self, "audio client is not configured yet");
  }

  GST_OBJECT_UNLOCK (self);
}
/* Return the current volume, querying the ring buffer when configured and
 * falling back to the cached property value otherwise. The result is
 * clipped to [0.0, 1.0].
 *
 * Fix: the block contained both the old client-based path and the new
 * ringbuffer-based path merged together by the diff, a mismatched
 * "Couldn't set volume" log message in the getter, and was truncated
 * before its return statement. Reconstructed the ringbuffer version;
 * the trailing `return volume;` is reconstructed from the visible
 * clipping logic — TODO confirm against the upstream file. */
static gdouble
gst_wasapi2_src_get_volume (GstWasapi2Src * self)
{
  GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC_CAST (self);
  gfloat volume;
  HRESULT hr;

  GST_OBJECT_LOCK (self);

  volume = (gfloat) self->volume;

  if (bsrc->ringbuffer) {
    GstWasapi2RingBuffer *ringbuffer =
        GST_WASAPI2_RING_BUFFER (bsrc->ringbuffer);

    hr = gst_wasapi2_ring_buffer_get_volume (ringbuffer, &volume);
    if (FAILED (hr)) {
      GST_INFO_OBJECT (self, "Couldn't get volume");
    } else {
      /* keep the cached value in sync with the device */
      self->volume = volume;
    }
  } else {
    GST_DEBUG_OBJECT (self, "audio client is not configured yet");
  }

  GST_OBJECT_UNLOCK (self);

  volume = MAX (0.0, volume);
  volume = MIN (1.0, volume);

  return volume;
}

View file

@ -27,7 +27,7 @@ G_BEGIN_DECLS
#define GST_TYPE_WASAPI2_SRC (gst_wasapi2_src_get_type ())
G_DECLARE_FINAL_TYPE (GstWasapi2Src,
gst_wasapi2_src, GST, WASAPI2_SRC, GstAudioSrc);
gst_wasapi2_src, GST, WASAPI2_SRC, GstAudioBaseSrc);
G_END_DECLS

View file

@ -25,10 +25,56 @@
#include "gstwasapi2util.h"
#include <audioclient.h>
#include <mmdeviceapi.h>
GST_DEBUG_CATEGORY_EXTERN (gst_wasapi2_debug);
#define GST_CAT_DEFAULT gst_wasapi2_debug
/* Desktop only defines */
#ifndef KSAUDIO_SPEAKER_MONO
#define KSAUDIO_SPEAKER_MONO (SPEAKER_FRONT_CENTER)
#endif
#ifndef KSAUDIO_SPEAKER_1POINT1
#define KSAUDIO_SPEAKER_1POINT1 (SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY)
#endif
#ifndef KSAUDIO_SPEAKER_STEREO
#define KSAUDIO_SPEAKER_STEREO (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT)
#endif
#ifndef KSAUDIO_SPEAKER_2POINT1
#define KSAUDIO_SPEAKER_2POINT1 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_LOW_FREQUENCY)
#endif
#ifndef KSAUDIO_SPEAKER_3POINT0
#define KSAUDIO_SPEAKER_3POINT0 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER)
#endif
#ifndef KSAUDIO_SPEAKER_3POINT1
#define KSAUDIO_SPEAKER_3POINT1 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY)
#endif
#ifndef KSAUDIO_SPEAKER_QUAD
#define KSAUDIO_SPEAKER_QUAD (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT)
#endif
#define KSAUDIO_SPEAKER_SURROUND (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
SPEAKER_FRONT_CENTER | SPEAKER_BACK_CENTER)
#ifndef KSAUDIO_SPEAKER_5POINT0
#define KSAUDIO_SPEAKER_5POINT0 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | \
SPEAKER_SIDE_LEFT | SPEAKER_SIDE_RIGHT)
#endif
#define KSAUDIO_SPEAKER_5POINT1 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | \
SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT)
#ifndef KSAUDIO_SPEAKER_7POINT0
#define KSAUDIO_SPEAKER_7POINT0 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | \
SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT | \
SPEAKER_SIDE_LEFT | SPEAKER_SIDE_RIGHT)
#endif
#ifndef KSAUDIO_SPEAKER_7POINT1
#define KSAUDIO_SPEAKER_7POINT1 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | \
SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT | \
SPEAKER_FRONT_LEFT_OF_CENTER | SPEAKER_FRONT_RIGHT_OF_CENTER)
#endif
/* *INDENT-OFF* */
static struct
{
@ -57,6 +103,27 @@ static struct
{SPEAKER_TOP_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER},
{SPEAKER_TOP_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT}
};
/* Fallback WASAPI channel masks indexed by channel count (0..8), used when
 * a stream with more than 2 channels reports no dwChannelMask */
static DWORD default_ch_masks[] = {
  0,
  KSAUDIO_SPEAKER_MONO,
  /* 2ch */
  KSAUDIO_SPEAKER_STEREO,
  /* 2.1ch */
  /* KSAUDIO_SPEAKER_3POINT0 ? */
  KSAUDIO_SPEAKER_2POINT1,
  /* 4ch */
  /* KSAUDIO_SPEAKER_3POINT1 or KSAUDIO_SPEAKER_SURROUND ? */
  KSAUDIO_SPEAKER_QUAD,
  /* 5ch */
  KSAUDIO_SPEAKER_5POINT0,
  /* 5.1ch */
  KSAUDIO_SPEAKER_5POINT1,
  /* 7ch */
  KSAUDIO_SPEAKER_7POINT0,
  /* 7.1ch */
  KSAUDIO_SPEAKER_7POINT1,
};
/* *INDENT-ON* */
static const gchar *
@ -192,3 +259,173 @@ _gst_wasapi2_result (HRESULT hr, GstDebugCategory * cat, const gchar * file,
return SUCCEEDED (hr);
#endif
}
/* Initialize every entry of @position to the non-positional marker */
static void
gst_wasapi_util_channel_position_all_none (guint channels,
    GstAudioChannelPosition * position)
{
  guint idx;

  for (idx = 0; idx < channels; idx++)
    position[idx] = GST_AUDIO_CHANNEL_POSITION_NONE;
}
/* Convert a WAVEFORMATEX channel layout to a GStreamer channel bitmask.
 *
 * Returns the GStreamer channel mask (0 when all channels end up
 * non-positional or on failure). When @out_position is non-NULL, it
 * receives a newly allocated array of format->nChannels positions
 * (caller frees with g_free()), or NULL on early failure.
 *
 * Fix: corrected the error-message typo "To may channels" ->
 * "Too many channels". */
guint64
gst_wasapi2_util_waveformatex_to_channel_mask (WAVEFORMATEX * format,
    GstAudioChannelPosition ** out_position)
{
  guint i, ch;
  guint64 mask = 0;
  GstAudioChannelPosition *pos = NULL;
  WORD nChannels = 0;
  DWORD dwChannelMask = 0;

  nChannels = format->nChannels;
  if (format->wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
    WAVEFORMATEXTENSIBLE *extensible = (WAVEFORMATEXTENSIBLE *) format;
    dwChannelMask = extensible->dwChannelMask;
  }

  if (out_position)
    *out_position = NULL;

  /* No mask reported for a multi-channel stream: fall back to a default
   * layout for this channel count */
  if (nChannels > 2 && !dwChannelMask) {
    GST_WARNING ("Unknown channel mask value for %d channel stream", nChannels);

    if (nChannels >= G_N_ELEMENTS (default_ch_masks)) {
      GST_ERROR ("Too many channels %d", nChannels);
      return 0;
    }

    dwChannelMask = default_ch_masks[nChannels];
  }

  pos = g_new (GstAudioChannelPosition, nChannels);
  gst_wasapi_util_channel_position_all_none (nChannels, pos);

  /* Too many channels, have to assume that they are all non-positional */
  if (nChannels > G_N_ELEMENTS (wasapi_to_gst_pos)) {
    GST_INFO ("Got too many (%i) channels, assuming non-positional", nChannels);
    goto out;
  }

  /* Too many bits in the channel mask, and the bits don't match nChannels */
  if (dwChannelMask >> (G_N_ELEMENTS (wasapi_to_gst_pos) + 1) != 0) {
    GST_WARNING ("Too many bits in channel mask (%lu), assuming "
        "non-positional", dwChannelMask);
    goto out;
  }

  /* Map WASAPI's channel mask to GStreamer's channel mask and positions.
   * If the no. of bits in the mask > nChannels, we will ignore the extra. */
  for (i = 0, ch = 0; i < G_N_ELEMENTS (wasapi_to_gst_pos) && ch < nChannels;
      i++) {
    if (!(dwChannelMask & wasapi_to_gst_pos[i].wasapi_pos))
      /* no match, try next */
      continue;

    mask |= G_GUINT64_CONSTANT (1) << wasapi_to_gst_pos[i].gst_pos;
    pos[ch++] = wasapi_to_gst_pos[i].gst_pos;
  }

  /* XXX: Warn if some channel masks couldn't be mapped? */

  GST_DEBUG ("Converted WASAPI mask 0x%" G_GINT64_MODIFIER "x -> 0x%"
      G_GINT64_MODIFIER "x", (guint64) dwChannelMask, (guint64) mask);

out:
  if (out_position) {
    *out_position = pos;
  } else {
    g_free (pos);
  }

  return mask;
}
/* Map a WAVEFORMATEX description to the matching GStreamer raw audio
 * format string, or NULL when the format is not representable */
const gchar *
gst_wasapi2_util_waveformatex_to_audio_format (WAVEFORMATEX * format)
{
  GstAudioFormat audio_fmt = GST_AUDIO_FORMAT_UNKNOWN;

  if (format->wFormatTag == WAVE_FORMAT_PCM) {
    /* Plain signed little-endian integer PCM */
    audio_fmt = gst_audio_format_build_integer (TRUE, G_LITTLE_ENDIAN,
        format->wBitsPerSample, format->wBitsPerSample);
  } else if (format->wFormatTag == WAVE_FORMAT_IEEE_FLOAT) {
    if (format->wBitsPerSample == 32)
      audio_fmt = GST_AUDIO_FORMAT_F32LE;
    else if (format->wBitsPerSample == 64)
      audio_fmt = GST_AUDIO_FORMAT_F64LE;
  } else if (format->wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
    WAVEFORMATEXTENSIBLE *ex = (WAVEFORMATEXTENSIBLE *) format;

    if (IsEqualGUID (&ex->SubFormat, &KSDATAFORMAT_SUBTYPE_PCM)) {
      /* Container size may exceed the number of valid bits */
      audio_fmt = gst_audio_format_build_integer (TRUE, G_LITTLE_ENDIAN,
          format->wBitsPerSample, ex->Samples.wValidBitsPerSample);
    } else if (IsEqualGUID (&ex->SubFormat, &KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)) {
      if (format->wBitsPerSample == 32 &&
          ex->Samples.wValidBitsPerSample == 32)
        audio_fmt = GST_AUDIO_FORMAT_F32LE;
      else if (format->wBitsPerSample == 64 &&
          ex->Samples.wValidBitsPerSample == 64)
        audio_fmt = GST_AUDIO_FORMAT_F64LE;
    }
  }

  if (audio_fmt == GST_AUDIO_FORMAT_UNKNOWN)
    return NULL;

  return gst_audio_format_to_string (audio_fmt);
}
/* Build caps for the device mix format described by @format, constrained
 * by @template_caps. On success *out_caps holds new caps (and
 * *out_positions the channel position array via the channel-mask helper);
 * returns FALSE for format tags we don't handle */
gboolean
gst_wasapi2_util_parse_waveformatex (WAVEFORMATEX * format,
    GstCaps * template_caps, GstCaps ** out_caps,
    GstAudioChannelPosition ** out_positions)
{
  const gchar *format_str;
  guint64 channel_mask;

  *out_caps = NULL;

  /* TODO: handle SPDIF and other encoded formats */

  switch (format->wFormatTag) {
    case WAVE_FORMAT_PCM:
    case WAVE_FORMAT_IEEE_FLOAT:
    case WAVE_FORMAT_EXTENSIBLE:
      break;
    default:
      /* Unhandled format tag */
      return FALSE;
  }

  /* WASAPI can only tell us one canonical mix format that it will accept.
   * The alternative is calling IsFormatSupported on all combinations of
   * formats. Instead, it's simpler and faster to require conversion inside
   * gstreamer */
  format_str = gst_wasapi2_util_waveformatex_to_audio_format (format);
  if (format_str == NULL)
    return FALSE;

  *out_caps = gst_caps_copy (template_caps);

  channel_mask = gst_wasapi2_util_waveformatex_to_channel_mask (format,
      out_positions);

  gst_caps_set_simple (*out_caps,
      "format", G_TYPE_STRING, format_str,
      "channels", G_TYPE_INT, format->nChannels,
      "rate", G_TYPE_INT, format->nSamplesPerSec, NULL);

  if (channel_mask) {
    gst_caps_set_simple (*out_caps,
        "channel-mask", GST_TYPE_BITMASK, channel_mask, NULL);
  }

  return TRUE;
}

View file

@ -23,6 +23,7 @@
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include <windows.h>
#include <initguid.h>
#include <audioclient.h>
G_BEGIN_DECLS
@ -34,6 +35,13 @@ G_BEGIN_DECLS
"rate = " GST_AUDIO_RATE_RANGE ", " \
"channels = " GST_AUDIO_CHANNELS_RANGE
#define GST_WASAPI2_CLEAR_COM(obj) G_STMT_START { \
if (obj) { \
(obj)->Release (); \
(obj) = NULL; \
} \
} G_STMT_END
gboolean _gst_wasapi2_result (HRESULT hr,
GstDebugCategory * cat,
const gchar * file,
@ -43,6 +51,16 @@ gboolean _gst_wasapi2_result (HRESULT hr,
#define gst_wasapi2_result(result) \
_gst_wasapi2_result (result, GST_CAT_DEFAULT, __FILE__, GST_FUNCTION, __LINE__)
guint64 gst_wasapi2_util_waveformatex_to_channel_mask (WAVEFORMATEX * format,
GstAudioChannelPosition ** out_position);
const gchar * gst_wasapi2_util_waveformatex_to_audio_format (WAVEFORMATEX * format);
gboolean gst_wasapi2_util_parse_waveformatex (WAVEFORMATEX * format,
GstCaps * template_caps,
GstCaps ** out_caps,
GstAudioChannelPosition ** out_positions);
G_END_DECLS
#endif /* __GST_WASAPI_UTIL_H__ */

View file

@ -4,6 +4,7 @@ wasapi2_sources = [
'gstwasapi2util.c',
'gstwasapi2client.cpp',
'gstwasapi2device.c',
'gstwasapi2ringbuffer.cpp',
'plugin.c',
]
@ -26,7 +27,8 @@ ole32_dep = cc.find_library('ole32', required : get_option('wasapi2'))
ksuser_dep = cc.find_library('ksuser', required : get_option('wasapi2'))
runtimeobject_dep = cc.find_library('runtimeobject', required : get_option('wasapi2'))
mmdeviceapi_dep = cc.find_library('mmdevapi', required : get_option('wasapi2'))
wasapi2_dep = [ole32_dep, ksuser_dep, runtimeobject_dep, mmdeviceapi_dep]
mfplat_dep = cc.find_library('mfplat', required : get_option('wasapi2'))
wasapi2_dep = [ole32_dep, ksuser_dep, runtimeobject_dep, mmdeviceapi_dep, mfplat_dep]
extra_args = []
foreach dep: wasapi2_dep

View file

@ -26,14 +26,23 @@
#include "gstwasapi2sink.h"
#include "gstwasapi2src.h"
#include "gstwasapi2device.h"
#include "gstwasapi2util.h"
#include <mfapi.h>
GST_DEBUG_CATEGORY (gst_wasapi2_debug);
GST_DEBUG_CATEGORY (gst_wasapi2_client_debug);
/* GDestroyNotify attached to the plugin object: shuts down the
 * MediaFoundation runtime started by MFStartup() in plugin_init().
 * @data is the dummy payload passed to g_object_set_data_full() and
 * is unused */
static void
plugin_deinit (gpointer data)
{
  MFShutdown ();
}
static gboolean
plugin_init (GstPlugin * plugin)
{
GstRank rank = GST_RANK_SECONDARY;
HRESULT hr;
/**
* plugin-wasapi2:
@ -41,6 +50,11 @@ plugin_init (GstPlugin * plugin)
* Since: 1.18
*/
hr = MFStartup (MF_VERSION, MFSTARTUP_NOSOCKET);
if (!gst_wasapi2_result (hr)) {
GST_WARNING ("MFStartup failure, hr: 0x%x", hr);
return TRUE;
}
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP) && !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
/* If we are building for UWP, wasapi2 plugin should have the highest rank */
rank = GST_RANK_PRIMARY + 1;
@ -56,6 +70,10 @@ plugin_init (GstPlugin * plugin)
gst_device_provider_register (plugin, "wasapi2deviceprovider",
rank, GST_TYPE_WASAPI2_DEVICE_PROVIDER);
g_object_set_data_full (G_OBJECT (plugin),
"plugin-wasapi2-shutdown", "shutdown-data",
(GDestroyNotify) plugin_deinit);
return TRUE;
}