wpe: Implement audio support

The wpesrc bin now exposes "sometimes" audio src pads, one for every PCM audio
stream created by WPEWebKit.
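
For instance, a launch line along these lines can consume both the video pad and a
dynamically added audio pad (illustrative only: it relies on gst-launch's delayed
linking, and the "audio_0" pad name depends on the stream id WPE assigns at
runtime; the wpe.c example added below handles the pads via "pad-added" instead):

  gst-launch-1.0 wpesrc location="https://www.example.org" name=web \
      web.video ! queue ! glimagesink \
      web.audio_0 ! queue ! audioconvert ! audioresample ! autoaudiosink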

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/2252>
Philippe Normand 2019-12-08 11:49:20 +00:00 committed by GStreamer Marge Bot
parent cb4f6c877e
commit f4bc5c6c65
11 changed files with 561 additions and 30 deletions

ext/wpe/WPEThreadedView.cpp

@@ -286,7 +286,10 @@ WPEView::WPEView(WebKitWebContext* web_context, GstWpeVideoSrc* src, GstGLContex
wpe_view_backend_add_activity_state(wpeViewBackend, wpe_view_activity_state_visible | wpe_view_activity_state_focused | wpe_view_activity_state_in_window);
#endif
webkit.view = WEBKIT_WEB_VIEW(g_object_new(WEBKIT_TYPE_WEB_VIEW, "web-context", web_context, "backend", viewBackend, nullptr));
webkit.view = WEBKIT_WEB_VIEW(g_object_new(WEBKIT_TYPE_WEB_VIEW,
"web-context", web_context,
"backend", viewBackend,
nullptr));
g_signal_connect(webkit.view, "load-failed", G_CALLBACK(s_loadFailed), src);
g_signal_connect(webkit.view, "load-failed-with-tls-errors", G_CALLBACK(s_loadFailedWithTLSErrors), src);
@@ -517,6 +520,11 @@ void WPEView::setDrawBackground(gboolean drawsBackground)
webkit_web_view_set_background_color(webkit.view, &color);
}
void WPEView::registerAudioReceiver(const struct wpe_audio_receiver* audioReceiver, gpointer userData)
{
wpe_audio_register_receiver(audioReceiver, userData);
}
void WPEView::releaseImage(gpointer imagePointer)
{
s_view->dispatch([&]() {

ext/wpe/WPEThreadedView.h

@@ -23,6 +23,7 @@
#include <glib.h>
#include <gst/gl/gstglfuncs.h>
#include <gst/gl/egl/gstgldisplay_egl.h>
#include <wpe/extensions/audio.h>
#include <wpe/fdo.h>
#include <wpe/fdo-egl.h>
#include <wpe/webkit.h>
@@ -50,6 +51,9 @@ public:
void loadUri(const gchar*);
void loadData(GBytes*);
void setDrawBackground(gboolean);
void registerAudioReceiver(const struct wpe_audio_receiver*, gpointer);
GstEGLImage* image();
GstBuffer* buffer();
@@ -96,7 +100,7 @@ private:
struct wpe_view_backend_exportable_fdo* exportable;
int width;
int height;
} wpe { nullptr, 0, 0 };
} wpe { nullptr, 0, 0, };
struct {
gchar* uri;

ext/wpe/gstwpe-private.h (new file)

@@ -0,0 +1,24 @@
/* Copyright (C) <2018, 2019> Philippe Normand <philn@igalia.com>
* Copyright (C) <2018, 2019> Žan Doberšek <zdobersek@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#pragma once
#include <gst/gst.h>
void gst_wpe_video_src_register_audio_receiver(GstElement*, const struct wpe_audio_receiver*, gpointer);

ext/wpe/gstwpe.cpp

@@ -68,12 +68,14 @@
GST_DEBUG_CATEGORY (wpe_video_src_debug);
GST_DEBUG_CATEGORY (wpe_view_debug);
GST_DEBUG_CATEGORY (wpe_src_debug);
static gboolean
plugin_init (GstPlugin * plugin)
{
GST_DEBUG_CATEGORY_INIT (wpe_video_src_debug, "wpesrc", 0, "WPE Source");
GST_DEBUG_CATEGORY_INIT (wpe_video_src_debug, "wpevideosrc", 0, "WPE Video Source");
GST_DEBUG_CATEGORY_INIT (wpe_view_debug, "wpeview", 0, "WPE Threaded View");
GST_DEBUG_CATEGORY_INIT (wpe_src_debug, "wpesrc", 0, "WPE Source");
gboolean result = gst_element_register (plugin, "wpevideosrc", GST_RANK_NONE,
GST_TYPE_WPE_VIDEO_SRC);

ext/wpe/gstwpesrcbin.cpp

@@ -1,5 +1,5 @@
/* Copyright (C) <2018> Philippe Normand <philn@igalia.com>
* Copyright (C) <2018> Žan Doberšek <zdobersek@igalia.com>
/* Copyright (C) <2018, 2019> Philippe Normand <philn@igalia.com>
* Copyright (C) <2018, 2019> Žan Doberšek <zdobersek@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -21,20 +21,65 @@
* SECTION:element-wpesrc
* @title: wpesrc
*
* FIXME The wpesrc element is used to produce a video texture representing a
* web page rendered off-screen by WPE.
* The wpesrc element is used to produce a video texture representing a web page
* rendered off-screen by WPE.
*
* Starting from WPEBackend-FDO 1.6.x, software rendering support is available.
* This feature allows wpesrc to be used on machines without a GPU, and/or for
* testing purposes. To enable it, set the `LIBGL_ALWAYS_SOFTWARE=true`
* environment variable and make sure `video/x-raw, format=BGRA` caps are
* negotiated by the wpesrc element.
*
* Additionally, each audio stream created by WPE is exposed as a "sometimes" audio
* source pad.
*/
#include "gstwpesrcbin.h"
#include "gstwpevideosrc.h"
#include "gstwpe-private.h"
#include "WPEThreadedView.h"
#include <gst/allocators/allocators.h>
#include <gst/base/gstflowcombiner.h>
#include <wpe/extensions/audio.h>
G_DEFINE_TYPE (GstWpeAudioPad, gst_wpe_audio_pad, GST_TYPE_GHOST_PAD);
static void
gst_wpe_audio_pad_class_init (GstWpeAudioPadClass * klass)
{
}
static void
gst_wpe_audio_pad_init (GstWpeAudioPad * pad)
{
gst_audio_info_init (&pad->info);
pad->discont_pending = FALSE;
pad->buffer_time = GST_CLOCK_TIME_NONE;
}
static GstWpeAudioPad *
gst_wpe_audio_pad_new (const gchar * name)
{
GstWpeAudioPad *pad = GST_WPE_AUDIO_PAD (g_object_new (gst_wpe_audio_pad_get_type (),
"name", name, "direction", GST_PAD_SRC, NULL));
if (!gst_ghost_pad_construct (GST_GHOST_PAD (pad))) {
gst_object_unref (pad);
return NULL;
}
return pad;
}
struct _GstWpeSrc
{
GstBin parent;
GstAllocator *fd_allocator;
GstElement *video_src;
GHashTable *audio_src_pads;
GstFlowCombiner *flow_combiner;
};
enum
@@ -54,12 +99,15 @@ static guint gst_wpe_video_src_signals[LAST_SIGNAL] = { 0 };
static void gst_wpe_src_uri_handler_init (gpointer iface, gpointer data);
GST_DEBUG_CATEGORY_EXTERN (wpe_src_debug);
#define GST_CAT_DEFAULT wpe_src_debug
#define gst_wpe_src_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstWpeSrc, gst_wpe_src, GST_TYPE_BIN,
G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_wpe_src_uri_handler_init));
static GstStaticPadTemplate video_src_factory =
GST_STATIC_PAD_TEMPLATE ("video_src", GST_PAD_SRC, GST_PAD_ALWAYS,
GST_STATIC_PAD_TEMPLATE ("video", GST_PAD_SRC, GST_PAD_SOMETIMES,
GST_STATIC_CAPS ("video/x-raw(memory:GLMemory), "
"format = (string) RGBA, "
"width = " GST_VIDEO_SIZE_RANGE ", "
@@ -77,6 +125,166 @@ GST_STATIC_PAD_TEMPLATE ("video_src", GST_PAD_SRC, GST_PAD_ALWAYS,
#endif
));
static GstStaticPadTemplate audio_src_factory =
GST_STATIC_PAD_TEMPLATE ("audio_%u", GST_PAD_SRC, GST_PAD_SOMETIMES,
GST_STATIC_CAPS ( \
GST_AUDIO_CAPS_MAKE (GST_AUDIO_NE (F32)) ", layout=(string)interleaved; " \
GST_AUDIO_CAPS_MAKE (GST_AUDIO_NE (F64)) ", layout=(string)interleaved; " \
GST_AUDIO_CAPS_MAKE (GST_AUDIO_NE (S16)) ", layout=(string)interleaved" \
));
static GstFlowReturn
gst_wpe_src_chain_buffer (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstWpeSrc *src = GST_WPE_SRC (gst_object_get_parent (parent));
GstFlowReturn result, chain_result;
chain_result = gst_proxy_pad_chain_default (pad, GST_OBJECT_CAST (src), buffer);
result = gst_flow_combiner_update_pad_flow (src->flow_combiner, pad, chain_result);
gst_object_unref (src);
if (result == GST_FLOW_FLUSHING)
return chain_result;
return result;
}
static void
on_audio_receiver_handle_start(void* data, uint32_t id, int32_t channels, const char* format, int32_t sampleRate)
{
GstWpeSrc* src = GST_WPE_SRC (data);
GstWpeAudioPad *audio_pad;
GstPad *pad;
gchar *name;
GstEvent *stream_start;
GstSegment segment;
GstCaps *caps;
GST_DEBUG_OBJECT (src, "Exposing audio pad for stream %u", id);
name = g_strdup_printf ("audio_%u", id);
audio_pad = gst_wpe_audio_pad_new (name);
pad = GST_PAD_CAST (audio_pad);
g_free (name);
gst_pad_set_active (pad, TRUE);
gst_element_add_pad (GST_ELEMENT_CAST (src), pad);
gst_flow_combiner_add_pad (src->flow_combiner, pad);
name = gst_pad_create_stream_id_printf(pad, GST_ELEMENT_CAST (src), "%03u", id);
stream_start = gst_event_new_stream_start (name);
gst_pad_push_event (pad, stream_start);
g_free (name);
caps = gst_caps_new_simple ("audio/x-raw", "format", G_TYPE_STRING, format,
"rate", G_TYPE_INT, sampleRate,
"channels", G_TYPE_INT, channels,
"channel-mask", GST_TYPE_BITMASK, gst_audio_channel_get_fallback_mask (channels),
"layout", G_TYPE_STRING, "interleaved", NULL);
gst_audio_info_from_caps (&audio_pad->info, caps);
gst_pad_push_event (pad, gst_event_new_caps (caps));
gst_caps_unref (caps);
gst_segment_init (&segment, GST_FORMAT_TIME);
gst_pad_push_event (pad, gst_event_new_segment (&segment));
g_hash_table_insert (src->audio_src_pads, GUINT_TO_POINTER (id), audio_pad);
}
static void
on_audio_receiver_handle_packet(void* data, struct wpe_audio_packet_export* packet_export, uint32_t id, int32_t fd, uint32_t size)
{
GstWpeSrc* src = GST_WPE_SRC (data);
GstWpeAudioPad *audio_pad = GST_WPE_AUDIO_PAD (g_hash_table_lookup (src->audio_src_pads, GUINT_TO_POINTER (id)));
GstPad *pad = GST_PAD_CAST (audio_pad);
GstBuffer *buffer;
GstClock *clock;
g_return_if_fail (GST_IS_PAD (pad));
g_return_if_fail (fd >= 0);
GST_TRACE_OBJECT (pad, "Handling incoming audio packet");
buffer = gst_buffer_new ();
GstMemory *mem = gst_fd_allocator_alloc (src->fd_allocator, dup (fd), size, GST_FD_MEMORY_FLAG_KEEP_MAPPED);
gst_buffer_append_memory (buffer, mem);
gst_buffer_add_audio_meta (buffer, &audio_pad->info, size, NULL);
clock = gst_element_get_clock (GST_ELEMENT_CAST (src));
if (clock) {
GstClockTime now;
GstClockTime base_time = gst_element_get_base_time (GST_ELEMENT_CAST (src));
now = gst_clock_get_time (clock);
if (now > base_time)
now -= base_time;
else
now = 0;
gst_object_unref (clock);
audio_pad->buffer_time = now;
GST_BUFFER_DTS (buffer) = audio_pad->buffer_time;
}
GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
if (audio_pad->discont_pending) {
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
audio_pad->discont_pending = FALSE;
}
gst_flow_combiner_update_pad_flow (src->flow_combiner, pad, gst_pad_push (pad, buffer));
wpe_audio_packet_export_release (packet_export);
close (fd);
}
static void
on_audio_receiver_handle_stop(void* data, uint32_t id)
{
GstWpeSrc* src = GST_WPE_SRC (data);
GstWpeAudioPad *audio_pad = GST_WPE_AUDIO_PAD (g_hash_table_lookup (src->audio_src_pads, GUINT_TO_POINTER (id)));
GstPad *pad = GST_PAD_CAST (audio_pad);
g_return_if_fail (GST_IS_PAD (pad));
GST_INFO_OBJECT(pad, "Stopping");
gst_pad_push_event (pad, gst_event_new_eos ());
gst_element_remove_pad (GST_ELEMENT_CAST (src), pad);
gst_flow_combiner_remove_pad (src->flow_combiner, pad);
g_hash_table_remove (src->audio_src_pads, GUINT_TO_POINTER (id));
}
static void
on_audio_receiver_handle_pause(void* data, uint32_t id)
{
GstWpeSrc* src = GST_WPE_SRC (data);
GstWpeAudioPad *audio_pad = GST_WPE_AUDIO_PAD (g_hash_table_lookup (src->audio_src_pads, GUINT_TO_POINTER (id)));
GstPad *pad = GST_PAD_CAST (audio_pad);
g_return_if_fail (GST_IS_PAD (pad));
GST_INFO_OBJECT(pad, "Pausing");
gst_pad_push_event (pad, gst_event_new_gap (audio_pad->buffer_time, GST_CLOCK_TIME_NONE));
audio_pad->discont_pending = TRUE;
}
static void
on_audio_receiver_handle_resume(void* data, uint32_t id)
{
GstWpeSrc* src = GST_WPE_SRC (data);
GstWpeAudioPad *audio_pad = GST_WPE_AUDIO_PAD (g_hash_table_lookup (src->audio_src_pads, GUINT_TO_POINTER (id)));
GstPad *pad = GST_PAD_CAST (audio_pad);
g_return_if_fail (GST_IS_PAD (pad));
GST_INFO_OBJECT(pad, "Resuming");
}
static const struct wpe_audio_receiver audio_receiver = {
.handle_start = on_audio_receiver_handle_start,
.handle_packet = on_audio_receiver_handle_packet,
.handle_stop = on_audio_receiver_handle_stop,
.handle_pause = on_audio_receiver_handle_pause,
.handle_resume = on_audio_receiver_handle_resume
};
static void
gst_wpe_src_load_bytes (GstWpeVideoSrc * src, GBytes * bytes)
{
@@ -86,6 +294,37 @@ gst_wpe_src_load_bytes (GstWpeVideoSrc * src, GBytes * bytes)
g_signal_emit_by_name (self->video_src, "load-bytes", bytes, NULL);
}
static void
gst_wpe_src_set_location (GstWpeSrc * src, const gchar * location)
{
GstPad *pad;
GstPad *ghost_pad;
GstProxyPad *proxy_pad;
g_object_set (src->video_src, "location", location, NULL);
ghost_pad = gst_element_get_static_pad (GST_ELEMENT_CAST (src), "video");
if (GST_IS_PAD (ghost_pad)) {
gst_object_unref (ghost_pad);
return;
}
gst_bin_add (GST_BIN_CAST (src), src->video_src);
pad = gst_element_get_static_pad (GST_ELEMENT_CAST (src->video_src), "src");
ghost_pad = gst_ghost_pad_new_from_template ("video", pad,
gst_static_pad_template_get (&video_src_factory));
proxy_pad = gst_proxy_pad_get_internal (GST_PROXY_PAD (ghost_pad));
gst_pad_set_active (GST_PAD_CAST (proxy_pad), TRUE);
gst_element_add_pad (GST_ELEMENT_CAST (src), GST_PAD_CAST (ghost_pad));
gst_flow_combiner_add_pad (src->flow_combiner, GST_PAD_CAST (ghost_pad));
gst_pad_set_chain_function (GST_PAD_CAST (proxy_pad), gst_wpe_src_chain_buffer);
gst_object_unref (proxy_pad);
gst_object_unref (pad);
}
static void
gst_wpe_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
@@ -102,8 +341,12 @@ gst_wpe_src_set_property (GObject * object, guint prop_id,
{
GstWpeSrc *self = GST_WPE_SRC (object);
if (self->video_src)
g_object_set_property (G_OBJECT (self->video_src), pspec->name, value);
if (self->video_src) {
if (prop_id == PROP_LOCATION)
gst_wpe_src_set_location (self, g_value_get_string (value));
else
g_object_set_property (G_OBJECT (self->video_src), pspec->name, value);
}
}
static GstURIType
@@ -134,7 +377,7 @@ gst_wpe_src_set_uri (GstURIHandler * handler, const gchar * uri,
{
GstWpeSrc *src = GST_WPE_SRC (handler);
g_object_set (src->video_src, "location", uri + 6, NULL);
gst_wpe_src_set_location(src, uri + 6);
return TRUE;
}
@@ -152,23 +395,49 @@ gst_wpe_src_uri_handler_init (gpointer iface_ptr, gpointer data)
static void
gst_wpe_src_init (GstWpeSrc * src)
{
gst_bin_set_suppressed_flags (GST_BIN_CAST (src),
static_cast<GstElementFlags>(GST_ELEMENT_FLAG_SOURCE | GST_ELEMENT_FLAG_SINK));
GST_OBJECT_FLAG_SET (src, GST_ELEMENT_FLAG_SOURCE);
src->fd_allocator = gst_fd_allocator_new ();
src->audio_src_pads = g_hash_table_new (g_direct_hash, g_direct_equal);
src->flow_combiner = gst_flow_combiner_new ();
src->video_src = gst_element_factory_make ("wpevideosrc", NULL);
gst_bin_add (GST_BIN_CAST (src), src->video_src);
gst_wpe_video_src_register_audio_receiver (src->video_src, &audio_receiver, src);
}
GstPad *pad =
gst_element_get_static_pad (GST_ELEMENT_CAST (src->video_src), "src");
static GstStateChangeReturn
gst_wpe_src_change_state (GstElement * element, GstStateChange transition)
{
GstStateChangeReturn result;
GstWpeSrc *src = GST_WPE_SRC (element);
GstPad *ghost_pad = gst_ghost_pad_new_from_template ("video_src", pad,
gst_static_pad_template_get (&video_src_factory));
GstProxyPad *proxy_pad =
gst_proxy_pad_get_internal (GST_PROXY_PAD (ghost_pad));
gst_pad_set_active (GST_PAD_CAST (proxy_pad), TRUE);
gst_object_unref (proxy_pad);
GST_DEBUG_OBJECT (src, "%s", gst_state_change_get_name (transition));
result = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS , change_state, (element, transition), GST_STATE_CHANGE_FAILURE);
gst_element_add_pad (GST_ELEMENT_CAST (src), GST_PAD_CAST (ghost_pad));
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:{
gst_flow_combiner_reset (src->flow_combiner);
break;
}
default:
break;
}
gst_object_unref (pad);
return result;
}
static void
gst_wpe_src_dispose (GObject *object)
{
GstWpeSrc *src = GST_WPE_SRC (object);
g_hash_table_unref (src->audio_src_pads);
gst_flow_combiner_free (src->flow_combiner);
gst_object_unref (src->fd_allocator);
GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
}
static void
@@ -179,6 +448,7 @@ gst_wpe_src_class_init (GstWpeSrcClass * klass)
gobject_class->set_property = gst_wpe_src_set_property;
gobject_class->get_property = gst_wpe_src_get_property;
gobject_class->dispose = gst_wpe_src_dispose;
g_object_class_install_property (gobject_class, PROP_LOCATION,
g_param_spec_string ("location", "location", "The URL to display", "",
@@ -189,7 +459,7 @@ gst_wpe_src_class_init (GstWpeSrcClass * klass)
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
gst_element_class_set_static_metadata (element_class, "WPE source",
"Source/Video", "Creates a video stream from a WPE browser",
"Source/Video/Audio", "Creates a video stream from a WPE browser",
"Philippe Normand <philn@igalia.com>, Žan Doberšek "
"<zdobersek@igalia.com>");
@@ -206,10 +476,8 @@ gst_wpe_src_class_init (GstWpeSrcClass * klass)
G_CALLBACK (gst_wpe_src_load_bytes), NULL, NULL, NULL, G_TYPE_NONE, 1,
G_TYPE_BYTES);
gst_element_class_set_static_metadata (element_class, "WPE source",
"Source/Video", "Creates a video stream from a WPE browser",
"Philippe Normand <philn@igalia.com>, Žan Doberšek "
"<zdobersek@igalia.com>");
element_class->change_state = GST_DEBUG_FUNCPTR (gst_wpe_src_change_state);
gst_element_class_add_static_pad_template (element_class, &video_src_factory);
gst_element_class_add_static_pad_template (element_class, &audio_src_factory);
}

ext/wpe/gstwpesrcbin.h

@@ -1,5 +1,5 @@
/* Copyright (C) <2018> Philippe Normand <philn@igalia.com>
* Copyright (C) <2018> Žan Doberšek <zdobersek@igalia.com>
/* Copyright (C) <2018, 2019> Philippe Normand <philn@igalia.com>
* Copyright (C) <2018, 2019> Žan Doberšek <zdobersek@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -21,9 +21,36 @@
#pragma once
#include <gst/gst.h>
#include <gst/audio/audio.h>
G_BEGIN_DECLS
GType gst_wpe_audio_pad_get_type(void);
#define GST_TYPE_WPE_AUDIO_PAD (gst_wpe_audio_pad_get_type())
#define GST_WPE_AUDIO_PAD(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WPE_AUDIO_PAD,GstWpeAudioPad))
#define GST_IS_WPE_AUDIO_PAD(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WPE_AUDIO_PAD))
#define GST_WPE_AUDIO_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WPE_AUDIO_PAD,GstWpeAudioPadClass))
#define GST_IS_WPE_AUDIO_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WPE_AUDIO_PAD))
#define GST_WPE_AUDIO_PAD_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WPE_AUDIO_PAD,GstWpeAudioPadClass))
typedef struct _GstWpeAudioPad GstWpeAudioPad;
typedef struct _GstWpeAudioPadClass GstWpeAudioPadClass;
struct _GstWpeAudioPad
{
GstGhostPad parent;
GstAudioInfo info;
GstClockTime buffer_time;
gboolean discont_pending;
};
struct _GstWpeAudioPadClass
{
GstGhostPadClass parent_class;
};
#define GST_TYPE_WPE_SRC (gst_wpe_src_get_type())
#define GST_WPE_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WPE_SRC,GstWpeSrc))
#define GST_WPE_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_WPE_SRC,GstWpeSrcClass))

ext/wpe/gstwpevideosrc.cpp

@@ -86,6 +86,7 @@
#endif
#include "gstwpevideosrc.h"
#include "gstwpe-private.h"
#include <gst/gl/gl.h>
#include <gst/gl/egl/gstglmemoryegl.h>
#include <gst/gl/wayland/gstgldisplay_wayland.h>
@@ -128,6 +129,8 @@ struct _GstWpeVideoSrc
gint64 n_frames; /* total frames sent */
WPEView *view;
const struct wpe_audio_receiver *audio_receiver;
gpointer audio_receiver_data;
GMutex lock;
};
@@ -296,6 +299,11 @@ gst_wpe_video_src_start (GstWpeVideoSrc * src)
if (created_view) {
src->n_frames = 0;
if (src->audio_receiver) {
src->view->registerAudioReceiver(src->audio_receiver, src->audio_receiver_data);
src->audio_receiver = NULL,
src->audio_receiver_data = NULL;
}
}
WPE_UNLOCK (src);
return TRUE;
@@ -735,4 +743,17 @@ gst_wpe_video_src_class_init (GstWpeVideoSrcClass * klass)
static_cast < GSignalFlags > (G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
G_CALLBACK (gst_wpe_video_src_load_bytes), NULL, NULL, NULL,
G_TYPE_NONE, 1, G_TYPE_BYTES);
}
void
gst_wpe_video_src_register_audio_receiver(GstElement* video_src, const struct wpe_audio_receiver* receiver, gpointer user_data)
{
GstWpeVideoSrc* src = GST_WPE_VIDEO_SOURCE(video_src);
if (!src->view) {
src->audio_receiver = receiver;
src->audio_receiver_data = user_data;
return;
}
src->view->registerAudioReceiver(receiver, user_data);
}

ext/wpe/meson.build

@@ -18,7 +18,7 @@ endif
gstwpe = library('gstwpe',
['WPEThreadedView.cpp', 'gstwpe.cpp', 'gstwpevideosrc.cpp', 'gstwpesrcbin.cpp'],
dependencies : [egl_dep, wpe_dep, wpe_fdo_dep, gstvideo_dep, gstbase_dep, gstgl_dep, xkbcommon_dep, wl_server_dep],
dependencies : [egl_dep, wpe_dep, wpe_fdo_dep, gstallocators_dep, gstaudio_dep, gstvideo_dep, gstbase_dep, gstgl_dep, xkbcommon_dep, wl_server_dep],
cpp_args : gst_plugins_bad_args + ['-DHAVE_CONFIG_H=1'],
include_directories : [configinc],
install : true,

tests/examples/meson.build

@@ -14,6 +14,7 @@ subdir('uvch264')
subdir('va')
subdir('waylandsink')
subdir('webrtc')
subdir('wpe')
executable('playout',
'playout.c',

tests/examples/wpe/meson.build (new file)

@@ -0,0 +1,14 @@
examples = ['wpe']
foreach example : examples
exe_name = example
src_file = '@0@.c'.format(example)
executable(exe_name,
src_file,
install: false,
include_directories : [configinc],
dependencies : [glib_dep, gst_dep],
c_args : ['-DHAVE_CONFIG_H=1'],
)
endforeach

tests/examples/wpe/wpe.c (new file)

@@ -0,0 +1,162 @@
/* Copyright (C) <2018, 2019> Philippe Normand <philn@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include <gst/gst.h>
static GMainLoop *loop;
static GstElement *pipe1;
static GstBus *bus1;
static gboolean
_bus_watch (GstBus * bus, GstMessage * msg, GstElement * pipe)
{
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_STATE_CHANGED:
if (GST_ELEMENT (msg->src) == pipe) {
GstState old, new, pending;
gst_message_parse_state_changed (msg, &old, &new, &pending);
{
gchar *dump_name = g_strconcat ("state_changed-",
gst_element_state_get_name (old), "_",
gst_element_state_get_name (new), NULL);
GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (msg->src),
GST_DEBUG_GRAPH_SHOW_ALL, dump_name);
g_free (dump_name);
}
}
break;
case GST_MESSAGE_ERROR:{
GError *err = NULL;
gchar *dbg_info = NULL;
GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipe),
GST_DEBUG_GRAPH_SHOW_ALL, "error");
gst_message_parse_error (msg, &err, &dbg_info);
g_printerr ("ERROR from element %s: %s\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging info: %s\n", (dbg_info) ? dbg_info : "none");
g_error_free (err);
g_free (dbg_info);
g_main_loop_quit (loop);
break;
}
case GST_MESSAGE_EOS:{
GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipe),
GST_DEBUG_GRAPH_SHOW_ALL, "eos");
g_print ("EOS received\n");
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
static void
_wpe_pad_added (GstElement * src, GstPad * new_pad, GstElement * pipe)
{
GstElement *out;
GstPad *sink;
gchar *name = gst_pad_get_name (new_pad);
gchar *bin_name;
if (g_str_has_prefix (name, "audio")) {
out =
gst_parse_bin_from_description
("audioresample ! audioconvert ! autoaudiosink", TRUE, NULL);
} else {
out =
gst_parse_bin_from_description
("queue ! glcolorconvert ! gtkglsink enable-last-sample=0", TRUE, NULL);
}
bin_name = g_strdup_printf ("%s-bin", name);
g_free (name);
gst_object_set_name (GST_OBJECT_CAST (out), bin_name);
g_free (bin_name);
gst_bin_add (GST_BIN (pipe), out);
sink = out->sinkpads->data;
gst_pad_link (new_pad, sink);
gst_element_sync_state_with_parent (out);
}
static void
_wpe_pad_removed (GstElement * src, GstPad * pad, GstElement * pipe)
{
gchar *name = gst_pad_get_name (pad);
gchar *bin_name = g_strdup_printf ("%s-bin", name);
GstElement *bin = gst_bin_get_by_name (GST_BIN_CAST (pipe), bin_name);
if (GST_IS_ELEMENT (bin)) {
gst_bin_remove (GST_BIN_CAST (pipe), bin);
gst_element_set_state (bin, GST_STATE_NULL);
}
g_free (name);
g_free (bin_name);
}
int
main (int argc, char *argv[])
{
GstElement *src;
if (argc < 2) {
g_printerr ("Usage: %s <website url>\n", argv[0]);
return 1;
}
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
pipe1 = gst_pipeline_new (NULL);
bus1 = gst_pipeline_get_bus (GST_PIPELINE (pipe1));
gst_bus_add_watch (bus1, (GstBusFunc) _bus_watch, pipe1);
src = gst_element_factory_make ("wpesrc", NULL);
gst_bin_add (GST_BIN_CAST (pipe1), src);
gst_element_set_state (GST_ELEMENT (pipe1), GST_STATE_READY);
g_signal_connect (src, "pad-added", G_CALLBACK (_wpe_pad_added), pipe1);
g_signal_connect (src, "pad-removed", G_CALLBACK (_wpe_pad_removed), pipe1);
g_object_set (src, "location", argv[1], NULL);
g_print ("Starting pipeline\n");
gst_element_set_state (GST_ELEMENT (pipe1), GST_STATE_PLAYING);
g_main_loop_run (loop);
gst_element_set_state (GST_ELEMENT (pipe1), GST_STATE_NULL);
g_print ("Pipeline stopped\n");
gst_bus_remove_watch (bus1);
gst_object_unref (bus1);
gst_object_unref (pipe1);
gst_deinit ();
return 0;
}
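
As a rough sketch of how the example above might be built and run standalone,
outside the meson machinery (assuming pkg-config can locate gstreamer-1.0; the
output name and URL are arbitrary):

  cc wpe.c -o wpe $(pkg-config --cflags --libs gstreamer-1.0)
  ./wpe https://www.example.org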