webrtc: indent sources

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-examples/-/merge_requests/16>
Author: Matthew Waters
Date:   2020-06-19 12:30:23 +10:00
Commit: 204945b902 (parent e1c3dad258)
3 changed files with 140 additions and 123 deletions
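All three files below are pure re-indentation with no functional change: the sources are reflowed to the house style produced by GStreamer's gst-indent script (a GNU indent wrapper). As a rough sketch of those conventions on dummy code, illustrative only and not part of this commit:

/* Illustrative only, not from this commit. gst-indent style in brief:
 * 2-space indent, lines wrapped near 80 columns with a 4-space
 * continuation indent, function braces on their own line,
 * statement braces on the same line. */
#include <gst/gst.h>

static void
example_pad_added (GstElement * element, GstPad * pad, gpointer user_data)
{
  if (pad != NULL) {
    gchar *name = gst_pad_get_name (pad);

    g_print ("new pad: %s\n", name);
    g_free (name);
  }
}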

File 1 of 3

@@ -43,7 +43,8 @@ GST_DEBUG_CATEGORY_STATIC (debug_category);
 #define GET_CUSTOM_DATA(env, thiz, fieldID) (WebRTC *)(gintptr)(*env)->GetLongField (env, thiz, fieldID)
 #define SET_CUSTOM_DATA(env, thiz, fieldID, data) (*env)->SetLongField (env, thiz, fieldID, (jlong)(gintptr)data)
 
-enum AppState {
+enum AppState
+{
   APP_STATE_UNKNOWN = 0,
   APP_STATE_ERROR = 1, /* generic error */
   SERVER_CONNECTING = 1000,
@@ -191,11 +192,13 @@ on_incoming_decodebin_stream (GstElement * decodebin, GstPad * pad,
   name = gst_structure_get_name (gst_caps_get_structure (caps, 0));
 
   if (g_str_has_prefix (name, "video")) {
-    GstElement *sink = handle_media_stream (pad, webrtc->pipe, "videoconvert", "glimagesink");
+    GstElement *sink =
+        handle_media_stream (pad, webrtc->pipe, "videoconvert", "glimagesink");
     if (webrtc->video_sink == NULL) {
       webrtc->video_sink = sink;
       if (webrtc->native_window)
-        gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), (gpointer) webrtc->native_window);
+        gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink),
+            (gpointer) webrtc->native_window);
     }
   } else if (g_str_has_prefix (name, "audio")) {
     handle_media_stream (pad, webrtc->pipe, "audioconvert", "autoaudiosink");
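handle_media_stream() itself is not touched by this hunk; as a point of reference, a hedged sketch of what such a helper does in these examples (error handling omitted, names illustrative):

#include <gst/gst.h>

/* Sketch: hang a converter + sink off a freshly exposed decodebin pad
 * and return the sink so the caller can, e.g., set a window handle. */
static GstElement *
link_media_stream (GstPad * pad, GstElement * pipe,
    const gchar * convert_name, const gchar * sink_name)
{
  GstElement *conv = gst_element_factory_make (convert_name, NULL);
  GstElement *sink = gst_element_factory_make (sink_name, NULL);
  GstPad *sinkpad;

  /* add to the pipeline, bring to the pipeline's state, link conv -> sink */
  gst_bin_add_many (GST_BIN (pipe), conv, sink, NULL);
  gst_element_sync_state_with_parent (conv);
  gst_element_sync_state_with_parent (sink);
  gst_element_link (conv, sink);

  /* attach the decodebin src pad to the converter's sink pad */
  sinkpad = gst_element_get_static_pad (conv, "sink");
  gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);

  return sink;
}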
@@ -223,14 +226,15 @@ on_incoming_stream (GstElement * webrtcbin, GstPad * pad, WebRTC * webrtc)
 }
 
 static void
-send_ice_candidate_message (GstElement * webrtcbin G_GNUC_UNUSED, guint mlineindex,
-    gchar * candidate, WebRTC * webrtc)
+send_ice_candidate_message (GstElement * webrtcbin G_GNUC_UNUSED,
+    guint mlineindex, gchar * candidate, WebRTC * webrtc)
 {
   gchar *text;
   JsonObject *ice, *msg;
 
   if (webrtc->app_state < PEER_CALL_NEGOTIATING) {
-    cleanup_and_quit_loop (webrtc, "Can't send ICE, not in call", APP_STATE_ERROR);
+    cleanup_and_quit_loop (webrtc, "Can't send ICE, not in call",
+        APP_STATE_ERROR);
     return;
   }
@@ -253,7 +257,8 @@ send_sdp_offer (WebRTC * webrtc, GstWebRTCSessionDescription * offer)
   JsonObject *msg, *sdp;
 
   if (webrtc->app_state < PEER_CALL_NEGOTIATING) {
-    cleanup_and_quit_loop (webrtc, "Can't send offer, not in call", APP_STATE_ERROR);
+    cleanup_and_quit_loop (webrtc, "Can't send offer, not in call",
+        APP_STATE_ERROR);
     return;
   }
 
@@ -290,7 +295,8 @@ on_offer_created (GstPromise * promise, WebRTC * webrtc)
   gst_promise_unref (promise);
 
   promise = gst_promise_new ();
-  g_signal_emit_by_name (webrtc->webrtcbin, "set-local-description", offer, promise);
+  g_signal_emit_by_name (webrtc->webrtcbin, "set-local-description", offer,
+      promise);
   gst_promise_interrupt (promise);
   gst_promise_unref (promise);
 
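The set-local-description emit above is webrtcbin's fire-and-forget promise idiom: when the reply is not needed, the promise is interrupted and released immediately. A minimal sketch of the idiom, with an illustrative function name:

#define GST_USE_UNSTABLE_API
#include <gst/webrtc/webrtc.h>

static void
set_local_description_ignoring_reply (GstElement * webrtcbin,
    GstWebRTCSessionDescription * desc)
{
  GstPromise *promise = gst_promise_new ();

  g_signal_emit_by_name (webrtcbin, "set-local-description", desc, promise);
  /* no reply needed: stop waiting on the promise and release it */
  gst_promise_interrupt (promise);
  gst_promise_unref (promise);
}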
@@ -338,8 +344,7 @@ start_pipeline (WebRTC * webrtc)
       "vp8enc keyframe-max-dist=30 deadline=1 error-resilient=default ! rtpvp8pay picture-id-mode=15-bit mtu=1300 ! "
       "queue max-size-time=300000000 ! " RTP_CAPS_VP8 " ! sendrecv.sink_0 "
       "openslessrc ! queue ! audioconvert ! audioresample ! audiorate ! queue ! opusenc ! rtpopuspay ! "
-      "queue ! " RTP_CAPS_OPUS " ! sendrecv.sink_1 ",
-      &error);
+      "queue ! " RTP_CAPS_OPUS " ! sendrecv.sink_1 ", &error);
 
   if (error) {
     g_printerr ("Failed to parse launch: %s\n", error->message);
@@ -361,8 +366,8 @@ start_pipeline (WebRTC * webrtc)
   g_signal_connect (webrtc->webrtcbin, "on-ice-candidate",
       G_CALLBACK (send_ice_candidate_message), webrtc);
   /* Incoming streams will be exposed via this signal */
-  g_signal_connect (webrtc->webrtcbin, "pad-added", G_CALLBACK (on_incoming_stream),
-      webrtc);
+  g_signal_connect (webrtc->webrtcbin, "pad-added",
+      G_CALLBACK (on_incoming_stream), webrtc);
 
   /* Lifetime is the same as the pipeline itself */
   gst_object_unref (webrtc->webrtcbin);
@@ -425,8 +430,7 @@ register_with_server (WebRTC * webrtc)
 }
 
 static void
-on_server_closed (SoupWebsocketConnection * conn G_GNUC_UNUSED,
-    WebRTC * webrtc)
+on_server_closed (SoupWebsocketConnection * conn G_GNUC_UNUSED, WebRTC * webrtc)
 {
   webrtc->app_state = SERVER_CLOSED;
   cleanup_and_quit_loop (webrtc, "Server connection closed", 0);
@@ -458,22 +462,23 @@ on_server_message (SoupWebsocketConnection * conn, SoupWebsocketDataType type,
   /* Server has accepted our registration, we are ready to send commands */
   if (g_strcmp0 (text, "HELLO") == 0) {
     if (webrtc->app_state != SERVER_REGISTERING) {
-      cleanup_and_quit_loop (webrtc, "ERROR: Received HELLO when not registering",
-          APP_STATE_ERROR);
+      cleanup_and_quit_loop (webrtc,
+          "ERROR: Received HELLO when not registering", APP_STATE_ERROR);
       goto out;
     }
     webrtc->app_state = SERVER_REGISTERED;
     g_print ("Registered with server\n");
 
     /* Ask signalling server to connect us with a specific peer */
     if (!setup_call (webrtc)) {
-      cleanup_and_quit_loop (webrtc, "ERROR: Failed to setup call", PEER_CALL_ERROR);
+      cleanup_and_quit_loop (webrtc, "ERROR: Failed to setup call",
+          PEER_CALL_ERROR);
       goto out;
     }
     /* Call has been setup by the server, now we can start negotiation */
   } else if (g_strcmp0 (text, "SESSION_OK") == 0) {
     if (webrtc->app_state != PEER_CONNECTING) {
-      cleanup_and_quit_loop (webrtc, "ERROR: Received SESSION_OK when not calling",
-          PEER_CONNECTION_ERROR);
+      cleanup_and_quit_loop (webrtc,
+          "ERROR: Received SESSION_OK when not calling", PEER_CONNECTION_ERROR);
       goto out;
     }
@@ -560,8 +565,8 @@ on_server_message (SoupWebsocketConnection * conn, SoupWebsocketDataType type,
     /* Set remote description on our pipeline */
     {
       GstPromise *promise = gst_promise_new ();
-      g_signal_emit_by_name (webrtc->webrtcbin, "set-remote-description", answer,
-          promise);
+      g_signal_emit_by_name (webrtc->webrtcbin, "set-remote-description",
+          answer, promise);
       gst_promise_interrupt (promise);
       gst_promise_unref (promise);
     }
@@ -577,8 +582,8 @@ on_server_message (SoupWebsocketConnection * conn, SoupWebsocketDataType type,
     sdpmlineindex = json_object_get_int_member (ice, "sdpMLineIndex");
 
     /* Add ice candidate sent by remote peer */
-    g_signal_emit_by_name (webrtc->webrtcbin, "add-ice-candidate", sdpmlineindex,
-        candidate);
+    g_signal_emit_by_name (webrtc->webrtcbin, "add-ice-candidate",
+        sdpmlineindex, candidate);
   } else {
     g_printerr ("Ignoring unknown JSON message:\n%s\n", text);
   }
@@ -590,12 +595,12 @@ on_server_message (SoupWebsocketConnection * conn, SoupWebsocketDataType type,
 }
 
 static void
-on_server_connected (SoupSession * session, GAsyncResult * res,
-    WebRTC * webrtc)
+on_server_connected (SoupSession * session, GAsyncResult * res, WebRTC * webrtc)
 {
   GError *error = NULL;
 
-  webrtc->ws_conn = soup_session_websocket_connect_finish (session, res, &error);
+  webrtc->ws_conn =
+      soup_session_websocket_connect_finish (session, res, &error);
   if (error) {
     cleanup_and_quit_loop (webrtc, error->message, SERVER_CONNECTION_ERROR);
     g_error_free (error);
@@ -607,8 +612,10 @@ on_server_connected (SoupSession * session, GAsyncResult * res,
   webrtc->app_state = SERVER_CONNECTED;
   g_print ("Connected to signalling server\n");
 
-  g_signal_connect (webrtc->ws_conn, "closed", G_CALLBACK (on_server_closed), webrtc);
-  g_signal_connect (webrtc->ws_conn, "message", G_CALLBACK (on_server_message), webrtc);
+  g_signal_connect (webrtc->ws_conn, "closed", G_CALLBACK (on_server_closed),
+      webrtc);
+  g_signal_connect (webrtc->ws_conn, "message", G_CALLBACK (on_server_message),
+      webrtc);
 
   /* Register with the server so it knows about us and can accept commands */
   register_with_server (webrtc);
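The ws_conn handled here comes from libsoup's asynchronous WebSocket API. A hedged sketch of the connect side, assuming the libsoup 2.x signatures these examples were written against (function and parameter names are illustrative):

#include <libsoup/soup.h>

static void
connect_to_signalling_server (const gchar * server_url,
    GAsyncReadyCallback on_connected, gpointer user_data)
{
  SoupSession *session = soup_session_new ();
  SoupMessage *message = soup_message_new (SOUP_METHOD_GET, server_url);

  /* origin, protocols and cancellable left NULL; the result is collected
   * with soup_session_websocket_connect_finish() in the callback, as
   * on_server_connected() does above (ref handling elided here) */
  soup_session_websocket_connect_async (session, message, NULL, NULL, NULL,
      on_connected, user_data);
}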
@@ -736,7 +743,8 @@ _call_thread (WebRTC * webrtc)
   context = g_main_context_new ();
   webrtc->loop = g_main_loop_new (context, FALSE);
   g_main_context_invoke (context, (GSourceFunc) _unlock_mutex, &webrtc->lock);
-  g_main_context_invoke (context, (GSourceFunc) connect_to_websocket_server_async, webrtc);
+  g_main_context_invoke (context,
+      (GSourceFunc) connect_to_websocket_server_async, webrtc);
   g_main_context_push_thread_default (context);
   g_cond_broadcast (&webrtc->cond);
   g_main_loop_run (webrtc->loop);
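_call_thread() drives the signalling code from its own GLib main loop: a private GMainContext is created, initial work is queued with g_main_context_invoke(), and the loop runs on a dedicated thread. A condensed sketch of the pattern; some_setup_func is a hypothetical stand-in for connect_to_websocket_server_async():

#include <glib.h>

static gboolean
some_setup_func (gpointer user_data)
{
  /* hypothetical one-shot setup, e.g. kicking off an async connect */
  return G_SOURCE_REMOVE;
}

static gpointer
worker_thread (gpointer user_data)
{
  GMainContext *context = g_main_context_new ();
  GMainLoop *loop = g_main_loop_new (context, FALSE);

  /* make this context the thread-default so async GIO/libsoup calls
   * made from here dispatch into our loop */
  g_main_context_push_thread_default (context);
  /* queue initial work; it runs once the loop starts iterating */
  g_main_context_invoke (context, some_setup_func, user_data);
  g_main_loop_run (loop);
  g_main_context_pop_thread_default (context);

  g_main_loop_unref (loop);
  g_main_context_unref (context);
  return NULL;
}

Such a worker would be started with g_thread_new () and shut down by quitting the loop from another thread, which is what the example does on teardown.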
@@ -814,7 +822,6 @@ native_class_init (JNIEnv * env, jclass klass)
     __android_log_print (ANDROID_LOG_ERROR, "GstPlayer", "%s", message);
     (*env)->ThrowNew (env, exception_class, message);
   }
-
   //gst_debug_set_threshold_from_string ("gl*:7", FALSE);
 }
 
@@ -837,11 +844,13 @@ native_set_surface (JNIEnv * env, jobject thiz, jobject surface)
 
   webrtc->native_window = new_native_window;
   if (webrtc->video_sink)
-    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (webrtc->video_sink), (guintptr) new_native_window);
+    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (webrtc->video_sink),
+        (guintptr) new_native_window);
 }
 
 static void
-native_set_signalling_server (JNIEnv * env, jobject thiz, jstring server) {
+native_set_signalling_server (JNIEnv * env, jobject thiz, jstring server)
+{
   WebRTC *webrtc = GET_CUSTOM_DATA (env, thiz, native_webrtc_field_id);
   const gchar *s;
 
@@ -856,7 +865,8 @@ native_set_signalling_server (JNIEnv * env, jobject thiz, jstring server) {
 }
 
 static void
-native_set_call_id(JNIEnv * env, jobject thiz, jstring peer_id) {
+native_set_call_id (JNIEnv * env, jobject thiz, jstring peer_id)
+{
   WebRTC *webrtc = GET_CUSTOM_DATA (env, thiz, native_webrtc_field_id);
   const gchar *s;
 

File 2 of 3

@@ -250,7 +250,8 @@ on_incoming_decodebin_stream (GstElement * decodebin, GstPad * pad,
 }
 
 static void
-on_incoming_stream (GstElement * webrtc, GstPad * pad, ReceiverEntry *receiver_entry)
+on_incoming_stream (GstElement * webrtc, GstPad * pad,
+    ReceiverEntry * receiver_entry)
 {
   GstElement *decodebin;
   GstPad *sinkpad;
@@ -287,10 +288,11 @@ create_receiver_entry (SoupWebsocketConnection * connection)
       G_CALLBACK (soup_websocket_message_cb), (gpointer) receiver_entry);
 
   error = NULL;
-  receiver_entry->pipeline = gst_parse_launch ("webrtcbin name=webrtcbin stun-server=stun://" STUN_SERVER " "
+  receiver_entry->pipeline =
+      gst_parse_launch ("webrtcbin name=webrtcbin stun-server=stun://"
+      STUN_SERVER " "
       "audiotestsrc is-live=true wave=red-noise ! audioconvert ! audioresample ! queue ! opusenc ! rtpopuspay ! "
-      "queue ! " RTP_CAPS_OPUS "97 ! webrtcbin. "
-      , &error);
+      "queue ! " RTP_CAPS_OPUS "97 ! webrtcbin. ", &error);
   if (error != NULL) {
     g_error ("Could not create WebRTC pipeline: %s\n", error->message);
     g_error_free (error);
@@ -302,18 +304,24 @@ create_receiver_entry (SoupWebsocketConnection * connection)
   g_assert (receiver_entry->webrtcbin != NULL);
 
   /* Incoming streams will be exposed via this signal */
-  g_signal_connect (receiver_entry->webrtcbin, "pad-added", G_CALLBACK (on_incoming_stream),
-      receiver_entry);
+  g_signal_connect (receiver_entry->webrtcbin, "pad-added",
+      G_CALLBACK (on_incoming_stream), receiver_entry);
 
 #if 0
-  GstElement *rtpbin = gst_bin_get_by_name (GST_BIN (receiver_entry->webrtcbin), "rtpbin");
+  GstElement *rtpbin =
+      gst_bin_get_by_name (GST_BIN (receiver_entry->webrtcbin), "rtpbin");
   g_object_set (rtpbin, "latency", 40, NULL);
   gst_object_unref (rtpbin);
 #endif
 
   // Create a 2nd transceiver for the receive only video stream
-  video_caps = gst_caps_from_string ("application/x-rtp,media=video,encoding-name=H264,payload=" RTP_PAYLOAD_TYPE ",clock-rate=90000,packetization-mode=(string)1, profile-level-id=(string)42c016");
-  g_signal_emit_by_name (receiver_entry->webrtcbin, "add-transceiver", GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY, video_caps, NULL, &trans);
+  video_caps =
+      gst_caps_from_string
+      ("application/x-rtp,media=video,encoding-name=H264,payload="
+      RTP_PAYLOAD_TYPE
+      ",clock-rate=90000,packetization-mode=(string)1, profile-level-id=(string)42c016");
+  g_signal_emit_by_name (receiver_entry->webrtcbin, "add-transceiver",
+      GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY, video_caps, NULL, &trans);
   gst_caps_unref (video_caps);
   gst_object_unref (trans);
 
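The add-transceiver emit above pre-creates a receive-only video transceiver so the generated offer includes an H.264 media section. A hedged sketch following the documented webrtcbin action-signal signature (direction and caps in, the new transceiver out; the payload number here is illustrative):

#define GST_USE_UNSTABLE_API
#include <gst/webrtc/webrtc.h>

static void
add_recvonly_h264_transceiver (GstElement * webrtcbin)
{
  GstWebRTCRTPTransceiver *trans = NULL;
  GstCaps *caps = gst_caps_from_string ("application/x-rtp,media=video,"
      "encoding-name=H264,payload=96,clock-rate=90000");

  /* documented form: direction + caps in, transceiver returned via &trans */
  g_signal_emit_by_name (webrtcbin, "add-transceiver",
      GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY, caps, &trans);
  gst_caps_unref (caps);
  gst_object_unref (trans);
}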

File 3 of 3

@@ -536,8 +536,7 @@ on_offer_received (GstSDPMessage *sdp)
   /* Set remote description on our pipeline */
   {
     promise = gst_promise_new_with_change_func (on_offer_set, NULL, NULL);
-    g_signal_emit_by_name (webrtc1, "set-remote-description", offer,
-        promise);
+    g_signal_emit_by_name (webrtc1, "set-remote-description", offer, promise);
   }
   gst_webrtc_session_description_free (offer);
 }