From d1f3a3ef40a69fe50efe6e2b76400e7f5f5dfb6c Mon Sep 17 00:00:00 2001 From: trilene Date: Tue, 27 Oct 2020 13:14:06 -0400 Subject: Support video calls --- src/WebRTCSession.cpp | 805 +++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 635 insertions(+), 170 deletions(-) (limited to 'src/WebRTCSession.cpp') diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp index 1c11f750..177bdf7a 100644 --- a/src/WebRTCSession.cpp +++ b/src/WebRTCSession.cpp @@ -1,7 +1,16 @@ #include +#include +#include #include +#include +#include +#include +#include +#include +#include #include "Logging.h" +#include "UserSettingsPage.h" #include "WebRTCSession.h" #ifdef GSTREAMER_AVAILABLE @@ -15,6 +24,9 @@ extern "C" } #endif +// https://github.com/vector-im/riot-web/issues/10173 +constexpr std::string_view STUN_SERVER = "stun://turn.matrix.org:3478"; + Q_DECLARE_METATYPE(webrtc::State) using webrtc::State; @@ -39,7 +51,7 @@ WebRTCSession::init(std::string *errorMessage) GError *error = nullptr; if (!gst_init_check(nullptr, nullptr, &error)) { - std::string strError = std::string("WebRTC: failed to initialise GStreamer: "); + std::string strError("WebRTC: failed to initialise GStreamer: "); if (error) { strError += error->message; g_error_free(error); @@ -50,51 +62,14 @@ WebRTCSession::init(std::string *errorMessage) return false; } + initialised_ = true; gchar *version = gst_version_string(); - std::string gstVersion(version); + nhlog::ui()->info("WebRTC: initialised {}", version); g_free(version); - nhlog::ui()->info("WebRTC: initialised " + gstVersion); - - // GStreamer Plugins: - // Base: audioconvert, audioresample, opus, playback, volume - // Good: autodetect, rtpmanager - // Bad: dtls, srtp, webrtc - // libnice [GLib]: nice - initialised_ = true; - std::string strError = gstVersion + ": Missing plugins: "; - const gchar *needed[] = {"audioconvert", - "audioresample", - "autodetect", - "dtls", - "nice", - "opus", - "playback", - "rtpmanager", - "srtp", - "volume", 
- "webrtc", - nullptr}; - GstRegistry *registry = gst_registry_get(); - for (guint i = 0; i < g_strv_length((gchar **)needed); i++) { - GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]); - if (!plugin) { - strError += std::string(needed[i]) + " "; - initialised_ = false; - continue; - } - gst_object_unref(plugin); - } - - if (initialised_) { #if GST_CHECK_VERSION(1, 18, 0) - startDeviceMonitor(); + startDeviceMonitor(); #endif - } else { - nhlog::ui()->error(strError); - if (errorMessage) - *errorMessage = strError; - } - return initialised_; + return true; #else (void)errorMessage; return false; @@ -103,37 +78,154 @@ WebRTCSession::init(std::string *errorMessage) #ifdef GSTREAMER_AVAILABLE namespace { -bool isoffering_; + +struct AudioSource +{ + std::string name; + GstDevice *device; +}; + +struct VideoSource +{ + struct Caps + { + std::string resolution; + std::vector frameRates; + }; + std::string name; + GstDevice *device; + std::vector caps; +}; + std::string localsdp_; std::vector localcandidates_; -std::vector> audioSources_; +bool haveAudioStream_; +bool haveVideoStream_; +std::vector audioSources_; +std::vector videoSources_; + +using FrameRate = std::pair; +std::optional +getFrameRate(const GValue *value) +{ + if (GST_VALUE_HOLDS_FRACTION(value)) { + gint num = gst_value_get_fraction_numerator(value); + gint den = gst_value_get_fraction_denominator(value); + return FrameRate{num, den}; + } + return std::nullopt; +} + +void +addFrameRate(std::vector &rates, const FrameRate &rate) +{ + constexpr double minimumFrameRate = 15.0; + if (static_cast(rate.first) / rate.second >= minimumFrameRate) + rates.push_back(std::to_string(rate.first) + "/" + std::to_string(rate.second)); +} + +std::pair +tokenise(std::string_view str, char delim) +{ + std::pair ret; + auto pos = str.find_first_of(delim); + auto s = str.data(); + std::from_chars(s, s + pos, ret.first); + std::from_chars(s + pos + 1, s + str.size(), ret.second); + return ret; +} void 
addDevice(GstDevice *device) { - if (device) { - gchar *name = gst_device_get_display_name(device); - nhlog::ui()->debug("WebRTC: device added: {}", name); + if (!device) + return; + + gchar *name = gst_device_get_display_name(device); + gchar *type = gst_device_get_device_class(device); + bool isVideo = !std::strncmp(type, "Video", 5); + g_free(type); + nhlog::ui()->debug("WebRTC: {} device added: {}", isVideo ? "video" : "audio", name); + if (!isVideo) { audioSources_.push_back({name, device}); g_free(name); + return; + } + + GstCaps *gstcaps = gst_device_get_caps(device); + if (!gstcaps) { + nhlog::ui()->debug("WebRTC: unable to get caps for {}", name); + g_free(name); + return; + } + + VideoSource source{name, device, {}}; + g_free(name); + guint nCaps = gst_caps_get_size(gstcaps); + for (guint i = 0; i < nCaps; ++i) { + GstStructure *structure = gst_caps_get_structure(gstcaps, i); + const gchar *name = gst_structure_get_name(structure); + if (!std::strcmp(name, "video/x-raw")) { + gint widthpx, heightpx; + if (gst_structure_get(structure, + "width", + G_TYPE_INT, + &widthpx, + "height", + G_TYPE_INT, + &heightpx, + nullptr)) { + VideoSource::Caps caps; + caps.resolution = + std::to_string(widthpx) + "x" + std::to_string(heightpx); + const GValue *value = + gst_structure_get_value(structure, "framerate"); + if (auto fr = getFrameRate(value); fr) + addFrameRate(caps.frameRates, *fr); + else if (GST_VALUE_HOLDS_LIST(value)) { + guint nRates = gst_value_list_get_size(value); + for (guint j = 0; j < nRates; ++j) { + const GValue *rate = + gst_value_list_get_value(value, j); + if (auto fr = getFrameRate(rate); fr) + addFrameRate(caps.frameRates, *fr); + } + } + if (!caps.frameRates.empty()) + source.caps.push_back(std::move(caps)); + } + } } + gst_caps_unref(gstcaps); + videoSources_.push_back(std::move(source)); } #if GST_CHECK_VERSION(1, 18, 0) +template +bool +removeDevice(T &sources, GstDevice *device, bool changed) +{ + if (auto it = 
std::find_if(sources.begin(), + sources.end(), + [device](const auto &s) { return s.device == device; }); + it != sources.end()) { + nhlog::ui()->debug(std::string("WebRTC: device ") + + (changed ? "changed: " : "removed: ") + "{}", + it->name); + gst_object_unref(device); + sources.erase(it); + return true; + } + return false; +} + void removeDevice(GstDevice *device, bool changed) { if (device) { - if (auto it = std::find_if(audioSources_.begin(), - audioSources_.end(), - [device](const auto &s) { return s.second == device; }); - it != audioSources_.end()) { - nhlog::ui()->debug(std::string("WebRTC: device ") + - (changed ? "changed: " : "removed: ") + "{}", - it->first); - gst_object_unref(device); - audioSources_.erase(it); - } + if (removeDevice(audioSources_, device, changed) || + removeDevice(videoSources_, device, changed)) + return; } } #endif @@ -194,7 +286,7 @@ parseSDP(const std::string &sdp, GstWebRTCSDPType type) return gst_webrtc_session_description_new(type, msg); } else { nhlog::ui()->error("WebRTC: failed to parse remote session description"); - gst_object_unref(msg); + gst_sdp_message_free(msg); return nullptr; } } @@ -250,7 +342,7 @@ iceGatheringStateChanged(GstElement *webrtc, g_object_get(webrtc, "ice-gathering-state", &newState, nullptr); if (newState == GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE) { nhlog::ui()->debug("WebRTC: GstWebRTCICEGatheringState -> Complete"); - if (isoffering_) { + if (WebRTCSession::instance().isOffering()) { emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_); emit WebRTCSession::instance().stateChanged(State::OFFERSENT); } else { @@ -266,7 +358,7 @@ gboolean onICEGatheringCompletion(gpointer timerid) { *(guint *)(timerid) = 0; - if (isoffering_) { + if (WebRTCSession::instance().isOffering()) { emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_); emit WebRTCSession::instance().stateChanged(State::OFFERSENT); } else { @@ -286,25 +378,25 @@ addLocalICECandidate(GstElement 
*webrtc G_GNUC_UNUSED, nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate); #if GST_CHECK_VERSION(1, 18, 0) - localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate}); + localcandidates_.push_back({std::string() /*max-bundle*/, (uint16_t)mlineIndex, candidate}); return; #else if (WebRTCSession::instance().state() >= State::OFFERSENT) { emit WebRTCSession::instance().newICECandidate( - {"audio", (uint16_t)mlineIndex, candidate}); + {std::string() /*max-bundle*/, (uint16_t)mlineIndex, candidate}); return; } - localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate}); + localcandidates_.push_back({std::string() /*max-bundle*/, (uint16_t)mlineIndex, candidate}); // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers // GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early. Fixed in v1.18. - // Use a 100ms timeout in the meantime + // Use a 1s timeout in the meantime static guint timerid = 0; if (timerid) g_source_remove(timerid); - timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid); + timerid = g_timeout_add(1000, onICEGatheringCompletion, &timerid); #endif } @@ -329,40 +421,166 @@ iceConnectionStateChanged(GstElement *webrtc, } } +// https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/issues/1164 +struct KeyFrameRequestData +{ + GstElement *pipe = nullptr; + GstElement *decodebin = nullptr; + gint packetsLost = 0; + guint timerid = 0; + std::string statsField; +} keyFrameRequestData_; + +void +sendKeyFrameRequest() +{ + GstPad *sinkpad = gst_element_get_static_pad(keyFrameRequestData_.decodebin, "sink"); + if (!gst_pad_push_event(sinkpad, + gst_event_new_custom(GST_EVENT_CUSTOM_UPSTREAM, + gst_structure_new_empty("GstForceKeyUnit")))) + nhlog::ui()->error("WebRTC: key frame request failed"); + else + nhlog::ui()->debug("WebRTC: sent key frame request"); + + gst_object_unref(sinkpad); +} + void -linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) 
+testPacketLoss_(GstPromise *promise, gpointer G_GNUC_UNUSED) { - GstCaps *caps = gst_pad_get_current_caps(newpad); - if (!caps) + const GstStructure *reply = gst_promise_get_reply(promise); + gint packetsLost = 0; + GstStructure *rtpStats; + if (!gst_structure_get(reply, + keyFrameRequestData_.statsField.c_str(), + GST_TYPE_STRUCTURE, + &rtpStats, + nullptr)) { + nhlog::ui()->error("WebRTC: get-stats: no field: {}", + keyFrameRequestData_.statsField); + gst_promise_unref(promise); return; + } + gst_structure_get_int(rtpStats, "packets-lost", &packetsLost); + gst_structure_free(rtpStats); + gst_promise_unref(promise); + if (packetsLost > keyFrameRequestData_.packetsLost) { + nhlog::ui()->debug("WebRTC: inbound video lost packet count: {}", packetsLost); + keyFrameRequestData_.packetsLost = packetsLost; + sendKeyFrameRequest(); + } +} - const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0)); - gst_caps_unref(caps); +gboolean +testPacketLoss(gpointer G_GNUC_UNUSED) +{ + if (keyFrameRequestData_.pipe) { + GstElement *webrtc = + gst_bin_get_by_name(GST_BIN(keyFrameRequestData_.pipe), "webrtcbin"); + GstPromise *promise = + gst_promise_new_with_change_func(testPacketLoss_, nullptr, nullptr); + g_signal_emit_by_name(webrtc, "get-stats", nullptr, promise); + gst_object_unref(webrtc); + return TRUE; + } + return FALSE; +} - GstPad *queuepad = nullptr; - if (g_str_has_prefix(name, "audio")) { +#if GST_CHECK_VERSION(1, 18, 0) +void +setWaitForKeyFrame(GstBin *decodebin G_GNUC_UNUSED, GstElement *element, gpointer G_GNUC_UNUSED) +{ + if (!std::strcmp( + gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(gst_element_get_factory(element))), + "rtpvp8depay")) + g_object_set(element, "wait-for-keyframe", TRUE, nullptr); +} +#endif + +void +linkNewPad(GstElement *decodebin, GstPad *newpad, GstElement *pipe) +{ + GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink"); + GstCaps *sinkcaps = gst_pad_get_current_caps(sinkpad); + const GstStructure *structure 
= gst_caps_get_structure(sinkcaps, 0); + + gchar *mediaType = nullptr; + guint ssrc = 0; + gst_structure_get( + structure, "media", G_TYPE_STRING, &mediaType, "ssrc", G_TYPE_UINT, &ssrc, nullptr); + gst_caps_unref(sinkcaps); + gst_object_unref(sinkpad); + + WebRTCSession *session = &WebRTCSession::instance(); + GstElement *queue = gst_element_factory_make("queue", nullptr); + if (!std::strcmp(mediaType, "audio")) { nhlog::ui()->debug("WebRTC: received incoming audio stream"); - GstElement *queue = gst_element_factory_make("queue", nullptr); + haveAudioStream_ = true; GstElement *convert = gst_element_factory_make("audioconvert", nullptr); GstElement *resample = gst_element_factory_make("audioresample", nullptr); GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr); + gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr); gst_element_link_many(queue, convert, resample, sink, nullptr); gst_element_sync_state_with_parent(queue); gst_element_sync_state_with_parent(convert); gst_element_sync_state_with_parent(resample); gst_element_sync_state_with_parent(sink); - queuepad = gst_element_get_static_pad(queue, "sink"); + } else if (!std::strcmp(mediaType, "video")) { + nhlog::ui()->debug("WebRTC: received incoming video stream"); + if (!session->getVideoItem()) { + g_free(mediaType); + gst_object_unref(queue); + nhlog::ui()->error("WebRTC: video call item not set"); + return; + } + haveVideoStream_ = true; + keyFrameRequestData_.statsField = + std::string("rtp-inbound-stream-stats_") + std::to_string(ssrc); + GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr); + GstElement *glupload = gst_element_factory_make("glupload", nullptr); + GstElement *glcolorconvert = gst_element_factory_make("glcolorconvert", nullptr); + GstElement *qmlglsink = gst_element_factory_make("qmlglsink", nullptr); + GstElement *glsinkbin = gst_element_factory_make("glsinkbin", nullptr); + g_object_set(qmlglsink, "widget", 
session->getVideoItem(), nullptr); + g_object_set(glsinkbin, "sink", qmlglsink, nullptr); + + gst_bin_add_many( + GST_BIN(pipe), queue, videoconvert, glupload, glcolorconvert, glsinkbin, nullptr); + gst_element_link_many( + queue, videoconvert, glupload, glcolorconvert, glsinkbin, nullptr); + gst_element_sync_state_with_parent(queue); + gst_element_sync_state_with_parent(videoconvert); + gst_element_sync_state_with_parent(glupload); + gst_element_sync_state_with_parent(glcolorconvert); + gst_element_sync_state_with_parent(glsinkbin); + } else { + g_free(mediaType); + gst_object_unref(queue); + nhlog::ui()->error("WebRTC: unknown pad type: {}", GST_PAD_NAME(newpad)); + return; } + GstPad *queuepad = gst_element_get_static_pad(queue, "sink"); if (queuepad) { if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad))) nhlog::ui()->error("WebRTC: unable to link new pad"); else { - emit WebRTCSession::instance().stateChanged(State::CONNECTED); + if (!session->isVideo() || + (haveAudioStream_ && + (haveVideoStream_ || session->isRemoteVideoRecvOnly()))) { + emit session->stateChanged(State::CONNECTED); + if (haveVideoStream_) { + keyFrameRequestData_.pipe = pipe; + keyFrameRequestData_.decodebin = decodebin; + keyFrameRequestData_.timerid = + g_timeout_add_seconds(3, testPacketLoss, nullptr); + } + } } gst_object_unref(queuepad); } + g_free(mediaType); } void @@ -373,7 +591,12 @@ addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) nhlog::ui()->debug("WebRTC: received incoming stream"); GstElement *decodebin = gst_element_factory_make("decodebin", nullptr); + // hardware decoding needs investigation; eg rendering fails if vaapi plugin installed + g_object_set(decodebin, "force-sw-decoders", TRUE, nullptr); g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe); +#if GST_CHECK_VERSION(1, 18, 0) + g_signal_connect(decodebin, "element-added", G_CALLBACK(setWaitForKeyFrame), pipe); +#endif gst_bin_add(GST_BIN(pipe), decodebin); 
gst_element_sync_state_with_parent(decodebin); GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink"); @@ -382,51 +605,134 @@ addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) gst_object_unref(sinkpad); } -std::string::const_iterator -findName(const std::string &sdp, const std::string &name) +bool +strstr_(std::string_view str1, std::string_view str2) { - return std::search( - sdp.cbegin(), - sdp.cend(), - name.cbegin(), - name.cend(), - [](unsigned char c1, unsigned char c2) { return std::tolower(c1) == std::tolower(c2); }); + return std::search(str1.cbegin(), + str1.cend(), + str2.cbegin(), + str2.cend(), + [](unsigned char c1, unsigned char c2) { + return std::tolower(c1) == std::tolower(c2); + }) != str1.cend(); } -int -getPayloadType(const std::string &sdp, const std::string &name) +bool +getMediaAttributes(const GstSDPMessage *sdp, + const char *mediaType, + const char *encoding, + int &payloadType, + bool &recvOnly) { - // eg a=rtpmap:111 opus/48000/2 - auto e = findName(sdp, name); - if (e == sdp.cend()) { - nhlog::ui()->error("WebRTC: remote offer - " + name + " attribute missing"); - return -1; + payloadType = -1; + recvOnly = false; + for (guint mlineIndex = 0; mlineIndex < gst_sdp_message_medias_len(sdp); ++mlineIndex) { + const GstSDPMedia *media = gst_sdp_message_get_media(sdp, mlineIndex); + if (!std::strcmp(gst_sdp_media_get_media(media), mediaType)) { + recvOnly = gst_sdp_media_get_attribute_val(media, "recvonly") != nullptr; + const gchar *rtpval = nullptr; + for (guint n = 0; n == 0 || rtpval; ++n) { + rtpval = gst_sdp_media_get_attribute_val_n(media, "rtpmap", n); + if (rtpval && strstr_(rtpval, encoding)) { + payloadType = std::atoi(rtpval); + break; + } + } + return true; + } } + return false; +} - if (auto s = sdp.rfind(':', e - sdp.cbegin()); s == std::string::npos) { - nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name + - " payload type"); - return -1; - } else { - ++s; - try { - 
return std::stoi(std::string(sdp, s, e - sdp.cbegin() - s)); - } catch (...) { - nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name + - " payload type"); +template +std::vector +deviceNames(T &sources, const std::string &defaultDevice) +{ + std::vector ret; + ret.reserve(sources.size()); + std::transform(sources.cbegin(), + sources.cend(), + std::back_inserter(ret), + [](const auto &s) { return s.name; }); + + // move default device to top of the list + if (auto it = std::find_if(ret.begin(), + ret.end(), + [&defaultDevice](const auto &s) { return s == defaultDevice; }); + it != ret.end()) + std::swap(ret.front(), *it); + + return ret; +} + +} + +bool +WebRTCSession::havePlugins(bool isVideo, std::string *errorMessage) +{ + if (!initialised_ && !init(errorMessage)) + return false; + if (!isVideo && haveVoicePlugins_) + return true; + if (isVideo && haveVideoPlugins_) + return true; + + const gchar *voicePlugins[] = {"audioconvert", + "audioresample", + "autodetect", + "dtls", + "nice", + "opus", + "playback", + "rtpmanager", + "srtp", + "volume", + "webrtc", + nullptr}; + + const gchar *videoPlugins[] = {"opengl", "qmlgl", "rtp", "videoconvert", "vpx", nullptr}; + + std::string strError("Missing GStreamer plugins: "); + const gchar **needed = isVideo ? videoPlugins : voicePlugins; + bool &havePlugins = isVideo ? 
haveVideoPlugins_ : haveVoicePlugins_; + havePlugins = true; + GstRegistry *registry = gst_registry_get(); + for (guint i = 0; i < g_strv_length((gchar **)needed); i++) { + GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]); + if (!plugin) { + havePlugins = false; + strError += std::string(needed[i]) + " "; + continue; } + gst_object_unref(plugin); } - return -1; -} + if (!havePlugins) { + nhlog::ui()->error(strError); + if (errorMessage) + *errorMessage = strError; + return false; + } + + if (isVideo) { + // load qmlglsink to register GStreamer's GstGLVideoItem QML type + GstElement *qmlglsink = gst_element_factory_make("qmlglsink", nullptr); + gst_object_unref(qmlglsink); + } + return true; } bool -WebRTCSession::createOffer() +WebRTCSession::createOffer(bool isVideo) { - isoffering_ = true; + isOffering_ = true; + isVideo_ = isVideo; + isRemoteVideoRecvOnly_ = false; + videoItem_ = nullptr; + haveAudioStream_ = false; + haveVideoStream_ = false; localsdp_.clear(); localcandidates_.clear(); - return startPipeline(111); // a dynamic opus payload type + return startPipeline(111, isVideo ? 
96 : -1); // dynamic payload types } bool @@ -436,19 +742,42 @@ WebRTCSession::acceptOffer(const std::string &sdp) if (state_ != State::DISCONNECTED) return false; - isoffering_ = false; + isOffering_ = false; + isRemoteVideoRecvOnly_ = false; + videoItem_ = nullptr; + haveAudioStream_ = false; + haveVideoStream_ = false; localsdp_.clear(); localcandidates_.clear(); - int opusPayloadType = getPayloadType(sdp, "opus"); - if (opusPayloadType == -1) - return false; - GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER); if (!offer) return false; - if (!startPipeline(opusPayloadType)) { + int opusPayloadType; + bool recvOnly; + if (getMediaAttributes(offer->sdp, "audio", "opus", opusPayloadType, recvOnly)) { + if (opusPayloadType == -1) { + nhlog::ui()->error("WebRTC: remote audio offer - no opus encoding"); + gst_webrtc_session_description_free(offer); + return false; + } + } else { + nhlog::ui()->error("WebRTC: remote offer - no audio media"); + gst_webrtc_session_description_free(offer); + return false; + } + + int vp8PayloadType; + isVideo_ = + getMediaAttributes(offer->sdp, "video", "vp8", vp8PayloadType, isRemoteVideoRecvOnly_); + if (isVideo_ && vp8PayloadType == -1) { + nhlog::ui()->error("WebRTC: remote video offer - no vp8 encoding"); + gst_webrtc_session_description_free(offer); + return false; + } + + if (!startPipeline(opusPayloadType, vp8PayloadType)) { gst_webrtc_session_description_free(offer); return false; } @@ -473,6 +802,13 @@ WebRTCSession::acceptAnswer(const std::string &sdp) return false; } + if (isVideo_) { + int unused; + if (!getMediaAttributes( + answer->sdp, "video", "vp8", unused, isRemoteVideoRecvOnly_)) + isRemoteVideoRecvOnly_ = true; + } + g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr); gst_webrtc_session_description_free(answer); return true; @@ -497,21 +833,23 @@ WebRTCSession::acceptICECandidates( } bool -WebRTCSession::startPipeline(int opusPayloadType) 
+WebRTCSession::startPipeline(int opusPayloadType, int vp8PayloadType) { if (state_ != State::DISCONNECTED) return false; emit stateChanged(State::INITIATING); - if (!createPipeline(opusPayloadType)) + if (!createPipeline(opusPayloadType, vp8PayloadType)) { + end(); return false; + } webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin"); - if (!stunServer_.empty()) { - nhlog::ui()->info("WebRTC: setting STUN server: {}", stunServer_); - g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr); + if (settings_->useStunServer()) { + nhlog::ui()->info("WebRTC: setting STUN server: {}", STUN_SERVER); + g_object_set(webrtc_, "stun-server", STUN_SERVER, nullptr); } for (const auto &uri : turnServers_) { @@ -523,7 +861,7 @@ WebRTCSession::startPipeline(int opusPayloadType) nhlog::ui()->warn("WebRTC: no TURN server provided"); // generate the offer when the pipeline goes to PLAYING - if (isoffering_) + if (isOffering_) g_signal_connect( webrtc_, "on-negotiation-needed", G_CALLBACK(::createOffer), nullptr); @@ -562,20 +900,19 @@ WebRTCSession::startPipeline(int opusPayloadType) } bool -WebRTCSession::createPipeline(int opusPayloadType) +WebRTCSession::createPipeline(int opusPayloadType, int vp8PayloadType) { - if (audioSources_.empty()) { - nhlog::ui()->error("WebRTC: no audio sources"); - return false; - } - - if (audioSourceIndex_ < 0 || (size_t)audioSourceIndex_ >= audioSources_.size()) { - nhlog::ui()->error("WebRTC: invalid audio source index"); + auto it = std::find_if(audioSources_.cbegin(), audioSources_.cend(), [this](const auto &s) { + return s.name == settings_->microphone().toStdString(); + }); + if (it == audioSources_.cend()) { + nhlog::ui()->error("WebRTC: unknown microphone: {}", + settings_->microphone().toStdString()); return false; } + nhlog::ui()->debug("WebRTC: microphone: {}", it->name); - GstElement *source = - gst_device_create_element(audioSources_[audioSourceIndex_].second, nullptr); + GstElement *source = 
gst_device_create_element(it->device, nullptr); GstElement *volume = gst_element_factory_make("volume", "srclevel"); GstElement *convert = gst_element_factory_make("audioconvert", nullptr); GstElement *resample = gst_element_factory_make("audioresample", nullptr); @@ -627,10 +964,103 @@ WebRTCSession::createPipeline(int opusPayloadType) capsfilter, webrtcbin, nullptr)) { - nhlog::ui()->error("WebRTC: failed to link pipeline elements"); - end(); + nhlog::ui()->error("WebRTC: failed to link audio pipeline elements"); + return false; + } + return isVideo_ ? addVideoPipeline(vp8PayloadType) : true; +} + +bool +WebRTCSession::addVideoPipeline(int vp8PayloadType) +{ + // allow incoming video calls despite localUser having no webcam + if (videoSources_.empty()) + return !isOffering_; + + auto it = std::find_if(videoSources_.cbegin(), videoSources_.cend(), [this](const auto &s) { + return s.name == settings_->camera().toStdString(); + }); + if (it == videoSources_.cend()) { + nhlog::ui()->error("WebRTC: unknown camera: {}", settings_->camera().toStdString()); + return false; + } + + std::string resSetting = settings_->cameraResolution().toStdString(); + const std::string &res = resSetting.empty() ? it->caps.front().resolution : resSetting; + std::string frSetting = settings_->cameraFrameRate().toStdString(); + const std::string &fr = frSetting.empty() ? 
it->caps.front().frameRates.front() : frSetting; + auto resolution = tokenise(res, 'x'); + auto frameRate = tokenise(fr, '/'); + nhlog::ui()->debug("WebRTC: camera: {}", it->name); + nhlog::ui()->debug("WebRTC: camera resolution: {}x{}", resolution.first, resolution.second); + nhlog::ui()->debug("WebRTC: camera frame rate: {}/{}", frameRate.first, frameRate.second); + + GstElement *source = gst_device_create_element(it->device, nullptr); + GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr); + GstCaps *caps = gst_caps_new_simple("video/x-raw", + "width", + G_TYPE_INT, + resolution.first, + "height", + G_TYPE_INT, + resolution.second, + "framerate", + GST_TYPE_FRACTION, + frameRate.first, + frameRate.second, + nullptr); + g_object_set(capsfilter, "caps", caps, nullptr); + gst_caps_unref(caps); + + GstElement *convert = gst_element_factory_make("videoconvert", nullptr); + GstElement *queue1 = gst_element_factory_make("queue", nullptr); + GstElement *vp8enc = gst_element_factory_make("vp8enc", nullptr); + g_object_set(vp8enc, "deadline", 1, nullptr); + g_object_set(vp8enc, "error-resilient", 1, nullptr); + + GstElement *rtp = gst_element_factory_make("rtpvp8pay", nullptr); + GstElement *queue2 = gst_element_factory_make("queue", nullptr); + GstElement *rtpcapsfilter = gst_element_factory_make("capsfilter", nullptr); + GstCaps *rtpcaps = gst_caps_new_simple("application/x-rtp", + "media", + G_TYPE_STRING, + "video", + "encoding-name", + G_TYPE_STRING, + "VP8", + "payload", + G_TYPE_INT, + vp8PayloadType, + nullptr); + g_object_set(rtpcapsfilter, "caps", rtpcaps, nullptr); + gst_caps_unref(rtpcaps); + + gst_bin_add_many(GST_BIN(pipe_), + source, + capsfilter, + convert, + queue1, + vp8enc, + rtp, + queue2, + rtpcapsfilter, + nullptr); + + GstElement *webrtcbin = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin"); + if (!gst_element_link_many(source, + capsfilter, + convert, + queue1, + vp8enc, + rtp, + queue2, + rtpcapsfilter, + webrtcbin, + nullptr)) 
{ + nhlog::ui()->error("WebRTC: failed to link video pipeline elements"); return false; } + gst_object_unref(webrtcbin); return true; } @@ -665,6 +1095,7 @@ void WebRTCSession::end() { nhlog::ui()->debug("WebRTC: ending session"); + keyFrameRequestData_ = KeyFrameRequestData{}; if (pipe_) { gst_element_set_state(pipe_, GST_STATE_NULL); gst_object_unref(pipe_); @@ -672,7 +1103,11 @@ WebRTCSession::end() g_source_remove(busWatchId_); busWatchId_ = 0; } - webrtc_ = nullptr; + webrtc_ = nullptr; + isVideo_ = false; + isOffering_ = false; + isRemoteVideoRecvOnly_ = false; + videoItem_ = nullptr; if (state_ != State::DISCONNECTED) emit stateChanged(State::DISCONNECTED); } @@ -690,6 +1125,9 @@ WebRTCSession::startDeviceMonitor() GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw"); gst_device_monitor_add_filter(monitor, "Audio/Source", caps); gst_caps_unref(caps); + caps = gst_caps_new_empty_simple("video/x-raw"); + gst_device_monitor_add_filter(monitor, "Video/Source", caps); + gst_caps_unref(caps); GstBus *bus = gst_device_monitor_get_bus(monitor); gst_bus_add_watch(bus, newBusMessage, nullptr); @@ -700,12 +1138,14 @@ WebRTCSession::startDeviceMonitor() } } } - -#else +#endif void WebRTCSession::refreshDevices() { +#if GST_CHECK_VERSION(1, 18, 0) + return; +#else if (!initialised_) return; @@ -715,79 +1155,97 @@ WebRTCSession::refreshDevices() GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw"); gst_device_monitor_add_filter(monitor, "Audio/Source", caps); gst_caps_unref(caps); + caps = gst_caps_new_empty_simple("video/x-raw"); + gst_device_monitor_add_filter(monitor, "Video/Source", caps); + gst_caps_unref(caps); } - std::for_each(audioSources_.begin(), audioSources_.end(), [](const auto &s) { - gst_object_unref(s.second); - }); - audioSources_.clear(); + auto clearDevices = [](auto &sources) { + std::for_each( + sources.begin(), sources.end(), [](auto &s) { gst_object_unref(s.device); }); + sources.clear(); + }; + clearDevices(audioSources_); + 
clearDevices(videoSources_); + GList *devices = gst_device_monitor_get_devices(monitor); if (devices) { - audioSources_.reserve(g_list_length(devices)); for (GList *l = devices; l != nullptr; l = l->next) addDevice(GST_DEVICE_CAST(l->data)); g_list_free(devices); } -} #endif +} std::vector -WebRTCSession::getAudioSourceNames(const std::string &defaultDevice) +WebRTCSession::getDeviceNames(bool isVideo, const std::string &defaultDevice) const { -#if !GST_CHECK_VERSION(1, 18, 0) - refreshDevices(); -#endif - // move default device to top of the list - if (auto it = std::find_if(audioSources_.begin(), - audioSources_.end(), - [&](const auto &s) { return s.first == defaultDevice; }); - it != audioSources_.end()) - std::swap(audioSources_.front(), *it); + return isVideo ? deviceNames(videoSources_, defaultDevice) + : deviceNames(audioSources_, defaultDevice); +} +std::vector +WebRTCSession::getResolutions(const std::string &cameraName) const +{ std::vector ret; - ret.reserve(audioSources_.size()); - std::for_each(audioSources_.cbegin(), audioSources_.cend(), [&](const auto &s) { - ret.push_back(s.first); - }); + if (auto it = std::find_if(videoSources_.cbegin(), + videoSources_.cend(), + [&cameraName](const auto &s) { return s.name == cameraName; }); + it != videoSources_.cend()) { + ret.reserve(it->caps.size()); + for (const auto &c : it->caps) + ret.push_back(c.resolution); + } return ret; } -#else - -bool -WebRTCSession::createOffer() +std::vector +WebRTCSession::getFrameRates(const std::string &cameraName, const std::string &resolution) const { - return false; + if (auto i = std::find_if(videoSources_.cbegin(), + videoSources_.cend(), + [&](const auto &s) { return s.name == cameraName; }); + i != videoSources_.cend()) { + if (auto j = + std::find_if(i->caps.cbegin(), + i->caps.cend(), + [&](const auto &s) { return s.resolution == resolution; }); + j != i->caps.cend()) + return j->frameRates; + } + return {}; } +#else + bool -WebRTCSession::acceptOffer(const 
std::string &) +WebRTCSession::havePlugins(bool, std::string *) { return false; } bool -WebRTCSession::acceptAnswer(const std::string &) +WebRTCSession::createOffer(bool) { return false; } -void -WebRTCSession::acceptICECandidates(const std::vector &) -{} - bool -WebRTCSession::startPipeline(int) +WebRTCSession::acceptOffer(const std::string &) { return false; } bool -WebRTCSession::createPipeline(int) +WebRTCSession::acceptAnswer(const std::string &) { return false; } +void +WebRTCSession::acceptICECandidates(const std::vector &) +{} + bool WebRTCSession::isMicMuted() const { @@ -808,14 +1266,21 @@ void WebRTCSession::refreshDevices() {} -void -WebRTCSession::startDeviceMonitor() -{} +std::vector +WebRTCSession::getDeviceNames(bool, const std::string &) const +{ + return {}; +} std::vector -WebRTCSession::getAudioSourceNames(const std::string &) +WebRTCSession::getResolutions(const std::string &) const { return {}; } +std::vector +WebRTCSession::getFrameRates(const std::string &, const std::string &) const +{ + return {}; +} #endif -- cgit 1.5.1 From b1300aff46625cce33f0244a173e09cba985dc3a Mon Sep 17 00:00:00 2001 From: trilene Date: Tue, 27 Oct 2020 17:26:46 -0400 Subject: Fix crash on exit --- src/CallManager.cpp | 7 +------ src/CallManager.h | 5 +---- src/ChatPage.cpp | 1 - src/WebRTCSession.cpp | 34 ++++++++++++++++++++-------------- src/WebRTCSession.h | 4 ---- src/dialogs/AcceptCall.cpp | 14 ++++++++------ src/dialogs/AcceptCall.h | 3 --- 7 files changed, 30 insertions(+), 38 deletions(-) (limited to 'src/WebRTCSession.cpp') diff --git a/src/CallManager.cpp b/src/CallManager.cpp index 4cd98a9f..a376a607 100644 --- a/src/CallManager.cpp +++ b/src/CallManager.cpp @@ -12,7 +12,6 @@ #include "Logging.h" #include "MainWindow.h" #include "MatrixClient.h" -#include "UserSettingsPage.h" #include "WebRTCSession.h" #include "dialogs/AcceptCall.h" @@ -30,18 +29,15 @@ std::vector getTurnURIs(const mtx::responses::TurnServer &turnServer); } 
-CallManager::CallManager(QSharedPointer userSettings) +CallManager::CallManager() : QObject() , session_(WebRTCSession::instance()) , turnServerTimer_(this) - , settings_(userSettings) { qRegisterMetaType>(); qRegisterMetaType(); qRegisterMetaType(); - session_.setSettings(userSettings); - connect( &session_, &WebRTCSession::offerCreated, @@ -265,7 +261,6 @@ CallManager::handleEvent(const RoomEvent &callInviteEvent) caller.display_name, QString::fromStdString(roomInfo.name), QString::fromStdString(roomInfo.avatar_url), - settings_, isVideo, MainWindow::instance()); connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent, isVideo]() { diff --git a/src/CallManager.h b/src/CallManager.h index c3afa155..f0e46b4b 100644 --- a/src/CallManager.h +++ b/src/CallManager.h @@ -5,7 +5,6 @@ #include #include -#include #include #include @@ -16,7 +15,6 @@ namespace mtx::responses { struct TurnServer; } -class UserSettings; class WebRTCSession; class CallManager : public QObject @@ -24,7 +22,7 @@ class CallManager : public QObject Q_OBJECT public: - CallManager(QSharedPointer); + CallManager(); void sendInvite(const QString &roomid, bool isVideo); void hangUp( @@ -59,7 +57,6 @@ private: std::vector remoteICECandidates_; std::vector turnURIs_; QTimer turnServerTimer_; - QSharedPointer settings_; QMediaPlayer player_; template diff --git a/src/ChatPage.cpp b/src/ChatPage.cpp index c86c6128..4d46b8c6 100644 --- a/src/ChatPage.cpp +++ b/src/ChatPage.cpp @@ -69,7 +69,6 @@ ChatPage::ChatPage(QSharedPointer userSettings, QWidget *parent) , isConnected_(true) , userSettings_{userSettings} , notificationsManager(this) - , callManager_(userSettings) { setObjectName("chatPage"); diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp index 177bdf7a..40a9753e 100644 --- a/src/WebRTCSession.cpp +++ b/src/WebRTCSession.cpp @@ -9,6 +9,7 @@ #include #include +#include "ChatPage.h" #include "Logging.h" #include "UserSettingsPage.h" #include "WebRTCSession.h" @@ -847,7 +848,7 
@@ WebRTCSession::startPipeline(int opusPayloadType, int vp8PayloadType) webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin"); - if (settings_->useStunServer()) { + if (ChatPage::instance()->userSettings()->useStunServer()) { nhlog::ui()->info("WebRTC: setting STUN server: {}", STUN_SERVER); g_object_set(webrtc_, "stun-server", STUN_SERVER, nullptr); } @@ -902,15 +903,17 @@ WebRTCSession::startPipeline(int opusPayloadType, int vp8PayloadType) bool WebRTCSession::createPipeline(int opusPayloadType, int vp8PayloadType) { - auto it = std::find_if(audioSources_.cbegin(), audioSources_.cend(), [this](const auto &s) { - return s.name == settings_->microphone().toStdString(); - }); + std::string microphoneSetting = + ChatPage::instance()->userSettings()->microphone().toStdString(); + auto it = + std::find_if(audioSources_.cbegin(), + audioSources_.cend(), + [&microphoneSetting](const auto &s) { return s.name == microphoneSetting; }); if (it == audioSources_.cend()) { - nhlog::ui()->error("WebRTC: unknown microphone: {}", - settings_->microphone().toStdString()); + nhlog::ui()->error("WebRTC: unknown microphone: {}", microphoneSetting); return false; } - nhlog::ui()->debug("WebRTC: microphone: {}", it->name); + nhlog::ui()->debug("WebRTC: microphone: {}", microphoneSetting); GstElement *source = gst_device_create_element(it->device, nullptr); GstElement *volume = gst_element_factory_make("volume", "srclevel"); @@ -977,21 +980,24 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType) if (videoSources_.empty()) return !isOffering_; - auto it = std::find_if(videoSources_.cbegin(), videoSources_.cend(), [this](const auto &s) { - return s.name == settings_->camera().toStdString(); - }); + std::string cameraSetting = ChatPage::instance()->userSettings()->camera().toStdString(); + auto it = std::find_if(videoSources_.cbegin(), + videoSources_.cend(), + [&cameraSetting](const auto &s) { return s.name == cameraSetting; }); if (it == videoSources_.cend()) { - 
nhlog::ui()->error("WebRTC: unknown camera: {}", settings_->camera().toStdString()); + nhlog::ui()->error("WebRTC: unknown camera: {}", cameraSetting); return false; } - std::string resSetting = settings_->cameraResolution().toStdString(); + std::string resSetting = + ChatPage::instance()->userSettings()->cameraResolution().toStdString(); const std::string &res = resSetting.empty() ? it->caps.front().resolution : resSetting; - std::string frSetting = settings_->cameraFrameRate().toStdString(); + std::string frSetting = + ChatPage::instance()->userSettings()->cameraFrameRate().toStdString(); const std::string &fr = frSetting.empty() ? it->caps.front().frameRates.front() : frSetting; auto resolution = tokenise(res, 'x'); auto frameRate = tokenise(fr, '/'); - nhlog::ui()->debug("WebRTC: camera: {}", it->name); + nhlog::ui()->debug("WebRTC: camera: {}", cameraSetting); nhlog::ui()->debug("WebRTC: camera resolution: {}x{}", resolution.first, resolution.second); nhlog::ui()->debug("WebRTC: camera frame rate: {}/{}", frameRate.first, frameRate.second); diff --git a/src/WebRTCSession.h b/src/WebRTCSession.h index d5e195a8..9c7778e7 100644 --- a/src/WebRTCSession.h +++ b/src/WebRTCSession.h @@ -4,13 +4,11 @@ #include #include -#include #include "mtx/events/voip.hpp" typedef struct _GstElement GstElement; class QQuickItem; -class UserSettings; namespace webrtc { Q_NAMESPACE @@ -57,7 +55,6 @@ public: bool toggleMicMute(); void end(); - void setSettings(QSharedPointer settings) { settings_ = settings; } void setTurnServers(const std::vector &uris) { turnServers_ = uris; } void refreshDevices(); @@ -95,7 +92,6 @@ private: GstElement *pipe_ = nullptr; GstElement *webrtc_ = nullptr; unsigned int busWatchId_ = 0; - QSharedPointer settings_; std::vector turnServers_; bool init(std::string *errorMessage = nullptr); diff --git a/src/dialogs/AcceptCall.cpp b/src/dialogs/AcceptCall.cpp index 8323e9ff..3d25ad82 100644 --- a/src/dialogs/AcceptCall.cpp +++ b/src/dialogs/AcceptCall.cpp @@ 
-18,7 +18,6 @@ AcceptCall::AcceptCall(const QString &caller, const QString &displayName, const QString &roomName, const QString &avatarUrl, - QSharedPointer settings, bool isVideo, QWidget *parent) : QWidget(parent) @@ -35,8 +34,10 @@ AcceptCall::AcceptCall(const QString &caller, emit close(); return; } + session->refreshDevices(); - microphones_ = session->getDeviceNames(false, settings->microphone().toStdString()); + microphones_ = session->getDeviceNames( + false, ChatPage::instance()->userSettings()->microphone().toStdString()); if (microphones_.empty()) { emit ChatPage::instance()->showNotification( tr("Incoming call: No microphone found.")); @@ -44,7 +45,8 @@ AcceptCall::AcceptCall(const QString &caller, return; } if (isVideo) - cameras_ = session->getDeviceNames(true, settings->camera().toStdString()); + cameras_ = session->getDeviceNames( + true, ChatPage::instance()->userSettings()->camera().toStdString()); setAutoFillBackground(true); setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); @@ -131,11 +133,11 @@ AcceptCall::AcceptCall(const QString &caller, if (cameraCombo_) layout->addWidget(cameraCombo_); - connect(acceptBtn_, &QPushButton::clicked, this, [this, settings, session]() { - settings->setMicrophone( + connect(acceptBtn_, &QPushButton::clicked, this, [this]() { + ChatPage::instance()->userSettings()->setMicrophone( QString::fromStdString(microphones_[microphoneCombo_->currentIndex()])); if (cameraCombo_) { - settings->setCamera( + ChatPage::instance()->userSettings()->setCamera( QString::fromStdString(cameras_[cameraCombo_->currentIndex()])); } emit accept(); diff --git a/src/dialogs/AcceptCall.h b/src/dialogs/AcceptCall.h index 00616c53..76ca7ae1 100644 --- a/src/dialogs/AcceptCall.h +++ b/src/dialogs/AcceptCall.h @@ -3,13 +3,11 @@ #include #include -#include #include class QComboBox; class QPushButton; class QString; -class UserSettings; namespace dialogs { @@ -22,7 +20,6 @@ public: const QString &displayName, const QString &roomName, const 
QString &avatarUrl, - QSharedPointer settings, bool isVideo, QWidget *parent = nullptr); -- cgit 1.5.1 From c370dd831312dc957d46e76ccd8f976888a6f7d2 Mon Sep 17 00:00:00 2001 From: trilene Date: Thu, 29 Oct 2020 18:57:09 -0400 Subject: Avoid ugly error if pipeline creation fails --- src/WebRTCSession.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) (limited to 'src/WebRTCSession.cpp') diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp index 40a9753e..d4e68b04 100644 --- a/src/WebRTCSession.cpp +++ b/src/WebRTCSession.cpp @@ -1106,8 +1106,10 @@ WebRTCSession::end() gst_element_set_state(pipe_, GST_STATE_NULL); gst_object_unref(pipe_); pipe_ = nullptr; - g_source_remove(busWatchId_); - busWatchId_ = 0; + if (busWatchId_) { + g_source_remove(busWatchId_); + busWatchId_ = 0; + } } webrtc_ = nullptr; isVideo_ = false; -- cgit 1.5.1 From 51a559ab4a102271a085abd7db4ce83c17c95cc9 Mon Sep 17 00:00:00 2001 From: trilene Date: Thu, 29 Oct 2020 19:17:10 -0400 Subject: Document better rtp payload types --- src/WebRTCSession.cpp | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) (limited to 'src/WebRTCSession.cpp') diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp index d4e68b04..a5aa895d 100644 --- a/src/WebRTCSession.cpp +++ b/src/WebRTCSession.cpp @@ -607,7 +607,7 @@ addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) } bool -strstr_(std::string_view str1, std::string_view str2) +contains(std::string_view str1, std::string_view str2) { return std::search(str1.cbegin(), str1.cend(), @@ -634,7 +634,7 @@ getMediaAttributes(const GstSDPMessage *sdp, const gchar *rtpval = nullptr; for (guint n = 0; n == 0 || rtpval; ++n) { rtpval = gst_sdp_media_get_attribute_val_n(media, "rtpmap", n); - if (rtpval && strstr_(rtpval, encoding)) { + if (rtpval && contains(rtpval, encoding)) { payloadType = std::atoi(rtpval); break; } @@ -733,7 +733,13 @@ WebRTCSession::createOffer(bool isVideo) haveVideoStream_ = false; 
localsdp_.clear(); localcandidates_.clear(); - return startPipeline(111, isVideo ? 96 : -1); // dynamic payload types + + // opus and vp8 rtp payload types must be defined dynamically + // therefore from the range [96-127] + // see for example https://tools.ietf.org/html/rfc7587 + constexpr int opusPayloadType = 111; + constexpr int vp8PayloadType = 96; + return startPipeline(opusPayloadType, vp8PayloadType); } bool -- cgit 1.5.1 From 2838061f107861ff6a60725e326021869d5ae0a8 Mon Sep 17 00:00:00 2001 From: trilene Date: Thu, 29 Oct 2020 19:42:05 -0400 Subject: Avoid std::from_chars for now --- src/WebRTCSession.cpp | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) (limited to 'src/WebRTCSession.cpp') diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp index a5aa895d..eb00f009 100644 --- a/src/WebRTCSession.cpp +++ b/src/WebRTCSession.cpp @@ -2,7 +2,6 @@ #include #include #include -#include #include #include #include @@ -129,10 +128,9 @@ std::pair tokenise(std::string_view str, char delim) { std::pair ret; + ret.first = std::atoi(str.data()); auto pos = str.find_first_of(delim); - auto s = str.data(); - std::from_chars(s, s + pos, ret.first); - std::from_chars(s + pos + 1, s + str.size(), ret.second); + ret.second = std::atoi(str.data() + pos + 1); return ret; } -- cgit 1.5.1 From b260e7eeeeb6f46deb4df1b031b1c4b46ce7f3e6 Mon Sep 17 00:00:00 2001 From: trilene Date: Thu, 29 Oct 2020 20:13:34 -0400 Subject: Unused variable --- src/WebRTCSession.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'src/WebRTCSession.cpp') diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp index eb00f009..890bb866 100644 --- a/src/WebRTCSession.cpp +++ b/src/WebRTCSession.cpp @@ -128,8 +128,8 @@ std::pair tokenise(std::string_view str, char delim) { std::pair ret; - ret.first = std::atoi(str.data()); - auto pos = str.find_first_of(delim); + ret.first = std::atoi(str.data()); + auto pos = str.find_first_of(delim); ret.second = 
std::atoi(str.data() + pos + 1); return ret; } @@ -594,7 +594,7 @@ addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) g_object_set(decodebin, "force-sw-decoders", TRUE, nullptr); g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe); #if GST_CHECK_VERSION(1, 18, 0) - g_signal_connect(decodebin, "element-added", G_CALLBACK(setWaitForKeyFrame), pipe); + g_signal_connect(decodebin, "element-added", G_CALLBACK(setWaitForKeyFrame), nullptr); #endif gst_bin_add(GST_BIN(pipe), decodebin); gst_element_sync_state_with_parent(decodebin); @@ -736,7 +736,7 @@ WebRTCSession::createOffer(bool isVideo) // therefore from the range [96-127] // see for example https://tools.ietf.org/html/rfc7587 constexpr int opusPayloadType = 111; - constexpr int vp8PayloadType = 96; + constexpr int vp8PayloadType = 96; return startPipeline(opusPayloadType, vp8PayloadType); } -- cgit 1.5.1