diff --git a/src/ActiveCallBar.cpp b/src/ActiveCallBar.cpp
index 7f07982a..549b97b9 100644
--- a/src/ActiveCallBar.cpp
+++ b/src/ActiveCallBar.cpp
@@ -33,8 +33,7 @@ ActiveCallBar::ActiveCallBar(QWidget *parent)
layout_ = new QHBoxLayout(this);
layout_->setSpacing(widgetMargin);
- layout_->setContentsMargins(
- 2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin);
+ layout_->setContentsMargins(2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin);
QFont labelFont;
labelFont.setPointSizeF(labelFont.pointSizeF() * 1.1);
@@ -56,9 +55,9 @@ ActiveCallBar::ActiveCallBar(QWidget *parent)
setMuteIcon(false);
muteBtn_->setFixedSize(buttonSize_, buttonSize_);
muteBtn_->setCornerRadius(buttonSize_ / 2);
- connect(muteBtn_, &FlatButton::clicked, this, [this](){
+ connect(muteBtn_, &FlatButton::clicked, this, [this]() {
if (WebRTCSession::instance().toggleMuteAudioSrc(muted_))
- setMuteIcon(muted_);
+ setMuteIcon(muted_);
});
layout_->addWidget(avatar_, 0, Qt::AlignLeft);
@@ -70,21 +69,21 @@ ActiveCallBar::ActiveCallBar(QWidget *parent)
layout_->addSpacing(18);
timer_ = new QTimer(this);
- connect(timer_, &QTimer::timeout, this,
- [this](){
- auto seconds = QDateTime::currentSecsSinceEpoch() - callStartTime_;
- int s = seconds % 60;
- int m = (seconds / 60) % 60;
- int h = seconds / 3600;
- char buf[12];
- if (h)
- snprintf(buf, sizeof(buf), "%.2d:%.2d:%.2d", h, m, s);
- else
- snprintf(buf, sizeof(buf), "%.2d:%.2d", m, s);
- durationLabel_->setText(buf);
+ connect(timer_, &QTimer::timeout, this, [this]() {
+ auto seconds = QDateTime::currentSecsSinceEpoch() - callStartTime_;
+ int s = seconds % 60;
+ int m = (seconds / 60) % 60;
+ int h = seconds / 3600;
+ char buf[12];
+ if (h)
+ snprintf(buf, sizeof(buf), "%.2d:%.2d:%.2d", h, m, s);
+ else
+ snprintf(buf, sizeof(buf), "%.2d:%.2d", m, s);
+ durationLabel_->setText(buf);
});
- connect(&WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &ActiveCallBar::update);
+ connect(
+ &WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &ActiveCallBar::update);
}
void
@@ -103,61 +102,59 @@ ActiveCallBar::setMuteIcon(bool muted)
}
void
-ActiveCallBar::setCallParty(
- const QString &userid,
- const QString &displayName,
- const QString &roomName,
- const QString &avatarUrl)
+ActiveCallBar::setCallParty(const QString &userid,
+ const QString &displayName,
+ const QString &roomName,
+ const QString &avatarUrl)
{
- callPartyLabel_->setText(" " +
- (displayName.isEmpty() ? userid : displayName) + " ");
+ callPartyLabel_->setText(" " + (displayName.isEmpty() ? userid : displayName) + " ");
if (!avatarUrl.isEmpty())
- avatar_->setImage(avatarUrl);
+ avatar_->setImage(avatarUrl);
else
- avatar_->setLetter(utils::firstChar(roomName));
+ avatar_->setLetter(utils::firstChar(roomName));
}
void
ActiveCallBar::update(WebRTCSession::State state)
{
switch (state) {
- case WebRTCSession::State::INITIATING:
- show();
- stateLabel_->setText("Initiating call...");
- break;
- case WebRTCSession::State::INITIATED:
- show();
- stateLabel_->setText("Call initiated...");
- break;
- case WebRTCSession::State::OFFERSENT:
- show();
- stateLabel_->setText("Calling...");
- break;
- case WebRTCSession::State::CONNECTING:
- show();
- stateLabel_->setText("Connecting...");
- break;
- case WebRTCSession::State::CONNECTED:
- show();
- callStartTime_ = QDateTime::currentSecsSinceEpoch();
- timer_->start(1000);
- stateLabel_->setPixmap(QIcon(":/icons/icons/ui/place-call.png").
- pixmap(QSize(buttonSize_, buttonSize_)));
- durationLabel_->setText("00:00");
- durationLabel_->show();
- break;
- case WebRTCSession::State::ICEFAILED:
- case WebRTCSession::State::DISCONNECTED:
- hide();
- timer_->stop();
- callPartyLabel_->setText(QString());
- stateLabel_->setText(QString());
- durationLabel_->setText(QString());
- durationLabel_->hide();
- setMuteIcon(false);
- break;
- default:
- break;
+ case WebRTCSession::State::INITIATING:
+ show();
+ stateLabel_->setText("Initiating call...");
+ break;
+ case WebRTCSession::State::INITIATED:
+ show();
+ stateLabel_->setText("Call initiated...");
+ break;
+ case WebRTCSession::State::OFFERSENT:
+ show();
+ stateLabel_->setText("Calling...");
+ break;
+ case WebRTCSession::State::CONNECTING:
+ show();
+ stateLabel_->setText("Connecting...");
+ break;
+ case WebRTCSession::State::CONNECTED:
+ show();
+ callStartTime_ = QDateTime::currentSecsSinceEpoch();
+ timer_->start(1000);
+ stateLabel_->setPixmap(
+ QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(buttonSize_, buttonSize_)));
+ durationLabel_->setText("00:00");
+ durationLabel_->show();
+ break;
+ case WebRTCSession::State::ICEFAILED:
+ case WebRTCSession::State::DISCONNECTED:
+ hide();
+ timer_->stop();
+ callPartyLabel_->setText(QString());
+ stateLabel_->setText(QString());
+ durationLabel_->setText(QString());
+ durationLabel_->hide();
+ setMuteIcon(false);
+ break;
+ default:
+ break;
}
}
diff --git a/src/ActiveCallBar.h b/src/ActiveCallBar.h
index 8440d7f3..1e940227 100644
--- a/src/ActiveCallBar.h
+++ b/src/ActiveCallBar.h
@@ -19,11 +19,10 @@ public:
public slots:
void update(WebRTCSession::State);
- void setCallParty(
- const QString &userid,
- const QString &displayName,
- const QString &roomName,
- const QString &avatarUrl);
+ void setCallParty(const QString &userid,
+ const QString &displayName,
+ const QString &roomName,
+ const QString &avatarUrl);
private:
QHBoxLayout *layout_ = nullptr;
diff --git a/src/CallManager.cpp b/src/CallManager.cpp
index cbfd5135..46781313 100644
--- a/src/CallManager.cpp
+++ b/src/CallManager.cpp
@@ -1,13 +1,13 @@
#include <algorithm>
#include <cctype>
-#include <cstdint>
#include <chrono>
+#include <cstdint>
#include <QMediaPlaylist>
#include <QUrl>
-#include "CallManager.h"
#include "Cache.h"
+#include "CallManager.h"
#include "ChatPage.h"
#include "Logging.h"
#include "MainWindow.h"
@@ -34,389 +34,420 @@ getTurnURIs(const mtx::responses::TurnServer &turnServer);
}
CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
- : QObject(),
- session_(WebRTCSession::instance()),
- turnServerTimer_(this),
- settings_(userSettings)
+ : QObject()
+ , session_(WebRTCSession::instance())
+ , turnServerTimer_(this)
+ , settings_(userSettings)
{
- qRegisterMetaType<std::vector<mtx::events::msg::CallCandidates::Candidate>>();
- qRegisterMetaType<mtx::events::msg::CallCandidates::Candidate>();
- qRegisterMetaType<mtx::responses::TurnServer>();
+ qRegisterMetaType<std::vector<mtx::events::msg::CallCandidates::Candidate>>();
+ qRegisterMetaType<mtx::events::msg::CallCandidates::Candidate>();
+ qRegisterMetaType<mtx::responses::TurnServer>();
- connect(&session_, &WebRTCSession::offerCreated, this,
- [this](const std::string &sdp,
- const std::vector<CallCandidates::Candidate> &candidates)
- {
- nhlog::ui()->debug("WebRTC: call id: {} - sending offer", callid_);
- emit newMessage(roomid_, CallInvite{callid_, sdp, 0, timeoutms_});
- emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
+ connect(
+ &session_,
+ &WebRTCSession::offerCreated,
+ this,
+ [this](const std::string &sdp, const std::vector<CallCandidates::Candidate> &candidates) {
+ nhlog::ui()->debug("WebRTC: call id: {} - sending offer", callid_);
+ emit newMessage(roomid_, CallInvite{callid_, sdp, 0, timeoutms_});
+ emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
+ QTimer::singleShot(timeoutms_, this, [this]() {
+ if (session_.state() == WebRTCSession::State::OFFERSENT) {
+ hangUp(CallHangUp::Reason::InviteTimeOut);
+ emit ChatPage::instance()->showNotification(
+ "The remote side failed to pick up.");
+ }
+ });
+ });
- QTimer::singleShot(timeoutms_, this, [this](){
- if (session_.state() == WebRTCSession::State::OFFERSENT) {
- hangUp(CallHangUp::Reason::InviteTimeOut);
- emit ChatPage::instance()->showNotification("The remote side failed to pick up.");
- }
- });
- });
-
- connect(&session_, &WebRTCSession::answerCreated, this,
- [this](const std::string &sdp,
- const std::vector<CallCandidates::Candidate> &candidates)
- {
- nhlog::ui()->debug("WebRTC: call id: {} - sending answer", callid_);
- emit newMessage(roomid_, CallAnswer{callid_, sdp, 0});
- emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
- });
+ connect(
+ &session_,
+ &WebRTCSession::answerCreated,
+ this,
+ [this](const std::string &sdp, const std::vector<CallCandidates::Candidate> &candidates) {
+ nhlog::ui()->debug("WebRTC: call id: {} - sending answer", callid_);
+ emit newMessage(roomid_, CallAnswer{callid_, sdp, 0});
+ emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
+ });
- connect(&session_, &WebRTCSession::newICECandidate, this,
- [this](const CallCandidates::Candidate &candidate)
- {
- nhlog::ui()->debug("WebRTC: call id: {} - sending ice candidate", callid_);
- emit newMessage(roomid_, CallCandidates{callid_, {candidate}, 0});
- });
+ connect(&session_,
+ &WebRTCSession::newICECandidate,
+ this,
+ [this](const CallCandidates::Candidate &candidate) {
+ nhlog::ui()->debug("WebRTC: call id: {} - sending ice candidate", callid_);
+ emit newMessage(roomid_, CallCandidates{callid_, {candidate}, 0});
+ });
- connect(&turnServerTimer_, &QTimer::timeout, this, &CallManager::retrieveTurnServer);
+ connect(&turnServerTimer_, &QTimer::timeout, this, &CallManager::retrieveTurnServer);
- connect(this, &CallManager::turnServerRetrieved, this,
- [this](const mtx::responses::TurnServer &res)
- {
- nhlog::net()->info("TURN server(s) retrieved from homeserver:");
- nhlog::net()->info("username: {}", res.username);
- nhlog::net()->info("ttl: {} seconds", res.ttl);
- for (const auto &u : res.uris)
- nhlog::net()->info("uri: {}", u);
+ connect(this,
+ &CallManager::turnServerRetrieved,
+ this,
+ [this](const mtx::responses::TurnServer &res) {
+ nhlog::net()->info("TURN server(s) retrieved from homeserver:");
+ nhlog::net()->info("username: {}", res.username);
+ nhlog::net()->info("ttl: {} seconds", res.ttl);
+ for (const auto &u : res.uris)
+ nhlog::net()->info("uri: {}", u);
- // Request new credentials close to expiry
- // See https://tools.ietf.org/html/draft-uberti-behave-turn-rest-00
- turnURIs_ = getTurnURIs(res);
- uint32_t ttl = std::max(res.ttl, UINT32_C(3600));
- if (res.ttl < 3600)
- nhlog::net()->warn("Setting ttl to 1 hour");
- turnServerTimer_.setInterval(ttl * 1000 * 0.9);
- });
+ // Request new credentials close to expiry
+ // See https://tools.ietf.org/html/draft-uberti-behave-turn-rest-00
+ turnURIs_ = getTurnURIs(res);
+ uint32_t ttl = std::max(res.ttl, UINT32_C(3600));
+ if (res.ttl < 3600)
+ nhlog::net()->warn("Setting ttl to 1 hour");
+ turnServerTimer_.setInterval(ttl * 1000 * 0.9);
+ });
- connect(&session_, &WebRTCSession::stateChanged, this,
- [this](WebRTCSession::State state) {
- if (state == WebRTCSession::State::DISCONNECTED) {
- playRingtone("qrc:/media/media/callend.ogg", false);
- }
- else if (state == WebRTCSession::State::ICEFAILED) {
- QString error("Call connection failed.");
- if (turnURIs_.empty())
- error += " Your homeserver has no configured TURN server.";
- emit ChatPage::instance()->showNotification(error);
- hangUp(CallHangUp::Reason::ICEFailed);
- }
- });
+ connect(&session_, &WebRTCSession::stateChanged, this, [this](WebRTCSession::State state) {
+ switch (state) {
+ case WebRTCSession::State::DISCONNECTED:
+ playRingtone("qrc:/media/media/callend.ogg", false);
+ clear();
+ break;
+ case WebRTCSession::State::ICEFAILED: {
+ QString error("Call connection failed.");
+ if (turnURIs_.empty())
+ error += " Your homeserver has no configured TURN server.";
+ emit ChatPage::instance()->showNotification(error);
+ hangUp(CallHangUp::Reason::ICEFailed);
+ break;
+ }
+ default:
+ break;
+ }
+ });
- connect(&player_, &QMediaPlayer::mediaStatusChanged, this,
- [this](QMediaPlayer::MediaStatus status) {
- if (status == QMediaPlayer::LoadedMedia)
- player_.play();
- });
+ connect(&player_,
+ &QMediaPlayer::mediaStatusChanged,
+ this,
+ [this](QMediaPlayer::MediaStatus status) {
+ if (status == QMediaPlayer::LoadedMedia)
+ player_.play();
+ });
}
void
CallManager::sendInvite(const QString &roomid)
{
- if (onActiveCall())
- return;
+ if (onActiveCall())
+ return;
- auto roomInfo = cache::singleRoomInfo(roomid.toStdString());
- if (roomInfo.member_count != 2) {
- emit ChatPage::instance()->showNotification("Voice calls are limited to 1:1 rooms.");
- return;
- }
+ auto roomInfo = cache::singleRoomInfo(roomid.toStdString());
+ if (roomInfo.member_count != 2) {
+ emit ChatPage::instance()->showNotification(
+ "Voice calls are limited to 1:1 rooms.");
+ return;
+ }
- std::string errorMessage;
- if (!session_.init(&errorMessage)) {
- emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
- return;
- }
+ std::string errorMessage;
+ if (!session_.init(&errorMessage)) {
+ emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
+ return;
+ }
- roomid_ = roomid;
- session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
- session_.setTurnServers(turnURIs_);
+ roomid_ = roomid;
+ session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
+ session_.setTurnServers(turnURIs_);
- generateCallID();
- nhlog::ui()->debug("WebRTC: call id: {} - creating invite", callid_);
- std::vector<RoomMember> members(cache::getMembers(roomid.toStdString()));
- const RoomMember &callee = members.front().user_id == utils::localUser() ? members.back() : members.front();
- emit newCallParty(callee.user_id, callee.display_name,
- QString::fromStdString(roomInfo.name), QString::fromStdString(roomInfo.avatar_url));
- playRingtone("qrc:/media/media/ringback.ogg", true);
- if (!session_.createOffer()) {
- emit ChatPage::instance()->showNotification("Problem setting up call.");
- endCall();
- }
+ generateCallID();
+ nhlog::ui()->debug("WebRTC: call id: {} - creating invite", callid_);
+ std::vector<RoomMember> members(cache::getMembers(roomid.toStdString()));
+ const RoomMember &callee =
+ members.front().user_id == utils::localUser() ? members.back() : members.front();
+ emit newCallParty(callee.user_id,
+ callee.display_name,
+ QString::fromStdString(roomInfo.name),
+ QString::fromStdString(roomInfo.avatar_url));
+ playRingtone("qrc:/media/media/ringback.ogg", true);
+ if (!session_.createOffer()) {
+ emit ChatPage::instance()->showNotification("Problem setting up call.");
+ endCall();
+ }
}
namespace {
-std::string callHangUpReasonString(CallHangUp::Reason reason)
+std::string
+callHangUpReasonString(CallHangUp::Reason reason)
{
- switch (reason) {
- case CallHangUp::Reason::ICEFailed:
- return "ICE failed";
- case CallHangUp::Reason::InviteTimeOut:
- return "Invite time out";
- default:
- return "User";
- }
+ switch (reason) {
+ case CallHangUp::Reason::ICEFailed:
+ return "ICE failed";
+ case CallHangUp::Reason::InviteTimeOut:
+ return "Invite time out";
+ default:
+ return "User";
+ }
}
}
void
CallManager::hangUp(CallHangUp::Reason reason)
{
- if (!callid_.empty()) {
- nhlog::ui()->debug("WebRTC: call id: {} - hanging up ({})", callid_,
- callHangUpReasonString(reason));
- emit newMessage(roomid_, CallHangUp{callid_, 0, reason});
- endCall();
- }
+ if (!callid_.empty()) {
+ nhlog::ui()->debug(
+ "WebRTC: call id: {} - hanging up ({})", callid_, callHangUpReasonString(reason));
+ emit newMessage(roomid_, CallHangUp{callid_, 0, reason});
+ endCall();
+ }
}
bool
CallManager::onActiveCall()
{
- return session_.state() != WebRTCSession::State::DISCONNECTED;
+ return session_.state() != WebRTCSession::State::DISCONNECTED;
}
-void CallManager::syncEvent(const mtx::events::collections::TimelineEvents &event)
+void
+CallManager::syncEvent(const mtx::events::collections::TimelineEvents &event)
{
- if (handleEvent_<CallInvite>(event) || handleEvent_<CallCandidates>(event)
- || handleEvent_<CallAnswer>(event) || handleEvent_<CallHangUp>(event))
- return;
+ if (handleEvent_<CallInvite>(event) || handleEvent_<CallCandidates>(event) ||
+ handleEvent_<CallAnswer>(event) || handleEvent_<CallHangUp>(event))
+ return;
}
template<typename T>
bool
CallManager::handleEvent_(const mtx::events::collections::TimelineEvents &event)
{
- if (std::holds_alternative<RoomEvent<T>>(event)) {
- handleEvent(std::get<RoomEvent<T>>(event));
- return true;
- }
- return false;
+ if (std::holds_alternative<RoomEvent<T>>(event)) {
+ handleEvent(std::get<RoomEvent<T>>(event));
+ return true;
+ }
+ return false;
}
void
CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
{
- const char video[] = "m=video";
- const std::string &sdp = callInviteEvent.content.sdp;
- bool isVideo = std::search(sdp.cbegin(), sdp.cend(), std::cbegin(video), std::cend(video) - 1,
- [](unsigned char c1, unsigned char c2) {return std::tolower(c1) == std::tolower(c2);})
- != sdp.cend();
-
- nhlog::ui()->debug(std::string("WebRTC: call id: {} - incoming ") + (isVideo ? "video" : "voice") +
- " CallInvite from {}", callInviteEvent.content.call_id, callInviteEvent.sender);
+ const char video[] = "m=video";
+ const std::string &sdp = callInviteEvent.content.sdp;
+ bool isVideo = std::search(sdp.cbegin(),
+ sdp.cend(),
+ std::cbegin(video),
+ std::cend(video) - 1,
+ [](unsigned char c1, unsigned char c2) {
+ return std::tolower(c1) == std::tolower(c2);
+ }) != sdp.cend();
- if (callInviteEvent.content.call_id.empty())
- return;
+ nhlog::ui()->debug(std::string("WebRTC: call id: {} - incoming ") +
+ (isVideo ? "video" : "voice") + " CallInvite from {}",
+ callInviteEvent.content.call_id,
+ callInviteEvent.sender);
- if (isVideo) {
- emit newMessage(QString::fromStdString(callInviteEvent.room_id),
- CallHangUp{callInviteEvent.content.call_id, 0, CallHangUp::Reason::InviteTimeOut});
- return;
- }
+ if (callInviteEvent.content.call_id.empty())
+ return;
- auto roomInfo = cache::singleRoomInfo(callInviteEvent.room_id);
- if (onActiveCall() || roomInfo.member_count != 2) {
- emit newMessage(QString::fromStdString(callInviteEvent.room_id),
- CallHangUp{callInviteEvent.content.call_id, 0, CallHangUp::Reason::InviteTimeOut});
- return;
- }
+ auto roomInfo = cache::singleRoomInfo(callInviteEvent.room_id);
+ if (onActiveCall() || roomInfo.member_count != 2 || isVideo) {
+ emit newMessage(QString::fromStdString(callInviteEvent.room_id),
+ CallHangUp{callInviteEvent.content.call_id,
+ 0,
+ CallHangUp::Reason::InviteTimeOut});
+ return;
+ }
- playRingtone("qrc:/media/media/ring.ogg", true);
- roomid_ = QString::fromStdString(callInviteEvent.room_id);
- callid_ = callInviteEvent.content.call_id;
- remoteICECandidates_.clear();
+ playRingtone("qrc:/media/media/ring.ogg", true);
+ roomid_ = QString::fromStdString(callInviteEvent.room_id);
+ callid_ = callInviteEvent.content.call_id;
+ remoteICECandidates_.clear();
- std::vector<RoomMember> members(cache::getMembers(callInviteEvent.room_id));
- const RoomMember &caller =
- members.front().user_id == utils::localUser() ? members.back() : members.front();
- emit newCallParty(caller.user_id, caller.display_name,
- QString::fromStdString(roomInfo.name), QString::fromStdString(roomInfo.avatar_url));
+ std::vector<RoomMember> members(cache::getMembers(callInviteEvent.room_id));
+ const RoomMember &caller =
+ members.front().user_id == utils::localUser() ? members.back() : members.front();
+ emit newCallParty(caller.user_id,
+ caller.display_name,
+ QString::fromStdString(roomInfo.name),
+ QString::fromStdString(roomInfo.avatar_url));
- auto dialog = new dialogs::AcceptCall(
- caller.user_id,
- caller.display_name,
- QString::fromStdString(roomInfo.name),
- QString::fromStdString(roomInfo.avatar_url),
- MainWindow::instance());
- connect(dialog, &dialogs::AcceptCall::accept, this,
- [this, callInviteEvent](){
- MainWindow::instance()->hideOverlay();
- answerInvite(callInviteEvent.content);});
- connect(dialog, &dialogs::AcceptCall::reject, this,
- [this](){
- MainWindow::instance()->hideOverlay();
- hangUp();});
- MainWindow::instance()->showSolidOverlayModal(dialog);
+ auto dialog = new dialogs::AcceptCall(caller.user_id,
+ caller.display_name,
+ QString::fromStdString(roomInfo.name),
+ QString::fromStdString(roomInfo.avatar_url),
+ MainWindow::instance());
+ connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent]() {
+ MainWindow::instance()->hideOverlay();
+ answerInvite(callInviteEvent.content);
+ });
+ connect(dialog, &dialogs::AcceptCall::reject, this, [this]() {
+ MainWindow::instance()->hideOverlay();
+ hangUp();
+ });
+ MainWindow::instance()->showSolidOverlayModal(dialog);
}
void
CallManager::answerInvite(const CallInvite &invite)
{
- stopRingtone();
- std::string errorMessage;
- if (!session_.init(&errorMessage)) {
- emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
- hangUp();
- return;
- }
+ stopRingtone();
+ std::string errorMessage;
+ if (!session_.init(&errorMessage)) {
+ emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
+ hangUp();
+ return;
+ }
- session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
- session_.setTurnServers(turnURIs_);
+ session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
+ session_.setTurnServers(turnURIs_);
- if (!session_.acceptOffer(invite.sdp)) {
- emit ChatPage::instance()->showNotification("Problem setting up call.");
- hangUp();
- return;
- }
- session_.acceptICECandidates(remoteICECandidates_);
- remoteICECandidates_.clear();
+ if (!session_.acceptOffer(invite.sdp)) {
+ emit ChatPage::instance()->showNotification("Problem setting up call.");
+ hangUp();
+ return;
+ }
+ session_.acceptICECandidates(remoteICECandidates_);
+ remoteICECandidates_.clear();
}
void
CallManager::handleEvent(const RoomEvent<CallCandidates> &callCandidatesEvent)
{
- if (callCandidatesEvent.sender == utils::localUser().toStdString())
- return;
+ if (callCandidatesEvent.sender == utils::localUser().toStdString())
+ return;
- nhlog::ui()->debug("WebRTC: call id: {} - incoming CallCandidates from {}",
- callCandidatesEvent.content.call_id, callCandidatesEvent.sender);
+ nhlog::ui()->debug("WebRTC: call id: {} - incoming CallCandidates from {}",
+ callCandidatesEvent.content.call_id,
+ callCandidatesEvent.sender);
- if (callid_ == callCandidatesEvent.content.call_id) {
- if (onActiveCall())
- session_.acceptICECandidates(callCandidatesEvent.content.candidates);
- else {
- // CallInvite has been received and we're awaiting localUser to accept or reject the call
- for (const auto &c : callCandidatesEvent.content.candidates)
- remoteICECandidates_.push_back(c);
- }
- }
+ if (callid_ == callCandidatesEvent.content.call_id) {
+ if (onActiveCall())
+ session_.acceptICECandidates(callCandidatesEvent.content.candidates);
+ else {
+ // CallInvite has been received and we're awaiting localUser to accept or
+ // reject the call
+ for (const auto &c : callCandidatesEvent.content.candidates)
+ remoteICECandidates_.push_back(c);
+ }
+ }
}
void
CallManager::handleEvent(const RoomEvent<CallAnswer> &callAnswerEvent)
{
- nhlog::ui()->debug("WebRTC: call id: {} - incoming CallAnswer from {}",
- callAnswerEvent.content.call_id, callAnswerEvent.sender);
+ nhlog::ui()->debug("WebRTC: call id: {} - incoming CallAnswer from {}",
+ callAnswerEvent.content.call_id,
+ callAnswerEvent.sender);
- if (!onActiveCall() && callAnswerEvent.sender == utils::localUser().toStdString() &&
- callid_ == callAnswerEvent.content.call_id) {
- emit ChatPage::instance()->showNotification("Call answered on another device.");
- stopRingtone();
- MainWindow::instance()->hideOverlay();
- return;
- }
+ if (!onActiveCall() && callAnswerEvent.sender == utils::localUser().toStdString() &&
+ callid_ == callAnswerEvent.content.call_id) {
+ emit ChatPage::instance()->showNotification("Call answered on another device.");
+ stopRingtone();
+ MainWindow::instance()->hideOverlay();
+ return;
+ }
- if (onActiveCall() && callid_ == callAnswerEvent.content.call_id) {
- stopRingtone();
- if (!session_.acceptAnswer(callAnswerEvent.content.sdp)) {
- emit ChatPage::instance()->showNotification("Problem setting up call.");
- hangUp();
- }
- }
+ if (onActiveCall() && callid_ == callAnswerEvent.content.call_id) {
+ stopRingtone();
+ if (!session_.acceptAnswer(callAnswerEvent.content.sdp)) {
+ emit ChatPage::instance()->showNotification("Problem setting up call.");
+ hangUp();
+ }
+ }
}
void
CallManager::handleEvent(const RoomEvent<CallHangUp> &callHangUpEvent)
{
- nhlog::ui()->debug("WebRTC: call id: {} - incoming CallHangUp ({}) from {}",
- callHangUpEvent.content.call_id, callHangUpReasonString(callHangUpEvent.content.reason),
- callHangUpEvent.sender);
+ nhlog::ui()->debug("WebRTC: call id: {} - incoming CallHangUp ({}) from {}",
+ callHangUpEvent.content.call_id,
+ callHangUpReasonString(callHangUpEvent.content.reason),
+ callHangUpEvent.sender);
- if (callid_ == callHangUpEvent.content.call_id) {
- MainWindow::instance()->hideOverlay();
- endCall();
- }
+ if (callid_ == callHangUpEvent.content.call_id) {
+ MainWindow::instance()->hideOverlay();
+ endCall();
+ }
}
void
CallManager::generateCallID()
{
- using namespace std::chrono;
- uint64_t ms = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
- callid_ = "c" + std::to_string(ms);
+ using namespace std::chrono;
+ uint64_t ms = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
+ callid_ = "c" + std::to_string(ms);
+}
+
+void
+CallManager::clear()
+{
+ roomid_.clear();
+ callid_.clear();
+ remoteICECandidates_.clear();
}
void
CallManager::endCall()
{
- stopRingtone();
- session_.end();
- roomid_.clear();
- callid_.clear();
- remoteICECandidates_.clear();
+ stopRingtone();
+ clear();
+ session_.end();
}
void
CallManager::refreshTurnServer()
{
- turnURIs_.clear();
- turnServerTimer_.start(2000);
+ turnURIs_.clear();
+ turnServerTimer_.start(2000);
}
void
CallManager::retrieveTurnServer()
{
- http::client()->get_turn_server(
- [this](const mtx::responses::TurnServer &res, mtx::http::RequestErr err) {
- if (err) {
- turnServerTimer_.setInterval(5000);
- return;
- }
- emit turnServerRetrieved(res);
- });
+ http::client()->get_turn_server(
+ [this](const mtx::responses::TurnServer &res, mtx::http::RequestErr err) {
+ if (err) {
+ turnServerTimer_.setInterval(5000);
+ return;
+ }
+ emit turnServerRetrieved(res);
+ });
}
void
CallManager::playRingtone(const QString &ringtone, bool repeat)
{
- static QMediaPlaylist playlist;
- playlist.clear();
- playlist.setPlaybackMode(repeat ? QMediaPlaylist::CurrentItemInLoop : QMediaPlaylist::CurrentItemOnce);
- playlist.addMedia(QUrl(ringtone));
- player_.setVolume(100);
- player_.setPlaylist(&playlist);
+ static QMediaPlaylist playlist;
+ playlist.clear();
+ playlist.setPlaybackMode(repeat ? QMediaPlaylist::CurrentItemInLoop
+ : QMediaPlaylist::CurrentItemOnce);
+ playlist.addMedia(QUrl(ringtone));
+ player_.setVolume(100);
+ player_.setPlaylist(&playlist);
}
void
CallManager::stopRingtone()
{
- player_.setPlaylist(nullptr);
+ player_.setPlaylist(nullptr);
}
namespace {
std::vector<std::string>
getTurnURIs(const mtx::responses::TurnServer &turnServer)
{
- // gstreamer expects: turn(s)://username:password@host:port?transport=udp(tcp)
- // where username and password are percent-encoded
- std::vector<std::string> ret;
- for (const auto &uri : turnServer.uris) {
- if (auto c = uri.find(':'); c == std::string::npos) {
- nhlog::ui()->error("Invalid TURN server uri: {}", uri);
- continue;
- }
- else {
- std::string scheme = std::string(uri, 0, c);
- if (scheme != "turn" && scheme != "turns") {
- nhlog::ui()->error("Invalid TURN server uri: {}", uri);
- continue;
- }
+ // gstreamer expects: turn(s)://username:password@host:port?transport=udp(tcp)
+ // where username and password are percent-encoded
+ std::vector<std::string> ret;
+ for (const auto &uri : turnServer.uris) {
+ if (auto c = uri.find(':'); c == std::string::npos) {
+ nhlog::ui()->error("Invalid TURN server uri: {}", uri);
+ continue;
+ } else {
+ std::string scheme = std::string(uri, 0, c);
+ if (scheme != "turn" && scheme != "turns") {
+ nhlog::ui()->error("Invalid TURN server uri: {}", uri);
+ continue;
+ }
- QString encodedUri = QString::fromStdString(scheme) + "://" +
- QUrl::toPercentEncoding(QString::fromStdString(turnServer.username)) + ":" +
- QUrl::toPercentEncoding(QString::fromStdString(turnServer.password)) + "@" +
- QString::fromStdString(std::string(uri, ++c));
- ret.push_back(encodedUri.toStdString());
- }
- }
- return ret;
+ QString encodedUri =
+ QString::fromStdString(scheme) + "://" +
+ QUrl::toPercentEncoding(QString::fromStdString(turnServer.username)) +
+ ":" +
+ QUrl::toPercentEncoding(QString::fromStdString(turnServer.password)) +
+ "@" + QString::fromStdString(std::string(uri, ++c));
+ ret.push_back(encodedUri.toStdString());
+ }
+ }
+ return ret;
}
}
-
diff --git a/src/CallManager.h b/src/CallManager.h
index 4ed6e4c7..3a406438 100644
--- a/src/CallManager.h
+++ b/src/CallManager.h
@@ -3,8 +3,8 @@
#include <string>
#include <vector>
-#include <QObject>
#include <QMediaPlayer>
+#include <QObject>
#include <QSharedPointer>
#include <QString>
#include <QTimer>
@@ -27,7 +27,8 @@ public:
CallManager(QSharedPointer<UserSettings>);
void sendInvite(const QString &roomid);
- void hangUp(mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User);
+ void hangUp(
+ mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User);
bool onActiveCall();
void refreshTurnServer();
@@ -35,22 +36,21 @@ public slots:
void syncEvent(const mtx::events::collections::TimelineEvents &event);
signals:
- void newMessage(const QString &roomid, const mtx::events::msg::CallInvite&);
- void newMessage(const QString &roomid, const mtx::events::msg::CallCandidates&);
- void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer&);
- void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp&);
- void turnServerRetrieved(const mtx::responses::TurnServer&);
- void newCallParty(
- const QString &userid,
- const QString &displayName,
- const QString &roomName,
- const QString &avatarUrl);
+ void newMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
+ void newMessage(const QString &roomid, const mtx::events::msg::CallCandidates &);
+ void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
+ void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
+ void turnServerRetrieved(const mtx::responses::TurnServer &);
+ void newCallParty(const QString &userid,
+ const QString &displayName,
+ const QString &roomName,
+ const QString &avatarUrl);
private slots:
void retrieveTurnServer();
private:
- WebRTCSession& session_;
+ WebRTCSession &session_;
QString roomid_;
std::string callid_;
const uint32_t timeoutms_ = 120000;
@@ -62,12 +62,13 @@ private:
template<typename T>
bool handleEvent_(const mtx::events::collections::TimelineEvents &event);
- void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallInvite>&);
- void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates>&);
- void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer>&);
- void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp>&);
- void answerInvite(const mtx::events::msg::CallInvite&);
+ void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &);
+ void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates> &);
+ void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &);
+ void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &);
+ void answerInvite(const mtx::events::msg::CallInvite &);
void generateCallID();
+ void clear();
void endCall();
void playRingtone(const QString &ringtone, bool repeat);
void stopRingtone();
diff --git a/src/ChatPage.cpp b/src/ChatPage.cpp
index 5ab617fa..09153154 100644
--- a/src/ChatPage.cpp
+++ b/src/ChatPage.cpp
@@ -460,9 +460,8 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
if (callManager_.onActiveCall()) {
callManager_.hangUp();
} else {
- if (auto roomInfo =
- cache::singleRoomInfo(current_room_.toStdString());
- roomInfo.member_count != 2) {
+ if (auto roomInfo = cache::singleRoomInfo(current_room_.toStdString());
+ roomInfo.member_count != 2) {
showNotification("Voice calls are limited to 1:1 rooms.");
} else {
std::vector<RoomMember> members(
@@ -471,11 +470,11 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
members.front().user_id == utils::localUser() ? members.back()
: members.front();
auto dialog = new dialogs::PlaceCall(
- callee.user_id,
- callee.display_name,
- QString::fromStdString(roomInfo.name),
- QString::fromStdString(roomInfo.avatar_url),
- MainWindow::instance());
+ callee.user_id,
+ callee.display_name,
+ QString::fromStdString(roomInfo.name),
+ QString::fromStdString(roomInfo.avatar_url),
+ MainWindow::instance());
connect(dialog, &dialogs::PlaceCall::voice, this, [this]() {
callManager_.sendInvite(current_room_);
});
diff --git a/src/EventAccessors.cpp b/src/EventAccessors.cpp
index 043e24a2..7846737b 100644
--- a/src/EventAccessors.cpp
+++ b/src/EventAccessors.cpp
@@ -72,12 +72,19 @@ struct CallType
template<class T>
std::string operator()(const T &e)
{
- if constexpr (std::is_same_v<mtx::events::RoomEvent<mtx::events::msg::CallInvite>, T>) {
- const char video[] = "m=video";
- const std::string &sdp = e.content.sdp;
- return std::search(sdp.cbegin(), sdp.cend(), std::cbegin(video), std::cend(video) - 1,
- [](unsigned char c1, unsigned char c2) {return std::tolower(c1) == std::tolower(c2);})
- != sdp.cend() ? "video" : "voice";
+ if constexpr (std::is_same_v<mtx::events::RoomEvent<mtx::events::msg::CallInvite>,
+ T>) {
+ const char video[] = "m=video";
+ const std::string &sdp = e.content.sdp;
+ return std::search(sdp.cbegin(),
+ sdp.cend(),
+ std::cbegin(video),
+ std::cend(video) - 1,
+ [](unsigned char c1, unsigned char c2) {
+ return std::tolower(c1) == std::tolower(c2);
+ }) != sdp.cend()
+ ? "video"
+ : "voice";
}
return std::string();
}
diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp
index f3fd1bdc..32b67123 100644
--- a/src/WebRTCSession.cpp
+++ b/src/WebRTCSession.cpp
@@ -1,9 +1,10 @@
#include <cctype>
-#include "WebRTCSession.h"
#include "Logging.h"
+#include "WebRTCSession.h"
-extern "C" {
+extern "C"
+{
#include "gst/gst.h"
#include "gst/sdp/sdp.h"
@@ -13,478 +14,498 @@ extern "C" {
Q_DECLARE_METATYPE(WebRTCSession::State)
-namespace {
-bool isoffering_;
-std::string localsdp_;
-std::vector<mtx::events::msg::CallCandidates::Candidate> localcandidates_;
-
-gboolean newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data);
-GstWebRTCSessionDescription* parseSDP(const std::string &sdp, GstWebRTCSDPType type);
-void generateOffer(GstElement *webrtc);
-void setLocalDescription(GstPromise *promise, gpointer webrtc);
-void addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED, guint mlineIndex, gchar *candidate, gpointer G_GNUC_UNUSED);
-gboolean onICEGatheringCompletion(gpointer timerid);
-void iceConnectionStateChanged(GstElement *webrtcbin, GParamSpec *pspec G_GNUC_UNUSED, gpointer user_data G_GNUC_UNUSED);
-void createAnswer(GstPromise *promise, gpointer webrtc);
-void addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe);
-void linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe);
-std::string::const_iterator findName(const std::string &sdp, const std::string &name);
-int getPayloadType(const std::string &sdp, const std::string &name);
-}
-
-WebRTCSession::WebRTCSession() : QObject()
+WebRTCSession::WebRTCSession()
+ : QObject()
{
- qRegisterMetaType<WebRTCSession::State>();
- connect(this, &WebRTCSession::stateChanged, this, &WebRTCSession::setState);
+ qRegisterMetaType<WebRTCSession::State>();
+ connect(this, &WebRTCSession::stateChanged, this, &WebRTCSession::setState);
}
bool
WebRTCSession::init(std::string *errorMessage)
{
- if (initialised_)
- return true;
+ if (initialised_)
+ return true;
- GError *error = nullptr;
- if (!gst_init_check(nullptr, nullptr, &error)) {
- std::string strError = std::string("WebRTC: failed to initialise GStreamer: ");
- if (error) {
- strError += error->message;
- g_error_free(error);
- }
- nhlog::ui()->error(strError);
- if (errorMessage)
- *errorMessage = strError;
- return false;
- }
+ GError *error = nullptr;
+ if (!gst_init_check(nullptr, nullptr, &error)) {
+ std::string strError = std::string("WebRTC: failed to initialise GStreamer: ");
+ if (error) {
+ strError += error->message;
+ g_error_free(error);
+ }
+ nhlog::ui()->error(strError);
+ if (errorMessage)
+ *errorMessage = strError;
+ return false;
+ }
- gchar *version = gst_version_string();
- std::string gstVersion(version);
- g_free(version);
- nhlog::ui()->info("WebRTC: initialised " + gstVersion);
+ gchar *version = gst_version_string();
+ std::string gstVersion(version);
+ g_free(version);
+ nhlog::ui()->info("WebRTC: initialised " + gstVersion);
- // GStreamer Plugins:
- // Base: audioconvert, audioresample, opus, playback, volume
- // Good: autodetect, rtpmanager
- // Bad: dtls, srtp, webrtc
- // libnice [GLib]: nice
- initialised_ = true;
- std::string strError = gstVersion + ": Missing plugins: ";
- const gchar *needed[] = {"audioconvert", "audioresample", "autodetect", "dtls", "nice",
- "opus", "playback", "rtpmanager", "srtp", "volume", "webrtc", nullptr};
- GstRegistry *registry = gst_registry_get();
- for (guint i = 0; i < g_strv_length((gchar**)needed); i++) {
- GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]);
- if (!plugin) {
- strError += std::string(needed[i]) + " ";
- initialised_ = false;
- continue;
- }
- gst_object_unref(plugin);
- }
+ // GStreamer Plugins:
+ // Base: audioconvert, audioresample, opus, playback, volume
+ // Good: autodetect, rtpmanager
+ // Bad: dtls, srtp, webrtc
+ // libnice [GLib]: nice
+ initialised_ = true;
+ std::string strError = gstVersion + ": Missing plugins: ";
+ const gchar *needed[] = {"audioconvert",
+ "audioresample",
+ "autodetect",
+ "dtls",
+ "nice",
+ "opus",
+ "playback",
+ "rtpmanager",
+ "srtp",
+ "volume",
+ "webrtc",
+ nullptr};
+ GstRegistry *registry = gst_registry_get();
+ for (guint i = 0; i < g_strv_length((gchar **)needed); i++) {
+ GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]);
+ if (!plugin) {
+ strError += std::string(needed[i]) + " ";
+ initialised_ = false;
+ continue;
+ }
+ gst_object_unref(plugin);
+ }
- if (!initialised_) {
- nhlog::ui()->error(strError);
- if (errorMessage)
- *errorMessage = strError;
- }
- return initialised_;
+ if (!initialised_) {
+ nhlog::ui()->error(strError);
+ if (errorMessage)
+ *errorMessage = strError;
+ }
+ return initialised_;
}
-bool
-WebRTCSession::createOffer()
+namespace {
+
+bool isoffering_;
+std::string localsdp_;
+std::vector<mtx::events::msg::CallCandidates::Candidate> localcandidates_;
+
+gboolean
+newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
{
- isoffering_ = true;
- localsdp_.clear();
- localcandidates_.clear();
- return startPipeline(111); // a dynamic opus payload type
+ WebRTCSession *session = static_cast<WebRTCSession *>(user_data);
+ switch (GST_MESSAGE_TYPE(msg)) {
+ case GST_MESSAGE_EOS:
+ nhlog::ui()->error("WebRTC: end of stream");
+ session->end();
+ break;
+ case GST_MESSAGE_ERROR:
+ GError *error;
+ gchar *debug;
+ gst_message_parse_error(msg, &error, &debug);
+ nhlog::ui()->error(
+ "WebRTC: error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message);
+ g_clear_error(&error);
+ g_free(debug);
+ session->end();
+ break;
+ default:
+ break;
+ }
+ return TRUE;
}
-bool
-WebRTCSession::acceptOffer(const std::string &sdp)
+GstWebRTCSessionDescription *
+parseSDP(const std::string &sdp, GstWebRTCSDPType type)
{
- nhlog::ui()->debug("WebRTC: received offer:\n{}", sdp);
- if (state_ != State::DISCONNECTED)
- return false;
+ GstSDPMessage *msg;
+ gst_sdp_message_new(&msg);
+ if (gst_sdp_message_parse_buffer((guint8 *)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) {
+ return gst_webrtc_session_description_new(type, msg);
+ } else {
+ nhlog::ui()->error("WebRTC: failed to parse remote session description");
+ gst_object_unref(msg);
+ return nullptr;
+ }
+}
- isoffering_ = false;
- localsdp_.clear();
- localcandidates_.clear();
+void
+setLocalDescription(GstPromise *promise, gpointer webrtc)
+{
+ const GstStructure *reply = gst_promise_get_reply(promise);
+ gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer"));
+ GstWebRTCSessionDescription *gstsdp = nullptr;
+ gst_structure_get(reply,
+ isAnswer ? "answer" : "offer",
+ GST_TYPE_WEBRTC_SESSION_DESCRIPTION,
+ &gstsdp,
+ nullptr);
+ gst_promise_unref(promise);
+ g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr);
- int opusPayloadType = getPayloadType(sdp, "opus");
- if (opusPayloadType == -1)
- return false;
+ gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp);
+ localsdp_ = std::string(sdp);
+ g_free(sdp);
+ gst_webrtc_session_description_free(gstsdp);
- GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER);
- if (!offer)
- return false;
+ nhlog::ui()->debug(
+ "WebRTC: local description set ({}):\n{}", isAnswer ? "answer" : "offer", localsdp_);
+}
- if (!startPipeline(opusPayloadType)) {
- gst_webrtc_session_description_free(offer);
- return false;
- }
+void
+createOffer(GstElement *webrtc)
+{
+ // create-offer first, then set-local-description
+ GstPromise *promise =
+ gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
+ g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise);
+}
- // set-remote-description first, then create-answer
- GstPromise *promise = gst_promise_new_with_change_func(createAnswer, webrtc_, nullptr);
- g_signal_emit_by_name(webrtc_, "set-remote-description", offer, promise);
- gst_webrtc_session_description_free(offer);
- return true;
+void
+createAnswer(GstPromise *promise, gpointer webrtc)
+{
+ // create-answer first, then set-local-description
+ gst_promise_unref(promise);
+ promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
+ g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise);
}
-bool
-WebRTCSession::startPipeline(int opusPayloadType)
+gboolean
+onICEGatheringCompletion(gpointer timerid)
{
- if (state_ != State::DISCONNECTED)
- return false;
+ *(guint *)(timerid) = 0;
+ if (isoffering_) {
+ emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
+ emit WebRTCSession::instance().stateChanged(WebRTCSession::State::OFFERSENT);
+ } else {
+ emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
+ emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ANSWERSENT);
+ }
+ return FALSE;
+}
- emit stateChanged(State::INITIATING);
+void
+addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED,
+ guint mlineIndex,
+ gchar *candidate,
+ gpointer G_GNUC_UNUSED)
+{
+ nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate);
- if (!createPipeline(opusPayloadType))
- return false;
+ if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) {
+ emit WebRTCSession::instance().newICECandidate(
+ {"audio", (uint16_t)mlineIndex, candidate});
+ return;
+ }
- webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin");
+ localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate});
- if (!stunServer_.empty()) {
- nhlog::ui()->info("WebRTC: setting STUN server: {}", stunServer_);
- g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr);
- }
+ // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers
+ // GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early. Fixed in v1.18. Use a 100ms timeout in
+ // the meantime
+ static guint timerid = 0;
+ if (timerid)
+ g_source_remove(timerid);
- for (const auto &uri : turnServers_) {
- nhlog::ui()->info("WebRTC: setting TURN server: {}", uri);
- gboolean udata;
- g_signal_emit_by_name(webrtc_, "add-turn-server", uri.c_str(), (gpointer)(&udata));
- }
- if (turnServers_.empty())
- nhlog::ui()->warn("WebRTC: no TURN server provided");
+ timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid);
+}
- // generate the offer when the pipeline goes to PLAYING
- if (isoffering_)
- g_signal_connect(webrtc_, "on-negotiation-needed", G_CALLBACK(generateOffer), nullptr);
+void
+iceConnectionStateChanged(GstElement *webrtc,
+ GParamSpec *pspec G_GNUC_UNUSED,
+ gpointer user_data G_GNUC_UNUSED)
+{
+ GstWebRTCICEConnectionState newState;
+ g_object_get(webrtc, "ice-connection-state", &newState, nullptr);
+ switch (newState) {
+ case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING:
+ nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking");
+ emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING);
+ break;
+ case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED:
+ nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed");
+ emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED);
+ break;
+ default:
+ break;
+ }
+}
- // on-ice-candidate is emitted when a local ICE candidate has been gathered
- g_signal_connect(webrtc_, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), nullptr);
+void
+linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
+{
+ GstCaps *caps = gst_pad_get_current_caps(newpad);
+ if (!caps)
+ return;
- // capture ICE failure
- g_signal_connect(webrtc_, "notify::ice-connection-state",
- G_CALLBACK(iceConnectionStateChanged), nullptr);
+ const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
+ gst_caps_unref(caps);
- // incoming streams trigger pad-added
- gst_element_set_state(pipe_, GST_STATE_READY);
- g_signal_connect(webrtc_, "pad-added", G_CALLBACK(addDecodeBin), pipe_);
+ GstPad *queuepad = nullptr;
+ if (g_str_has_prefix(name, "audio")) {
+ nhlog::ui()->debug("WebRTC: received incoming audio stream");
+ GstElement *queue = gst_element_factory_make("queue", nullptr);
+ GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
+ GstElement *resample = gst_element_factory_make("audioresample", nullptr);
+ GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr);
+ gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr);
+ gst_element_sync_state_with_parent(queue);
+ gst_element_sync_state_with_parent(convert);
+ gst_element_sync_state_with_parent(resample);
+ gst_element_sync_state_with_parent(sink);
+ gst_element_link_many(queue, convert, resample, sink, nullptr);
+ queuepad = gst_element_get_static_pad(queue, "sink");
+ }
- // webrtcbin lifetime is the same as that of the pipeline
- gst_object_unref(webrtc_);
+ if (queuepad) {
+ if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad)))
+ nhlog::ui()->error("WebRTC: unable to link new pad");
+ else {
+ emit WebRTCSession::instance().stateChanged(
+ WebRTCSession::State::CONNECTED);
+ }
+ gst_object_unref(queuepad);
+ }
+}
- // start the pipeline
- GstStateChangeReturn ret = gst_element_set_state(pipe_, GST_STATE_PLAYING);
- if (ret == GST_STATE_CHANGE_FAILURE) {
- nhlog::ui()->error("WebRTC: unable to start pipeline");
- end();
- return false;
- }
+void
+addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
+{
+ if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC)
+ return;
- GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipe_));
- gst_bus_add_watch(bus, newBusMessage, this);
- gst_object_unref(bus);
- emit stateChanged(State::INITIATED);
- return true;
+ nhlog::ui()->debug("WebRTC: received incoming stream");
+ GstElement *decodebin = gst_element_factory_make("decodebin", nullptr);
+ g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe);
+ gst_bin_add(GST_BIN(pipe), decodebin);
+ gst_element_sync_state_with_parent(decodebin);
+ GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
+ if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad)))
+ nhlog::ui()->error("WebRTC: unable to link new pad");
+ gst_object_unref(sinkpad);
}
-#define RTP_CAPS_OPUS "application/x-rtp,media=audio,encoding-name=OPUS,payload="
-
-bool
-WebRTCSession::createPipeline(int opusPayloadType)
+std::string::const_iterator
+findName(const std::string &sdp, const std::string &name)
{
- std::string pipeline("webrtcbin bundle-policy=max-bundle name=webrtcbin "
- "autoaudiosrc ! volume name=srclevel ! audioconvert ! audioresample ! queue ! opusenc ! rtpopuspay ! "
- "queue ! " RTP_CAPS_OPUS + std::to_string(opusPayloadType) + " ! webrtcbin.");
-
- webrtc_ = nullptr;
- GError *error = nullptr;
- pipe_ = gst_parse_launch(pipeline.c_str(), &error);
- if (error) {
- nhlog::ui()->error("WebRTC: failed to parse pipeline: {}", error->message);
- g_error_free(error);
- end();
- return false;
- }
- return true;
+ return std::search(
+ sdp.cbegin(),
+ sdp.cend(),
+ name.cbegin(),
+ name.cend(),
+ [](unsigned char c1, unsigned char c2) { return std::tolower(c1) == std::tolower(c2); });
}
-bool
-WebRTCSession::acceptAnswer(const std::string &sdp)
+int
+getPayloadType(const std::string &sdp, const std::string &name)
{
- nhlog::ui()->debug("WebRTC: received answer:\n{}", sdp);
- if (state_ != State::OFFERSENT)
- return false;
+ // eg a=rtpmap:111 opus/48000/2
+ auto e = findName(sdp, name);
+ if (e == sdp.cend()) {
+ nhlog::ui()->error("WebRTC: remote offer - " + name + " attribute missing");
+ return -1;
+ }
- GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER);
- if (!answer) {
- end();
- return false;
- }
+ if (auto s = sdp.rfind(':', e - sdp.cbegin()); s == std::string::npos) {
+ nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name +
+ " payload type");
+ return -1;
+ } else {
+ ++s;
+ try {
+ return std::stoi(std::string(sdp, s, e - sdp.cbegin() - s));
+ } catch (...) {
+ nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name +
+ " payload type");
+ }
+ }
+ return -1;
+}
- g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr);
- gst_webrtc_session_description_free(answer);
- return true;
}
-void
-WebRTCSession::acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &candidates)
+bool
+WebRTCSession::createOffer()
{
- if (state_ >= State::INITIATED) {
- for (const auto &c : candidates) {
- nhlog::ui()->debug("WebRTC: remote candidate: (m-line:{}):{}", c.sdpMLineIndex, c.candidate);
- g_signal_emit_by_name(webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str());
- }
- }
+ isoffering_ = true;
+ localsdp_.clear();
+ localcandidates_.clear();
+ return startPipeline(111); // a dynamic opus payload type
}
bool
-WebRTCSession::toggleMuteAudioSrc(bool &isMuted)
+WebRTCSession::acceptOffer(const std::string &sdp)
{
- if (state_ < State::INITIATED)
- return false;
+ nhlog::ui()->debug("WebRTC: received offer:\n{}", sdp);
+ if (state_ != State::DISCONNECTED)
+ return false;
- GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel");
- if (!srclevel)
- return false;
+ isoffering_ = false;
+ localsdp_.clear();
+ localcandidates_.clear();
- gboolean muted;
- g_object_get(srclevel, "mute", &muted, nullptr);
- g_object_set(srclevel, "mute", !muted, nullptr);
- gst_object_unref(srclevel);
- isMuted = !muted;
- return true;
-}
+ int opusPayloadType = getPayloadType(sdp, "opus");
+ if (opusPayloadType == -1)
+ return false;
-void
-WebRTCSession::end()
-{
- nhlog::ui()->debug("WebRTC: ending session");
- if (pipe_) {
- gst_element_set_state(pipe_, GST_STATE_NULL);
- gst_object_unref(pipe_);
- pipe_ = nullptr;
- }
- webrtc_ = nullptr;
- if (state_ != State::DISCONNECTED)
- emit stateChanged(State::DISCONNECTED);
-}
+ GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER);
+ if (!offer)
+ return false;
-namespace {
+ if (!startPipeline(opusPayloadType)) {
+ gst_webrtc_session_description_free(offer);
+ return false;
+ }
-std::string::const_iterator findName(const std::string &sdp, const std::string &name)
-{
- return std::search(sdp.cbegin(), sdp.cend(), name.cbegin(), name.cend(),
- [](unsigned char c1, unsigned char c2) {return std::tolower(c1) == std::tolower(c2);});
+ // set-remote-description first, then create-answer
+ GstPromise *promise = gst_promise_new_with_change_func(createAnswer, webrtc_, nullptr);
+ g_signal_emit_by_name(webrtc_, "set-remote-description", offer, promise);
+ gst_webrtc_session_description_free(offer);
+ return true;
}
-int getPayloadType(const std::string &sdp, const std::string &name)
+bool
+WebRTCSession::acceptAnswer(const std::string &sdp)
{
- // eg a=rtpmap:111 opus/48000/2
- auto e = findName(sdp, name);
- if (e == sdp.cend()) {
- nhlog::ui()->error("WebRTC: remote offer - " + name + " attribute missing");
- return -1;
- }
-
- if (auto s = sdp.rfind(':', e - sdp.cbegin()); s == std::string::npos) {
- nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name + " payload type");
- return -1;
- }
- else {
- ++s;
- try {
- return std::stoi(std::string(sdp, s, e - sdp.cbegin() - s));
- }
- catch(...) {
- nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name + " payload type");
- }
- }
- return -1;
-}
+ nhlog::ui()->debug("WebRTC: received answer:\n{}", sdp);
+ if (state_ != State::OFFERSENT)
+ return false;
-gboolean
-newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
-{
- WebRTCSession *session = (WebRTCSession*)user_data;
- switch (GST_MESSAGE_TYPE(msg)) {
- case GST_MESSAGE_EOS:
- nhlog::ui()->error("WebRTC: end of stream");
- session->end();
- break;
- case GST_MESSAGE_ERROR:
- GError *error;
- gchar *debug;
- gst_message_parse_error(msg, &error, &debug);
- nhlog::ui()->error("WebRTC: error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message);
- g_clear_error(&error);
- g_free(debug);
- session->end();
- break;
- default:
- break;
- }
- return TRUE;
-}
+ GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER);
+ if (!answer) {
+ end();
+ return false;
+ }
-GstWebRTCSessionDescription*
-parseSDP(const std::string &sdp, GstWebRTCSDPType type)
-{
- GstSDPMessage *msg;
- gst_sdp_message_new(&msg);
- if (gst_sdp_message_parse_buffer((guint8*)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) {
- return gst_webrtc_session_description_new(type, msg);
- }
- else {
- nhlog::ui()->error("WebRTC: failed to parse remote session description");
- gst_object_unref(msg);
- return nullptr;
- }
+ g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr);
+ gst_webrtc_session_description_free(answer);
+ return true;
}
void
-generateOffer(GstElement *webrtc)
+WebRTCSession::acceptICECandidates(
+ const std::vector<mtx::events::msg::CallCandidates::Candidate> &candidates)
{
- // create-offer first, then set-local-description
- GstPromise *promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
- g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise);
+ if (state_ >= State::INITIATED) {
+ for (const auto &c : candidates) {
+ nhlog::ui()->debug(
+ "WebRTC: remote candidate: (m-line:{}):{}", c.sdpMLineIndex, c.candidate);
+ g_signal_emit_by_name(
+ webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str());
+ }
+ }
}
-void
-setLocalDescription(GstPromise *promise, gpointer webrtc)
+bool
+WebRTCSession::startPipeline(int opusPayloadType)
{
- const GstStructure *reply = gst_promise_get_reply(promise);
- gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer"));
- GstWebRTCSessionDescription *gstsdp = nullptr;
- gst_structure_get(reply, isAnswer ? "answer" : "offer", GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &gstsdp, nullptr);
- gst_promise_unref(promise);
- g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr);
+ if (state_ != State::DISCONNECTED)
+ return false;
- gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp);
- localsdp_ = std::string(sdp);
- g_free(sdp);
- gst_webrtc_session_description_free(gstsdp);
+ emit stateChanged(State::INITIATING);
- nhlog::ui()->debug("WebRTC: local description set ({}):\n{}", isAnswer ? "answer" : "offer", localsdp_);
-}
+ if (!createPipeline(opusPayloadType))
+ return false;
-void
-addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED, guint mlineIndex, gchar *candidate, gpointer G_GNUC_UNUSED)
-{
- nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate);
+ webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin");
- if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) {
- emit WebRTCSession::instance().newICECandidate({"audio", (uint16_t)mlineIndex, candidate});
- return;
- }
+ if (!stunServer_.empty()) {
+ nhlog::ui()->info("WebRTC: setting STUN server: {}", stunServer_);
+ g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr);
+ }
- localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate});
+ for (const auto &uri : turnServers_) {
+ nhlog::ui()->info("WebRTC: setting TURN server: {}", uri);
+ gboolean udata;
+ g_signal_emit_by_name(webrtc_, "add-turn-server", uri.c_str(), (gpointer)(&udata));
+ }
+ if (turnServers_.empty())
+ nhlog::ui()->warn("WebRTC: no TURN server provided");
- // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early
- // fixed in v1.18
- // use a 100ms timeout in the meantime
- static guint timerid = 0;
- if (timerid)
- g_source_remove(timerid);
+ // generate the offer when the pipeline goes to PLAYING
+ if (isoffering_)
+ g_signal_connect(
+ webrtc_, "on-negotiation-needed", G_CALLBACK(::createOffer), nullptr);
- timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid);
-}
+ // on-ice-candidate is emitted when a local ICE candidate has been gathered
+ g_signal_connect(webrtc_, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), nullptr);
-gboolean
-onICEGatheringCompletion(gpointer timerid)
-{
- *(guint*)(timerid) = 0;
- if (isoffering_) {
- emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
- emit WebRTCSession::instance().stateChanged(WebRTCSession::State::OFFERSENT);
- }
- else {
- emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
- emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ANSWERSENT);
- }
- return FALSE;
-}
+ // capture ICE failure
+ g_signal_connect(
+ webrtc_, "notify::ice-connection-state", G_CALLBACK(iceConnectionStateChanged), nullptr);
-void
-iceConnectionStateChanged(GstElement *webrtc, GParamSpec *pspec G_GNUC_UNUSED, gpointer user_data G_GNUC_UNUSED)
-{
- GstWebRTCICEConnectionState newState;
- g_object_get(webrtc, "ice-connection-state", &newState, nullptr);
- switch (newState) {
- case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING:
- nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking");
- emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING);
- break;
- case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED:
- nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed");
- emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED);
- break;
- default:
- break;
- }
-}
+ // incoming streams trigger pad-added
+ gst_element_set_state(pipe_, GST_STATE_READY);
+ g_signal_connect(webrtc_, "pad-added", G_CALLBACK(addDecodeBin), pipe_);
-void
-createAnswer(GstPromise *promise, gpointer webrtc)
-{
- // create-answer first, then set-local-description
- gst_promise_unref(promise);
- promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
- g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise);
+ // webrtcbin lifetime is the same as that of the pipeline
+ gst_object_unref(webrtc_);
+
+ // start the pipeline
+ GstStateChangeReturn ret = gst_element_set_state(pipe_, GST_STATE_PLAYING);
+ if (ret == GST_STATE_CHANGE_FAILURE) {
+ nhlog::ui()->error("WebRTC: unable to start pipeline");
+ end();
+ return false;
+ }
+
+ GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipe_));
+ gst_bus_add_watch(bus, newBusMessage, this);
+ gst_object_unref(bus);
+ emit stateChanged(State::INITIATED);
+ return true;
}
-void
-addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
+#define RTP_CAPS_OPUS "application/x-rtp,media=audio,encoding-name=OPUS,payload="
+
+bool
+WebRTCSession::createPipeline(int opusPayloadType)
{
- if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC)
- return;
+ std::string pipeline("webrtcbin bundle-policy=max-bundle name=webrtcbin "
+ "autoaudiosrc ! volume name=srclevel ! audioconvert ! "
+ "audioresample ! queue ! opusenc ! rtpopuspay ! "
+ "queue ! " RTP_CAPS_OPUS +
+ std::to_string(opusPayloadType) + " ! webrtcbin.");
- nhlog::ui()->debug("WebRTC: received incoming stream");
- GstElement *decodebin = gst_element_factory_make("decodebin", nullptr);
- g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe);
- gst_bin_add(GST_BIN(pipe), decodebin);
- gst_element_sync_state_with_parent(decodebin);
- GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
- if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad)))
- nhlog::ui()->error("WebRTC: unable to link new pad");
- gst_object_unref(sinkpad);
+ webrtc_ = nullptr;
+ GError *error = nullptr;
+ pipe_ = gst_parse_launch(pipeline.c_str(), &error);
+ if (error) {
+ nhlog::ui()->error("WebRTC: failed to parse pipeline: {}", error->message);
+ g_error_free(error);
+ end();
+ return false;
+ }
+ return true;
}
-void
-linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
+bool
+WebRTCSession::toggleMuteAudioSrc(bool &isMuted)
{
- GstCaps *caps = gst_pad_get_current_caps(newpad);
- if (!caps)
- return;
-
- const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
- gst_caps_unref(caps);
+ if (state_ < State::INITIATED)
+ return false;
- GstPad *queuepad = nullptr;
- if (g_str_has_prefix(name, "audio")) {
- nhlog::ui()->debug("WebRTC: received incoming audio stream");
- GstElement *queue = gst_element_factory_make("queue", nullptr);
- GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
- GstElement *resample = gst_element_factory_make("audioresample", nullptr);
- GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr);
- gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr);
- gst_element_sync_state_with_parent(queue);
- gst_element_sync_state_with_parent(convert);
- gst_element_sync_state_with_parent(resample);
- gst_element_sync_state_with_parent(sink);
- gst_element_link_many(queue, convert, resample, sink, nullptr);
- queuepad = gst_element_get_static_pad(queue, "sink");
- }
+ GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel");
+ if (!srclevel)
+ return false;
- if (queuepad) {
- if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad)))
- nhlog::ui()->error("WebRTC: unable to link new pad");
- else {
- emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTED);
- }
- gst_object_unref(queuepad);
- }
+ gboolean muted;
+ g_object_get(srclevel, "mute", &muted, nullptr);
+ g_object_set(srclevel, "mute", !muted, nullptr);
+ gst_object_unref(srclevel);
+ isMuted = !muted;
+ return true;
}
+void
+WebRTCSession::end()
+{
+ nhlog::ui()->debug("WebRTC: ending session");
+ if (pipe_) {
+ gst_element_set_state(pipe_, GST_STATE_NULL);
+ gst_object_unref(pipe_);
+ pipe_ = nullptr;
+ }
+ webrtc_ = nullptr;
+ if (state_ != State::DISCONNECTED)
+ emit stateChanged(State::DISCONNECTED);
}
diff --git a/src/WebRTCSession.h b/src/WebRTCSession.h
index d79047a8..6b54f370 100644
--- a/src/WebRTCSession.h
+++ b/src/WebRTCSession.h
@@ -14,52 +14,55 @@ class WebRTCSession : public QObject
Q_OBJECT
public:
- enum class State {
- ICEFAILED,
- DISCONNECTED,
- INITIATING,
- INITIATED,
- OFFERSENT,
- ANSWERSENT,
- CONNECTING,
- CONNECTED
+ enum class State
+ {
+ DISCONNECTED,
+ ICEFAILED,
+ INITIATING,
+ INITIATED,
+ OFFERSENT,
+ ANSWERSENT,
+ CONNECTING,
+ CONNECTED
};
- static WebRTCSession& instance()
+ static WebRTCSession &instance()
{
- static WebRTCSession instance;
- return instance;
+ static WebRTCSession instance;
+ return instance;
}
bool init(std::string *errorMessage = nullptr);
- State state() const {return state_;}
+ State state() const { return state_; }
bool createOffer();
bool acceptOffer(const std::string &sdp);
bool acceptAnswer(const std::string &sdp);
- void acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate>&);
+ void acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
bool toggleMuteAudioSrc(bool &isMuted);
void end();
- void setStunServer(const std::string &stunServer) {stunServer_ = stunServer;}
- void setTurnServers(const std::vector<std::string> &uris) {turnServers_ = uris;}
+ void setStunServer(const std::string &stunServer) { stunServer_ = stunServer; }
+ void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }
signals:
- void offerCreated(const std::string &sdp, const std::vector<mtx::events::msg::CallCandidates::Candidate>&);
- void answerCreated(const std::string &sdp, const std::vector<mtx::events::msg::CallCandidates::Candidate>&);
- void newICECandidate(const mtx::events::msg::CallCandidates::Candidate&);
+ void offerCreated(const std::string &sdp,
+ const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
+ void answerCreated(const std::string &sdp,
+ const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
+ void newICECandidate(const mtx::events::msg::CallCandidates::Candidate &);
void stateChanged(WebRTCSession::State); // explicit qualifier necessary for Qt
private slots:
- void setState(State state) {state_ = state;}
+ void setState(State state) { state_ = state; }
private:
WebRTCSession();
- bool initialised_ = false;
- State state_ = State::DISCONNECTED;
- GstElement *pipe_ = nullptr;
+ bool initialised_ = false;
+ State state_ = State::DISCONNECTED;
+ GstElement *pipe_ = nullptr;
GstElement *webrtc_ = nullptr;
std::string stunServer_;
std::vector<std::string> turnServers_;
@@ -68,6 +71,6 @@ private:
bool createPipeline(int opusPayloadType);
public:
- WebRTCSession(WebRTCSession const&) = delete;
- void operator=(WebRTCSession const&) = delete;
+ WebRTCSession(WebRTCSession const &) = delete;
+ void operator=(WebRTCSession const &) = delete;
};
diff --git a/src/dialogs/AcceptCall.cpp b/src/dialogs/AcceptCall.cpp
index 6b5e2e60..58348b15 100644
--- a/src/dialogs/AcceptCall.cpp
+++ b/src/dialogs/AcceptCall.cpp
@@ -1,4 +1,5 @@
#include <QLabel>
+#include <QPixmap>
#include <QPushButton>
#include <QString>
#include <QVBoxLayout>
@@ -10,12 +11,12 @@
namespace dialogs {
-AcceptCall::AcceptCall(
- const QString &caller,
- const QString &displayName,
- const QString &roomName,
- const QString &avatarUrl,
- QWidget *parent) : QWidget(parent)
+AcceptCall::AcceptCall(const QString &caller,
+ const QString &displayName,
+ const QString &roomName,
+ const QString &avatarUrl,
+ QWidget *parent)
+ : QWidget(parent)
{
setAutoFillBackground(true);
setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
@@ -39,8 +40,8 @@ AcceptCall::AcceptCall(
if (!displayName.isEmpty() && displayName != caller) {
displayNameLabel = new QLabel(displayName, this);
labelFont.setPointSizeF(f.pointSizeF() * 2);
- displayNameLabel ->setFont(labelFont);
- displayNameLabel ->setAlignment(Qt::AlignCenter);
+ displayNameLabel->setFont(labelFont);
+ displayNameLabel->setAlignment(Qt::AlignCenter);
}
QLabel *callerLabel = new QLabel(caller, this);
@@ -48,19 +49,23 @@ AcceptCall::AcceptCall(
callerLabel->setFont(labelFont);
callerLabel->setAlignment(Qt::AlignCenter);
- QLabel *voiceCallLabel = new QLabel("Voice Call", this);
- labelFont.setPointSizeF(f.pointSizeF() * 1.1);
- voiceCallLabel->setFont(labelFont);
- voiceCallLabel->setAlignment(Qt::AlignCenter);
-
auto avatar = new Avatar(this, QFontMetrics(f).height() * 6);
if (!avatarUrl.isEmpty())
- avatar->setImage(avatarUrl);
+ avatar->setImage(avatarUrl);
else
- avatar->setLetter(utils::firstChar(roomName));
+ avatar->setLetter(utils::firstChar(roomName));
+
+ const int iconSize = 24;
+ QLabel *callTypeIndicator = new QLabel(this);
+ QPixmap callIndicator(":/icons/icons/ui/place-call.png");
+ callTypeIndicator->setPixmap(callIndicator.scaled(iconSize * 2, iconSize * 2));
+
+ QLabel *callTypeLabel = new QLabel("Voice Call", this);
+ labelFont.setPointSizeF(f.pointSizeF() * 1.1);
+ callTypeLabel->setFont(labelFont);
+ callTypeLabel->setAlignment(Qt::AlignCenter);
- const int iconSize = 24;
- auto buttonLayout = new QHBoxLayout();
+ auto buttonLayout = new QHBoxLayout;
buttonLayout->setSpacing(20);
acceptBtn_ = new QPushButton(tr("Accept"), this);
acceptBtn_->setDefault(true);
@@ -74,10 +79,11 @@ AcceptCall::AcceptCall(
buttonLayout->addWidget(rejectBtn_);
if (displayNameLabel)
- layout->addWidget(displayNameLabel, 0, Qt::AlignCenter);
+ layout->addWidget(displayNameLabel, 0, Qt::AlignCenter);
layout->addWidget(callerLabel, 0, Qt::AlignCenter);
- layout->addWidget(voiceCallLabel, 0, Qt::AlignCenter);
layout->addWidget(avatar, 0, Qt::AlignCenter);
+ layout->addWidget(callTypeIndicator, 0, Qt::AlignCenter);
+ layout->addWidget(callTypeLabel, 0, Qt::AlignCenter);
layout->addLayout(buttonLayout);
connect(acceptBtn_, &QPushButton::clicked, this, [this]() {
diff --git a/src/dialogs/AcceptCall.h b/src/dialogs/AcceptCall.h
index 8e3ed3b2..5d2251fd 100644
--- a/src/dialogs/AcceptCall.h
+++ b/src/dialogs/AcceptCall.h
@@ -12,12 +12,11 @@ class AcceptCall : public QWidget
Q_OBJECT
public:
- AcceptCall(
- const QString &caller,
- const QString &displayName,
- const QString &roomName,
- const QString &avatarUrl,
- QWidget *parent = nullptr);
+ AcceptCall(const QString &caller,
+ const QString &displayName,
+ const QString &roomName,
+ const QString &avatarUrl,
+ QWidget *parent = nullptr);
signals:
void accept();
diff --git a/src/dialogs/PlaceCall.cpp b/src/dialogs/PlaceCall.cpp
index 81dd85dd..0fda1794 100644
--- a/src/dialogs/PlaceCall.cpp
+++ b/src/dialogs/PlaceCall.cpp
@@ -10,12 +10,12 @@
namespace dialogs {
-PlaceCall::PlaceCall(
- const QString &callee,
- const QString &displayName,
- const QString &roomName,
- const QString &avatarUrl,
- QWidget *parent) : QWidget(parent)
+PlaceCall::PlaceCall(const QString &callee,
+ const QString &displayName,
+ const QString &roomName,
+ const QString &avatarUrl,
+ QWidget *parent)
+ : QWidget(parent)
{
setAutoFillBackground(true);
setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
@@ -34,11 +34,13 @@ PlaceCall::PlaceCall(
f.setPointSizeF(f.pointSizeF());
auto avatar = new Avatar(this, QFontMetrics(f).height() * 3);
if (!avatarUrl.isEmpty())
- avatar->setImage(avatarUrl);
+ avatar->setImage(avatarUrl);
else
- avatar->setLetter(utils::firstChar(roomName));
-
- voiceBtn_ = new QPushButton(tr("Voice Call"), this);
+ avatar->setLetter(utils::firstChar(roomName));
+ const int iconSize = 24;
+ voiceBtn_ = new QPushButton(tr("Voice"), this);
+ voiceBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
+ voiceBtn_->setIconSize(QSize(iconSize, iconSize));
voiceBtn_->setDefault(true);
cancelBtn_ = new QPushButton(tr("Cancel"), this);
@@ -47,7 +49,7 @@ PlaceCall::PlaceCall(
buttonLayout->addWidget(voiceBtn_);
buttonLayout->addWidget(cancelBtn_);
- QString name = displayName.isEmpty() ? callee : displayName;
+ QString name = displayName.isEmpty() ? callee : displayName;
QLabel *label = new QLabel("Place a call to " + name + "?", this);
layout->addWidget(label);
diff --git a/src/dialogs/PlaceCall.h b/src/dialogs/PlaceCall.h
index ed6fb750..f6db9ab5 100644
--- a/src/dialogs/PlaceCall.h
+++ b/src/dialogs/PlaceCall.h
@@ -12,12 +12,11 @@ class PlaceCall : public QWidget
Q_OBJECT
public:
- PlaceCall(
- const QString &callee,
- const QString &displayName,
- const QString &roomName,
- const QString &avatarUrl,
- QWidget *parent = nullptr);
+ PlaceCall(const QString &callee,
+ const QString &displayName,
+ const QString &roomName,
+ const QString &avatarUrl,
+ QWidget *parent = nullptr);
signals:
void voice();
diff --git a/src/timeline/TimelineModel.cpp b/src/timeline/TimelineModel.cpp
index e4677f53..67e07d7b 100644
--- a/src/timeline/TimelineModel.cpp
+++ b/src/timeline/TimelineModel.cpp
@@ -796,9 +796,11 @@ TimelineModel::internalAddEvents(
} else if (std::holds_alternative<mtx::events::RoomEvent<
mtx::events::msg::CallCandidates>>(e_) ||
std::holds_alternative<
- mtx::events::RoomEvent<mtx::events::msg::CallAnswer>>( e_) ||
+ mtx::events::RoomEvent<mtx::events::msg::CallAnswer>>(
+ e_) ||
std::holds_alternative<
- mtx::events::RoomEvent<mtx::events::msg::CallHangUp>>( e_)) {
+ mtx::events::RoomEvent<mtx::events::msg::CallHangUp>>(
+ e_)) {
emit newCallEvent(e_);
}
}