From 7a206441c86cd2aa84cbbbc6be803f03b2f355ab Mon Sep 17 00:00:00 2001 From: trilene Date: Fri, 10 Jul 2020 19:19:48 -0400 Subject: Support voice calls --- src/dialogs/AcceptCall.cpp | 53 ++++++++++++++++++++++++++++++++++++++++ src/dialogs/AcceptCall.h | 26 ++++++++++++++++++++ src/dialogs/PlaceCall.cpp | 60 ++++++++++++++++++++++++++++++++++++++++++++++ src/dialogs/PlaceCall.h | 28 ++++++++++++++++++++++ 4 files changed, 167 insertions(+) create mode 100644 src/dialogs/AcceptCall.cpp create mode 100644 src/dialogs/AcceptCall.h create mode 100644 src/dialogs/PlaceCall.cpp create mode 100644 src/dialogs/PlaceCall.h (limited to 'src/dialogs') diff --git a/src/dialogs/AcceptCall.cpp b/src/dialogs/AcceptCall.cpp new file mode 100644 index 00000000..f04a613a --- /dev/null +++ b/src/dialogs/AcceptCall.cpp @@ -0,0 +1,53 @@ +#include +#include +#include + +#include "Config.h" +#include "dialogs/AcceptCall.h" + +namespace dialogs { + +AcceptCall::AcceptCall(const QString &caller, const QString &displayName, QWidget *parent) + : QWidget(parent) +{ + setAutoFillBackground(true); + setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); + setWindowModality(Qt::WindowModal); + setAttribute(Qt::WA_DeleteOnClose, true); + + auto layout = new QVBoxLayout(this); + layout->setSpacing(conf::modals::WIDGET_SPACING); + layout->setMargin(conf::modals::WIDGET_MARGIN); + + auto buttonLayout = new QHBoxLayout(); + buttonLayout->setSpacing(15); + buttonLayout->setMargin(0); + + acceptBtn_ = new QPushButton(tr("Accept"), this); + acceptBtn_->setDefault(true); + rejectBtn_ = new QPushButton(tr("Reject"), this); + + buttonLayout->addStretch(1); + buttonLayout->addWidget(acceptBtn_); + buttonLayout->addWidget(rejectBtn_); + + QLabel *label; + if (!displayName.isEmpty() && displayName != caller) + label = new QLabel("Accept call from " + displayName + " (" + caller + ")?", this); + else + label = new QLabel("Accept call from " + caller + "?", this); + + layout->addWidget(label); + layout->addLayout(buttonLayout); + + connect(acceptBtn_, &QPushButton::clicked, this, [this]() { + emit accept(); + emit close(); + }); + connect(rejectBtn_, &QPushButton::clicked, this, [this]() { + emit reject(); + emit close(); + }); +} + +} diff --git a/src/dialogs/AcceptCall.h b/src/dialogs/AcceptCall.h new file mode 100644 index 00000000..a410d6b7 --- /dev/null +++ b/src/dialogs/AcceptCall.h @@ -0,0 +1,26 @@ +#pragma once + +#include +#include + +class QPushButton; + +namespace dialogs { + +class AcceptCall : public QWidget +{ + Q_OBJECT + +public: + AcceptCall(const QString &caller, const QString &displayName, QWidget *parent = nullptr); + +signals: + void accept(); + void reject(); + +private: + QPushButton *acceptBtn_; + QPushButton *rejectBtn_; +}; + +} diff --git a/src/dialogs/PlaceCall.cpp b/src/dialogs/PlaceCall.cpp new file mode 100644 index 00000000..8b37ff6a --- /dev/null +++ b/src/dialogs/PlaceCall.cpp @@ -0,0 +1,60 @@ +#include +#include +#include +#include + +#include "Config.h" +#include "dialogs/PlaceCall.h" + +namespace dialogs { + +PlaceCall::PlaceCall(const QString &callee, const QString &displayName, QWidget *parent) + : QWidget(parent) +{ + setAutoFillBackground(true); + setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); + setWindowModality(Qt::WindowModal); + setAttribute(Qt::WA_DeleteOnClose, true); + + auto layout = new QVBoxLayout(this); + layout->setSpacing(conf::modals::WIDGET_SPACING); + layout->setMargin(conf::modals::WIDGET_MARGIN); + + auto buttonLayout = new QHBoxLayout(); + buttonLayout->setSpacing(15); + 
buttonLayout->setMargin(0); + + voiceBtn_ = new QPushButton(tr("Voice Call"), this); + voiceBtn_->setDefault(true); + videoBtn_ = new QPushButton(tr("Video Call"), this); + cancelBtn_ = new QPushButton(tr("Cancel"), this); + + buttonLayout->addStretch(1); + buttonLayout->addWidget(voiceBtn_); + buttonLayout->addWidget(videoBtn_); + buttonLayout->addWidget(cancelBtn_); + + QLabel *label; + if (!displayName.isEmpty() && displayName != callee) + label = new QLabel("Place a call to " + displayName + " (" + callee + ")?", this); + else + label = new QLabel("Place a call to " + callee + "?", this); + + layout->addWidget(label); + layout->addLayout(buttonLayout); + + connect(voiceBtn_, &QPushButton::clicked, this, [this]() { + emit voice(); + emit close(); + }); + connect(videoBtn_, &QPushButton::clicked, this, [this]() { + emit video(); + emit close(); + }); + connect(cancelBtn_, &QPushButton::clicked, this, [this]() { + emit cancel(); + emit close(); + }); +} + +} diff --git a/src/dialogs/PlaceCall.h b/src/dialogs/PlaceCall.h new file mode 100644 index 00000000..b4de1428 --- /dev/null +++ b/src/dialogs/PlaceCall.h @@ -0,0 +1,28 @@ +#pragma once + +#include + +class QPushButton; +class QString; + +namespace dialogs { + +class PlaceCall : public QWidget +{ + Q_OBJECT + +public: + PlaceCall(const QString &callee, const QString &displayName, QWidget *parent = nullptr); + +signals: + void voice(); + void video(); + void cancel(); + +private: + QPushButton *voiceBtn_; + QPushButton *videoBtn_; + QPushButton *cancelBtn_; +}; + +} -- cgit 1.5.1 From aa34576dfd5781ddd9a97522ca15084f8195045f Mon Sep 17 00:00:00 2001 From: Nicolas Werner Date: Thu, 16 Jul 2020 20:19:28 +0200 Subject: Warn before kicking,banning,inviting,etc --- src/ChatPage.cpp | 56 +++++++++++++++++++++++++++++++++++---------- src/dialogs/UserProfile.cpp | 11 +++++++++ src/dialogs/UserProfile.h | 1 + 3 files changed, 56 insertions(+), 12 deletions(-) (limited to 'src/dialogs') diff --git a/src/ChatPage.cpp b/src/ChatPage.cpp index 0b290927..36d1fc92 100644 --- a/src/ChatPage.cpp +++ b/src/ChatPage.cpp @@ -1152,11 +1152,19 @@ ChatPage::leaveRoom(const QString &room_id) void ChatPage::inviteUser(QString userid, QString reason) { + auto room = current_room_; + + if (QMessageBox::question(this, + tr("Confirm invite"), + tr("Do you really want to invite %1 (%2)?") + .arg(cache::displayName(current_room_, userid)) + .arg(userid)) != QMessageBox::Yes) + return; + http::client()->invite_user( - current_room_.toStdString(), + room.toStdString(), userid.toStdString(), - [this, userid, room = current_room_](const mtx::responses::Empty &, - mtx::http::RequestErr err) { + [this, userid, room](const mtx::responses::Empty &, mtx::http::RequestErr err) { if (err) { emit showNotification( tr("Failed to invite %1 to %2: %3") @@ -1171,11 +1179,19 @@ ChatPage::inviteUser(QString userid, QString reason) void ChatPage::kickUser(QString userid, QString reason) { + auto room = current_room_; + + if (QMessageBox::question(this, + tr("Confirm kick"), + tr("Do you really want to kick %1 (%2)?") + .arg(cache::displayName(current_room_, userid)) + .arg(userid)) != QMessageBox::Yes) + return; + http::client()->kick_user( - current_room_.toStdString(), + room.toStdString(), userid.toStdString(), - [this, userid, room = current_room_](const mtx::responses::Empty &, - mtx::http::RequestErr err) { + [this, userid, room](const mtx::responses::Empty &, mtx::http::RequestErr err) { if (err) { emit showNotification( tr("Failed to kick %1 to %2: %3") @@ -1190,11 +1206,19 @@ 
ChatPage::kickUser(QString userid, QString reason) void ChatPage::banUser(QString userid, QString reason) { + auto room = current_room_; + + if (QMessageBox::question(this, + tr("Confirm ban"), + tr("Do you really want to ban %1 (%2)?") + .arg(cache::displayName(current_room_, userid)) + .arg(userid)) != QMessageBox::Yes) + return; + http::client()->ban_user( - current_room_.toStdString(), + room.toStdString(), userid.toStdString(), - [this, userid, room = current_room_](const mtx::responses::Empty &, - mtx::http::RequestErr err) { + [this, userid, room](const mtx::responses::Empty &, mtx::http::RequestErr err) { if (err) { emit showNotification( tr("Failed to ban %1 in %2: %3") @@ -1209,11 +1233,19 @@ ChatPage::banUser(QString userid, QString reason) void ChatPage::unbanUser(QString userid, QString reason) { + auto room = current_room_; + + if (QMessageBox::question(this, + tr("Confirm unban"), + tr("Do you really want to unban %1 (%2)?") + .arg(cache::displayName(current_room_, userid)) + .arg(userid)) != QMessageBox::Yes) + return; + http::client()->unban_user( - current_room_.toStdString(), + room.toStdString(), userid.toStdString(), - [this, userid, room = current_room_](const mtx::responses::Empty &, - mtx::http::RequestErr err) { + [this, userid, room](const mtx::responses::Empty &, mtx::http::RequestErr err) { if (err) { emit showNotification( tr("Failed to unban %1 in %2: %3") diff --git a/src/dialogs/UserProfile.cpp b/src/dialogs/UserProfile.cpp index 3415b127..086dbb40 100644 --- a/src/dialogs/UserProfile.cpp +++ b/src/dialogs/UserProfile.cpp @@ -1,6 +1,7 @@ #include #include #include +#include #include #include @@ -97,6 +98,14 @@ UserProfile::UserProfile(QWidget *parent) if (utils::localUser() != user_id) req.invite = {user_id.toStdString()}; + if (QMessageBox::question( + this, + tr("Confirm DM"), + tr("Do you really want to invite %1 (%2) to a direct chat?") + .arg(cache::displayName(roomId_, user_id)) + .arg(user_id)) != QMessageBox::Yes) + return; + emit ChatPage::instance()->createRoom(req); }); @@ -199,6 +208,8 @@ UserProfile::init(const QString &userId, const QString &roomId) { resetToDefaults(); + this->roomId_ = roomId; + auto displayName = cache::displayName(roomId, userId); userIdLabel_->setText(userId); diff --git a/src/dialogs/UserProfile.h b/src/dialogs/UserProfile.h index 81276d2a..8129fdcf 100644 --- a/src/dialogs/UserProfile.h +++ b/src/dialogs/UserProfile.h @@ -53,6 +53,7 @@ private: void resetToDefaults(); Avatar *avatar_; + QString roomId_; QLabel *userIdLabel_; QLabel *displayNameLabel_; -- cgit 1.5.1 From 88cfa3a8fa7554ab545f6779f2dda9709f72fbbb Mon Sep 17 00:00:00 2001 From: trilene Date: Wed, 22 Jul 2020 21:15:45 -0400 Subject: Polish voice call UI --- resources/icons/ui/end-call.png | Bin 0 -> 643 bytes resources/icons/ui/microphone-mute.png | Bin 0 -> 1153 bytes resources/icons/ui/microphone-unmute.png | Bin 0 -> 1093 bytes resources/icons/ui/place-call.png | Bin 0 -> 759 bytes resources/res.qrc | 5 ++ src/ActiveCallBar.cpp | 142 ++++++++++++++++++++++++------- src/ActiveCallBar.h | 23 ++++- src/CallManager.cpp | 45 ++++++---- src/CallManager.h | 6 +- src/ChatPage.cpp | 30 ++++--- src/TextInputWidget.cpp | 20 ++--- src/TextInputWidget.h | 3 +- src/WebRTCSession.cpp | 76 +++++++++-------- src/WebRTCSession.h | 20 ++++- src/dialogs/AcceptCall.cpp | 68 ++++++++++++--- src/dialogs/AcceptCall.h | 9 +- src/dialogs/PlaceCall.cpp | 36 +++++--- src/dialogs/PlaceCall.h | 11 ++- 18 files changed, 348 insertions(+), 146 deletions(-) create mode 100644 
resources/icons/ui/end-call.png create mode 100644 resources/icons/ui/microphone-mute.png create mode 100644 resources/icons/ui/microphone-unmute.png create mode 100644 resources/icons/ui/place-call.png (limited to 'src/dialogs') diff --git a/resources/icons/ui/end-call.png b/resources/icons/ui/end-call.png new file mode 100644 index 00000000..6cbb983e Binary files /dev/null and b/resources/icons/ui/end-call.png differ diff --git a/resources/icons/ui/microphone-mute.png b/resources/icons/ui/microphone-mute.png new file mode 100644 index 00000000..0042fbe2 Binary files /dev/null and b/resources/icons/ui/microphone-mute.png differ diff --git a/resources/icons/ui/microphone-unmute.png b/resources/icons/ui/microphone-unmute.png new file mode 100644 index 00000000..27999c70 Binary files /dev/null and b/resources/icons/ui/microphone-unmute.png differ diff --git a/resources/icons/ui/place-call.png b/resources/icons/ui/place-call.png new file mode 100644 index 00000000..a820cf3f Binary files /dev/null and b/resources/icons/ui/place-call.png differ diff --git a/resources/res.qrc b/resources/res.qrc index 3fd3fc96..b245f48f 100644 --- a/resources/res.qrc +++ b/resources/res.qrc @@ -70,6 +70,11 @@ icons/ui/mail-reply.png + icons/ui/place-call.png + icons/ui/end-call.png + icons/ui/microphone-mute.png + icons/ui/microphone-unmute.png + icons/emoji-categories/people.png icons/emoji-categories/people@2x.png icons/emoji-categories/nature.png diff --git a/src/ActiveCallBar.cpp b/src/ActiveCallBar.cpp index a5ef754d..5703c1ed 100644 --- a/src/ActiveCallBar.cpp +++ b/src/ActiveCallBar.cpp @@ -1,10 +1,17 @@ +#include + +#include #include #include #include #include +#include #include "ActiveCallBar.h" +#include "ChatPage.h" +#include "Utils.h" #include "WebRTCSession.h" +#include "ui/Avatar.h" #include "ui/FlatButton.h" ActiveCallBar::ActiveCallBar(QWidget *parent) @@ -12,7 +19,7 @@ ActiveCallBar::ActiveCallBar(QWidget *parent) { setAutoFillBackground(true); auto p = palette(); - p.setColor(backgroundRole(), Qt::green); + p.setColor(backgroundRole(), QColorConstants::Svg::limegreen); setPalette(p); QFont f; @@ -24,51 +31,126 @@ ActiveCallBar::ActiveCallBar(QWidget *parent) setFixedHeight(contentHeight + widgetMargin); - topLayout_ = new QHBoxLayout(this); - topLayout_->setSpacing(widgetMargin); - topLayout_->setContentsMargins( + layout_ = new QHBoxLayout(this); + layout_->setSpacing(widgetMargin); + layout_->setContentsMargins( 2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin); - topLayout_->setSizeConstraint(QLayout::SetMinimumSize); QFont labelFont; - labelFont.setPointSizeF(labelFont.pointSizeF() * 1.2); + labelFont.setPointSizeF(labelFont.pointSizeF() * 1.1); labelFont.setWeight(QFont::Medium); + avatar_ = new Avatar(this, QFontMetrics(f).height() * 2.5); + callPartyLabel_ = new QLabel(this); callPartyLabel_->setFont(labelFont); - // TODO microphone mute/unmute icons + stateLabel_ = new QLabel(this); + stateLabel_->setFont(labelFont); + + durationLabel_ = new QLabel(this); + durationLabel_->setFont(labelFont); + durationLabel_->hide(); + muteBtn_ = new FlatButton(this); - QIcon muteIcon; - muteIcon.addFile(":/icons/icons/ui/do-not-disturb-rounded-sign.png"); - muteBtn_->setIcon(muteIcon); - muteBtn_->setIconSize(QSize(buttonSize_ / 2, buttonSize_ / 2)); - muteBtn_->setToolTip(tr("Mute Mic")); + setMuteIcon(false); muteBtn_->setFixedSize(buttonSize_, buttonSize_); muteBtn_->setCornerRadius(buttonSize_ / 2); - connect(muteBtn_, &FlatButton::clicked, this, [this]() { - if 
(WebRTCSession::instance().toggleMuteAudioSrc(muted_)) { - QIcon icon; - if (muted_) { - muteBtn_->setToolTip("Unmute Mic"); - icon.addFile(":/icons/icons/ui/round-remove-button.png"); - } else { - muteBtn_->setToolTip("Mute Mic"); - icon.addFile(":/icons/icons/ui/do-not-disturb-rounded-sign.png"); - } - muteBtn_->setIcon(icon); - } + connect(muteBtn_, &FlatButton::clicked, this, [this](){ + if (WebRTCSession::instance().toggleMuteAudioSrc(muted_)) + setMuteIcon(muted_); }); - topLayout_->addWidget(callPartyLabel_, 0, Qt::AlignLeft); - topLayout_->addWidget(muteBtn_, 0, Qt::AlignRight); + layout_->addWidget(avatar_, 0, Qt::AlignLeft); + layout_->addWidget(callPartyLabel_, 0, Qt::AlignLeft); + layout_->addWidget(stateLabel_, 0, Qt::AlignLeft); + layout_->addWidget(durationLabel_, 0, Qt::AlignLeft); + layout_->addStretch(); + layout_->addWidget(muteBtn_, 0, Qt::AlignCenter); + layout_->addSpacing(18); + + timer_ = new QTimer(this); + connect(timer_, &QTimer::timeout, this, + [this](){ + auto seconds = QDateTime::currentSecsSinceEpoch() - callStartTime_; + int s = seconds % 60; + int m = (seconds / 60) % 60; + int h = seconds / 3600; + char buf[12]; + if (h) + snprintf(buf, sizeof(buf), "%.2d:%.2d:%.2d", h, m, s); + else + snprintf(buf, sizeof(buf), "%.2d:%.2d", m, s); + durationLabel_->setText(buf); + }); + + connect(&WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &ActiveCallBar::update); +} + +void +ActiveCallBar::setMuteIcon(bool muted) +{ + QIcon icon; + if (muted) { + muteBtn_->setToolTip("Unmute Mic"); + icon.addFile(":/icons/icons/ui/microphone-unmute.png"); + } else { + muteBtn_->setToolTip("Mute Mic"); + icon.addFile(":/icons/icons/ui/microphone-mute.png"); + } + muteBtn_->setIcon(icon); + muteBtn_->setIconSize(QSize(buttonSize_, buttonSize_)); } void -ActiveCallBar::setCallParty(const QString &userid, const QString &displayName) +ActiveCallBar::setCallParty( + const QString &userid, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl) { - if (!displayName.isEmpty() && displayName != userid) - callPartyLabel_->setText("Active Call: " + displayName + " (" + userid + ")"); + callPartyLabel_->setText( + (displayName.isEmpty() ? 
userid : displayName) + " -"); + + if (!avatarUrl.isEmpty()) + avatar_->setImage(avatarUrl); else - callPartyLabel_->setText("Active Call: " + userid); + avatar_->setLetter(utils::firstChar(roomName)); +} + +void +ActiveCallBar::update(WebRTCSession::State state) +{ + switch (state) { + case WebRTCSession::State::INITIATING: + stateLabel_->setText("Initiating call..."); + break; + case WebRTCSession::State::INITIATED: + stateLabel_->setText("Call initiated..."); + break; + case WebRTCSession::State::OFFERSENT: + stateLabel_->setText("Calling..."); + break; + case WebRTCSession::State::CONNECTING: + stateLabel_->setText("Connecting..."); + break; + case WebRTCSession::State::CONNECTED: + callStartTime_ = QDateTime::currentSecsSinceEpoch(); + timer_->start(1000); + stateLabel_->setText("Active call:"); + durationLabel_->setText("00:00"); + durationLabel_->show(); + muteBtn_->show(); + break; + case WebRTCSession::State::DISCONNECTED: + timer_->stop(); + callPartyLabel_->setText(QString()); + stateLabel_->setText(QString()); + durationLabel_->setText(QString()); + durationLabel_->hide(); + setMuteIcon(false); + break; + default: + break; + } } diff --git a/src/ActiveCallBar.h b/src/ActiveCallBar.h index dd01e2ad..8440d7f3 100644 --- a/src/ActiveCallBar.h +++ b/src/ActiveCallBar.h @@ -2,9 +2,12 @@ #include +#include "WebRTCSession.h" + class QHBoxLayout; class QLabel; -class QString; +class QTimer; +class Avatar; class FlatButton; class ActiveCallBar : public QWidget @@ -15,12 +18,24 @@ public: ActiveCallBar(QWidget *parent = nullptr); public slots: - void setCallParty(const QString &userid, const QString &displayName); + void update(WebRTCSession::State); + void setCallParty( + const QString &userid, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl); private: - QHBoxLayout *topLayout_ = nullptr; + QHBoxLayout *layout_ = nullptr; + Avatar *avatar_ = nullptr; QLabel *callPartyLabel_ = nullptr; + QLabel *stateLabel_ = nullptr; + QLabel *durationLabel_ = nullptr; FlatButton *muteBtn_ = nullptr; - int buttonSize_ = 32; + int buttonSize_ = 22; bool muted_ = false; + qint64 callStartTime_ = 0; + QTimer *timer_ = nullptr; + + void setMuteIcon(bool muted); }; diff --git a/src/CallManager.cpp b/src/CallManager.cpp index 92af3b2f..b5c59e08 100644 --- a/src/CallManager.cpp +++ b/src/CallManager.cpp @@ -68,9 +68,9 @@ CallManager::CallManager(QSharedPointer userSettings) turnServerTimer_.setInterval(res.ttl * 1000 * 0.9); }); - connect(&session_, &WebRTCSession::pipelineChanged, this, - [this](bool started) { - if (!started) + connect(&session_, &WebRTCSession::stateChanged, this, + [this](WebRTCSession::State state) { + if (state == WebRTCSession::State::DISCONNECTED) playRingtone("qrc:/media/media/callend.ogg", false); }); @@ -87,9 +87,9 @@ CallManager::sendInvite(const QString &roomid) if (onActiveCall()) return; - std::vector members(cache::getMembers(roomid.toStdString())); - if (members.size() != 2) { - emit ChatPage::instance()->showNotification("Voice/Video calls are limited to 1:1 rooms"); + auto roomInfo = cache::singleRoomInfo(roomid.toStdString()); + if (roomInfo.member_count != 2) { + emit ChatPage::instance()->showNotification("Voice calls are limited to 1:1 rooms."); return; } @@ -105,11 +105,13 @@ CallManager::sendInvite(const QString &roomid) // TODO Add invite timeout generateCallID(); + std::vector members(cache::getMembers(roomid.toStdString())); const RoomMember &callee = members.front().user_id == utils::localUser() ? 
members.back() : members.front(); - emit newCallParty(callee.user_id, callee.display_name); + emit newCallParty(callee.user_id, callee.display_name, + QString::fromStdString(roomInfo.name), QString::fromStdString(roomInfo.avatar_url)); playRingtone("qrc:/media/media/ringback.ogg", true); if (!session_.createOffer()) { - emit ChatPage::instance()->showNotification("Problem setting up call"); + emit ChatPage::instance()->showNotification("Problem setting up call."); endCall(); } } @@ -127,7 +129,7 @@ CallManager::hangUp() bool CallManager::onActiveCall() { - return session_.isActive(); + return session_.state() != WebRTCSession::State::DISCONNECTED; } void CallManager::syncEvent(const mtx::events::collections::TimelineEvents &event) @@ -156,8 +158,8 @@ CallManager::handleEvent(const RoomEvent &callInviteEvent) if (callInviteEvent.content.call_id.empty()) return; - std::vector members(cache::getMembers(callInviteEvent.room_id)); - if (onActiveCall() || members.size() != 2) { + auto roomInfo = cache::singleRoomInfo(callInviteEvent.room_id); + if (onActiveCall() || roomInfo.member_count != 2) { emit newMessage(QString::fromStdString(callInviteEvent.room_id), CallHangUp{callInviteEvent.content.call_id, 0, CallHangUp::Reason::InviteTimeOut}); return; @@ -168,10 +170,18 @@ CallManager::handleEvent(const RoomEvent &callInviteEvent) callid_ = callInviteEvent.content.call_id; remoteICECandidates_.clear(); - const RoomMember &caller = members.front().user_id == utils::localUser() ? members.back() : members.front(); - emit newCallParty(caller.user_id, caller.display_name); - - auto dialog = new dialogs::AcceptCall(caller.user_id, caller.display_name, MainWindow::instance()); + std::vector members(cache::getMembers(callInviteEvent.room_id)); + const RoomMember &caller = + members.front().user_id == utils::localUser() ? members.back() : members.front(); + emit newCallParty(caller.user_id, caller.display_name, + QString::fromStdString(roomInfo.name), QString::fromStdString(roomInfo.avatar_url)); + + auto dialog = new dialogs::AcceptCall( + caller.user_id, + caller.display_name, + QString::fromStdString(roomInfo.name), + QString::fromStdString(roomInfo.avatar_url), + MainWindow::instance()); connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent](){ MainWindow::instance()->hideOverlay(); @@ -198,7 +208,7 @@ CallManager::answerInvite(const CallInvite &invite) session_.setStunServer(settings_->useStunServer() ? 
STUN_SERVER : ""); if (!session_.acceptOffer(invite.sdp)) { - emit ChatPage::instance()->showNotification("Problem setting up call"); + emit ChatPage::instance()->showNotification("Problem setting up call."); hangUp(); return; } @@ -232,6 +242,7 @@ CallManager::handleEvent(const RoomEvent &callAnswerEvent) if (!onActiveCall() && callAnswerEvent.sender == utils::localUser().toStdString() && callid_ == callAnswerEvent.content.call_id) { + emit ChatPage::instance()->showNotification("Call answered on another device."); stopRingtone(); MainWindow::instance()->hideOverlay(); return; @@ -240,7 +251,7 @@ CallManager::handleEvent(const RoomEvent &callAnswerEvent) if (onActiveCall() && callid_ == callAnswerEvent.content.call_id) { stopRingtone(); if (!session_.acceptAnswer(callAnswerEvent.content.sdp)) { - emit ChatPage::instance()->showNotification("Problem setting up call"); + emit ChatPage::instance()->showNotification("Problem setting up call."); hangUp(); } } diff --git a/src/CallManager.h b/src/CallManager.h index 8a93241f..df83a87a 100644 --- a/src/CallManager.h +++ b/src/CallManager.h @@ -36,7 +36,11 @@ signals: void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer&); void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp&); void turnServerRetrieved(const mtx::responses::TurnServer&); - void newCallParty(const QString &userid, const QString& displayName); + void newCallParty( + const QString &userid, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl); private slots: void retrieveTurnServer(); diff --git a/src/ChatPage.cpp b/src/ChatPage.cpp index 15b7c545..5b8ea475 100644 --- a/src/ChatPage.cpp +++ b/src/ChatPage.cpp @@ -138,13 +138,13 @@ ChatPage::ChatPage(QSharedPointer userSettings, QWidget *parent) connect( &callManager_, &CallManager::newCallParty, activeCallBar_, &ActiveCallBar::setCallParty); connect(&WebRTCSession::instance(), - &WebRTCSession::pipelineChanged, + &WebRTCSession::stateChanged, this, - [this](bool callStarted) { - if (callStarted) - activeCallBar_->show(); - else + [this](WebRTCSession::State state) { + if (state == WebRTCSession::State::DISCONNECTED) activeCallBar_->hide(); + else + activeCallBar_->show(); }); // Splitter @@ -469,22 +469,28 @@ ChatPage::ChatPage(QSharedPointer userSettings, QWidget *parent) if (callManager_.onActiveCall()) { callManager_.hangUp(); } else { - if (cache::singleRoomInfo(current_room_.toStdString()).member_count != 2) { - showNotification("Voice/Video calls are limited to 1:1 rooms"); + if (auto roomInfo = + cache::singleRoomInfo(current_room_.toStdString()); + roomInfo.member_count != 2) { + showNotification("Voice calls are limited to 1:1 rooms."); } else { std::vector members( cache::getMembers(current_room_.toStdString())); const RoomMember &callee = members.front().user_id == utils::localUser() ? 
members.back() : members.front(); - auto dialog = - new dialogs::PlaceCall(callee.user_id, callee.display_name, MainWindow::instance()); + auto dialog = new dialogs::PlaceCall( + callee.user_id, + callee.display_name, + QString::fromStdString(roomInfo.name), + QString::fromStdString(roomInfo.avatar_url), + MainWindow::instance()); connect(dialog, &dialogs::PlaceCall::voice, this, [this]() { callManager_.sendInvite(current_room_); }); - connect(dialog, &dialogs::PlaceCall::video, this, [this]() { - showNotification("Video calls not yet implemented"); - }); + /*connect(dialog, &dialogs::PlaceCall::video, this, [this]() { + showNotification("Video calls not yet implemented."); + });*/ utils::centerWidget(dialog, MainWindow::instance()); dialog->show(); } diff --git a/src/TextInputWidget.cpp b/src/TextInputWidget.cpp index 2be0b404..d49fc746 100644 --- a/src/TextInputWidget.cpp +++ b/src/TextInputWidget.cpp @@ -31,7 +31,6 @@ #include "Logging.h" #include "TextInputWidget.h" #include "Utils.h" -#include "WebRTCSession.h" #include "ui/FlatButton.h" #include "ui/LoadingIndicator.h" @@ -455,9 +454,9 @@ TextInputWidget::TextInputWidget(QWidget *parent) topLayout_->setContentsMargins(13, 1, 13, 0); callBtn_ = new FlatButton(this); - changeCallButtonState(false); + changeCallButtonState(WebRTCSession::State::DISCONNECTED); connect(&WebRTCSession::instance(), - &WebRTCSession::pipelineChanged, + &WebRTCSession::stateChanged, this, &TextInputWidget::changeCallButtonState); @@ -664,17 +663,16 @@ TextInputWidget::paintEvent(QPaintEvent *) } void -TextInputWidget::changeCallButtonState(bool callStarted) +TextInputWidget::changeCallButtonState(WebRTCSession::State state) { - // TODO Telephone and HangUp icons - co-opt the ones below for now QIcon icon; - if (callStarted) { - callBtn_->setToolTip(tr("Hang up")); - icon.addFile(":/icons/icons/ui/remove-symbol.png"); - } else { + if (state == WebRTCSession::State::DISCONNECTED) { callBtn_->setToolTip(tr("Place a call")); - icon.addFile(":/icons/icons/ui/speech-bubbles-comment-option.png"); + icon.addFile(":/icons/icons/ui/place-call.png"); + } else { + callBtn_->setToolTip(tr("Hang up")); + icon.addFile(":/icons/icons/ui/end-call.png"); } callBtn_->setIcon(icon); - callBtn_->setIconSize(QSize(ButtonHeight, ButtonHeight)); + callBtn_->setIconSize(QSize(ButtonHeight * 1.1, ButtonHeight * 1.1)); } diff --git a/src/TextInputWidget.h b/src/TextInputWidget.h index ae58f4e3..27dff57f 100644 --- a/src/TextInputWidget.h +++ b/src/TextInputWidget.h @@ -26,6 +26,7 @@ #include #include +#include "WebRTCSession.h" #include "dialogs/PreviewUploadOverlay.h" #include "emoji/PickButton.h" #include "popups/SuggestionsPopup.h" @@ -149,7 +150,7 @@ public slots: void openFileSelection(); void hideUploadSpinner(); void focusLineEdit() { input_->setFocus(); } - void changeCallButtonState(bool callStarted); + void changeCallButtonState(WebRTCSession::State); private slots: void addSelectedEmoji(const QString &emoji); diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp index 4ef7a818..5baed72e 100644 --- a/src/WebRTCSession.cpp +++ b/src/WebRTCSession.cpp @@ -11,6 +11,8 @@ extern "C" { #include "gst/webrtc/webrtc.h" } +Q_DECLARE_METATYPE(WebRTCSession::State) + namespace { bool gisoffer; std::string glocalsdp; @@ -29,6 +31,12 @@ std::string::const_iterator findName(const std::string &sdp, const std::string int getPayloadType(const std::string &sdp, const std::string &name); } +WebRTCSession::WebRTCSession() : QObject() +{ + qRegisterMetaType(); + connect(this, 
&WebRTCSession::stateChanged, this, &WebRTCSession::setState); +} + bool WebRTCSession::init(std::string *errorMessage) { @@ -54,14 +62,14 @@ WebRTCSession::init(std::string *errorMessage) nhlog::ui()->info("Initialised " + gstVersion); // GStreamer Plugins: - // Base: audioconvert, audioresample, opus, playback, videoconvert, volume + // Base: audioconvert, audioresample, opus, playback, volume // Good: autodetect, rtpmanager, vpx // Bad: dtls, srtp, webrtc // libnice [GLib]: nice initialised_ = true; std::string strError = gstVersion + ": Missing plugins: "; const gchar *needed[] = {"audioconvert", "audioresample", "autodetect", "dtls", "nice", - "opus", "playback", "rtpmanager", "srtp", "videoconvert", "vpx", "volume", "webrtc", nullptr}; + "opus", "playback", "rtpmanager", "srtp", "vpx", "volume", "webrtc", nullptr}; GstRegistry *registry = gst_registry_get(); for (guint i = 0; i < g_strv_length((gchar**)needed); i++) { GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]); @@ -91,17 +99,19 @@ WebRTCSession::createOffer() } bool -WebRTCSession::acceptOffer(const std::string& sdp) +WebRTCSession::acceptOffer(const std::string &sdp) { nhlog::ui()->debug("Received offer:\n{}", sdp); + if (state_ != State::DISCONNECTED) + return false; + gisoffer = false; glocalsdp.clear(); gcandidates.clear(); int opusPayloadType = getPayloadType(sdp, "opus"); - if (opusPayloadType == -1) { + if (opusPayloadType == -1) return false; - } GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER); if (!offer) @@ -120,9 +130,11 @@ WebRTCSession::acceptOffer(const std::string& sdp) bool WebRTCSession::startPipeline(int opusPayloadType) { - if (isActive()) + if (state_ != State::DISCONNECTED) return false; + emit stateChanged(State::INITIATING); + if (!createPipeline(opusPayloadType)) return false; @@ -132,7 +144,12 @@ WebRTCSession::startPipeline(int opusPayloadType) nhlog::ui()->info("WebRTC: Setting STUN server: {}", stunServer_); g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr); } - addTurnServers(); + + for (const auto &uri : turnServers_) { + nhlog::ui()->info("WebRTC: Setting TURN server: {}", uri); + gboolean udata; + g_signal_emit_by_name(webrtc_, "add-turn-server", uri.c_str(), (gpointer)(&udata)); + } // generate the offer when the pipeline goes to PLAYING if (gisoffer) @@ -152,16 +169,14 @@ WebRTCSession::startPipeline(int opusPayloadType) GstStateChangeReturn ret = gst_element_set_state(pipe_, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { nhlog::ui()->error("WebRTC: unable to start pipeline"); - gst_object_unref(pipe_); - pipe_ = nullptr; - webrtc_ = nullptr; + end(); return false; } GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipe_)); gst_bus_add_watch(bus, newBusMessage, this); gst_object_unref(bus); - emit pipelineChanged(true); + emit stateChanged(State::INITIATED); return true; } @@ -180,10 +195,7 @@ WebRTCSession::createPipeline(int opusPayloadType) if (error) { nhlog::ui()->error("WebRTC: Failed to parse pipeline: {}", error->message); g_error_free(error); - if (pipe_) { - gst_object_unref(pipe_); - pipe_ = nullptr; - } + end(); return false; } return true; @@ -193,7 +205,7 @@ bool WebRTCSession::acceptAnswer(const std::string &sdp) { nhlog::ui()->debug("WebRTC: Received sdp:\n{}", sdp); - if (!isActive()) + if (state_ != State::OFFERSENT) return false; GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER); @@ -206,18 +218,20 @@ WebRTCSession::acceptAnswer(const std::string &sdp) } void 
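// Illustrative sketch only, not text from these patches: createPipeline() above parses a
// launch description (the "Failed to parse pipeline" handling suggests gst_parse_launch()).
// Assuming the plugins checked for in init() (autodetect, opus, rtpmanager, webrtc, volume),
// a minimal voice-only send pipeline for webrtcbin typically looks like:
//
//   webrtcbin bundle-policy=max-bundle name=webrtcbin
//     autoaudiosrc ! volume name=srclevel ! audioconvert ! audioresample ! queue
//       ! opusenc ! rtpopuspay ! queue
//       ! application/x-rtp,media=audio,encoding-name=OPUS,payload=<opusPayloadType>
//       ! webrtcbin.
//
// <opusPayloadType> stands for the payload number that getPayloadType() extracts from the
// remote SDP; "srclevel" matches the element name that toggleMuteAudioSrc() looks up,
// although modelling it as a "volume" element here is an assumption, not the patch's
// actual launch string.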
-WebRTCSession::acceptICECandidates(const std::vector& candidates) +WebRTCSession::acceptICECandidates(const std::vector &candidates) { - if (isActive()) { - for (const auto& c : candidates) + if (state_ >= State::INITIATED) { + for (const auto &c : candidates) g_signal_emit_by_name(webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str()); } + if (state_ < State::CONNECTED) + emit stateChanged(State::CONNECTING); } bool WebRTCSession::toggleMuteAudioSrc(bool &isMuted) { - if (!isActive()) + if (state_ < State::INITIATED) return false; GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel"); @@ -241,20 +255,7 @@ WebRTCSession::end() pipe_ = nullptr; } webrtc_ = nullptr; - emit pipelineChanged(false); -} - -void -WebRTCSession::addTurnServers() -{ - if (!webrtc_) - return; - - for (const auto &uri : turnServers_) { - nhlog::ui()->info("WebRTC: Setting TURN server: {}", uri); - gboolean udata; - g_signal_emit_by_name(webrtc_, "add-turn-server", uri.c_str(), (gpointer)(&udata)); - } + emit stateChanged(State::DISCONNECTED); } namespace { @@ -373,8 +374,10 @@ gboolean onICEGatheringCompletion(gpointer timerid) { *(guint*)(timerid) = 0; - if (gisoffer) + if (gisoffer) { emit WebRTCSession::instance().offerCreated(glocalsdp, gcandidates); + emit WebRTCSession::instance().stateChanged(WebRTCSession::State::OFFERSENT); + } else emit WebRTCSession::instance().answerCreated(glocalsdp, gcandidates); @@ -445,6 +448,9 @@ linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe if (queuepad) { if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad))) nhlog::ui()->error("WebRTC: Unable to link new pad"); + else { + emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTED); + } gst_object_unref(queuepad); } } diff --git a/src/WebRTCSession.h b/src/WebRTCSession.h index fffefb25..42db204d 100644 --- a/src/WebRTCSession.h +++ b/src/WebRTCSession.h @@ -14,6 +14,15 @@ class WebRTCSession : public QObject Q_OBJECT public: + enum class State { + DISCONNECTED, + INITIATING, + INITIATED, + OFFERSENT, + CONNECTING, + CONNECTED + }; + static WebRTCSession& instance() { static WebRTCSession instance; @@ -27,7 +36,7 @@ public: bool acceptAnswer(const std::string &sdp); void acceptICECandidates(const std::vector&); - bool isActive() { return pipe_ != nullptr; } + State state() const {return state_;} bool toggleMuteAudioSrc(bool &isMuted); void end(); @@ -37,12 +46,16 @@ public: signals: void offerCreated(const std::string &sdp, const std::vector&); void answerCreated(const std::string &sdp, const std::vector&); - void pipelineChanged(bool started); + void stateChanged(WebRTCSession::State); // explicit qualifier necessary for Qt + +private slots: + void setState(State state) {state_ = state;} private: - WebRTCSession() : QObject() {} + WebRTCSession(); bool initialised_ = false; + State state_ = State::DISCONNECTED; GstElement *pipe_ = nullptr; GstElement *webrtc_ = nullptr; std::string stunServer_; @@ -50,7 +63,6 @@ private: bool startPipeline(int opusPayloadType); bool createPipeline(int opusPayloadType); - void addTurnServers(); public: WebRTCSession(WebRTCSession const&) = delete; diff --git a/src/dialogs/AcceptCall.cpp b/src/dialogs/AcceptCall.cpp index f04a613a..6b5e2e60 100644 --- a/src/dialogs/AcceptCall.cpp +++ b/src/dialogs/AcceptCall.cpp @@ -1,43 +1,83 @@ #include #include +#include #include #include "Config.h" +#include "Utils.h" #include "dialogs/AcceptCall.h" +#include "ui/Avatar.h" namespace dialogs { -AcceptCall::AcceptCall(const 
QString &caller, const QString &displayName, QWidget *parent) - : QWidget(parent) +AcceptCall::AcceptCall( + const QString &caller, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl, + QWidget *parent) : QWidget(parent) { setAutoFillBackground(true); setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); setWindowModality(Qt::WindowModal); setAttribute(Qt::WA_DeleteOnClose, true); + setMinimumWidth(conf::modals::MIN_WIDGET_WIDTH); + setSizePolicy(QSizePolicy::Maximum, QSizePolicy::Maximum); + auto layout = new QVBoxLayout(this); layout->setSpacing(conf::modals::WIDGET_SPACING); layout->setMargin(conf::modals::WIDGET_MARGIN); - auto buttonLayout = new QHBoxLayout(); - buttonLayout->setSpacing(15); - buttonLayout->setMargin(0); + QFont f; + f.setPointSizeF(f.pointSizeF()); + + QFont labelFont; + labelFont.setWeight(QFont::Medium); + + QLabel *displayNameLabel = nullptr; + if (!displayName.isEmpty() && displayName != caller) { + displayNameLabel = new QLabel(displayName, this); + labelFont.setPointSizeF(f.pointSizeF() * 2); + displayNameLabel ->setFont(labelFont); + displayNameLabel ->setAlignment(Qt::AlignCenter); + } + QLabel *callerLabel = new QLabel(caller, this); + labelFont.setPointSizeF(f.pointSizeF() * 1.2); + callerLabel->setFont(labelFont); + callerLabel->setAlignment(Qt::AlignCenter); + + QLabel *voiceCallLabel = new QLabel("Voice Call", this); + labelFont.setPointSizeF(f.pointSizeF() * 1.1); + voiceCallLabel->setFont(labelFont); + voiceCallLabel->setAlignment(Qt::AlignCenter); + + auto avatar = new Avatar(this, QFontMetrics(f).height() * 6); + if (!avatarUrl.isEmpty()) + avatar->setImage(avatarUrl); + else + avatar->setLetter(utils::firstChar(roomName)); + + const int iconSize = 24; + auto buttonLayout = new QHBoxLayout(); + buttonLayout->setSpacing(20); acceptBtn_ = new QPushButton(tr("Accept"), this); acceptBtn_->setDefault(true); - rejectBtn_ = new QPushButton(tr("Reject"), this); + acceptBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png")); + acceptBtn_->setIconSize(QSize(iconSize, iconSize)); - buttonLayout->addStretch(1); + rejectBtn_ = new QPushButton(tr("Reject"), this); + rejectBtn_->setIcon(QIcon(":/icons/icons/ui/end-call.png")); + rejectBtn_->setIconSize(QSize(iconSize, iconSize)); buttonLayout->addWidget(acceptBtn_); buttonLayout->addWidget(rejectBtn_); - QLabel *label; - if (!displayName.isEmpty() && displayName != caller) - label = new QLabel("Accept call from " + displayName + " (" + caller + ")?", this); - else - label = new QLabel("Accept call from " + caller + "?", this); - - layout->addWidget(label); + if (displayNameLabel) + layout->addWidget(displayNameLabel, 0, Qt::AlignCenter); + layout->addWidget(callerLabel, 0, Qt::AlignCenter); + layout->addWidget(voiceCallLabel, 0, Qt::AlignCenter); + layout->addWidget(avatar, 0, Qt::AlignCenter); layout->addLayout(buttonLayout); connect(acceptBtn_, &QPushButton::clicked, this, [this]() { diff --git a/src/dialogs/AcceptCall.h b/src/dialogs/AcceptCall.h index a410d6b7..8e3ed3b2 100644 --- a/src/dialogs/AcceptCall.h +++ b/src/dialogs/AcceptCall.h @@ -1,9 +1,9 @@ #pragma once -#include #include class QPushButton; +class QString; namespace dialogs { @@ -12,7 +12,12 @@ class AcceptCall : public QWidget Q_OBJECT public: - AcceptCall(const QString &caller, const QString &displayName, QWidget *parent = nullptr); + AcceptCall( + const QString &caller, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl, + QWidget *parent = nullptr); signals: void accept(); diff 
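// Context sketch (hedged, not part of the patch): the accept()/reject() signals above are
// wired up by CallManager (see the connects to dialogs::AcceptCall earlier in this series);
// accepting runs answerInvite(), which ultimately emits CallAnswer and CallCandidates
// messages. On the wire these are the standard Matrix VoIP events, roughly:
//
//   { "type": "m.call.answer",
//     "content": { "call_id": "<callid_>", "version": 0,
//                  "answer": { "type": "answer", "sdp": "v=0 ..." } } }
//
//   { "type": "m.call.candidates",
//     "content": { "call_id": "<callid_>", "version": 0,
//                  "candidates": [ { "sdpMid": "audio", "sdpMLineIndex": 0,
//                                    "candidate": "candidate:..." } ] } }
//
// The literal 0 in the CallAnswer{callid_, sdp, 0} and CallCandidates{callid_, candidates, 0}
// constructions appears to be this "version" field.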
--git a/src/dialogs/PlaceCall.cpp b/src/dialogs/PlaceCall.cpp index 8b37ff6a..c5c78f94 100644 --- a/src/dialogs/PlaceCall.cpp +++ b/src/dialogs/PlaceCall.cpp @@ -4,12 +4,18 @@ #include #include "Config.h" +#include "Utils.h" #include "dialogs/PlaceCall.h" +#include "ui/Avatar.h" namespace dialogs { -PlaceCall::PlaceCall(const QString &callee, const QString &displayName, QWidget *parent) - : QWidget(parent) +PlaceCall::PlaceCall( + const QString &callee, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl, + QWidget *parent) : QWidget(parent) { setAutoFillBackground(true); setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); @@ -20,25 +26,31 @@ PlaceCall::PlaceCall(const QString &callee, const QString &displayName, QWidget layout->setSpacing(conf::modals::WIDGET_SPACING); layout->setMargin(conf::modals::WIDGET_MARGIN); - auto buttonLayout = new QHBoxLayout(); + auto buttonLayout = new QHBoxLayout(this); buttonLayout->setSpacing(15); buttonLayout->setMargin(0); + QFont f; + f.setPointSizeF(f.pointSizeF()); + auto avatar = new Avatar(this, QFontMetrics(f).height() * 3); + if (!avatarUrl.isEmpty()) + avatar->setImage(avatarUrl); + else + avatar->setLetter(utils::firstChar(roomName)); + voiceBtn_ = new QPushButton(tr("Voice Call"), this); voiceBtn_->setDefault(true); - videoBtn_ = new QPushButton(tr("Video Call"), this); + //videoBtn_ = new QPushButton(tr("Video Call"), this); cancelBtn_ = new QPushButton(tr("Cancel"), this); buttonLayout->addStretch(1); + buttonLayout->addWidget(avatar); buttonLayout->addWidget(voiceBtn_); - buttonLayout->addWidget(videoBtn_); + //buttonLayout->addWidget(videoBtn_); buttonLayout->addWidget(cancelBtn_); - QLabel *label; - if (!displayName.isEmpty() && displayName != callee) - label = new QLabel("Place a call to " + displayName + " (" + callee + ")?", this); - else - label = new QLabel("Place a call to " + callee + "?", this); + QString name = displayName.isEmpty() ? 
callee : displayName; + QLabel *label = new QLabel("Place a call to " + name + "?", this); layout->addWidget(label); layout->addLayout(buttonLayout); @@ -47,10 +59,10 @@ PlaceCall::PlaceCall(const QString &callee, const QString &displayName, QWidget emit voice(); emit close(); }); - connect(videoBtn_, &QPushButton::clicked, this, [this]() { + /*connect(videoBtn_, &QPushButton::clicked, this, [this]() { emit video(); emit close(); - }); + });*/ connect(cancelBtn_, &QPushButton::clicked, this, [this]() { emit cancel(); emit close(); diff --git a/src/dialogs/PlaceCall.h b/src/dialogs/PlaceCall.h index b4de1428..1c157b7b 100644 --- a/src/dialogs/PlaceCall.h +++ b/src/dialogs/PlaceCall.h @@ -12,16 +12,21 @@ class PlaceCall : public QWidget Q_OBJECT public: - PlaceCall(const QString &callee, const QString &displayName, QWidget *parent = nullptr); + PlaceCall( + const QString &callee, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl, + QWidget *parent = nullptr); signals: void voice(); - void video(); +// void video(); void cancel(); private: QPushButton *voiceBtn_; - QPushButton *videoBtn_; +// QPushButton *videoBtn_; QPushButton *cancelBtn_; }; -- cgit 1.5.1 From a4301048e3b2aa195ff73ff94a93011cf5f66c27 Mon Sep 17 00:00:00 2001 From: trilene Date: Thu, 23 Jul 2020 18:36:10 -0400 Subject: Fix QLayout warning --- src/dialogs/PlaceCall.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src/dialogs') diff --git a/src/dialogs/PlaceCall.cpp b/src/dialogs/PlaceCall.cpp index c5c78f94..9ad16384 100644 --- a/src/dialogs/PlaceCall.cpp +++ b/src/dialogs/PlaceCall.cpp @@ -26,7 +26,7 @@ PlaceCall::PlaceCall( layout->setSpacing(conf::modals::WIDGET_SPACING); layout->setMargin(conf::modals::WIDGET_MARGIN); - auto buttonLayout = new QHBoxLayout(this); + auto buttonLayout = new QHBoxLayout; buttonLayout->setSpacing(15); buttonLayout->setMargin(0); -- cgit 1.5.1 From 97681ccf648e68d85aea39de22684ff52e9470c8 Mon Sep 17 00:00:00 2001 From: trilene Date: Wed, 29 Jul 2020 18:16:52 -0400 Subject: Remove references to video calls --- src/ActiveCallBar.cpp | 5 +++-- src/CallManager.cpp | 17 +++++++++++++++-- src/ChatPage.cpp | 3 --- src/WebRTCSession.cpp | 18 +++--------------- src/dialogs/PlaceCall.cpp | 6 ------ src/dialogs/PlaceCall.h | 2 -- 6 files changed, 21 insertions(+), 30 deletions(-) (limited to 'src/dialogs') diff --git a/src/ActiveCallBar.cpp b/src/ActiveCallBar.cpp index e55b2e86..7f07982a 100644 --- a/src/ActiveCallBar.cpp +++ b/src/ActiveCallBar.cpp @@ -110,7 +110,7 @@ ActiveCallBar::setCallParty( const QString &avatarUrl) { callPartyLabel_->setText(" " + - (displayName.isEmpty() ? userid : displayName) + " -"); + (displayName.isEmpty() ? userid : displayName) + " "); if (!avatarUrl.isEmpty()) avatar_->setImage(avatarUrl); @@ -142,7 +142,8 @@ ActiveCallBar::update(WebRTCSession::State state) show(); callStartTime_ = QDateTime::currentSecsSinceEpoch(); timer_->start(1000); - stateLabel_->setText("Voice call:"); + stateLabel_->setPixmap(QIcon(":/icons/icons/ui/place-call.png"). 
+ pixmap(QSize(buttonSize_, buttonSize_))); durationLabel_->setText("00:00"); durationLabel_->show(); break; diff --git a/src/CallManager.cpp b/src/CallManager.cpp index b57ef1bb..3ddcc227 100644 --- a/src/CallManager.cpp +++ b/src/CallManager.cpp @@ -1,3 +1,4 @@ +#include #include #include @@ -198,12 +199,24 @@ CallManager::handleEvent_(const mtx::events::collections::TimelineEvents &event) void CallManager::handleEvent(const RoomEvent &callInviteEvent) { - nhlog::ui()->debug("WebRTC: call id: {} - incoming CallInvite from {}", - callInviteEvent.content.call_id, callInviteEvent.sender); + const char video[] = "m=video"; + const std::string &sdp = callInviteEvent.content.sdp; + bool isVideo = std::search(sdp.cbegin(), sdp.cend(), std::cbegin(video), std::cend(video) - 1, + [](unsigned char c1, unsigned char c2) {return std::tolower(c1) == std::tolower(c2);}) + != sdp.cend(); + + nhlog::ui()->debug(std::string("WebRTC: call id: {} - incoming ") + (isVideo ? "video" : "voice") + + " CallInvite from {}", callInviteEvent.content.call_id, callInviteEvent.sender); if (callInviteEvent.content.call_id.empty()) return; + if (isVideo) { + emit newMessage(QString::fromStdString(callInviteEvent.room_id), + CallHangUp{callInviteEvent.content.call_id, 0, CallHangUp::Reason::InviteTimeOut}); + return; + } + auto roomInfo = cache::singleRoomInfo(callInviteEvent.room_id); if (onActiveCall() || roomInfo.member_count != 2) { emit newMessage(QString::fromStdString(callInviteEvent.room_id), diff --git a/src/ChatPage.cpp b/src/ChatPage.cpp index b53a5761..620c977e 100644 --- a/src/ChatPage.cpp +++ b/src/ChatPage.cpp @@ -479,9 +479,6 @@ ChatPage::ChatPage(QSharedPointer userSettings, QWidget *parent) connect(dialog, &dialogs::PlaceCall::voice, this, [this]() { callManager_.sendInvite(current_room_); }); - /*connect(dialog, &dialogs::PlaceCall::video, this, [this]() { - showNotification("Video calls not yet implemented."); - });*/ utils::centerWidget(dialog, MainWindow::instance()); dialog->show(); } diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp index 95a9041e..9f3b2f70 100644 --- a/src/WebRTCSession.cpp +++ b/src/WebRTCSession.cpp @@ -64,13 +64,13 @@ WebRTCSession::init(std::string *errorMessage) // GStreamer Plugins: // Base: audioconvert, audioresample, opus, playback, volume - // Good: autodetect, rtpmanager, vpx + // Good: autodetect, rtpmanager // Bad: dtls, srtp, webrtc // libnice [GLib]: nice initialised_ = true; std::string strError = gstVersion + ": Missing plugins: "; const gchar *needed[] = {"audioconvert", "audioresample", "autodetect", "dtls", "nice", - "opus", "playback", "rtpmanager", "srtp", "vpx", "volume", "webrtc", nullptr}; + "opus", "playback", "rtpmanager", "srtp", "volume", "webrtc", nullptr}; GstRegistry *registry = gst_registry_get(); for (guint i = 0; i < g_strv_length((gchar**)needed); i++) { GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]); @@ -462,10 +462,9 @@ linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe gst_caps_unref(caps); GstPad *queuepad = nullptr; - GstElement *queue = gst_element_factory_make("queue", nullptr); - if (g_str_has_prefix(name, "audio")) { nhlog::ui()->debug("WebRTC: received incoming audio stream"); + GstElement *queue = gst_element_factory_make("queue", nullptr); GstElement *convert = gst_element_factory_make("audioconvert", nullptr); GstElement *resample = gst_element_factory_make("audioresample", nullptr); GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr); @@ -477,17 +476,6 @@ 
linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe gst_element_link_many(queue, convert, resample, sink, nullptr); queuepad = gst_element_get_static_pad(queue, "sink"); } - else if (g_str_has_prefix(name, "video")) { - nhlog::ui()->debug("WebRTC: received incoming video stream"); - GstElement *convert = gst_element_factory_make("videoconvert", nullptr); - GstElement *sink = gst_element_factory_make("autovideosink", nullptr); - gst_bin_add_many(GST_BIN(pipe), queue, convert, sink, nullptr); - gst_element_sync_state_with_parent(queue); - gst_element_sync_state_with_parent(convert); - gst_element_sync_state_with_parent(sink); - gst_element_link_many(queue, convert, sink, nullptr); - queuepad = gst_element_get_static_pad(queue, "sink"); - } if (queuepad) { if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad))) diff --git a/src/dialogs/PlaceCall.cpp b/src/dialogs/PlaceCall.cpp index 9ad16384..81dd85dd 100644 --- a/src/dialogs/PlaceCall.cpp +++ b/src/dialogs/PlaceCall.cpp @@ -40,13 +40,11 @@ PlaceCall::PlaceCall( voiceBtn_ = new QPushButton(tr("Voice Call"), this); voiceBtn_->setDefault(true); - //videoBtn_ = new QPushButton(tr("Video Call"), this); cancelBtn_ = new QPushButton(tr("Cancel"), this); buttonLayout->addStretch(1); buttonLayout->addWidget(avatar); buttonLayout->addWidget(voiceBtn_); - //buttonLayout->addWidget(videoBtn_); buttonLayout->addWidget(cancelBtn_); QString name = displayName.isEmpty() ? callee : displayName; @@ -59,10 +57,6 @@ PlaceCall::PlaceCall( emit voice(); emit close(); }); - /*connect(videoBtn_, &QPushButton::clicked, this, [this]() { - emit video(); - emit close(); - });*/ connect(cancelBtn_, &QPushButton::clicked, this, [this]() { emit cancel(); emit close(); diff --git a/src/dialogs/PlaceCall.h b/src/dialogs/PlaceCall.h index 1c157b7b..ed6fb750 100644 --- a/src/dialogs/PlaceCall.h +++ b/src/dialogs/PlaceCall.h @@ -21,12 +21,10 @@ public: signals: void voice(); -// void video(); void cancel(); private: QPushButton *voiceBtn_; -// QPushButton *videoBtn_; QPushButton *cancelBtn_; }; -- cgit 1.5.1 From e3e7595babbea739c9fac12ae3da6da368f1e08e Mon Sep 17 00:00:00 2001 From: trilene Date: Sat, 1 Aug 2020 14:31:10 -0400 Subject: clang-format --- src/ActiveCallBar.cpp | 123 +++--- src/ActiveCallBar.h | 9 +- src/CallManager.cpp | 621 +++++++++++++++--------------- src/CallManager.h | 37 +- src/ChatPage.cpp | 15 +- src/EventAccessors.cpp | 19 +- src/WebRTCSession.cpp | 833 +++++++++++++++++++++-------------------- src/WebRTCSession.h | 53 +-- src/dialogs/AcceptCall.cpp | 44 ++- src/dialogs/AcceptCall.h | 11 +- src/dialogs/PlaceCall.cpp | 24 +- src/dialogs/PlaceCall.h | 11 +- src/timeline/TimelineModel.cpp | 6 +- 13 files changed, 936 insertions(+), 870 deletions(-) (limited to 'src/dialogs') diff --git a/src/ActiveCallBar.cpp b/src/ActiveCallBar.cpp index 7f07982a..549b97b9 100644 --- a/src/ActiveCallBar.cpp +++ b/src/ActiveCallBar.cpp @@ -33,8 +33,7 @@ ActiveCallBar::ActiveCallBar(QWidget *parent) layout_ = new QHBoxLayout(this); layout_->setSpacing(widgetMargin); - layout_->setContentsMargins( - 2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin); + layout_->setContentsMargins(2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin); QFont labelFont; labelFont.setPointSizeF(labelFont.pointSizeF() * 1.1); @@ -56,9 +55,9 @@ ActiveCallBar::ActiveCallBar(QWidget *parent) setMuteIcon(false); muteBtn_->setFixedSize(buttonSize_, buttonSize_); muteBtn_->setCornerRadius(buttonSize_ / 2); - connect(muteBtn_, &FlatButton::clicked, 
this, [this](){ + connect(muteBtn_, &FlatButton::clicked, this, [this]() { if (WebRTCSession::instance().toggleMuteAudioSrc(muted_)) - setMuteIcon(muted_); + setMuteIcon(muted_); }); layout_->addWidget(avatar_, 0, Qt::AlignLeft); @@ -70,21 +69,21 @@ ActiveCallBar::ActiveCallBar(QWidget *parent) layout_->addSpacing(18); timer_ = new QTimer(this); - connect(timer_, &QTimer::timeout, this, - [this](){ - auto seconds = QDateTime::currentSecsSinceEpoch() - callStartTime_; - int s = seconds % 60; - int m = (seconds / 60) % 60; - int h = seconds / 3600; - char buf[12]; - if (h) - snprintf(buf, sizeof(buf), "%.2d:%.2d:%.2d", h, m, s); - else - snprintf(buf, sizeof(buf), "%.2d:%.2d", m, s); - durationLabel_->setText(buf); + connect(timer_, &QTimer::timeout, this, [this]() { + auto seconds = QDateTime::currentSecsSinceEpoch() - callStartTime_; + int s = seconds % 60; + int m = (seconds / 60) % 60; + int h = seconds / 3600; + char buf[12]; + if (h) + snprintf(buf, sizeof(buf), "%.2d:%.2d:%.2d", h, m, s); + else + snprintf(buf, sizeof(buf), "%.2d:%.2d", m, s); + durationLabel_->setText(buf); }); - connect(&WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &ActiveCallBar::update); + connect( + &WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &ActiveCallBar::update); } void @@ -103,61 +102,59 @@ ActiveCallBar::setMuteIcon(bool muted) } void -ActiveCallBar::setCallParty( - const QString &userid, - const QString &displayName, - const QString &roomName, - const QString &avatarUrl) +ActiveCallBar::setCallParty(const QString &userid, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl) { - callPartyLabel_->setText(" " + - (displayName.isEmpty() ? userid : displayName) + " "); + callPartyLabel_->setText(" " + (displayName.isEmpty() ? userid : displayName) + " "); if (!avatarUrl.isEmpty()) - avatar_->setImage(avatarUrl); + avatar_->setImage(avatarUrl); else - avatar_->setLetter(utils::firstChar(roomName)); + avatar_->setLetter(utils::firstChar(roomName)); } void ActiveCallBar::update(WebRTCSession::State state) { switch (state) { - case WebRTCSession::State::INITIATING: - show(); - stateLabel_->setText("Initiating call..."); - break; - case WebRTCSession::State::INITIATED: - show(); - stateLabel_->setText("Call initiated..."); - break; - case WebRTCSession::State::OFFERSENT: - show(); - stateLabel_->setText("Calling..."); - break; - case WebRTCSession::State::CONNECTING: - show(); - stateLabel_->setText("Connecting..."); - break; - case WebRTCSession::State::CONNECTED: - show(); - callStartTime_ = QDateTime::currentSecsSinceEpoch(); - timer_->start(1000); - stateLabel_->setPixmap(QIcon(":/icons/icons/ui/place-call.png"). 
- pixmap(QSize(buttonSize_, buttonSize_))); - durationLabel_->setText("00:00"); - durationLabel_->show(); - break; - case WebRTCSession::State::ICEFAILED: - case WebRTCSession::State::DISCONNECTED: - hide(); - timer_->stop(); - callPartyLabel_->setText(QString()); - stateLabel_->setText(QString()); - durationLabel_->setText(QString()); - durationLabel_->hide(); - setMuteIcon(false); - break; - default: - break; + case WebRTCSession::State::INITIATING: + show(); + stateLabel_->setText("Initiating call..."); + break; + case WebRTCSession::State::INITIATED: + show(); + stateLabel_->setText("Call initiated..."); + break; + case WebRTCSession::State::OFFERSENT: + show(); + stateLabel_->setText("Calling..."); + break; + case WebRTCSession::State::CONNECTING: + show(); + stateLabel_->setText("Connecting..."); + break; + case WebRTCSession::State::CONNECTED: + show(); + callStartTime_ = QDateTime::currentSecsSinceEpoch(); + timer_->start(1000); + stateLabel_->setPixmap( + QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(buttonSize_, buttonSize_))); + durationLabel_->setText("00:00"); + durationLabel_->show(); + break; + case WebRTCSession::State::ICEFAILED: + case WebRTCSession::State::DISCONNECTED: + hide(); + timer_->stop(); + callPartyLabel_->setText(QString()); + stateLabel_->setText(QString()); + durationLabel_->setText(QString()); + durationLabel_->hide(); + setMuteIcon(false); + break; + default: + break; } } diff --git a/src/ActiveCallBar.h b/src/ActiveCallBar.h index 8440d7f3..1e940227 100644 --- a/src/ActiveCallBar.h +++ b/src/ActiveCallBar.h @@ -19,11 +19,10 @@ public: public slots: void update(WebRTCSession::State); - void setCallParty( - const QString &userid, - const QString &displayName, - const QString &roomName, - const QString &avatarUrl); + void setCallParty(const QString &userid, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl); private: QHBoxLayout *layout_ = nullptr; diff --git a/src/CallManager.cpp b/src/CallManager.cpp index cbfd5135..46781313 100644 --- a/src/CallManager.cpp +++ b/src/CallManager.cpp @@ -1,13 +1,13 @@ #include #include -#include #include +#include #include #include -#include "CallManager.h" #include "Cache.h" +#include "CallManager.h" #include "ChatPage.h" #include "Logging.h" #include "MainWindow.h" @@ -34,389 +34,420 @@ getTurnURIs(const mtx::responses::TurnServer &turnServer); } CallManager::CallManager(QSharedPointer userSettings) - : QObject(), - session_(WebRTCSession::instance()), - turnServerTimer_(this), - settings_(userSettings) + : QObject() + , session_(WebRTCSession::instance()) + , turnServerTimer_(this) + , settings_(userSettings) { - qRegisterMetaType>(); - qRegisterMetaType(); - qRegisterMetaType(); - - connect(&session_, &WebRTCSession::offerCreated, this, - [this](const std::string &sdp, - const std::vector &candidates) - { - nhlog::ui()->debug("WebRTC: call id: {} - sending offer", callid_); - emit newMessage(roomid_, CallInvite{callid_, sdp, 0, timeoutms_}); - emit newMessage(roomid_, CallCandidates{callid_, candidates, 0}); - - QTimer::singleShot(timeoutms_, this, [this](){ - if (session_.state() == WebRTCSession::State::OFFERSENT) { - hangUp(CallHangUp::Reason::InviteTimeOut); - emit ChatPage::instance()->showNotification("The remote side failed to pick up."); - } - }); - }); - - connect(&session_, &WebRTCSession::answerCreated, this, - [this](const std::string &sdp, - const std::vector &candidates) - { - nhlog::ui()->debug("WebRTC: call id: {} - sending answer", callid_); - emit 
newMessage(roomid_, CallAnswer{callid_, sdp, 0}); - emit newMessage(roomid_, CallCandidates{callid_, candidates, 0}); - }); - - connect(&session_, &WebRTCSession::newICECandidate, this, - [this](const CallCandidates::Candidate &candidate) - { - nhlog::ui()->debug("WebRTC: call id: {} - sending ice candidate", callid_); - emit newMessage(roomid_, CallCandidates{callid_, {candidate}, 0}); - }); - - connect(&turnServerTimer_, &QTimer::timeout, this, &CallManager::retrieveTurnServer); - - connect(this, &CallManager::turnServerRetrieved, this, - [this](const mtx::responses::TurnServer &res) - { - nhlog::net()->info("TURN server(s) retrieved from homeserver:"); - nhlog::net()->info("username: {}", res.username); - nhlog::net()->info("ttl: {} seconds", res.ttl); - for (const auto &u : res.uris) - nhlog::net()->info("uri: {}", u); - - // Request new credentials close to expiry - // See https://tools.ietf.org/html/draft-uberti-behave-turn-rest-00 - turnURIs_ = getTurnURIs(res); - uint32_t ttl = std::max(res.ttl, UINT32_C(3600)); - if (res.ttl < 3600) - nhlog::net()->warn("Setting ttl to 1 hour"); - turnServerTimer_.setInterval(ttl * 1000 * 0.9); - }); - - connect(&session_, &WebRTCSession::stateChanged, this, - [this](WebRTCSession::State state) { - if (state == WebRTCSession::State::DISCONNECTED) { - playRingtone("qrc:/media/media/callend.ogg", false); - } - else if (state == WebRTCSession::State::ICEFAILED) { - QString error("Call connection failed."); - if (turnURIs_.empty()) - error += " Your homeserver has no configured TURN server."; - emit ChatPage::instance()->showNotification(error); - hangUp(CallHangUp::Reason::ICEFailed); - } - }); - - connect(&player_, &QMediaPlayer::mediaStatusChanged, this, - [this](QMediaPlayer::MediaStatus status) { - if (status == QMediaPlayer::LoadedMedia) - player_.play(); - }); + qRegisterMetaType>(); + qRegisterMetaType(); + qRegisterMetaType(); + + connect( + &session_, + &WebRTCSession::offerCreated, + this, + [this](const std::string &sdp, const std::vector &candidates) { + nhlog::ui()->debug("WebRTC: call id: {} - sending offer", callid_); + emit newMessage(roomid_, CallInvite{callid_, sdp, 0, timeoutms_}); + emit newMessage(roomid_, CallCandidates{callid_, candidates, 0}); + QTimer::singleShot(timeoutms_, this, [this]() { + if (session_.state() == WebRTCSession::State::OFFERSENT) { + hangUp(CallHangUp::Reason::InviteTimeOut); + emit ChatPage::instance()->showNotification( + "The remote side failed to pick up."); + } + }); + }); + + connect( + &session_, + &WebRTCSession::answerCreated, + this, + [this](const std::string &sdp, const std::vector &candidates) { + nhlog::ui()->debug("WebRTC: call id: {} - sending answer", callid_); + emit newMessage(roomid_, CallAnswer{callid_, sdp, 0}); + emit newMessage(roomid_, CallCandidates{callid_, candidates, 0}); + }); + + connect(&session_, + &WebRTCSession::newICECandidate, + this, + [this](const CallCandidates::Candidate &candidate) { + nhlog::ui()->debug("WebRTC: call id: {} - sending ice candidate", callid_); + emit newMessage(roomid_, CallCandidates{callid_, {candidate}, 0}); + }); + + connect(&turnServerTimer_, &QTimer::timeout, this, &CallManager::retrieveTurnServer); + + connect(this, + &CallManager::turnServerRetrieved, + this, + [this](const mtx::responses::TurnServer &res) { + nhlog::net()->info("TURN server(s) retrieved from homeserver:"); + nhlog::net()->info("username: {}", res.username); + nhlog::net()->info("ttl: {} seconds", res.ttl); + for (const auto &u : res.uris) + nhlog::net()->info("uri: {}", 
u); + + // Request new credentials close to expiry + // See https://tools.ietf.org/html/draft-uberti-behave-turn-rest-00 + turnURIs_ = getTurnURIs(res); + uint32_t ttl = std::max(res.ttl, UINT32_C(3600)); + if (res.ttl < 3600) + nhlog::net()->warn("Setting ttl to 1 hour"); + turnServerTimer_.setInterval(ttl * 1000 * 0.9); + }); + + connect(&session_, &WebRTCSession::stateChanged, this, [this](WebRTCSession::State state) { + switch (state) { + case WebRTCSession::State::DISCONNECTED: + playRingtone("qrc:/media/media/callend.ogg", false); + clear(); + break; + case WebRTCSession::State::ICEFAILED: { + QString error("Call connection failed."); + if (turnURIs_.empty()) + error += " Your homeserver has no configured TURN server."; + emit ChatPage::instance()->showNotification(error); + hangUp(CallHangUp::Reason::ICEFailed); + break; + } + default: + break; + } + }); + + connect(&player_, + &QMediaPlayer::mediaStatusChanged, + this, + [this](QMediaPlayer::MediaStatus status) { + if (status == QMediaPlayer::LoadedMedia) + player_.play(); + }); } void CallManager::sendInvite(const QString &roomid) { - if (onActiveCall()) - return; - - auto roomInfo = cache::singleRoomInfo(roomid.toStdString()); - if (roomInfo.member_count != 2) { - emit ChatPage::instance()->showNotification("Voice calls are limited to 1:1 rooms."); - return; - } - - std::string errorMessage; - if (!session_.init(&errorMessage)) { - emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage)); - return; - } - - roomid_ = roomid; - session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : ""); - session_.setTurnServers(turnURIs_); - - generateCallID(); - nhlog::ui()->debug("WebRTC: call id: {} - creating invite", callid_); - std::vector members(cache::getMembers(roomid.toStdString())); - const RoomMember &callee = members.front().user_id == utils::localUser() ? members.back() : members.front(); - emit newCallParty(callee.user_id, callee.display_name, - QString::fromStdString(roomInfo.name), QString::fromStdString(roomInfo.avatar_url)); - playRingtone("qrc:/media/media/ringback.ogg", true); - if (!session_.createOffer()) { - emit ChatPage::instance()->showNotification("Problem setting up call."); - endCall(); - } + if (onActiveCall()) + return; + + auto roomInfo = cache::singleRoomInfo(roomid.toStdString()); + if (roomInfo.member_count != 2) { + emit ChatPage::instance()->showNotification( + "Voice calls are limited to 1:1 rooms."); + return; + } + + std::string errorMessage; + if (!session_.init(&errorMessage)) { + emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage)); + return; + } + + roomid_ = roomid; + session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : ""); + session_.setTurnServers(turnURIs_); + + generateCallID(); + nhlog::ui()->debug("WebRTC: call id: {} - creating invite", callid_); + std::vector members(cache::getMembers(roomid.toStdString())); + const RoomMember &callee = + members.front().user_id == utils::localUser() ? 
members.back() : members.front(); + emit newCallParty(callee.user_id, + callee.display_name, + QString::fromStdString(roomInfo.name), + QString::fromStdString(roomInfo.avatar_url)); + playRingtone("qrc:/media/media/ringback.ogg", true); + if (!session_.createOffer()) { + emit ChatPage::instance()->showNotification("Problem setting up call."); + endCall(); + } } namespace { -std::string callHangUpReasonString(CallHangUp::Reason reason) +std::string +callHangUpReasonString(CallHangUp::Reason reason) { - switch (reason) { - case CallHangUp::Reason::ICEFailed: - return "ICE failed"; - case CallHangUp::Reason::InviteTimeOut: - return "Invite time out"; - default: - return "User"; - } + switch (reason) { + case CallHangUp::Reason::ICEFailed: + return "ICE failed"; + case CallHangUp::Reason::InviteTimeOut: + return "Invite time out"; + default: + return "User"; + } } } void CallManager::hangUp(CallHangUp::Reason reason) { - if (!callid_.empty()) { - nhlog::ui()->debug("WebRTC: call id: {} - hanging up ({})", callid_, - callHangUpReasonString(reason)); - emit newMessage(roomid_, CallHangUp{callid_, 0, reason}); - endCall(); - } + if (!callid_.empty()) { + nhlog::ui()->debug( + "WebRTC: call id: {} - hanging up ({})", callid_, callHangUpReasonString(reason)); + emit newMessage(roomid_, CallHangUp{callid_, 0, reason}); + endCall(); + } } bool CallManager::onActiveCall() { - return session_.state() != WebRTCSession::State::DISCONNECTED; + return session_.state() != WebRTCSession::State::DISCONNECTED; } -void CallManager::syncEvent(const mtx::events::collections::TimelineEvents &event) +void +CallManager::syncEvent(const mtx::events::collections::TimelineEvents &event) { - if (handleEvent_(event) || handleEvent_(event) - || handleEvent_(event) || handleEvent_(event)) - return; + if (handleEvent_(event) || handleEvent_(event) || + handleEvent_(event) || handleEvent_(event)) + return; } template bool CallManager::handleEvent_(const mtx::events::collections::TimelineEvents &event) { - if (std::holds_alternative>(event)) { - handleEvent(std::get>(event)); - return true; - } - return false; + if (std::holds_alternative>(event)) { + handleEvent(std::get>(event)); + return true; + } + return false; } void CallManager::handleEvent(const RoomEvent &callInviteEvent) { - const char video[] = "m=video"; - const std::string &sdp = callInviteEvent.content.sdp; - bool isVideo = std::search(sdp.cbegin(), sdp.cend(), std::cbegin(video), std::cend(video) - 1, - [](unsigned char c1, unsigned char c2) {return std::tolower(c1) == std::tolower(c2);}) - != sdp.cend(); - - nhlog::ui()->debug(std::string("WebRTC: call id: {} - incoming ") + (isVideo ? 
"video" : "voice") + - " CallInvite from {}", callInviteEvent.content.call_id, callInviteEvent.sender); - - if (callInviteEvent.content.call_id.empty()) - return; - - if (isVideo) { - emit newMessage(QString::fromStdString(callInviteEvent.room_id), - CallHangUp{callInviteEvent.content.call_id, 0, CallHangUp::Reason::InviteTimeOut}); - return; - } - - auto roomInfo = cache::singleRoomInfo(callInviteEvent.room_id); - if (onActiveCall() || roomInfo.member_count != 2) { - emit newMessage(QString::fromStdString(callInviteEvent.room_id), - CallHangUp{callInviteEvent.content.call_id, 0, CallHangUp::Reason::InviteTimeOut}); - return; - } - - playRingtone("qrc:/media/media/ring.ogg", true); - roomid_ = QString::fromStdString(callInviteEvent.room_id); - callid_ = callInviteEvent.content.call_id; - remoteICECandidates_.clear(); - - std::vector members(cache::getMembers(callInviteEvent.room_id)); - const RoomMember &caller = - members.front().user_id == utils::localUser() ? members.back() : members.front(); - emit newCallParty(caller.user_id, caller.display_name, - QString::fromStdString(roomInfo.name), QString::fromStdString(roomInfo.avatar_url)); - - auto dialog = new dialogs::AcceptCall( - caller.user_id, - caller.display_name, - QString::fromStdString(roomInfo.name), - QString::fromStdString(roomInfo.avatar_url), - MainWindow::instance()); - connect(dialog, &dialogs::AcceptCall::accept, this, - [this, callInviteEvent](){ - MainWindow::instance()->hideOverlay(); - answerInvite(callInviteEvent.content);}); - connect(dialog, &dialogs::AcceptCall::reject, this, - [this](){ - MainWindow::instance()->hideOverlay(); - hangUp();}); - MainWindow::instance()->showSolidOverlayModal(dialog); + const char video[] = "m=video"; + const std::string &sdp = callInviteEvent.content.sdp; + bool isVideo = std::search(sdp.cbegin(), + sdp.cend(), + std::cbegin(video), + std::cend(video) - 1, + [](unsigned char c1, unsigned char c2) { + return std::tolower(c1) == std::tolower(c2); + }) != sdp.cend(); + + nhlog::ui()->debug(std::string("WebRTC: call id: {} - incoming ") + + (isVideo ? "video" : "voice") + " CallInvite from {}", + callInviteEvent.content.call_id, + callInviteEvent.sender); + + if (callInviteEvent.content.call_id.empty()) + return; + + auto roomInfo = cache::singleRoomInfo(callInviteEvent.room_id); + if (onActiveCall() || roomInfo.member_count != 2 || isVideo) { + emit newMessage(QString::fromStdString(callInviteEvent.room_id), + CallHangUp{callInviteEvent.content.call_id, + 0, + CallHangUp::Reason::InviteTimeOut}); + return; + } + + playRingtone("qrc:/media/media/ring.ogg", true); + roomid_ = QString::fromStdString(callInviteEvent.room_id); + callid_ = callInviteEvent.content.call_id; + remoteICECandidates_.clear(); + + std::vector members(cache::getMembers(callInviteEvent.room_id)); + const RoomMember &caller = + members.front().user_id == utils::localUser() ? 
members.back() : members.front(); + emit newCallParty(caller.user_id, + caller.display_name, + QString::fromStdString(roomInfo.name), + QString::fromStdString(roomInfo.avatar_url)); + + auto dialog = new dialogs::AcceptCall(caller.user_id, + caller.display_name, + QString::fromStdString(roomInfo.name), + QString::fromStdString(roomInfo.avatar_url), + MainWindow::instance()); + connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent]() { + MainWindow::instance()->hideOverlay(); + answerInvite(callInviteEvent.content); + }); + connect(dialog, &dialogs::AcceptCall::reject, this, [this]() { + MainWindow::instance()->hideOverlay(); + hangUp(); + }); + MainWindow::instance()->showSolidOverlayModal(dialog); } void CallManager::answerInvite(const CallInvite &invite) { - stopRingtone(); - std::string errorMessage; - if (!session_.init(&errorMessage)) { - emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage)); - hangUp(); - return; - } - - session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : ""); - session_.setTurnServers(turnURIs_); - - if (!session_.acceptOffer(invite.sdp)) { - emit ChatPage::instance()->showNotification("Problem setting up call."); - hangUp(); - return; - } - session_.acceptICECandidates(remoteICECandidates_); - remoteICECandidates_.clear(); + stopRingtone(); + std::string errorMessage; + if (!session_.init(&errorMessage)) { + emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage)); + hangUp(); + return; + } + + session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : ""); + session_.setTurnServers(turnURIs_); + + if (!session_.acceptOffer(invite.sdp)) { + emit ChatPage::instance()->showNotification("Problem setting up call."); + hangUp(); + return; + } + session_.acceptICECandidates(remoteICECandidates_); + remoteICECandidates_.clear(); } void CallManager::handleEvent(const RoomEvent &callCandidatesEvent) { - if (callCandidatesEvent.sender == utils::localUser().toStdString()) - return; - - nhlog::ui()->debug("WebRTC: call id: {} - incoming CallCandidates from {}", - callCandidatesEvent.content.call_id, callCandidatesEvent.sender); - - if (callid_ == callCandidatesEvent.content.call_id) { - if (onActiveCall()) - session_.acceptICECandidates(callCandidatesEvent.content.candidates); - else { - // CallInvite has been received and we're awaiting localUser to accept or reject the call - for (const auto &c : callCandidatesEvent.content.candidates) - remoteICECandidates_.push_back(c); - } - } + if (callCandidatesEvent.sender == utils::localUser().toStdString()) + return; + + nhlog::ui()->debug("WebRTC: call id: {} - incoming CallCandidates from {}", + callCandidatesEvent.content.call_id, + callCandidatesEvent.sender); + + if (callid_ == callCandidatesEvent.content.call_id) { + if (onActiveCall()) + session_.acceptICECandidates(callCandidatesEvent.content.candidates); + else { + // CallInvite has been received and we're awaiting localUser to accept or + // reject the call + for (const auto &c : callCandidatesEvent.content.candidates) + remoteICECandidates_.push_back(c); + } + } } void CallManager::handleEvent(const RoomEvent &callAnswerEvent) { - nhlog::ui()->debug("WebRTC: call id: {} - incoming CallAnswer from {}", - callAnswerEvent.content.call_id, callAnswerEvent.sender); - - if (!onActiveCall() && callAnswerEvent.sender == utils::localUser().toStdString() && - callid_ == callAnswerEvent.content.call_id) { - emit ChatPage::instance()->showNotification("Call answered on another device."); - 
stopRingtone(); - MainWindow::instance()->hideOverlay(); - return; - } - - if (onActiveCall() && callid_ == callAnswerEvent.content.call_id) { - stopRingtone(); - if (!session_.acceptAnswer(callAnswerEvent.content.sdp)) { - emit ChatPage::instance()->showNotification("Problem setting up call."); - hangUp(); - } - } + nhlog::ui()->debug("WebRTC: call id: {} - incoming CallAnswer from {}", + callAnswerEvent.content.call_id, + callAnswerEvent.sender); + + if (!onActiveCall() && callAnswerEvent.sender == utils::localUser().toStdString() && + callid_ == callAnswerEvent.content.call_id) { + emit ChatPage::instance()->showNotification("Call answered on another device."); + stopRingtone(); + MainWindow::instance()->hideOverlay(); + return; + } + + if (onActiveCall() && callid_ == callAnswerEvent.content.call_id) { + stopRingtone(); + if (!session_.acceptAnswer(callAnswerEvent.content.sdp)) { + emit ChatPage::instance()->showNotification("Problem setting up call."); + hangUp(); + } + } } void CallManager::handleEvent(const RoomEvent &callHangUpEvent) { - nhlog::ui()->debug("WebRTC: call id: {} - incoming CallHangUp ({}) from {}", - callHangUpEvent.content.call_id, callHangUpReasonString(callHangUpEvent.content.reason), - callHangUpEvent.sender); - - if (callid_ == callHangUpEvent.content.call_id) { - MainWindow::instance()->hideOverlay(); - endCall(); - } + nhlog::ui()->debug("WebRTC: call id: {} - incoming CallHangUp ({}) from {}", + callHangUpEvent.content.call_id, + callHangUpReasonString(callHangUpEvent.content.reason), + callHangUpEvent.sender); + + if (callid_ == callHangUpEvent.content.call_id) { + MainWindow::instance()->hideOverlay(); + endCall(); + } } void CallManager::generateCallID() { - using namespace std::chrono; - uint64_t ms = duration_cast(system_clock::now().time_since_epoch()).count(); - callid_ = "c" + std::to_string(ms); + using namespace std::chrono; + uint64_t ms = duration_cast(system_clock::now().time_since_epoch()).count(); + callid_ = "c" + std::to_string(ms); +} + +void +CallManager::clear() +{ + roomid_.clear(); + callid_.clear(); + remoteICECandidates_.clear(); } void CallManager::endCall() { - stopRingtone(); - session_.end(); - roomid_.clear(); - callid_.clear(); - remoteICECandidates_.clear(); + stopRingtone(); + clear(); + session_.end(); } void CallManager::refreshTurnServer() { - turnURIs_.clear(); - turnServerTimer_.start(2000); + turnURIs_.clear(); + turnServerTimer_.start(2000); } void CallManager::retrieveTurnServer() { - http::client()->get_turn_server( - [this](const mtx::responses::TurnServer &res, mtx::http::RequestErr err) { - if (err) { - turnServerTimer_.setInterval(5000); - return; - } - emit turnServerRetrieved(res); - }); + http::client()->get_turn_server( + [this](const mtx::responses::TurnServer &res, mtx::http::RequestErr err) { + if (err) { + turnServerTimer_.setInterval(5000); + return; + } + emit turnServerRetrieved(res); + }); } void CallManager::playRingtone(const QString &ringtone, bool repeat) { - static QMediaPlaylist playlist; - playlist.clear(); - playlist.setPlaybackMode(repeat ? QMediaPlaylist::CurrentItemInLoop : QMediaPlaylist::CurrentItemOnce); - playlist.addMedia(QUrl(ringtone)); - player_.setVolume(100); - player_.setPlaylist(&playlist); + static QMediaPlaylist playlist; + playlist.clear(); + playlist.setPlaybackMode(repeat ? 
QMediaPlaylist::CurrentItemInLoop + : QMediaPlaylist::CurrentItemOnce); + playlist.addMedia(QUrl(ringtone)); + player_.setVolume(100); + player_.setPlaylist(&playlist); } void CallManager::stopRingtone() { - player_.setPlaylist(nullptr); + player_.setPlaylist(nullptr); } namespace { std::vector getTurnURIs(const mtx::responses::TurnServer &turnServer) { - // gstreamer expects: turn(s)://username:password@host:port?transport=udp(tcp) - // where username and password are percent-encoded - std::vector ret; - for (const auto &uri : turnServer.uris) { - if (auto c = uri.find(':'); c == std::string::npos) { - nhlog::ui()->error("Invalid TURN server uri: {}", uri); - continue; - } - else { - std::string scheme = std::string(uri, 0, c); - if (scheme != "turn" && scheme != "turns") { - nhlog::ui()->error("Invalid TURN server uri: {}", uri); - continue; - } - - QString encodedUri = QString::fromStdString(scheme) + "://" + - QUrl::toPercentEncoding(QString::fromStdString(turnServer.username)) + ":" + - QUrl::toPercentEncoding(QString::fromStdString(turnServer.password)) + "@" + - QString::fromStdString(std::string(uri, ++c)); - ret.push_back(encodedUri.toStdString()); - } - } - return ret; + // gstreamer expects: turn(s)://username:password@host:port?transport=udp(tcp) + // where username and password are percent-encoded + std::vector ret; + for (const auto &uri : turnServer.uris) { + if (auto c = uri.find(':'); c == std::string::npos) { + nhlog::ui()->error("Invalid TURN server uri: {}", uri); + continue; + } else { + std::string scheme = std::string(uri, 0, c); + if (scheme != "turn" && scheme != "turns") { + nhlog::ui()->error("Invalid TURN server uri: {}", uri); + continue; + } + + QString encodedUri = + QString::fromStdString(scheme) + "://" + + QUrl::toPercentEncoding(QString::fromStdString(turnServer.username)) + + ":" + + QUrl::toPercentEncoding(QString::fromStdString(turnServer.password)) + + "@" + QString::fromStdString(std::string(uri, ++c)); + ret.push_back(encodedUri.toStdString()); + } + } + return ret; } } - diff --git a/src/CallManager.h b/src/CallManager.h index 4ed6e4c7..3a406438 100644 --- a/src/CallManager.h +++ b/src/CallManager.h @@ -3,8 +3,8 @@ #include #include -#include #include +#include #include #include #include @@ -27,7 +27,8 @@ public: CallManager(QSharedPointer); void sendInvite(const QString &roomid); - void hangUp(mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User); + void hangUp( + mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User); bool onActiveCall(); void refreshTurnServer(); @@ -35,22 +36,21 @@ public slots: void syncEvent(const mtx::events::collections::TimelineEvents &event); signals: - void newMessage(const QString &roomid, const mtx::events::msg::CallInvite&); - void newMessage(const QString &roomid, const mtx::events::msg::CallCandidates&); - void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer&); - void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp&); - void turnServerRetrieved(const mtx::responses::TurnServer&); - void newCallParty( - const QString &userid, - const QString &displayName, - const QString &roomName, - const QString &avatarUrl); + void newMessage(const QString &roomid, const mtx::events::msg::CallInvite &); + void newMessage(const QString &roomid, const mtx::events::msg::CallCandidates &); + void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer &); + void newMessage(const QString &roomid, const 
mtx::events::msg::CallHangUp &); + void turnServerRetrieved(const mtx::responses::TurnServer &); + void newCallParty(const QString &userid, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl); private slots: void retrieveTurnServer(); private: - WebRTCSession& session_; + WebRTCSession &session_; QString roomid_; std::string callid_; const uint32_t timeoutms_ = 120000; @@ -62,12 +62,13 @@ private: template bool handleEvent_(const mtx::events::collections::TimelineEvents &event); - void handleEvent(const mtx::events::RoomEvent&); - void handleEvent(const mtx::events::RoomEvent&); - void handleEvent(const mtx::events::RoomEvent&); - void handleEvent(const mtx::events::RoomEvent&); - void answerInvite(const mtx::events::msg::CallInvite&); + void handleEvent(const mtx::events::RoomEvent &); + void handleEvent(const mtx::events::RoomEvent &); + void handleEvent(const mtx::events::RoomEvent &); + void handleEvent(const mtx::events::RoomEvent &); + void answerInvite(const mtx::events::msg::CallInvite &); void generateCallID(); + void clear(); void endCall(); void playRingtone(const QString &ringtone, bool repeat); void stopRingtone(); diff --git a/src/ChatPage.cpp b/src/ChatPage.cpp index 5ab617fa..09153154 100644 --- a/src/ChatPage.cpp +++ b/src/ChatPage.cpp @@ -460,9 +460,8 @@ ChatPage::ChatPage(QSharedPointer userSettings, QWidget *parent) if (callManager_.onActiveCall()) { callManager_.hangUp(); } else { - if (auto roomInfo = - cache::singleRoomInfo(current_room_.toStdString()); - roomInfo.member_count != 2) { + if (auto roomInfo = cache::singleRoomInfo(current_room_.toStdString()); + roomInfo.member_count != 2) { showNotification("Voice calls are limited to 1:1 rooms."); } else { std::vector members( @@ -471,11 +470,11 @@ ChatPage::ChatPage(QSharedPointer userSettings, QWidget *parent) members.front().user_id == utils::localUser() ? members.back() : members.front(); auto dialog = new dialogs::PlaceCall( - callee.user_id, - callee.display_name, - QString::fromStdString(roomInfo.name), - QString::fromStdString(roomInfo.avatar_url), - MainWindow::instance()); + callee.user_id, + callee.display_name, + QString::fromStdString(roomInfo.name), + QString::fromStdString(roomInfo.avatar_url), + MainWindow::instance()); connect(dialog, &dialogs::PlaceCall::voice, this, [this]() { callManager_.sendInvite(current_room_); }); diff --git a/src/EventAccessors.cpp b/src/EventAccessors.cpp index 043e24a2..7846737b 100644 --- a/src/EventAccessors.cpp +++ b/src/EventAccessors.cpp @@ -72,12 +72,19 @@ struct CallType template std::string operator()(const T &e) { - if constexpr (std::is_same_v, T>) { - const char video[] = "m=video"; - const std::string &sdp = e.content.sdp; - return std::search(sdp.cbegin(), sdp.cend(), std::cbegin(video), std::cend(video) - 1, - [](unsigned char c1, unsigned char c2) {return std::tolower(c1) == std::tolower(c2);}) - != sdp.cend() ? "video" : "voice"; + if constexpr (std::is_same_v, + T>) { + const char video[] = "m=video"; + const std::string &sdp = e.content.sdp; + return std::search(sdp.cbegin(), + sdp.cend(), + std::cbegin(video), + std::cend(video) - 1, + [](unsigned char c1, unsigned char c2) { + return std::tolower(c1) == std::tolower(c2); + }) != sdp.cend() + ? 
"video" + : "voice"; } return std::string(); } diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp index f3fd1bdc..32b67123 100644 --- a/src/WebRTCSession.cpp +++ b/src/WebRTCSession.cpp @@ -1,9 +1,10 @@ #include -#include "WebRTCSession.h" #include "Logging.h" +#include "WebRTCSession.h" -extern "C" { +extern "C" +{ #include "gst/gst.h" #include "gst/sdp/sdp.h" @@ -13,478 +14,498 @@ extern "C" { Q_DECLARE_METATYPE(WebRTCSession::State) -namespace { -bool isoffering_; -std::string localsdp_; -std::vector localcandidates_; - -gboolean newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data); -GstWebRTCSessionDescription* parseSDP(const std::string &sdp, GstWebRTCSDPType type); -void generateOffer(GstElement *webrtc); -void setLocalDescription(GstPromise *promise, gpointer webrtc); -void addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED, guint mlineIndex, gchar *candidate, gpointer G_GNUC_UNUSED); -gboolean onICEGatheringCompletion(gpointer timerid); -void iceConnectionStateChanged(GstElement *webrtcbin, GParamSpec *pspec G_GNUC_UNUSED, gpointer user_data G_GNUC_UNUSED); -void createAnswer(GstPromise *promise, gpointer webrtc); -void addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe); -void linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe); -std::string::const_iterator findName(const std::string &sdp, const std::string &name); -int getPayloadType(const std::string &sdp, const std::string &name); -} - -WebRTCSession::WebRTCSession() : QObject() +WebRTCSession::WebRTCSession() + : QObject() { - qRegisterMetaType(); - connect(this, &WebRTCSession::stateChanged, this, &WebRTCSession::setState); + qRegisterMetaType(); + connect(this, &WebRTCSession::stateChanged, this, &WebRTCSession::setState); } bool WebRTCSession::init(std::string *errorMessage) { - if (initialised_) - return true; - - GError *error = nullptr; - if (!gst_init_check(nullptr, nullptr, &error)) { - std::string strError = std::string("WebRTC: failed to initialise GStreamer: "); - if (error) { - strError += error->message; - g_error_free(error); - } - nhlog::ui()->error(strError); - if (errorMessage) - *errorMessage = strError; - return false; - } - - gchar *version = gst_version_string(); - std::string gstVersion(version); - g_free(version); - nhlog::ui()->info("WebRTC: initialised " + gstVersion); - - // GStreamer Plugins: - // Base: audioconvert, audioresample, opus, playback, volume - // Good: autodetect, rtpmanager - // Bad: dtls, srtp, webrtc - // libnice [GLib]: nice - initialised_ = true; - std::string strError = gstVersion + ": Missing plugins: "; - const gchar *needed[] = {"audioconvert", "audioresample", "autodetect", "dtls", "nice", - "opus", "playback", "rtpmanager", "srtp", "volume", "webrtc", nullptr}; - GstRegistry *registry = gst_registry_get(); - for (guint i = 0; i < g_strv_length((gchar**)needed); i++) { - GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]); - if (!plugin) { - strError += std::string(needed[i]) + " "; - initialised_ = false; - continue; - } - gst_object_unref(plugin); - } - - if (!initialised_) { - nhlog::ui()->error(strError); - if (errorMessage) - *errorMessage = strError; - } - return initialised_; + if (initialised_) + return true; + + GError *error = nullptr; + if (!gst_init_check(nullptr, nullptr, &error)) { + std::string strError = std::string("WebRTC: failed to initialise GStreamer: "); + if (error) { + strError += error->message; + g_error_free(error); + } + 
nhlog::ui()->error(strError); + if (errorMessage) + *errorMessage = strError; + return false; + } + + gchar *version = gst_version_string(); + std::string gstVersion(version); + g_free(version); + nhlog::ui()->info("WebRTC: initialised " + gstVersion); + + // GStreamer Plugins: + // Base: audioconvert, audioresample, opus, playback, volume + // Good: autodetect, rtpmanager + // Bad: dtls, srtp, webrtc + // libnice [GLib]: nice + initialised_ = true; + std::string strError = gstVersion + ": Missing plugins: "; + const gchar *needed[] = {"audioconvert", + "audioresample", + "autodetect", + "dtls", + "nice", + "opus", + "playback", + "rtpmanager", + "srtp", + "volume", + "webrtc", + nullptr}; + GstRegistry *registry = gst_registry_get(); + for (guint i = 0; i < g_strv_length((gchar **)needed); i++) { + GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]); + if (!plugin) { + strError += std::string(needed[i]) + " "; + initialised_ = false; + continue; + } + gst_object_unref(plugin); + } + + if (!initialised_) { + nhlog::ui()->error(strError); + if (errorMessage) + *errorMessage = strError; + } + return initialised_; } -bool -WebRTCSession::createOffer() -{ - isoffering_ = true; - localsdp_.clear(); - localcandidates_.clear(); - return startPipeline(111); // a dynamic opus payload type -} +namespace { -bool -WebRTCSession::acceptOffer(const std::string &sdp) +bool isoffering_; +std::string localsdp_; +std::vector localcandidates_; + +gboolean +newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data) { - nhlog::ui()->debug("WebRTC: received offer:\n{}", sdp); - if (state_ != State::DISCONNECTED) - return false; - - isoffering_ = false; - localsdp_.clear(); - localcandidates_.clear(); - - int opusPayloadType = getPayloadType(sdp, "opus"); - if (opusPayloadType == -1) - return false; - - GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER); - if (!offer) - return false; - - if (!startPipeline(opusPayloadType)) { - gst_webrtc_session_description_free(offer); - return false; - } - - // set-remote-description first, then create-answer - GstPromise *promise = gst_promise_new_with_change_func(createAnswer, webrtc_, nullptr); - g_signal_emit_by_name(webrtc_, "set-remote-description", offer, promise); - gst_webrtc_session_description_free(offer); - return true; + WebRTCSession *session = static_cast(user_data); + switch (GST_MESSAGE_TYPE(msg)) { + case GST_MESSAGE_EOS: + nhlog::ui()->error("WebRTC: end of stream"); + session->end(); + break; + case GST_MESSAGE_ERROR: + GError *error; + gchar *debug; + gst_message_parse_error(msg, &error, &debug); + nhlog::ui()->error( + "WebRTC: error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message); + g_clear_error(&error); + g_free(debug); + session->end(); + break; + default: + break; + } + return TRUE; } -bool -WebRTCSession::startPipeline(int opusPayloadType) +GstWebRTCSessionDescription * +parseSDP(const std::string &sdp, GstWebRTCSDPType type) { - if (state_ != State::DISCONNECTED) - return false; - - emit stateChanged(State::INITIATING); - - if (!createPipeline(opusPayloadType)) - return false; - - webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin"); - - if (!stunServer_.empty()) { - nhlog::ui()->info("WebRTC: setting STUN server: {}", stunServer_); - g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr); - } - - for (const auto &uri : turnServers_) { - nhlog::ui()->info("WebRTC: setting TURN server: {}", uri); - gboolean udata; - g_signal_emit_by_name(webrtc_, 
"add-turn-server", uri.c_str(), (gpointer)(&udata)); - } - if (turnServers_.empty()) - nhlog::ui()->warn("WebRTC: no TURN server provided"); - - // generate the offer when the pipeline goes to PLAYING - if (isoffering_) - g_signal_connect(webrtc_, "on-negotiation-needed", G_CALLBACK(generateOffer), nullptr); - - // on-ice-candidate is emitted when a local ICE candidate has been gathered - g_signal_connect(webrtc_, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), nullptr); - - // capture ICE failure - g_signal_connect(webrtc_, "notify::ice-connection-state", - G_CALLBACK(iceConnectionStateChanged), nullptr); - - // incoming streams trigger pad-added - gst_element_set_state(pipe_, GST_STATE_READY); - g_signal_connect(webrtc_, "pad-added", G_CALLBACK(addDecodeBin), pipe_); - - // webrtcbin lifetime is the same as that of the pipeline - gst_object_unref(webrtc_); - - // start the pipeline - GstStateChangeReturn ret = gst_element_set_state(pipe_, GST_STATE_PLAYING); - if (ret == GST_STATE_CHANGE_FAILURE) { - nhlog::ui()->error("WebRTC: unable to start pipeline"); - end(); - return false; - } - - GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipe_)); - gst_bus_add_watch(bus, newBusMessage, this); - gst_object_unref(bus); - emit stateChanged(State::INITIATED); - return true; + GstSDPMessage *msg; + gst_sdp_message_new(&msg); + if (gst_sdp_message_parse_buffer((guint8 *)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) { + return gst_webrtc_session_description_new(type, msg); + } else { + nhlog::ui()->error("WebRTC: failed to parse remote session description"); + gst_object_unref(msg); + return nullptr; + } } -#define RTP_CAPS_OPUS "application/x-rtp,media=audio,encoding-name=OPUS,payload=" - -bool -WebRTCSession::createPipeline(int opusPayloadType) +void +setLocalDescription(GstPromise *promise, gpointer webrtc) { - std::string pipeline("webrtcbin bundle-policy=max-bundle name=webrtcbin " - "autoaudiosrc ! volume name=srclevel ! audioconvert ! audioresample ! queue ! opusenc ! rtpopuspay ! " - "queue ! " RTP_CAPS_OPUS + std::to_string(opusPayloadType) + " ! webrtcbin."); - - webrtc_ = nullptr; - GError *error = nullptr; - pipe_ = gst_parse_launch(pipeline.c_str(), &error); - if (error) { - nhlog::ui()->error("WebRTC: failed to parse pipeline: {}", error->message); - g_error_free(error); - end(); - return false; - } - return true; + const GstStructure *reply = gst_promise_get_reply(promise); + gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer")); + GstWebRTCSessionDescription *gstsdp = nullptr; + gst_structure_get(reply, + isAnswer ? "answer" : "offer", + GST_TYPE_WEBRTC_SESSION_DESCRIPTION, + &gstsdp, + nullptr); + gst_promise_unref(promise); + g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr); + + gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp); + localsdp_ = std::string(sdp); + g_free(sdp); + gst_webrtc_session_description_free(gstsdp); + + nhlog::ui()->debug( + "WebRTC: local description set ({}):\n{}", isAnswer ? 
"answer" : "offer", localsdp_); } -bool -WebRTCSession::acceptAnswer(const std::string &sdp) +void +createOffer(GstElement *webrtc) { - nhlog::ui()->debug("WebRTC: received answer:\n{}", sdp); - if (state_ != State::OFFERSENT) - return false; - - GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER); - if (!answer) { - end(); - return false; - } - - g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr); - gst_webrtc_session_description_free(answer); - return true; + // create-offer first, then set-local-description + GstPromise *promise = + gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr); + g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise); } void -WebRTCSession::acceptICECandidates(const std::vector &candidates) +createAnswer(GstPromise *promise, gpointer webrtc) { - if (state_ >= State::INITIATED) { - for (const auto &c : candidates) { - nhlog::ui()->debug("WebRTC: remote candidate: (m-line:{}):{}", c.sdpMLineIndex, c.candidate); - g_signal_emit_by_name(webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str()); - } - } + // create-answer first, then set-local-description + gst_promise_unref(promise); + promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr); + g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise); } -bool -WebRTCSession::toggleMuteAudioSrc(bool &isMuted) +gboolean +onICEGatheringCompletion(gpointer timerid) { - if (state_ < State::INITIATED) - return false; - - GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel"); - if (!srclevel) - return false; - - gboolean muted; - g_object_get(srclevel, "mute", &muted, nullptr); - g_object_set(srclevel, "mute", !muted, nullptr); - gst_object_unref(srclevel); - isMuted = !muted; - return true; + *(guint *)(timerid) = 0; + if (isoffering_) { + emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_); + emit WebRTCSession::instance().stateChanged(WebRTCSession::State::OFFERSENT); + } else { + emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_); + emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ANSWERSENT); + } + return FALSE; } void -WebRTCSession::end() +addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED, + guint mlineIndex, + gchar *candidate, + gpointer G_GNUC_UNUSED) { - nhlog::ui()->debug("WebRTC: ending session"); - if (pipe_) { - gst_element_set_state(pipe_, GST_STATE_NULL); - gst_object_unref(pipe_); - pipe_ = nullptr; - } - webrtc_ = nullptr; - if (state_ != State::DISCONNECTED) - emit stateChanged(State::DISCONNECTED); -} + nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate); -namespace { + if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) { + emit WebRTCSession::instance().newICECandidate( + {"audio", (uint16_t)mlineIndex, candidate}); + return; + } -std::string::const_iterator findName(const std::string &sdp, const std::string &name) -{ - return std::search(sdp.cbegin(), sdp.cend(), name.cbegin(), name.cend(), - [](unsigned char c1, unsigned char c2) {return std::tolower(c1) == std::tolower(c2);}); -} + localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate}); -int getPayloadType(const std::string &sdp, const std::string &name) -{ - // eg a=rtpmap:111 opus/48000/2 - auto e = findName(sdp, name); - if (e == sdp.cend()) { - nhlog::ui()->error("WebRTC: remote offer - " + name + " attribute missing"); - return -1; - } - - if (auto s = sdp.rfind(':', e - 
sdp.cbegin()); s == std::string::npos) { - nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name + " payload type"); - return -1; - } - else { - ++s; - try { - return std::stoi(std::string(sdp, s, e - sdp.cbegin() - s)); - } - catch(...) { - nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name + " payload type"); - } - } - return -1; -} + // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers + // GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early. Fixed in v1.18. Use a 100ms timeout in + // the meantime + static guint timerid = 0; + if (timerid) + g_source_remove(timerid); -gboolean -newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data) -{ - WebRTCSession *session = (WebRTCSession*)user_data; - switch (GST_MESSAGE_TYPE(msg)) { - case GST_MESSAGE_EOS: - nhlog::ui()->error("WebRTC: end of stream"); - session->end(); - break; - case GST_MESSAGE_ERROR: - GError *error; - gchar *debug; - gst_message_parse_error(msg, &error, &debug); - nhlog::ui()->error("WebRTC: error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message); - g_clear_error(&error); - g_free(debug); - session->end(); - break; - default: - break; - } - return TRUE; + timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid); } -GstWebRTCSessionDescription* -parseSDP(const std::string &sdp, GstWebRTCSDPType type) +void +iceConnectionStateChanged(GstElement *webrtc, + GParamSpec *pspec G_GNUC_UNUSED, + gpointer user_data G_GNUC_UNUSED) { - GstSDPMessage *msg; - gst_sdp_message_new(&msg); - if (gst_sdp_message_parse_buffer((guint8*)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) { - return gst_webrtc_session_description_new(type, msg); - } - else { - nhlog::ui()->error("WebRTC: failed to parse remote session description"); - gst_object_unref(msg); - return nullptr; - } + GstWebRTCICEConnectionState newState; + g_object_get(webrtc, "ice-connection-state", &newState, nullptr); + switch (newState) { + case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING: + nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking"); + emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING); + break; + case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED: + nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed"); + emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED); + break; + default: + break; + } } void -generateOffer(GstElement *webrtc) +linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) { - // create-offer first, then set-local-description - GstPromise *promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr); - g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise); + GstCaps *caps = gst_pad_get_current_caps(newpad); + if (!caps) + return; + + const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0)); + gst_caps_unref(caps); + + GstPad *queuepad = nullptr; + if (g_str_has_prefix(name, "audio")) { + nhlog::ui()->debug("WebRTC: received incoming audio stream"); + GstElement *queue = gst_element_factory_make("queue", nullptr); + GstElement *convert = gst_element_factory_make("audioconvert", nullptr); + GstElement *resample = gst_element_factory_make("audioresample", nullptr); + GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr); + gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr); + gst_element_sync_state_with_parent(queue); + gst_element_sync_state_with_parent(convert); + 
gst_element_sync_state_with_parent(resample); + gst_element_sync_state_with_parent(sink); + gst_element_link_many(queue, convert, resample, sink, nullptr); + queuepad = gst_element_get_static_pad(queue, "sink"); + } + + if (queuepad) { + if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad))) + nhlog::ui()->error("WebRTC: unable to link new pad"); + else { + emit WebRTCSession::instance().stateChanged( + WebRTCSession::State::CONNECTED); + } + gst_object_unref(queuepad); + } } void -setLocalDescription(GstPromise *promise, gpointer webrtc) +addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) { - const GstStructure *reply = gst_promise_get_reply(promise); - gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer")); - GstWebRTCSessionDescription *gstsdp = nullptr; - gst_structure_get(reply, isAnswer ? "answer" : "offer", GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &gstsdp, nullptr); - gst_promise_unref(promise); - g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr); - - gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp); - localsdp_ = std::string(sdp); - g_free(sdp); - gst_webrtc_session_description_free(gstsdp); - - nhlog::ui()->debug("WebRTC: local description set ({}):\n{}", isAnswer ? "answer" : "offer", localsdp_); + if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC) + return; + + nhlog::ui()->debug("WebRTC: received incoming stream"); + GstElement *decodebin = gst_element_factory_make("decodebin", nullptr); + g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe); + gst_bin_add(GST_BIN(pipe), decodebin); + gst_element_sync_state_with_parent(decodebin); + GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink"); + if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad))) + nhlog::ui()->error("WebRTC: unable to link new pad"); + gst_object_unref(sinkpad); } -void -addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED, guint mlineIndex, gchar *candidate, gpointer G_GNUC_UNUSED) +std::string::const_iterator +findName(const std::string &sdp, const std::string &name) { - nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate); + return std::search( + sdp.cbegin(), + sdp.cend(), + name.cbegin(), + name.cend(), + [](unsigned char c1, unsigned char c2) { return std::tolower(c1) == std::tolower(c2); }); +} - if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) { - emit WebRTCSession::instance().newICECandidate({"audio", (uint16_t)mlineIndex, candidate}); - return; - } +int +getPayloadType(const std::string &sdp, const std::string &name) +{ + // eg a=rtpmap:111 opus/48000/2 + auto e = findName(sdp, name); + if (e == sdp.cend()) { + nhlog::ui()->error("WebRTC: remote offer - " + name + " attribute missing"); + return -1; + } + + if (auto s = sdp.rfind(':', e - sdp.cbegin()); s == std::string::npos) { + nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name + + " payload type"); + return -1; + } else { + ++s; + try { + return std::stoi(std::string(sdp, s, e - sdp.cbegin() - s)); + } catch (...) 
{ + nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name + + " payload type"); + } + } + return -1; +} - localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate}); +} - // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early - // fixed in v1.18 - // use a 100ms timeout in the meantime - static guint timerid = 0; - if (timerid) - g_source_remove(timerid); +bool +WebRTCSession::createOffer() +{ + isoffering_ = true; + localsdp_.clear(); + localcandidates_.clear(); + return startPipeline(111); // a dynamic opus payload type +} - timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid); +bool +WebRTCSession::acceptOffer(const std::string &sdp) +{ + nhlog::ui()->debug("WebRTC: received offer:\n{}", sdp); + if (state_ != State::DISCONNECTED) + return false; + + isoffering_ = false; + localsdp_.clear(); + localcandidates_.clear(); + + int opusPayloadType = getPayloadType(sdp, "opus"); + if (opusPayloadType == -1) + return false; + + GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER); + if (!offer) + return false; + + if (!startPipeline(opusPayloadType)) { + gst_webrtc_session_description_free(offer); + return false; + } + + // set-remote-description first, then create-answer + GstPromise *promise = gst_promise_new_with_change_func(createAnswer, webrtc_, nullptr); + g_signal_emit_by_name(webrtc_, "set-remote-description", offer, promise); + gst_webrtc_session_description_free(offer); + return true; } -gboolean -onICEGatheringCompletion(gpointer timerid) +bool +WebRTCSession::acceptAnswer(const std::string &sdp) { - *(guint*)(timerid) = 0; - if (isoffering_) { - emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_); - emit WebRTCSession::instance().stateChanged(WebRTCSession::State::OFFERSENT); - } - else { - emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_); - emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ANSWERSENT); - } - return FALSE; + nhlog::ui()->debug("WebRTC: received answer:\n{}", sdp); + if (state_ != State::OFFERSENT) + return false; + + GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER); + if (!answer) { + end(); + return false; + } + + g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr); + gst_webrtc_session_description_free(answer); + return true; } void -iceConnectionStateChanged(GstElement *webrtc, GParamSpec *pspec G_GNUC_UNUSED, gpointer user_data G_GNUC_UNUSED) +WebRTCSession::acceptICECandidates( + const std::vector &candidates) { - GstWebRTCICEConnectionState newState; - g_object_get(webrtc, "ice-connection-state", &newState, nullptr); - switch (newState) { - case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING: - nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking"); - emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING); - break; - case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED: - nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed"); - emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED); - break; - default: - break; - } + if (state_ >= State::INITIATED) { + for (const auto &c : candidates) { + nhlog::ui()->debug( + "WebRTC: remote candidate: (m-line:{}):{}", c.sdpMLineIndex, c.candidate); + g_signal_emit_by_name( + webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str()); + } + } } -void -createAnswer(GstPromise *promise, gpointer webrtc) +bool 
+WebRTCSession::startPipeline(int opusPayloadType) { - // create-answer first, then set-local-description - gst_promise_unref(promise); - promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr); - g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise); + if (state_ != State::DISCONNECTED) + return false; + + emit stateChanged(State::INITIATING); + + if (!createPipeline(opusPayloadType)) + return false; + + webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin"); + + if (!stunServer_.empty()) { + nhlog::ui()->info("WebRTC: setting STUN server: {}", stunServer_); + g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr); + } + + for (const auto &uri : turnServers_) { + nhlog::ui()->info("WebRTC: setting TURN server: {}", uri); + gboolean udata; + g_signal_emit_by_name(webrtc_, "add-turn-server", uri.c_str(), (gpointer)(&udata)); + } + if (turnServers_.empty()) + nhlog::ui()->warn("WebRTC: no TURN server provided"); + + // generate the offer when the pipeline goes to PLAYING + if (isoffering_) + g_signal_connect( + webrtc_, "on-negotiation-needed", G_CALLBACK(::createOffer), nullptr); + + // on-ice-candidate is emitted when a local ICE candidate has been gathered + g_signal_connect(webrtc_, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), nullptr); + + // capture ICE failure + g_signal_connect( + webrtc_, "notify::ice-connection-state", G_CALLBACK(iceConnectionStateChanged), nullptr); + + // incoming streams trigger pad-added + gst_element_set_state(pipe_, GST_STATE_READY); + g_signal_connect(webrtc_, "pad-added", G_CALLBACK(addDecodeBin), pipe_); + + // webrtcbin lifetime is the same as that of the pipeline + gst_object_unref(webrtc_); + + // start the pipeline + GstStateChangeReturn ret = gst_element_set_state(pipe_, GST_STATE_PLAYING); + if (ret == GST_STATE_CHANGE_FAILURE) { + nhlog::ui()->error("WebRTC: unable to start pipeline"); + end(); + return false; + } + + GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipe_)); + gst_bus_add_watch(bus, newBusMessage, this); + gst_object_unref(bus); + emit stateChanged(State::INITIATED); + return true; } -void -addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) +#define RTP_CAPS_OPUS "application/x-rtp,media=audio,encoding-name=OPUS,payload=" + +bool +WebRTCSession::createPipeline(int opusPayloadType) { - if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC) - return; - - nhlog::ui()->debug("WebRTC: received incoming stream"); - GstElement *decodebin = gst_element_factory_make("decodebin", nullptr); - g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe); - gst_bin_add(GST_BIN(pipe), decodebin); - gst_element_sync_state_with_parent(decodebin); - GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink"); - if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad))) - nhlog::ui()->error("WebRTC: unable to link new pad"); - gst_object_unref(sinkpad); + std::string pipeline("webrtcbin bundle-policy=max-bundle name=webrtcbin " + "autoaudiosrc ! volume name=srclevel ! audioconvert ! " + "audioresample ! queue ! opusenc ! rtpopuspay ! " + "queue ! " RTP_CAPS_OPUS + + std::to_string(opusPayloadType) + " ! 
webrtcbin."); + + webrtc_ = nullptr; + GError *error = nullptr; + pipe_ = gst_parse_launch(pipeline.c_str(), &error); + if (error) { + nhlog::ui()->error("WebRTC: failed to parse pipeline: {}", error->message); + g_error_free(error); + end(); + return false; + } + return true; } -void -linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) +bool +WebRTCSession::toggleMuteAudioSrc(bool &isMuted) { - GstCaps *caps = gst_pad_get_current_caps(newpad); - if (!caps) - return; - - const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0)); - gst_caps_unref(caps); - - GstPad *queuepad = nullptr; - if (g_str_has_prefix(name, "audio")) { - nhlog::ui()->debug("WebRTC: received incoming audio stream"); - GstElement *queue = gst_element_factory_make("queue", nullptr); - GstElement *convert = gst_element_factory_make("audioconvert", nullptr); - GstElement *resample = gst_element_factory_make("audioresample", nullptr); - GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr); - gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr); - gst_element_sync_state_with_parent(queue); - gst_element_sync_state_with_parent(convert); - gst_element_sync_state_with_parent(resample); - gst_element_sync_state_with_parent(sink); - gst_element_link_many(queue, convert, resample, sink, nullptr); - queuepad = gst_element_get_static_pad(queue, "sink"); - } - - if (queuepad) { - if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad))) - nhlog::ui()->error("WebRTC: unable to link new pad"); - else { - emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTED); - } - gst_object_unref(queuepad); - } + if (state_ < State::INITIATED) + return false; + + GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel"); + if (!srclevel) + return false; + + gboolean muted; + g_object_get(srclevel, "mute", &muted, nullptr); + g_object_set(srclevel, "mute", !muted, nullptr); + gst_object_unref(srclevel); + isMuted = !muted; + return true; } +void +WebRTCSession::end() +{ + nhlog::ui()->debug("WebRTC: ending session"); + if (pipe_) { + gst_element_set_state(pipe_, GST_STATE_NULL); + gst_object_unref(pipe_); + pipe_ = nullptr; + } + webrtc_ = nullptr; + if (state_ != State::DISCONNECTED) + emit stateChanged(State::DISCONNECTED); } diff --git a/src/WebRTCSession.h b/src/WebRTCSession.h index d79047a8..6b54f370 100644 --- a/src/WebRTCSession.h +++ b/src/WebRTCSession.h @@ -14,52 +14,55 @@ class WebRTCSession : public QObject Q_OBJECT public: - enum class State { - ICEFAILED, - DISCONNECTED, - INITIATING, - INITIATED, - OFFERSENT, - ANSWERSENT, - CONNECTING, - CONNECTED + enum class State + { + DISCONNECTED, + ICEFAILED, + INITIATING, + INITIATED, + OFFERSENT, + ANSWERSENT, + CONNECTING, + CONNECTED }; - static WebRTCSession& instance() + static WebRTCSession &instance() { - static WebRTCSession instance; - return instance; + static WebRTCSession instance; + return instance; } bool init(std::string *errorMessage = nullptr); - State state() const {return state_;} + State state() const { return state_; } bool createOffer(); bool acceptOffer(const std::string &sdp); bool acceptAnswer(const std::string &sdp); - void acceptICECandidates(const std::vector&); + void acceptICECandidates(const std::vector &); bool toggleMuteAudioSrc(bool &isMuted); void end(); - void setStunServer(const std::string &stunServer) {stunServer_ = stunServer;} - void setTurnServers(const std::vector &uris) {turnServers_ = uris;} + void setStunServer(const std::string 
&stunServer) { stunServer_ = stunServer; } + void setTurnServers(const std::vector &uris) { turnServers_ = uris; } signals: - void offerCreated(const std::string &sdp, const std::vector&); - void answerCreated(const std::string &sdp, const std::vector&); - void newICECandidate(const mtx::events::msg::CallCandidates::Candidate&); + void offerCreated(const std::string &sdp, + const std::vector &); + void answerCreated(const std::string &sdp, + const std::vector &); + void newICECandidate(const mtx::events::msg::CallCandidates::Candidate &); void stateChanged(WebRTCSession::State); // explicit qualifier necessary for Qt private slots: - void setState(State state) {state_ = state;} + void setState(State state) { state_ = state; } private: WebRTCSession(); - bool initialised_ = false; - State state_ = State::DISCONNECTED; - GstElement *pipe_ = nullptr; + bool initialised_ = false; + State state_ = State::DISCONNECTED; + GstElement *pipe_ = nullptr; GstElement *webrtc_ = nullptr; std::string stunServer_; std::vector turnServers_; @@ -68,6 +71,6 @@ private: bool createPipeline(int opusPayloadType); public: - WebRTCSession(WebRTCSession const&) = delete; - void operator=(WebRTCSession const&) = delete; + WebRTCSession(WebRTCSession const &) = delete; + void operator=(WebRTCSession const &) = delete; }; diff --git a/src/dialogs/AcceptCall.cpp b/src/dialogs/AcceptCall.cpp index 6b5e2e60..58348b15 100644 --- a/src/dialogs/AcceptCall.cpp +++ b/src/dialogs/AcceptCall.cpp @@ -1,4 +1,5 @@ #include +#include #include #include #include @@ -10,12 +11,12 @@ namespace dialogs { -AcceptCall::AcceptCall( - const QString &caller, - const QString &displayName, - const QString &roomName, - const QString &avatarUrl, - QWidget *parent) : QWidget(parent) +AcceptCall::AcceptCall(const QString &caller, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl, + QWidget *parent) + : QWidget(parent) { setAutoFillBackground(true); setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); @@ -39,8 +40,8 @@ AcceptCall::AcceptCall( if (!displayName.isEmpty() && displayName != caller) { displayNameLabel = new QLabel(displayName, this); labelFont.setPointSizeF(f.pointSizeF() * 2); - displayNameLabel ->setFont(labelFont); - displayNameLabel ->setAlignment(Qt::AlignCenter); + displayNameLabel->setFont(labelFont); + displayNameLabel->setAlignment(Qt::AlignCenter); } QLabel *callerLabel = new QLabel(caller, this); @@ -48,19 +49,23 @@ AcceptCall::AcceptCall( callerLabel->setFont(labelFont); callerLabel->setAlignment(Qt::AlignCenter); - QLabel *voiceCallLabel = new QLabel("Voice Call", this); - labelFont.setPointSizeF(f.pointSizeF() * 1.1); - voiceCallLabel->setFont(labelFont); - voiceCallLabel->setAlignment(Qt::AlignCenter); - auto avatar = new Avatar(this, QFontMetrics(f).height() * 6); if (!avatarUrl.isEmpty()) - avatar->setImage(avatarUrl); + avatar->setImage(avatarUrl); else - avatar->setLetter(utils::firstChar(roomName)); + avatar->setLetter(utils::firstChar(roomName)); + + const int iconSize = 24; + QLabel *callTypeIndicator = new QLabel(this); + QPixmap callIndicator(":/icons/icons/ui/place-call.png"); + callTypeIndicator->setPixmap(callIndicator.scaled(iconSize * 2, iconSize * 2)); + + QLabel *callTypeLabel = new QLabel("Voice Call", this); + labelFont.setPointSizeF(f.pointSizeF() * 1.1); + callTypeLabel->setFont(labelFont); + callTypeLabel->setAlignment(Qt::AlignCenter); - const int iconSize = 24; - auto buttonLayout = new QHBoxLayout(); + auto buttonLayout = new QHBoxLayout; 
buttonLayout->setSpacing(20); acceptBtn_ = new QPushButton(tr("Accept"), this); acceptBtn_->setDefault(true); @@ -74,10 +79,11 @@ AcceptCall::AcceptCall( buttonLayout->addWidget(rejectBtn_); if (displayNameLabel) - layout->addWidget(displayNameLabel, 0, Qt::AlignCenter); + layout->addWidget(displayNameLabel, 0, Qt::AlignCenter); layout->addWidget(callerLabel, 0, Qt::AlignCenter); - layout->addWidget(voiceCallLabel, 0, Qt::AlignCenter); layout->addWidget(avatar, 0, Qt::AlignCenter); + layout->addWidget(callTypeIndicator, 0, Qt::AlignCenter); + layout->addWidget(callTypeLabel, 0, Qt::AlignCenter); layout->addLayout(buttonLayout); connect(acceptBtn_, &QPushButton::clicked, this, [this]() { diff --git a/src/dialogs/AcceptCall.h b/src/dialogs/AcceptCall.h index 8e3ed3b2..5d2251fd 100644 --- a/src/dialogs/AcceptCall.h +++ b/src/dialogs/AcceptCall.h @@ -12,12 +12,11 @@ class AcceptCall : public QWidget Q_OBJECT public: - AcceptCall( - const QString &caller, - const QString &displayName, - const QString &roomName, - const QString &avatarUrl, - QWidget *parent = nullptr); + AcceptCall(const QString &caller, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl, + QWidget *parent = nullptr); signals: void accept(); diff --git a/src/dialogs/PlaceCall.cpp b/src/dialogs/PlaceCall.cpp index 81dd85dd..0fda1794 100644 --- a/src/dialogs/PlaceCall.cpp +++ b/src/dialogs/PlaceCall.cpp @@ -10,12 +10,12 @@ namespace dialogs { -PlaceCall::PlaceCall( - const QString &callee, - const QString &displayName, - const QString &roomName, - const QString &avatarUrl, - QWidget *parent) : QWidget(parent) +PlaceCall::PlaceCall(const QString &callee, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl, + QWidget *parent) + : QWidget(parent) { setAutoFillBackground(true); setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); @@ -34,11 +34,13 @@ PlaceCall::PlaceCall( f.setPointSizeF(f.pointSizeF()); auto avatar = new Avatar(this, QFontMetrics(f).height() * 3); if (!avatarUrl.isEmpty()) - avatar->setImage(avatarUrl); + avatar->setImage(avatarUrl); else - avatar->setLetter(utils::firstChar(roomName)); - - voiceBtn_ = new QPushButton(tr("Voice Call"), this); + avatar->setLetter(utils::firstChar(roomName)); + const int iconSize = 24; + voiceBtn_ = new QPushButton(tr("Voice"), this); + voiceBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png")); + voiceBtn_->setIconSize(QSize(iconSize, iconSize)); voiceBtn_->setDefault(true); cancelBtn_ = new QPushButton(tr("Cancel"), this); @@ -47,7 +49,7 @@ PlaceCall::PlaceCall( buttonLayout->addWidget(voiceBtn_); buttonLayout->addWidget(cancelBtn_); - QString name = displayName.isEmpty() ? callee : displayName; + QString name = displayName.isEmpty() ? 
callee : displayName; QLabel *label = new QLabel("Place a call to " + name + "?", this); layout->addWidget(label); diff --git a/src/dialogs/PlaceCall.h b/src/dialogs/PlaceCall.h index ed6fb750..f6db9ab5 100644 --- a/src/dialogs/PlaceCall.h +++ b/src/dialogs/PlaceCall.h @@ -12,12 +12,11 @@ class PlaceCall : public QWidget Q_OBJECT public: - PlaceCall( - const QString &callee, - const QString &displayName, - const QString &roomName, - const QString &avatarUrl, - QWidget *parent = nullptr); + PlaceCall(const QString &callee, + const QString &displayName, + const QString &roomName, + const QString &avatarUrl, + QWidget *parent = nullptr); signals: void voice(); diff --git a/src/timeline/TimelineModel.cpp b/src/timeline/TimelineModel.cpp index e4677f53..67e07d7b 100644 --- a/src/timeline/TimelineModel.cpp +++ b/src/timeline/TimelineModel.cpp @@ -796,9 +796,11 @@ TimelineModel::internalAddEvents( } else if (std::holds_alternative>(e_) || std::holds_alternative< - mtx::events::RoomEvent>( e_) || + mtx::events::RoomEvent>( + e_) || std::holds_alternative< - mtx::events::RoomEvent>( e_)) { + mtx::events::RoomEvent>( + e_)) { emit newCallEvent(e_); } } -- cgit 1.5.1 From 979bba6460a01525bb5fff51467cb2a9032fb2a4 Mon Sep 17 00:00:00 2001 From: trilene Date: Sat, 1 Aug 2020 15:43:23 -0400 Subject: Tweak AcceptCall dialog --- src/dialogs/AcceptCall.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'src/dialogs') diff --git a/src/dialogs/AcceptCall.cpp b/src/dialogs/AcceptCall.cpp index 58348b15..fd6565e2 100644 --- a/src/dialogs/AcceptCall.cpp +++ b/src/dialogs/AcceptCall.cpp @@ -57,8 +57,8 @@ AcceptCall::AcceptCall(const QString &caller, const int iconSize = 24; QLabel *callTypeIndicator = new QLabel(this); - QPixmap callIndicator(":/icons/icons/ui/place-call.png"); - callTypeIndicator->setPixmap(callIndicator.scaled(iconSize * 2, iconSize * 2)); + callTypeIndicator->setPixmap( + QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(iconSize * 2, iconSize * 2))); QLabel *callTypeLabel = new QLabel("Voice Call", this); labelFont.setPointSizeF(f.pointSizeF() * 1.1); -- cgit 1.5.1 From df65093374c6c77f789e10b20144b3426c4358ac Mon Sep 17 00:00:00 2001 From: trilene Date: Wed, 5 Aug 2020 17:56:44 -0400 Subject: Add audio input device selector --- src/CallManager.cpp | 1 + src/ChatPage.cpp | 1 + src/UserSettingsPage.cpp | 21 +++++++- src/UserSettingsPage.h | 7 +++ src/WebRTCSession.cpp | 116 ++++++++++++++++++++++++++++++++++++++++----- src/WebRTCSession.h | 7 +++ src/dialogs/AcceptCall.cpp | 44 +++++++++++++++-- src/dialogs/AcceptCall.h | 7 +++ src/dialogs/PlaceCall.cpp | 42 ++++++++++++++-- src/dialogs/PlaceCall.h | 7 +++ 10 files changed, 231 insertions(+), 22 deletions(-) (limited to 'src/dialogs') diff --git a/src/CallManager.cpp b/src/CallManager.cpp index 46781313..45890806 100644 --- a/src/CallManager.cpp +++ b/src/CallManager.cpp @@ -264,6 +264,7 @@ CallManager::handleEvent(const RoomEvent &callInviteEvent) caller.display_name, QString::fromStdString(roomInfo.name), QString::fromStdString(roomInfo.avatar_url), + settings_, MainWindow::instance()); connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent]() { MainWindow::instance()->hideOverlay(); diff --git a/src/ChatPage.cpp b/src/ChatPage.cpp index 589aa3c7..84a5e4d3 100644 --- a/src/ChatPage.cpp +++ b/src/ChatPage.cpp @@ -474,6 +474,7 @@ ChatPage::ChatPage(QSharedPointer userSettings, QWidget *parent) callee.display_name, QString::fromStdString(roomInfo.name), QString::fromStdString(roomInfo.avatar_url), + 
userSettings_, MainWindow::instance()); connect(dialog, &dialogs::PlaceCall::voice, this, [this]() { callManager_.sendInvite(current_room_); diff --git a/src/UserSettingsPage.cpp b/src/UserSettingsPage.cpp index e67da997..ab5658a4 100644 --- a/src/UserSettingsPage.cpp +++ b/src/UserSettingsPage.cpp @@ -77,7 +77,8 @@ UserSettings::load() presence_ = settings.value("user/presence", QVariant::fromValue(Presence::AutomaticPresence)) .value(); - useStunServer_ = settings.value("user/use_stun_server", false).toBool(); + useStunServer_ = settings.value("user/use_stun_server", false).toBool(); + defaultAudioSource_ = settings.value("user/default_audio_source", QString()).toString(); applyTheme(); } @@ -290,6 +291,16 @@ UserSettings::setUseStunServer(bool useStunServer) save(); } +void +UserSettings::setDefaultAudioSource(const QString &defaultAudioSource) +{ + if (defaultAudioSource == defaultAudioSource_) + return; + defaultAudioSource_ = defaultAudioSource; + emit defaultAudioSourceChanged(defaultAudioSource); + save(); +} + void UserSettings::applyTheme() { @@ -376,6 +387,7 @@ UserSettings::save() settings.setValue("emoji_font_family", emojiFont_); settings.setValue("presence", QVariant::fromValue(presence_)); settings.setValue("use_stun_server", useStunServer_); + settings.setValue("default_audio_source", defaultAudioSource_); settings.endGroup(); @@ -501,6 +513,9 @@ UserSettingsPage::UserSettingsPage(QSharedPointer settings, QWidge callsLabel->setFont(font); useStunServer_ = new Toggle{this}; + defaultAudioSourceValue_ = new QLabel(this); + defaultAudioSourceValue_->setFont(font); + auto encryptionLabel_ = new QLabel{tr("ENCRYPTION"), this}; encryptionLabel_->setFixedHeight(encryptionLabel_->minimumHeight() + LayoutTopMargin); encryptionLabel_->setAlignment(Qt::AlignBottom); @@ -634,9 +649,10 @@ UserSettingsPage::UserSettingsPage(QSharedPointer settings, QWidge formLayout_->addRow(callsLabel); formLayout_->addRow(new HorizontalLine{this}); - boxWrap(tr("Allow Fallback Call Assist Server"), + boxWrap(tr("Allow fallback call assist server"), useStunServer_, tr("Will use turn.matrix.org as assist when your home server does not offer one.")); + boxWrap(tr("Default audio source device"), defaultAudioSourceValue_); formLayout_->addRow(encryptionLabel_); formLayout_->addRow(new HorizontalLine{this}); @@ -797,6 +813,7 @@ UserSettingsPage::showEvent(QShowEvent *) deviceIdValue_->setText(QString::fromStdString(http::client()->device_id())); timelineMaxWidthSpin_->setValue(settings_->timelineMaxWidth()); useStunServer_->setState(!settings_->useStunServer()); + defaultAudioSourceValue_->setText(settings_->defaultAudioSource()); deviceFingerprintValue_->setText( utils::humanReadableFingerprint(olm::client()->identity_keys().ed25519)); diff --git a/src/UserSettingsPage.h b/src/UserSettingsPage.h index 567a7520..52ff9466 100644 --- a/src/UserSettingsPage.h +++ b/src/UserSettingsPage.h @@ -73,6 +73,8 @@ class UserSettings : public QObject Q_PROPERTY(Presence presence READ presence WRITE setPresence NOTIFY presenceChanged) Q_PROPERTY( bool useStunServer READ useStunServer WRITE setUseStunServer NOTIFY useStunServerChanged) + Q_PROPERTY(QString defaultAudioSource READ defaultAudioSource WRITE setDefaultAudioSource + NOTIFY defaultAudioSourceChanged) public: UserSettings(); @@ -110,6 +112,7 @@ public: void setDecryptSidebar(bool state); void setPresence(Presence state); void setUseStunServer(bool state); + void setDefaultAudioSource(const QString &deviceName); QString theme() const { return !theme_.isEmpty() ? 
theme_ : defaultTheme_; } bool messageHoverHighlight() const { return messageHoverHighlight_; } @@ -136,6 +139,7 @@ public: QString emojiFont() const { return emojiFont_; } Presence presence() const { return presence_; } bool useStunServer() const { return useStunServer_; } + QString defaultAudioSource() const { return defaultAudioSource_; } signals: void groupViewStateChanged(bool state); @@ -159,6 +163,7 @@ signals: void emojiFontChanged(QString state); void presenceChanged(Presence state); void useStunServerChanged(bool state); + void defaultAudioSourceChanged(const QString &deviceName); private: // Default to system theme if QT_QPA_PLATFORMTHEME var is set. @@ -187,6 +192,7 @@ private: QString emojiFont_; Presence presence_; bool useStunServer_; + QString defaultAudioSource_; }; class HorizontalLine : public QFrame @@ -244,6 +250,7 @@ private: Toggle *decryptSidebar_; QLabel *deviceFingerprintValue_; QLabel *deviceIdValue_; + QLabel *defaultAudioSourceValue_; QComboBox *themeCombo_; QComboBox *scaleFactorCombo_; diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp index 07dfaac4..5638c607 100644 --- a/src/WebRTCSession.cpp +++ b/src/WebRTCSession.cpp @@ -487,23 +487,74 @@ WebRTCSession::startPipeline(int opusPayloadType) return true; } -#define RTP_CAPS_OPUS "application/x-rtp,media=audio,encoding-name=OPUS,payload=" - bool WebRTCSession::createPipeline(int opusPayloadType) { - std::string pipeline("webrtcbin bundle-policy=max-bundle name=webrtcbin " - "autoaudiosrc ! volume name=srclevel ! audioconvert ! " - "audioresample ! queue ! opusenc ! rtpopuspay ! " - "queue ! " RTP_CAPS_OPUS + - std::to_string(opusPayloadType) + " ! webrtcbin."); + int nSources = audioSources_ ? g_list_length(audioSources_) : 0; + if (nSources == 0) { + nhlog::ui()->error("WebRTC: no audio sources"); + return false; + } - webrtc_ = nullptr; - GError *error = nullptr; - pipe_ = gst_parse_launch(pipeline.c_str(), &error); - if (error) { - nhlog::ui()->error("WebRTC: failed to parse pipeline: {}", error->message); - g_error_free(error); + if (audioSourceIndex_ < 0 || audioSourceIndex_ >= nSources) { + nhlog::ui()->error("WebRTC: invalid audio source index"); + return false; + } + + GstElement *source = gst_device_create_element( + GST_DEVICE_CAST(g_list_nth_data(audioSources_, audioSourceIndex_)), nullptr); + GstElement *volume = gst_element_factory_make("volume", "srclevel"); + GstElement *convert = gst_element_factory_make("audioconvert", nullptr); + GstElement *resample = gst_element_factory_make("audioresample", nullptr); + GstElement *queue1 = gst_element_factory_make("queue", nullptr); + GstElement *opusenc = gst_element_factory_make("opusenc", nullptr); + GstElement *rtp = gst_element_factory_make("rtpopuspay", nullptr); + GstElement *queue2 = gst_element_factory_make("queue", nullptr); + GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr); + + GstCaps *rtpcaps = gst_caps_new_simple("application/x-rtp", + "media", + G_TYPE_STRING, + "audio", + "encoding-name", + G_TYPE_STRING, + "OPUS", + "payload", + G_TYPE_INT, + opusPayloadType, + nullptr); + g_object_set(capsfilter, "caps", rtpcaps, nullptr); + gst_caps_unref(rtpcaps); + + GstElement *webrtcbin = gst_element_factory_make("webrtcbin", "webrtcbin"); + g_object_set(webrtcbin, "bundle-policy", GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE, nullptr); + + pipe_ = gst_pipeline_new(nullptr); + gst_bin_add_many(GST_BIN(pipe_), + source, + volume, + convert, + resample, + queue1, + opusenc, + rtp, + queue2, + capsfilter, + webrtcbin, + 
nullptr); + + if (!gst_element_link_many(source, + volume, + convert, + resample, + queue1, + opusenc, + rtp, + queue2, + capsfilter, + webrtcbin, + nullptr)) { + nhlog::ui()->error("WebRTC: failed to link pipeline elements"); end(); return false; } @@ -541,3 +592,42 @@ WebRTCSession::end() if (state_ != State::DISCONNECTED) emit stateChanged(State::DISCONNECTED); } + +void +WebRTCSession::refreshDevices() +{ + if (!initialised_) + return; + + static GstDeviceMonitor *monitor = nullptr; + if (!monitor) { + monitor = gst_device_monitor_new(); + GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw"); + gst_device_monitor_add_filter(monitor, "Audio/Source", caps); + gst_caps_unref(caps); + } + g_list_free_full(audioSources_, g_object_unref); + audioSources_ = gst_device_monitor_get_devices(monitor); +} + +std::vector +WebRTCSession::getAudioSourceNames(const std::string &defaultDevice) +{ + if (!initialised_) + return {}; + + refreshDevices(); + std::vector ret; + ret.reserve(g_list_length(audioSources_)); + for (GList *l = audioSources_; l != nullptr; l = l->next) { + gchar *name = gst_device_get_display_name(GST_DEVICE_CAST(l->data)); + ret.emplace_back(name); + g_free(name); + if (ret.back() == defaultDevice) { + // move default device to top of the list + std::swap(audioSources_->data, l->data); + std::swap(ret.front(), ret.back()); + } + } + return ret; +} diff --git a/src/WebRTCSession.h b/src/WebRTCSession.h index 6b54f370..56d76fa8 100644 --- a/src/WebRTCSession.h +++ b/src/WebRTCSession.h @@ -7,6 +7,7 @@ #include "mtx/events/voip.hpp" +typedef struct _GList GList; typedef struct _GstElement GstElement; class WebRTCSession : public QObject @@ -46,6 +47,9 @@ public: void setStunServer(const std::string &stunServer) { stunServer_ = stunServer; } void setTurnServers(const std::vector &uris) { turnServers_ = uris; } + std::vector getAudioSourceNames(const std::string &defaultDevice); + void setAudioSource(int audioDeviceIndex) { audioSourceIndex_ = audioDeviceIndex; } + signals: void offerCreated(const std::string &sdp, const std::vector &); @@ -66,9 +70,12 @@ private: GstElement *webrtc_ = nullptr; std::string stunServer_; std::vector turnServers_; + GList *audioSources_ = nullptr; + int audioSourceIndex_ = -1; bool startPipeline(int opusPayloadType); bool createPipeline(int opusPayloadType); + void refreshDevices(); public: WebRTCSession(WebRTCSession const &) = delete; diff --git a/src/dialogs/AcceptCall.cpp b/src/dialogs/AcceptCall.cpp index fd6565e2..be1eb0c9 100644 --- a/src/dialogs/AcceptCall.cpp +++ b/src/dialogs/AcceptCall.cpp @@ -1,11 +1,14 @@ +#include #include -#include #include #include #include +#include "ChatPage.h" #include "Config.h" +#include "UserSettingsPage.h" #include "Utils.h" +#include "WebRTCSession.h" #include "dialogs/AcceptCall.h" #include "ui/Avatar.h" @@ -15,9 +18,25 @@ AcceptCall::AcceptCall(const QString &caller, const QString &displayName, const QString &roomName, const QString &avatarUrl, + QSharedPointer settings, QWidget *parent) : QWidget(parent) { + std::string errorMessage; + if (!WebRTCSession::instance().init(&errorMessage)) { + emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage)); + emit close(); + return; + } + audioDevices_ = WebRTCSession::instance().getAudioSourceNames( + settings->defaultAudioSource().toStdString()); + if (audioDevices_.empty()) { + emit ChatPage::instance()->showNotification( + "Incoming call: No audio sources found."); + emit close(); + return; + } + setAutoFillBackground(true); 
setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); setWindowModality(Qt::WindowModal); @@ -55,7 +74,7 @@ AcceptCall::AcceptCall(const QString &caller, else avatar->setLetter(utils::firstChar(roomName)); - const int iconSize = 24; + const int iconSize = 22; QLabel *callTypeIndicator = new QLabel(this); callTypeIndicator->setPixmap( QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(iconSize * 2, iconSize * 2))); @@ -66,7 +85,7 @@ AcceptCall::AcceptCall(const QString &caller, callTypeLabel->setAlignment(Qt::AlignCenter); auto buttonLayout = new QHBoxLayout; - buttonLayout->setSpacing(20); + buttonLayout->setSpacing(18); acceptBtn_ = new QPushButton(tr("Accept"), this); acceptBtn_->setDefault(true); acceptBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png")); @@ -78,6 +97,19 @@ AcceptCall::AcceptCall(const QString &caller, buttonLayout->addWidget(acceptBtn_); buttonLayout->addWidget(rejectBtn_); + auto deviceLayout = new QHBoxLayout; + auto audioLabel = new QLabel(this); + audioLabel->setPixmap( + QIcon(":/icons/icons/ui/microphone-unmute.png").pixmap(QSize(iconSize, iconSize))); + + auto deviceList = new QComboBox(this); + for (const auto &d : audioDevices_) + deviceList->addItem(QString::fromStdString(d)); + + deviceLayout->addStretch(); + deviceLayout->addWidget(audioLabel); + deviceLayout->addWidget(deviceList); + if (displayNameLabel) layout->addWidget(displayNameLabel, 0, Qt::AlignCenter); layout->addWidget(callerLabel, 0, Qt::AlignCenter); @@ -85,8 +117,12 @@ AcceptCall::AcceptCall(const QString &caller, layout->addWidget(callTypeIndicator, 0, Qt::AlignCenter); layout->addWidget(callTypeLabel, 0, Qt::AlignCenter); layout->addLayout(buttonLayout); + layout->addLayout(deviceLayout); - connect(acceptBtn_, &QPushButton::clicked, this, [this]() { + connect(acceptBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() { + WebRTCSession::instance().setAudioSource(deviceList->currentIndex()); + settings->setDefaultAudioSource( + QString::fromStdString(audioDevices_[deviceList->currentIndex()])); emit accept(); emit close(); }); diff --git a/src/dialogs/AcceptCall.h b/src/dialogs/AcceptCall.h index 5d2251fd..909605d0 100644 --- a/src/dialogs/AcceptCall.h +++ b/src/dialogs/AcceptCall.h @@ -1,9 +1,14 @@ #pragma once +#include +#include + +#include #include class QPushButton; class QString; +class UserSettings; namespace dialogs { @@ -16,6 +21,7 @@ public: const QString &displayName, const QString &roomName, const QString &avatarUrl, + QSharedPointer settings, QWidget *parent = nullptr); signals: @@ -25,6 +31,7 @@ signals: private: QPushButton *acceptBtn_; QPushButton *rejectBtn_; + std::vector audioDevices_; }; } diff --git a/src/dialogs/PlaceCall.cpp b/src/dialogs/PlaceCall.cpp index 0fda1794..4e70370a 100644 --- a/src/dialogs/PlaceCall.cpp +++ b/src/dialogs/PlaceCall.cpp @@ -1,10 +1,14 @@ +#include #include #include #include #include +#include "ChatPage.h" #include "Config.h" +#include "UserSettingsPage.h" #include "Utils.h" +#include "WebRTCSession.h" #include "dialogs/PlaceCall.h" #include "ui/Avatar.h" @@ -14,9 +18,24 @@ PlaceCall::PlaceCall(const QString &callee, const QString &displayName, const QString &roomName, const QString &avatarUrl, + QSharedPointer settings, QWidget *parent) : QWidget(parent) { + std::string errorMessage; + if (!WebRTCSession::instance().init(&errorMessage)) { + emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage)); + emit close(); + return; + } + audioDevices_ = WebRTCSession::instance().getAudioSourceNames( + 
settings->defaultAudioSource().toStdString()); + if (audioDevices_.empty()) { + emit ChatPage::instance()->showNotification("No audio sources found."); + emit close(); + return; + } + setAutoFillBackground(true); setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); setWindowModality(Qt::WindowModal); @@ -37,25 +56,42 @@ PlaceCall::PlaceCall(const QString &callee, avatar->setImage(avatarUrl); else avatar->setLetter(utils::firstChar(roomName)); - const int iconSize = 24; + const int iconSize = 18; voiceBtn_ = new QPushButton(tr("Voice"), this); voiceBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png")); voiceBtn_->setIconSize(QSize(iconSize, iconSize)); voiceBtn_->setDefault(true); cancelBtn_ = new QPushButton(tr("Cancel"), this); - buttonLayout->addStretch(1); buttonLayout->addWidget(avatar); + buttonLayout->addStretch(); buttonLayout->addWidget(voiceBtn_); buttonLayout->addWidget(cancelBtn_); QString name = displayName.isEmpty() ? callee : displayName; QLabel *label = new QLabel("Place a call to " + name + "?", this); + auto deviceLayout = new QHBoxLayout; + auto audioLabel = new QLabel(this); + audioLabel->setPixmap(QIcon(":/icons/icons/ui/microphone-unmute.png") + .pixmap(QSize(iconSize * 1.2, iconSize * 1.2))); + + auto deviceList = new QComboBox(this); + for (const auto &d : audioDevices_) + deviceList->addItem(QString::fromStdString(d)); + + deviceLayout->addStretch(); + deviceLayout->addWidget(audioLabel); + deviceLayout->addWidget(deviceList); + layout->addWidget(label); layout->addLayout(buttonLayout); + layout->addLayout(deviceLayout); - connect(voiceBtn_, &QPushButton::clicked, this, [this]() { + connect(voiceBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() { + WebRTCSession::instance().setAudioSource(deviceList->currentIndex()); + settings->setDefaultAudioSource( + QString::fromStdString(audioDevices_[deviceList->currentIndex()])); emit voice(); emit close(); }); diff --git a/src/dialogs/PlaceCall.h b/src/dialogs/PlaceCall.h index f6db9ab5..5a1e982c 100644 --- a/src/dialogs/PlaceCall.h +++ b/src/dialogs/PlaceCall.h @@ -1,9 +1,14 @@ #pragma once +#include +#include + +#include #include class QPushButton; class QString; +class UserSettings; namespace dialogs { @@ -16,6 +21,7 @@ public: const QString &displayName, const QString &roomName, const QString &avatarUrl, + QSharedPointer settings, QWidget *parent = nullptr); signals: @@ -25,6 +31,7 @@ signals: private: QPushButton *voiceBtn_; QPushButton *cancelBtn_; + std::vector audioDevices_; }; } -- cgit 1.5.1
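
Note on the pipeline rewrite in the "Add audio input device selector" commit: WebRTCSession::createPipeline() stops using a gst_parse_launch() description and instead assembles the audio send chain element-by-element, so the first element can be created from whichever GstDevice the user picked. Below is a minimal, self-contained sketch of that same chain, not the project's code: it substitutes autoaudiosrc for the device-derived source, hard-codes an illustrative Opus payload type of 111 (the real value comes from the SDP offer), and only verifies that the elements link.

// Sketch of the audio send chain from WebRTCSession::createPipeline().
// Assumptions: autoaudiosrc instead of a user-selected device, payload type 111.
// Build with: g++ sketch.cpp $(pkg-config --cflags --libs gstreamer-1.0)
#include <gst/gst.h>

int
main(int argc, char *argv[])
{
        gst_init(&argc, &argv);

        const int opusPayloadType = 111; // illustrative only

        GstElement *pipe       = gst_pipeline_new(nullptr);
        GstElement *source     = gst_element_factory_make("autoaudiosrc", nullptr);
        GstElement *volume     = gst_element_factory_make("volume", "srclevel");
        GstElement *convert    = gst_element_factory_make("audioconvert", nullptr);
        GstElement *resample   = gst_element_factory_make("audioresample", nullptr);
        GstElement *queue1     = gst_element_factory_make("queue", nullptr);
        GstElement *opusenc    = gst_element_factory_make("opusenc", nullptr);
        GstElement *rtppay     = gst_element_factory_make("rtpopuspay", nullptr);
        GstElement *queue2     = gst_element_factory_make("queue", nullptr);
        GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);
        GstElement *webrtcbin  = gst_element_factory_make("webrtcbin", "webrtcbin");
        if (!webrtcbin) {
                g_printerr("webrtcbin missing (gst-plugins-bad required)\n");
                return 1;
        }

        // Pin the RTP caps to the Opus payload type negotiated with the peer.
        GstCaps *rtpcaps = gst_caps_new_simple("application/x-rtp",
                                               "media", G_TYPE_STRING, "audio",
                                               "encoding-name", G_TYPE_STRING, "OPUS",
                                               "payload", G_TYPE_INT, opusPayloadType,
                                               nullptr);
        g_object_set(capsfilter, "caps", rtpcaps, nullptr);
        gst_caps_unref(rtpcaps);
        gst_util_set_object_arg(G_OBJECT(webrtcbin), "bundle-policy", "max-bundle");

        gst_bin_add_many(GST_BIN(pipe), source, volume, convert, resample, queue1,
                         opusenc, rtppay, queue2, capsfilter, webrtcbin, nullptr);
        if (!gst_element_link_many(source, volume, convert, resample, queue1,
                                   opusenc, rtppay, queue2, capsfilter, webrtcbin, nullptr)) {
                g_printerr("failed to link audio send chain\n");
                gst_object_unref(pipe);
                return 1;
        }

        g_print("audio send chain linked; a real session would now negotiate SDP/ICE\n");
        gst_object_unref(pipe);
        return 0;
}

Building the chain by hand rather than parsing a description string is what makes it possible to swap in gst_device_create_element() for the first element, as the patch does, and it keeps the "srclevel" volume element addressable by name for the mute toggle.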
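
The device list behind the new QComboBox comes from a GstDeviceMonitor filtered to "Audio/Source" (see refreshDevices() and getAudioSourceNames() in the WebRTCSession.cpp hunk above). A standalone sketch of that enumeration, assuming GStreamer is initialised in main(); printing the names and unreffing the probe element are for illustration only:

// Sketch: enumerate audio capture devices the way refreshDevices()/
// getAudioSourceNames() do, then show how a chosen device would become
// the head of the send pipeline.
#include <gst/gst.h>
#include <string>
#include <vector>

int
main(int argc, char *argv[])
{
        gst_init(&argc, &argv);

        GstDeviceMonitor *monitor = gst_device_monitor_new();
        GstCaps *caps             = gst_caps_new_empty_simple("audio/x-raw");
        gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
        gst_caps_unref(caps);

        GList *devices = gst_device_monitor_get_devices(monitor);
        std::vector<std::string> names;
        for (GList *l = devices; l != nullptr; l = l->next) {
                gchar *name = gst_device_get_display_name(GST_DEVICE_CAST(l->data));
                names.emplace_back(name);
                g_free(name);
                g_print("audio source: %s\n", names.back().c_str());
        }

        // A selected GstDevice is turned into a source element for the pipeline.
        if (devices) {
                GstElement *src =
                  gst_device_create_element(GST_DEVICE_CAST(devices->data), nullptr);
                if (src)
                        gst_object_unref(src);
        }

        g_list_free_full(devices, g_object_unref);
        gst_object_unref(monitor);
        return 0;
}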
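
On the Qt side, both PlaceCall and AcceptCall follow the same shape: fill a QComboBox with the device names, and on confirmation pass the chosen index to WebRTCSession::setAudioSource() and persist the display name as the default audio source. A reduced sketch of that wiring is below; DevicePicker, names and onPicked are hypothetical stand-ins (the real dialogs emit voice()/accept() signals and take QSharedPointer settings instead of a callback):

// Sketch only: the combo-box wiring used by the PlaceCall/AcceptCall hunks,
// reduced to a free-standing widget with a callback instead of signals.
#include <QComboBox>
#include <QPushButton>
#include <QVBoxLayout>
#include <QWidget>
#include <functional>
#include <string>
#include <vector>

class DevicePicker : public QWidget
{
public:
        DevicePicker(const std::vector<std::string> &names,
                     std::function<void(int, const QString &)> onPicked,
                     QWidget *parent = nullptr)
          : QWidget(parent)
        {
                auto layout     = new QVBoxLayout(this);
                auto deviceList = new QComboBox(this);
                for (const auto &n : names)
                        deviceList->addItem(QString::fromStdString(n));

                auto okBtn = new QPushButton("Call", this);
                okBtn->setDefault(true);

                layout->addWidget(deviceList);
                layout->addWidget(okBtn);

                // On confirmation hand back both the index (for setAudioSource)
                // and the display name (persisted as the new default device).
                connect(okBtn, &QPushButton::clicked, this, [=]() {
                        onPicked(deviceList->currentIndex(), deviceList->currentText());
                        close();
                });
        }
};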