summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
author:    Nicolas Werner <nicolas.werner@hotmail.de>  2020-08-17 20:40:33 +0200
committer: Nicolas Werner <nicolas.werner@hotmail.de>  2020-08-17 20:40:33 +0200
commit:    de7ec4d2b38888ee88d0a45b05d5a4a7cc730a4d (patch)
tree:      7d65f5bcbe6996c24f0f5103160e0d4d23bb0983 /src
parent:    Add /clear-timeline command (diff)
parent:    Merge pull request #237 from trilene/voip (diff)
download:  nheko-de7ec4d2b38888ee88d0a45b05d5a4a7cc730a4d.tar.xz
Merge remote-tracking branch 'origin/master' into new-event-store
Conflicts:
	CMakeLists.txt
	io.github.NhekoReborn.Nheko.json
	src/Cache.cpp
	src/timeline/TimelineModel.cpp
	src/timeline/TimelineModel.h
	src/timeline/TimelineViewManager.cpp
Diffstat (limited to 'src')
-rw-r--r--  src/ActiveCallBar.cpp                 | 160
-rw-r--r--  src/ActiveCallBar.h                   |  40
-rw-r--r--  src/Cache.cpp                         |   3
-rw-r--r--  src/CallManager.cpp                   | 458
-rw-r--r--  src/CallManager.h                     |  75
-rw-r--r--  src/ChatPage.cpp                      | 114
-rw-r--r--  src/ChatPage.h                        |   8
-rw-r--r--  src/Config.h                          |   6
-rw-r--r--  src/EventAccessors.cpp                |  31
-rw-r--r--  src/EventAccessors.h                  |   3
-rw-r--r--  src/MainWindow.cpp                    |  23
-rw-r--r--  src/TextInputWidget.cpp               |  31
-rw-r--r--  src/TextInputWidget.h                 |   4
-rw-r--r--  src/UserSettingsPage.cpp              |  48
-rw-r--r--  src/UserSettingsPage.h                |  14
-rw-r--r--  src/Utils.cpp                         |  40
-rw-r--r--  src/Utils.h                           |  45
-rw-r--r--  src/WebRTCSession.cpp                 | 697
-rw-r--r--  src/WebRTCSession.h                   |  83
-rw-r--r--  src/dialogs/AcceptCall.cpp            | 135
-rw-r--r--  src/dialogs/AcceptCall.h              |  37
-rw-r--r--  src/dialogs/PlaceCall.cpp             | 104
-rw-r--r--  src/dialogs/PlaceCall.h               |  37
-rw-r--r--  src/dialogs/UserProfile.cpp           |  11
-rw-r--r--  src/dialogs/UserProfile.h             |   1
-rw-r--r--  src/timeline/TimelineModel.cpp        | 124
-rw-r--r--  src/timeline/TimelineModel.h          |  19
-rw-r--r--  src/timeline/TimelineViewManager.cpp  |  71
-rw-r--r--  src/timeline/TimelineViewManager.h    |  15
29 files changed, 2364 insertions, 73 deletions
diff --git a/src/ActiveCallBar.cpp b/src/ActiveCallBar.cpp
new file mode 100644
index 00000000..c0d2c13a
--- /dev/null
+++ b/src/ActiveCallBar.cpp
@@ -0,0 +1,160 @@
+#include <cstdio>
+
+#include <QDateTime>
+#include <QHBoxLayout>
+#include <QIcon>
+#include <QLabel>
+#include <QString>
+#include <QTimer>
+
+#include "ActiveCallBar.h"
+#include "ChatPage.h"
+#include "Utils.h"
+#include "WebRTCSession.h"
+#include "ui/Avatar.h"
+#include "ui/FlatButton.h"
+
+// Bar displayed above the chat timeline while a voice call is active:
+// shows the remote party, call state, elapsed time and a mute toggle.
+ActiveCallBar::ActiveCallBar(QWidget *parent)
+  : QWidget(parent)
+{
+        // Solid green background so the active call stands out.
+        setAutoFillBackground(true);
+        auto p = palette();
+        p.setColor(backgroundRole(), QColor(46, 204, 113));
+        setPalette(p);
+
+        QFont f;
+        // NOTE(review): setPointSizeF(pointSizeF()) is a no-op — presumably
+        // a scale factor was intended here; confirm.
+        f.setPointSizeF(f.pointSizeF());
+
+        // Derive all sizes from the font so the bar scales with the user's
+        // font settings.
+        const int fontHeight    = QFontMetrics(f).height();
+        const int widgetMargin  = fontHeight / 3;
+        const int contentHeight = fontHeight * 3;
+
+        setFixedHeight(contentHeight + widgetMargin);
+
+        layout_ = new QHBoxLayout(this);
+        layout_->setSpacing(widgetMargin);
+        layout_->setContentsMargins(2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin);
+
+        QFont labelFont;
+        labelFont.setPointSizeF(labelFont.pointSizeF() * 1.1);
+        labelFont.setWeight(QFont::Medium);
+
+        avatar_ = new Avatar(this, QFontMetrics(f).height() * 2.5);
+
+        callPartyLabel_ = new QLabel(this);
+        callPartyLabel_->setFont(labelFont);
+
+        stateLabel_ = new QLabel(this);
+        stateLabel_->setFont(labelFont);
+
+        // Hidden until the call reaches the CONNECTED state (see update()).
+        durationLabel_ = new QLabel(this);
+        durationLabel_->setFont(labelFont);
+        durationLabel_->hide();
+
+        muteBtn_ = new FlatButton(this);
+        setMuteIcon(false);
+        muteBtn_->setFixedSize(buttonSize_, buttonSize_);
+        muteBtn_->setCornerRadius(buttonSize_ / 2);
+        // muted_ is presumably updated through the toggleMuteAudioSrc
+        // out-parameter; the icon is refreshed only when the toggle
+        // succeeds. TODO confirm against WebRTCSession::toggleMuteAudioSrc.
+        connect(muteBtn_, &FlatButton::clicked, this, [this]() {
+                if (WebRTCSession::instance().toggleMuteAudioSrc(muted_))
+                        setMuteIcon(muted_);
+        });
+
+        layout_->addWidget(avatar_, 0, Qt::AlignLeft);
+        layout_->addWidget(callPartyLabel_, 0, Qt::AlignLeft);
+        layout_->addWidget(stateLabel_, 0, Qt::AlignLeft);
+        layout_->addWidget(durationLabel_, 0, Qt::AlignLeft);
+        layout_->addStretch();
+        layout_->addWidget(muteBtn_, 0, Qt::AlignCenter);
+        layout_->addSpacing(18);
+
+        // Once per second, render the elapsed call time as MM:SS, or
+        // HH:MM:SS once the call exceeds an hour.
+        timer_ = new QTimer(this);
+        connect(timer_, &QTimer::timeout, this, [this]() {
+                auto seconds = QDateTime::currentSecsSinceEpoch() - callStartTime_;
+                int s        = seconds % 60;
+                int m        = (seconds / 60) % 60;
+                int h        = seconds / 3600;
+                char buf[12];
+                if (h)
+                        snprintf(buf, sizeof(buf), "%.2d:%.2d:%.2d", h, m, s);
+                else
+                        snprintf(buf, sizeof(buf), "%.2d:%.2d", m, s);
+                durationLabel_->setText(buf);
+        });
+
+        connect(
+          &WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &ActiveCallBar::update);
+}
+
+// Set the mute button's icon and tooltip for the given mute state; the
+// tooltip names the action the next click will perform.
+void
+ActiveCallBar::setMuteIcon(bool muted)
+{
+        QIcon icon;
+        if (muted) {
+                muteBtn_->setToolTip("Unmute Mic");
+                icon.addFile(":/icons/icons/ui/microphone-unmute.png");
+        } else {
+                muteBtn_->setToolTip("Mute Mic");
+                icon.addFile(":/icons/icons/ui/microphone-mute.png");
+        }
+        muteBtn_->setIcon(icon);
+        muteBtn_->setIconSize(QSize(buttonSize_, buttonSize_));
+}
+
+// Show who the call is with: prefer the display name, falling back to
+// the Matrix user id; use the room avatar, or the room name's first
+// character when no avatar is set.
+void
+ActiveCallBar::setCallParty(const QString &userid,
+                            const QString &displayName,
+                            const QString &roomName,
+                            const QString &avatarUrl)
+{
+        callPartyLabel_->setText("  " + (displayName.isEmpty() ? userid : displayName) + " ");
+
+        if (!avatarUrl.isEmpty())
+                avatar_->setImage(avatarUrl);
+        else
+                avatar_->setLetter(utils::firstChar(roomName));
+}
+
+// React to WebRTC session state changes: show the bar with a status
+// message while the call is being set up, start the duration counter on
+// CONNECTED, and hide/reset everything when the call ends or ICE fails.
+void
+ActiveCallBar::update(WebRTCSession::State state)
+{
+        switch (state) {
+        case WebRTCSession::State::INITIATING:
+                show();
+                stateLabel_->setText("Initiating call...");
+                break;
+        case WebRTCSession::State::INITIATED:
+                show();
+                stateLabel_->setText("Call initiated...");
+                break;
+        case WebRTCSession::State::OFFERSENT:
+                show();
+                stateLabel_->setText("Calling...");
+                break;
+        case WebRTCSession::State::CONNECTING:
+                show();
+                stateLabel_->setText("Connecting...");
+                break;
+        case WebRTCSession::State::CONNECTED:
+                show();
+                // Start timing the call; timer_ ticks once a second.
+                callStartTime_ = QDateTime::currentSecsSinceEpoch();
+                timer_->start(1000);
+                stateLabel_->setPixmap(
+                  QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(buttonSize_, buttonSize_)));
+                durationLabel_->setText("00:00");
+                durationLabel_->show();
+                break;
+        case WebRTCSession::State::ICEFAILED:
+        case WebRTCSession::State::DISCONNECTED:
+                // Call over: clear all labels and reset the mute display.
+                hide();
+                timer_->stop();
+                callPartyLabel_->setText(QString());
+                stateLabel_->setText(QString());
+                durationLabel_->setText(QString());
+                durationLabel_->hide();
+                setMuteIcon(false);
+                break;
+        default:
+                break;
+        }
+}
diff --git a/src/ActiveCallBar.h b/src/ActiveCallBar.h
new file mode 100644
index 00000000..1e940227
--- /dev/null
+++ b/src/ActiveCallBar.h
@@ -0,0 +1,40 @@
+#pragma once
+
+#include <QWidget>
+
+#include "WebRTCSession.h"
+
+class QHBoxLayout;
+class QLabel;
+class QTimer;
+class Avatar;
+class FlatButton;
+
+// Widget shown above the timeline during an active voice call; reacts
+// to WebRTCSession state changes (see update()).
+class ActiveCallBar : public QWidget
+{
+        Q_OBJECT
+
+public:
+        ActiveCallBar(QWidget *parent = nullptr);
+
+public slots:
+        // Refresh the bar for a new session state (visibility, labels, timer).
+        void update(WebRTCSession::State);
+        // Set the remote party's name and avatar shown in the bar.
+        void setCallParty(const QString &userid,
+                          const QString &displayName,
+                          const QString &roomName,
+                          const QString &avatarUrl);
+
+private:
+        QHBoxLayout *layout_    = nullptr;
+        Avatar *avatar_         = nullptr;
+        QLabel *callPartyLabel_ = nullptr;
+        QLabel *stateLabel_     = nullptr;
+        QLabel *durationLabel_  = nullptr; // elapsed time; hidden until CONNECTED
+        FlatButton *muteBtn_    = nullptr;
+        int buttonSize_         = 22;
+        bool muted_             = false;
+        qint64 callStartTime_   = 0; // epoch seconds when the call connected
+        QTimer *timer_          = nullptr; // 1s tick driving durationLabel_
+
+        void setMuteIcon(bool muted);
+};
diff --git a/src/Cache.cpp b/src/Cache.cpp
index 0d879584..fd26f63e 100644
--- a/src/Cache.cpp
+++ b/src/Cache.cpp
@@ -1586,7 +1586,8 @@ Cache::getLastMessageInfo(lmdb::txn &txn, const std::string &room_id)
                 }
 
                 if (!(obj["type"] == "m.room.message" || obj["type"] == "m.sticker" ||
-                      obj["type"] == "m.room.encrypted"))
+                      obj["type"] == "m.call.invite" || obj["type"] == "m.call.answer" ||
+                      obj["type"] == "m.call.hangup" || obj["type"] == "m.room.encrypted"))
                         continue;
 
                 mtx::events::collections::TimelineEvent te;
diff --git a/src/CallManager.cpp b/src/CallManager.cpp
new file mode 100644
index 00000000..7a8d2ca7
--- /dev/null
+++ b/src/CallManager.cpp
@@ -0,0 +1,458 @@
+#include <algorithm>
+#include <cctype>
+#include <chrono>
+#include <cstdint>
+
+#include <QMediaPlaylist>
+#include <QUrl>
+
+#include "Cache.h"
+#include "CallManager.h"
+#include "ChatPage.h"
+#include "Logging.h"
+#include "MainWindow.h"
+#include "MatrixClient.h"
+#include "UserSettingsPage.h"
+#include "WebRTCSession.h"
+#include "dialogs/AcceptCall.h"
+
+#include "mtx/responses/turn_server.hpp"
+
+Q_DECLARE_METATYPE(std::vector<mtx::events::msg::CallCandidates::Candidate>)
+Q_DECLARE_METATYPE(mtx::events::msg::CallCandidates::Candidate)
+Q_DECLARE_METATYPE(mtx::responses::TurnServer)
+
+using namespace mtx::events;
+using namespace mtx::events::msg;
+
+// https://github.com/vector-im/riot-web/issues/10173
+#define STUN_SERVER "stun://turn.matrix.org:3478"
+
+namespace {
+std::vector<std::string>
+getTurnURIs(const mtx::responses::TurnServer &turnServer);
+}
+
+// CallManager bridges the local WebRTC session and Matrix signalling:
+// session callbacks (offer/answer/ICE) are translated into m.call.*
+// room events; it also manages ringtones and TURN credential refresh.
+CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
+  : QObject()
+  , session_(WebRTCSession::instance())
+  , turnServerTimer_(this)
+  , settings_(userSettings)
+{
+        qRegisterMetaType<std::vector<mtx::events::msg::CallCandidates::Candidate>>();
+        qRegisterMetaType<mtx::events::msg::CallCandidates::Candidate>();
+        qRegisterMetaType<mtx::responses::TurnServer>();
+
+        // Outgoing call: when the local SDP offer is ready, send the invite
+        // plus gathered candidates, then hang up automatically if the
+        // remote side has not answered within timeoutms_.
+        connect(
+          &session_,
+          &WebRTCSession::offerCreated,
+          this,
+          [this](const std::string &sdp, const std::vector<CallCandidates::Candidate> &candidates) {
+                  nhlog::ui()->debug("WebRTC: call id: {} - sending offer", callid_);
+                  emit newMessage(roomid_, CallInvite{callid_, sdp, 0, timeoutms_});
+                  emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
+                  QTimer::singleShot(timeoutms_, this, [this]() {
+                          if (session_.state() == WebRTCSession::State::OFFERSENT) {
+                                  hangUp(CallHangUp::Reason::InviteTimeOut);
+                                  emit ChatPage::instance()->showNotification(
+                                    "The remote side failed to pick up.");
+                          }
+                  });
+          });
+
+        // Incoming call accepted locally: send the SDP answer and our
+        // candidates.
+        connect(
+          &session_,
+          &WebRTCSession::answerCreated,
+          this,
+          [this](const std::string &sdp, const std::vector<CallCandidates::Candidate> &candidates) {
+                  nhlog::ui()->debug("WebRTC: call id: {} - sending answer", callid_);
+                  emit newMessage(roomid_, CallAnswer{callid_, sdp, 0});
+                  emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
+          });
+
+        // Trickle ICE: forward each locally gathered candidate as it appears.
+        connect(&session_,
+                &WebRTCSession::newICECandidate,
+                this,
+                [this](const CallCandidates::Candidate &candidate) {
+                        nhlog::ui()->debug("WebRTC: call id: {} - sending ice candidate", callid_);
+                        emit newMessage(roomid_, CallCandidates{callid_, {candidate}, 0});
+                });
+
+        connect(&turnServerTimer_, &QTimer::timeout, this, &CallManager::retrieveTurnServer);
+
+        connect(this,
+                &CallManager::turnServerRetrieved,
+                this,
+                [this](const mtx::responses::TurnServer &res) {
+                        nhlog::net()->info("TURN server(s) retrieved from homeserver:");
+                        nhlog::net()->info("username: {}", res.username);
+                        nhlog::net()->info("ttl: {} seconds", res.ttl);
+                        for (const auto &u : res.uris)
+                                nhlog::net()->info("uri: {}", u);
+
+                        // Request new credentials close to expiry
+                        // See https://tools.ietf.org/html/draft-uberti-behave-turn-rest-00
+                        turnURIs_    = getTurnURIs(res);
+                        // Enforce a minimum ttl of one hour and refresh at
+                        // 90% of the ttl so credentials never lapse mid-call.
+                        uint32_t ttl = std::max(res.ttl, UINT32_C(3600));
+                        if (res.ttl < 3600)
+                                nhlog::net()->warn("Setting ttl to 1 hour");
+                        turnServerTimer_.setInterval(ttl * 1000 * 0.9);
+                });
+
+        // Call teardown: play the end-of-call sound on disconnect and
+        // notify/hang up when ICE negotiation fails.
+        connect(&session_, &WebRTCSession::stateChanged, this, [this](WebRTCSession::State state) {
+                switch (state) {
+                case WebRTCSession::State::DISCONNECTED:
+                        playRingtone("qrc:/media/media/callend.ogg", false);
+                        clear();
+                        break;
+                case WebRTCSession::State::ICEFAILED: {
+                        QString error("Call connection failed.");
+                        if (turnURIs_.empty())
+                                error += " Your homeserver has no configured TURN server.";
+                        emit ChatPage::instance()->showNotification(error);
+                        hangUp(CallHangUp::Reason::ICEFailed);
+                        break;
+                }
+                default:
+                        break;
+                }
+        });
+
+        // Ringtones queued by playRingtone() start playing here once the
+        // media has finished loading.
+        connect(&player_,
+                &QMediaPlayer::mediaStatusChanged,
+                this,
+                [this](QMediaPlayer::MediaStatus status) {
+                        if (status == QMediaPlayer::LoadedMedia)
+                                player_.play();
+                });
+}
+
+// Start an outgoing voice call in the given room. Only 1:1 rooms are
+// supported. The m.call.invite itself is sent from the offerCreated
+// handler once the local SDP offer is ready.
+void
+CallManager::sendInvite(const QString &roomid)
+{
+        if (onActiveCall())
+                return;
+
+        auto roomInfo = cache::singleRoomInfo(roomid.toStdString());
+        if (roomInfo.member_count != 2) {
+                emit ChatPage::instance()->showNotification(
+                  "Voice calls are limited to 1:1 rooms.");
+                return;
+        }
+
+        std::string errorMessage;
+        if (!session_.init(&errorMessage)) {
+                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
+                return;
+        }
+
+        roomid_ = roomid;
+        session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
+        session_.setTurnServers(turnURIs_);
+
+        generateCallID();
+        nhlog::ui()->debug("WebRTC: call id: {} - creating invite", callid_);
+        // In a 1:1 room the callee is whichever member is not us.
+        std::vector<RoomMember> members(cache::getMembers(roomid.toStdString()));
+        const RoomMember &callee =
+          members.front().user_id == utils::localUser() ? members.back() : members.front();
+        emit newCallParty(callee.user_id,
+                          callee.display_name,
+                          QString::fromStdString(roomInfo.name),
+                          QString::fromStdString(roomInfo.avatar_url));
+        // Ring-back tone loops until the call is answered or times out.
+        playRingtone("qrc:/media/media/ringback.ogg", true);
+        if (!session_.createOffer()) {
+                emit ChatPage::instance()->showNotification("Problem setting up call.");
+                endCall();
+        }
+}
+
+namespace {
+// Human-readable hang-up reason, used only for logging.
+std::string
+callHangUpReasonString(CallHangUp::Reason reason)
+{
+        switch (reason) {
+        case CallHangUp::Reason::ICEFailed:
+                return "ICE failed";
+        case CallHangUp::Reason::InviteTimeOut:
+                return "Invite time out";
+        default:
+                return "User";
+        }
+}
+}
+
+// Send an m.call.hangup for the current call (if any) with the given
+// reason, then tear down local state and the session.
+void
+CallManager::hangUp(CallHangUp::Reason reason)
+{
+        if (!callid_.empty()) {
+                nhlog::ui()->debug(
+                  "WebRTC: call id: {} - hanging up ({})", callid_, callHangUpReasonString(reason));
+                emit newMessage(roomid_, CallHangUp{callid_, 0, reason});
+                endCall();
+        }
+}
+
+// A call counts as active in any session state other than DISCONNECTED
+// (i.e. from INITIATING onwards).
+bool
+CallManager::onActiveCall()
+{
+        return session_.state() != WebRTCSession::State::DISCONNECTED;
+}
+
+// Entry point for timeline events from sync: route any m.call.* event
+// to its handler. VoIP support is compiled out without GStreamer.
+void
+CallManager::syncEvent(const mtx::events::collections::TimelineEvents &event)
+{
+#ifdef GSTREAMER_AVAILABLE
+        if (handleEvent_<CallInvite>(event) || handleEvent_<CallCandidates>(event) ||
+            handleEvent_<CallAnswer>(event) || handleEvent_<CallHangUp>(event))
+                return;
+#else
+        (void)event;
+#endif
+}
+
+// Call handleEvent(RoomEvent<T>) if the variant holds that event type;
+// returns true when the event was consumed.
+template<typename T>
+bool
+CallManager::handleEvent_(const mtx::events::collections::TimelineEvents &event)
+{
+        if (std::holds_alternative<RoomEvent<T>>(event)) {
+                handleEvent(std::get<RoomEvent<T>>(event));
+                return true;
+        }
+        return false;
+}
+
+// Handle an incoming m.call.invite. Video calls, group-room calls and
+// invites received while already on a call are rejected with an
+// immediate hang-up; otherwise the user is shown an accept/reject
+// dialog while the ringtone plays.
+void
+CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
+{
+        // Detect a video call by a case-insensitive search for an
+        // "m=video" media line in the offered SDP.
+        const char video[]     = "m=video";
+        const std::string &sdp = callInviteEvent.content.sdp;
+        bool isVideo           = std::search(sdp.cbegin(),
+                                   sdp.cend(),
+                                   std::cbegin(video),
+                                   std::cend(video) - 1,
+                                   [](unsigned char c1, unsigned char c2) {
+                                           return std::tolower(c1) == std::tolower(c2);
+                                   }) != sdp.cend();
+
+        nhlog::ui()->debug("WebRTC: call id: {} - incoming {} CallInvite from {}",
+                           callInviteEvent.content.call_id,
+                           (isVideo ? "video" : "voice"),
+                           callInviteEvent.sender);
+
+        if (callInviteEvent.content.call_id.empty())
+                return;
+
+        // Reject unsupported calls (busy, non-1:1 room, or video).
+        // NOTE(review): the rejection reuses Reason::InviteTimeOut —
+        // confirm this is the intended reason code.
+        auto roomInfo = cache::singleRoomInfo(callInviteEvent.room_id);
+        if (onActiveCall() || roomInfo.member_count != 2 || isVideo) {
+                emit newMessage(QString::fromStdString(callInviteEvent.room_id),
+                                CallHangUp{callInviteEvent.content.call_id,
+                                           0,
+                                           CallHangUp::Reason::InviteTimeOut});
+                return;
+        }
+
+        playRingtone("qrc:/media/media/ring.ogg", true);
+        roomid_ = QString::fromStdString(callInviteEvent.room_id);
+        callid_ = callInviteEvent.content.call_id;
+        // Remote candidates arriving before the user accepts are buffered
+        // in remoteICECandidates_ (see handleEvent(CallCandidates)).
+        remoteICECandidates_.clear();
+
+        // In a 1:1 room the caller is whichever member is not us.
+        std::vector<RoomMember> members(cache::getMembers(callInviteEvent.room_id));
+        const RoomMember &caller =
+          members.front().user_id == utils::localUser() ? members.back() : members.front();
+        emit newCallParty(caller.user_id,
+                          caller.display_name,
+                          QString::fromStdString(roomInfo.name),
+                          QString::fromStdString(roomInfo.avatar_url));
+
+        auto dialog = new dialogs::AcceptCall(caller.user_id,
+                                              caller.display_name,
+                                              QString::fromStdString(roomInfo.name),
+                                              QString::fromStdString(roomInfo.avatar_url),
+                                              settings_,
+                                              MainWindow::instance());
+        connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent]() {
+                MainWindow::instance()->hideOverlay();
+                answerInvite(callInviteEvent.content);
+        });
+        connect(dialog, &dialogs::AcceptCall::reject, this, [this]() {
+                MainWindow::instance()->hideOverlay();
+                hangUp();
+        });
+        MainWindow::instance()->showSolidOverlayModal(dialog);
+}
+
+// Accept a previously received invite: initialise the WebRTC session,
+// apply the remote offer, then feed it the ICE candidates buffered
+// while the accept dialog was open.
+void
+CallManager::answerInvite(const CallInvite &invite)
+{
+        stopRingtone();
+        std::string errorMessage;
+        if (!session_.init(&errorMessage)) {
+                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
+                hangUp();
+                return;
+        }
+
+        session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
+        session_.setTurnServers(turnURIs_);
+
+        if (!session_.acceptOffer(invite.sdp)) {
+                emit ChatPage::instance()->showNotification("Problem setting up call.");
+                hangUp();
+                return;
+        }
+        session_.acceptICECandidates(remoteICECandidates_);
+        remoteICECandidates_.clear();
+}
+
+// Handle incoming m.call.candidates. Our own candidate events echoed
+// back via sync are ignored. Candidates for the current call are applied
+// immediately when on a call, otherwise buffered until the user accepts.
+void
+CallManager::handleEvent(const RoomEvent<CallCandidates> &callCandidatesEvent)
+{
+        if (callCandidatesEvent.sender == utils::localUser().toStdString())
+                return;
+
+        nhlog::ui()->debug("WebRTC: call id: {} - incoming CallCandidates from {}",
+                           callCandidatesEvent.content.call_id,
+                           callCandidatesEvent.sender);
+
+        if (callid_ == callCandidatesEvent.content.call_id) {
+                if (onActiveCall())
+                        session_.acceptICECandidates(callCandidatesEvent.content.candidates);
+                else {
+                        // CallInvite has been received and we're awaiting localUser to accept or
+                        // reject the call
+                        for (const auto &c : callCandidatesEvent.content.candidates)
+                                remoteICECandidates_.push_back(c);
+                }
+        }
+}
+
+// Handle incoming m.call.answer. An answer sent by one of our own other
+// devices means the call was picked up elsewhere; an answer for the
+// active call completes SDP negotiation.
+void
+CallManager::handleEvent(const RoomEvent<CallAnswer> &callAnswerEvent)
+{
+        nhlog::ui()->debug("WebRTC: call id: {} - incoming CallAnswer from {}",
+                           callAnswerEvent.content.call_id,
+                           callAnswerEvent.sender);
+
+        // Not yet on a call and the answer was sent by us: another of our
+        // devices accepted the incoming invite.
+        if (!onActiveCall() && callAnswerEvent.sender == utils::localUser().toStdString() &&
+            callid_ == callAnswerEvent.content.call_id) {
+                emit ChatPage::instance()->showNotification("Call answered on another device.");
+                stopRingtone();
+                MainWindow::instance()->hideOverlay();
+                return;
+        }
+
+        if (onActiveCall() && callid_ == callAnswerEvent.content.call_id) {
+                stopRingtone();
+                if (!session_.acceptAnswer(callAnswerEvent.content.sdp)) {
+                        emit ChatPage::instance()->showNotification("Problem setting up call.");
+                        hangUp();
+                }
+        }
+}
+
+// Handle incoming m.call.hangup for the current call: dismiss any open
+// accept dialog and tear the call down.
+void
+CallManager::handleEvent(const RoomEvent<CallHangUp> &callHangUpEvent)
+{
+        nhlog::ui()->debug("WebRTC: call id: {} - incoming CallHangUp ({}) from {}",
+                           callHangUpEvent.content.call_id,
+                           callHangUpReasonString(callHangUpEvent.content.reason),
+                           callHangUpEvent.sender);
+
+        if (callid_ == callHangUpEvent.content.call_id) {
+                MainWindow::instance()->hideOverlay();
+                endCall();
+        }
+}
+
+// Generate a (sufficiently) unique call id from the current time in
+// milliseconds since the epoch.
+void
+CallManager::generateCallID()
+{
+        using namespace std::chrono;
+        uint64_t ms = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
+        callid_     = "c" + std::to_string(ms);
+}
+
+// Reset per-call state.
+void
+CallManager::clear()
+{
+        roomid_.clear();
+        callid_.clear();
+        remoteICECandidates_.clear();
+}
+
+// Stop any ringtone, reset call state and shut down the WebRTC session.
+void
+CallManager::endCall()
+{
+        stopRingtone();
+        clear();
+        session_.end();
+}
+
+// Kick off TURN credential retrieval shortly after startup; the timer
+// interval is re-tuned from the credential ttl once a response arrives
+// (see the turnServerRetrieved handler in the constructor).
+void
+CallManager::refreshTurnServer()
+{
+        turnURIs_.clear();
+        turnServerTimer_.start(2000);
+}
+
+// Fetch TURN credentials from the homeserver; on failure retry every 5s.
+void
+CallManager::retrieveTurnServer()
+{
+        http::client()->get_turn_server(
+          [this](const mtx::responses::TurnServer &res, mtx::http::RequestErr err) {
+                  if (err) {
+                          turnServerTimer_.setInterval(5000);
+                          return;
+                  }
+                  emit turnServerRetrieved(res);
+          });
+}
+
+// Queue a ringtone for playback, optionally looping. Actual playback
+// starts from the mediaStatusChanged handler once the media is loaded.
+void
+CallManager::playRingtone(const QString &ringtone, bool repeat)
+{
+        // The playlist must outlive this call; the player only references it.
+        static QMediaPlaylist playlist;
+        playlist.clear();
+        playlist.setPlaybackMode(repeat ? QMediaPlaylist::CurrentItemInLoop
+                                        : QMediaPlaylist::CurrentItemOnce);
+        playlist.addMedia(QUrl(ringtone));
+        player_.setVolume(100);
+        player_.setPlaylist(&playlist);
+}
+
+// Stop ringtone playback by detaching the playlist from the player.
+void
+CallManager::stopRingtone()
+{
+        player_.setPlaylist(nullptr);
+}
+
+namespace {
+// Convert the homeserver's TURN response into gstreamer-style URIs.
+std::vector<std::string>
+getTurnURIs(const mtx::responses::TurnServer &turnServer)
+{
+        // gstreamer expects: turn(s)://username:password@host:port?transport=udp(tcp)
+        // where username and password are percent-encoded
+        std::vector<std::string> ret;
+        for (const auto &uri : turnServer.uris) {
+                if (auto c = uri.find(':'); c == std::string::npos) {
+                        nhlog::ui()->error("Invalid TURN server uri: {}", uri);
+                        continue;
+                } else {
+                        // Only turn:// and turns:// schemes are accepted.
+                        std::string scheme = std::string(uri, 0, c);
+                        if (scheme != "turn" && scheme != "turns") {
+                                nhlog::ui()->error("Invalid TURN server uri: {}", uri);
+                                continue;
+                        }
+
+                        // Splice the encoded credentials in front of the host
+                        // part (everything after the first ':').
+                        QString encodedUri =
+                          QString::fromStdString(scheme) + "://" +
+                          QUrl::toPercentEncoding(QString::fromStdString(turnServer.username)) +
+                          ":" +
+                          QUrl::toPercentEncoding(QString::fromStdString(turnServer.password)) +
+                          "@" + QString::fromStdString(std::string(uri, ++c));
+                        ret.push_back(encodedUri.toStdString());
+                }
+        }
+        return ret;
+}
+}
diff --git a/src/CallManager.h b/src/CallManager.h
new file mode 100644
index 00000000..3a406438
--- /dev/null
+++ b/src/CallManager.h
@@ -0,0 +1,75 @@
+#pragma once
+
+#include <string>
+#include <vector>
+
+#include <QMediaPlayer>
+#include <QObject>
+#include <QSharedPointer>
+#include <QString>
+#include <QTimer>
+
+#include "mtx/events/collections.hpp"
+#include "mtx/events/voip.hpp"
+
+namespace mtx::responses {
+struct TurnServer;
+}
+
+class UserSettings;
+class WebRTCSession;
+
+// Coordinates Matrix VoIP signalling (m.call.* events) with the local
+// WebRTCSession singleton; see CallManager.cpp for the wiring.
+class CallManager : public QObject
+{
+        Q_OBJECT
+
+public:
+        CallManager(QSharedPointer<UserSettings>);
+
+        // Start an outgoing voice call in a 1:1 room.
+        void sendInvite(const QString &roomid);
+        // Send m.call.hangup for the current call and tear down state.
+        void hangUp(
+          mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User);
+        // True in any session state other than DISCONNECTED.
+        bool onActiveCall();
+        // Trigger (re)retrieval of TURN credentials from the homeserver.
+        void refreshTurnServer();
+
+public slots:
+        // Dispatch m.call.* timeline events from sync to the handlers.
+        void syncEvent(const mtx::events::collections::TimelineEvents &event);
+
+signals:
+        // Emitted for each outgoing signalling event to be sent to the room.
+        void newMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
+        void newMessage(const QString &roomid, const mtx::events::msg::CallCandidates &);
+        void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
+        void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
+        void turnServerRetrieved(const mtx::responses::TurnServer &);
+        // Announces the remote party so the UI (ActiveCallBar) can update.
+        void newCallParty(const QString &userid,
+                          const QString &displayName,
+                          const QString &roomName,
+                          const QString &avatarUrl);
+
+private slots:
+        void retrieveTurnServer();
+
+private:
+        WebRTCSession &session_;
+        QString roomid_;     // room of the current call
+        std::string callid_; // empty when no call is in progress
+        const uint32_t timeoutms_ = 120000; // invite pick-up timeout (ms)
+        // Candidates received before the user accepts an incoming call.
+        std::vector<mtx::events::msg::CallCandidates::Candidate> remoteICECandidates_;
+        std::vector<std::string> turnURIs_;
+        QTimer turnServerTimer_; // periodic TURN credential refresh
+        QSharedPointer<UserSettings> settings_;
+        QMediaPlayer player_; // ringtone playback
+
+        template<typename T>
+        bool handleEvent_(const mtx::events::collections::TimelineEvents &event);
+        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &);
+        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates> &);
+        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &);
+        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &);
+        void answerInvite(const mtx::events::msg::CallInvite &);
+        void generateCallID();
+        void clear();
+        void endCall();
+        void playRingtone(const QString &ringtone, bool repeat);
+        void stopRingtone();
+};
diff --git a/src/ChatPage.cpp b/src/ChatPage.cpp
index 63d13fb9..e55b3eca 100644
--- a/src/ChatPage.cpp
+++ b/src/ChatPage.cpp
@@ -22,6 +22,7 @@
 #include <QShortcut>
 #include <QtConcurrent>
 
+#include "ActiveCallBar.h"
 #include "AvatarProvider.h"
 #include "Cache.h"
 #include "Cache_p.h"
@@ -40,11 +41,13 @@
 #include "UserInfoWidget.h"
 #include "UserSettingsPage.h"
 #include "Utils.h"
+#include "WebRTCSession.h"
 #include "ui/OverlayModal.h"
 #include "ui/Theme.h"
 
 #include "notifications/Manager.h"
 
+#include "dialogs/PlaceCall.h"
 #include "dialogs/ReadReceipts.h"
 #include "popups/UserMentions.h"
 #include "timeline/TimelineViewManager.h"
@@ -68,6 +71,7 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
   , isConnected_(true)
   , userSettings_{userSettings}
   , notificationsManager(this)
+  , callManager_(userSettings)
 {
         setObjectName("chatPage");
 
@@ -123,11 +127,17 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
         contentLayout_->setMargin(0);
 
         top_bar_      = new TopRoomBar(this);
-        view_manager_ = new TimelineViewManager(userSettings_, this);
+        view_manager_ = new TimelineViewManager(userSettings_, &callManager_, this);
 
         contentLayout_->addWidget(top_bar_);
         contentLayout_->addWidget(view_manager_->getWidget());
 
+        activeCallBar_ = new ActiveCallBar(this);
+        contentLayout_->addWidget(activeCallBar_);
+        activeCallBar_->hide();
+        connect(
+          &callManager_, &CallManager::newCallParty, activeCallBar_, &ActiveCallBar::setCallParty);
+
         // Splitter
         splitter->addWidget(sideBar_);
         splitter->addWidget(content_);
@@ -448,6 +458,35 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
                                   roomid, filename, encryptedFile, url, mime, dsize);
                 });
 
+        connect(text_input_, &TextInputWidget::callButtonPress, this, [this]() {
+                if (callManager_.onActiveCall()) {
+                        callManager_.hangUp();
+                } else {
+                        if (auto roomInfo = cache::singleRoomInfo(current_room_.toStdString());
+                            roomInfo.member_count != 2) {
+                                showNotification("Voice calls are limited to 1:1 rooms.");
+                        } else {
+                                std::vector<RoomMember> members(
+                                  cache::getMembers(current_room_.toStdString()));
+                                const RoomMember &callee =
+                                  members.front().user_id == utils::localUser() ? members.back()
+                                                                                : members.front();
+                                auto dialog = new dialogs::PlaceCall(
+                                  callee.user_id,
+                                  callee.display_name,
+                                  QString::fromStdString(roomInfo.name),
+                                  QString::fromStdString(roomInfo.avatar_url),
+                                  userSettings_,
+                                  MainWindow::instance());
+                                connect(dialog, &dialogs::PlaceCall::voice, this, [this]() {
+                                        callManager_.sendInvite(current_room_);
+                                });
+                                utils::centerWidget(dialog, MainWindow::instance());
+                                dialog->show();
+                        }
+                }
+        });
+
         connect(room_list_, &RoomList::roomAvatarChanged, this, &ChatPage::updateTopBarAvatar);
 
         connect(
@@ -581,6 +620,11 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
 
         connect(this, &ChatPage::dropToLoginPageCb, this, &ChatPage::dropToLoginPage);
 
+        connectCallMessage<mtx::events::msg::CallInvite>();
+        connectCallMessage<mtx::events::msg::CallCandidates>();
+        connectCallMessage<mtx::events::msg::CallAnswer>();
+        connectCallMessage<mtx::events::msg::CallHangUp>();
+
         instance_ = this;
 }
 
@@ -683,6 +727,8 @@ ChatPage::bootstrap(QString userid, QString homeserver, QString token)
                 const bool isInitialized = cache::isInitialized();
                 const auto cacheVersion  = cache::formatVersion();
 
+                callManager_.refreshTurnServer();
+
                 if (!isInitialized) {
                         cache::setCurrentFormat();
                 } else {
@@ -1165,11 +1211,19 @@ ChatPage::leaveRoom(const QString &room_id)
 void
 ChatPage::inviteUser(QString userid, QString reason)
 {
+        auto room = current_room_;
+
+        if (QMessageBox::question(this,
+                                  tr("Confirm invite"),
+                                  tr("Do you really want to invite %1 (%2)?")
+                                    .arg(cache::displayName(current_room_, userid))
+                                    .arg(userid)) != QMessageBox::Yes)
+                return;
+
         http::client()->invite_user(
-          current_room_.toStdString(),
+          room.toStdString(),
           userid.toStdString(),
-          [this, userid, room = current_room_](const mtx::responses::Empty &,
-                                               mtx::http::RequestErr err) {
+          [this, userid, room](const mtx::responses::Empty &, mtx::http::RequestErr err) {
                   if (err) {
                           emit showNotification(
                             tr("Failed to invite %1 to %2: %3")
@@ -1184,11 +1238,19 @@ ChatPage::inviteUser(QString userid, QString reason)
 void
 ChatPage::kickUser(QString userid, QString reason)
 {
+        auto room = current_room_;
+
+        if (QMessageBox::question(this,
+                                  tr("Confirm kick"),
+                                  tr("Do you really want to kick %1 (%2)?")
+                                    .arg(cache::displayName(current_room_, userid))
+                                    .arg(userid)) != QMessageBox::Yes)
+                return;
+
         http::client()->kick_user(
-          current_room_.toStdString(),
+          room.toStdString(),
           userid.toStdString(),
-          [this, userid, room = current_room_](const mtx::responses::Empty &,
-                                               mtx::http::RequestErr err) {
+          [this, userid, room](const mtx::responses::Empty &, mtx::http::RequestErr err) {
                   if (err) {
                           emit showNotification(
                             tr("Failed to kick %1 to %2: %3")
@@ -1203,11 +1265,19 @@ ChatPage::kickUser(QString userid, QString reason)
 void
 ChatPage::banUser(QString userid, QString reason)
 {
+        auto room = current_room_;
+
+        if (QMessageBox::question(this,
+                                  tr("Confirm ban"),
+                                  tr("Do you really want to ban %1 (%2)?")
+                                    .arg(cache::displayName(current_room_, userid))
+                                    .arg(userid)) != QMessageBox::Yes)
+                return;
+
         http::client()->ban_user(
-          current_room_.toStdString(),
+          room.toStdString(),
           userid.toStdString(),
-          [this, userid, room = current_room_](const mtx::responses::Empty &,
-                                               mtx::http::RequestErr err) {
+          [this, userid, room](const mtx::responses::Empty &, mtx::http::RequestErr err) {
                   if (err) {
                           emit showNotification(
                             tr("Failed to ban %1 in %2: %3")
@@ -1222,11 +1292,19 @@ ChatPage::banUser(QString userid, QString reason)
 void
 ChatPage::unbanUser(QString userid, QString reason)
 {
+        auto room = current_room_;
+
+        if (QMessageBox::question(this,
+                                  tr("Confirm unban"),
+                                  tr("Do you really want to unban %1 (%2)?")
+                                    .arg(cache::displayName(current_room_, userid))
+                                    .arg(userid)) != QMessageBox::Yes)
+                return;
+
         http::client()->unban_user(
-          current_room_.toStdString(),
+          room.toStdString(),
           userid.toStdString(),
-          [this, userid, room = current_room_](const mtx::responses::Empty &,
-                                               mtx::http::RequestErr err) {
+          [this, userid, room](const mtx::responses::Empty &, mtx::http::RequestErr err) {
                   if (err) {
                           emit showNotification(
                             tr("Failed to unban %1 in %2: %3")
@@ -1451,3 +1529,13 @@ ChatPage::initiateLogout()
 
         emit showOverlayProgressBar();
 }
+
+template<typename T>
+void
+ChatPage::connectCallMessage()
+{
+        connect(&callManager_,
+                qOverload<const QString &, const T &>(&CallManager::newMessage),
+                view_manager_,
+                qOverload<const QString &, const T &>(&TimelineViewManager::queueCallMessage));
+}
diff --git a/src/ChatPage.h b/src/ChatPage.h
index 18bed289..ba1c56d1 100644
--- a/src/ChatPage.h
+++ b/src/ChatPage.h
@@ -35,11 +35,13 @@
 #include <QWidget>
 
 #include "CacheStructs.h"
+#include "CallManager.h"
 #include "CommunitiesList.h"
 #include "Utils.h"
 #include "notifications/Manager.h"
 #include "popups/UserMentions.h"
 
+class ActiveCallBar;
 class OverlayModal;
 class QuickSwitcher;
 class RoomList;
@@ -50,7 +52,6 @@ class TimelineViewManager;
 class TopRoomBar;
 class UserInfoWidget;
 class UserSettings;
-class NotificationsManager;
 
 constexpr int CONSENSUS_TIMEOUT      = 1000;
 constexpr int SHOW_CONTENT_TIMEOUT   = 3000;
@@ -218,6 +219,9 @@ private:
 
         void showNotificationsDialog(const QPoint &point);
 
+        template<typename T>
+        void connectCallMessage();
+
         QHBoxLayout *topLayout_;
         Splitter *splitter;
 
@@ -237,6 +241,7 @@ private:
 
         TopRoomBar *top_bar_;
         TextInputWidget *text_input_;
+        ActiveCallBar *activeCallBar_;
 
         QTimer connectivityTimer_;
         std::atomic_bool isConnected_;
@@ -254,6 +259,7 @@ private:
         QSharedPointer<UserSettings> userSettings_;
 
         NotificationsManager notificationsManager;
+        CallManager callManager_;
 };
 
 template<class Collection>
diff --git a/src/Config.h b/src/Config.h
index f99cf36b..c0624709 100644
--- a/src/Config.h
+++ b/src/Config.h
@@ -53,9 +53,9 @@ namespace strings {
 const QString url_html = "<a href=\"\\1\">\\1</a>";
 const QRegularExpression url_regex(
   // match an URL, that is not quoted, i.e.
-  // vvvvvv match quote via negative lookahead/lookbehind                    vv
-  //       vvvv atomic match url -> fail if there is a " before or after          vvv
-  R"((?<!")(?>((www\.(?!\.)|[a-z][a-z0-9+.-]*://)[^\s<>'"]+[^!,\.\s<>'"\]\)\:]))(?!"))");
+  // vvvvvv match quote via negative lookahead/lookbehind                              vv
+  //          vvvv atomic match url -> fail if there is a " before or after        vvv
+  R"((?<!["'])(?>((www\.(?!\.)|[a-z][a-z0-9+.-]*://)[^\s<>'"]+[^!,\.\s<>'"\]\)\:]))(?!["']))");
 }
 
 // Window geometry.
diff --git a/src/EventAccessors.cpp b/src/EventAccessors.cpp
index 0618206c..88612b14 100644
--- a/src/EventAccessors.cpp
+++ b/src/EventAccessors.cpp
@@ -1,5 +1,7 @@
 #include "EventAccessors.h"
 
+#include <algorithm>
+#include <cctype>
 #include <type_traits>
 
 namespace {
@@ -65,6 +67,29 @@ struct EventRoomTopic
         }
 };
 
+struct CallType
+{
+        template<class T>
+        std::string operator()(const T &e)
+        {
+                if constexpr (std::is_same_v<mtx::events::RoomEvent<mtx::events::msg::CallInvite>,
+                                             T>) {
+                        const char video[]     = "m=video";
+                        const std::string &sdp = e.content.sdp;
+                        return std::search(sdp.cbegin(),
+                                           sdp.cend(),
+                                           std::cbegin(video),
+                                           std::cend(video) - 1,
+                                           [](unsigned char c1, unsigned char c2) {
+                                                   return std::tolower(c1) == std::tolower(c2);
+                                           }) != sdp.cend()
+                                 ? "video"
+                                 : "voice";
+                }
+                return std::string();
+        }
+};
+
 struct EventBody
 {
         template<class C>
@@ -340,6 +365,12 @@ mtx::accessors::room_topic(const mtx::events::collections::TimelineEvents &event
 }
 
 std::string
+mtx::accessors::call_type(const mtx::events::collections::TimelineEvents &event)
+{
+        return std::visit(CallType{}, event);
+}
+
+std::string
 mtx::accessors::body(const mtx::events::collections::TimelineEvents &event)
 {
         return std::visit(EventBody{}, event);
diff --git a/src/EventAccessors.h b/src/EventAccessors.h
index 8f08ef1c..0cdc5f89 100644
--- a/src/EventAccessors.h
+++ b/src/EventAccessors.h
@@ -31,6 +31,9 @@ std::string
 room_topic(const mtx::events::collections::TimelineEvents &event);
 
 std::string
+call_type(const mtx::events::collections::TimelineEvents &event);
+
+std::string
 body(const mtx::events::collections::TimelineEvents &event);
 
 std::string
diff --git a/src/MainWindow.cpp b/src/MainWindow.cpp
index cc1d868b..4dab3d26 100644
--- a/src/MainWindow.cpp
+++ b/src/MainWindow.cpp
@@ -17,6 +17,7 @@
 
 #include <QApplication>
 #include <QLayout>
+#include <QMessageBox>
 #include <QPluginLoader>
 #include <QSettings>
 #include <QShortcut>
@@ -35,6 +36,7 @@
 #include "TrayIcon.h"
 #include "UserSettingsPage.h"
 #include "Utils.h"
+#include "WebRTCSession.h"
 #include "WelcomePage.h"
 #include "ui/LoadingIndicator.h"
 #include "ui/OverlayModal.h"
@@ -285,6 +287,14 @@ MainWindow::showChatPage()
 void
 MainWindow::closeEvent(QCloseEvent *event)
 {
+        if (WebRTCSession::instance().state() != WebRTCSession::State::DISCONNECTED) {
+                if (QMessageBox::question(this, "nheko", "A call is in progress. Quit?") !=
+                    QMessageBox::Yes) {
+                        event->ignore();
+                        return;
+                }
+        }
+
         if (!qApp->isSavingSession() && isVisible() && pageSupportsTray() &&
             userSettings_->tray()) {
                 event->ignore();
@@ -433,8 +443,17 @@ void
 MainWindow::openLogoutDialog()
 {
         auto dialog = new dialogs::Logout(this);
-        connect(
-          dialog, &dialogs::Logout::loggingOut, this, [this]() { chat_page_->initiateLogout(); });
+        connect(dialog, &dialogs::Logout::loggingOut, this, [this]() {
+                if (WebRTCSession::instance().state() != WebRTCSession::State::DISCONNECTED) {
+                        if (QMessageBox::question(
+                              this, "nheko", "A call is in progress. Log out?") !=
+                            QMessageBox::Yes) {
+                                return;
+                        }
+                        WebRTCSession::instance().end();
+                }
+                chat_page_->initiateLogout();
+        });
 
         showDialog(dialog);
 }
diff --git a/src/TextInputWidget.cpp b/src/TextInputWidget.cpp
index 91846230..0a88c230 100644
--- a/src/TextInputWidget.cpp
+++ b/src/TextInputWidget.cpp
@@ -453,6 +453,15 @@ TextInputWidget::TextInputWidget(QWidget *parent)
         topLayout_->setSpacing(0);
         topLayout_->setContentsMargins(13, 1, 13, 0);
 
+#ifdef GSTREAMER_AVAILABLE
+        callBtn_ = new FlatButton(this);
+        changeCallButtonState(WebRTCSession::State::DISCONNECTED);
+        connect(&WebRTCSession::instance(),
+                &WebRTCSession::stateChanged,
+                this,
+                &TextInputWidget::changeCallButtonState);
+#endif
+
         QIcon send_file_icon;
         send_file_icon.addFile(":/icons/icons/ui/paper-clip-outline.png");
 
@@ -521,6 +530,9 @@ TextInputWidget::TextInputWidget(QWidget *parent)
         emojiBtn_->setIcon(emoji_icon);
         emojiBtn_->setIconSize(QSize(ButtonHeight, ButtonHeight));
 
+#ifdef GSTREAMER_AVAILABLE
+        topLayout_->addWidget(callBtn_);
+#endif
         topLayout_->addWidget(sendFileBtn_);
         topLayout_->addWidget(input_);
         topLayout_->addWidget(emojiBtn_);
@@ -528,6 +540,9 @@ TextInputWidget::TextInputWidget(QWidget *parent)
 
         setLayout(topLayout_);
 
+#ifdef GSTREAMER_AVAILABLE
+        connect(callBtn_, &FlatButton::clicked, this, &TextInputWidget::callButtonPress);
+#endif
         connect(sendMessageBtn_, &FlatButton::clicked, input_, &FilteredTextEdit::submit);
         connect(sendFileBtn_, SIGNAL(clicked()), this, SLOT(openFileSelection()));
         connect(input_, &FilteredTextEdit::message, this, &TextInputWidget::sendTextMessage);
@@ -654,3 +669,19 @@ TextInputWidget::paintEvent(QPaintEvent *)
 
         style()->drawPrimitive(QStyle::PE_Widget, &opt, &p, this);
 }
+
+void
+TextInputWidget::changeCallButtonState(WebRTCSession::State state)
+{
+        QIcon icon;
+        if (state == WebRTCSession::State::ICEFAILED ||
+            state == WebRTCSession::State::DISCONNECTED) {
+                callBtn_->setToolTip(tr("Place a call"));
+                icon.addFile(":/icons/icons/ui/place-call.png");
+        } else {
+                callBtn_->setToolTip(tr("Hang up"));
+                icon.addFile(":/icons/icons/ui/end-call.png");
+        }
+        callBtn_->setIcon(icon);
+        callBtn_->setIconSize(QSize(ButtonHeight * 1.1, ButtonHeight * 1.1));
+}
diff --git a/src/TextInputWidget.h b/src/TextInputWidget.h
index cbb6ea95..2473c13a 100644
--- a/src/TextInputWidget.h
+++ b/src/TextInputWidget.h
@@ -26,6 +26,7 @@
 #include <QTextEdit>
 #include <QWidget>
 
+#include "WebRTCSession.h"
 #include "dialogs/PreviewUploadOverlay.h"
 #include "emoji/PickButton.h"
 #include "popups/SuggestionsPopup.h"
@@ -149,6 +150,7 @@ public slots:
         void openFileSelection();
         void hideUploadSpinner();
         void focusLineEdit() { input_->setFocus(); }
+        void changeCallButtonState(WebRTCSession::State);
 
 private slots:
         void addSelectedEmoji(const QString &emoji);
@@ -162,6 +164,7 @@ signals:
         void uploadMedia(const QSharedPointer<QIODevice> data,
                          QString mimeClass,
                          const QString &filename);
+        void callButtonPress();
 
         void sendJoinRoomRequest(const QString &room);
         void sendInviteRoomRequest(const QString &userid, const QString &reason);
@@ -186,6 +189,7 @@ private:
 
         LoadingIndicator *spinner_;
 
+        FlatButton *callBtn_;
         FlatButton *sendFileBtn_;
         FlatButton *sendMessageBtn_;
         emoji::PickButton *emojiBtn_;
diff --git a/src/UserSettingsPage.cpp b/src/UserSettingsPage.cpp
index 05ff6d38..ab5658a4 100644
--- a/src/UserSettingsPage.cpp
+++ b/src/UserSettingsPage.cpp
@@ -77,6 +77,8 @@ UserSettings::load()
         presence_ =
           settings.value("user/presence", QVariant::fromValue(Presence::AutomaticPresence))
             .value<Presence>();
+        useStunServer_      = settings.value("user/use_stun_server", false).toBool();
+        defaultAudioSource_ = settings.value("user/default_audio_source", QString()).toString();
 
         applyTheme();
 }
@@ -280,6 +282,26 @@ UserSettings::setTheme(QString theme)
 }
 
 void
+UserSettings::setUseStunServer(bool useStunServer)
+{
+        if (useStunServer == useStunServer_)
+                return;
+        useStunServer_ = useStunServer;
+        emit useStunServerChanged(useStunServer);
+        save();
+}
+
+void
+UserSettings::setDefaultAudioSource(const QString &defaultAudioSource)
+{
+        if (defaultAudioSource == defaultAudioSource_)
+                return;
+        defaultAudioSource_ = defaultAudioSource;
+        emit defaultAudioSourceChanged(defaultAudioSource);
+        save();
+}
+
+void
 UserSettings::applyTheme()
 {
         QFile stylefile;
@@ -364,6 +386,8 @@ UserSettings::save()
         settings.setValue("font_family", font_);
         settings.setValue("emoji_font_family", emojiFont_);
         settings.setValue("presence", QVariant::fromValue(presence_));
+        settings.setValue("use_stun_server", useStunServer_);
+        settings.setValue("default_audio_source", defaultAudioSource_);
 
         settings.endGroup();
 
@@ -429,6 +453,7 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
         markdown_                 = new Toggle{this};
         desktopNotifications_     = new Toggle{this};
         alertOnNotification_      = new Toggle{this};
+        useStunServer_            = new Toggle{this};
         scaleFactorCombo_         = new QComboBox{this};
         fontSizeCombo_            = new QComboBox{this};
         fontSelectionCombo_       = new QComboBox{this};
@@ -482,6 +507,15 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
         timelineMaxWidthSpin_->setMaximum(100'000'000);
         timelineMaxWidthSpin_->setSingleStep(10);
 
+        auto callsLabel = new QLabel{tr("CALLS"), this};
+        callsLabel->setFixedHeight(callsLabel->minimumHeight() + LayoutTopMargin);
+        callsLabel->setAlignment(Qt::AlignBottom);
+        callsLabel->setFont(font);
+        useStunServer_ = new Toggle{this};
+
+        defaultAudioSourceValue_ = new QLabel(this);
+        defaultAudioSourceValue_->setFont(font);
+
         auto encryptionLabel_ = new QLabel{tr("ENCRYPTION"), this};
         encryptionLabel_->setFixedHeight(encryptionLabel_->minimumHeight() + LayoutTopMargin);
         encryptionLabel_->setAlignment(Qt::AlignBottom);
@@ -612,6 +646,14 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
 #endif
 
         boxWrap(tr("Theme"), themeCombo_);
+
+        formLayout_->addRow(callsLabel);
+        formLayout_->addRow(new HorizontalLine{this});
+        boxWrap(tr("Allow fallback call assist server"),
+                useStunServer_,
+                tr("Will use turn.matrix.org as assist when your home server does not offer one."));
+        boxWrap(tr("Default audio source device"), defaultAudioSourceValue_);
+
         formLayout_->addRow(encryptionLabel_);
         formLayout_->addRow(new HorizontalLine{this});
         boxWrap(tr("Device ID"), deviceIdValue_);
@@ -724,6 +766,10 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
                 settings_->setEnlargeEmojiOnlyMessages(!disabled);
         });
 
+        connect(useStunServer_, &Toggle::toggled, this, [this](bool disabled) {
+                settings_->setUseStunServer(!disabled);
+        });
+
         connect(timelineMaxWidthSpin_,
                 qOverload<int>(&QSpinBox::valueChanged),
                 this,
@@ -766,6 +812,8 @@ UserSettingsPage::showEvent(QShowEvent *)
         enlargeEmojiOnlyMessages_->setState(!settings_->enlargeEmojiOnlyMessages());
         deviceIdValue_->setText(QString::fromStdString(http::client()->device_id()));
         timelineMaxWidthSpin_->setValue(settings_->timelineMaxWidth());
+        useStunServer_->setState(!settings_->useStunServer());
+        defaultAudioSourceValue_->setText(settings_->defaultAudioSource());
 
         deviceFingerprintValue_->setText(
           utils::humanReadableFingerprint(olm::client()->identity_keys().ed25519));
diff --git a/src/UserSettingsPage.h b/src/UserSettingsPage.h
index d2a1c641..52ff9466 100644
--- a/src/UserSettingsPage.h
+++ b/src/UserSettingsPage.h
@@ -71,6 +71,10 @@ class UserSettings : public QObject
         Q_PROPERTY(
           QString emojiFont READ emojiFont WRITE setEmojiFontFamily NOTIFY emojiFontChanged)
         Q_PROPERTY(Presence presence READ presence WRITE setPresence NOTIFY presenceChanged)
+        Q_PROPERTY(
+          bool useStunServer READ useStunServer WRITE setUseStunServer NOTIFY useStunServerChanged)
+        Q_PROPERTY(QString defaultAudioSource READ defaultAudioSource WRITE setDefaultAudioSource
+                     NOTIFY defaultAudioSourceChanged)
 
 public:
         UserSettings();
@@ -107,6 +111,8 @@ public:
         void setAvatarCircles(bool state);
         void setDecryptSidebar(bool state);
         void setPresence(Presence state);
+        void setUseStunServer(bool state);
+        void setDefaultAudioSource(const QString &deviceName);
 
         QString theme() const { return !theme_.isEmpty() ? theme_ : defaultTheme_; }
         bool messageHoverHighlight() const { return messageHoverHighlight_; }
@@ -132,6 +138,8 @@ public:
         QString font() const { return font_; }
         QString emojiFont() const { return emojiFont_; }
         Presence presence() const { return presence_; }
+        bool useStunServer() const { return useStunServer_; }
+        QString defaultAudioSource() const { return defaultAudioSource_; }
 
 signals:
         void groupViewStateChanged(bool state);
@@ -154,6 +162,8 @@ signals:
         void fontChanged(QString state);
         void emojiFontChanged(QString state);
         void presenceChanged(Presence state);
+        void useStunServerChanged(bool state);
+        void defaultAudioSourceChanged(const QString &deviceName);
 
 private:
         // Default to system theme if QT_QPA_PLATFORMTHEME var is set.
@@ -181,6 +191,8 @@ private:
         QString font_;
         QString emojiFont_;
         Presence presence_;
+        bool useStunServer_;
+        QString defaultAudioSource_;
 };
 
 class HorizontalLine : public QFrame
@@ -234,9 +246,11 @@ private:
         Toggle *desktopNotifications_;
         Toggle *alertOnNotification_;
         Toggle *avatarCircles_;
+        Toggle *useStunServer_;
         Toggle *decryptSidebar_;
         QLabel *deviceFingerprintValue_;
         QLabel *deviceIdValue_;
+        QLabel *defaultAudioSourceValue_;
 
         QComboBox *themeCombo_;
         QComboBox *scaleFactorCombo_;
diff --git a/src/Utils.cpp b/src/Utils.cpp
index 26ea124c..0bfc82c3 100644
--- a/src/Utils.cpp
+++ b/src/Utils.cpp
@@ -35,14 +35,13 @@ createDescriptionInfo(const Event &event, const QString &localUser, const QStrin
         const auto username = cache::displayName(room_id, sender);
         const auto ts       = QDateTime::fromMSecsSinceEpoch(msg.origin_server_ts);
 
-        return DescInfo{
-          QString::fromStdString(msg.event_id),
-          sender,
-          utils::messageDescription<T>(
-            username, QString::fromStdString(msg.content.body).trimmed(), sender == localUser),
-          utils::descriptiveTime(ts),
-          msg.origin_server_ts,
-          ts};
+        return DescInfo{QString::fromStdString(msg.event_id),
+                        sender,
+                        utils::messageDescription<T>(
+                          username, utils::event_body(event).trimmed(), sender == localUser),
+                        utils::descriptiveTime(ts),
+                        msg.origin_server_ts,
+                        ts};
 }
 
 QString
@@ -156,14 +155,17 @@ utils::getMessageDescription(const TimelineEvent &event,
                              const QString &localUser,
                              const QString &room_id)
 {
-        using Audio     = mtx::events::RoomEvent<mtx::events::msg::Audio>;
-        using Emote     = mtx::events::RoomEvent<mtx::events::msg::Emote>;
-        using File      = mtx::events::RoomEvent<mtx::events::msg::File>;
-        using Image     = mtx::events::RoomEvent<mtx::events::msg::Image>;
-        using Notice    = mtx::events::RoomEvent<mtx::events::msg::Notice>;
-        using Text      = mtx::events::RoomEvent<mtx::events::msg::Text>;
-        using Video     = mtx::events::RoomEvent<mtx::events::msg::Video>;
-        using Encrypted = mtx::events::EncryptedEvent<mtx::events::msg::Encrypted>;
+        using Audio      = mtx::events::RoomEvent<mtx::events::msg::Audio>;
+        using Emote      = mtx::events::RoomEvent<mtx::events::msg::Emote>;
+        using File       = mtx::events::RoomEvent<mtx::events::msg::File>;
+        using Image      = mtx::events::RoomEvent<mtx::events::msg::Image>;
+        using Notice     = mtx::events::RoomEvent<mtx::events::msg::Notice>;
+        using Text       = mtx::events::RoomEvent<mtx::events::msg::Text>;
+        using Video      = mtx::events::RoomEvent<mtx::events::msg::Video>;
+        using CallInvite = mtx::events::RoomEvent<mtx::events::msg::CallInvite>;
+        using CallAnswer = mtx::events::RoomEvent<mtx::events::msg::CallAnswer>;
+        using CallHangUp = mtx::events::RoomEvent<mtx::events::msg::CallHangUp>;
+        using Encrypted  = mtx::events::EncryptedEvent<mtx::events::msg::Encrypted>;
 
         if (std::holds_alternative<Audio>(event)) {
                 return createDescriptionInfo<Audio>(event, localUser, room_id);
@@ -179,6 +181,12 @@ utils::getMessageDescription(const TimelineEvent &event,
                 return createDescriptionInfo<Text>(event, localUser, room_id);
         } else if (std::holds_alternative<Video>(event)) {
                 return createDescriptionInfo<Video>(event, localUser, room_id);
+        } else if (std::holds_alternative<CallInvite>(event)) {
+                return createDescriptionInfo<CallInvite>(event, localUser, room_id);
+        } else if (std::holds_alternative<CallAnswer>(event)) {
+                return createDescriptionInfo<CallAnswer>(event, localUser, room_id);
+        } else if (std::holds_alternative<CallHangUp>(event)) {
+                return createDescriptionInfo<CallHangUp>(event, localUser, room_id);
         } else if (std::holds_alternative<mtx::events::Sticker>(event)) {
                 return createDescriptionInfo<mtx::events::Sticker>(event, localUser, room_id);
         } else if (auto msg = std::get_if<Encrypted>(&event); msg != nullptr) {
diff --git a/src/Utils.h b/src/Utils.h
index 07a4a648..5e7fb601 100644
--- a/src/Utils.h
+++ b/src/Utils.h
@@ -88,15 +88,18 @@ messageDescription(const QString &username = "",
                    const QString &body     = "",
                    const bool isLocal      = false)
 {
-        using Audio     = mtx::events::RoomEvent<mtx::events::msg::Audio>;
-        using Emote     = mtx::events::RoomEvent<mtx::events::msg::Emote>;
-        using File      = mtx::events::RoomEvent<mtx::events::msg::File>;
-        using Image     = mtx::events::RoomEvent<mtx::events::msg::Image>;
-        using Notice    = mtx::events::RoomEvent<mtx::events::msg::Notice>;
-        using Sticker   = mtx::events::Sticker;
-        using Text      = mtx::events::RoomEvent<mtx::events::msg::Text>;
-        using Video     = mtx::events::RoomEvent<mtx::events::msg::Video>;
-        using Encrypted = mtx::events::EncryptedEvent<mtx::events::msg::Encrypted>;
+        using Audio      = mtx::events::RoomEvent<mtx::events::msg::Audio>;
+        using Emote      = mtx::events::RoomEvent<mtx::events::msg::Emote>;
+        using File       = mtx::events::RoomEvent<mtx::events::msg::File>;
+        using Image      = mtx::events::RoomEvent<mtx::events::msg::Image>;
+        using Notice     = mtx::events::RoomEvent<mtx::events::msg::Notice>;
+        using Sticker    = mtx::events::Sticker;
+        using Text       = mtx::events::RoomEvent<mtx::events::msg::Text>;
+        using Video      = mtx::events::RoomEvent<mtx::events::msg::Video>;
+        using CallInvite = mtx::events::RoomEvent<mtx::events::msg::CallInvite>;
+        using CallAnswer = mtx::events::RoomEvent<mtx::events::msg::CallAnswer>;
+        using CallHangUp = mtx::events::RoomEvent<mtx::events::msg::CallHangUp>;
+        using Encrypted  = mtx::events::EncryptedEvent<mtx::events::msg::Encrypted>;
 
         if (std::is_same<T, Audio>::value) {
                 if (isLocal)
@@ -164,6 +167,30 @@ messageDescription(const QString &username = "",
                         return QCoreApplication::translate("message-description sent:",
                                                            "%1 sent an encrypted message")
                           .arg(username);
+        } else if (std::is_same<T, CallInvite>::value) {
+                if (isLocal)
+                        return QCoreApplication::translate("message-description sent:",
+                                                           "You placed a call");
+                else
+                        return QCoreApplication::translate("message-description sent:",
+                                                           "%1 placed a call")
+                          .arg(username);
+        } else if (std::is_same<T, CallAnswer>::value) {
+                if (isLocal)
+                        return QCoreApplication::translate("message-description sent:",
+                                                           "You answered a call");
+                else
+                        return QCoreApplication::translate("message-description sent:",
+                                                           "%1 answered a call")
+                          .arg(username);
+        } else if (std::is_same<T, CallHangUp>::value) {
+                if (isLocal)
+                        return QCoreApplication::translate("message-description sent:",
+                                                           "You ended a call");
+                else
+                        return QCoreApplication::translate("message-description sent:",
+                                                           "%1 ended a call")
+                          .arg(username);
         } else {
                 return QCoreApplication::translate("utils", "Unknown Message Type");
         }
diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp
new file mode 100644
index 00000000..2248fb1a
--- /dev/null
+++ b/src/WebRTCSession.cpp
@@ -0,0 +1,697 @@
+#include <cctype>
+
+#include "Logging.h"
+#include "WebRTCSession.h"
+
+#ifdef GSTREAMER_AVAILABLE
+extern "C"
+{
+#include "gst/gst.h"
+#include "gst/sdp/sdp.h"
+
+#define GST_USE_UNSTABLE_API
+#include "gst/webrtc/webrtc.h"
+}
+#endif
+
+Q_DECLARE_METATYPE(WebRTCSession::State)
+
+WebRTCSession::WebRTCSession()
+  : QObject()
+{
+        qRegisterMetaType<WebRTCSession::State>();
+        connect(this, &WebRTCSession::stateChanged, this, &WebRTCSession::setState);
+}
+
+bool
+WebRTCSession::init(std::string *errorMessage)
+{
+#ifdef GSTREAMER_AVAILABLE
+        if (initialised_)
+                return true;
+
+        GError *error = nullptr;
+        if (!gst_init_check(nullptr, nullptr, &error)) {
+                std::string strError = std::string("WebRTC: failed to initialise GStreamer: ");
+                if (error) {
+                        strError += error->message;
+                        g_error_free(error);
+                }
+                nhlog::ui()->error(strError);
+                if (errorMessage)
+                        *errorMessage = strError;
+                return false;
+        }
+
+        gchar *version = gst_version_string();
+        std::string gstVersion(version);
+        g_free(version);
+        nhlog::ui()->info("WebRTC: initialised " + gstVersion);
+
+        // GStreamer Plugins:
+        // Base:            audioconvert, audioresample, opus, playback, volume
+        // Good:            autodetect, rtpmanager
+        // Bad:             dtls, srtp, webrtc
+        // libnice [GLib]:  nice
+        initialised_          = true;
+        std::string strError  = gstVersion + ": Missing plugins: ";
+        const gchar *needed[] = {"audioconvert",
+                                 "audioresample",
+                                 "autodetect",
+                                 "dtls",
+                                 "nice",
+                                 "opus",
+                                 "playback",
+                                 "rtpmanager",
+                                 "srtp",
+                                 "volume",
+                                 "webrtc",
+                                 nullptr};
+        GstRegistry *registry = gst_registry_get();
+        for (guint i = 0; i < g_strv_length((gchar **)needed); i++) {
+                GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]);
+                if (!plugin) {
+                        strError += std::string(needed[i]) + " ";
+                        initialised_ = false;
+                        continue;
+                }
+                gst_object_unref(plugin);
+        }
+
+        if (!initialised_) {
+                nhlog::ui()->error(strError);
+                if (errorMessage)
+                        *errorMessage = strError;
+        }
+        return initialised_;
+#else
+        (void)errorMessage;
+        return false;
+#endif
+}
+
+#ifdef GSTREAMER_AVAILABLE
+namespace {
+bool isoffering_;
+std::string localsdp_;
+std::vector<mtx::events::msg::CallCandidates::Candidate> localcandidates_;
+
+gboolean
+newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
+{
+        WebRTCSession *session = static_cast<WebRTCSession *>(user_data);
+        switch (GST_MESSAGE_TYPE(msg)) {
+        case GST_MESSAGE_EOS:
+                nhlog::ui()->error("WebRTC: end of stream");
+                session->end();
+                break;
+        case GST_MESSAGE_ERROR:
+                GError *error;
+                gchar *debug;
+                gst_message_parse_error(msg, &error, &debug);
+                nhlog::ui()->error(
+                  "WebRTC: error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message);
+                g_clear_error(&error);
+                g_free(debug);
+                session->end();
+                break;
+        default:
+                break;
+        }
+        return TRUE;
+}
+
+GstWebRTCSessionDescription *
+parseSDP(const std::string &sdp, GstWebRTCSDPType type)
+{
+        GstSDPMessage *msg;
+        gst_sdp_message_new(&msg);
+        if (gst_sdp_message_parse_buffer((guint8 *)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) {
+                return gst_webrtc_session_description_new(type, msg);
+        } else {
+                nhlog::ui()->error("WebRTC: failed to parse remote session description");
+                gst_sdp_message_free(msg);
+                return nullptr;
+        }
+}
+
+void
+setLocalDescription(GstPromise *promise, gpointer webrtc)
+{
+        const GstStructure *reply = gst_promise_get_reply(promise);
+        gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer"));
+        GstWebRTCSessionDescription *gstsdp = nullptr;
+        gst_structure_get(reply,
+                          isAnswer ? "answer" : "offer",
+                          GST_TYPE_WEBRTC_SESSION_DESCRIPTION,
+                          &gstsdp,
+                          nullptr);
+        gst_promise_unref(promise);
+        g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr);
+
+        gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp);
+        localsdp_  = std::string(sdp);
+        g_free(sdp);
+        gst_webrtc_session_description_free(gstsdp);
+
+        nhlog::ui()->debug(
+          "WebRTC: local description set ({}):\n{}", isAnswer ? "answer" : "offer", localsdp_);
+}
+
+void
+createOffer(GstElement *webrtc)
+{
+        // create-offer first, then set-local-description
+        GstPromise *promise =
+          gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
+        g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise);
+}
+
+void
+createAnswer(GstPromise *promise, gpointer webrtc)
+{
+        // create-answer first, then set-local-description
+        gst_promise_unref(promise);
+        promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
+        g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise);
+}
+
+#if GST_CHECK_VERSION(1, 17, 0)
+void
+iceGatheringStateChanged(GstElement *webrtc,
+                         GParamSpec *pspec G_GNUC_UNUSED,
+                         gpointer user_data G_GNUC_UNUSED)
+{
+        GstWebRTCICEGatheringState newState;
+        g_object_get(webrtc, "ice-gathering-state", &newState, nullptr);
+        if (newState == GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE) {
+                nhlog::ui()->debug("WebRTC: GstWebRTCICEGatheringState -> Complete");
+                if (isoffering_) {
+                        emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
+                        emit WebRTCSession::instance().stateChanged(
+                          WebRTCSession::State::OFFERSENT);
+                } else {
+                        emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
+                        emit WebRTCSession::instance().stateChanged(
+                          WebRTCSession::State::ANSWERSENT);
+                }
+        }
+}
+
+#else
+
+gboolean
+onICEGatheringCompletion(gpointer timerid)
+{
+        *(guint *)(timerid) = 0;
+        if (isoffering_) {
+                emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
+                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::OFFERSENT);
+        } else {
+                emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
+                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ANSWERSENT);
+        }
+        return FALSE;
+}
+#endif
+
+void
+addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED,
+                     guint mlineIndex,
+                     gchar *candidate,
+                     gpointer G_GNUC_UNUSED)
+{
+        nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate);
+
+        if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) {
+                emit WebRTCSession::instance().newICECandidate(
+                  {"audio", (uint16_t)mlineIndex, candidate});
+                return;
+        }
+
+        localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate});
+
+        // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers
+        // GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early. Fixed in v1.17.
+        // Use a 100ms timeout in the meantime
+#if !GST_CHECK_VERSION(1, 17, 0)
+        static guint timerid = 0;
+        if (timerid)
+                g_source_remove(timerid);
+
+        timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid);
+#endif
+}
+
+void
+iceConnectionStateChanged(GstElement *webrtc,
+                          GParamSpec *pspec G_GNUC_UNUSED,
+                          gpointer user_data G_GNUC_UNUSED)
+{
+        GstWebRTCICEConnectionState newState;
+        g_object_get(webrtc, "ice-connection-state", &newState, nullptr);
+        switch (newState) {
+        case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING:
+                nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking");
+                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING);
+                break;
+        case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED:
+                nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed");
+                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED);
+                break;
+        default:
+                break;
+        }
+}
+
+void
+linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
+{
+        GstCaps *caps = gst_pad_get_current_caps(newpad);
+        if (!caps)
+                return;
+
+        const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
+        gst_caps_unref(caps);
+
+        GstPad *queuepad = nullptr;
+        if (g_str_has_prefix(name, "audio")) {
+                nhlog::ui()->debug("WebRTC: received incoming audio stream");
+                GstElement *queue    = gst_element_factory_make("queue", nullptr);
+                GstElement *convert  = gst_element_factory_make("audioconvert", nullptr);
+                GstElement *resample = gst_element_factory_make("audioresample", nullptr);
+                GstElement *sink     = gst_element_factory_make("autoaudiosink", nullptr);
+                gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr);
+                gst_element_sync_state_with_parent(queue);
+                gst_element_sync_state_with_parent(convert);
+                gst_element_sync_state_with_parent(resample);
+                gst_element_sync_state_with_parent(sink);
+                gst_element_link_many(queue, convert, resample, sink, nullptr);
+                queuepad = gst_element_get_static_pad(queue, "sink");
+        }
+
+        if (queuepad) {
+                if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad)))
+                        nhlog::ui()->error("WebRTC: unable to link new pad");
+                else {
+                        emit WebRTCSession::instance().stateChanged(
+                          WebRTCSession::State::CONNECTED);
+                }
+                gst_object_unref(queuepad);
+        }
+}
+
+void
+addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
+{
+        if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC)
+                return;
+
+        nhlog::ui()->debug("WebRTC: received incoming stream");
+        GstElement *decodebin = gst_element_factory_make("decodebin", nullptr);
+        g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe);
+        gst_bin_add(GST_BIN(pipe), decodebin);
+        gst_element_sync_state_with_parent(decodebin);
+        GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
+        if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad)))
+                nhlog::ui()->error("WebRTC: unable to link new pad");
+        gst_object_unref(sinkpad);
+}
+
+std::string::const_iterator
+findName(const std::string &sdp, const std::string &name)
+{
+        return std::search(
+          sdp.cbegin(),
+          sdp.cend(),
+          name.cbegin(),
+          name.cend(),
+          [](unsigned char c1, unsigned char c2) { return std::tolower(c1) == std::tolower(c2); });
+}
+
+int
+getPayloadType(const std::string &sdp, const std::string &name)
+{
+        // eg a=rtpmap:111 opus/48000/2
+        auto e = findName(sdp, name);
+        if (e == sdp.cend()) {
+                nhlog::ui()->error("WebRTC: remote offer - " + name + " attribute missing");
+                return -1;
+        }
+
+        if (auto s = sdp.rfind(':', e - sdp.cbegin()); s == std::string::npos) {
+                nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name +
+                                   " payload type");
+                return -1;
+        } else {
+                ++s;
+                try {
+                        return std::stoi(std::string(sdp, s, e - sdp.cbegin() - s));
+                } catch (...) {
+                        nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name +
+                                           " payload type");
+                }
+        }
+        return -1;
+}
+
+}
+
+bool
+WebRTCSession::createOffer()
+{
+        isoffering_ = true;
+        localsdp_.clear();
+        localcandidates_.clear();
+        return startPipeline(111); // a dynamic opus payload type
+}
+
+bool
+WebRTCSession::acceptOffer(const std::string &sdp)
+{
+        nhlog::ui()->debug("WebRTC: received offer:\n{}", sdp);
+        if (state_ != State::DISCONNECTED)
+                return false;
+
+        isoffering_ = false;
+        localsdp_.clear();
+        localcandidates_.clear();
+
+        int opusPayloadType = getPayloadType(sdp, "opus");
+        if (opusPayloadType == -1)
+                return false;
+
+        GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER);
+        if (!offer)
+                return false;
+
+        if (!startPipeline(opusPayloadType)) {
+                gst_webrtc_session_description_free(offer);
+                return false;
+        }
+
+        // set-remote-description first, then create-answer
+        GstPromise *promise = gst_promise_new_with_change_func(createAnswer, webrtc_, nullptr);
+        g_signal_emit_by_name(webrtc_, "set-remote-description", offer, promise);
+        gst_webrtc_session_description_free(offer);
+        return true;
+}
+
+bool
+WebRTCSession::acceptAnswer(const std::string &sdp)
+{
+        nhlog::ui()->debug("WebRTC: received answer:\n{}", sdp);
+        if (state_ != State::OFFERSENT)
+                return false;
+
+        GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER);
+        if (!answer) {
+                end();
+                return false;
+        }
+
+        g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr);
+        gst_webrtc_session_description_free(answer);
+        return true;
+}
+
+void
+WebRTCSession::acceptICECandidates(
+  const std::vector<mtx::events::msg::CallCandidates::Candidate> &candidates)
+{
+        if (state_ >= State::INITIATED) {
+                for (const auto &c : candidates) {
+                        nhlog::ui()->debug(
+                          "WebRTC: remote candidate: (m-line:{}):{}", c.sdpMLineIndex, c.candidate);
+                        g_signal_emit_by_name(
+                          webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str());
+                }
+        }
+}
+
+bool
+WebRTCSession::startPipeline(int opusPayloadType)
+{
+        if (state_ != State::DISCONNECTED)
+                return false;
+
+        emit stateChanged(State::INITIATING);
+
+        if (!createPipeline(opusPayloadType))
+                return false;
+
+        webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin");
+
+        if (!stunServer_.empty()) {
+                nhlog::ui()->info("WebRTC: setting STUN server: {}", stunServer_);
+                g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr);
+        }
+
+        for (const auto &uri : turnServers_) {
+                nhlog::ui()->info("WebRTC: setting TURN server: {}", uri);
+                gboolean udata;
+                g_signal_emit_by_name(webrtc_, "add-turn-server", uri.c_str(), (gpointer)(&udata));
+        }
+        if (turnServers_.empty())
+                nhlog::ui()->warn("WebRTC: no TURN server provided");
+
+        // generate the offer when the pipeline goes to PLAYING
+        if (isoffering_)
+                g_signal_connect(
+                  webrtc_, "on-negotiation-needed", G_CALLBACK(::createOffer), nullptr);
+
+        // on-ice-candidate is emitted when a local ICE candidate has been gathered
+        g_signal_connect(webrtc_, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), nullptr);
+
+        // capture ICE failure
+        g_signal_connect(
+          webrtc_, "notify::ice-connection-state", G_CALLBACK(iceConnectionStateChanged), nullptr);
+
+        // incoming streams trigger pad-added
+        gst_element_set_state(pipe_, GST_STATE_READY);
+        g_signal_connect(webrtc_, "pad-added", G_CALLBACK(addDecodeBin), pipe_);
+
+#if GST_CHECK_VERSION(1, 17, 0)
+        // capture ICE gathering completion
+        g_signal_connect(
+          webrtc_, "notify::ice-gathering-state", G_CALLBACK(iceGatheringStateChanged), nullptr);
+#endif
+        // webrtcbin lifetime is the same as that of the pipeline
+        gst_object_unref(webrtc_);
+
+        // start the pipeline
+        GstStateChangeReturn ret = gst_element_set_state(pipe_, GST_STATE_PLAYING);
+        if (ret == GST_STATE_CHANGE_FAILURE) {
+                nhlog::ui()->error("WebRTC: unable to start pipeline");
+                end();
+                return false;
+        }
+
+        GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipe_));
+        gst_bus_add_watch(bus, newBusMessage, this);
+        gst_object_unref(bus);
+        emit stateChanged(State::INITIATED);
+        return true;
+}
+
+bool
+WebRTCSession::createPipeline(int opusPayloadType)
+{
+        int nSources = audioSources_ ? g_list_length(audioSources_) : 0;
+        if (nSources == 0) {
+                nhlog::ui()->error("WebRTC: no audio sources");
+                return false;
+        }
+
+        if (audioSourceIndex_ < 0 || audioSourceIndex_ >= nSources) {
+                nhlog::ui()->error("WebRTC: invalid audio source index");
+                return false;
+        }
+
+        GstElement *source = gst_device_create_element(
+          GST_DEVICE_CAST(g_list_nth_data(audioSources_, audioSourceIndex_)), nullptr);
+        GstElement *volume     = gst_element_factory_make("volume", "srclevel");
+        GstElement *convert    = gst_element_factory_make("audioconvert", nullptr);
+        GstElement *resample   = gst_element_factory_make("audioresample", nullptr);
+        GstElement *queue1     = gst_element_factory_make("queue", nullptr);
+        GstElement *opusenc    = gst_element_factory_make("opusenc", nullptr);
+        GstElement *rtp        = gst_element_factory_make("rtpopuspay", nullptr);
+        GstElement *queue2     = gst_element_factory_make("queue", nullptr);
+        GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);
+
+        GstCaps *rtpcaps = gst_caps_new_simple("application/x-rtp",
+                                               "media",
+                                               G_TYPE_STRING,
+                                               "audio",
+                                               "encoding-name",
+                                               G_TYPE_STRING,
+                                               "OPUS",
+                                               "payload",
+                                               G_TYPE_INT,
+                                               opusPayloadType,
+                                               nullptr);
+        g_object_set(capsfilter, "caps", rtpcaps, nullptr);
+        gst_caps_unref(rtpcaps);
+
+        GstElement *webrtcbin = gst_element_factory_make("webrtcbin", "webrtcbin");
+        g_object_set(webrtcbin, "bundle-policy", GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE, nullptr);
+
+        pipe_ = gst_pipeline_new(nullptr);
+        gst_bin_add_many(GST_BIN(pipe_),
+                         source,
+                         volume,
+                         convert,
+                         resample,
+                         queue1,
+                         opusenc,
+                         rtp,
+                         queue2,
+                         capsfilter,
+                         webrtcbin,
+                         nullptr);
+
+        if (!gst_element_link_many(source,
+                                   volume,
+                                   convert,
+                                   resample,
+                                   queue1,
+                                   opusenc,
+                                   rtp,
+                                   queue2,
+                                   capsfilter,
+                                   webrtcbin,
+                                   nullptr)) {
+                nhlog::ui()->error("WebRTC: failed to link pipeline elements");
+                end();
+                return false;
+        }
+        return true;
+}
+
+bool
+WebRTCSession::toggleMuteAudioSrc(bool &isMuted)
+{
+        if (state_ < State::INITIATED)
+                return false;
+
+        GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel");
+        if (!srclevel)
+                return false;
+
+        gboolean muted;
+        g_object_get(srclevel, "mute", &muted, nullptr);
+        g_object_set(srclevel, "mute", !muted, nullptr);
+        gst_object_unref(srclevel);
+        isMuted = !muted;
+        return true;
+}
+
+void
+WebRTCSession::end()
+{
+        nhlog::ui()->debug("WebRTC: ending session");
+        if (pipe_) {
+                gst_element_set_state(pipe_, GST_STATE_NULL);
+                gst_object_unref(pipe_);
+                pipe_ = nullptr;
+        }
+        webrtc_ = nullptr;
+        if (state_ != State::DISCONNECTED)
+                emit stateChanged(State::DISCONNECTED);
+}
+
+void
+WebRTCSession::refreshDevices()
+{
+        if (!initialised_)
+                return;
+
+        static GstDeviceMonitor *monitor = nullptr;
+        if (!monitor) {
+                monitor       = gst_device_monitor_new();
+                GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
+                gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
+                gst_caps_unref(caps);
+        }
+        g_list_free_full(audioSources_, g_object_unref);
+        audioSources_ = gst_device_monitor_get_devices(monitor);
+}
+
+std::vector<std::string>
+WebRTCSession::getAudioSourceNames(const std::string &defaultDevice)
+{
+        if (!initialised_)
+                return {};
+
+        refreshDevices();
+        std::vector<std::string> ret;
+        ret.reserve(g_list_length(audioSources_));
+        for (GList *l = audioSources_; l != nullptr; l = l->next) {
+                gchar *name = gst_device_get_display_name(GST_DEVICE_CAST(l->data));
+                ret.emplace_back(name);
+                g_free(name);
+                if (ret.back() == defaultDevice) {
+                        // move default device to top of the list
+                        std::swap(audioSources_->data, l->data);
+                        std::swap(ret.front(), ret.back());
+                }
+        }
+        return ret;
+}
+#else
+
+bool
+WebRTCSession::createOffer()
+{
+        return false;
+}
+
+bool
+WebRTCSession::acceptOffer(const std::string &)
+{
+        return false;
+}
+
+bool
+WebRTCSession::acceptAnswer(const std::string &)
+{
+        return false;
+}
+
+void
+WebRTCSession::acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &)
+{}
+
+bool
+WebRTCSession::startPipeline(int)
+{
+        return false;
+}
+
+bool
+WebRTCSession::createPipeline(int)
+{
+        return false;
+}
+
+bool
+WebRTCSession::toggleMuteAudioSrc(bool &)
+{
+        return false;
+}
+
+void
+WebRTCSession::end()
+{}
+
+void
+WebRTCSession::refreshDevices()
+{}
+
+std::vector<std::string>
+WebRTCSession::getAudioSourceNames(const std::string &)
+{
+        return {};
+}
+
+#endif
diff --git a/src/WebRTCSession.h b/src/WebRTCSession.h
new file mode 100644
index 00000000..56d76fa8
--- /dev/null
+++ b/src/WebRTCSession.h
@@ -0,0 +1,83 @@
+#pragma once
+
+#include <string>
+#include <vector>
+
+#include <QObject>
+
+#include "mtx/events/voip.hpp"
+
+typedef struct _GList GList;
+typedef struct _GstElement GstElement;
+
+class WebRTCSession : public QObject
+{
+        Q_OBJECT
+
+public:
+        enum class State
+        {
+                DISCONNECTED,
+                ICEFAILED,
+                INITIATING,
+                INITIATED,
+                OFFERSENT,
+                ANSWERSENT,
+                CONNECTING,
+                CONNECTED
+        };
+
+        static WebRTCSession &instance()
+        {
+                static WebRTCSession instance;
+                return instance;
+        }
+
+        bool init(std::string *errorMessage = nullptr);
+        State state() const { return state_; }
+
+        bool createOffer();
+        bool acceptOffer(const std::string &sdp);
+        bool acceptAnswer(const std::string &sdp);
+        void acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
+
+        bool toggleMuteAudioSrc(bool &isMuted);
+        void end();
+
+        void setStunServer(const std::string &stunServer) { stunServer_ = stunServer; }
+        void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }
+
+        std::vector<std::string> getAudioSourceNames(const std::string &defaultDevice);
+        void setAudioSource(int audioDeviceIndex) { audioSourceIndex_ = audioDeviceIndex; }
+
+signals:
+        void offerCreated(const std::string &sdp,
+                          const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
+        void answerCreated(const std::string &sdp,
+                           const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
+        void newICECandidate(const mtx::events::msg::CallCandidates::Candidate &);
+        void stateChanged(WebRTCSession::State); // explicit qualifier necessary for Qt
+
+private slots:
+        void setState(State state) { state_ = state; }
+
+private:
+        WebRTCSession();
+
+        bool initialised_   = false;
+        State state_        = State::DISCONNECTED;
+        GstElement *pipe_   = nullptr;
+        GstElement *webrtc_ = nullptr;
+        std::string stunServer_;
+        std::vector<std::string> turnServers_;
+        GList *audioSources_  = nullptr;
+        int audioSourceIndex_ = -1;
+
+        bool startPipeline(int opusPayloadType);
+        bool createPipeline(int opusPayloadType);
+        void refreshDevices();
+
+public:
+        WebRTCSession(WebRTCSession const &) = delete;
+        void operator=(WebRTCSession const &) = delete;
+};
diff --git a/src/dialogs/AcceptCall.cpp b/src/dialogs/AcceptCall.cpp
new file mode 100644
index 00000000..be1eb0c9
--- /dev/null
+++ b/src/dialogs/AcceptCall.cpp
@@ -0,0 +1,135 @@
+#include <QComboBox>
+#include <QLabel>
+#include <QPushButton>
+#include <QString>
+#include <QVBoxLayout>
+
+#include "ChatPage.h"
+#include "Config.h"
+#include "UserSettingsPage.h"
+#include "Utils.h"
+#include "WebRTCSession.h"
+#include "dialogs/AcceptCall.h"
+#include "ui/Avatar.h"
+
+namespace dialogs {
+
+AcceptCall::AcceptCall(const QString &caller,
+                       const QString &displayName,
+                       const QString &roomName,
+                       const QString &avatarUrl,
+                       QSharedPointer<UserSettings> settings,
+                       QWidget *parent)
+  : QWidget(parent)
+{
+        std::string errorMessage;
+        if (!WebRTCSession::instance().init(&errorMessage)) {
+                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
+                emit close();
+                return;
+        }
+        audioDevices_ = WebRTCSession::instance().getAudioSourceNames(
+          settings->defaultAudioSource().toStdString());
+        if (audioDevices_.empty()) {
+                emit ChatPage::instance()->showNotification(
+                  "Incoming call: No audio sources found.");
+                emit close();
+                return;
+        }
+
+        setAutoFillBackground(true);
+        setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
+        setWindowModality(Qt::WindowModal);
+        setAttribute(Qt::WA_DeleteOnClose, true);
+
+        setMinimumWidth(conf::modals::MIN_WIDGET_WIDTH);
+        setSizePolicy(QSizePolicy::Maximum, QSizePolicy::Maximum);
+
+        auto layout = new QVBoxLayout(this);
+        layout->setSpacing(conf::modals::WIDGET_SPACING);
+        layout->setMargin(conf::modals::WIDGET_MARGIN);
+
+        QFont f;
+        f.setPointSizeF(f.pointSizeF());
+
+        QFont labelFont;
+        labelFont.setWeight(QFont::Medium);
+
+        QLabel *displayNameLabel = nullptr;
+        if (!displayName.isEmpty() && displayName != caller) {
+                displayNameLabel = new QLabel(displayName, this);
+                labelFont.setPointSizeF(f.pointSizeF() * 2);
+                displayNameLabel->setFont(labelFont);
+                displayNameLabel->setAlignment(Qt::AlignCenter);
+        }
+
+        QLabel *callerLabel = new QLabel(caller, this);
+        labelFont.setPointSizeF(f.pointSizeF() * 1.2);
+        callerLabel->setFont(labelFont);
+        callerLabel->setAlignment(Qt::AlignCenter);
+
+        auto avatar = new Avatar(this, QFontMetrics(f).height() * 6);
+        if (!avatarUrl.isEmpty())
+                avatar->setImage(avatarUrl);
+        else
+                avatar->setLetter(utils::firstChar(roomName));
+
+        const int iconSize        = 22;
+        QLabel *callTypeIndicator = new QLabel(this);
+        callTypeIndicator->setPixmap(
+          QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(iconSize * 2, iconSize * 2)));
+
+        QLabel *callTypeLabel = new QLabel("Voice Call", this);
+        labelFont.setPointSizeF(f.pointSizeF() * 1.1);
+        callTypeLabel->setFont(labelFont);
+        callTypeLabel->setAlignment(Qt::AlignCenter);
+
+        auto buttonLayout = new QHBoxLayout;
+        buttonLayout->setSpacing(18);
+        acceptBtn_ = new QPushButton(tr("Accept"), this);
+        acceptBtn_->setDefault(true);
+        acceptBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
+        acceptBtn_->setIconSize(QSize(iconSize, iconSize));
+
+        rejectBtn_ = new QPushButton(tr("Reject"), this);
+        rejectBtn_->setIcon(QIcon(":/icons/icons/ui/end-call.png"));
+        rejectBtn_->setIconSize(QSize(iconSize, iconSize));
+        buttonLayout->addWidget(acceptBtn_);
+        buttonLayout->addWidget(rejectBtn_);
+
+        auto deviceLayout = new QHBoxLayout;
+        auto audioLabel   = new QLabel(this);
+        audioLabel->setPixmap(
+          QIcon(":/icons/icons/ui/microphone-unmute.png").pixmap(QSize(iconSize, iconSize)));
+
+        auto deviceList = new QComboBox(this);
+        for (const auto &d : audioDevices_)
+                deviceList->addItem(QString::fromStdString(d));
+
+        deviceLayout->addStretch();
+        deviceLayout->addWidget(audioLabel);
+        deviceLayout->addWidget(deviceList);
+
+        if (displayNameLabel)
+                layout->addWidget(displayNameLabel, 0, Qt::AlignCenter);
+        layout->addWidget(callerLabel, 0, Qt::AlignCenter);
+        layout->addWidget(avatar, 0, Qt::AlignCenter);
+        layout->addWidget(callTypeIndicator, 0, Qt::AlignCenter);
+        layout->addWidget(callTypeLabel, 0, Qt::AlignCenter);
+        layout->addLayout(buttonLayout);
+        layout->addLayout(deviceLayout);
+
+        connect(acceptBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() {
+                WebRTCSession::instance().setAudioSource(deviceList->currentIndex());
+                settings->setDefaultAudioSource(
+                  QString::fromStdString(audioDevices_[deviceList->currentIndex()]));
+                emit accept();
+                emit close();
+        });
+        connect(rejectBtn_, &QPushButton::clicked, this, [this]() {
+                emit reject();
+                emit close();
+        });
+}
+
+}
diff --git a/src/dialogs/AcceptCall.h b/src/dialogs/AcceptCall.h
new file mode 100644
index 00000000..909605d0
--- /dev/null
+++ b/src/dialogs/AcceptCall.h
@@ -0,0 +1,37 @@
+#pragma once
+
+#include <string>
+#include <vector>
+
+#include <QSharedPointer>
+#include <QWidget>
+
+class QPushButton;
+class QString;
+class UserSettings;
+
+namespace dialogs {
+
+class AcceptCall : public QWidget
+{
+        Q_OBJECT
+
+public:
+        AcceptCall(const QString &caller,
+                   const QString &displayName,
+                   const QString &roomName,
+                   const QString &avatarUrl,
+                   QSharedPointer<UserSettings> settings,
+                   QWidget *parent = nullptr);
+
+signals:
+        void accept();
+        void reject();
+
+private:
+        QPushButton *acceptBtn_;
+        QPushButton *rejectBtn_;
+        std::vector<std::string> audioDevices_;
+};
+
+}
diff --git a/src/dialogs/PlaceCall.cpp b/src/dialogs/PlaceCall.cpp
new file mode 100644
index 00000000..4e70370a
--- /dev/null
+++ b/src/dialogs/PlaceCall.cpp
@@ -0,0 +1,104 @@
+#include <QComboBox>
+#include <QLabel>
+#include <QPushButton>
+#include <QString>
+#include <QVBoxLayout>
+
+#include "ChatPage.h"
+#include "Config.h"
+#include "UserSettingsPage.h"
+#include "Utils.h"
+#include "WebRTCSession.h"
+#include "dialogs/PlaceCall.h"
+#include "ui/Avatar.h"
+
+namespace dialogs {
+
+PlaceCall::PlaceCall(const QString &callee,
+                     const QString &displayName,
+                     const QString &roomName,
+                     const QString &avatarUrl,
+                     QSharedPointer<UserSettings> settings,
+                     QWidget *parent)
+  : QWidget(parent)
+{
+        std::string errorMessage;
+        if (!WebRTCSession::instance().init(&errorMessage)) {
+                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
+                emit close();
+                return;
+        }
+        audioDevices_ = WebRTCSession::instance().getAudioSourceNames(
+          settings->defaultAudioSource().toStdString());
+        if (audioDevices_.empty()) {
+                emit ChatPage::instance()->showNotification("No audio sources found.");
+                emit close();
+                return;
+        }
+
+        setAutoFillBackground(true);
+        setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
+        setWindowModality(Qt::WindowModal);
+        setAttribute(Qt::WA_DeleteOnClose, true);
+
+        auto layout = new QVBoxLayout(this);
+        layout->setSpacing(conf::modals::WIDGET_SPACING);
+        layout->setMargin(conf::modals::WIDGET_MARGIN);
+
+        auto buttonLayout = new QHBoxLayout;
+        buttonLayout->setSpacing(15);
+        buttonLayout->setMargin(0);
+
+        QFont f;
+        f.setPointSizeF(f.pointSizeF());
+        auto avatar = new Avatar(this, QFontMetrics(f).height() * 3);
+        if (!avatarUrl.isEmpty())
+                avatar->setImage(avatarUrl);
+        else
+                avatar->setLetter(utils::firstChar(roomName));
+        const int iconSize = 18;
+        voiceBtn_          = new QPushButton(tr("Voice"), this);
+        voiceBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
+        voiceBtn_->setIconSize(QSize(iconSize, iconSize));
+        voiceBtn_->setDefault(true);
+        cancelBtn_ = new QPushButton(tr("Cancel"), this);
+
+        buttonLayout->addWidget(avatar);
+        buttonLayout->addStretch();
+        buttonLayout->addWidget(voiceBtn_);
+        buttonLayout->addWidget(cancelBtn_);
+
+        QString name  = displayName.isEmpty() ? callee : displayName;
+        QLabel *label = new QLabel("Place a call to " + name + "?", this);
+
+        auto deviceLayout = new QHBoxLayout;
+        auto audioLabel   = new QLabel(this);
+        audioLabel->setPixmap(QIcon(":/icons/icons/ui/microphone-unmute.png")
+                                .pixmap(QSize(iconSize * 1.2, iconSize * 1.2)));
+
+        auto deviceList = new QComboBox(this);
+        for (const auto &d : audioDevices_)
+                deviceList->addItem(QString::fromStdString(d));
+
+        deviceLayout->addStretch();
+        deviceLayout->addWidget(audioLabel);
+        deviceLayout->addWidget(deviceList);
+
+        layout->addWidget(label);
+        layout->addLayout(buttonLayout);
+        layout->addLayout(deviceLayout);
+
+        connect(voiceBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() {
+                WebRTCSession::instance().setAudioSource(deviceList->currentIndex());
+                settings->setDefaultAudioSource(
+                  QString::fromStdString(audioDevices_[deviceList->currentIndex()]));
+                emit voice();
+                emit close();
+        });
+        connect(cancelBtn_, &QPushButton::clicked, this, [this]() {
+                emit cancel();
+                emit close();
+        });
+}
+
+}
diff --git a/src/dialogs/PlaceCall.h b/src/dialogs/PlaceCall.h
new file mode 100644
index 00000000..5a1e982c
--- /dev/null
+++ b/src/dialogs/PlaceCall.h
@@ -0,0 +1,37 @@
+#pragma once
+
+#include <string>
+#include <vector>
+
+#include <QSharedPointer>
+#include <QWidget>
+
+class QPushButton;
+class QString;
+class UserSettings;
+
+namespace dialogs {
+
+class PlaceCall : public QWidget
+{
+        Q_OBJECT
+
+public:
+        PlaceCall(const QString &callee,
+                  const QString &displayName,
+                  const QString &roomName,
+                  const QString &avatarUrl,
+                  QSharedPointer<UserSettings> settings,
+                  QWidget *parent = nullptr);
+
+signals:
+        void voice();
+        void cancel();
+
+private:
+        QPushButton *voiceBtn_;
+        QPushButton *cancelBtn_;
+        std::vector<std::string> audioDevices_;
+};
+
+}
diff --git a/src/dialogs/UserProfile.cpp b/src/dialogs/UserProfile.cpp
index 3415b127..086dbb40 100644
--- a/src/dialogs/UserProfile.cpp
+++ b/src/dialogs/UserProfile.cpp
@@ -1,6 +1,7 @@
 #include <QHBoxLayout>
 #include <QLabel>
 #include <QListWidget>
+#include <QMessageBox>
 #include <QShortcut>
 #include <QVBoxLayout>
 
@@ -97,6 +98,14 @@ UserProfile::UserProfile(QWidget *parent)
                 if (utils::localUser() != user_id)
                         req.invite = {user_id.toStdString()};
 
+                if (QMessageBox::question(
+                      this,
+                      tr("Confirm DM"),
+                      tr("Do you really want to invite %1 (%2) to a direct chat?")
+                        .arg(cache::displayName(roomId_, user_id))
+                        .arg(user_id)) != QMessageBox::Yes)
+                        return;
+
                 emit ChatPage::instance()->createRoom(req);
         });
 
@@ -199,6 +208,8 @@ UserProfile::init(const QString &userId, const QString &roomId)
 {
         resetToDefaults();
 
+        this->roomId_ = roomId;
+
         auto displayName = cache::displayName(roomId, userId);
 
         userIdLabel_->setText(userId);
diff --git a/src/dialogs/UserProfile.h b/src/dialogs/UserProfile.h
index 81276d2a..8129fdcf 100644
--- a/src/dialogs/UserProfile.h
+++ b/src/dialogs/UserProfile.h
@@ -53,6 +53,7 @@ private:
         void resetToDefaults();
 
         Avatar *avatar_;
+        QString roomId_;
 
         QLabel *userIdLabel_;
         QLabel *displayNameLabel_;
diff --git a/src/timeline/TimelineModel.cpp b/src/timeline/TimelineModel.cpp
index f41e7712..9695f850 100644
--- a/src/timeline/TimelineModel.cpp
+++ b/src/timeline/TimelineModel.cpp
@@ -121,6 +121,21 @@ struct RoomEventType
         {
                 return qml_mtx_events::EventType::Redacted;
         }
+        qml_mtx_events::EventType operator()(
+          const mtx::events::Event<mtx::events::msg::CallInvite> &)
+        {
+                return qml_mtx_events::EventType::CallInvite;
+        }
+        qml_mtx_events::EventType operator()(
+          const mtx::events::Event<mtx::events::msg::CallAnswer> &)
+        {
+                return qml_mtx_events::EventType::CallAnswer;
+        }
+        qml_mtx_events::EventType operator()(
+          const mtx::events::Event<mtx::events::msg::CallHangUp> &)
+        {
+                return qml_mtx_events::EventType::CallHangUp;
+        }
         // ::EventType::Type operator()(const Event<mtx::events::msg::Location> &e) { return
         // ::EventType::LocationMessage; }
 };
@@ -224,6 +239,7 @@ TimelineModel::roleNames() const
           {RoomId, "roomId"},
           {RoomName, "roomName"},
           {RoomTopic, "roomTopic"},
+          {CallType, "callType"},
           {Dump, "dump"},
         };
 }
@@ -375,6 +391,8 @@ TimelineModel::data(const mtx::events::collections::TimelineEvents &event, int r
                 return QVariant(QString::fromStdString(room_name(event)));
         case RoomTopic:
                 return QVariant(QString::fromStdString(room_topic(event)));
+        case CallType:
+                return QVariant(QString::fromStdString(call_type(event)));
         case Dump: {
                 QVariantMap m;
                 auto names = roleNames();
@@ -405,6 +423,7 @@ TimelineModel::data(const mtx::events::collections::TimelineEvents &event, int r
                 m.insert(names[ReplyTo], data(event, static_cast<int>(ReplyTo)));
                 m.insert(names[RoomName], data(event, static_cast<int>(RoomName)));
                 m.insert(names[RoomTopic], data(event, static_cast<int>(RoomTopic)));
+                m.insert(names[CallType], data(event, static_cast<int>(CallType)));
 
                 return QVariant(m);
         }
@@ -501,8 +520,32 @@ TimelineModel::addEvents(const mtx::responses::Timeline &timeline)
 
         events.handleSync(timeline);
 
-        if (!timeline.events.empty())
-                updateLastMessage();
+        using namespace mtx::events;
+        for (auto e : timeline.events) {
+                if (auto encryptedEvent = std::get_if<EncryptedEvent<msg::Encrypted>>(&e)) {
+                        MegolmSessionIndex index;
+                        index.room_id    = room_id_.toStdString();
+                        index.session_id = encryptedEvent->content.session_id;
+                        index.sender_key = encryptedEvent->content.sender_key;
+
+                        auto result = olm::decryptEvent(index, *encryptedEvent);
+                        if (result.event)
+                                e = result.event.value();
+                }
+
+                if (std::holds_alternative<RoomEvent<msg::CallCandidates>>(e) ||
+                    std::holds_alternative<RoomEvent<msg::CallInvite>>(e) ||
+                    std::holds_alternative<RoomEvent<msg::CallAnswer>>(e) ||
+                    std::holds_alternative<RoomEvent<msg::CallHangUp>>(e))
+                        std::visit(
+                          [this](auto &event) {
+                                  event.room_id = room_id_.toStdString();
+                                  if (event.sender != http::client()->user_id().to_string())
+                                          emit newCallEvent(event);
+                          },
+                          e);
+        }
+        updateLastMessage();
 }
 
 template<typename T>
@@ -527,6 +570,23 @@ isMessage(const mtx::events::EncryptedEvent<T> &)
         return true;
 }
 
+auto
+isMessage(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &)
+{
+        return true;
+}
+
+auto
+isMessage(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &)
+{
+        return true;
+}
+auto
+isMessage(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &)
+{
+        return true;
+}
+
 // Workaround. We also want to see a room at the top, if we just joined it
 auto
 isYourJoin(const mtx::events::StateEvent<mtx::events::state::Member> &e)
@@ -758,14 +818,17 @@ TimelineModel::markEventsAsRead(const std::vector<QString> &event_ids)
 }
 
 void
-TimelineModel::sendEncryptedMessage(const std::string txn_id, nlohmann::json content)
+TimelineModel::sendEncryptedMessageEvent(const std::string &txn_id,
+                                         nlohmann::json content,
+                                         mtx::events::EventType eventType)
 {
         const auto room_id = room_id_.toStdString();
 
         using namespace mtx::events;
         using namespace mtx::identifiers;
 
-        json doc = {{"type", "m.room.message"}, {"content", content}, {"room_id", room_id}};
+        json doc = {
+          {"type", mtx::events::to_string(eventType)}, {"content", content}, {"room_id", room_id}};
 
         try {
                 // Check if we have already an outbound megolm session then we can use.
@@ -1043,27 +1106,36 @@ struct SendMessageVisitor
           : model_(model)
         {}
 
-        // Do-nothing operator for all unhandled events
-        template<typename T>
-        void operator()(const mtx::events::Event<T> &)
-        {}
-        // Operator for m.room.message events that contain a msgtype in their content
-        template<typename T,
-                 std::enable_if_t<std::is_same<decltype(T::msgtype), std::string>::value, int> = 0>
-        void operator()(const mtx::events::RoomEvent<T> &msg)
-
+        template<typename T, mtx::events::EventType Event>
+        void sendRoomEvent(mtx::events::RoomEvent<T> msg)
         {
                 if (cache::isRoomEncrypted(model_->room_id_.toStdString())) {
                         auto encInfo = mtx::accessors::file(msg);
                         if (encInfo)
                                 emit model_->newEncryptedImage(encInfo.value());
 
-                        model_->sendEncryptedMessage(msg.event_id, nlohmann::json(msg.content));
+                        model_->sendEncryptedMessageEvent(
+                          msg.event_id, nlohmann::json(msg.content), Event);
                 } else {
+                        msg.type = Event;
                         emit model_->addPendingMessageToStore(msg);
                 }
         }
 
+
+        // Do-nothing operator for all unhandled events
+        template<typename T>
+        void operator()(const mtx::events::Event<T> &)
+        {}
+
+        // Operator for m.room.message events that contain a msgtype in their content
+        template<typename T,
+                 std::enable_if_t<std::is_same<decltype(T::msgtype), std::string>::value, int> = 0>
+        void operator()(mtx::events::RoomEvent<T> msg)
+        {
+                sendRoomEvent<T, mtx::events::EventType::RoomMessage>(msg);
+        }
+
         // Special operator for reactions, which are a type of m.room.message, but need to be
         // handled distinctly for their differences from normal room messages.  Specifically,
         // reactions need to have the relation outside of ciphertext, or synapse / the homeserver
@@ -1075,6 +1147,30 @@ struct SendMessageVisitor
                 emit model_->addPendingMessageToStore(msg);
         }
 
+        void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &event)
+        {
+                sendRoomEvent<mtx::events::msg::CallInvite, mtx::events::EventType::CallInvite>(
+                  event);
+        }
+
+        void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates> &event)
+        {
+                sendRoomEvent<mtx::events::msg::CallCandidates,
+                              mtx::events::EventType::CallCandidates>(event);
+        }
+
+        void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &event)
+        {
+                sendRoomEvent<mtx::events::msg::CallAnswer, mtx::events::EventType::CallAnswer>(
+                  event);
+        }
+
+        void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &event)
+        {
+                sendRoomEvent<mtx::events::msg::CallHangUp, mtx::events::EventType::CallHangUp>(
+                  event);
+        }
+
         TimelineModel *model_;
 };
 
diff --git a/src/timeline/TimelineModel.h b/src/timeline/TimelineModel.h
index 0bcf42b7..034ae31a 100644
--- a/src/timeline/TimelineModel.h
+++ b/src/timeline/TimelineModel.h
@@ -36,6 +36,12 @@ enum EventType
         Aliases,
         /// m.room.avatar
         Avatar,
+        /// m.call.invite
+        CallInvite,
+        /// m.call.answer
+        CallAnswer,
+        /// m.call.hangup
+        CallHangUp,
         /// m.room.canonical_alias
         CanonicalAlias,
         /// m.room.create
@@ -164,6 +170,7 @@ public:
                 RoomId,
                 RoomName,
                 RoomTopic,
+                CallType,
                 Dump,
         };
 
@@ -209,7 +216,7 @@ public:
         void updateLastMessage();
         void addEvents(const mtx::responses::Timeline &events);
         template<class T>
-        void sendMessage(const T &msg);
+        void sendMessageEvent(const T &content, mtx::events::EventType eventType);
         RelatedInfo relatedInfo(QString id);
 
 public slots:
@@ -256,12 +263,15 @@ signals:
         void typingUsersChanged(std::vector<QString> users);
         void replyChanged(QString reply);
         void paginationInProgressChanged(const bool);
+        void newCallEvent(const mtx::events::collections::TimelineEvents &event);
 
         void newMessageToSend(mtx::events::collections::TimelineEvents event);
         void addPendingMessageToStore(mtx::events::collections::TimelineEvents event);
 
 private:
-        void sendEncryptedMessage(const std::string txn_id, nlohmann::json content);
+        void sendEncryptedMessageEvent(const std::string &txn_id,
+                                       nlohmann::json content,
+                                       mtx::events::EventType);
         void handleClaimedKeys(std::shared_ptr<StateKeeper> keeper,
                                const std::map<std::string, std::string> &room_key,
                                const std::map<std::string, DevicePublicKeys> &pks,
@@ -292,9 +302,10 @@ private:
 
 template<class T>
 void
-TimelineModel::sendMessage(const T &msg)
+TimelineModel::sendMessageEvent(const T &content, mtx::events::EventType eventType)
 {
         mtx::events::RoomEvent<T> msgCopy = {};
-        msgCopy.content                   = msg;
+        msgCopy.content                   = content;
+        msgCopy.type                      = eventType;
         emit newMessageToSend(msgCopy);
 }
diff --git a/src/timeline/TimelineViewManager.cpp b/src/timeline/TimelineViewManager.cpp
index 975dd5fb..466c3cee 100644
--- a/src/timeline/TimelineViewManager.cpp
+++ b/src/timeline/TimelineViewManager.cpp
@@ -1,10 +1,13 @@
 #include "TimelineViewManager.h"
 
+#include <QDesktopServices>
 #include <QMetaType>
 #include <QPalette>
 #include <QQmlContext>
+#include <QString>
 
 #include "BlurhashProvider.h"
+#include "CallManager.h"
 #include "ChatPage.h"
 #include "ColorImageProvider.h"
 #include "DelegateChooser.h"
@@ -71,10 +74,13 @@ TimelineViewManager::userStatus(QString id) const
         return QString::fromStdString(cache::statusMessage(id.toStdString()));
 }
 
-TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettings, QWidget *parent)
+TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettings,
+                                         CallManager *callManager,
+                                         QWidget *parent)
   : imgProvider(new MxcImageProvider())
   , colorImgProvider(new ColorImageProvider())
   , blurhashProvider(new BlurhashProvider())
+  , callManager_(callManager)
   , settings(userSettings)
 {
         qmlRegisterUncreatableMetaObject(qml_mtx_events::staticMetaObject,
@@ -133,6 +139,10 @@ TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettin
                 &ChatPage::decryptSidebarChanged,
                 this,
                 &TimelineViewManager::updateEncryptedDescriptions);
+        connect(dynamic_cast<ChatPage *>(parent), &ChatPage::loggedOut, this, [this]() {
+                isInitialSync_ = true;
+                emit initialSyncChanged(true);
+        });
 }
 
 void
@@ -142,7 +152,17 @@ TimelineViewManager::sync(const mtx::responses::Rooms &rooms)
                 // addRoom will only add the room, if it doesn't exist
                 addRoom(QString::fromStdString(room_id));
                 const auto &room_model = models.value(QString::fromStdString(room_id));
+                if (!isInitialSync_)
+                        connect(room_model.data(),
+                                &TimelineModel::newCallEvent,
+                                callManager_,
+                                &CallManager::syncEvent);
                 room_model->addEvents(room.timeline);
+                if (!isInitialSync_)
+                        disconnect(room_model.data(),
+                                   &TimelineModel::newCallEvent,
+                                   callManager_,
+                                   &CallManager::syncEvent);
 
                 if (ChatPage::instance()->userSettings()->typingNotifications()) {
                         std::vector<QString> typing;
@@ -220,6 +240,12 @@ TimelineViewManager::openImageOverlay(QString mxcUrl, QString eventId) const
 }
 
 void
+TimelineViewManager::openLink(QString link) const
+{
+        QDesktopServices::openUrl(link);
+}
+
+void
 TimelineViewManager::updateReadReceipts(const QString &room_id,
                                         const std::vector<QString> &event_ids)
 {
@@ -288,7 +314,7 @@ TimelineViewManager::queueTextMessage(const QString &msg)
                 timeline_->resetReply();
         }
 
-        timeline_->sendMessage(text);
+        timeline_->sendMessageEvent(text, mtx::events::EventType::RoomMessage);
 }
 
 void
@@ -310,7 +336,7 @@ TimelineViewManager::queueEmoteMessage(const QString &msg)
         }
 
         if (timeline_)
-                timeline_->sendMessage(emote);
+                timeline_->sendMessageEvent(emote, mtx::events::EventType::RoomMessage);
 }
 
 void
@@ -339,7 +365,7 @@ TimelineViewManager::queueReactionMessage(const QString &reactedEvent, const QSt
                 reaction.relates_to.event_id = reactedEvent.toStdString();
                 reaction.relates_to.key      = reactionKey.toStdString();
 
-                timeline_->sendMessage(reaction);
+                timeline_->sendMessageEvent(reaction, mtx::events::EventType::Reaction);
                 // Otherwise, we have previously reacted and the reaction should be redacted
         } else {
                 timeline_->redactEvent(selfReactedEvent);
@@ -375,7 +401,7 @@ TimelineViewManager::queueImageMessage(const QString &roomid,
                 model->resetReply();
         }
 
-        model->sendMessage(image);
+        model->sendMessageEvent(image, mtx::events::EventType::RoomMessage);
 }
 
 void
@@ -403,7 +429,7 @@ TimelineViewManager::queueFileMessage(
                 model->resetReply();
         }
 
-        model->sendMessage(file);
+        model->sendMessageEvent(file, mtx::events::EventType::RoomMessage);
 }
 
 void
@@ -431,7 +457,7 @@ TimelineViewManager::queueAudioMessage(const QString &roomid,
                 model->resetReply();
         }
 
-        model->sendMessage(audio);
+        model->sendMessageEvent(audio, mtx::events::EventType::RoomMessage);
 }
 
 void
@@ -458,5 +484,34 @@ TimelineViewManager::queueVideoMessage(const QString &roomid,
                 model->resetReply();
         }
 
-        model->sendMessage(video);
+        model->sendMessageEvent(video, mtx::events::EventType::RoomMessage);
+}
+
+void
+TimelineViewManager::queueCallMessage(const QString &roomid,
+                                      const mtx::events::msg::CallInvite &callInvite)
+{
+        models.value(roomid)->sendMessageEvent(callInvite, mtx::events::EventType::CallInvite);
+}
+
+void
+TimelineViewManager::queueCallMessage(const QString &roomid,
+                                      const mtx::events::msg::CallCandidates &callCandidates)
+{
+        models.value(roomid)->sendMessageEvent(callCandidates,
+                                               mtx::events::EventType::CallCandidates);
+}
+
+void
+TimelineViewManager::queueCallMessage(const QString &roomid,
+                                      const mtx::events::msg::CallAnswer &callAnswer)
+{
+        models.value(roomid)->sendMessageEvent(callAnswer, mtx::events::EventType::CallAnswer);
+}
+
+void
+TimelineViewManager::queueCallMessage(const QString &roomid,
+                                      const mtx::events::msg::CallHangUp &callHangUp)
+{
+        models.value(roomid)->sendMessageEvent(callHangUp, mtx::events::EventType::CallHangUp);
 }
diff --git a/src/timeline/TimelineViewManager.h b/src/timeline/TimelineViewManager.h
index 20dbc3bb..ea6d1743 100644
--- a/src/timeline/TimelineViewManager.h
+++ b/src/timeline/TimelineViewManager.h
@@ -18,6 +18,7 @@
 
 class MxcImageProvider;
 class BlurhashProvider;
+class CallManager;
 class ColorImageProvider;
 class UserSettings;
 
@@ -31,7 +32,9 @@ class TimelineViewManager : public QObject
           bool isInitialSync MEMBER isInitialSync_ READ isInitialSync NOTIFY initialSyncChanged)
 
 public:
-        TimelineViewManager(QSharedPointer<UserSettings> userSettings, QWidget *parent = nullptr);
+        TimelineViewManager(QSharedPointer<UserSettings> userSettings,
+                            CallManager *callManager,
+                            QWidget *parent = nullptr);
         QWidget *getWidget() const { return container; }
 
         void sync(const mtx::responses::Rooms &rooms);
@@ -47,6 +50,8 @@ public:
         Q_INVOKABLE QString userPresence(QString id) const;
         Q_INVOKABLE QString userStatus(QString id) const;
 
+        Q_INVOKABLE void openLink(QString link) const;
+
 signals:
         void clearRoomMessageCount(QString roomid);
         void updateRoomsLastMessage(QString roomid, const DescInfo &info);
@@ -90,6 +95,11 @@ public slots:
                                const QString &url,
                                const QString &mime,
                                uint64_t dsize);
+        void queueCallMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
+        void queueCallMessage(const QString &roomid, const mtx::events::msg::CallCandidates &);
+        void queueCallMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
+        void queueCallMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
+
         void updateEncryptedDescriptions();
 
         void clearCurrentRoomTimeline()
@@ -111,7 +121,8 @@ private:
         BlurhashProvider *blurhashProvider;
 
         QHash<QString, QSharedPointer<TimelineModel>> models;
-        TimelineModel *timeline_ = nullptr;
+        TimelineModel *timeline_  = nullptr;
+        CallManager *callManager_ = nullptr;
 
         bool isInitialSync_ = true;