diff --git a/src/Cache.cpp b/src/Cache.cpp
index 3f2bf73a..8cf66d21 100644
--- a/src/Cache.cpp
+++ b/src/Cache.cpp
@@ -108,6 +108,11 @@ Cache::isHiddenEvent(lmdb::txn &txn,
const std::string &room_id)
{
using namespace mtx::events;
+
+ // Always hide edits
+ if (mtx::accessors::relations(e).replaces())
+ return true;
+
if (auto encryptedEvent = std::get_if<EncryptedEvent<msg::Encrypted>>(&e)) {
MegolmSessionIndex index;
index.room_id = room_id;
@@ -1197,25 +1202,24 @@ Cache::calculateRoomReadStatus(const std::string &room_id)
const auto last_event_id = getLastEventId(txn, room_id);
const auto localUser = utils::localUser().toStdString();
+ std::string fullyReadEventId;
+ if (auto ev = getAccountData(txn, mtx::events::EventType::FullyRead, room_id)) {
+ if (auto fr = std::get_if<
+ mtx::events::AccountDataEvent<mtx::events::account_data::FullyRead>>(
+ &ev.value())) {
+ fullyReadEventId = fr->content.event_id;
+ }
+ }
txn.commit();
- if (last_event_id.empty())
- return false;
-
- // Retrieve all read receipts for that event.
- const auto receipts =
- readReceipts(QString::fromStdString(last_event_id), QString::fromStdString(room_id));
-
- if (receipts.size() == 0)
+ if (last_event_id.empty() || fullyReadEventId.empty())
return true;
- // Check if the local user has a read receipt for it.
- for (auto it = receipts.cbegin(); it != receipts.cend(); it++) {
- if (it->second == localUser)
- return false;
- }
+ if (last_event_id == fullyReadEventId)
+ return false;
- return true;
+ // Compare event order: the room is unread if the last event comes after the fully-read marker.
+ return getEventIndex(room_id, last_event_id) > getEventIndex(room_id, fullyReadEventId);
}
void
@@ -1891,6 +1895,108 @@ Cache::getTimelineIndex(const std::string &room_id, std::string_view event_id)
return *val.data<uint64_t>();
}
+std::optional<uint64_t>
+Cache::getEventIndex(const std::string &room_id, std::string_view event_id)
+{
+ if (event_id.empty())
+ return {};
+
+ auto txn = lmdb::txn::begin(env_, nullptr, MDB_RDONLY);
+
+ lmdb::dbi orderDb{0};
+ try {
+ orderDb = getEventToOrderDb(txn, room_id);
+ } catch (lmdb::runtime_error &e) {
+ nhlog::db()->error("Can't open db for room '{}', probably doesn't exist yet. ({})",
+ room_id,
+ e.what());
+ return {};
+ }
+
+ lmdb::val indexVal{event_id.data(), event_id.size()}, val;
+
+ bool success = lmdb::dbi_get(txn, orderDb, indexVal, val);
+ if (!success) {
+ return {};
+ }
+
+ return *val.data<uint64_t>();
+}
+
+std::optional<std::pair<uint64_t, std::string>>
+Cache::lastInvisibleEventAfter(const std::string &room_id, std::string_view event_id)
+{
+ if (event_id.empty())
+ return {};
+
+ auto txn = lmdb::txn::begin(env_, nullptr, MDB_RDONLY);
+
+ lmdb::dbi orderDb{0};
+ lmdb::dbi eventOrderDb{0};
+ lmdb::dbi timelineDb{0};
+ try {
+ orderDb = getEventToOrderDb(txn, room_id);
+ eventOrderDb = getEventOrderDb(txn, room_id);
+ timelineDb = getMessageToOrderDb(txn, room_id);
+ } catch (lmdb::runtime_error &e) {
+ nhlog::db()->error("Can't open db for room '{}', probably doesn't exist yet. ({})",
+ room_id,
+ e.what());
+ return {};
+ }
+
+ lmdb::val eventIdVal{event_id.data(), event_id.size()}, indexVal;
+
+ bool success = lmdb::dbi_get(txn, orderDb, eventIdVal, indexVal);
+ if (!success) {
+ return {};
+ }
+ uint64_t prevIdx = *indexVal.data<uint64_t>();
+ std::string prevId{eventIdVal.data(), eventIdVal.size()};
+
+ auto cursor = lmdb::cursor::open(txn, eventOrderDb);
+ cursor.get(indexVal, MDB_SET);
+ while (cursor.get(indexVal, eventIdVal, MDB_NEXT)) {
+ std::string evId =
+ json::parse(std::string_view(eventIdVal.data(), eventIdVal.size()))["event_id"]
+ .get<std::string>();
+ lmdb::val temp;
+ if (lmdb::dbi_get(txn, timelineDb, lmdb::val(evId.data(), evId.size()), temp)) {
+ return std::pair{prevIdx, std::string(prevId)};
+ } else {
+ prevIdx = *indexVal.data<uint64_t>();
+ prevId = std::move(evId);
+ }
+ }
+
+ return std::pair{prevIdx, std::string(prevId)};
+}
+
+std::optional<uint64_t>
+Cache::getArrivalIndex(const std::string &room_id, std::string_view event_id)
+{
+ auto txn = lmdb::txn::begin(env_, nullptr, MDB_RDONLY);
+
+ lmdb::dbi orderDb{0};
+ try {
+ orderDb = getEventToOrderDb(txn, room_id);
+ } catch (lmdb::runtime_error &e) {
+ nhlog::db()->error("Can't open db for room '{}', probably doesn't exist yet. ({})",
+ room_id,
+ e.what());
+ return {};
+ }
+
+ lmdb::val indexVal{event_id.data(), event_id.size()}, val;
+
+ bool success = lmdb::dbi_get(txn, orderDb, indexVal, val);
+ if (!success) {
+ return {};
+ }
+
+ return *val.data<uint64_t>();
+}
+
std::optional<std::string>
Cache::getTimelineEventId(const std::string &room_id, uint64_t index)
{
@@ -2713,23 +2819,19 @@ Cache::saveTimelineMessages(lmdb::txn &txn,
lmdb::dbi_put(txn, evToOrderDb, event_id, txn_order);
lmdb::dbi_del(txn, evToOrderDb, lmdb::val(txn_id));
- if (event.contains("content") &&
- event["content"].contains("m.relates_to")) {
- auto temp = event["content"]["m.relates_to"];
- json relates_to_j = temp.contains("m.in_reply_to") &&
- temp["m.in_reply_to"].is_object()
- ? temp["m.in_reply_to"]["event_id"]
- : temp["event_id"];
- std::string relates_to =
- relates_to_j.is_string() ? relates_to_j.get<std::string>() : "";
-
- if (!relates_to.empty()) {
- lmdb::dbi_del(txn,
- relationsDb,
- lmdb::val(relates_to),
- lmdb::val(txn_id));
- lmdb::dbi_put(
- txn, relationsDb, lmdb::val(relates_to), event_id);
+ auto relations = mtx::accessors::relations(e);
+ if (!relations.relations.empty()) {
+ for (const auto &r : relations.relations) {
+ if (!r.event_id.empty()) {
+ lmdb::dbi_del(txn,
+ relationsDb,
+ lmdb::val(r.event_id),
+ lmdb::val(txn_id));
+ lmdb::dbi_put(txn,
+ relationsDb,
+ lmdb::val(r.event_id),
+ event_id);
+ }
}
}
@@ -2808,19 +2910,16 @@ Cache::saveTimelineMessages(lmdb::txn &txn,
lmdb::val(&msgIndex, sizeof(msgIndex)));
}
- if (event.contains("content") &&
- event["content"].contains("m.relates_to")) {
- auto temp = event["content"]["m.relates_to"];
- json relates_to_j = temp.contains("m.in_reply_to") &&
- temp["m.in_reply_to"].is_object()
- ? temp["m.in_reply_to"]["event_id"]
- : temp["event_id"];
- std::string relates_to =
- relates_to_j.is_string() ? relates_to_j.get<std::string>() : "";
-
- if (!relates_to.empty())
- lmdb::dbi_put(
- txn, relationsDb, lmdb::val(relates_to), event_id);
+ auto relations = mtx::accessors::relations(e);
+ if (!relations.relations.empty()) {
+ for (const auto &r : relations.relations) {
+ if (!r.event_id.empty()) {
+ lmdb::dbi_put(txn,
+ relationsDb,
+ lmdb::val(r.event_id),
+ event_id);
+ }
+ }
}
}
}
@@ -2901,17 +3000,14 @@ Cache::saveOldMessages(const std::string &room_id, const mtx::responses::Message
txn, msg2orderDb, event_id, lmdb::val(&msgIndex, sizeof(msgIndex)));
}
- if (event.contains("content") && event["content"].contains("m.relates_to")) {
- auto temp = event["content"]["m.relates_to"];
- json relates_to_j =
- temp.contains("m.in_reply_to") && temp["m.in_reply_to"].is_object()
- ? temp["m.in_reply_to"]["event_id"]
- : temp["event_id"];
- std::string relates_to =
- relates_to_j.is_string() ? relates_to_j.get<std::string>() : "";
-
- if (!relates_to.empty())
- lmdb::dbi_put(txn, relationsDb, lmdb::val(relates_to), event_id);
+ auto relations = mtx::accessors::relations(e);
+ if (!relations.relations.empty()) {
+ for (const auto &r : relations.relations) {
+ if (!r.event_id.empty()) {
+ lmdb::dbi_put(
+ txn, relationsDb, lmdb::val(r.event_id), event_id);
+ }
+ }
}
}
@@ -3222,9 +3318,12 @@ Cache::getAccountData(lmdb::txn &txn, mtx::events::EventType type, const std::st
lmdb::val data;
if (lmdb::dbi_get(txn, db, lmdb::val(to_string(type)), data)) {
mtx::responses::utils::RoomAccountDataEvents events;
- mtx::responses::utils::parse_room_account_data_events(
- std::string_view(data.data(), data.size()), events);
- return events.front();
+ json j = json::array({
+ json::parse(std::string_view(data.data(), data.size())),
+ });
+ mtx::responses::utils::parse_room_account_data_events(j, events);
+ if (events.size() == 1)
+ return events.front();
}
} catch (...) {
}
@@ -4233,6 +4332,18 @@ readReceipts(const QString &event_id, const QString &room_id)
return instance_->readReceipts(event_id, room_id);
}
+std::optional<uint64_t>
+getEventIndex(const std::string &room_id, std::string_view event_id)
+{
+ return instance_->getEventIndex(room_id, event_id);
+}
+
+std::optional<std::pair<uint64_t, std::string>>
+lastInvisibleEventAfter(const std::string &room_id, std::string_view event_id)
+{
+ return instance_->lastInvisibleEventAfter(room_id, event_id);
+}
+
QByteArray
image(const QString &url)
{
diff --git a/src/Cache.h b/src/Cache.h
index 91956725..e60fc970 100644
--- a/src/Cache.h
+++ b/src/Cache.h
@@ -168,6 +168,12 @@ using UserReceipts = std::multimap<uint64_t, std::string, std::greater<uint64_t>
UserReceipts
readReceipts(const QString &event_id, const QString &room_id);
+//! Get the index of the event in the event db; this is not the visual (timeline) index.
+std::optional<uint64_t>
+getEventIndex(const std::string &room_id, std::string_view event_id);
+std::optional<std::pair<uint64_t, std::string>>
+lastInvisibleEventAfter(const std::string &room_id, std::string_view event_id);
+
QByteArray
image(const QString &url);
QByteArray
diff --git a/src/Cache_p.h b/src/Cache_p.h
index e2ce1668..431e7bc3 100644
--- a/src/Cache_p.h
+++ b/src/Cache_p.h
@@ -204,7 +204,14 @@ public:
std::optional<TimelineRange> getTimelineRange(const std::string &room_id);
std::optional<uint64_t> getTimelineIndex(const std::string &room_id,
std::string_view event_id);
+ std::optional<uint64_t> getEventIndex(const std::string &room_id,
+ std::string_view event_id);
+ std::optional<std::pair<uint64_t, std::string>> lastInvisibleEventAfter(
+ const std::string &room_id,
+ std::string_view event_id);
std::optional<std::string> getTimelineEventId(const std::string &room_id, uint64_t index);
+ std::optional<uint64_t> getArrivalIndex(const std::string &room_id,
+ std::string_view event_id);
std::string previousBatchToken(const std::string &room_id);
uint64_t saveOldMessages(const std::string &room_id, const mtx::responses::Messages &res);
diff --git a/src/CallDevices.cpp b/src/CallDevices.cpp
new file mode 100644
index 00000000..0b9809e5
--- /dev/null
+++ b/src/CallDevices.cpp
@@ -0,0 +1,437 @@
+#include <cstring>
+#include <optional>
+#include <string_view>
+
+#include "CallDevices.h"
+#include "ChatPage.h"
+#include "Logging.h"
+#include "UserSettingsPage.h"
+
+#ifdef GSTREAMER_AVAILABLE
+extern "C"
+{
+#include "gst/gst.h"
+}
+#endif
+
+CallDevices::CallDevices()
+ : QObject()
+{}
+
+#ifdef GSTREAMER_AVAILABLE
+namespace {
+
+struct AudioSource
+{
+ std::string name;
+ GstDevice *device;
+};
+
+struct VideoSource
+{
+ struct Caps
+ {
+ std::string resolution;
+ std::vector<std::string> frameRates;
+ };
+ std::string name;
+ GstDevice *device;
+ std::vector<Caps> caps;
+};
+
+std::vector<AudioSource> audioSources_;
+std::vector<VideoSource> videoSources_;
+
+using FrameRate = std::pair<int, int>;
+std::optional<FrameRate>
+getFrameRate(const GValue *value)
+{
+ if (GST_VALUE_HOLDS_FRACTION(value)) {
+ gint num = gst_value_get_fraction_numerator(value);
+ gint den = gst_value_get_fraction_denominator(value);
+ return FrameRate{num, den};
+ }
+ return std::nullopt;
+}
+
+void
+addFrameRate(std::vector<std::string> &rates, const FrameRate &rate)
+{
+ constexpr double minimumFrameRate = 15.0;
+ if (static_cast<double>(rate.first) / rate.second >= minimumFrameRate)
+ rates.push_back(std::to_string(rate.first) + "/" + std::to_string(rate.second));
+}
+
+void
+setDefaultDevice(bool isVideo)
+{
+ auto settings = ChatPage::instance()->userSettings();
+ if (isVideo && settings->camera().isEmpty()) {
+ const VideoSource &camera = videoSources_.front();
+ settings->setCamera(QString::fromStdString(camera.name));
+ settings->setCameraResolution(
+ QString::fromStdString(camera.caps.front().resolution));
+ settings->setCameraFrameRate(
+ QString::fromStdString(camera.caps.front().frameRates.front()));
+ } else if (!isVideo && settings->microphone().isEmpty()) {
+ settings->setMicrophone(QString::fromStdString(audioSources_.front().name));
+ }
+}
+
+void
+addDevice(GstDevice *device)
+{
+ if (!device)
+ return;
+
+ gchar *name = gst_device_get_display_name(device);
+ gchar *type = gst_device_get_device_class(device);
+ bool isVideo = !std::strncmp(type, "Video", 5);
+ g_free(type);
+ nhlog::ui()->debug("WebRTC: {} device added: {}", isVideo ? "video" : "audio", name);
+ if (!isVideo) {
+ audioSources_.push_back({name, device});
+ g_free(name);
+ setDefaultDevice(false);
+ return;
+ }
+
+ GstCaps *gstcaps = gst_device_get_caps(device);
+ if (!gstcaps) {
+ nhlog::ui()->debug("WebRTC: unable to get caps for {}", name);
+ g_free(name);
+ return;
+ }
+
+ VideoSource source{name, device, {}};
+ g_free(name);
+ guint nCaps = gst_caps_get_size(gstcaps);
+ for (guint i = 0; i < nCaps; ++i) {
+ GstStructure *structure = gst_caps_get_structure(gstcaps, i);
+ const gchar *name = gst_structure_get_name(structure);
+ if (!std::strcmp(name, "video/x-raw")) {
+ gint widthpx, heightpx;
+ if (gst_structure_get(structure,
+ "width",
+ G_TYPE_INT,
+ &widthpx,
+ "height",
+ G_TYPE_INT,
+ &heightpx,
+ nullptr)) {
+ VideoSource::Caps caps;
+ caps.resolution =
+ std::to_string(widthpx) + "x" + std::to_string(heightpx);
+ const GValue *value =
+ gst_structure_get_value(structure, "framerate");
+ if (auto fr = getFrameRate(value); fr)
+ addFrameRate(caps.frameRates, *fr);
+ else if (GST_VALUE_HOLDS_FRACTION_RANGE(value)) {
+ addFrameRate(
+ caps.frameRates,
+ *getFrameRate(gst_value_get_fraction_range_min(value)));
+ addFrameRate(
+ caps.frameRates,
+ *getFrameRate(gst_value_get_fraction_range_max(value)));
+ } else if (GST_VALUE_HOLDS_LIST(value)) {
+ guint nRates = gst_value_list_get_size(value);
+ for (guint j = 0; j < nRates; ++j) {
+ const GValue *rate =
+ gst_value_list_get_value(value, j);
+ if (auto fr = getFrameRate(rate); fr)
+ addFrameRate(caps.frameRates, *fr);
+ }
+ }
+ if (!caps.frameRates.empty())
+ source.caps.push_back(std::move(caps));
+ }
+ }
+ }
+ gst_caps_unref(gstcaps);
+ videoSources_.push_back(std::move(source));
+ setDefaultDevice(true);
+}
+
+#if GST_CHECK_VERSION(1, 18, 0)
+template<typename T>
+bool
+removeDevice(T &sources, GstDevice *device, bool changed)
+{
+ if (auto it = std::find_if(sources.begin(),
+ sources.end(),
+ [device](const auto &s) { return s.device == device; });
+ it != sources.end()) {
+ nhlog::ui()->debug(std::string("WebRTC: device ") +
+ (changed ? "changed: " : "removed: ") + "{}",
+ it->name);
+ gst_object_unref(device);
+ sources.erase(it);
+ return true;
+ }
+ return false;
+}
+
+void
+removeDevice(GstDevice *device, bool changed)
+{
+ if (device) {
+ if (removeDevice(audioSources_, device, changed) ||
+ removeDevice(videoSources_, device, changed))
+ return;
+ }
+}
+
+gboolean
+newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data G_GNUC_UNUSED)
+{
+ switch (GST_MESSAGE_TYPE(msg)) {
+ case GST_MESSAGE_DEVICE_ADDED: {
+ GstDevice *device;
+ gst_message_parse_device_added(msg, &device);
+ addDevice(device);
+ emit CallDevices::instance().devicesChanged();
+ break;
+ }
+ case GST_MESSAGE_DEVICE_REMOVED: {
+ GstDevice *device;
+ gst_message_parse_device_removed(msg, &device);
+ removeDevice(device, false);
+ emit CallDevices::instance().devicesChanged();
+ break;
+ }
+ case GST_MESSAGE_DEVICE_CHANGED: {
+ GstDevice *device;
+ GstDevice *oldDevice;
+ gst_message_parse_device_changed(msg, &device, &oldDevice);
+ removeDevice(oldDevice, true);
+ addDevice(device);
+ break;
+ }
+ default:
+ break;
+ }
+ return TRUE;
+}
+#endif
+
+template<typename T>
+std::vector<std::string>
+deviceNames(T &sources, const std::string &defaultDevice)
+{
+ std::vector<std::string> ret;
+ ret.reserve(sources.size());
+ for (const auto &s : sources)
+ ret.push_back(s.name);
+
+ // move default device to top of the list
+ if (auto it = std::find(ret.begin(), ret.end(), defaultDevice); it != ret.end())
+ std::swap(ret.front(), *it);
+
+ return ret;
+}
+
+std::optional<VideoSource>
+getVideoSource(const std::string &cameraName)
+{
+ if (auto it = std::find_if(videoSources_.cbegin(),
+ videoSources_.cend(),
+ [&cameraName](const auto &s) { return s.name == cameraName; });
+ it != videoSources_.cend()) {
+ return *it;
+ }
+ return std::nullopt;
+}
+
+std::pair<int, int>
+tokenise(std::string_view str, char delim)
+{
+ std::pair<int, int> ret;
+ ret.first = std::atoi(str.data());
+ auto pos = str.find_first_of(delim);
+ ret.second = std::atoi(str.data() + pos + 1);
+ return ret;
+}
+
+}
+
+void
+CallDevices::init()
+{
+#if GST_CHECK_VERSION(1, 18, 0)
+ static GstDeviceMonitor *monitor = nullptr;
+ if (!monitor) {
+ monitor = gst_device_monitor_new();
+ GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
+ gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
+ gst_device_monitor_add_filter(monitor, "Audio/Duplex", caps);
+ gst_caps_unref(caps);
+ caps = gst_caps_new_empty_simple("video/x-raw");
+ gst_device_monitor_add_filter(monitor, "Video/Source", caps);
+ gst_device_monitor_add_filter(monitor, "Video/Duplex", caps);
+ gst_caps_unref(caps);
+
+ GstBus *bus = gst_device_monitor_get_bus(monitor);
+ gst_bus_add_watch(bus, newBusMessage, nullptr);
+ gst_object_unref(bus);
+ if (!gst_device_monitor_start(monitor)) {
+ nhlog::ui()->error("WebRTC: failed to start device monitor");
+ return;
+ }
+ }
+#endif
+}
+
+void
+CallDevices::refresh()
+{
+#if !GST_CHECK_VERSION(1, 18, 0)
+
+ static GstDeviceMonitor *monitor = nullptr;
+ if (!monitor) {
+ monitor = gst_device_monitor_new();
+ GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
+ gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
+ gst_device_monitor_add_filter(monitor, "Audio/Duplex", caps);
+ gst_caps_unref(caps);
+ caps = gst_caps_new_empty_simple("video/x-raw");
+ gst_device_monitor_add_filter(monitor, "Video/Source", caps);
+ gst_device_monitor_add_filter(monitor, "Video/Duplex", caps);
+ gst_caps_unref(caps);
+ }
+
+ auto clearDevices = [](auto &sources) {
+ std::for_each(
+ sources.begin(), sources.end(), [](auto &s) { gst_object_unref(s.device); });
+ sources.clear();
+ };
+ clearDevices(audioSources_);
+ clearDevices(videoSources_);
+
+ GList *devices = gst_device_monitor_get_devices(monitor);
+ if (devices) {
+ for (GList *l = devices; l != nullptr; l = l->next)
+ addDevice(GST_DEVICE_CAST(l->data));
+ g_list_free(devices);
+ }
+ emit devicesChanged();
+#endif
+}
+
+bool
+CallDevices::haveMic() const
+{
+ return !audioSources_.empty();
+}
+
+bool
+CallDevices::haveCamera() const
+{
+ return !videoSources_.empty();
+}
+
+std::vector<std::string>
+CallDevices::names(bool isVideo, const std::string &defaultDevice) const
+{
+ return isVideo ? deviceNames(videoSources_, defaultDevice)
+ : deviceNames(audioSources_, defaultDevice);
+}
+
+std::vector<std::string>
+CallDevices::resolutions(const std::string &cameraName) const
+{
+ std::vector<std::string> ret;
+ if (auto s = getVideoSource(cameraName); s) {
+ ret.reserve(s->caps.size());
+ for (const auto &c : s->caps)
+ ret.push_back(c.resolution);
+ }
+ return ret;
+}
+
+std::vector<std::string>
+CallDevices::frameRates(const std::string &cameraName, const std::string &resolution) const
+{
+ if (auto s = getVideoSource(cameraName); s) {
+ if (auto it =
+ std::find_if(s->caps.cbegin(),
+ s->caps.cend(),
+ [&](const auto &c) { return c.resolution == resolution; });
+ it != s->caps.cend())
+ return it->frameRates;
+ }
+ return {};
+}
+
+GstDevice *
+CallDevices::audioDevice() const
+{
+ std::string name = ChatPage::instance()->userSettings()->microphone().toStdString();
+ if (auto it = std::find_if(audioSources_.cbegin(),
+ audioSources_.cend(),
+ [&name](const auto &s) { return s.name == name; });
+ it != audioSources_.cend()) {
+ nhlog::ui()->debug("WebRTC: microphone: {}", name);
+ return it->device;
+ } else {
+ nhlog::ui()->error("WebRTC: unknown microphone: {}", name);
+ return nullptr;
+ }
+}
+
+GstDevice *
+CallDevices::videoDevice(std::pair<int, int> &resolution, std::pair<int, int> &frameRate) const
+{
+ auto settings = ChatPage::instance()->userSettings();
+ std::string name = settings->camera().toStdString();
+ if (auto s = getVideoSource(name); s) {
+ nhlog::ui()->debug("WebRTC: camera: {}", name);
+ resolution = tokenise(settings->cameraResolution().toStdString(), 'x');
+ frameRate = tokenise(settings->cameraFrameRate().toStdString(), '/');
+ nhlog::ui()->debug(
+ "WebRTC: camera resolution: {}x{}", resolution.first, resolution.second);
+ nhlog::ui()->debug(
+ "WebRTC: camera frame rate: {}/{}", frameRate.first, frameRate.second);
+ return s->device;
+ } else {
+ nhlog::ui()->error("WebRTC: unknown camera: {}", name);
+ return nullptr;
+ }
+}
+
+#else
+
+void
+CallDevices::refresh()
+{}
+
+bool
+CallDevices::haveMic() const
+{
+ return false;
+}
+
+bool
+CallDevices::haveCamera() const
+{
+ return false;
+}
+
+std::vector<std::string>
+CallDevices::names(bool, const std::string &) const
+{
+ return {};
+}
+
+std::vector<std::string>
+CallDevices::resolutions(const std::string &) const
+{
+ return {};
+}
+
+std::vector<std::string>
+CallDevices::frameRates(const std::string &, const std::string &) const
+{
+ return {};
+}
+
+#endif
diff --git a/src/CallDevices.h b/src/CallDevices.h
new file mode 100644
index 00000000..2b4129f1
--- /dev/null
+++ b/src/CallDevices.h
@@ -0,0 +1,45 @@
+#pragma once
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include <QObject>
+
+typedef struct _GstDevice GstDevice;
+
+class CallDevices : public QObject
+{
+ Q_OBJECT
+
+public:
+ static CallDevices &instance()
+ {
+ static CallDevices instance;
+ return instance;
+ }
+
+ void refresh();
+ bool haveMic() const;
+ bool haveCamera() const;
+ std::vector<std::string> names(bool isVideo, const std::string &defaultDevice) const;
+ std::vector<std::string> resolutions(const std::string &cameraName) const;
+ std::vector<std::string> frameRates(const std::string &cameraName,
+ const std::string &resolution) const;
+
+signals:
+ void devicesChanged();
+
+private:
+ CallDevices();
+
+ friend class WebRTCSession;
+ void init();
+ GstDevice *audioDevice() const;
+ GstDevice *videoDevice(std::pair<int, int> &resolution,
+ std::pair<int, int> &frameRate) const;
+
+public:
+ CallDevices(CallDevices const &) = delete;
+ void operator=(CallDevices const &) = delete;
+};
diff --git a/src/CallManager.cpp b/src/CallManager.cpp
index 0841a079..7acd9592 100644
--- a/src/CallManager.cpp
+++ b/src/CallManager.cpp
@@ -7,6 +7,7 @@
#include <QUrl>
#include "Cache.h"
+#include "CallDevices.h"
#include "CallManager.h"
#include "ChatPage.h"
#include "Logging.h"
@@ -114,21 +115,10 @@ CallManager::CallManager(QObject *parent)
emit newCallState();
});
- connect(&session_, &WebRTCSession::devicesChanged, this, [this]() {
- if (ChatPage::instance()->userSettings()->microphone().isEmpty()) {
- auto mics = session_.getDeviceNames(false, std::string());
- if (!mics.empty())
- ChatPage::instance()->userSettings()->setMicrophone(
- QString::fromStdString(mics.front()));
- }
- if (ChatPage::instance()->userSettings()->camera().isEmpty()) {
- auto cameras = session_.getDeviceNames(true, std::string());
- if (!cameras.empty())
- ChatPage::instance()->userSettings()->setCamera(
- QString::fromStdString(cameras.front()));
- }
- emit devicesChanged();
- });
+ connect(&CallDevices::instance(),
+ &CallDevices::devicesChanged,
+ this,
+ &CallManager::devicesChanged);
connect(&player_,
&QMediaPlayer::mediaStatusChanged,
@@ -292,7 +282,7 @@ CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
haveCallInvite_ = true;
isVideo_ = isVideo;
inviteSDP_ = callInviteEvent.content.sdp;
- session_.refreshDevices();
+ CallDevices::instance().refresh();
emit newInviteState();
}
@@ -409,7 +399,7 @@ CallManager::devices(bool isVideo) const
const QString &defaultDevice = isVideo ? ChatPage::instance()->userSettings()->camera()
: ChatPage::instance()->userSettings()->microphone();
std::vector<std::string> devices =
- session_.getDeviceNames(isVideo, defaultDevice.toStdString());
+ CallDevices::instance().names(isVideo, defaultDevice.toStdString());
ret.reserve(devices.size());
std::transform(devices.cbegin(),
devices.cend(),
diff --git a/src/CallManager.h b/src/CallManager.h
index 7d388efd..97cffbc8 100644
--- a/src/CallManager.h
+++ b/src/CallManager.h
@@ -8,6 +8,7 @@
#include <QString>
#include <QTimer>
+#include "CallDevices.h"
#include "WebRTCSession.h"
#include "mtx/events/collections.hpp"
#include "mtx/events/voip.hpp"
@@ -53,7 +54,7 @@ public:
public slots:
void sendInvite(const QString &roomid, bool isVideo);
void syncEvent(const mtx::events::collections::TimelineEvents &event);
- void refreshDevices() { session_.refreshDevices(); }
+ void refreshDevices() { CallDevices::instance().refresh(); }
void toggleMicMute();
void toggleCameraView() { session_.toggleCameraView(); }
void acceptInvite();
diff --git a/src/ChatPage.cpp b/src/ChatPage.cpp
index db80ecd5..45802789 100644
--- a/src/ChatPage.cpp
+++ b/src/ChatPage.cpp
@@ -448,15 +448,14 @@ void
ChatPage::deleteConfigs()
{
QSettings settings;
+
+ if (UserSettings::instance()->profile() != "") {
+ settings.beginGroup("profile");
+ settings.beginGroup(UserSettings::instance()->profile());
+ }
settings.beginGroup("auth");
settings.remove("");
- settings.endGroup();
- settings.beginGroup("client");
- settings.remove("");
- settings.endGroup();
- settings.beginGroup("notifications");
- settings.remove("");
- settings.endGroup();
+ settings.endGroup(); // auth
http::client()->shutdown();
cache::deleteData();
@@ -669,8 +668,6 @@ ChatPage::sendNotifications(const mtx::responses::Notifications &res)
if (!cache::isNotificationSent(event_id)) {
const auto room_id = QString::fromStdString(item.room_id);
- const auto user_id =
- QString::fromStdString(mtx::accessors::sender(item.event));
// We should only sent one notification per event.
cache::markSentNotification(event_id);
@@ -690,15 +687,9 @@ ChatPage::sendNotifications(const mtx::responses::Notifications &res)
QString::fromStdString(info.avatar_url),
96,
this,
- [this, room_id, event_id, item, user_id, info](
- QPixmap image) {
+ [this, item](QPixmap image) {
notificationsManager.postNotification(
- room_id,
- QString::fromStdString(event_id),
- QString::fromStdString(info.name),
- cache::displayName(room_id, user_id),
- utils::event_body(item.event),
- image.toImage());
+ item, image.toImage());
});
}
}
diff --git a/src/DeviceVerificationFlow.cpp b/src/DeviceVerificationFlow.cpp
index 51ef79fd..c6277a9d 100644
--- a/src/DeviceVerificationFlow.cpp
+++ b/src/DeviceVerificationFlow.cpp
@@ -105,8 +105,8 @@ DeviceVerificationFlow::DeviceVerificationFlow(QObject *,
if (msg.transaction_id.has_value()) {
if (msg.transaction_id.value() != this->transaction_id)
return;
- } else if (msg.relates_to.has_value()) {
- if (msg.relates_to.value().event_id != this->relation.event_id)
+ } else if (msg.relations.references()) {
+ if (msg.relations.references() != this->relation.event_id)
return;
}
if ((msg.key_agreement_protocol == "curve25519-hkdf-sha256") &&
@@ -136,8 +136,8 @@ DeviceVerificationFlow::DeviceVerificationFlow(QObject *,
if (msg.transaction_id.has_value()) {
if (msg.transaction_id.value() != this->transaction_id)
return;
- } else if (msg.relates_to.has_value()) {
- if (msg.relates_to.value().event_id != this->relation.event_id)
+ } else if (msg.relations.references()) {
+ if (msg.relations.references() != this->relation.event_id)
return;
}
error_ = User;
@@ -152,8 +152,8 @@ DeviceVerificationFlow::DeviceVerificationFlow(QObject *,
if (msg.transaction_id.has_value()) {
if (msg.transaction_id.value() != this->transaction_id)
return;
- } else if (msg.relates_to.has_value()) {
- if (msg.relates_to.value().event_id != this->relation.event_id)
+ } else if (msg.relations.references()) {
+ if (msg.relations.references() != this->relation.event_id)
return;
}
@@ -217,8 +217,8 @@ DeviceVerificationFlow::DeviceVerificationFlow(QObject *,
if (msg.transaction_id.has_value()) {
if (msg.transaction_id.value() != this->transaction_id)
return;
- } else if (msg.relates_to.has_value()) {
- if (msg.relates_to.value().event_id != this->relation.event_id)
+ } else if (msg.relations.references()) {
+ if (msg.relations.references() != this->relation.event_id)
return;
}
@@ -385,8 +385,8 @@ DeviceVerificationFlow::DeviceVerificationFlow(QObject *,
if (msg.transaction_id.has_value()) {
if (msg.transaction_id.value() != this->transaction_id)
return;
- } else if ((msg.relates_to.has_value() && sender)) {
- if (msg.relates_to.value().event_id != this->relation.event_id)
+ } else if (msg.relations.references()) {
+ if (msg.relations.references() != this->relation.event_id)
return;
else {
this->deviceId = QString::fromStdString(msg.from_device);
@@ -402,8 +402,8 @@ DeviceVerificationFlow::DeviceVerificationFlow(QObject *,
if (msg.transaction_id.has_value()) {
if (msg.transaction_id.value() != this->transaction_id)
return;
- } else if (msg.relates_to.has_value()) {
- if (msg.relates_to.value().event_id != this->relation.event_id)
+ } else if (msg.relations.references()) {
+ if (msg.relations.references() != this->relation.event_id)
return;
}
nhlog::ui()->info("Flow done on other side");
@@ -526,8 +526,8 @@ DeviceVerificationFlow::handleStartMessage(const mtx::events::msg::KeyVerificati
if (msg.transaction_id.has_value()) {
if (msg.transaction_id.value() != this->transaction_id)
return;
- } else if (msg.relates_to.has_value()) {
- if (msg.relates_to.value().event_id != this->relation.event_id)
+ } else if (msg.relations.references()) {
+ if (msg.relations.references() != this->relation.event_id)
return;
}
if ((std::find(msg.key_agreement_protocols.begin(),
@@ -625,8 +625,10 @@ DeviceVerificationFlow::startVerificationRequest()
req.transaction_id = this->transaction_id;
this->canonical_json = nlohmann::json(req);
} else if (this->type == DeviceVerificationFlow::Type::RoomMsg && model_) {
- req.relates_to = this->relation;
- this->canonical_json = nlohmann::json(req);
+ req.relations.relations.push_back(this->relation);
+ // Set synthesized to suppress the nheko relation extensions
+ req.relations.synthesized = true;
+ this->canonical_json = nlohmann::json(req);
}
send(req);
setState(WaitingForOtherToAccept);
diff --git a/src/DeviceVerificationFlow.h b/src/DeviceVerificationFlow.h
index 34b78962..6c613545 100644
--- a/src/DeviceVerificationFlow.h
+++ b/src/DeviceVerificationFlow.h
@@ -206,7 +206,7 @@ private:
std::vector<int> sasList;
UserKeyCache their_keys;
TimelineModel *model_;
- mtx::common::RelatesTo relation;
+ mtx::common::Relation relation;
State state_ = PromptStartVerification;
Error error_ = UnknownMethod;
@@ -230,8 +230,12 @@ private:
static_cast<int>(err->status_code));
});
} else if (this->type == DeviceVerificationFlow::Type::RoomMsg && model_) {
- if constexpr (!std::is_same_v<T, mtx::events::msg::KeyVerificationRequest>)
- msg.relates_to = this->relation;
+ if constexpr (!std::is_same_v<T,
+ mtx::events::msg::KeyVerificationRequest>) {
+ msg.relations.relations.push_back(this->relation);
+ // Set synthesized to suppress the nheko relation extensions
+ msg.relations.synthesized = true;
+ }
(model_)->sendMessageEvent(msg, mtx::events::to_device_content_to_type<T>);
}
diff --git a/src/EventAccessors.cpp b/src/EventAccessors.cpp
index 3ae781f0..e6bc61b0 100644
--- a/src/EventAccessors.cpp
+++ b/src/EventAccessors.cpp
@@ -34,6 +34,20 @@ struct detector<Default, std::void_t<Op<Args...>>, Op, Args...>
template<template<class...> class Op, class... Args>
using is_detected = typename detail::detector<nonesuch, void, Op, Args...>::value_t;
+struct IsStateEvent
+{
+ template<class T>
+ bool operator()(const mtx::events::StateEvent<T> &)
+ {
+ return true;
+ }
+ template<class T>
+ bool operator()(const mtx::events::Event<T> &)
+ {
+ return false;
+ }
+};
+
struct EventMsgType
{
template<class E>
@@ -250,31 +264,31 @@ struct EventFilesize
}
};
-struct EventInReplyTo
+struct EventRelations
{
template<class Content>
- using related_ev_id_t = decltype(Content::relates_to.in_reply_to.event_id);
+ using related_ev_id_t = decltype(Content::relations);
template<class T>
- std::string operator()(const mtx::events::Event<T> &e)
+ mtx::common::Relations operator()(const mtx::events::Event<T> &e)
{
if constexpr (is_detected<related_ev_id_t, T>::value) {
- return e.content.relates_to.in_reply_to.event_id;
+ return e.content.relations;
}
- return "";
+ return {};
}
};
-struct EventRelatesTo
+struct SetEventRelations
{
+ mtx::common::Relations new_relations;
template<class Content>
- using related_ev_id_t = decltype(Content::relates_to.event_id);
+ using related_ev_id_t = decltype(Content::relations);
template<class T>
- std::string operator()(const mtx::events::Event<T> &e)
+ void operator()(mtx::events::Event<T> &e)
{
if constexpr (is_detected<related_ev_id_t, T>::value) {
- return e.content.relates_to.event_id;
+ e.content.relations = std::move(new_relations);
}
- return "";
}
};
@@ -434,15 +448,17 @@ mtx::accessors::mimetype(const mtx::events::collections::TimelineEvents &event)
{
return std::visit(EventMimeType{}, event);
}
-std::string
-mtx::accessors::in_reply_to_event(const mtx::events::collections::TimelineEvents &event)
+mtx::common::Relations
+mtx::accessors::relations(const mtx::events::collections::TimelineEvents &event)
{
- return std::visit(EventInReplyTo{}, event);
+ return std::visit(EventRelations{}, event);
}
-std::string
-mtx::accessors::relates_to_event_id(const mtx::events::collections::TimelineEvents &event)
+
+void
+mtx::accessors::set_relations(mtx::events::collections::TimelineEvents &event,
+ mtx::common::Relations relations)
{
- return std::visit(EventRelatesTo{}, event);
+ std::visit(SetEventRelations{std::move(relations)}, event);
}
std::string
@@ -474,3 +490,9 @@ mtx::accessors::serialize_event(const mtx::events::collections::TimelineEvents &
{
return std::visit([](const auto &e) { return nlohmann::json(e); }, event);
}
+
+bool
+mtx::accessors::is_state_event(const mtx::events::collections::TimelineEvents &event)
+{
+ return std::visit(IsStateEvent{}, event);
+}
diff --git a/src/EventAccessors.h b/src/EventAccessors.h
index 0cdc5f89..7bf695fc 100644
--- a/src/EventAccessors.h
+++ b/src/EventAccessors.h
@@ -17,6 +17,9 @@ room_id(const mtx::events::collections::TimelineEvents &event);
std::string
sender(const mtx::events::collections::TimelineEvents &event);
+bool
+is_state_event(const mtx::events::collections::TimelineEvents &event);
+
QDateTime
origin_server_ts(const mtx::events::collections::TimelineEvents &event);
@@ -53,10 +56,10 @@ std::string
blurhash(const mtx::events::collections::TimelineEvents &event);
std::string
mimetype(const mtx::events::collections::TimelineEvents &event);
-std::string
-in_reply_to_event(const mtx::events::collections::TimelineEvents &event);
-std::string
-relates_to_event_id(const mtx::events::collections::TimelineEvents &event);
+mtx::common::Relations
+relations(const mtx::events::collections::TimelineEvents &event);
+void
+set_relations(mtx::events::collections::TimelineEvents &event, mtx::common::Relations relations);
std::string
transaction_id(const mtx::events::collections::TimelineEvents &event);
diff --git a/src/Olm.cpp b/src/Olm.cpp
index 4ccf8ab9..54be4751 100644
--- a/src/Olm.cpp
+++ b/src/Olm.cpp
@@ -575,29 +575,19 @@ encrypt_group_message(const std::string &room_id, const std::string &device_id,
if (!sendSessionTo.empty())
olm::send_encrypted_to_device_messages(sendSessionTo, megolm_payload);
- mtx::common::ReplyRelatesTo relation;
- mtx::common::RelatesTo r_relation;
-
// relations shouldn't be encrypted...
- if (body["content"].contains("m.relates_to")) {
- if (body["content"]["m.relates_to"].contains("m.in_reply_to")) {
- relation = body["content"]["m.relates_to"];
- } else if (body["content"]["m.relates_to"].contains("event_id")) {
- r_relation = body["content"]["m.relates_to"];
- }
- }
+ mtx::common::Relations relations = mtx::common::parse_relations(body["content"]);
auto payload = olm::client()->encrypt_group_message(session.get(), body.dump());
// Prepare the m.room.encrypted event.
msg::Encrypted data;
- data.ciphertext = std::string((char *)payload.data(), payload.size());
- data.sender_key = olm::client()->identity_keys().curve25519;
- data.session_id = mtx::crypto::session_id(session.get());
- data.device_id = device_id;
- data.algorithm = MEGOLM_ALGO;
- data.relates_to = relation;
- data.r_relates_to = r_relation;
+ data.ciphertext = std::string((char *)payload.data(), payload.size());
+ data.sender_key = olm::client()->identity_keys().curve25519;
+ data.session_id = mtx::crypto::session_id(session.get());
+ data.device_id = device_id;
+ data.algorithm = MEGOLM_ALGO;
+ data.relations = relations;
group_session_data.message_index = olm_outbound_group_session_message_index(session.get());
nhlog::crypto()->debug("next message_index {}", group_session_data.message_index);
@@ -910,8 +900,7 @@ decryptEvent(const MegolmSessionIndex &index,
body["unsigned"] = event.unsigned_data;
// relations are unencrypted in content...
- if (json old_ev = event; old_ev["content"].count("m.relates_to") != 0)
- body["content"]["m.relates_to"] = old_ev["content"]["m.relates_to"];
+ mtx::common::add_relations(body["content"], event.content.relations);
mtx::events::collections::TimelineEvent te;
try {
diff --git a/src/UserSettingsPage.cpp b/src/UserSettingsPage.cpp
index 96c07d7c..b6fdf504 100644
--- a/src/UserSettingsPage.cpp
+++ b/src/UserSettingsPage.cpp
@@ -39,12 +39,12 @@
#include <QtQml>
#include "Cache.h"
+#include "CallDevices.h"
#include "Config.h"
#include "MatrixClient.h"
#include "Olm.h"
#include "UserSettingsPage.h"
#include "Utils.h"
-#include "WebRTCSession.h"
#include "ui/FlatButton.h"
#include "ui/ToggleButton.h"
@@ -115,8 +115,8 @@ UserSettings::load(std::optional<QString> profile)
cameraFrameRate_ = settings.value("user/camera_frame_rate", QString()).toString();
useStunServer_ = settings.value("user/use_stun_server", false).toBool();
- if (profile)
- profile_ = *profile;
+ if (profile) // set to "" if it's the default to maintain compatibility
+ profile_ = (*profile == "default") ? "" : *profile;
else
profile_ = settings.value("user/currentProfile", "").toString();
@@ -341,7 +341,13 @@ UserSettings::setEmojiFontFamily(QString family)
{
if (family == emojiFont_)
return;
- emojiFont_ = family;
+
+ if (family == tr("Default")) {
+ emojiFont_ = "default";
+ } else {
+ emojiFont_ = family;
+ }
+
emit emojiFontChanged(family);
save();
}
@@ -725,11 +731,15 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
// TODO: Is there a way to limit to just emojis, rather than
// all emoji fonts?
auto emojiFamilies = fontDb.families(QFontDatabase::Symbol);
+ emojiFontSelectionCombo_->addItem(tr("Default"));
for (const auto &family : emojiFamilies) {
emojiFontSelectionCombo_->addItem(family);
}
- fontSelectionCombo_->setCurrentIndex(fontSelectionCombo_->findText(settings_->font()));
+ QString currentFont = settings_->font();
+ if (currentFont != "default" && currentFont != "") {
+ fontSelectionCombo_->setCurrentIndex(fontSelectionCombo_->findText(currentFont));
+ }
emojiFontSelectionCombo_->setCurrentIndex(
emojiFontSelectionCombo_->findText(settings_->emojiFont()));
@@ -1060,7 +1070,7 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
[this](const QString &camera) {
settings_->setCamera(camera);
std::vector<std::string> resolutions =
- WebRTCSession::instance().getResolutions(camera.toStdString());
+ CallDevices::instance().resolutions(camera.toStdString());
cameraResolutionCombo_->clear();
for (const auto &resolution : resolutions)
cameraResolutionCombo_->addItem(QString::fromStdString(resolution));
@@ -1070,9 +1080,8 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
static_cast<void (QComboBox::*)(const QString &)>(&QComboBox::currentTextChanged),
[this](const QString &resolution) {
settings_->setCameraResolution(resolution);
- std::vector<std::string> frameRates =
- WebRTCSession::instance().getFrameRates(settings_->camera().toStdString(),
- resolution.toStdString());
+ std::vector<std::string> frameRates = CallDevices::instance().frameRates(
+ settings_->camera().toStdString(), resolution.toStdString());
cameraFrameRateCombo_->clear();
for (const auto &frameRate : frameRates)
cameraFrameRateCombo_->addItem(QString::fromStdString(frameRate));
@@ -1231,9 +1240,8 @@ UserSettingsPage::showEvent(QShowEvent *)
timelineMaxWidthSpin_->setValue(settings_->timelineMaxWidth());
privacyScreenTimeout_->setValue(settings_->privacyScreenTimeout());
- WebRTCSession::instance().refreshDevices();
- auto mics =
- WebRTCSession::instance().getDeviceNames(false, settings_->microphone().toStdString());
+ CallDevices::instance().refresh();
+ auto mics = CallDevices::instance().names(false, settings_->microphone().toStdString());
microphoneCombo_->clear();
for (const auto &m : mics)
microphoneCombo_->addItem(QString::fromStdString(m));
@@ -1241,8 +1249,7 @@ UserSettingsPage::showEvent(QShowEvent *)
auto cameraResolution = settings_->cameraResolution();
auto cameraFrameRate = settings_->cameraFrameRate();
- auto cameras =
- WebRTCSession::instance().getDeviceNames(true, settings_->camera().toStdString());
+ auto cameras = CallDevices::instance().names(true, settings_->camera().toStdString());
cameraCombo_->clear();
for (const auto &c : cameras)
cameraCombo_->addItem(QString::fromStdString(c));
diff --git a/src/UserSettingsPage.h b/src/UserSettingsPage.h
index b65e1efc..49de94b3 100644
--- a/src/UserSettingsPage.h
+++ b/src/UserSettingsPage.h
@@ -177,7 +177,14 @@ public:
int timelineMaxWidth() const { return timelineMaxWidth_; }
double fontSize() const { return baseFontSize_; }
QString font() const { return font_; }
- QString emojiFont() const { return emojiFont_; }
+ QString emojiFont() const
+ {
+ if (emojiFont_ == "default") {
+ return tr("Default");
+ }
+
+ return emojiFont_;
+ }
Presence presence() const { return presence_; }
QString ringtone() const { return ringtone_; }
QString microphone() const { return microphone_; }
diff --git a/src/Utils.cpp b/src/Utils.cpp
index 5af5748e..991fa550 100644
--- a/src/Utils.cpp
+++ b/src/Utils.cpp
@@ -20,6 +20,7 @@
#include "Cache.h"
#include "Config.h"
#include "MatrixClient.h"
+#include "UserSettingsPage.h"
using TimelineEvent = mtx::events::collections::TimelineEvents;
@@ -65,14 +66,12 @@ utils::replaceEmoji(const QString &body)
QVector<uint> utf32_string = body.toUcs4();
- QSettings settings;
- QString userFontFamily = settings.value("user/emoji_font_family", "emoji").toString();
-
bool insideFontBlock = false;
for (auto &code : utf32_string) {
if (utils::codepointIsEmoji(code)) {
if (!insideFontBlock) {
- fmtBody += QString("<font face=\"" + userFontFamily + "\">");
+ fmtBody += QString("<font face=\"" +
+ UserSettings::instance()->emojiFont() + "\">");
insideFontBlock = true;
}
@@ -505,13 +504,7 @@ utils::getQuoteBody(const RelatedInfo &related)
QString
utils::linkColor()
{
- QSettings settings;
- // Default to system theme if QT_QPA_PLATFORMTHEME var is set.
- QString defaultTheme =
- QProcessEnvironment::systemEnvironment().value("QT_QPA_PLATFORMTHEME", "").isEmpty()
- ? "light"
- : "system";
- const auto theme = settings.value("user/theme", defaultTheme).toString();
+ const auto theme = UserSettings::instance()->theme();
if (theme == "light") {
return "#0077b5";
diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp
index d306007d..b6d98058 100644
--- a/src/WebRTCSession.cpp
+++ b/src/WebRTCSession.cpp
@@ -35,6 +35,7 @@ using webrtc::State;
WebRTCSession::WebRTCSession()
: QObject()
+ , devices_(CallDevices::instance())
{
qRegisterMetaType<webrtc::State>();
qmlRegisterUncreatableMetaObject(
@@ -68,9 +69,7 @@ WebRTCSession::init(std::string *errorMessage)
gchar *version = gst_version_string();
nhlog::ui()->info("WebRTC: initialised {}", version);
g_free(version);
-#if GST_CHECK_VERSION(1, 18, 0)
- startDeviceMonitor();
-#endif
+ devices_.init();
return true;
#else
(void)errorMessage;
@@ -81,195 +80,17 @@ WebRTCSession::init(std::string *errorMessage)
#ifdef GSTREAMER_AVAILABLE
namespace {
-struct AudioSource
-{
- std::string name;
- GstDevice *device;
-};
-
-struct VideoSource
-{
- struct Caps
- {
- std::string resolution;
- std::vector<std::string> frameRates;
- };
- std::string name;
- GstDevice *device;
- std::vector<Caps> caps;
-};
-
std::string localsdp_;
std::vector<mtx::events::msg::CallCandidates::Candidate> localcandidates_;
bool haveAudioStream_;
bool haveVideoStream_;
-std::vector<AudioSource> audioSources_;
-std::vector<VideoSource> videoSources_;
GstPad *insetSinkPad_ = nullptr;
-using FrameRate = std::pair<int, int>;
-std::optional<FrameRate>
-getFrameRate(const GValue *value)
-{
- if (GST_VALUE_HOLDS_FRACTION(value)) {
- gint num = gst_value_get_fraction_numerator(value);
- gint den = gst_value_get_fraction_denominator(value);
- return FrameRate{num, den};
- }
- return std::nullopt;
-}
-
-void
-addFrameRate(std::vector<std::string> &rates, const FrameRate &rate)
-{
- constexpr double minimumFrameRate = 15.0;
- if (static_cast<double>(rate.first) / rate.second >= minimumFrameRate)
- rates.push_back(std::to_string(rate.first) + "/" + std::to_string(rate.second));
-}
-
-std::pair<int, int>
-tokenise(std::string_view str, char delim)
-{
- std::pair<int, int> ret;
- ret.first = std::atoi(str.data());
- auto pos = str.find_first_of(delim);
- ret.second = std::atoi(str.data() + pos + 1);
- return ret;
-}
-
-void
-addDevice(GstDevice *device)
-{
- if (!device)
- return;
-
- gchar *name = gst_device_get_display_name(device);
- gchar *type = gst_device_get_device_class(device);
- bool isVideo = !std::strncmp(type, "Video", 5);
- g_free(type);
- nhlog::ui()->debug("WebRTC: {} device added: {}", isVideo ? "video" : "audio", name);
- if (!isVideo) {
- audioSources_.push_back({name, device});
- g_free(name);
- return;
- }
-
- GstCaps *gstcaps = gst_device_get_caps(device);
- if (!gstcaps) {
- nhlog::ui()->debug("WebRTC: unable to get caps for {}", name);
- g_free(name);
- return;
- }
-
- VideoSource source{name, device, {}};
- g_free(name);
- guint nCaps = gst_caps_get_size(gstcaps);
- for (guint i = 0; i < nCaps; ++i) {
- GstStructure *structure = gst_caps_get_structure(gstcaps, i);
- const gchar *name = gst_structure_get_name(structure);
- if (!std::strcmp(name, "video/x-raw")) {
- gint widthpx, heightpx;
- if (gst_structure_get(structure,
- "width",
- G_TYPE_INT,
- &widthpx,
- "height",
- G_TYPE_INT,
- &heightpx,
- nullptr)) {
- VideoSource::Caps caps;
- caps.resolution =
- std::to_string(widthpx) + "x" + std::to_string(heightpx);
- const GValue *value =
- gst_structure_get_value(structure, "framerate");
- if (auto fr = getFrameRate(value); fr)
- addFrameRate(caps.frameRates, *fr);
- else if (GST_VALUE_HOLDS_FRACTION_RANGE(value)) {
- const GValue *minRate =
- gst_value_get_fraction_range_min(value);
- if (auto fr = getFrameRate(minRate); fr)
- addFrameRate(caps.frameRates, *fr);
- const GValue *maxRate =
- gst_value_get_fraction_range_max(value);
- if (auto fr = getFrameRate(maxRate); fr)
- addFrameRate(caps.frameRates, *fr);
- } else if (GST_VALUE_HOLDS_LIST(value)) {
- guint nRates = gst_value_list_get_size(value);
- for (guint j = 0; j < nRates; ++j) {
- const GValue *rate =
- gst_value_list_get_value(value, j);
- if (auto fr = getFrameRate(rate); fr)
- addFrameRate(caps.frameRates, *fr);
- }
- }
- if (!caps.frameRates.empty())
- source.caps.push_back(std::move(caps));
- }
- }
- }
- gst_caps_unref(gstcaps);
- videoSources_.push_back(std::move(source));
-}
-
-#if GST_CHECK_VERSION(1, 18, 0)
-template<typename T>
-bool
-removeDevice(T &sources, GstDevice *device, bool changed)
-{
- if (auto it = std::find_if(sources.begin(),
- sources.end(),
- [device](const auto &s) { return s.device == device; });
- it != sources.end()) {
- nhlog::ui()->debug(std::string("WebRTC: device ") +
- (changed ? "changed: " : "removed: ") + "{}",
- it->name);
- gst_object_unref(device);
- sources.erase(it);
- return true;
- }
- return false;
-}
-
-void
-removeDevice(GstDevice *device, bool changed)
-{
- if (device) {
- if (removeDevice(audioSources_, device, changed) ||
- removeDevice(videoSources_, device, changed))
- return;
- }
-}
-#endif
-
gboolean
newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
{
WebRTCSession *session = static_cast<WebRTCSession *>(user_data);
switch (GST_MESSAGE_TYPE(msg)) {
-#if GST_CHECK_VERSION(1, 18, 0)
- case GST_MESSAGE_DEVICE_ADDED: {
- GstDevice *device;
- gst_message_parse_device_added(msg, &device);
- addDevice(device);
- emit WebRTCSession::instance().devicesChanged();
- break;
- }
- case GST_MESSAGE_DEVICE_REMOVED: {
- GstDevice *device;
- gst_message_parse_device_removed(msg, &device);
- removeDevice(device, false);
- emit WebRTCSession::instance().devicesChanged();
- break;
- }
- case GST_MESSAGE_DEVICE_CHANGED: {
- GstDevice *device;
- GstDevice *oldDevice;
- gst_message_parse_device_changed(msg, &device, &oldDevice);
- removeDevice(oldDevice, true);
- addDevice(device);
- break;
- }
-#endif
case GST_MESSAGE_EOS:
nhlog::ui()->error("WebRTC: end of stream");
session->end();
@@ -724,27 +545,6 @@ getMediaAttributes(const GstSDPMessage *sdp,
return false;
}
-template<typename T>
-std::vector<std::string>
-deviceNames(T &sources, const std::string &defaultDevice)
-{
- std::vector<std::string> ret;
- ret.reserve(sources.size());
- std::transform(sources.cbegin(),
- sources.cend(),
- std::back_inserter(ret),
- [](const auto &s) { return s.name; });
-
- // move default device to top of the list
- if (auto it = std::find_if(ret.begin(),
- ret.end(),
- [&defaultDevice](const auto &s) { return s == defaultDevice; });
- it != ret.end())
- std::swap(ret.front(), *it);
-
- return ret;
-}
-
}
bool
@@ -995,19 +795,11 @@ WebRTCSession::startPipeline(int opusPayloadType, int vp8PayloadType)
bool
WebRTCSession::createPipeline(int opusPayloadType, int vp8PayloadType)
{
- std::string microphoneSetting =
- ChatPage::instance()->userSettings()->microphone().toStdString();
- auto it =
- std::find_if(audioSources_.cbegin(),
- audioSources_.cend(),
- [µphoneSetting](const auto &s) { return s.name == microphoneSetting; });
- if (it == audioSources_.cend()) {
- nhlog::ui()->error("WebRTC: unknown microphone: {}", microphoneSetting);
+ GstDevice *device = devices_.audioDevice();
+ if (!device)
return false;
- }
- nhlog::ui()->debug("WebRTC: microphone: {}", microphoneSetting);
- GstElement *source = gst_device_create_element(it->device, nullptr);
+ GstElement *source = gst_device_create_element(device, nullptr);
GstElement *volume = gst_element_factory_make("volume", "srclevel");
GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
GstElement *resample = gst_element_factory_make("audioresample", nullptr);
@@ -1070,30 +862,16 @@ bool
WebRTCSession::addVideoPipeline(int vp8PayloadType)
{
// allow incoming video calls despite localUser having no webcam
- if (videoSources_.empty())
+ if (!devices_.haveCamera())
return !isOffering_;
- QSharedPointer<UserSettings> settings = ChatPage::instance()->userSettings();
- std::string cameraSetting = settings->camera().toStdString();
- auto it = std::find_if(videoSources_.cbegin(),
- videoSources_.cend(),
- [&cameraSetting](const auto &s) { return s.name == cameraSetting; });
- if (it == videoSources_.cend()) {
- nhlog::ui()->error("WebRTC: unknown camera: {}", cameraSetting);
+ std::pair<int, int> resolution;
+ std::pair<int, int> frameRate;
+ GstDevice *device = devices_.videoDevice(resolution, frameRate);
+ if (!device)
return false;
- }
- std::string resSetting = settings->cameraResolution().toStdString();
- const std::string &res = resSetting.empty() ? it->caps.front().resolution : resSetting;
- std::string frSetting = settings->cameraFrameRate().toStdString();
- const std::string &fr = frSetting.empty() ? it->caps.front().frameRates.front() : frSetting;
- auto resolution = tokenise(res, 'x');
- auto frameRate = tokenise(fr, '/');
- nhlog::ui()->debug("WebRTC: camera: {}", cameraSetting);
- nhlog::ui()->debug("WebRTC: camera resolution: {}x{}", resolution.first, resolution.second);
- nhlog::ui()->debug("WebRTC: camera frame rate: {}/{}", frameRate.first, frameRate.second);
-
- GstElement *source = gst_device_create_element(it->device, nullptr);
+ GstElement *source = gst_device_create_element(device, nullptr);
GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr);
GstElement *capsfilter = gst_element_factory_make("capsfilter", "camerafilter");
GstCaps *caps = gst_caps_new_simple("video/x-raw",
@@ -1239,111 +1017,6 @@ WebRTCSession::end()
emit stateChanged(State::DISCONNECTED);
}
-#if GST_CHECK_VERSION(1, 18, 0)
-void
-WebRTCSession::startDeviceMonitor()
-{
- if (!initialised_)
- return;
-
- static GstDeviceMonitor *monitor = nullptr;
- if (!monitor) {
- monitor = gst_device_monitor_new();
- GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
- gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
- gst_caps_unref(caps);
- caps = gst_caps_new_empty_simple("video/x-raw");
- gst_device_monitor_add_filter(monitor, "Video/Source", caps);
- gst_caps_unref(caps);
-
- GstBus *bus = gst_device_monitor_get_bus(monitor);
- gst_bus_add_watch(bus, newBusMessage, nullptr);
- gst_object_unref(bus);
- if (!gst_device_monitor_start(monitor)) {
- nhlog::ui()->error("WebRTC: failed to start device monitor");
- return;
- }
- }
-}
-#endif
-
-void
-WebRTCSession::refreshDevices()
-{
-#if GST_CHECK_VERSION(1, 18, 0)
- return;
-#else
- if (!initialised_)
- return;
-
- static GstDeviceMonitor *monitor = nullptr;
- if (!monitor) {
- monitor = gst_device_monitor_new();
- GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
- gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
- gst_caps_unref(caps);
- caps = gst_caps_new_empty_simple("video/x-raw");
- gst_device_monitor_add_filter(monitor, "Video/Source", caps);
- gst_caps_unref(caps);
- }
-
- auto clearDevices = [](auto &sources) {
- std::for_each(
- sources.begin(), sources.end(), [](auto &s) { gst_object_unref(s.device); });
- sources.clear();
- };
- clearDevices(audioSources_);
- clearDevices(videoSources_);
-
- GList *devices = gst_device_monitor_get_devices(monitor);
- if (devices) {
- for (GList *l = devices; l != nullptr; l = l->next)
- addDevice(GST_DEVICE_CAST(l->data));
- g_list_free(devices);
- }
- emit devicesChanged();
-#endif
-}
-
-std::vector<std::string>
-WebRTCSession::getDeviceNames(bool isVideo, const std::string &defaultDevice) const
-{
- return isVideo ? deviceNames(videoSources_, defaultDevice)
- : deviceNames(audioSources_, defaultDevice);
-}
-
-std::vector<std::string>
-WebRTCSession::getResolutions(const std::string &cameraName) const
-{
- std::vector<std::string> ret;
- if (auto it = std::find_if(videoSources_.cbegin(),
- videoSources_.cend(),
- [&cameraName](const auto &s) { return s.name == cameraName; });
- it != videoSources_.cend()) {
- ret.reserve(it->caps.size());
- for (const auto &c : it->caps)
- ret.push_back(c.resolution);
- }
- return ret;
-}
-
-std::vector<std::string>
-WebRTCSession::getFrameRates(const std::string &cameraName, const std::string &resolution) const
-{
- if (auto i = std::find_if(videoSources_.cbegin(),
- videoSources_.cend(),
- [&](const auto &s) { return s.name == cameraName; });
- i != videoSources_.cend()) {
- if (auto j =
- std::find_if(i->caps.cbegin(),
- i->caps.cend(),
- [&](const auto &s) { return s.resolution == resolution; });
- j != i->caps.cend())
- return j->frameRates;
- }
- return {};
-}
-
#else
bool
@@ -1400,25 +1073,4 @@ void
WebRTCSession::end()
{}
-void
-WebRTCSession::refreshDevices()
-{}
-
-std::vector<std::string>
-WebRTCSession::getDeviceNames(bool, const std::string &) const
-{
- return {};
-}
-
-std::vector<std::string>
-WebRTCSession::getResolutions(const std::string &) const
-{
- return {};
-}
-
-std::vector<std::string>
-WebRTCSession::getFrameRates(const std::string &, const std::string &) const
-{
- return {};
-}
#endif
diff --git a/src/WebRTCSession.h b/src/WebRTCSession.h
index 2f0fb70e..0fe8a864 100644
--- a/src/WebRTCSession.h
+++ b/src/WebRTCSession.h
@@ -5,6 +5,7 @@
#include <QObject>
+#include "CallDevices.h"
#include "mtx/events/voip.hpp"
typedef struct _GstElement GstElement;
@@ -59,13 +60,6 @@ public:
void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }
- void refreshDevices();
- std::vector<std::string> getDeviceNames(bool isVideo,
- const std::string &defaultDevice) const;
- std::vector<std::string> getResolutions(const std::string &cameraName) const;
- std::vector<std::string> getFrameRates(const std::string &cameraName,
- const std::string &resolution) const;
-
void setVideoItem(QQuickItem *item) { videoItem_ = item; }
QQuickItem *getVideoItem() const { return videoItem_; }
@@ -76,7 +70,6 @@ signals:
const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
void newICECandidate(const mtx::events::msg::CallCandidates::Candidate &);
void stateChanged(webrtc::State);
- void devicesChanged();
private slots:
void setState(webrtc::State state) { state_ = state; }
@@ -84,6 +77,7 @@ private slots:
private:
WebRTCSession();
+ CallDevices &devices_;
bool initialised_ = false;
bool haveVoicePlugins_ = false;
bool haveVideoPlugins_ = false;
@@ -101,7 +95,6 @@ private:
bool startPipeline(int opusPayloadType, int vp8PayloadType);
bool createPipeline(int opusPayloadType, int vp8PayloadType);
bool addVideoPipeline(int vp8PayloadType);
- void startDeviceMonitor();
public:
WebRTCSession(WebRTCSession const &) = delete;
diff --git a/src/main.cpp b/src/main.cpp
index a890a6fd..0c7c9f60 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -128,7 +128,7 @@ main(int argc, char *argv[])
// parsed before the SingleApplication userdata is set.
QString userdata{""};
QString matrixUri;
- for (int i = 0; i < argc; ++i) {
+ for (int i = 1; i < argc; ++i) {
QString arg{argv[i]};
if (arg.startsWith("--profile=")) {
arg.remove("--profile=");
@@ -214,7 +214,7 @@ main(int argc, char *argv[])
QFont font;
QString userFontFamily = settings.lock()->font();
- if (!userFontFamily.isEmpty()) {
+ if (!userFontFamily.isEmpty() && userFontFamily != "default") {
font.setFamily(userFontFamily);
}
font.setPointSizeF(settings.lock()->fontSize());
diff --git a/src/notifications/Manager.h b/src/notifications/Manager.h
index 4c9852cc..950740ba 100644
--- a/src/notifications/Manager.h
+++ b/src/notifications/Manager.h
@@ -4,7 +4,9 @@
#include <QObject>
#include <QString>
-#if defined(Q_OS_LINUX) || defined(Q_OS_FREEBSD)
+#include <mtx/responses/notifications.hpp>
+
+#if defined(Q_OS_LINUX) || defined(Q_OS_FREEBSD) || defined(Q_OS_HAIKU)
#include <QtDBus/QDBusArgument>
#include <QtDBus/QDBusInterface>
#endif
@@ -27,12 +29,7 @@ class NotificationsManager : public QObject
public:
NotificationsManager(QObject *parent = nullptr);
- void postNotification(const QString &roomId,
- const QString &eventId,
- const QString &roomName,
- const QString &senderName,
- const QString &text,
- const QImage &icon);
+ void postNotification(const mtx::responses::Notification ¬ification, const QImage &icon);
signals:
void notificationClicked(const QString roomId, const QString eventId);
@@ -41,7 +38,7 @@ signals:
public slots:
void removeNotification(const QString &roomId, const QString &eventId);
-#if defined(Q_OS_LINUX) || defined(Q_OS_FREEBSD)
+#if defined(Q_OS_LINUX) || defined(Q_OS_FREEBSD) || defined(Q_OS_HAIKU)
public:
void closeNotifications(QString roomId);
@@ -61,7 +58,7 @@ private slots:
void notificationReplied(uint id, QString reply);
};
-#if defined(Q_OS_LINUX) || defined(Q_OS_FREEBSD)
+#if defined(Q_OS_LINUX) || defined(Q_OS_FREEBSD) || defined(Q_OS_HAIKU)
QDBusArgument &
operator<<(QDBusArgument &arg, const QImage &image);
const QDBusArgument &
diff --git a/src/notifications/ManagerLinux.cpp b/src/notifications/ManagerLinux.cpp
index 8f7261e6..fb424b2a 100644
--- a/src/notifications/ManagerLinux.cpp
+++ b/src/notifications/ManagerLinux.cpp
@@ -8,6 +8,12 @@
#include <QDebug>
#include <QImage>
+#include "Cache.h"
+#include "EventAccessors.h"
+#include "MatrixClient.h"
+#include "Utils.h"
+#include <mtx/responses/notifications.hpp>
+
NotificationsManager::NotificationsManager(QObject *parent)
: QObject(parent)
, dbus("org.freedesktop.Notifications",
@@ -45,22 +51,31 @@ NotificationsManager::NotificationsManager(QObject *parent)
* Licensed under the GNU General Public License, version 3
*/
void
-NotificationsManager::postNotification(const QString &roomid,
- const QString &eventid,
- const QString &roomname,
- const QString &sender,
- const QString &text,
+NotificationsManager::postNotification(const mtx::responses::Notification ¬ification,
const QImage &icon)
{
+ const auto room_id = QString::fromStdString(notification.room_id);
+ const auto event_id = QString::fromStdString(mtx::accessors::event_id(notification.event));
+ const auto sender = cache::displayName(
+ room_id, QString::fromStdString(mtx::accessors::sender(notification.event)));
+ const auto text = utils::event_body(notification.event);
+
QVariantMap hints;
hints["image-data"] = icon;
hints["sound-name"] = "message-new-instant";
QList<QVariant> argumentList;
- argumentList << "nheko"; // app_name
- argumentList << (uint)0; // replace_id
- argumentList << ""; // app_icon
- argumentList << roomname; // summary
- argumentList << sender + ": " + text; // body
+ argumentList << "nheko"; // app_name
+ argumentList << (uint)0; // replace_id
+ argumentList << ""; // app_icon
+ argumentList << QString::fromStdString(
+ cache::singleRoomInfo(notification.room_id).name); // summary
+
+ // body
+ if (mtx::accessors::msg_type(notification.event) == mtx::events::MessageType::Emote)
+ argumentList << "* " + sender + " " + text;
+ else
+ argumentList << sender + ": " + text;
+
// The list of actions has always the action name and then a localized version of that
// action. Currently we just use an empty string for that.
// TODO(Nico): Look into what to actually put there.
@@ -76,12 +91,12 @@ NotificationsManager::postNotification(const QString &roomid,
QDBusPendingCall call = notifyApp.asyncCallWithArgumentList("Notify", argumentList);
auto watcher = new QDBusPendingCallWatcher{call, this};
connect(
- watcher, &QDBusPendingCallWatcher::finished, this, [watcher, this, roomid, eventid]() {
+ watcher, &QDBusPendingCallWatcher::finished, this, [watcher, this, room_id, event_id]() {
if (watcher->reply().type() == QDBusMessage::ErrorMessage)
qDebug() << "D-Bus Error:" << watcher->reply().errorMessage();
else
notificationIds[watcher->reply().arguments().first().toUInt()] =
- roomEventId{roomid, eventid};
+ roomEventId{room_id, event_id};
watcher->deleteLater();
});
}
diff --git a/src/notifications/ManagerMac.mm b/src/notifications/ManagerMac.mm
index c09e894c..5609d3de 100644
--- a/src/notifications/ManagerMac.mm
+++ b/src/notifications/ManagerMac.mm
@@ -3,6 +3,12 @@
#include <Foundation/Foundation.h>
#include <QtMac>
+#include "Cache.h"
+#include "EventAccessors.h"
+#include "MatrixClient.h"
+#include "Utils.h"
+#include <mtx/responses/notifications.hpp>
+
@interface NSUserNotification (CFIPrivate)
- (void)set_identityImage:(NSImage *)image;
@end
@@ -13,23 +19,22 @@ NotificationsManager::NotificationsManager(QObject *parent): QObject(parent)
}
void
-NotificationsManager::postNotification(
- const QString &roomId,
- const QString &eventId,
- const QString &roomName,
- const QString &senderName,
- const QString &text,
- const QImage &icon)
+NotificationsManager::postNotification(const mtx::responses::Notification ¬ification,
+ const QImage &icon)
{
- Q_UNUSED(roomId);
- Q_UNUSED(eventId);
Q_UNUSED(icon);
+ const auto sender = cache::displayName(QString::fromStdString(notification.room_id), QString::fromStdString(mtx::accessors::sender(notification.event)));
+ const auto text = utils::event_body(notification.event);
+
NSUserNotification * notif = [[NSUserNotification alloc] init];
- notif.title = roomName.toNSString();
- notif.subtitle = QString("%1 sent a message").arg(senderName).toNSString();
- notif.informativeText = text.toNSString();
+ notif.title = QString::fromStdString(cache::singleRoomInfo(notification.room_id).name).toNSString();
+ notif.subtitle = QString("%1 sent a message").arg(sender).toNSString();
+ if (mtx::accessors::msg_type(notification.event) == mtx::events::MessageType::Emote)
+ notif.informativeText = QString("* ").append(sender).append(" ").append(text).toNSString();
+ else
+ notif.informativeText = text.toNSString();
notif.soundName = NSUserNotificationDefaultSoundName;
[[NSUserNotificationCenter defaultUserNotificationCenter] deliverNotification: notif];
diff --git a/src/notifications/ManagerWin.cpp b/src/notifications/ManagerWin.cpp
index cc61c645..85abe642 100644
--- a/src/notifications/ManagerWin.cpp
+++ b/src/notifications/ManagerWin.cpp
@@ -1,6 +1,12 @@
#include "notifications/Manager.h"
#include "wintoastlib.h"
+#include "Cache.h"
+#include "EventAccessors.h"
+#include "MatrixClient.h"
+#include "Utils.h"
+#include <mtx/responses/notifications.hpp>
+
using namespace WinToastLib;
class CustomHandler : public IWinToastHandler
@@ -23,7 +29,7 @@ init()
WinToast::instance()->setAppName(L"Nheko");
WinToast::instance()->setAppUserModelId(WinToast::configureAUMI(L"nheko", L"nheko"));
if (!WinToast::instance()->initialize())
- std::wcout << "Your system in not compatible with toast notifications\n";
+ std::wcout << "Your system is not compatible with toast notifications\n";
}
}
@@ -32,17 +38,18 @@ NotificationsManager::NotificationsManager(QObject *parent)
{}
void
-NotificationsManager::postNotification(const QString &room_id,
- const QString &event_id,
- const QString &room_name,
- const QString &sender,
- const QString &text,
+NotificationsManager::postNotification(const mtx::responses::Notification &notification,
const QImage &icon)
{
- Q_UNUSED(room_id)
- Q_UNUSED(event_id)
Q_UNUSED(icon)
+ const auto room_name =
+ QString::fromStdString(cache::singleRoomInfo(notification.room_id).name);
+ const auto sender =
+ cache::displayName(QString::fromStdString(notification.room_id),
+ QString::fromStdString(mtx::accessors::sender(notification.event)));
+ const auto text = utils::event_body(notification.event);
+
if (!isInitialized)
init();
@@ -53,7 +60,13 @@ NotificationsManager::postNotification(const QString &room_id,
else
templ.setTextField(QString("%1").arg(sender).toStdWString(),
WinToastTemplate::FirstLine);
- templ.setTextField(QString("%1").arg(text).toStdWString(), WinToastTemplate::SecondLine);
+ if (mtx::accessors::msg_type(notification.event) == mtx::events::MessageType::Emote)
+ templ.setTextField(
+ QString("* ").append(sender).append(" ").append(text).toStdWString(),
+ WinToastTemplate::SecondLine);
+ else
+ templ.setTextField(QString("%1").arg(text).toStdWString(),
+ WinToastTemplate::SecondLine);
// TODO: implement room or user avatar
// templ.setImagePath(L"C:/example.png");
diff --git a/src/timeline/DelegateChooser.cpp b/src/timeline/DelegateChooser.cpp
index 1f5fae7e..8598fa77 100644
--- a/src/timeline/DelegateChooser.cpp
+++ b/src/timeline/DelegateChooser.cpp
@@ -123,10 +123,6 @@ DelegateChooser::DelegateIncubator::statusChanged(QQmlIncubator::Status status)
}
chooser.child_->setParentItem(&chooser);
- connect(chooser.child_, &QQuickItem::heightChanged, &chooser, [this]() {
- chooser.setHeight(chooser.child_->height());
- });
- chooser.setHeight(chooser.child_->height());
QQmlEngine::setObjectOwnership(chooser.child_,
QQmlEngine::ObjectOwnership::JavaScriptOwnership);
emit chooser.childChanged();
diff --git a/src/timeline/EventStore.cpp b/src/timeline/EventStore.cpp
index be4bc09e..94d43a83 100644
--- a/src/timeline/EventStore.cpp
+++ b/src/timeline/EventStore.cpp
@@ -293,16 +293,16 @@ EventStore::handleSync(const mtx::responses::Timeline &events)
}
for (const auto &event : events.events) {
- std::string relates_to;
+ std::set<std::string> relates_to;
if (auto redaction =
std::get_if<mtx::events::RedactionEvent<mtx::events::msg::Redaction>>(
&event)) {
// fixup reactions
auto redacted = events_by_id_.object({room_id_, redaction->redacts});
if (redacted) {
- auto id = mtx::accessors::relates_to_event_id(*redacted);
- if (!id.empty()) {
- auto idx = idToIndex(id);
+ auto id = mtx::accessors::relations(*redacted);
+ if (id.annotates()) {
+ auto idx = idToIndex(id.annotates()->event_id);
if (idx) {
events_by_id_.remove(
{room_id_, redaction->redacts});
@@ -312,20 +312,17 @@ EventStore::handleSync(const mtx::responses::Timeline &events)
}
}
- relates_to = redaction->redacts;
- } else if (auto reaction =
- std::get_if<mtx::events::RoomEvent<mtx::events::msg::Reaction>>(
- &event)) {
- relates_to = reaction->content.relates_to.event_id;
+ relates_to.insert(redaction->redacts);
} else {
- relates_to = mtx::accessors::in_reply_to_event(event);
+ for (const auto &r : mtx::accessors::relations(event).relations)
+ relates_to.insert(r.event_id);
}
- if (!relates_to.empty()) {
- auto idx = cache::client()->getTimelineIndex(room_id_, relates_to);
+ for (const auto &relates_to_id : relates_to) {
+ auto idx = cache::client()->getTimelineIndex(room_id_, relates_to_id);
if (idx) {
- events_by_id_.remove({room_id_, relates_to});
- decryptedEvents_.remove({room_id_, relates_to});
+ events_by_id_.remove({room_id_, relates_to_id});
+ decryptedEvents_.remove({room_id_, relates_to_id});
events_.remove({room_id_, *idx});
emit dataChanged(toExternalIdx(*idx), toExternalIdx(*idx));
}
@@ -408,6 +405,52 @@ EventStore::handle_room_verification(mtx::events::collections::TimelineEvents ev
event);
}
+std::vector<mtx::events::collections::TimelineEvents>
+EventStore::edits(const std::string &event_id)
+{
+ auto event_ids = cache::client()->relatedEvents(room_id_, event_id);
+
+ auto original_event = get(event_id, "", false, false);
+ if (!original_event)
+ return {};
+
+ auto original_sender = mtx::accessors::sender(*original_event);
+ auto original_relations = mtx::accessors::relations(*original_event);
+
+ std::vector<mtx::events::collections::TimelineEvents> edits;
+ for (const auto &id : event_ids) {
+ auto related_event = get(id, event_id, false, false);
+ if (!related_event)
+ continue;
+
+ auto related_ev = *related_event;
+
+ auto edit_rel = mtx::accessors::relations(related_ev);
+ if (edit_rel.replaces() == event_id &&
+ original_sender == mtx::accessors::sender(related_ev)) {
+ if (edit_rel.synthesized && original_relations.reply_to() &&
+ !edit_rel.reply_to()) {
+ edit_rel.relations.push_back(
+ {mtx::common::RelationType::InReplyTo,
+ original_relations.reply_to().value()});
+ mtx::accessors::set_relations(related_ev, std::move(edit_rel));
+ }
+ edits.push_back(std::move(related_ev));
+ }
+ }
+
+ auto c = cache::client();
+ std::sort(edits.begin(),
+ edits.end(),
+ [this, c](const mtx::events::collections::TimelineEvents &a,
+ const mtx::events::collections::TimelineEvents &b) {
+ return c->getArrivalIndex(this->room_id_, mtx::accessors::event_id(a)) <
+ c->getArrivalIndex(this->room_id_, mtx::accessors::event_id(b));
+ });
+
+ return edits;
+}
+
QVariantList
EventStore::reactions(const std::string &event_id)
{
@@ -430,13 +473,14 @@ EventStore::reactions(const std::string &event_id)
if (auto reaction = std::get_if<mtx::events::RoomEvent<mtx::events::msg::Reaction>>(
related_event);
- reaction && reaction->content.relates_to.key) {
- auto &agg = aggregation[reaction->content.relates_to.key.value()];
+ reaction && reaction->content.relations.annotates() &&
+ reaction->content.relations.annotates()->key) {
+ auto key = reaction->content.relations.annotates()->key.value();
+ auto &agg = aggregation[key];
if (agg.count == 0) {
Reaction temp{};
- temp.key_ =
- QString::fromStdString(reaction->content.relates_to.key.value());
+ temp.key_ = QString::fromStdString(key);
reactions.push_back(temp);
}
@@ -489,7 +533,13 @@ EventStore::get(int idx, bool decrypt)
if (!event_id)
return nullptr;
- auto event = cache::client()->getEvent(room_id_, *event_id);
+ std::optional<mtx::events::collections::TimelineEvent> event;
+ auto edits_ = edits(*event_id);
+ if (edits_.empty())
+ event = cache::client()->getEvent(room_id_, *event_id);
+ else
+ event = {edits_.back()};
+
if (!event)
return nullptr;
else
@@ -691,8 +741,7 @@ EventStore::decryptEvent(const IdIndex &idx,
body["unsigned"] = e.unsigned_data;
// relations are unencrypted in content...
- if (json old_ev = e; old_ev["content"].count("m.relates_to") != 0)
- body["content"]["m.relates_to"] = old_ev["content"]["m.relates_to"];
+ mtx::common::add_relations(body["content"], e.content.relations);
json event_array = json::array();
event_array.push_back(body);
@@ -717,7 +766,7 @@ EventStore::decryptEvent(const IdIndex &idx,
}
mtx::events::collections::TimelineEvents *
-EventStore::get(std::string_view id, std::string_view related_to, bool decrypt)
+EventStore::get(std::string_view id, std::string_view related_to, bool decrypt, bool resolve_edits)
{
if (this->thread() != QThread::currentThread())
nhlog::db()->warn("{} called from a different thread!", __func__);
@@ -725,7 +774,16 @@ EventStore::get(std::string_view id, std::string_view related_to, bool decrypt)
if (id.empty())
return nullptr;
- IdIndex index{room_id_, std::string(id.data(), id.size())};
+ IdIndex index{room_id_, std::string(id)};
+ if (resolve_edits) {
+ auto edits_ = edits(index.id);
+ if (!edits_.empty()) {
+ index.id = mtx::accessors::event_id(edits_.back());
+ auto event_ptr =
+ new mtx::events::collections::TimelineEvents(std::move(edits_.back()));
+ events_by_id_.insert(index, event_ptr);
+ }
+ }
auto event_ptr = events_by_id_.object(index);
if (!event_ptr) {
diff --git a/src/timeline/EventStore.h b/src/timeline/EventStore.h
index f8eff9a9..ced7bdc0 100644
--- a/src/timeline/EventStore.h
+++ b/src/timeline/EventStore.h
@@ -66,7 +66,8 @@ public:
// relatedFetched event
mtx::events::collections::TimelineEvents *get(std::string_view id,
std::string_view related_to,
- bool decrypt = true);
+ bool decrypt = true,
+ bool resolve_edits = true);
// always returns a proper event as long as the idx is valid
mtx::events::collections::TimelineEvents *get(int idx, bool decrypt = true);
@@ -110,6 +111,7 @@ public slots:
void clearTimeline();
private:
+ std::vector<mtx::events::collections::TimelineEvents> edits(const std::string &event_id);
mtx::events::collections::TimelineEvents *decryptEvent(
const IdIndex &idx,
const mtx::events::EncryptedEvent<mtx::events::msg::Encrypted> &e);
diff --git a/src/timeline/InputBar.cpp b/src/timeline/InputBar.cpp
index b31c1f76..08cbd15b 100644
--- a/src/timeline/InputBar.cpp
+++ b/src/timeline/InputBar.cpp
@@ -268,7 +268,18 @@ InputBar::message(QString msg, MarkdownOverride useMarkdown)
text.format = "org.matrix.custom.html";
}
- if (!room->reply().isEmpty()) {
+ if (!room->edit().isEmpty()) {
+ if (!room->reply().isEmpty()) {
+ text.relations.relations.push_back(
+ {mtx::common::RelationType::InReplyTo, room->reply().toStdString()});
+ room->resetReply();
+ }
+
+ text.relations.relations.push_back(
+ {mtx::common::RelationType::Replace, room->edit().toStdString()});
+ room->resetEdit();
+
+ } else if (!room->reply().isEmpty()) {
auto related = room->relatedInfo(room->reply());
QString body;
@@ -294,7 +305,8 @@ InputBar::message(QString msg, MarkdownOverride useMarkdown)
text.formatted_body =
utils::getFormattedQuoteBody(related, msg.toHtmlEscaped()).toStdString();
- text.relates_to.in_reply_to.event_id = related.related_event;
+ text.relations.relations.push_back(
+ {mtx::common::RelationType::InReplyTo, related.related_event});
room->resetReply();
}
@@ -316,9 +328,15 @@ InputBar::emote(QString msg)
}
if (!room->reply().isEmpty()) {
- emote.relates_to.in_reply_to.event_id = room->reply().toStdString();
+ emote.relations.relations.push_back(
+ {mtx::common::RelationType::InReplyTo, room->reply().toStdString()});
room->resetReply();
}
+ if (!room->edit().isEmpty()) {
+ emote.relations.relations.push_back(
+ {mtx::common::RelationType::Replace, room->edit().toStdString()});
+ room->resetEdit();
+ }
room->sendMessageEvent(emote, mtx::events::EventType::RoomMessage);
}
@@ -346,9 +364,15 @@ InputBar::image(const QString &filename,
image.url = url.toStdString();
if (!room->reply().isEmpty()) {
- image.relates_to.in_reply_to.event_id = room->reply().toStdString();
+ image.relations.relations.push_back(
+ {mtx::common::RelationType::InReplyTo, room->reply().toStdString()});
room->resetReply();
}
+ if (!room->edit().isEmpty()) {
+ image.relations.relations.push_back(
+ {mtx::common::RelationType::Replace, room->edit().toStdString()});
+ room->resetEdit();
+ }
room->sendMessageEvent(image, mtx::events::EventType::RoomMessage);
}
@@ -371,9 +395,15 @@ InputBar::file(const QString &filename,
file.url = url.toStdString();
if (!room->reply().isEmpty()) {
- file.relates_to.in_reply_to.event_id = room->reply().toStdString();
+ file.relations.relations.push_back(
+ {mtx::common::RelationType::InReplyTo, room->reply().toStdString()});
room->resetReply();
}
+ if (!room->edit().isEmpty()) {
+ file.relations.relations.push_back(
+ {mtx::common::RelationType::Replace, room->edit().toStdString()});
+ room->resetEdit();
+ }
room->sendMessageEvent(file, mtx::events::EventType::RoomMessage);
}
@@ -397,9 +427,15 @@ InputBar::audio(const QString &filename,
audio.url = url.toStdString();
if (!room->reply().isEmpty()) {
- audio.relates_to.in_reply_to.event_id = room->reply().toStdString();
+ audio.relations.relations.push_back(
+ {mtx::common::RelationType::InReplyTo, room->reply().toStdString()});
room->resetReply();
}
+ if (!room->edit().isEmpty()) {
+ audio.relations.relations.push_back(
+ {mtx::common::RelationType::Replace, room->edit().toStdString()});
+ room->resetEdit();
+ }
room->sendMessageEvent(audio, mtx::events::EventType::RoomMessage);
}
@@ -422,9 +458,15 @@ InputBar::video(const QString &filename,
video.url = url.toStdString();
if (!room->reply().isEmpty()) {
- video.relates_to.in_reply_to.event_id = room->reply().toStdString();
+ video.relations.relations.push_back(
+ {mtx::common::RelationType::InReplyTo, room->reply().toStdString()});
room->resetReply();
}
+ if (!room->edit().isEmpty()) {
+ video.relations.relations.push_back(
+ {mtx::common::RelationType::Replace, room->edit().toStdString()});
+ room->resetEdit();
+ }
room->sendMessageEvent(video, mtx::events::EventType::RoomMessage);
}
@@ -518,6 +560,8 @@ InputBar::showPreview(const QMimeData &source, QString path, const QStringList &
[this](const QByteArray data, const QString &mime, const QString &fn) {
setUploading(true);
+ setText("");
+
auto payload = std::string(data.data(), data.size());
std::optional<mtx::crypto::EncryptedFile> encryptedFile;
if (cache::isRoomEncrypted(room->roomId().toStdString())) {
diff --git a/src/timeline/InputBar.h b/src/timeline/InputBar.h
index f173bbc0..696a0dd9 100644
--- a/src/timeline/InputBar.h
+++ b/src/timeline/InputBar.h
@@ -41,6 +41,7 @@ public slots:
QString text() const;
QString previousText();
QString nextText();
+ void setText(QString newText) { emit textChanged(newText); }
void send();
void paste(bool fromMouse);
@@ -58,6 +59,7 @@ private slots:
signals:
void insertText(QString text);
+ void textChanged(QString newText);
void uploadingChanged(bool value);
private:
diff --git a/src/timeline/TimelineModel.cpp b/src/timeline/TimelineModel.cpp
index efeba146..6caac132 100644
--- a/src/timeline/TimelineModel.cpp
+++ b/src/timeline/TimelineModel.cpp
@@ -288,6 +288,8 @@ TimelineModel::roleNames() const
{ProportionalHeight, "proportionalHeight"},
{Id, "id"},
{State, "state"},
+ {IsEdited, "isEdited"},
+ {IsEditable, "isEditable"},
{IsEncrypted, "isEncrypted"},
{IsRoomEncrypted, "isRoomEncrypted"},
{ReplyTo, "replyTo"},
@@ -360,7 +362,7 @@ TimelineModel::data(const mtx::events::collections::TimelineEvents &event, int r
const static QRegularExpression replyFallback(
"<mx-reply>.*</mx-reply>", QRegularExpression::DotMatchesEverythingOption);
- bool isReply = !in_reply_to_event(event).empty();
+ bool isReply = relations(event).reply_to().has_value();
auto formattedBody_ = QString::fromStdString(formatted_body(event));
if (formattedBody_.isEmpty()) {
@@ -409,8 +411,12 @@ TimelineModel::data(const mtx::events::collections::TimelineEvents &event, int r
return QVariant(prop > 0 ? prop : 1.);
}
- case Id:
- return QVariant(QString::fromStdString(event_id(event)));
+ case Id: {
+ if (auto replaces = relations(event).replaces())
+ return QVariant(QString::fromStdString(replaces.value()));
+ else
+ return QVariant(QString::fromStdString(event_id(event)));
+ }
case State: {
auto id = QString::fromStdString(event_id(event));
auto containsOthers = [](const auto &vec) {
@@ -430,6 +436,11 @@ TimelineModel::data(const mtx::events::collections::TimelineEvents &event, int r
else
return qml_mtx_events::Received;
}
+ case IsEdited:
+ return QVariant(relations(event).replaces().has_value());
+ case IsEditable:
+ return QVariant(!is_state_event(event) && mtx::accessors::sender(event) ==
+ http::client()->user_id().to_string());
case IsEncrypted: {
auto id = event_id(event);
auto encrypted_event = events.get(id, id, false);
@@ -442,9 +453,9 @@ TimelineModel::data(const mtx::events::collections::TimelineEvents &event, int r
return cache::isRoomEncrypted(room_id_.toStdString());
}
case ReplyTo:
- return QVariant(QString::fromStdString(in_reply_to_event(event)));
+ return QVariant(QString::fromStdString(relations(event).reply_to().value_or("")));
case Reactions: {
- auto id = event_id(event);
+ auto id = relations(event).replaces().value_or(event_id(event));
return QVariant::fromValue(events.reactions(id));
}
case RoomId:
@@ -724,15 +735,30 @@ TimelineModel::updateLastMessage()
void
TimelineModel::setCurrentIndex(int index)
{
- if (!ChatPage::instance()->isActiveWindow())
- return;
-
auto oldIndex = idToIndex(currentId);
currentId = indexToId(index);
- emit currentIndexChanged(index);
+ if (index != oldIndex)
+ emit currentIndexChanged(index);
+
+ if (!ChatPage::instance()->isActiveWindow())
+ return;
- if ((oldIndex > index || oldIndex == -1) && !currentId.startsWith("m")) {
- readEvent(currentId.toStdString());
+ if (!currentId.startsWith("m")) {
+ auto oldReadIndex =
+ cache::getEventIndex(roomId().toStdString(), currentReadId.toStdString());
+ auto nextEventIndexAndId =
+ cache::lastInvisibleEventAfter(roomId().toStdString(), currentId.toStdString());
+
+ if (nextEventIndexAndId &&
+ (!oldReadIndex || *oldReadIndex < nextEventIndexAndId->first)) {
+ readEvent(nextEventIndexAndId->second);
+ currentReadId = QString::fromStdString(nextEventIndexAndId->second);
+
+ nhlog::net()->info("Marked as read {}, index {}, oldReadIndex {}",
+ nextEventIndexAndId->second,
+ nextEventIndexAndId->first,
+ *oldReadIndex);
+ }
}
}
@@ -821,6 +847,12 @@ TimelineModel::replyAction(QString id)
setReply(id);
}
+void
+TimelineModel::editAction(QString id)
+{
+ setEdit(id);
+}
+
RelatedInfo
TimelineModel::relatedInfo(QString id)
{
@@ -1509,6 +1541,51 @@ TimelineModel::formatMemberEvent(QString id)
return rendered;
}
+void
+TimelineModel::setEdit(QString newEdit)
+{
+ if (edit_.startsWith('m'))
+ return;
+
+ if (edit_ != newEdit) {
+ auto ev = events.get(newEdit.toStdString(), "");
+ if (ev && mtx::accessors::sender(*ev) == http::client()->user_id().to_string()) {
+ auto e = *ev;
+ setReply(QString::fromStdString(
+ mtx::accessors::relations(e).reply_to().value_or("")));
+
+ auto msgType = mtx::accessors::msg_type(e);
+ if (msgType == mtx::events::MessageType::Text ||
+ msgType == mtx::events::MessageType::Notice) {
+ input()->setText(relatedInfo(newEdit).quoted_body);
+ } else if (msgType == mtx::events::MessageType::Emote) {
+ input()->setText("/me " + relatedInfo(newEdit).quoted_body);
+ } else {
+ input()->setText("");
+ }
+
+ edit_ = newEdit;
+ } else {
+ resetReply();
+
+ input()->setText("");
+ edit_ = "";
+ }
+ emit editChanged(edit_);
+ }
+}
+
+void
+TimelineModel::resetEdit()
+{
+ if (!edit_.isEmpty()) {
+ edit_ = "";
+ emit editChanged(edit_);
+ input()->setText("");
+ resetReply();
+ }
+}
+
QString
TimelineModel::roomName() const
{
diff --git a/src/timeline/TimelineModel.h b/src/timeline/TimelineModel.h
index df067fd4..5f599741 100644
--- a/src/timeline/TimelineModel.h
+++ b/src/timeline/TimelineModel.h
@@ -146,6 +146,7 @@ class TimelineModel : public QAbstractListModel
Q_PROPERTY(std::vector<QString> typingUsers READ typingUsers WRITE updateTypingUsers NOTIFY
typingUsersChanged)
Q_PROPERTY(QString reply READ reply WRITE setReply NOTIFY replyChanged RESET resetReply)
+ Q_PROPERTY(QString edit READ edit WRITE setEdit NOTIFY editChanged RESET resetEdit)
Q_PROPERTY(
bool paginationInProgress READ paginationInProgress NOTIFY paginationInProgressChanged)
Q_PROPERTY(QString roomName READ roomName NOTIFY roomNameChanged)
@@ -182,6 +183,8 @@ public:
ProportionalHeight,
Id,
State,
+ IsEdited,
+ IsEditable,
IsEncrypted,
IsRoomEncrypted,
ReplyTo,
@@ -215,6 +218,7 @@ public:
Q_INVOKABLE void viewDecryptedRawMessage(QString id) const;
Q_INVOKABLE void openUserProfile(QString userid, bool global = false);
Q_INVOKABLE void openRoomSettings();
+ Q_INVOKABLE void editAction(QString id);
Q_INVOKABLE void replyAction(QString id);
Q_INVOKABLE void readReceiptsAction(QString id) const;
Q_INVOKABLE void redactEvent(QString id);
@@ -258,6 +262,9 @@ public slots:
QString reply() const { return reply_; }
void setReply(QString newReply)
{
+ if (edit_.startsWith('m'))
+ return;
+
if (reply_ != newReply) {
reply_ = newReply;
emit replyChanged(reply_);
@@ -270,6 +277,9 @@ public slots:
emit replyChanged(reply_);
}
}
+ QString edit() const { return edit_; }
+ void setEdit(QString newEdit);
+ void resetEdit();
void setDecryptDescription(bool decrypt) { decryptDescription = decrypt; }
void clearTimeline() { events.clearTimeline(); }
void receivedSessionKey(const std::string &session_key)
@@ -294,6 +304,7 @@ signals:
void newEncryptedImage(mtx::crypto::EncryptedFile encryptionInfo);
void typingUsersChanged(std::vector<QString> users);
void replyChanged(QString reply);
+ void editChanged(QString reply);
void paginationInProgressChanged(const bool);
void newCallEvent(const mtx::events::collections::TimelineEvents &event);
@@ -324,8 +335,8 @@ private:
bool decryptDescription = true;
bool m_paginationInProgress = false;
- QString currentId;
- QString reply_;
+ QString currentId, currentReadId;
+ QString reply_, edit_;
std::vector<QString> typingUsers_;
TimelineViewManager *manager_;
diff --git a/src/timeline/TimelineViewManager.cpp b/src/timeline/TimelineViewManager.cpp
index 0ed680f8..f2e6d571 100644
--- a/src/timeline/TimelineViewManager.cpp
+++ b/src/timeline/TimelineViewManager.cpp
@@ -21,6 +21,7 @@
#include "dialogs/ImageOverlay.h"
#include "emoji/EmojiModel.h"
#include "emoji/Provider.h"
+#include "ui/NhekoCursorShape.h"
#include "ui/NhekoDropArea.h"
#include <iostream> //only for debugging
@@ -118,6 +119,7 @@ TimelineViewManager::TimelineViewManager(CallManager *callManager, ChatPage *par
qmlRegisterType<DelegateChoice>("im.nheko", 1, 0, "DelegateChoice");
qmlRegisterType<DelegateChooser>("im.nheko", 1, 0, "DelegateChooser");
qmlRegisterType<NhekoDropArea>("im.nheko", 1, 0, "NhekoDropArea");
+ qmlRegisterType<NhekoCursorShape>("im.nheko", 1, 0, "CursorShape");
qmlRegisterUncreatableType<DeviceVerificationFlow>(
"im.nheko", 1, 0, "DeviceVerificationFlow", "Can't create verification flow from QML!");
qmlRegisterUncreatableType<UserProfile>(
@@ -176,10 +178,6 @@ TimelineViewManager::TimelineViewManager(CallManager *callManager, ChatPage *par
view->setResizeMode(QQuickWidget::SizeRootObjectToView);
container->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
-#if (QT_VERSION >= QT_VERSION_CHECK(5, 10, 0))
- view->quickWindow()->setTextRenderType(QQuickWindow::NativeTextRendering);
-#endif
-
connect(view, &QQuickWidget::statusChanged, this, [](QQuickWidget::Status status) {
nhlog::ui()->debug("Status changed to {}", status);
});
@@ -508,9 +506,11 @@ TimelineViewManager::queueReactionMessage(const QString &reactedEvent, const QSt
// If selfReactedEvent is empty, that means we haven't previously reacted
if (selfReactedEvent.isEmpty()) {
mtx::events::msg::Reaction reaction;
- reaction.relates_to.rel_type = mtx::common::RelationType::Annotation;
- reaction.relates_to.event_id = reactedEvent.toStdString();
- reaction.relates_to.key = reactionKey.toStdString();
+ mtx::common::Relation rel;
+ rel.rel_type = mtx::common::RelationType::Annotation;
+ rel.event_id = reactedEvent.toStdString();
+ rel.key = reactionKey.toStdString();
+ reaction.relations.relations.push_back(rel);
timeline_->sendMessageEvent(reaction, mtx::events::EventType::Reaction);
// Otherwise, we have previously reacted and the reaction should be redacted
@@ -546,3 +546,9 @@ TimelineViewManager::queueCallMessage(const QString &roomid,
{
models.value(roomid)->sendMessageEvent(callHangUp, mtx::events::EventType::CallHangUp);
}
+
+void
+TimelineViewManager::focusMessageInput()
+{
+ emit focusInput();
+}
diff --git a/src/timeline/TimelineViewManager.h b/src/timeline/TimelineViewManager.h
index 3e58bb43..61fce574 100644
--- a/src/timeline/TimelineViewManager.h
+++ b/src/timeline/TimelineViewManager.h
@@ -66,6 +66,7 @@ public:
Q_INVOKABLE void openLink(QString link) const;
+ Q_INVOKABLE void focusMessageInput();
Q_INVOKABLE void openInviteUsersDialog();
Q_INVOKABLE void openMemberListDialog() const;
Q_INVOKABLE void openLeaveRoomDialog() const;
@@ -86,6 +87,7 @@ signals:
void showRoomList();
void narrowViewChanged();
void focusChanged();
+ void focusInput();
public slots:
void updateReadReceipts(const QString &room_id, const std::vector<QString> &event_ids);
diff --git a/src/ui/NhekoCursorShape.cpp b/src/ui/NhekoCursorShape.cpp
new file mode 100644
index 00000000..06b0a321
--- /dev/null
+++ b/src/ui/NhekoCursorShape.cpp
@@ -0,0 +1,25 @@
+#include "NhekoCursorShape.h"
+
+#include <QCursor>
+
+NhekoCursorShape::NhekoCursorShape(QQuickItem *parent)
+ : QQuickItem(parent)
+ , currentShape_(Qt::CursorShape::ArrowCursor)
+{}
+
+Qt::CursorShape
+NhekoCursorShape::cursorShape() const
+{
+ return cursor().shape();
+}
+
+void
+NhekoCursorShape::setCursorShape(Qt::CursorShape cursorShape)
+{
+ if (currentShape_ == cursorShape)
+ return;
+
+ currentShape_ = cursorShape;
+ setCursor(cursorShape);
+ emit cursorShapeChanged();
+}
diff --git a/src/ui/NhekoCursorShape.h b/src/ui/NhekoCursorShape.h
new file mode 100644
index 00000000..2eab5e42
--- /dev/null
+++ b/src/ui/NhekoCursorShape.h
@@ -0,0 +1,26 @@
+#pragma once
+
+// see
+// https://stackoverflow.com/questions/27821054/how-to-change-cursor-shape-in-qml-when-mousearea-is-covered-with-another-mousear/29382092#29382092
+
+#include <QQuickItem>
+
+class NhekoCursorShape : public QQuickItem
+{
+ Q_OBJECT
+
+ Q_PROPERTY(Qt::CursorShape cursorShape READ cursorShape WRITE setCursorShape NOTIFY
+ cursorShapeChanged)
+
+public:
+ explicit NhekoCursorShape(QQuickItem *parent = 0);
+
+private:
+ Qt::CursorShape cursorShape() const;
+ void setCursorShape(Qt::CursorShape cursorShape);
+
+ Qt::CursorShape currentShape_;
+
+signals:
+ void cursorShapeChanged();
+};
diff --git a/src/ui/UserProfile.cpp b/src/ui/UserProfile.cpp
index 274ed927..77f6ced5 100644
--- a/src/ui/UserProfile.cpp
+++ b/src/ui/UserProfile.cpp
@@ -314,9 +314,8 @@ UserProfile::changeAvatar()
return;
}
- const auto bin = file.peek(file.size());
- const auto payload = std::string(bin.data(), bin.size());
- const auto dimensions = QImageReader(&file).size();
+ const auto bin = file.peek(file.size());
+ const auto payload = std::string(bin.data(), bin.size());
isLoading_ = true;
emit loadingChanged();
@@ -328,7 +327,6 @@ UserProfile::changeAvatar()
mime.name().toStdString(),
QFileInfo(fileName).fileName().toStdString(),
[this,
- dimensions,
payload,
mimetype = mime.name().toStdString(),
size = payload.size(),
@@ -367,15 +365,15 @@ UserProfile::changeAvatar()
void
UserProfile::updateRoomMemberState(mtx::events::state::Member member)
{
- http::client()->send_state_event(
- roomid_.toStdString(),
- http::client()->user_id().to_string(),
- member,
- [this](mtx::responses::EventId, mtx::http::RequestErr err) {
- if (err)
- nhlog::net()->error("Failed to update room member state : ",
- err->matrix_error.error);
- });
+ http::client()->send_state_event(roomid_.toStdString(),
+ http::client()->user_id().to_string(),
+ member,
+ [](mtx::responses::EventId, mtx::http::RequestErr err) {
+ if (err)
+ nhlog::net()->error(
+ "Failed to update room member state : ",
+ err->matrix_error.error);
+ });
}
void
|