Diffstat (limited to 'synapse')
-rw-r--r-- | synapse/app/pusher.py                               |  15
-rw-r--r-- | synapse/config/emailconfig.py                       |  98
-rw-r--r-- | synapse/config/homeserver.py                        |   3
-rw-r--r-- | synapse/config/server.py                            |   8
-rw-r--r-- | synapse/handlers/_base.py                           |   2
-rw-r--r-- | synapse/handlers/message.py                         |   8
-rw-r--r-- | synapse/handlers/room.py                            |   2
-rw-r--r-- | synapse/handlers/search.py                          |   8
-rw-r--r-- | synapse/handlers/sync.py                            |   4
-rw-r--r-- | synapse/notifier.py                                 |   2
-rw-r--r-- | synapse/push/emailpusher.py                         | 251
-rw-r--r-- | synapse/push/mailer.py                              | 455
-rw-r--r-- | synapse/push/pusher.py                              |  33
-rw-r--r-- | synapse/push/pusherpool.py                          |   6
-rw-r--r-- | synapse/python_dependencies.py                      |   4
-rw-r--r-- | synapse/rest/client/v2_alpha/register.py            |  29
-rw-r--r-- | synapse/storage/event_push_actions.py               |  49
-rw-r--r-- | synapse/storage/events.py                           |   2
-rw-r--r-- | synapse/storage/pusher.py                           |  27
-rw-r--r-- | synapse/storage/schema/delta/31/events.sql          |  16
-rw-r--r-- | synapse/storage/schema/delta/31/pusher_throttle.sql |  23
-rw-r--r-- | synapse/util/presentable_names.py                   | 159
22 files changed, 1170 insertions, 34 deletions
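
Editor's note: the bulk of this commit is the new email pusher (synapse/push/emailpusher.py) and mailer (synapse/push/mailer.py). An email is only sent DELAY_BEFORE_MAIL_MS after a notification arrives, and each room's throttle then backs off exponentially until the room goes quiet again. Below is a minimal standalone sketch of that backoff, using the constants added in emailpusher.py; next_throttle_ms is a name invented here for illustration, not part of the commit.

# Standalone sketch of the backoff in sent_notif_update_throttle
# (synapse/push/emailpusher.py); constants copied from that file.
THROTTLE_START_MS = 2 * 60 * 1000               # 2 minutes
THROTTLE_MAX_MS = (2 * 60 * 1000) * (2 ** 11)   # ~3 days
THROTTLE_MULTIPLIER = 2
THROTTLE_RESET_AFTER_MS = (2 * 60 * 1000) * (2 ** 11)  # ~3 days


def next_throttle_ms(current_throttle_ms, gap_ms):
    """Reset after a long quiet period, otherwise double up to the cap."""
    if gap_ms > THROTTLE_RESET_AFTER_MS:
        return THROTTLE_START_MS
    if current_throttle_ms == 0:
        return THROTTLE_START_MS
    return min(current_throttle_ms * THROTTLE_MULTIPLIER, THROTTLE_MAX_MS)


if __name__ == "__main__":
    # Busy room: successive emails back off 2 min, 4 min, 8 min, ... ~3 days.
    throttle = 0
    for _ in range(13):
        throttle = next_throttle_ms(throttle, gap_ms=0)
        print("%.1f minutes" % (throttle / 60000.0))

In a busy room the gap between emails escalates from 2 minutes up to roughly 3 days; a gap longer than THROTTLE_RESET_AFTER_MS since the previous notification drops the throttle back to 2 minutes.
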
diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py index b5339f030d..89c8d5c7ce 100644 --- a/synapse/app/pusher.py +++ b/synapse/app/pusher.py @@ -20,6 +20,7 @@ from synapse.server import HomeServer from synapse.config._base import ConfigError from synapse.config.database import DatabaseConfig from synapse.config.logger import LoggingConfig +from synapse.config.emailconfig import EmailConfig from synapse.http.site import SynapseSite from synapse.metrics.resource import MetricsResource, METRICS_PREFIX from synapse.replication.slave.storage.events import SlavedEventStore @@ -91,7 +92,7 @@ class SlaveConfig(DatabaseConfig): """ % locals() -class PusherSlaveConfig(SlaveConfig, LoggingConfig): +class PusherSlaveConfig(SlaveConfig, LoggingConfig, EmailConfig): pass @@ -110,6 +111,18 @@ class PusherSlaveStore( DataStore.update_pusher_last_stream_ordering.__func__ ) + get_throttle_params_by_room = ( + DataStore.get_throttle_params_by_room.__func__ + ) + + set_throttle_params = ( + DataStore.set_throttle_params.__func__ + ) + + get_time_of_last_push_action_before = ( + DataStore.get_time_of_last_push_action_before.__func__ + ) + class PusherServer(HomeServer): diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py new file mode 100644 index 0000000000..90bdd08f00 --- /dev/null +++ b/synapse/config/emailconfig.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2015, 2016 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This file can't be called email.py because if it is, we cannot: +import email.utils + +from ._base import Config + + +class EmailConfig(Config): + def read_config(self, config): + self.email_enable_notifs = False + + email_config = config.get("email", {}) + self.email_enable_notifs = email_config.get("enable_notifs", False) + + if self.email_enable_notifs: + # make sure we can import the required deps + import jinja2 + import bleach + # prevent unused warnings + jinja2 + bleach + + required = [ + "smtp_host", + "smtp_port", + "notif_from", + "template_dir", + "notif_template_html", + "notif_template_text", + ] + + missing = [] + for k in required: + if k not in email_config: + missing.append(k) + + if (len(missing) > 0): + raise RuntimeError( + "email.enable_notifs is True but required keys are missing: %s" % + (", ".join(["email." 
+ k for k in missing]),) + ) + + if config.get("public_baseurl") is None: + raise RuntimeError( + "email.enable_notifs is True but no public_baseurl is set" + ) + + self.email_smtp_host = email_config["smtp_host"] + self.email_smtp_port = email_config["smtp_port"] + self.email_notif_from = email_config["notif_from"] + self.email_template_dir = email_config["template_dir"] + self.email_notif_template_html = email_config["notif_template_html"] + self.email_notif_template_text = email_config["notif_template_text"] + self.email_notif_for_new_users = email_config.get( + "notif_for_new_users", True + ) + if "app_name" in email_config: + self.email_app_name = email_config["app_name"] + else: + self.email_app_name = "Matrix" + + # make sure it's valid + parsed = email.utils.parseaddr(self.email_notif_from) + if parsed[1] == '': + raise RuntimeError("Invalid notif_from address") + else: + self.email_enable_notifs = False + # Not much point setting defaults for the rest: it would be an + # error for them to be used. + + def default_config(self, config_dir_path, server_name, **kwargs): + return """ + # Enable sending emails for notification events + #email: + # enable_notifs: false + # smtp_host: "localhost" + # smtp_port: 25 + # notif_from: Your Friendly Matrix Home Server <noreply@example.com> + # app_name: Matrix + # template_dir: res/templates + # notif_template_html: notif_mail.html + # notif_template_text: notif_mail.txt + # notif_for_new_users: True + """ diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py index 9a80ac39ec..fc2445484c 100644 --- a/synapse/config/homeserver.py +++ b/synapse/config/homeserver.py @@ -31,13 +31,14 @@ from .cas import CasConfig from .password import PasswordConfig from .jwt import JWTConfig from .ldap import LDAPConfig +from .emailconfig import EmailConfig class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig, RatelimitConfig, ContentRepositoryConfig, CaptchaConfig, VoipConfig, RegistrationConfig, MetricsConfig, ApiConfig, AppServiceConfig, KeyConfig, SAML2Config, CasConfig, - JWTConfig, LDAPConfig, PasswordConfig,): + JWTConfig, LDAPConfig, PasswordConfig, EmailConfig,): pass diff --git a/synapse/config/server.py b/synapse/config/server.py index 46c633548a..0b5f462e44 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -28,6 +28,11 @@ class ServerConfig(Config): self.print_pidfile = config.get("print_pidfile") self.user_agent_suffix = config.get("user_agent_suffix") self.use_frozen_dicts = config.get("use_frozen_dicts", True) + self.public_baseurl = config.get("public_baseurl") + + if self.public_baseurl is not None: + if self.public_baseurl[-1] != '/': + self.public_baseurl += '/' self.start_pushers = config.get("start_pushers", True) self.listeners = config.get("listeners", []) @@ -143,6 +148,9 @@ class ServerConfig(Config): # Whether to serve a web client from the HTTP/HTTPS root resource. web_client: True + # The public-facing base URL for the client API (not including _matrix/...) + # public_baseurl: https://example.com:8448/ + # Set the soft limit on the number of file descriptors synapse can use # Zero is used to indicate synapse should set the soft limit to the # hard limit. 
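
Editor's note: the configuration half of the change is above. EmailConfig refuses to enable notifications unless the SMTP and template settings are present, and ServerConfig gains a public_baseurl (needed by the mailer to build media URLs) that is normalised to end in a slash. The following is a condensed, standalone sketch of those checks, not the Synapse classes themselves; check_email_config is a name invented here.

# Condensed sketch of the validation done by EmailConfig.read_config and
# ServerConfig.read_config in this commit.
REQUIRED_EMAIL_KEYS = [
    "smtp_host", "smtp_port", "notif_from",
    "template_dir", "notif_template_html", "notif_template_text",
]


def check_email_config(config):
    email_config = config.get("email", {})
    if not email_config.get("enable_notifs", False):
        return None  # nothing else is required when notifs are disabled

    missing = [k for k in REQUIRED_EMAIL_KEYS if k not in email_config]
    if missing:
        raise RuntimeError(
            "email.enable_notifs is True but required keys are missing: %s"
            % ", ".join("email." + k for k in missing)
        )

    public_baseurl = config.get("public_baseurl")
    if public_baseurl is None:
        raise RuntimeError(
            "email.enable_notifs is True but no public_baseurl is set"
        )
    if not public_baseurl.endswith("/"):
        public_baseurl += "/"
    return public_baseurl
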
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 745c8901ee..2c811906d9 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -208,7 +208,7 @@ class BaseHandler(object): }) @defer.inlineCallbacks - def _filter_events_for_client(self, user_id, events, is_peeking=False): + def filter_events_for_client(self, user_id, events, is_peeking=False): """ Check which events a user is allowed to see diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index f51feda2f4..7d9e3cf364 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -123,7 +123,7 @@ class MessageHandler(BaseHandler): "end": next_token.to_string(), }) - events = yield self._filter_events_for_client( + events = yield self.filter_events_for_client( user_id, events, is_peeking=(member_event_id is None), @@ -483,7 +483,7 @@ class MessageHandler(BaseHandler): ] ).addErrback(unwrapFirstError) - messages = yield self._filter_events_for_client( + messages = yield self.filter_events_for_client( user_id, messages ) @@ -619,7 +619,7 @@ class MessageHandler(BaseHandler): end_token=stream_token ) - messages = yield self._filter_events_for_client( + messages = yield self.filter_events_for_client( user_id, messages, is_peeking=is_peeking ) @@ -700,7 +700,7 @@ class MessageHandler(BaseHandler): consumeErrors=True, ).addErrback(unwrapFirstError) - messages = yield self._filter_events_for_client( + messages = yield self.filter_events_for_client( user_id, messages, is_peeking=is_peeking, ) diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index dd9c18df84..fdebc9c438 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -449,7 +449,7 @@ class RoomContextHandler(BaseHandler): now_token = yield self.hs.get_event_sources().get_current_token() def filter_evts(events): - return self._filter_events_for_client( + return self.filter_events_for_client( user.to_string(), events, is_peeking=is_guest) diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 9937d8dd7f..a937e87408 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -172,7 +172,7 @@ class SearchHandler(BaseHandler): filtered_events = search_filter.filter([r["event"] for r in results]) - events = yield self._filter_events_for_client( + events = yield self.filter_events_for_client( user.to_string(), filtered_events ) @@ -223,7 +223,7 @@ class SearchHandler(BaseHandler): r["event"] for r in results ]) - events = yield self._filter_events_for_client( + events = yield self.filter_events_for_client( user.to_string(), filtered_events ) @@ -281,11 +281,11 @@ class SearchHandler(BaseHandler): event.room_id, event.event_id, before_limit, after_limit ) - res["events_before"] = yield self._filter_events_for_client( + res["events_before"] = yield self.filter_events_for_client( user.to_string(), res["events_before"] ) - res["events_after"] = yield self._filter_events_for_client( + res["events_after"] = yield self.filter_events_for_client( user.to_string(), res["events_after"] ) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 0bb1913285..b7dcbc6b1b 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -697,7 +697,7 @@ class SyncHandler(BaseHandler): if recents is not None: recents = sync_config.filter_collection.filter_room_timeline(recents) - recents = yield self._filter_events_for_client( + recents = yield self.filter_events_for_client( sync_config.user.to_string(), recents, ) @@ -718,7 +718,7 @@ class 
SyncHandler(BaseHandler): loaded_recents = sync_config.filter_collection.filter_room_timeline( events ) - loaded_recents = yield self._filter_events_for_client( + loaded_recents = yield self.filter_events_for_client( sync_config.user.to_string(), loaded_recents, ) diff --git a/synapse/notifier.py b/synapse/notifier.py index 6af7a8f424..cb58dfffd4 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -399,7 +399,7 @@ class Notifier(object): if name == "room": room_member_handler = self.hs.get_handlers().room_member_handler - new_events = yield room_member_handler._filter_events_for_client( + new_events = yield room_member_handler.filter_events_for_client( user.to_string(), new_events, is_peeking=is_peeking, diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py new file mode 100644 index 0000000000..3a13c7485a --- /dev/null +++ b/synapse/push/emailpusher.py @@ -0,0 +1,251 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from twisted.internet import defer, reactor + +import logging + +from synapse.util.metrics import Measure +from synapse.util.logcontext import LoggingContext + +from mailer import Mailer + +logger = logging.getLogger(__name__) + +# The amount of time we always wait before ever emailing about a notification +# (to give the user a chance to respond to other push or notice the window) +DELAY_BEFORE_MAIL_MS = 2 * 60 * 1000 + +THROTTLE_START_MS = 2 * 60 * 1000 +THROTTLE_MAX_MS = (2 * 60 * 1000) * (2 ** 11) # ~3 days +THROTTLE_MULTIPLIER = 2 + +# If no event triggers a notification for this long after the previous, +# the throttle is released. +THROTTLE_RESET_AFTER_MS = (2 * 60 * 1000) * (2 ** 11) # ~3 days + + +class EmailPusher(object): + """ + A pusher that sends email notifications about events (approximately) + when they happen. 
+ This shares quite a bit of code with httpusher: it would be good to + factor out the common parts + """ + def __init__(self, hs, pusherdict): + self.hs = hs + self.store = self.hs.get_datastore() + self.clock = self.hs.get_clock() + self.pusher_id = pusherdict['id'] + self.user_id = pusherdict['user_name'] + self.app_id = pusherdict['app_id'] + self.email = pusherdict['pushkey'] + self.last_stream_ordering = pusherdict['last_stream_ordering'] + self.timed_call = None + self.throttle_params = None + + # See httppusher + self.max_stream_ordering = None + + self.processing = False + + if self.hs.config.email_enable_notifs: + self.mailer = Mailer(self.hs) + else: + self.mailer = None + + @defer.inlineCallbacks + def on_started(self): + if self.mailer is not None: + self.throttle_params = yield self.store.get_throttle_params_by_room( + self.pusher_id + ) + yield self._process() + + def on_stop(self): + if self.timed_call: + self.timed_call.cancel() + + @defer.inlineCallbacks + def on_new_notifications(self, min_stream_ordering, max_stream_ordering): + self.max_stream_ordering = max(max_stream_ordering, self.max_stream_ordering) + yield self._process() + + def on_new_receipts(self, min_stream_id, max_stream_id): + # We could wake up and cancel the timer but there tend to be quite a + # lot of read receipts so it's probably less work to just let the + # timer fire + return defer.succeed(None) + + @defer.inlineCallbacks + def on_timer(self): + self.timed_call = None + yield self._process() + + @defer.inlineCallbacks + def _process(self): + if self.processing: + return + + with LoggingContext("emailpush._process"): + with Measure(self.clock, "emailpush._process"): + try: + self.processing = True + # if the max ordering changes while we're running _unsafe_process, + # call it again, and so on until we've caught up. + while True: + starting_max_ordering = self.max_stream_ordering + try: + yield self._unsafe_process() + except: + logger.exception("Exception processing notifs") + if self.max_stream_ordering == starting_max_ordering: + break + finally: + self.processing = False + + @defer.inlineCallbacks + def _unsafe_process(self): + """ + Main logic of the push loop without the wrapper function that sets + up logging, measures and guards against multiple instances of it + being run. + """ + unprocessed = yield self.store.get_unread_push_actions_for_user_in_range( + self.user_id, self.last_stream_ordering, self.max_stream_ordering + ) + + soonest_due_at = None + + for push_action in unprocessed: + received_at = push_action['received_ts'] + if received_at is None: + received_at = 0 + notif_ready_at = received_at + DELAY_BEFORE_MAIL_MS + + room_ready_at = self.room_ready_to_notify_at( + push_action['room_id'] + ) + + should_notify_at = max(notif_ready_at, room_ready_at) + + if should_notify_at < self.clock.time_msec(): + # one of our notifications is ready for sending, so we send + # *one* email updating the user on their notifications, + # we then consider all previously outstanding notifications + # to be delivered. 
+ yield self.send_notification(unprocessed) + + yield self.save_last_stream_ordering_and_success(max([ + ea['stream_ordering'] for ea in unprocessed + ])) + yield self.sent_notif_update_throttle( + push_action['room_id'], push_action + ) + break + else: + if soonest_due_at is None or should_notify_at < soonest_due_at: + soonest_due_at = should_notify_at + + if self.timed_call is not None: + self.timed_call.cancel() + self.timed_call = None + + if soonest_due_at is not None: + self.timed_call = reactor.callLater( + self.seconds_until(soonest_due_at), self.on_timer + ) + + @defer.inlineCallbacks + def save_last_stream_ordering_and_success(self, last_stream_ordering): + self.last_stream_ordering = last_stream_ordering + yield self.store.update_pusher_last_stream_ordering_and_success( + self.app_id, self.email, self.user_id, + last_stream_ordering, self.clock.time_msec() + ) + + def seconds_until(self, ts_msec): + return (ts_msec - self.clock.time_msec()) / 1000 + + def get_room_throttle_ms(self, room_id): + if room_id in self.throttle_params: + return self.throttle_params[room_id]["throttle_ms"] + else: + return 0 + + def get_room_last_sent_ts(self, room_id): + if room_id in self.throttle_params: + return self.throttle_params[room_id]["last_sent_ts"] + else: + return 0 + + def room_ready_to_notify_at(self, room_id): + """ + Determines whether throttling should prevent us from sending an email + for the given room + Returns: True if we should send, False if we should not + """ + last_sent_ts = self.get_room_last_sent_ts(room_id) + throttle_ms = self.get_room_throttle_ms(room_id) + + may_send_at = last_sent_ts + throttle_ms + return may_send_at + + @defer.inlineCallbacks + def sent_notif_update_throttle(self, room_id, notified_push_action): + # We have sent a notification, so update the throttle accordingly. + # If the event that triggered the notif happened more than + # THROTTLE_RESET_AFTER_MS after the previous one that triggered a + # notif, we release the throttle. Otherwise, the throttle is increased. 
+ time_of_previous_notifs = yield self.store.get_time_of_last_push_action_before( + notified_push_action['stream_ordering'] + ) + + time_of_this_notifs = notified_push_action['received_ts'] + + if time_of_previous_notifs is not None and time_of_this_notifs is not None: + gap = time_of_this_notifs - time_of_previous_notifs + else: + # if we don't know the arrival time of one of the notifs (it was not + # stored prior to email notification code) then assume a gap of + # zero which will just not reset the throttle + gap = 0 + + current_throttle_ms = self.get_room_throttle_ms(room_id) + + if gap > THROTTLE_RESET_AFTER_MS: + new_throttle_ms = THROTTLE_START_MS + else: + if current_throttle_ms == 0: + new_throttle_ms = THROTTLE_START_MS + else: + new_throttle_ms = min( + current_throttle_ms * THROTTLE_MULTIPLIER, + THROTTLE_MAX_MS + ) + self.throttle_params[room_id] = { + "last_sent_ts": self.clock.time_msec(), + "throttle_ms": new_throttle_ms + } + yield self.store.set_throttle_params( + self.pusher_id, room_id, self.throttle_params[room_id] + ) + + @defer.inlineCallbacks + def send_notification(self, push_actions): + logger.info("Sending notif email for user %r", self.user_id) + yield self.mailer.send_notification_mail( + self.user_id, self.email, push_actions + ) diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py new file mode 100644 index 0000000000..7031fa6d55 --- /dev/null +++ b/synapse/push/mailer.py @@ -0,0 +1,455 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from twisted.internet import defer +from twisted.mail.smtp import sendmail + +import email.utils +import email.mime.multipart +from email.mime.text import MIMEText +from email.mime.multipart import MIMEMultipart + +from synapse.util.async import concurrently_execute +from synapse.util.presentable_names import ( + calculate_room_name, name_from_member_event, descriptor_from_member_events +) +from synapse.types import UserID +from synapse.api.errors import StoreError +from synapse.api.constants import EventTypes + +import jinja2 +import bleach + +import time +import urllib + +import logging +logger = logging.getLogger(__name__) + + +MESSAGE_FROM_PERSON_IN_ROOM = "You have a message on %(app)s from %(person)s " \ + "in the %s room..." +MESSAGE_FROM_PERSON = "You have a message on %(app)s from %(person)s..." +MESSAGES_FROM_PERSON = "You have messages on %(app)s from %(person)s..." +MESSAGES_IN_ROOM = "There are some messages on %(app)s for you in the %(room)s room..." +MESSAGES_IN_ROOMS = "Here are some messages on %(app)s you may have missed..." +INVITE_FROM_PERSON_TO_ROOM = "%(person)s has invited you to join the " \ + "%(room)s room on %(app)s..." +INVITE_FROM_PERSON = "%(person)s has invited you to chat on %(app)s..." 
+ +CONTEXT_BEFORE = 1 +CONTEXT_AFTER = 1 + +# From https://github.com/matrix-org/matrix-react-sdk/blob/master/src/HtmlUtils.js +ALLOWED_TAGS = [ + 'font', # custom to matrix for IRC-style font coloring + 'del', # for markdown + # deliberately no h1/h2 to stop people shouting. + 'h3', 'h4', 'h5', 'h6', 'blockquote', 'p', 'a', 'ul', 'ol', + 'nl', 'li', 'b', 'i', 'u', 'strong', 'em', 'strike', 'code', 'hr', 'br', 'div', + 'table', 'thead', 'caption', 'tbody', 'tr', 'th', 'td', 'pre' +] +ALLOWED_ATTRS = { + # custom ones first: + "font": ["color"], # custom to matrix + "a": ["href", "name", "target"], # remote target: custom to matrix + # We don't currently allow img itself by default, but this + # would make sense if we did + "img": ["src"], +} +# When bleach release a version with this option, we can specify schemes +# ALLOWED_SCHEMES = ["http", "https", "ftp", "mailto"] + + +class Mailer(object): + def __init__(self, hs): + self.hs = hs + self.store = self.hs.get_datastore() + self.state_handler = self.hs.get_state_handler() + loader = jinja2.FileSystemLoader(self.hs.config.email_template_dir) + self.app_name = self.hs.config.email_app_name + env = jinja2.Environment(loader=loader) + env.filters["format_ts"] = format_ts_filter + env.filters["mxc_to_http"] = self.mxc_to_http_filter + self.notif_template_html = env.get_template( + self.hs.config.email_notif_template_html + ) + self.notif_template_text = env.get_template( + self.hs.config.email_notif_template_text + ) + + @defer.inlineCallbacks + def send_notification_mail(self, user_id, email_address, push_actions): + raw_from = email.utils.parseaddr(self.hs.config.email_notif_from)[1] + raw_to = email.utils.parseaddr(email_address)[1] + + if raw_to == '': + raise RuntimeError("Invalid 'to' address") + + rooms_in_order = deduped_ordered_list( + [pa['room_id'] for pa in push_actions] + ) + + notif_events = yield self.store.get_events( + [pa['event_id'] for pa in push_actions] + ) + + notifs_by_room = {} + for pa in push_actions: + notifs_by_room.setdefault(pa["room_id"], []).append(pa) + + # collect the current state for all the rooms in which we have + # notifications + state_by_room = {} + + try: + user_display_name = yield self.store.get_profile_displayname( + UserID.from_string(user_id).localpart + ) + except StoreError: + user_display_name = user_id + + @defer.inlineCallbacks + def _fetch_room_state(room_id): + room_state = yield self.state_handler.get_current_state(room_id) + state_by_room[room_id] = room_state + + # Run at most 3 of these at once: sync does 10 at a time but email + # notifs are much realtime than sync so we can afford to wait a bit. 
+ yield concurrently_execute(_fetch_room_state, rooms_in_order, 3) + + rooms = [] + + for r in rooms_in_order: + roomvars = yield self.get_room_vars( + r, user_id, notifs_by_room[r], notif_events, state_by_room[r] + ) + rooms.append(roomvars) + + summary_text = self.make_summary_text( + notifs_by_room, state_by_room, notif_events, user_id + ) + + template_vars = { + "user_display_name": user_display_name, + "unsubscribe_link": self.make_unsubscribe_link(), + "summary_text": summary_text, + "app_name": self.app_name, + "rooms": rooms, + } + + html_text = self.notif_template_html.render(**template_vars) + html_part = MIMEText(html_text, "html", "utf8") + + plain_text = self.notif_template_text.render(**template_vars) + text_part = MIMEText(plain_text, "plain", "utf8") + + multipart_msg = MIMEMultipart('alternative') + multipart_msg['Subject'] = "[%s] %s" % (self.app_name, summary_text) + multipart_msg['From'] = self.hs.config.email_notif_from + multipart_msg['To'] = email_address + multipart_msg['Date'] = email.utils.formatdate() + multipart_msg['Message-ID'] = email.utils.make_msgid() + multipart_msg.attach(text_part) + multipart_msg.attach(html_part) + + logger.info("Sending email push notification to %s" % email_address) + # logger.debug(html_text) + + yield sendmail( + self.hs.config.email_smtp_host, + raw_from, raw_to, multipart_msg.as_string(), + port=self.hs.config.email_smtp_port + ) + + @defer.inlineCallbacks + def get_room_vars(self, room_id, user_id, notifs, notif_events, room_state): + my_member_event = room_state[("m.room.member", user_id)] + is_invite = my_member_event.content["membership"] == "invite" + + room_vars = { + "title": calculate_room_name(room_state, user_id), + "hash": string_ordinal_total(room_id), # See sender avatar hash + "notifs": [], + "invite": is_invite, + "link": self.make_room_link(room_id), + } + + if not is_invite: + for n in notifs: + notifvars = yield self.get_notif_vars( + n, user_id, notif_events[n['event_id']], room_state + ) + + # merge overlapping notifs together. + # relies on the notifs being in chronological order. 
+ merge = False + if room_vars['notifs'] and 'messages' in room_vars['notifs'][-1]: + prev_messages = room_vars['notifs'][-1]['messages'] + for message in notifvars['messages']: + pm = filter(lambda pm: pm['id'] == message['id'], prev_messages) + if pm: + if not message["is_historical"]: + pm[0]["is_historical"] = False + merge = True + elif merge: + # we're merging, so append any remaining messages + # in this notif to the previous one + prev_messages.append(message) + + if not merge: + room_vars['notifs'].append(notifvars) + + defer.returnValue(room_vars) + + @defer.inlineCallbacks + def get_notif_vars(self, notif, user_id, notif_event, room_state): + results = yield self.store.get_events_around( + notif['room_id'], notif['event_id'], + before_limit=CONTEXT_BEFORE, after_limit=CONTEXT_AFTER + ) + + ret = { + "link": self.make_notif_link(notif), + "ts": notif['received_ts'], + "messages": [], + } + + handler = self.hs.get_handlers().message_handler + the_events = yield handler.filter_events_for_client( + user_id, results["events_before"] + ) + the_events.append(notif_event) + + for event in the_events: + messagevars = self.get_message_vars(notif, event, room_state) + if messagevars is not None: + ret['messages'].append(messagevars) + + defer.returnValue(ret) + + def get_message_vars(self, notif, event, room_state): + if event.type != EventTypes.Message: + return None + + sender_state_event = room_state[("m.room.member", event.sender)] + sender_name = name_from_member_event(sender_state_event) + sender_avatar_url = sender_state_event.content["avatar_url"] + + # 'hash' for deterministically picking default images: use + # sender_hash % the number of default images to choose from + sender_hash = string_ordinal_total(event.sender) + + ret = { + "msgtype": event.content["msgtype"], + "is_historical": event.event_id != notif['event_id'], + "id": event.event_id, + "ts": event.origin_server_ts, + "sender_name": sender_name, + "sender_avatar_url": sender_avatar_url, + "sender_hash": sender_hash, + } + + if event.content["msgtype"] == "m.text": + self.add_text_message_vars(ret, event) + elif event.content["msgtype"] == "m.image": + self.add_image_message_vars(ret, event) + + if "body" in event.content: + ret["body_text_plain"] = event.content["body"] + + return ret + + def add_text_message_vars(self, messagevars, event): + if "format" in event.content: + msgformat = event.content["format"] + else: + msgformat = None + messagevars["format"] = msgformat + + if msgformat == "org.matrix.custom.html": + messagevars["body_text_html"] = safe_markup(event.content["formatted_body"]) + else: + messagevars["body_text_html"] = safe_text(event.content["body"]) + + return messagevars + + def add_image_message_vars(self, messagevars, event): + messagevars["image_url"] = event.content["url"] + + return messagevars + + def make_summary_text(self, notifs_by_room, state_by_room, notif_events, user_id): + if len(notifs_by_room) == 1: + # Only one room has new stuff + room_id = notifs_by_room.keys()[0] + + # If the room has some kind of name, use it, but we don't + # want the generated-from-names one here otherwise we'll + # end up with, "new message from Bob in the Bob room" + room_name = calculate_room_name( + state_by_room[room_id], user_id, fallback_to_members=False + ) + + my_member_event = state_by_room[room_id][("m.room.member", user_id)] + if my_member_event.content["membership"] == "invite": + inviter_member_event = state_by_room[room_id][ + ("m.room.member", my_member_event.sender) + ] + inviter_name = 
name_from_member_event(inviter_member_event) + + if room_name is None: + return INVITE_FROM_PERSON % { + "person": inviter_name, + "app": self.app_name + } + else: + return INVITE_FROM_PERSON_TO_ROOM % { + "person": inviter_name, + "room": room_name, + "app": self.app_name, + } + + sender_name = None + if len(notifs_by_room[room_id]) == 1: + # There is just the one notification, so give some detail + event = notif_events[notifs_by_room[room_id][0]["event_id"]] + if ("m.room.member", event.sender) in state_by_room[room_id]: + state_event = state_by_room[room_id][("m.room.member", event.sender)] + sender_name = name_from_member_event(state_event) + + if sender_name is not None and room_name is not None: + return MESSAGE_FROM_PERSON_IN_ROOM % { + "person": sender_name, + "room": room_name, + "app": self.app_name, + } + elif sender_name is not None: + return MESSAGE_FROM_PERSON % { + "person": sender_name, + "app": self.app_name, + } + else: + # There's more than one notification for this room, so just + # say there are several + if room_name is not None: + return MESSAGES_IN_ROOM % { + "room": room_name, + "app": self.app_name, + } + else: + # If the room doesn't have a name, say who the messages + # are from explicitly to avoid, "messages in the Bob room" + sender_ids = list(set([ + notif_events[n['event_id']].sender + for n in notifs_by_room[room_id] + ])) + + return MESSAGES_FROM_PERSON % { + "person": descriptor_from_member_events([ + state_by_room[room_id][("m.room.member", s)] + for s in sender_ids + ]), + "app": self.app_name, + } + else: + # Stuff's happened in multiple different rooms + return MESSAGES_IN_ROOMS % { + "app": self.app_name, + } + + def make_room_link(self, room_id): + # need /beta for Universal Links to work on iOS + if self.app_name == "Vector": + return "https://vector.im/beta/#/room/%s" % (room_id,) + else: + return "https://matrix.to/#/room/%s" % (room_id,) + + def make_notif_link(self, notif): + # need /beta for Universal Links to work on iOS + if self.app_name == "Vector": + return "https://vector.im/beta/#/room/%s/%s" % ( + notif['room_id'], notif['event_id'] + ) + else: + return "https://matrix.to/#/room/%s/%s" % ( + notif['room_id'], notif['event_id'] + ) + + def make_unsubscribe_link(self): + # XXX: matrix.to + return "https://vector.im/#/settings" + + def mxc_to_http_filter(self, value, width, height, resize_method="crop"): + if value[0:6] != "mxc://": + return "" + + serverAndMediaId = value[6:] + if '#' in serverAndMediaId: + (serverAndMediaId, fragment) = serverAndMediaId.split('#', 1) + fragment = "#" + fragment + + params = { + "width": width, + "height": height, + "method": resize_method, + } + return "%s_matrix/media/v1/thumbnail/%s?%s%s" % ( + self.hs.config.public_baseurl, + serverAndMediaId, + urllib.urlencode(params), + fragment or "", + ) + + +def safe_markup(raw_html): + return jinja2.Markup(bleach.linkify(bleach.clean( + raw_html, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRS, + # bleach master has this, but it isn't released yet + # protocols=ALLOWED_SCHEMES, + strip=True + ))) + + +def safe_text(raw_text): + """ + Process text: treat it as HTML but escape any tags (ie. just escape the + HTML) then linkify it. 
+ """ + return jinja2.Markup(bleach.linkify(bleach.clean( + raw_text, tags=[], attributes={}, + strip=False + ))) + + +def deduped_ordered_list(l): + seen = set() + ret = [] + for item in l: + if item not in seen: + seen.add(item) + ret.append(item) + return ret + + +def string_ordinal_total(s): + tot = 0 + for c in s: + tot += ord(c) + return tot + + +def format_ts_filter(value, format): + return time.strftime(format, time.localtime(value / 1000)) diff --git a/synapse/push/pusher.py b/synapse/push/pusher.py index 4960837504..e6c0806415 100644 --- a/synapse/push/pusher.py +++ b/synapse/push/pusher.py @@ -1,10 +1,37 @@ +# -*- coding: utf-8 -*- +# Copyright 2014-2016 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from httppusher import HttpPusher -PUSHER_TYPES = { - 'http': HttpPusher -} +import logging +logger = logging.getLogger(__name__) def create_pusher(hs, pusherdict): + logger.info("trying to create_pusher for %r", pusherdict) + + PUSHER_TYPES = { + "http": HttpPusher, + } + + logger.info("email enable notifs: %r", hs.config.email_enable_notifs) + if hs.config.email_enable_notifs: + from synapse.push.emailpusher import EmailPusher + PUSHER_TYPES["email"] = EmailPusher + logger.info("defined email pusher type") + if pusherdict['kind'] in PUSHER_TYPES: + logger.info("found pusher") return PUSHER_TYPES[pusherdict['kind']](hs, pusherdict) diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 6ef48d63f7..5853ec36a9 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -17,7 +17,6 @@ from twisted.internet import defer import pusher -from synapse.push import PusherConfigException from synapse.util.logcontext import preserve_fn from synapse.util.async import run_on_reactor @@ -50,6 +49,7 @@ class PusherPool: # recreated, added and started: this means we have only one # code path adding pushers. 
pusher.create_pusher(self.hs, { + "id": None, "user_name": user_id, "kind": kind, "app_id": app_id, @@ -185,8 +185,8 @@ class PusherPool: for pusherdict in pushers: try: p = pusher.create_pusher(self.hs, pusherdict) - except PusherConfigException: - logger.exception("Couldn't start a pusher: caught PusherConfigException") + except: + logger.exception("Couldn't start a pusher: caught Exception") continue if p: appid_pushkey = "%s:%s" % ( diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 0eb3d6c1de..e0a7a19777 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -44,6 +44,10 @@ CONDITIONAL_REQUIREMENTS = { "preview_url": { "netaddr>=0.7.18": ["netaddr"], }, + "email.enable_notifs": { + "Jinja2>=2.8": ["Jinja2>=2.8"], + "bleach>=1.4.2": ["bleach>=1.4.2"], + }, } diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index ff8f69ddbf..1ecc02d94d 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -48,6 +48,7 @@ class RegisterRestServlet(RestServlet): super(RegisterRestServlet, self).__init__() self.hs = hs self.auth = hs.get_auth() + self.store = hs.get_datastore() self.auth_handler = hs.get_handlers().auth_handler self.registration_handler = hs.get_handlers().registration_handler self.identity_handler = hs.get_handlers().identity_handler @@ -214,6 +215,34 @@ class RegisterRestServlet(RestServlet): threepid['validated_at'], ) + # And we add an email pusher for them by default, but only + # if email notifications are enabled (so people don't start + # getting mail spam where they weren't before if email + # notifs are set up on a home server) + if ( + self.hs.config.email_enable_notifs and + self.hs.config.email_notif_for_new_users + ): + # Pull the ID of the access token back out of the db + # It would really make more sense for this to be passed + # up when the access token is saved, but that's quite an + # invasive change I'd rather do separately. 
+ user_tuple = yield self.store.get_user_by_access_token( + token + ) + + yield self.hs.get_pusherpool().add_pusher( + user_id=user_id, + access_token=user_tuple["token_id"], + kind="email", + app_id="m.email", + app_display_name="Email Notifications", + device_display_name=threepid["address"], + pushkey=threepid["address"], + lang=None, # We don't know a user's language here + data={}, + ) + if 'bind_email' in params and params['bind_email']: logger.info("bind_email specified: binding") diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py index 86a98b6f11..6f316f7d24 100644 --- a/synapse/storage/event_push_actions.py +++ b/synapse/storage/event_push_actions.py @@ -118,16 +118,19 @@ class EventPushActionsStore(SQLBaseStore): max_stream_ordering=None): def get_after_receipt(txn): sql = ( - "SELECT ep.event_id, ep.stream_ordering, ep.actions " - "FROM event_push_actions AS ep, (" - " SELECT room_id, user_id," - " max(topological_ordering) as topological_ordering," - " max(stream_ordering) as stream_ordering" + "SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions, " + "e.received_ts " + "FROM (" + " SELECT room_id, user_id, " + " max(topological_ordering) as topological_ordering, " + " max(stream_ordering) as stream_ordering " " FROM events" " NATURAL JOIN receipts_linearized WHERE receipt_type = 'm.read'" " GROUP BY room_id, user_id" - ") AS rl " - "WHERE" + ") AS rl," + " event_push_actions AS ep" + " INNER JOIN events AS e USING (room_id, event_id)" + " WHERE" " ep.room_id = rl.room_id" " AND (" " ep.topological_ordering > rl.topological_ordering" @@ -153,11 +156,13 @@ class EventPushActionsStore(SQLBaseStore): def get_no_receipt(txn): sql = ( - "SELECT ep.event_id, ep.stream_ordering, ep.actions " - "FROM event_push_actions AS ep " - "WHERE ep.room_id not in (" + "SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions," + " e.received_ts" + " FROM event_push_actions AS ep" + " JOIN events e ON ep.room_id = e.room_id AND ep.event_id = e.event_id" + " WHERE ep.room_id not in (" " SELECT room_id FROM events NATURAL JOIN receipts_linearized" - " WHERE receipt_type = 'm.read' AND user_id = ? " + " WHERE receipt_type = 'm.read' AND user_id = ?" " GROUP BY room_id" ") AND ep.user_id = ? AND ep.stream_ordering > ?" ) @@ -175,12 +180,30 @@ class EventPushActionsStore(SQLBaseStore): defer.returnValue([ { "event_id": row[0], - "stream_ordering": row[1], - "actions": json.loads(row[2]), + "room_id": row[1], + "stream_ordering": row[2], + "actions": json.loads(row[3]), + "received_ts": row[4], } for row in after_read_receipt + no_read_receipt ]) @defer.inlineCallbacks + def get_time_of_last_push_action_before(self, stream_ordering): + def f(txn): + sql = ( + "SELECT e.received_ts" + " FROM event_push_actions AS ep" + " JOIN events e ON ep.room_id = e.room_id AND ep.event_id = e.event_id" + " WHERE ep.stream_ordering > ?" 
+ " ORDER BY ep.stream_ordering ASC" + " LIMIT 1" + ) + txn.execute(sql, (stream_ordering,)) + return txn.fetchone() + result = yield self.runInteraction("get_time_of_last_push_action_before", f) + defer.returnValue(result[0] if result else None) + + @defer.inlineCallbacks def get_latest_push_action_stream_ordering(self): def f(txn): txn.execute("SELECT MAX(stream_ordering) FROM event_push_actions") diff --git a/synapse/storage/events.py b/synapse/storage/events.py index a27b7919cd..4655669ba0 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -144,6 +144,7 @@ class EventsStore(SQLBaseStore): def __init__(self, hs): super(EventsStore, self).__init__(hs) + self._clock = hs.get_clock() self.register_background_update_handler( self.EVENT_ORIGIN_SERVER_TS_NAME, self._background_reindex_origin_server_ts ) @@ -565,6 +566,7 @@ class EventsStore(SQLBaseStore): "outlier": event.internal_metadata.is_outlier(), "content": encode_json(event.content).decode("UTF-8"), "origin_server_ts": int(event.origin_server_ts), + "received_ts": self._clock.time_msec(), } for event, _ in events_and_contexts ], diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index 11feb3eb11..d9afd7ec87 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -233,3 +233,30 @@ class PusherStore(SQLBaseStore): {'failing_since': failing_since}, desc="update_pusher_failing_since", ) + + @defer.inlineCallbacks + def get_throttle_params_by_room(self, pusher_id): + res = yield self._simple_select_list( + "pusher_throttle", + {"pusher": pusher_id}, + ["room_id", "last_sent_ts", "throttle_ms"], + desc="get_throttle_params_by_room" + ) + + params_by_room = {} + for row in res: + params_by_room[row["room_id"]] = { + "last_sent_ts": row["last_sent_ts"], + "throttle_ms": row["throttle_ms"] + } + + defer.returnValue(params_by_room) + + @defer.inlineCallbacks + def set_throttle_params(self, pusher_id, room_id, params): + yield self._simple_upsert( + "pusher_throttle", + {"pusher": pusher_id, "room_id": room_id}, + params, + desc="set_throttle_params" + ) diff --git a/synapse/storage/schema/delta/31/events.sql b/synapse/storage/schema/delta/31/events.sql new file mode 100644 index 0000000000..1dd0f9e170 --- /dev/null +++ b/synapse/storage/schema/delta/31/events.sql @@ -0,0 +1,16 @@ +/* Copyright 2016 OpenMarket Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +ALTER TABLE events ADD COLUMN received_ts BIGINT; diff --git a/synapse/storage/schema/delta/31/pusher_throttle.sql b/synapse/storage/schema/delta/31/pusher_throttle.sql new file mode 100644 index 0000000000..d86d30c13c --- /dev/null +++ b/synapse/storage/schema/delta/31/pusher_throttle.sql @@ -0,0 +1,23 @@ +/* Copyright 2016 OpenMarket Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +CREATE TABLE pusher_throttle( + pusher BIGINT NOT NULL, + room_id TEXT NOT NULL, + last_sent_ts BIGINT, + throttle_ms BIGINT, + PRIMARY KEY (pusher, room_id) +); diff --git a/synapse/util/presentable_names.py b/synapse/util/presentable_names.py new file mode 100644 index 0000000000..3efa8a8206 --- /dev/null +++ b/synapse/util/presentable_names.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re + +# intentionally looser than what aliases we allow to be registered since +# other HSes may allow aliases that we would not +ALIAS_RE = re.compile(r"^#.*:.+$") + +ALL_ALONE = "Empty Room" + + +def calculate_room_name(room_state, user_id, fallback_to_members=True): + """ + Works out a user-facing name for the given room as per Matrix + spec recommendations. + Does not yet support internationalisation. + Args: + room_state: Dictionary of the room's state + user_id: The ID of the user to whom the room name is being presented + fallback_to_members: If False, return None instead of generating a name + based on the room's members if the room has no + title or aliases. + + Returns: + (string or None) A human readable name for the room. + """ + # does it have a name? + if ("m.room.name", "") in room_state: + m_room_name = room_state[("m.room.name", "")] + if m_room_name.content and m_room_name.content["name"]: + return m_room_name.content["name"] + + # does it have a canonical alias? + if ("m.room.canonical_alias", "") in room_state: + canon_alias = room_state[("m.room.canonical_alias", "")] + if ( + canon_alias.content and canon_alias.content["alias"] and + _looks_like_an_alias(canon_alias.content["alias"]) + ): + return canon_alias.content["alias"] + + # at this point we're going to need to search the state by all state keys + # for an event type, so rearrange the data structure + room_state_bytype = _state_as_two_level_dict(room_state) + + # right then, any aliases at all? 
+ if "m.room.aliases" in room_state_bytype: + m_room_aliases = room_state_bytype["m.room.aliases"] + if len(m_room_aliases.values()) > 0: + first_alias_event = m_room_aliases.values()[0] + if first_alias_event.content and first_alias_event.content["aliases"]: + the_aliases = first_alias_event.content["aliases"] + if len(the_aliases) > 0 and _looks_like_an_alias(the_aliases[0]): + return the_aliases[0] + + if not fallback_to_members: + return None + + my_member_event = None + if ("m.room.member", user_id) in room_state: + my_member_event = room_state[("m.room.member", user_id)] + + if ( + my_member_event is not None and + my_member_event.content['membership'] == "invite" + ): + if ("m.room.member", my_member_event.sender) in room_state: + inviter_member_event = room_state[("m.room.member", my_member_event.sender)] + return "Invite from %s" % (name_from_member_event(inviter_member_event),) + else: + return "Room Invite" + + # we're going to have to generate a name based on who's in the room, + # so find out who is in the room that isn't the user. + if "m.room.member" in room_state_bytype: + all_members = [ + ev for ev in room_state_bytype["m.room.member"].values() + if ev.content['membership'] == "join" or ev.content['membership'] == "invite" + ] + # Sort the member events oldest-first so the we name people in the + # order the joined (it should at least be deterministic rather than + # dictionary iteration order) + all_members.sort(key=lambda e: e.origin_server_ts) + other_members = [m for m in all_members if m.state_key != user_id] + else: + other_members = [] + all_members = [] + + if len(other_members) == 0: + if len(all_members) == 1: + # self-chat, peeked room with 1 participant, + # or inbound invite, or outbound 3PID invite. + if all_members[0].sender == user_id: + if "m.room.third_party_invite" in room_state_bytype: + third_party_invites = room_state_bytype["m.room.third_party_invite"] + if len(third_party_invites) > 0: + # technically third party invite events are not member + # events, but they are close enough + return "Inviting %s" ( + descriptor_from_member_events(third_party_invites) + ) + else: + return ALL_ALONE + else: + return name_from_member_event(all_members[0]) + else: + return ALL_ALONE + else: + return descriptor_from_member_events(other_members) + + +def descriptor_from_member_events(member_events): + if len(member_events) == 0: + return "nobody" + elif len(member_events) == 1: + return name_from_member_event(member_events[0]) + elif len(member_events) == 2: + return "%s and %s" % ( + name_from_member_event(member_events[0]), + name_from_member_event(member_events[1]), + ) + else: + return "%s and %d others" % ( + name_from_member_event(member_events[0]), + len(member_events) - 1, + ) + + +def name_from_member_event(member_event): + if ( + member_event.content and "displayname" in member_event.content and + member_event.content["displayname"] + ): + return member_event.content["displayname"] + return member_event.state_key + + +def _state_as_two_level_dict(state): + ret = {} + for k, v in state.items(): + ret.setdefault(k[0], {})[k[1]] = v + return ret + + +def _looks_like_an_alias(string): + return ALIAS_RE.match(string) is not None |