Diffstat (limited to 'synapse/handlers')
-rw-r--r-- | synapse/handlers/_base.py      |  142
-rw-r--r-- | synapse/handlers/directory.py  |    2
-rw-r--r-- | synapse/handlers/events.py     |   43
-rw-r--r-- | synapse/handlers/federation.py |   97
-rw-r--r-- | synapse/handlers/message.py    |   80
-rw-r--r-- | synapse/handlers/presence.py   | 1811
-rw-r--r-- | synapse/handlers/profile.py    |   31
-rw-r--r-- | synapse/handlers/receipts.py   |    2
-rw-r--r-- | synapse/handlers/register.py   |   32
-rw-r--r-- | synapse/handlers/room.py       |  624
-rw-r--r-- | synapse/handlers/sync.py       |   51
-rw-r--r-- | synapse/handlers/typing.py     |   14
12 files changed, 1492 insertions, 1437 deletions
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 064e8723c8..bdade98bf7 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -18,7 +18,7 @@ from twisted.internet import defer from synapse.api.errors import LimitExceededError, SynapseError, AuthError from synapse.crypto.event_signing import add_hashes_and_signatures from synapse.api.constants import Membership, EventTypes -from synapse.types import UserID, RoomAlias +from synapse.types import UserID, RoomAlias, Requester from synapse.push.action_generator import ActionGenerator from synapse.util.logcontext import PreserveLoggingContext @@ -53,9 +53,15 @@ class BaseHandler(object): self.event_builder_factory = hs.get_event_builder_factory() @defer.inlineCallbacks - def _filter_events_for_clients(self, user_tuples, events, event_id_to_state): + def filter_events_for_clients(self, user_tuples, events, event_id_to_state): """ Returns dict of user_id -> list of events that user is allowed to see. + + :param (str, bool) user_tuples: (user id, is_peeking) for each + user to be checked. is_peeking should be true if: + * the user is not currently a member of the room, and: + * the user has not been a member of the room since the given + events """ forgotten = yield defer.gatherResults([ self.store.who_forgot_in_room( @@ -72,18 +78,20 @@ class BaseHandler(object): def allowed(event, user_id, is_peeking): state = event_id_to_state[event.event_id] + # get the room_visibility at the time of the event. visibility_event = state.get((EventTypes.RoomHistoryVisibility, ""), None) if visibility_event: visibility = visibility_event.content.get("history_visibility", "shared") else: visibility = "shared" + # if it was world_readable, it's easy: everyone can read it if visibility == "world_readable": return True - if is_peeking: - return False - + # get the user's membership at the time of the event. (or rather, + # just *after* the event. Which means that people can see their + # own join events, but not (currently) their own leave events.) membership_event = state.get((EventTypes.Member, user_id), None) if membership_event: if membership_event.event_id in event_id_forgotten: @@ -93,20 +101,29 @@ class BaseHandler(object): else: membership = None + # if the user was a member of the room at the time of the event, + # they can see it. if membership == Membership.JOIN: return True - if event.type == EventTypes.RoomHistoryVisibility: - return not is_peeking + if visibility == "joined": + # we weren't a member at the time of the event, so we can't + # see this event. + return False - if visibility == "shared": - return True - elif visibility == "joined": - return membership == Membership.JOIN elif visibility == "invited": + # user can also see the event if they were *invited* at the time + # of the event. return membership == Membership.INVITE - return True + else: + # visibility is shared: user can also see the event if they have + # become a member since the event + # + # XXX: if the user has subsequently joined and then left again, + # ideally we would share history up to the point they left. But + # we don't know when they left. + return not is_peeking defer.returnValue({ user_id: [ @@ -119,7 +136,17 @@ class BaseHandler(object): @defer.inlineCallbacks def _filter_events_for_client(self, user_id, events, is_peeking=False): - # Assumes that user has at some point joined the room if not is_guest. 
+ """ + Check which events a user is allowed to see + + :param str user_id: user id to be checked + :param [synapse.events.EventBase] events: list of events to be checked + :param bool is_peeking should be True if: + * the user is not currently a member of the room, and: + * the user has not been a member of the room since the given + events + :rtype [synapse.events.EventBase] + """ types = ( (EventTypes.RoomHistoryVisibility, ""), (EventTypes.Member, user_id), @@ -128,7 +155,7 @@ class BaseHandler(object): frozenset(e.event_id for e in events), types=types ) - res = yield self._filter_events_for_clients( + res = yield self.filter_events_for_clients( [(user_id, is_peeking)], events, event_id_to_state ) defer.returnValue(res.get(user_id, [])) @@ -147,7 +174,7 @@ class BaseHandler(object): @defer.inlineCallbacks def _create_new_client_event(self, builder): - latest_ret = yield self.store.get_latest_events_in_room( + latest_ret = yield self.store.get_latest_event_ids_and_hashes_in_room( builder.room_id, ) @@ -156,7 +183,10 @@ class BaseHandler(object): else: depth = 1 - prev_events = [(e, h) for e, h, _ in latest_ret] + prev_events = [ + (event_id, prev_hashes) + for event_id, prev_hashes, _ in latest_ret + ] builder.prev_events = prev_events builder.depth = depth @@ -165,6 +195,31 @@ class BaseHandler(object): context = yield state_handler.compute_event_context(builder) + # If we've received an invite over federation, there are no latest + # events in the room, because we don't know enough about the graph + # fragment we received to treat it like a graph, so the above returned + # no relevant events. It may have returned some events (if we have + # joined and left the room), but not useful ones, like the invite. So we + # forcibly set our context to the invite we received over federation. + if ( + not self.is_host_in_room(context.current_state) and + builder.type == EventTypes.Member + ): + prev_member_event = yield self.store.get_room_member( + builder.sender, builder.room_id + ) + if prev_member_event: + builder.prev_events = ( + prev_member_event.event_id, + prev_member_event.prev_events + ) + + context = yield state_handler.compute_event_context( + builder, + old_state=(prev_member_event,), + outlier=True + ) + if builder.is_state(): builder.prev_state = yield self.store.add_event_hashes( context.prev_state_events @@ -187,10 +242,33 @@ class BaseHandler(object): (event, context,) ) + def is_host_in_room(self, current_state): + room_members = [ + (state_key, event.membership) + for ((event_type, state_key), event) in current_state.items() + if event_type == EventTypes.Member + ] + if len(room_members) == 0: + # Have we just created the room, and is this about to be the very + # first member event? + create_event = current_state.get(("m.room.create", "")) + if create_event: + return True + for (state_key, membership) in room_members: + if ( + UserID.from_string(state_key).domain == self.hs.hostname + and membership == Membership.JOIN + ): + return True + return False + @defer.inlineCallbacks - def handle_new_client_event(self, event, context, extra_users=[]): + def handle_new_client_event(self, event, context, ratelimit=True, extra_users=[]): # We now need to go and hit out to wherever we need to hit out to. 
+ if ratelimit: + self.ratelimit(event.sender) + self.auth.check(event, auth_events=context.current_state) yield self.maybe_kick_guest_users(event, context.current_state.values()) @@ -215,6 +293,12 @@ class BaseHandler(object): if event.type == EventTypes.Member: if event.content["membership"] == Membership.INVITE: + def is_inviter_member_event(e): + return ( + e.type == EventTypes.Member and + e.sender == event.sender + ) + event.unsigned["invite_room_state"] = [ { "type": e.type, @@ -228,7 +312,7 @@ class BaseHandler(object): EventTypes.CanonicalAlias, EventTypes.RoomAvatar, EventTypes.Name, - ) + ) or is_inviter_member_event(e) ] invitee = UserID.from_string(event.state_key) @@ -316,7 +400,8 @@ class BaseHandler(object): if member_event.type != EventTypes.Member: continue - if not self.hs.is_mine(UserID.from_string(member_event.state_key)): + target_user = UserID.from_string(member_event.state_key) + if not self.hs.is_mine(target_user): continue if member_event.content["membership"] not in { @@ -338,18 +423,13 @@ class BaseHandler(object): # and having homeservers have their own users leave keeps more # of that decision-making and control local to the guest-having # homeserver. - message_handler = self.hs.get_handlers().message_handler - yield message_handler.create_and_send_event( - { - "type": EventTypes.Member, - "state_key": member_event.state_key, - "content": { - "membership": Membership.LEAVE, - "kind": "guest" - }, - "room_id": member_event.room_id, - "sender": member_event.state_key - }, + requester = Requester(target_user, "", True) + handler = self.hs.get_handlers().room_member_handler + yield handler.update_membership( + requester, + target_user, + member_event.room_id, + "leave", ratelimit=False, ) except Exception as e: diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 4efecb1ffd..e0a778e7ff 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -216,7 +216,7 @@ class DirectoryHandler(BaseHandler): aliases = yield self.store.get_aliases_for_room(room_id) msg_handler = self.hs.get_handlers().message_handler - yield msg_handler.create_and_send_event({ + yield msg_handler.create_and_send_nonmember_event({ "type": EventTypes.Aliases, "state_key": self.hs.hostname, "room_id": room_id, diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py index 4933c31c19..72a31a9755 100644 --- a/synapse/handlers/events.py +++ b/synapse/handlers/events.py @@ -19,6 +19,8 @@ from synapse.util.logutils import log_function from synapse.types import UserID from synapse.events.utils import serialize_event from synapse.util.logcontext import preserve_context_over_fn +from synapse.api.constants import Membership, EventTypes +from synapse.events import EventBase from ._base import BaseHandler @@ -126,11 +128,12 @@ class EventStreamHandler(BaseHandler): If `only_keys` is not None, events from keys will be sent down. """ auth_user = UserID.from_string(auth_user_id) + presence_handler = self.hs.get_handlers().presence_handler - try: - if affect_presence: - yield self.started_stream(auth_user) - + context = yield presence_handler.user_syncing( + auth_user_id, affect_presence=affect_presence, + ) + with context: if timeout: # If they've set a timeout set a minimum limit. timeout = max(timeout, 500) @@ -145,6 +148,34 @@ class EventStreamHandler(BaseHandler): is_guest=is_guest, explicit_room_id=room_id ) + # When the user joins a new room, or another user joins a currently + # joined room, we need to send down presence for those users. 
+ to_add = [] + for event in events: + if not isinstance(event, EventBase): + continue + if event.type == EventTypes.Member: + if event.membership != Membership.JOIN: + continue + # Send down presence. + if event.state_key == auth_user_id: + # Send down presence for everyone in the room. + users = yield self.store.get_users_in_room(event.room_id) + states = yield presence_handler.get_states( + users, + as_event=True, + ) + to_add.extend(states) + else: + + ev = yield presence_handler.get_state( + UserID.from_string(event.state_key), + as_event=True, + ) + to_add.append(ev) + + events.extend(to_add) + time_now = self.clock.time_msec() chunks = [ @@ -159,10 +190,6 @@ class EventStreamHandler(BaseHandler): defer.returnValue(chunk) - finally: - if affect_presence: - self.stopped_stream(auth_user) - class EventHandler(BaseHandler): diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index da55d43541..3655b9e5e2 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -14,6 +14,9 @@ # limitations under the License. """Contains handlers for federation events.""" +from signedjson.key import decode_verify_key_bytes +from signedjson.sign import verify_signed_json +from unpaddedbase64 import decode_base64 from ._base import BaseHandler @@ -1620,19 +1623,15 @@ class FederationHandler(BaseHandler): @defer.inlineCallbacks @log_function - def exchange_third_party_invite(self, invite): - sender = invite["sender"] - room_id = invite["room_id"] - - if "signed" not in invite or "token" not in invite["signed"]: - logger.info( - "Discarding received notification of third party invite " - "without signed: %s" % (invite,) - ) - return - + def exchange_third_party_invite( + self, + sender_user_id, + target_user_id, + room_id, + signed, + ): third_party_invite = { - "signed": invite["signed"], + "signed": signed, } event_dict = { @@ -1642,8 +1641,8 @@ class FederationHandler(BaseHandler): "third_party_invite": third_party_invite, }, "room_id": room_id, - "sender": sender, - "state_key": invite["mxid"], + "sender": sender_user_id, + "state_key": target_user_id, } if (yield self.auth.check_host_in_room(room_id, self.hs.hostname)): @@ -1656,11 +1655,11 @@ class FederationHandler(BaseHandler): ) self.auth.check(event, context.current_state) - yield self._validate_keyserver(event, auth_events=context.current_state) + yield self._check_signature(event, auth_events=context.current_state) member_handler = self.hs.get_handlers().room_member_handler - yield member_handler.send_membership_event(event, context) + yield member_handler.send_membership_event(event, context, from_client=False) else: - destinations = set([x.split(":", 1)[-1] for x in (sender, room_id)]) + destinations = set(x.split(":", 1)[-1] for x in (sender_user_id, room_id)) yield self.replication_layer.forward_third_party_invite( destinations, room_id, @@ -1681,13 +1680,13 @@ class FederationHandler(BaseHandler): ) self.auth.check(event, auth_events=context.current_state) - yield self._validate_keyserver(event, auth_events=context.current_state) + yield self._check_signature(event, auth_events=context.current_state) returned_invite = yield self.send_invite(origin, event) # TODO: Make sure the signatures actually are correct. 
event.signatures.update(returned_invite.signatures)
member_handler = self.hs.get_handlers().room_member_handler
- yield member_handler.send_membership_event(event, context)
+ yield member_handler.send_membership_event(event, context, from_client=False)
@defer.inlineCallbacks
def add_display_name_to_third_party_invite(self, event_dict, event, context):
@@ -1711,17 +1710,69 @@ class FederationHandler(BaseHandler):
defer.returnValue((event, context))
@defer.inlineCallbacks
- def _validate_keyserver(self, event, auth_events):
- token = event.content["third_party_invite"]["signed"]["token"]
+ def _check_signature(self, event, auth_events):
+ """
+ Checks that the signature in the event is consistent with its invite.
+ :param event (Event): The m.room.member event to check
+ :param auth_events (dict<(event type, state_key), event>)
+
+ :raises
+ AuthError if signature didn't match any keys, or key has been
+ revoked,
+ SynapseError if a transient error meant a key couldn't be checked
+ for revocation.
+ """
+ signed = event.content["third_party_invite"]["signed"]
+ token = signed["token"]
invite_event = auth_events.get(
(EventTypes.ThirdPartyInvite, token,)
)
+ if not invite_event:
+ raise AuthError(403, "Could not find invite")
+
+ last_exception = None
+ for public_key_object in self.hs.get_auth().get_public_keys(invite_event):
+ try:
+ for server, signature_block in signed["signatures"].items():
+ for key_name, encoded_signature in signature_block.items():
+ if not key_name.startswith("ed25519:"):
+ continue
+
+ public_key = public_key_object["public_key"]
+ verify_key = decode_verify_key_bytes(
+ key_name,
+ decode_base64(public_key)
+ )
+ verify_signed_json(signed, server, verify_key)
+ if "key_validity_url" in public_key_object:
+ yield self._check_key_revocation(
+ public_key,
+ public_key_object["key_validity_url"]
+ )
+ return
+ except Exception as e:
+ last_exception = e
+ raise last_exception
+
+ @defer.inlineCallbacks
+ def _check_key_revocation(self, public_key, url):
+ """
+ Checks whether public_key has been revoked.
+
+ :param public_key (str): base-64 encoded public key.
+ :param url (str): Key revocation URL.
+
+ :raises
+ AuthError if the key has been revoked.
+ SynapseError if a transient error meant a key couldn't be checked
+ for revocation.
+ """ try: response = yield self.hs.get_simple_http_client().get_json( - invite_event.content["key_validity_url"], - {"public_key": invite_event.content["public_key"]} + url, + {"public_key": public_key} ) except Exception: raise SynapseError( diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 82c8cb5f0c..afa7c9c36c 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -16,12 +16,11 @@ from twisted.internet import defer from synapse.api.constants import EventTypes, Membership -from synapse.api.errors import AuthError, Codes +from synapse.api.errors import AuthError, Codes, SynapseError from synapse.streams.config import PaginationConfig from synapse.events.utils import serialize_event from synapse.events.validator import EventValidator from synapse.util import unwrapFirstError -from synapse.util.logcontext import PreserveLoggingContext from synapse.util.caches.snapshot_cache import SnapshotCache from synapse.types import UserID, RoomStreamToken, StreamToken @@ -216,7 +215,7 @@ class MessageHandler(BaseHandler): defer.returnValue((event, context)) @defer.inlineCallbacks - def send_event(self, event, context, ratelimit=True, is_guest=False): + def send_nonmember_event(self, event, context, ratelimit=True): """ Persists and notifies local clients and federation of an event. @@ -226,55 +225,68 @@ class MessageHandler(BaseHandler): ratelimit (bool): Whether to rate limit this send. is_guest (bool): Whether the sender is a guest. """ + if event.type == EventTypes.Member: + raise SynapseError( + 500, + "Tried to send member event through non-member codepath" + ) + user = UserID.from_string(event.sender) assert self.hs.is_mine(user), "User must be our own: %s" % (user,) - if ratelimit: - self.ratelimit(event.sender) - if event.is_state(): - prev_state = context.current_state.get((event.type, event.state_key)) - if prev_state and event.user_id == prev_state.user_id: - prev_content = encode_canonical_json(prev_state.content) - next_content = encode_canonical_json(event.content) - if prev_content == next_content: - # Duplicate suppression for state updates with same sender - # and content. - defer.returnValue(prev_state) + prev_state = self.deduplicate_state_event(event, context) + if prev_state is not None: + defer.returnValue(prev_state) - if event.type == EventTypes.Member: - member_handler = self.hs.get_handlers().room_member_handler - yield member_handler.send_membership_event(event, context, is_guest=is_guest) - else: - yield self.handle_new_client_event( - event=event, - context=context, - ) + yield self.handle_new_client_event( + event=event, + context=context, + ratelimit=ratelimit, + ) if event.type == EventTypes.Message: presence = self.hs.get_handlers().presence_handler - with PreserveLoggingContext(): - presence.bump_presence_active_time(user) + yield presence.bump_presence_active_time(user) + + def deduplicate_state_event(self, event, context): + """ + Checks whether event is in the latest resolved state in context. + + If so, returns the version of the event in context. + Otherwise, returns None. 
+ """ + prev_event = context.current_state.get((event.type, event.state_key)) + if prev_event and event.user_id == prev_event.user_id: + prev_content = encode_canonical_json(prev_event.content) + next_content = encode_canonical_json(event.content) + if prev_content == next_content: + return prev_event + return None @defer.inlineCallbacks - def create_and_send_event(self, event_dict, ratelimit=True, - token_id=None, txn_id=None, is_guest=False): + def create_and_send_nonmember_event( + self, + event_dict, + ratelimit=True, + token_id=None, + txn_id=None + ): """ Creates an event, then sends it. - See self.create_event and self.send_event. + See self.create_event and self.send_nonmember_event. """ event, context = yield self.create_event( event_dict, token_id=token_id, txn_id=txn_id ) - yield self.send_event( + yield self.send_nonmember_event( event, context, ratelimit=ratelimit, - is_guest=is_guest ) defer.returnValue(event) @@ -660,10 +672,6 @@ class MessageHandler(BaseHandler): room_id=room_id, ) - # TODO(paul): I wish I was called with user objects not user_id - # strings... - auth_user = UserID.from_string(user_id) - # TODO: These concurrently time_now = self.clock.time_msec() state = [ @@ -688,13 +696,11 @@ class MessageHandler(BaseHandler): @defer.inlineCallbacks def get_presence(): states = yield presence_handler.get_states( - target_users=[UserID.from_string(m.user_id) for m in room_members], - auth_user=auth_user, + [m.user_id for m in room_members], as_event=True, - check_auth=False, ) - defer.returnValue(states.values()) + defer.returnValue(states) @defer.inlineCallbacks def get_receipts(): diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index b61394f2b5..f6cf343174 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -13,13 +13,26 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer +"""This module is responsible for keeping track of presence status of local +and remote users. 
-from synapse.api.errors import SynapseError, AuthError +The methods that define policy are: + - PresenceHandler._update_states + - PresenceHandler._handle_timeouts + - should_notify +""" + +from twisted.internet import defer, reactor +from contextlib import contextmanager + +from synapse.api.errors import SynapseError from synapse.api.constants import PresenceState +from synapse.storage.presence import UserPresenceState -from synapse.util.logcontext import PreserveLoggingContext +from synapse.util.logcontext import preserve_fn from synapse.util.logutils import log_function +from synapse.util.metrics import Measure +from synapse.util.wheel_timer import WheelTimer from synapse.types import UserID import synapse.metrics @@ -32,34 +45,32 @@ logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) +notified_presence_counter = metrics.register_counter("notified_presence") +federation_presence_out_counter = metrics.register_counter("federation_presence_out") +presence_updates_counter = metrics.register_counter("presence_updates") +timers_fired_counter = metrics.register_counter("timers_fired") +federation_presence_counter = metrics.register_counter("federation_presence") +bump_active_time_counter = metrics.register_counter("bump_active_time") -# Don't bother bumping "last active" time if it differs by less than 60 seconds -LAST_ACTIVE_GRANULARITY = 60 * 1000 - -# Keep no more than this number of offline serial revisions -MAX_OFFLINE_SERIALS = 1000 +# If a user was last active in the last LAST_ACTIVE_GRANULARITY, consider them +# "currently_active" +LAST_ACTIVE_GRANULARITY = 60 * 1000 -# TODO(paul): Maybe there's one of these I can steal from somewhere -def partition(l, func): - """Partition the list by the result of func applied to each element.""" - ret = {} - - for x in l: - key = func(x) - if key not in ret: - ret[key] = [] - ret[key].append(x) +# How long to wait until a new /events or /sync request before assuming +# the client has gone. +SYNC_ONLINE_TIMEOUT = 30 * 1000 - return ret +# How long to wait before marking the user as idle. Compared against last active +IDLE_TIMER = 5 * 60 * 1000 +# How often we expect remote servers to resend us presence. 
+FEDERATION_TIMEOUT = 30 * 60 * 1000 -def partitionbool(l, func): - def boolfunc(x): - return bool(func(x)) +# How often to resend presence to remote servers +FEDERATION_PING_INTERVAL = 25 * 60 * 1000 - ret = partition(l, boolfunc) - return ret.get(True, []), ret.get(False, []) +assert LAST_ACTIVE_GRANULARITY < IDLE_TIMER def user_presence_changed(distributor, user, statuscache): @@ -72,45 +83,13 @@ def collect_presencelike_data(distributor, user, content): class PresenceHandler(BaseHandler): - STATE_LEVELS = { - PresenceState.OFFLINE: 0, - PresenceState.UNAVAILABLE: 1, - PresenceState.ONLINE: 2, - PresenceState.FREE_FOR_CHAT: 3, - } - def __init__(self, hs): super(PresenceHandler, self).__init__(hs) - - self.homeserver = hs - + self.hs = hs self.clock = hs.get_clock() - - distributor = hs.get_distributor() - distributor.observe("registered_user", self.registered_user) - - distributor.observe( - "started_user_eventstream", self.started_user_eventstream - ) - distributor.observe( - "stopped_user_eventstream", self.stopped_user_eventstream - ) - - distributor.observe("user_joined_room", self.user_joined_room) - - distributor.declare("collect_presencelike_data") - - distributor.declare("changed_presencelike_data") - distributor.observe( - "changed_presencelike_data", self.changed_presencelike_data - ) - - # outbound signal from the presence module to advertise when a user's - # presence has changed - distributor.declare("user_presence_changed") - - self.distributor = distributor - + self.store = hs.get_datastore() + self.wheel_timer = WheelTimer() + self.notifier = hs.get_notifier() self.federation = hs.get_replication_layer() self.federation.register_edu_handler( @@ -138,348 +117,552 @@ class PresenceHandler(BaseHandler): ) ) - # IN-MEMORY store, mapping local userparts to sets of local users to - # be informed of state changes. - self._local_pushmap = {} - # map local users to sets of remote /domain names/ who are interested - # in them - self._remote_sendmap = {} - # map remote users to sets of local users who're interested in them - self._remote_recvmap = {} - # list of (serial, set of(userids)) tuples, ordered by serial, latest - # first - self._remote_offline_serials = [] - - # map any user to a UserPresenceCache - self._user_cachemap = {} - self._user_cachemap_latest_serial = 0 - - # map room_ids to the latest presence serial for a member of that - # room - self._room_serials = {} + distributor = hs.get_distributor() + distributor.observe("user_joined_room", self.user_joined_room) + + active_presence = self.store.take_presence_startup_info() + + # A dictionary of the current state of users. This is prefilled with + # non-offline presence from the DB. We should fetch from the DB if + # we can't find a users presence in here. 
+ self.user_to_current_state = {
+ state.user_id: state
+ for state in active_presence
+ }
metrics.register_callback(
- "userCachemap:size",
- lambda: len(self._user_cachemap),
+ "user_to_current_state_size", lambda: len(self.user_to_current_state)
)
- def _get_or_make_usercache(self, user):
- """If the cache entry doesn't exist, initialise a new one."""
- if user not in self._user_cachemap:
- self._user_cachemap[user] = UserPresenceCache()
- return self._user_cachemap[user]
-
- def _get_or_offline_usercache(self, user):
- """If the cache entry doesn't exist, return an OFFLINE one but do not
- store it into the cache."""
- if user in self._user_cachemap:
- return self._user_cachemap[user]
- else:
- return UserPresenceCache()
+ now = self.clock.time_msec()
+ for state in active_presence:
+ self.wheel_timer.insert(
+ now=now,
+ obj=state.user_id,
+ then=state.last_active_ts + IDLE_TIMER,
+ )
+ self.wheel_timer.insert(
+ now=now,
+ obj=state.user_id,
+ then=state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
+ )
+ if self.hs.is_mine_id(state.user_id):
+ self.wheel_timer.insert(
+ now=now,
+ obj=state.user_id,
+ then=state.last_federation_update_ts + FEDERATION_PING_INTERVAL,
+ )
+ else:
+ self.wheel_timer.insert(
+ now=now,
+ obj=state.user_id,
+ then=state.last_federation_update_ts + FEDERATION_TIMEOUT,
+ )
- def registered_user(self, user):
- return self.store.create_presence(user.localpart)
+ # Set of users who have presence in the `user_to_current_state` that
+ # have not yet been persisted
+ self.unpersisted_users_changes = set()
- @defer.inlineCallbacks
- def is_presence_visible(self, observer_user, observed_user):
- assert(self.hs.is_mine(observed_user))
+ reactor.addSystemEventTrigger("before", "shutdown", self._on_shutdown)
- if observer_user == observed_user:
- defer.returnValue(True)
+ self.serial_to_user = {}
+ self._next_serial = 1
- if (yield self.store.user_rooms_intersect(
- [u.to_string() for u in observer_user, observed_user])):
- defer.returnValue(True)
+ # Keeps track of the number of *ongoing* syncs. While this is non-zero
+ # a user will never go offline.
+ self.user_to_num_current_syncs = {}
- if (yield self.store.is_presence_visible(
- observed_localpart=observed_user.localpart,
- observer_userid=observer_user.to_string())):
- defer.returnValue(True)
+ # Start a LoopingCall in 30s that fires every 5s.
+ # The initial delay is to allow disconnected clients a chance to
+ # reconnect before we treat them as offline.
+ self.clock.call_later(
+ 30 * 1000,
+ self.clock.looping_call,
+ self._handle_timeouts,
+ 5000,
+ )
- defer.returnValue(False)
+ metrics.register_callback("wheel_timer_size", lambda: len(self.wheel_timer))
@defer.inlineCallbacks
- def get_state(self, target_user, auth_user, as_event=False, check_auth=True):
- """Get the current presence state of the given user.
+ def _on_shutdown(self):
+ """Gets called when shutting down. This lets us persist any updates that
+ we haven't yet persisted, e.g. updates that only change some internal
+ timers. This allows changes to persist across restarts without having to
+ persist every single change.
+
+ If this does not run it simply means that some of the timers will fire
+ earlier than they should when synapse is restarted. The effect of this
+ is some spurious presence changes that will self-correct.
+ """
+ logger.info(
+ "Performing _on_shutdown. Persisting %d unpersisted changes",
+ len(self.unpersisted_users_changes)
+ )
- Args:
- target_user (UserID): The user whose presence we want
- auth_user (UserID): The user requesting the presence, used for
- checking if said user is allowed to see the persence of the
- `target_user`
- as_event (bool): Format the return as an event or not?
- check_auth (bool): Perform the auth checks or not?
+ if self.unpersisted_users_changes:
+ yield self.store.update_presence([
+ self.user_to_current_state[user_id]
+ for user_id in self.unpersisted_users_changes
+ ])
+ logger.info("Finished _on_shutdown")
- Returns:
- dict: The presence state of the `target_user`, whose format depends
- on the `as_event` argument.
+ @defer.inlineCallbacks
+ def _update_states(self, new_states):
+ """Updates presence of users. Sets the appropriate timeouts. Pokes
+ the notifier and federation if and only if the changed presence state
+ should be sent to clients/servers.
"""
- if self.hs.is_mine(target_user):
- if check_auth:
- visible = yield self.is_presence_visible(
- observer_user=auth_user,
- observed_user=target_user
- )
+ now = self.clock.time_msec()
- if not visible:
- raise SynapseError(404, "Presence information not visible")
+ with Measure(self.clock, "presence_update_states"):
- if target_user in self._user_cachemap:
- state = self._user_cachemap[target_user].get_state()
- else:
- state = yield self.store.get_presence_state(target_user.localpart)
- if "mtime" in state:
- del state["mtime"]
- state["presence"] = state.pop("state")
- else:
- # TODO(paul): Have remote server send us permissions set
- state = self._get_or_offline_usercache(target_user).get_state()
+ # NOTE: We purposefully don't yield between now and when we've
+ # calculated what we want to do with the new states, to avoid races.
- if "last_active" in state:
- state["last_active_ago"] = int(
- self.clock.time_msec() - state.pop("last_active")
- )
+ to_notify = {} # Changes we want to notify everyone about
+ to_federation_ping = {} # These need sending keep-alives
- if as_event:
- content = state
+ for new_state in new_states:
+ user_id = new_state.user_id
- content["user_id"] = target_user.to_string()
+ # It's fine to not hit the database here, as the only thing not in
+ # the current state cache are OFFLINE states, where the only field
+ # of interest is last_active which is safe enough to assume is 0
+ # here.
+ prev_state = self.user_to_current_state.get(
+ user_id, UserPresenceState.default(user_id)
+ )
- if "last_active" in content:
- content["last_active_ago"] = int(
- self._clock.time_msec() - content.pop("last_active")
+ new_state, should_notify, should_ping = handle_update(
+ prev_state, new_state,
+ is_mine=self.hs.is_mine_id(user_id),
+ wheel_timer=self.wheel_timer,
+ now=now
)
- defer.returnValue({"type": "m.presence", "content": content})
- else:
- defer.returnValue(state)
+ self.user_to_current_state[user_id] = new_state
- @defer.inlineCallbacks
- def get_states(self, target_users, auth_user, as_event=False, check_auth=True):
- """A batched version of the `get_state` method that accepts a list of
- `target_users`
+ if should_notify:
+ to_notify[user_id] = new_state
+ elif should_ping:
+ to_federation_ping[user_id] = new_state
- Args:
- target_users (list): The list of UserID's whose presence we want
- auth_user (UserID): The user requesting the presence, used for
- checking if said user is allowed to see the persence of the
- `target_users`
- as_event (bool): Format the return as an event or not?
- check_auth (bool): Perform the auth checks or not? + # TODO: We should probably ensure there are no races hereafter - Returns: - dict: A mapping from user -> presence_state - """ - local_users, remote_users = partitionbool( - target_users, - lambda u: self.hs.is_mine(u) - ) + presence_updates_counter.inc_by(len(new_states)) + + if to_notify: + notified_presence_counter.inc_by(len(to_notify)) + yield self._persist_and_notify(to_notify.values()) + + self.unpersisted_users_changes |= set(s.user_id for s in new_states) + self.unpersisted_users_changes -= set(to_notify.keys()) - if check_auth: - for user in local_users: - visible = yield self.is_presence_visible( - observer_user=auth_user, - observed_user=user + to_federation_ping = { + user_id: state for user_id, state in to_federation_ping.items() + if user_id not in to_notify + } + if to_federation_ping: + federation_presence_out_counter.inc_by(len(to_federation_ping)) + + _, _, hosts_to_states = yield self._get_interested_parties( + to_federation_ping.values() ) - if not visible: - raise SynapseError(404, "Presence information not visible") + self._push_to_remotes(hosts_to_states) + + def _handle_timeouts(self): + """Checks the presence of users that have timed out and updates as + appropriate. + """ + now = self.clock.time_msec() + + with Measure(self.clock, "presence_handle_timeouts"): + # Fetch the list of users that *may* have timed out. Things may have + # changed since the timeout was set, so we won't necessarily have to + # take any action. + users_to_check = self.wheel_timer.fetch(now) - results = {} - if local_users: - for user in local_users: - if user in self._user_cachemap: - results[user] = self._user_cachemap[user].get_state() + states = [ + self.user_to_current_state.get( + user_id, UserPresenceState.default(user_id) + ) + for user_id in set(users_to_check) + ] - local_to_user = {u.localpart: u for u in local_users} + timers_fired_counter.inc_by(len(states)) - states = yield self.store.get_presence_states( - [u.localpart for u in local_users if u not in results] + changes = handle_timeouts( + states, + is_mine_fn=self.hs.is_mine_id, + user_to_num_current_syncs=self.user_to_num_current_syncs, + now=now, ) - for local_part, state in states.items(): - if state is None: - continue - res = {"presence": state["state"]} - if "status_msg" in state and state["status_msg"]: - res["status_msg"] = state["status_msg"] - results[local_to_user[local_part]] = res - - for user in remote_users: - # TODO(paul): Have remote server send us permissions set - results[user] = self._get_or_offline_usercache(user).get_state() - - for state in results.values(): - if "last_active" in state: - state["last_active_ago"] = int( - self.clock.time_msec() - state.pop("last_active") - ) + preserve_fn(self._update_states)(changes) - if as_event: - for user, state in results.items(): - content = state - content["user_id"] = user.to_string() + @defer.inlineCallbacks + def bump_presence_active_time(self, user): + """We've seen the user do something that indicates they're interacting + with the app. 
+ """ + user_id = user.to_string() - if "last_active" in content: - content["last_active_ago"] = int( - self._clock.time_msec() - content.pop("last_active") - ) + bump_active_time_counter.inc() - results[user] = {"type": "m.presence", "content": content} + prev_state = yield self.current_state_for_user(user_id) - defer.returnValue(results) + new_fields = { + "last_active_ts": self.clock.time_msec(), + } + if prev_state.state == PresenceState.UNAVAILABLE: + new_fields["state"] = PresenceState.ONLINE + + yield self._update_states([prev_state.copy_and_replace(**new_fields)]) @defer.inlineCallbacks - @log_function - def set_state(self, target_user, auth_user, state): - # return - # TODO (erikj): Turn this back on. Why did we end up sending EDUs - # everywhere? + def user_syncing(self, user_id, affect_presence=True): + """Returns a context manager that should surround any stream requests + from the user. - if not self.hs.is_mine(target_user): - raise SynapseError(400, "User is not hosted on this Home Server") + This allows us to keep track of who is currently streaming and who isn't + without having to have timers outside of this module to avoid flickering + when users disconnect/reconnect. - if target_user != auth_user: - raise AuthError(400, "Cannot set another user's presence") + Args: + user_id (str) + affect_presence (bool): If false this function will be a no-op. + Useful for streams that are not associated with an actual + client that is being used by a user. + """ + if affect_presence: + curr_sync = self.user_to_num_current_syncs.get(user_id, 0) + self.user_to_num_current_syncs[user_id] = curr_sync + 1 + + prev_state = yield self.current_state_for_user(user_id) + if prev_state.state == PresenceState.OFFLINE: + # If they're currently offline then bring them online, otherwise + # just update the last sync times. + yield self._update_states([prev_state.copy_and_replace( + state=PresenceState.ONLINE, + last_active_ts=self.clock.time_msec(), + last_user_sync_ts=self.clock.time_msec(), + )]) + else: + yield self._update_states([prev_state.copy_and_replace( + last_user_sync_ts=self.clock.time_msec(), + )]) - if "status_msg" not in state: - state["status_msg"] = None + @defer.inlineCallbacks + def _end(): + if affect_presence: + self.user_to_num_current_syncs[user_id] -= 1 - for k in state.keys(): - if k not in ("presence", "status_msg"): - raise SynapseError( - 400, "Unexpected presence state key '%s'" % (k,) - ) + prev_state = yield self.current_state_for_user(user_id) + yield self._update_states([prev_state.copy_and_replace( + last_user_sync_ts=self.clock.time_msec(), + )]) - if state["presence"] not in self.STATE_LEVELS: - raise SynapseError(400, "'%s' is not a valid presence state" % ( - state["presence"], - )) + @contextmanager + def _user_syncing(): + try: + yield + finally: + preserve_fn(_end)() - logger.debug("Updating presence state of %s to %s", - target_user.localpart, state["presence"]) + defer.returnValue(_user_syncing()) - state_to_store = dict(state) - state_to_store["state"] = state_to_store.pop("presence") + @defer.inlineCallbacks + def current_state_for_user(self, user_id): + """Get the current presence state for a user. 
+ """ + res = yield self.current_state_for_users([user_id]) + defer.returnValue(res[user_id]) - statuscache = self._get_or_offline_usercache(target_user) - was_level = self.STATE_LEVELS[statuscache.get_state()["presence"]] - now_level = self.STATE_LEVELS[state["presence"]] + @defer.inlineCallbacks + def current_state_for_users(self, user_ids): + """Get the current presence state for multiple users. - yield self.store.set_presence_state( - target_user.localpart, state_to_store - ) - yield collect_presencelike_data(self.distributor, target_user, state) + Returns: + dict: `user_id` -> `UserPresenceState` + """ + states = { + user_id: self.user_to_current_state.get(user_id, None) + for user_id in user_ids + } + + missing = [user_id for user_id, state in states.items() if not state] + if missing: + # There are things not in our in memory cache. Lets pull them out of + # the database. + res = yield self.store.get_presence_for_users(missing) + states.update({state.user_id: state for state in res}) + + missing = [user_id for user_id, state in states.items() if not state] + if missing: + new = { + user_id: UserPresenceState.default(user_id) + for user_id in missing + } + states.update(new) + self.user_to_current_state.update(new) + + defer.returnValue(states) + + @defer.inlineCallbacks + def _get_interested_parties(self, states): + """Given a list of states return which entities (rooms, users, servers) + are interested in the given states. + + Returns: + 3-tuple: `(room_ids_to_states, users_to_states, hosts_to_states)`, + with each item being a dict of `entity_name` -> `[UserPresenceState]` + """ + room_ids_to_states = {} + users_to_states = {} + for state in states: + events = yield self.store.get_rooms_for_user(state.user_id) + for e in events: + room_ids_to_states.setdefault(e.room_id, []).append(state) + + plist = yield self.store.get_presence_list_observers_accepted(state.user_id) + for u in plist: + users_to_states.setdefault(u, []).append(state) + + # Always notify self + users_to_states.setdefault(state.user_id, []).append(state) + + hosts_to_states = {} + for room_id, states in room_ids_to_states.items(): + local_states = filter(lambda s: self.hs.is_mine_id(s.user_id), states) + if not local_states: + continue - if now_level > was_level: - state["last_active"] = self.clock.time_msec() + hosts = yield self.store.get_joined_hosts_for_room(room_id) + for host in hosts: + hosts_to_states.setdefault(host, []).extend(local_states) - now_online = state["presence"] != PresenceState.OFFLINE - was_polling = target_user in self._user_cachemap + for user_id, states in users_to_states.items(): + local_states = filter(lambda s: self.hs.is_mine_id(s.user_id), states) + if not local_states: + continue - if now_online and not was_polling: - yield self.start_polling_presence(target_user, state=state) - elif not now_online and was_polling: - yield self.stop_polling_presence(target_user) + host = UserID.from_string(user_id).domain + hosts_to_states.setdefault(host, []).extend(local_states) - # TODO(paul): perform a presence push as part of start/stop poll so - # we don't have to do this all the time - yield self.changed_presencelike_data(target_user, state) + # TODO: de-dup hosts_to_states, as a single host might have multiple + # of same presence - def bump_presence_active_time(self, user, now=None): - if now is None: - now = self.clock.time_msec() + defer.returnValue((room_ids_to_states, users_to_states, hosts_to_states)) - prev_state = self._get_or_make_usercache(user) - if now - 
prev_state.state.get("last_active", 0) < LAST_ACTIVE_GRANULARITY: - return + @defer.inlineCallbacks + def _persist_and_notify(self, states): + """Persist states in the database, poke the notifier and send to + interested remote servers + """ + stream_id, max_token = yield self.store.update_presence(states) + + parties = yield self._get_interested_parties(states) + room_ids_to_states, users_to_states, hosts_to_states = parties + + self.notifier.on_new_event( + "presence_key", stream_id, rooms=room_ids_to_states.keys(), + users=[UserID.from_string(u) for u in users_to_states.keys()] + ) - with PreserveLoggingContext(): - self.changed_presencelike_data(user, {"last_active": now}) + self._push_to_remotes(hosts_to_states) - def get_joined_rooms_for_user(self, user): - """Get the list of rooms a user is joined to. + def _push_to_remotes(self, hosts_to_states): + """Sends state updates to remote servers. Args: - user(UserID): The user. - Returns: - A Deferred of a list of room id strings. + hosts_to_states (dict): Mapping `server_name` -> `[UserPresenceState]` """ - rm_handler = self.homeserver.get_handlers().room_member_handler - return rm_handler.get_joined_rooms_for_user(user) + now = self.clock.time_msec() + for host, states in hosts_to_states.items(): + self.federation.send_edu( + destination=host, + edu_type="m.presence", + content={ + "push": [ + _format_user_presence_state(state, now) + for state in states + ] + } + ) - def get_joined_users_for_room_id(self, room_id): - rm_handler = self.homeserver.get_handlers().room_member_handler - return rm_handler.get_room_members(room_id) + @defer.inlineCallbacks + def incoming_presence(self, origin, content): + """Called when we receive a `m.presence` EDU from a remote server. + """ + now = self.clock.time_msec() + updates = [] + for push in content.get("push", []): + # A "push" contains a list of presence that we are probably interested + # in. + # TODO: Actually check if we're interested, rather than blindly + # accepting presence updates. + user_id = push.get("user_id", None) + if not user_id: + logger.info( + "Got presence update from %r with no 'user_id': %r", + origin, push, + ) + continue + + presence_state = push.get("presence", None) + if not presence_state: + logger.info( + "Got presence update from %r with no 'presence_state': %r", + origin, push, + ) + continue + + new_fields = { + "state": presence_state, + "last_federation_update_ts": now, + } + + last_active_ago = push.get("last_active_ago", None) + if last_active_ago is not None: + new_fields["last_active_ts"] = now - last_active_ago + + new_fields["status_msg"] = push.get("status_msg", None) + new_fields["currently_active"] = push.get("currently_active", False) + + prev_state = yield self.current_state_for_user(user_id) + updates.append(prev_state.copy_and_replace(**new_fields)) + + if updates: + federation_presence_counter.inc_by(len(updates)) + yield self._update_states(updates) @defer.inlineCallbacks - def changed_presencelike_data(self, user, state): - """Updates the presence state of a local user. + def get_state(self, target_user, as_event=False): + results = yield self.get_states( + [target_user.to_string()], + as_event=as_event, + ) + + defer.returnValue(results[0]) + + @defer.inlineCallbacks + def get_states(self, target_user_ids, as_event=False): + """Get the presence state for users. Args: - user(UserID): The user being updated. - state(dict): The new presence state for the user. + target_user_ids (list) + as_event (bool): Whether to format it as a client event or not. 
+
+ Returns:
+ list
+ """
- self._user_cachemap_latest_serial += 1
- statuscache = yield self.update_presence_cache(user, state)
- yield self.push_presence(user, statuscache=statuscache)
- @log_function
- def started_user_eventstream(self, user):
- # TODO(paul): Use "last online" state
- return self.set_state(user, user, {"presence": PresenceState.ONLINE})
+ updates = yield self.current_state_for_users(target_user_ids)
+ updates = updates.values()
- @log_function
- def stopped_user_eventstream(self, user):
- # TODO(paul): Save current state as "last online" state
- return self.set_state(user, user, {"presence": PresenceState.OFFLINE})
+ for user_id in set(target_user_ids) - set(u.user_id for u in updates):
+ updates.append(UserPresenceState.default(user_id))
+
+ now = self.clock.time_msec()
+ if as_event:
+ defer.returnValue([
+ {
+ "type": "m.presence",
+ "content": _format_user_presence_state(state, now),
+ }
+ for state in updates
+ ])
+ else:
+ defer.returnValue([
+ _format_user_presence_state(state, now) for state in updates
+ ])
@defer.inlineCallbacks
- def user_joined_room(self, user, room_id):
- """Called via the distributor whenever a user joins a room.
- Notifies the new member of the presence of the current members.
- Notifies the current members of the room of the new member's presence.
+ def set_state(self, target_user, state):
+ """Set the presence state of the user.
+ """
+ status_msg = state.get("status_msg", None)
+ presence = state["presence"]
- Args:
- user(UserID): The user who joined the room.
- room_id(str): The room id the user joined.
+ valid_presence = (
+ PresenceState.ONLINE, PresenceState.UNAVAILABLE, PresenceState.OFFLINE
+ )
+ if presence not in valid_presence:
+ raise SynapseError(400, "Invalid presence state")
+
+ user_id = target_user.to_string()
+
+ prev_state = yield self.current_state_for_user(user_id)
+
+ new_fields = {
+ "state": presence,
+ "status_msg": status_msg if presence != PresenceState.OFFLINE else None
+ }
+
+ if presence == PresenceState.ONLINE:
+ new_fields["last_active_ts"] = self.clock.time_msec()
+
+ yield self._update_states([prev_state.copy_and_replace(**new_fields)])
+
+ @defer.inlineCallbacks
+ def user_joined_room(self, user, room_id):
+ """Called (via the distributor) when a user joins a room. This function
+ sends presence updates to servers, either:
+ 1. the joining user is a local user and we send their presence to
+ all servers in the room.
+ 2. the joining user is a remote user and so we send presence for all
+ local users in the room.
+ """
+ # We only need to send presence to servers that don't have it yet. We
+ # don't need to send to local clients here, as that is done as part
+ # of the event stream/sync.
+ # TODO: Only send to servers not already in the room.
if self.hs.is_mine(user):
- # No actual update but we need to bump the serial anyway for the
- # event source
- self._user_cachemap_latest_serial += 1
- statuscache = yield self.update_presence_cache(
- user, room_ids=[room_id]
- )
- self.push_update_to_local_and_remote(
- observed_user=user,
- room_ids=[room_id],
- statuscache=statuscache,
- )
+ state = yield self.current_state_for_user(user.to_string())
- # We also want to tell them about current presence of people.
- curr_users = yield self.get_joined_users_for_room_id(room_id)
+ hosts = yield self.store.get_joined_hosts_for_room(room_id)
+ self._push_to_remotes({host: (state,) for host in hosts})
+ else:
+ user_ids = yield self.store.get_users_in_room(room_id)
+ user_ids = filter(self.hs.is_mine_id, user_ids)
- for local_user in [c for c in curr_users if self.hs.is_mine(c)]:
- statuscache = yield self.update_presence_cache(
- local_user, room_ids=[room_id], add_to_cache=False
- )
+ states = yield self.current_state_for_users(user_ids)
- with PreserveLoggingContext():
- self.push_update_to_local_and_remote(
- observed_user=local_user,
- users_to_push=[user],
- statuscache=statuscache,
- )
+ self._push_to_remotes({user.domain: states.values()})
@defer.inlineCallbacks
- def send_presence_invite(self, observer_user, observed_user):
- """Request the presence of a local or remote user for a local user"""
+ def get_presence_list(self, observer_user, accepted=None):
+ """Returns the presence for all users in their presence list.
+ """
if not self.hs.is_mine(observer_user):
raise SynapseError(400, "User is not hosted on this Home Server")
+ presence_list = yield self.store.get_presence_list(
+ observer_user.localpart, accepted=accepted
+ )
+
+ results = yield self.get_states(
+ target_user_ids=[row["observed_user_id"] for row in presence_list],
+ as_event=False,
+ )
+
+ is_accepted = {
+ row["observed_user_id"]: row["accepted"] for row in presence_list
+ }
+
+ for result in results:
+ result.update({
+ "accepted": is_accepted[result["user_id"]],
+ })
+
+ defer.returnValue(results)
+
+ @defer.inlineCallbacks
+ def send_presence_invite(self, observer_user, observed_user):
+ """Sends a presence invite.
+ """
yield self.store.add_presence_list_pending(
observer_user.localpart, observed_user.to_string()
)
@@ -497,59 +680,40 @@ class PresenceHandler(BaseHandler):
)
@defer.inlineCallbacks
- def _should_accept_invite(self, observed_user, observer_user):
- if not self.hs.is_mine(observed_user):
- defer.returnValue(False)
-
- row = yield self.store.has_presence_state(observed_user.localpart)
- if not row:
- defer.returnValue(False)
-
- # TODO(paul): Eventually we'll ask the user's permission for this
- # before accepting. For now just accept any invite request
- defer.returnValue(True)
-
- @defer.inlineCallbacks
def invite_presence(self, observed_user, observer_user):
- """Handles a m.presence_invite EDU. A remote or local user has
- requested presence updates for a local user. If the invite is accepted
- then allow the local or remote user to see the presence of the local
- user.
-
- Args:
- observed_user(UserID): The local user whose presence is requested.
- observer_user(UserID): The remote or local user requesting presence.
+ """Handles new presence invites.
""" - accept = yield self._should_accept_invite(observed_user, observer_user) - - if accept: - yield self.store.allow_presence_visible( - observed_user.localpart, observer_user.to_string() - ) + if not self.hs.is_mine(observed_user): + raise SynapseError(400, "User is not hosted on this Home Server") + # TODO: Don't auto accept if self.hs.is_mine(observer_user): - if accept: - yield self.accept_presence(observed_user, observer_user) - else: - yield self.deny_presence(observed_user, observer_user) + yield self.accept_presence(observed_user, observer_user) else: - edu_type = "m.presence_accept" if accept else "m.presence_deny" - - yield self.federation.send_edu( + self.federation.send_edu( destination=observer_user.domain, - edu_type=edu_type, + edu_type="m.presence_accept", content={ "observed_user": observed_user.to_string(), "observer_user": observer_user.to_string(), } ) + state_dict = yield self.get_state(observed_user, as_event=False) + + self.federation.send_edu( + destination=observer_user.domain, + edu_type="m.presence", + content={ + "push": [state_dict] + } + ) + @defer.inlineCallbacks def accept_presence(self, observed_user, observer_user): """Handles a m.presence_accept EDU. Mark a presence invite from a local or remote user as accepted in a local user's presence list. Starts polling for presence updates from the local or remote user. - Args: observed_user(UserID): The user to update in the presence list. observer_user(UserID): The owner of the presence list to update. @@ -558,15 +722,10 @@ class PresenceHandler(BaseHandler): observer_user.localpart, observed_user.to_string() ) - yield self.start_polling_presence( - observer_user, target_user=observed_user - ) - @defer.inlineCallbacks def deny_presence(self, observed_user, observer_user): """Handle a m.presence_deny EDU. Removes a local or remote user from a local user's presence list. - Args: observed_user(UserID): The local or remote user to remove from the list. @@ -584,7 +743,6 @@ class PresenceHandler(BaseHandler): def drop(self, observed_user, observer_user): """Remove a local or remote user from a local user's presence list and unsubscribe the local user from updates that user. - Args: observed_user(UserId): The local or remote user to remove from the list. @@ -599,710 +757,353 @@ class PresenceHandler(BaseHandler): observer_user.localpart, observed_user.to_string() ) - self.stop_polling_presence( - observer_user, target_user=observed_user - ) - - @defer.inlineCallbacks - def get_presence_list(self, observer_user, accepted=None): - """Get the presence list for a local user. The retured list includes - the current presence state for each user listed. - - Args: - observer_user(UserID): The local user whose presence list to fetch. - accepted(bool or None): If not none then only include users who - have or have not accepted the presence invite request. - Returns: - A Deferred list of presence state events. 
- """ - if not self.hs.is_mine(observer_user): - raise SynapseError(400, "User is not hosted on this Home Server") - - presence_list = yield self.store.get_presence_list( - observer_user.localpart, accepted=accepted - ) - - results = [] - for row in presence_list: - observed_user = UserID.from_string(row["observed_user_id"]) - result = { - "observed_user": observed_user, "accepted": row["accepted"] - } - result.update( - self._get_or_offline_usercache(observed_user).get_state() - ) - if "last_active" in result: - result["last_active_ago"] = int( - self.clock.time_msec() - result.pop("last_active") - ) - results.append(result) - - defer.returnValue(results) + # TODO: Inform the remote that we've dropped the presence list. @defer.inlineCallbacks - @log_function - def start_polling_presence(self, user, target_user=None, state=None): - """Subscribe a local user to presence updates from a local or remote - user. If no target_user is supplied then subscribe to all users stored - in the presence list for the local user. - - Additonally this pushes the current presence state of this user to all - target_users. That state can be provided directly or will be read from - the stored state for the local user. - - Also this attempts to notify the local user of the current state of - any local target users. - - Args: - user(UserID): The local user that whishes for presence updates. - target_user(UserID): The local or remote user whose updates are - wanted. - state(dict): Optional presence state for the local user. + def is_visible(self, observed_user, observer_user): + """Returns whether a user can see another user's presence. """ - logger.debug("Start polling for presence from %s", user) - - if target_user: - target_users = set([target_user]) - room_ids = [] - else: - presence = yield self.store.get_presence_list( - user.localpart, accepted=True - ) - target_users = set([ - UserID.from_string(x["observed_user_id"]) for x in presence - ]) + observer_rooms = yield self.store.get_rooms_for_user(observer_user.to_string()) + observed_rooms = yield self.store.get_rooms_for_user(observed_user.to_string()) - # Also include people in all my rooms + observer_room_ids = set(r.room_id for r in observer_rooms) + observed_room_ids = set(r.room_id for r in observed_rooms) - room_ids = yield self.get_joined_rooms_for_user(user) + if observer_room_ids & observed_room_ids: + defer.returnValue(True) - if state is None: - state = yield self.store.get_presence_state(user.localpart) - else: - # statuscache = self._get_or_make_usercache(user) - # self._user_cachemap_latest_serial += 1 - # statuscache.update(state, self._user_cachemap_latest_serial) - pass - - yield self.push_update_to_local_and_remote( - observed_user=user, - users_to_push=target_users, - room_ids=room_ids, - statuscache=self._get_or_make_usercache(user), + accepted_observers = yield self.store.get_presence_list_observers_accepted( + observed_user.to_string() ) - for target_user in target_users: - if self.hs.is_mine(target_user): - self._start_polling_local(user, target_user) - - # We want to tell the person that just came online - # presence state of people they are interested in? 
- self.push_update_to_clients(
- users_to_push=[user],
- )
-
- deferreds = []
- remote_users = [u for u in target_users if not self.hs.is_mine(u)]
- remoteusers_by_domain = partition(remote_users, lambda u: u.domain)
- # Only poll for people in our get_presence_list
- for domain in remoteusers_by_domain:
- remoteusers = remoteusers_by_domain[domain]
-
- deferreds.append(self._start_polling_remote(
- user, domain, remoteusers
- ))
-
- yield defer.DeferredList(deferreds, consumeErrors=True)
+ defer.returnValue(observer_user.to_string() in accepted_observers)
- def _start_polling_local(self, user, target_user):
- """Subscribe a local user to presence updates for a local user
-
- Args:
- user(UserId): The local user that wishes for updates.
- target_user(UserId): The local users whose updates are wanted.
+ @defer.inlineCallbacks
+ def get_all_presence_updates(self, last_id, current_id):
 """
- target_localpart = target_user.localpart
-
- if target_localpart not in self._local_pushmap:
- self._local_pushmap[target_localpart] = set()
-
- self._local_pushmap[target_localpart].add(user)
-
- def _start_polling_remote(self, user, domain, remoteusers):
- """Subscribe a local user to presence updates for remote users on a
- given remote domain.
-
- Args:
- user(UserID): The local user that wishes for updates.
- domain(str): The remote server the local user wants updates from.
- remoteusers(UserID): The remote users that local user wants to be
- told about.
- Returns:
- A Deferred.
+ Gets a list of presence update rows between the given stream ids.
+ Each row has:
+ - stream_id(str)
+ - user_id(str)
+ - state(str)
+ - last_active_ts(int)
+ - last_federation_update_ts(int)
+ - last_user_sync_ts(int)
+ - status_msg(str)
+ - currently_active(int)
 """
- to_poll = set()
-
- for u in remoteusers:
- if u not in self._remote_recvmap:
- self._remote_recvmap[u] = set()
- to_poll.add(u)
-
- self._remote_recvmap[u].add(user)
-
- if not to_poll:
- return defer.succeed(None)
+ # TODO(markjh): replicate the unpersisted changes.
+ # This could use the in-memory stores for recent changes.
+ rows = yield self.store.get_all_presence_updates(last_id, current_id)
+ defer.returnValue(rows)
- return self.federation.send_edu(
- destination=domain,
- edu_type="m.presence",
- content={"poll": [u.to_string() for u in to_poll]}
- )
-
- @log_function
- def stop_polling_presence(self, user, target_user=None):
- """Unsubscribe a local user from presence updates from a local or
- remote user. If no target user is supplied then unsubscribe the user
- from all presence updates that the user had subscribed to.
-
- Args:
- user(UserID): The local user that no longer wishes for updates.
- target_user(UserID or None): The user whose updates are no longer
- wanted.
- Returns:
- A Deferred.
- """
- logger.debug("Stop polling for presence from %s", user)
- if not target_user or self.hs.is_mine(target_user):
- self._stop_polling_local(user, target_user=target_user)
+def should_notify(old_state, new_state):
+ """Decides if a presence state change should be sent to interested parties.
+ """ + if old_state.status_msg != new_state.status_msg: + return True - deferreds = [] + if old_state.state == PresenceState.ONLINE: + if new_state.state != PresenceState.ONLINE: + # Always notify for online -> anything + return True - if target_user: - if target_user not in self._remote_recvmap: - return - target_users = set([target_user]) - else: - target_users = self._remote_recvmap.keys() + if new_state.currently_active != old_state.currently_active: + return True - remoteusers = [u for u in target_users - if user in self._remote_recvmap[u]] - remoteusers_by_domain = partition(remoteusers, lambda u: u.domain) + if new_state.last_active_ts - old_state.last_active_ts > LAST_ACTIVE_GRANULARITY: + # Always notify for a transition where last active gets bumped. + return True - for domain in remoteusers_by_domain: - remoteusers = remoteusers_by_domain[domain] + if old_state.state != new_state.state: + return True - deferreds.append( - self._stop_polling_remote(user, domain, remoteusers) - ) + return False - return defer.DeferredList(deferreds, consumeErrors=True) - def _stop_polling_local(self, user, target_user): - """Unsubscribe a local user from presence updates from a local user on - this server. +def _format_user_presence_state(state, now): + """Convert UserPresenceState to a format that can be sent down to clients + and to other servers. + """ + content = { + "presence": state.state, + "user_id": state.user_id, + } + if state.last_active_ts: + content["last_active_ago"] = now - state.last_active_ts + if state.status_msg and state.state != PresenceState.OFFLINE: + content["status_msg"] = state.status_msg + if state.state == PresenceState.ONLINE: + content["currently_active"] = state.currently_active - Args: - user(UserID): The local user that no longer wishes for updates. - target_user(UserID): The user whose updates are no longer wanted. - """ - for localpart in self._local_pushmap.keys(): - if target_user and localpart != target_user.localpart: - continue + return content - if user in self._local_pushmap[localpart]: - self._local_pushmap[localpart].remove(user) - if not self._local_pushmap[localpart]: - del self._local_pushmap[localpart] +class PresenceEventSource(object): + def __init__(self, hs): + self.hs = hs + self.clock = hs.get_clock() + self.store = hs.get_datastore() + @defer.inlineCallbacks @log_function - def _stop_polling_remote(self, user, domain, remoteusers): - """Unsubscribe a local user from presence updates from remote users on - a given domain. - - Args: - user(UserID): The local user that no longer wishes for updates. - domain(str): The remote server to unsubscribe from. - remoteusers([UserID]): The users on that remote server that the - local user no longer wishes to be updated about. - Returns: - A Deferred. - """ - to_unpoll = set() - - for u in remoteusers: - self._remote_recvmap[u].remove(user) - - if not self._remote_recvmap[u]: - del self._remote_recvmap[u] - to_unpoll.add(u) + def get_new_events(self, user, from_key, room_ids=None, include_offline=True, + **kwargs): + # The process for getting presence events are: + # 1. Get the rooms the user is in. + # 2. Get the list of user in the rooms. + # 3. Get the list of users that are in the user's presence list. + # 4. If there is a from_key set, cross reference the list of users + # with the `presence_stream_cache` to see which ones we actually + # need to check. + # 5. Load current state for the users. 
+ #
+ # We don't try and limit the presence updates by the current token, as
+ # sending down the rare duplicate is not a concern.
+
+ with Measure(self.clock, "presence.get_new_events"):
+ user_id = user.to_string()
+ if from_key is not None:
+ from_key = int(from_key)
+ room_ids = room_ids or []
- if not to_unpoll:
- return defer.succeed(None)
+ presence = self.hs.get_handlers().presence_handler
+ stream_change_cache = self.store.presence_stream_cache
- return self.federation.send_edu(
- destination=domain,
- edu_type="m.presence",
- content={"unpoll": [u.to_string() for u in to_unpoll]}
- )
+ if not room_ids:
+ rooms = yield self.store.get_rooms_for_user(user_id)
+ room_ids = set(e.room_id for e in rooms)
+ else:
+ room_ids = set(room_ids)
+
+ max_token = self.store.get_current_presence_token()
+
+ plist = yield self.store.get_presence_list_accepted(user.localpart)
+ friends = set(row["observed_user_id"] for row in plist)
+ friends.add(user_id) # So that we receive our own presence
+
+ user_ids_changed = set()
+ changed = None
+ if from_key and max_token - from_key < 100:
+ # For small deltas, it's quicker to get all changes and then
+ # work out if we share a room or they're in our presence list
+ changed = stream_change_cache.get_all_entities_changed(from_key)
+
+ # get_all_entities_changed can return None
+ if changed is not None:
+ for other_user_id in changed:
+ if other_user_id in friends:
+ user_ids_changed.add(other_user_id)
+ continue
+ other_rooms = yield self.store.get_rooms_for_user(other_user_id)
+ if room_ids.intersection(e.room_id for e in other_rooms):
+ user_ids_changed.add(other_user_id)
+ continue
+ else:
+ # Too many possible updates. Find all users we can see and check
+ # if any of them have changed.
+ user_ids_to_check = set()
+ for room_id in room_ids:
+ users = yield self.store.get_users_in_room(room_id)
+ user_ids_to_check.update(users)
+
+ user_ids_to_check.update(friends)
+
+ # Always include yourself. Only really matters for when the user is
+ # not in any rooms, but still.
+ user_ids_to_check.add(user_id)
+
+ if from_key:
+ user_ids_changed = stream_change_cache.get_entities_changed(
+ user_ids_to_check, from_key,
+ )
+ else:
+ user_ids_changed = user_ids_to_check
- @defer.inlineCallbacks
- @log_function
- def push_presence(self, user, statuscache):
- """
- Notify local and remote users of a change in presence of a local user.
- Pushes the update to local clients and remote domains that are directly
- subscribed to the presence of the local user.
- Also pushes that update to any local user or remote domain that shares
- a room with the local user.
+ updates = yield presence.current_state_for_users(user_ids_changed)
- Args:
- user(UserID): The local user whose presence was updated.
- statuscache(UserPresenceCache): Cache of the user's presence state
- Returns:
- A Deferred.
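
The small-delta/large-delta branch above encodes a common fan-out trade-off: when the gap since `from_key` is small, it is cheaper to enumerate *all* changed users and filter them down to the visible set; when it is large, it is cheaper to enumerate the visible users and ask the cache which of them changed. Schematically, with a simplified synchronous cache interface and without the per-user shared-room lookup the real code also does:

    SMALL_DELTA = 100  # same threshold as the code above

    def changed_visible_users(cache, visible_user_ids, from_key, max_token):
        if not from_key:
            # Initial sync: everyone visible counts as changed.
            return set(visible_user_ids)
        if max_token - from_key < SMALL_DELTA:
            # Few stream rows: walk every change, keep the visible ones.
            changed = cache.get_all_entities_changed(from_key)
            if changed is not None:
                return set(u for u in changed if u in visible_user_ids)
        # Many rows (or cache miss): test each visible user instead.
        return cache.get_entities_changed(visible_user_ids, from_key)
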
- """ - assert(self.hs.is_mine(user)) + now = self.clock.time_msec() - logger.debug("Pushing presence update from %s", user) + defer.returnValue(([ + { + "type": "m.presence", + "content": _format_user_presence_state(s, now), + } + for s in updates.values() + if include_offline or s.state != PresenceState.OFFLINE + ], max_token)) - localusers = set(self._local_pushmap.get(user.localpart, set())) - remotedomains = set(self._remote_sendmap.get(user.localpart, set())) + def get_current_key(self): + return self.store.get_current_presence_token() - # Reflect users' status changes back to themselves, so UIs look nice - # and also user is informed of server-forced pushes - localusers.add(user) + def get_pagination_rows(self, user, pagination_config, key): + return self.get_new_events(user, from_key=None, include_offline=False) - room_ids = yield self.get_joined_rooms_for_user(user) - if not localusers and not room_ids: - defer.returnValue(None) +def handle_timeouts(user_states, is_mine_fn, user_to_num_current_syncs, now): + """Checks the presence of users that have timed out and updates as + appropriate. - yield self.push_update_to_local_and_remote( - observed_user=user, - users_to_push=localusers, - remote_domains=remotedomains, - room_ids=room_ids, - statuscache=statuscache, - ) - yield user_presence_changed(self.distributor, user, statuscache) + Args: + user_states(list): List of UserPresenceState's to check. + is_mine_fn (fn): Function that returns if a user_id is ours + user_to_num_current_syncs (dict): Mapping of user_id to number of currently + active syncs. + now (int): Current time in ms. - @defer.inlineCallbacks - def incoming_presence(self, origin, content): - """Handle an incoming m.presence EDU. - For each presence update in the "push" list update our local cache and - notify the appropriate local clients. Only clients that share a room - or are directly subscribed to the presence for a user should be - notified of the update. - For each subscription request in the "poll" list start pushing presence - updates to the remote server. - For unsubscribe request in the "unpoll" list stop pushing presence - updates to the remote server. + Returns: + List of UserPresenceState updates + """ + changes = {} # Actual changes we need to notify people about - Args: - orgin(str): The source of this m.presence EDU. - content(dict): The content of this m.presence EDU. - Returns: - A Deferred. - """ - deferreds = [] + for state in user_states: + is_mine = is_mine_fn(state.user_id) - for push in content.get("push", []): - user = UserID.from_string(push["user_id"]) + new_state = handle_timeout(state, is_mine, user_to_num_current_syncs, now) + if new_state: + changes[state.user_id] = new_state - logger.debug("Incoming presence update from %s", user) + return changes.values() - observers = set(self._remote_recvmap.get(user, set())) - if observers: - logger.debug( - " | %d interested local observers %r", len(observers), observers - ) - room_ids = yield self.get_joined_rooms_for_user(user) - if room_ids: - logger.debug(" | %d interested room IDs %r", len(room_ids), room_ids) +def handle_timeout(state, is_mine, user_to_num_current_syncs, now): + """Checks the presence of the user to see if any of the timers have elapsed - state = dict(push) - del state["user_id"] + Args: + state (UserPresenceState) + is_mine (bool): Whether the user is ours + user_to_num_current_syncs (dict): Mapping of user_id to number of currently + active syncs. + now (int): Current time in ms. 
- if "presence" not in state: - logger.warning( - "Received a presence 'push' EDU from %s without a" - " 'presence' key", origin + Returns: + A UserPresenceState update or None if no update. + """ + if state.state == PresenceState.OFFLINE: + # No timeouts are associated with offline states. + return None + + changed = False + user_id = state.user_id + + if is_mine: + if state.state == PresenceState.ONLINE: + if now - state.last_active_ts > IDLE_TIMER: + # Currently online, but last activity ages ago so auto + # idle + state = state.copy_and_replace( + state=PresenceState.UNAVAILABLE, ) - continue - - if "last_active_ago" in state: - state["last_active"] = int( - self.clock.time_msec() - state.pop("last_active_ago") + changed = True + elif now - state.last_active_ts > LAST_ACTIVE_GRANULARITY: + # So that we send down a notification that we've + # stopped updating. + changed = True + + if now - state.last_federation_update_ts > FEDERATION_PING_INTERVAL: + # Need to send ping to other servers to ensure they don't + # timeout and set us to offline + changed = True + + # If there are have been no sync for a while (and none ongoing), + # set presence to offline + if not user_to_num_current_syncs.get(user_id, 0): + if now - state.last_user_sync_ts > SYNC_ONLINE_TIMEOUT: + state = state.copy_and_replace( + state=PresenceState.OFFLINE, + status_msg=None, ) - - self._user_cachemap_latest_serial += 1 - yield self.update_presence_cache(user, state, room_ids=room_ids) - - if not observers and not room_ids: - logger.debug(" | no interested observers or room IDs") - continue - - self.push_update_to_clients( - users_to_push=observers, room_ids=room_ids + changed = True + else: + # We expect to be poked occaisonally by the other side. + # This is to protect against forgetful/buggy servers, so that + # no one gets stuck online forever. + if now - state.last_federation_update_ts > FEDERATION_TIMEOUT: + # The other side seems to have disappeared. + state = state.copy_and_replace( + state=PresenceState.OFFLINE, + status_msg=None, ) + changed = True - user_id = user.to_string() - - if state["presence"] == PresenceState.OFFLINE: - self._remote_offline_serials.insert( - 0, - (self._user_cachemap_latest_serial, set([user_id])) - ) - while len(self._remote_offline_serials) > MAX_OFFLINE_SERIALS: - self._remote_offline_serials.pop() # remove the oldest - if user in self._user_cachemap: - del self._user_cachemap[user] - else: - # Remove the user from remote_offline_serials now that they're - # no longer offline - for idx, elem in enumerate(self._remote_offline_serials): - (_, user_ids) = elem - user_ids.discard(user_id) - if not user_ids: - self._remote_offline_serials.pop(idx) - - for poll in content.get("poll", []): - user = UserID.from_string(poll) - - if not self.hs.is_mine(user): - continue + return state if changed else None - # TODO(paul) permissions checks - - if user not in self._remote_sendmap: - self._remote_sendmap[user] = set() - - self._remote_sendmap[user].add(origin) - - deferreds.append(self._push_presence_remote(user, origin)) - - for unpoll in content.get("unpoll", []): - user = UserID.from_string(unpoll) - - if not self.hs.is_mine(user): - continue - if user in self._remote_sendmap: - self._remote_sendmap[user].remove(origin) +def handle_update(prev_state, new_state, is_mine, wheel_timer, now): + """Given a presence update: + 1. Add any appropriate timers. + 2. Check if we should notify anyone. 
- if not self._remote_sendmap[user]:
- del self._remote_sendmap[user]
+ Args:
+ prev_state (UserPresenceState)
+ new_state (UserPresenceState)
+ is_mine (bool): Whether the user is ours
+ wheel_timer (WheelTimer)
+ now (int): Time now in ms
- yield defer.DeferredList(deferreds, consumeErrors=True)
-
- @defer.inlineCallbacks
- def update_presence_cache(self, user, state={}, room_ids=None,
- add_to_cache=True):
- """Update the presence cache for a user with a new state and bump the
- serial to the latest value.
-
- Args:
- user(UserID): The user being updated
- state(dict): The presence state being updated
- room_ids(None or list of str): A list of room_ids to update. If
- room_ids is None then fetch the list of room_ids the user is
- joined to.
- add_to_cache: Whether to add an entry to the presence cache if the
- user isn't already in the cache.
- Returns:
- A Deferred UserPresenceCache for the user being updated.
- """
- if room_ids is None:
- room_ids = yield self.get_joined_rooms_for_user(user)
-
- for room_id in room_ids:
- self._room_serials[room_id] = self._user_cachemap_latest_serial
- if add_to_cache:
- statuscache = self._get_or_make_usercache(user)
- else:
- statuscache = self._get_or_offline_usercache(user)
- statuscache.update(state, serial=self._user_cachemap_latest_serial)
- defer.returnValue(statuscache)
-
- @defer.inlineCallbacks
- def push_update_to_local_and_remote(self, observed_user, statuscache,
- users_to_push=[], room_ids=[],
- remote_domains=[]):
- """Notify local clients and remote servers of a change in the presence
- of a user.
-
- Args:
- observed_user(UserID): The user to push the presence state for.
- statuscache(UserPresenceCache): The cache for the presence state to
- push.
- users_to_push([UserID]): A list of local and remote users to
- notify.
- room_ids([str]): Notify the local and remote occupants of these
- rooms.
- remote_domains([str]): A list of remote servers to notify in
- addition to those implied by the users_to_push and the
- room_ids.
- Returns:
- A Deferred.
- """
-
- localusers, remoteusers = partitionbool(
- users_to_push,
- lambda u: self.hs.is_mine(u)
- )
-
- localusers = set(localusers)
-
- self.push_update_to_clients(
- users_to_push=localusers, room_ids=room_ids
- )
-
- remote_domains = set(remote_domains)
- remote_domains |= set([r.domain for r in remoteusers])
- for room_id in room_ids:
- remote_domains.update(
- (yield self.store.get_joined_hosts_for_room(room_id))
- )
-
- remote_domains.discard(self.hs.hostname)
-
- deferreds = []
- for domain in remote_domains:
- logger.debug(" | push to remote domain %s", domain)
- deferreds.append(
- self._push_presence_remote(
- observed_user, domain, state=statuscache.get_state()
- )
+ Returns:
+ 3-tuple: `(new_state, persist_and_notify, federation_ping)` where:
+ - new_state: the state to actually persist
+ - persist_and_notify (bool): whether to persist and notify people
+ - federation_ping (bool): whether we should send a ping over federation
+ """
+ user_id = new_state.user_id
+
+ persist_and_notify = False
+ federation_ping = False
+
+ # If the user is ours then we want to set up a bunch of timers
+ # to time things out.
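
The timers mentioned here go into a wheel timer, which buckets (deadline, object) pairs so that expiry checks are cheap. A toy version showing the insert/fetch contract that `handle_update` relies on (the real implementation lives elsewhere in the tree, in synapse.util.wheel_timer):

    class ToyWheelTimer(object):
        """Stores (deadline, obj) pairs; fetch(now) pops everything due."""
        def __init__(self):
            self._entries = []

        def insert(self, now, obj, then):
            # The real wheel timer uses 'now' to pick a coarse bucket;
            # the toy just appends and filters on fetch.
            self._entries.append((then, obj))

        def fetch(self, now):
            due = [obj for then, obj in self._entries if then <= now]
            self._entries = [(t, o) for t, o in self._entries if t > now]
            return due
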
+ if is_mine: + if new_state.state == PresenceState.ONLINE: + # Idle timer + wheel_timer.insert( + now=now, + obj=user_id, + then=new_state.last_active_ts + IDLE_TIMER ) - yield defer.DeferredList(deferreds, consumeErrors=True) - - defer.returnValue((localusers, remote_domains)) - - def push_update_to_clients(self, users_to_push=[], room_ids=[]): - """Notify clients of a new presence event. - - Args: - users_to_push([UserID]): List of users to notify. - room_ids([str]): List of room_ids to notify. - """ - with PreserveLoggingContext(): - self.notifier.on_new_event( - "presence_key", - self._user_cachemap_latest_serial, - users_to_push, - room_ids, + active = now - new_state.last_active_ts < LAST_ACTIVE_GRANULARITY + new_state = new_state.copy_and_replace( + currently_active=active, ) - @defer.inlineCallbacks - def _push_presence_remote(self, user, destination, state=None): - """Push a user's presence to a remote server. If a presence state event - that event is sent. Otherwise a new state event is constructed from the - stored presence state. - The last_active is replaced with last_active_ago in case the wallclock - time on the remote server is different to the time on this server. - Sends an EDU to the remote server with the current presence state. - - Args: - user(UserID): The user to push the presence state for. - destination(str): The remote server to send state to. - state(dict): The state to push, or None to use the current stored - state. - Returns: - A Deferred. - """ - if state is None: - state = yield self.store.get_presence_state(user.localpart) - del state["mtime"] - state["presence"] = state.pop("state") - - if user in self._user_cachemap: - state["last_active"] = ( - self._user_cachemap[user].get_state()["last_active"] + if active: + wheel_timer.insert( + now=now, + obj=user_id, + then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY ) - yield collect_presencelike_data(self.distributor, user, state) - - if "last_active" in state: - state = dict(state) - state["last_active_ago"] = int( - self.clock.time_msec() - state.pop("last_active") + if new_state.state != PresenceState.OFFLINE: + # User has stopped syncing + wheel_timer.insert( + now=now, + obj=user_id, + then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT ) - user_state = {"user_id": user.to_string(), } - user_state.update(state) - - yield self.federation.send_edu( - destination=destination, - edu_type="m.presence", - content={"push": [user_state, ], } - ) - - -class PresenceEventSource(object): - def __init__(self, hs): - self.hs = hs - self.clock = hs.get_clock() - - @defer.inlineCallbacks - @log_function - def get_new_events(self, user, from_key, room_ids=None, **kwargs): - from_key = int(from_key) - room_ids = room_ids or [] - - presence = self.hs.get_handlers().presence_handler - cachemap = presence._user_cachemap - - max_serial = presence._user_cachemap_latest_serial - - clock = self.clock - latest_serial = 0 - - user_ids_to_check = {user} - presence_list = yield presence.store.get_presence_list( - user.localpart, accepted=True - ) - if presence_list is not None: - user_ids_to_check |= set( - UserID.from_string(p["observed_user_id"]) for p in presence_list - ) - for room_id in set(room_ids) & set(presence._room_serials): - if presence._room_serials[room_id] > from_key: - joined = yield presence.get_joined_users_for_room_id(room_id) - user_ids_to_check |= set(joined) - - updates = [] - for observed_user in user_ids_to_check & set(cachemap): - cached = cachemap[observed_user] - - if cached.serial <= from_key or 
cached.serial > max_serial: - continue - - latest_serial = max(cached.serial, latest_serial) - updates.append(cached.make_event(user=observed_user, clock=clock)) - - # TODO(paul): limit - - for serial, user_ids in presence._remote_offline_serials: - if serial <= from_key: - break - - if serial > max_serial: - continue - - latest_serial = max(latest_serial, serial) - for u in user_ids: - updates.append({ - "type": "m.presence", - "content": {"user_id": u, "presence": PresenceState.OFFLINE}, - }) - # TODO(paul): For the v2 API we want to tell the client their from_key - # is too old if we fell off the end of the _remote_offline_serials - # list, and get them to invalidate+resync. In v1 we have no such - # concept so this is a best-effort result. - - if updates: - defer.returnValue((updates, latest_serial)) - else: - defer.returnValue(([], presence._user_cachemap_latest_serial)) - - def get_current_key(self): - presence = self.hs.get_handlers().presence_handler - return presence._user_cachemap_latest_serial - - @defer.inlineCallbacks - def get_pagination_rows(self, user, pagination_config, key): - # TODO (erikj): Does this make sense? Ordering? - - from_key = int(pagination_config.from_key) - - if pagination_config.to_key: - to_key = int(pagination_config.to_key) - else: - to_key = -1 - - presence = self.hs.get_handlers().presence_handler - cachemap = presence._user_cachemap + last_federate = new_state.last_federation_update_ts + if now - last_federate > FEDERATION_PING_INTERVAL: + # Been a while since we've poked remote servers + new_state = new_state.copy_and_replace( + last_federation_update_ts=now, + ) + federation_ping = True - user_ids_to_check = {user} - presence_list = yield presence.store.get_presence_list( - user.localpart, accepted=True + else: + wheel_timer.insert( + now=now, + obj=user_id, + then=new_state.last_federation_update_ts + FEDERATION_TIMEOUT ) - if presence_list is not None: - user_ids_to_check |= set( - UserID.from_string(p["observed_user_id"]) for p in presence_list - ) - room_ids = yield presence.get_joined_rooms_for_user(user) - for room_id in set(room_ids) & set(presence._room_serials): - if presence._room_serials[room_id] >= from_key: - joined = yield presence.get_joined_users_for_room_id(room_id) - user_ids_to_check |= set(joined) - - updates = [] - for observed_user in user_ids_to_check & set(cachemap): - if not (to_key < cachemap[observed_user].serial <= from_key): - continue - - updates.append((observed_user, cachemap[observed_user])) - - # TODO(paul): limit - - if updates: - clock = self.clock - - earliest_serial = max([x[1].serial for x in updates]) - data = [x[1].make_event(user=x[0], clock=clock) for x in updates] - - defer.returnValue((data, earliest_serial)) - else: - defer.returnValue(([], 0)) - -class UserPresenceCache(object): - """Store an observed user's state and status message. - - Includes the update timestamp. 
- """ - def __init__(self): - self.state = {"presence": PresenceState.OFFLINE} - self.serial = None - - def __repr__(self): - return "UserPresenceCache(state=%r, serial=%r)" % ( - self.state, self.serial + # Check whether the change was something worth notifying about + if should_notify(prev_state, new_state): + new_state = new_state.copy_and_replace( + last_federation_update_ts=now, ) + persist_and_notify = True - def update(self, state, serial): - assert("mtime_age" not in state) - - self.state.update(state) - # Delete keys that are now 'None' - for k in self.state.keys(): - if self.state[k] is None: - del self.state[k] - - self.serial = serial - - if "status_msg" in state: - self.status_msg = state["status_msg"] - else: - self.status_msg = None - - def get_state(self): - # clone it so caller can't break our cache - state = dict(self.state) - return state - - def make_event(self, user, clock): - content = self.get_state() - content["user_id"] = user.to_string() - - if "last_active" in content: - content["last_active_ago"] = int( - clock.time_msec() - content.pop("last_active") - ) - - return {"type": "m.presence", "content": content} + return new_state, persist_and_notify, federation_ping diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 629e6e3594..c9ad5944e6 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -16,8 +16,7 @@ from twisted.internet import defer from synapse.api.errors import SynapseError, AuthError, CodeMessageException -from synapse.api.constants import EventTypes, Membership -from synapse.types import UserID +from synapse.types import UserID, Requester from synapse.util import unwrapFirstError from ._base import BaseHandler @@ -49,6 +48,9 @@ class ProfileHandler(BaseHandler): distributor = hs.get_distributor() self.distributor = distributor + distributor.declare("collect_presencelike_data") + distributor.declare("changed_presencelike_data") + distributor.observe("registered_user", self.registered_user) distributor.observe( @@ -208,21 +210,18 @@ class ProfileHandler(BaseHandler): ) for j in joins: - content = { - "membership": Membership.JOIN, - } - - yield collect_presencelike_data(self.distributor, user, content) - - msg_handler = self.hs.get_handlers().message_handler + handler = self.hs.get_handlers().room_member_handler try: - yield msg_handler.create_and_send_event({ - "type": EventTypes.Member, - "room_id": j.room_id, - "state_key": user.to_string(), - "content": content, - "sender": user.to_string() - }, ratelimit=False) + # Assume the user isn't a guest because we don't let guests set + # profile or avatar data. + requester = Requester(user, "", False) + yield handler.update_membership( + requester, + user, + j.room_id, + "join", # We treat a profile update like a join. + ratelimit=False, # Try to hide that these events aren't atomic. 
+ ) except Exception as e: logger.warn( "Failed to update join event for room %s - %s", diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py index de4c694714..935c339707 100644 --- a/synapse/handlers/receipts.py +++ b/synapse/handlers/receipts.py @@ -36,8 +36,6 @@ class ReceiptsHandler(BaseHandler): ) self.clock = self.hs.get_clock() - self._receipt_cache = None - @defer.inlineCallbacks def received_client_receipt(self, room_id, receipt_type, user_id, event_id): diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 24c850ae9b..6d155d57e7 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -60,7 +60,7 @@ class RegistrationHandler(BaseHandler): user = UserID(localpart, self.hs.hostname) user_id = user.to_string() - yield self.check_user_id_is_valid(user_id) + yield self.check_user_id_not_appservice_exclusive(user_id) users = yield self.store.get_users_by_id_case_insensitive(user_id) if users: @@ -145,7 +145,7 @@ class RegistrationHandler(BaseHandler): localpart = yield self._generate_user_id(attempts > 0) user = UserID(localpart, self.hs.hostname) user_id = user.to_string() - yield self.check_user_id_is_valid(user_id) + yield self.check_user_id_not_appservice_exclusive(user_id) if generate_token: token = self.auth_handler().generate_access_token(user_id) try: @@ -180,6 +180,11 @@ class RegistrationHandler(BaseHandler): 400, "Invalid user localpart for this application service.", errcode=Codes.EXCLUSIVE ) + + yield self.check_user_id_not_appservice_exclusive( + user_id, allowed_appservice=service + ) + token = self.auth_handler().generate_access_token(user_id) yield self.store.register( user_id=user_id, @@ -226,7 +231,7 @@ class RegistrationHandler(BaseHandler): user = UserID(localpart, self.hs.hostname) user_id = user.to_string() - yield self.check_user_id_is_valid(user_id) + yield self.check_user_id_not_appservice_exclusive(user_id) token = self.auth_handler().generate_access_token(user_id) try: yield self.store.register( @@ -278,12 +283,14 @@ class RegistrationHandler(BaseHandler): yield identity_handler.bind_threepid(c, user_id) @defer.inlineCallbacks - def check_user_id_is_valid(self, user_id): + def check_user_id_not_appservice_exclusive(self, user_id, allowed_appservice=None): # valid user IDs must not clash with any user ID namespaces claimed by # application services. 
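
The rule being introduced in register.py here (and relaxed by `allowed_appservice`) is: a registration is blocked only when some *other* application service holds an exclusive claim on the user ID, so a service can still create users inside its own exclusive namespace. Condensed into a standalone predicate over stub service objects (illustrative only):

    def blocked_by_exclusive_namespace(services, user_id, allowed_appservice=None):
        return any(
            service.is_exclusive_user(user_id)
            for service in services
            if service.is_interested_in_user(user_id)
            and service != allowed_appservice
        )
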
services = yield self.store.get_app_services() interested_services = [ - s for s in services if s.is_interested_in_user(user_id) + s for s in services + if s.is_interested_in_user(user_id) + and s != allowed_appservice ] for service in interested_services: if service.is_exclusive_user(user_id): @@ -342,3 +349,18 @@ class RegistrationHandler(BaseHandler): def auth_handler(self): return self.hs.get_handlers().auth_handler + + @defer.inlineCallbacks + def guest_access_token_for(self, medium, address, inviter_user_id): + access_token = yield self.store.get_3pid_guest_access_token(medium, address) + if access_token: + defer.returnValue(access_token) + + _, access_token = yield self.register( + generate_token=True, + make_guest=True + ) + access_token = yield self.store.save_or_get_3pid_guest_access_token( + medium, address, access_token, inviter_user_id + ) + defer.returnValue(access_token) diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index a8e3a9029c..d2de23a6cc 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -24,7 +24,6 @@ from synapse.api.constants import ( ) from synapse.api.errors import AuthError, StoreError, SynapseError, Codes from synapse.util import stringutils, unwrapFirstError -from synapse.util.async import run_on_reactor from synapse.util.logcontext import preserve_context_over_fn from signedjson.sign import verify_signed_json @@ -42,10 +41,6 @@ logger = logging.getLogger(__name__) id_server_scheme = "https://" -def collect_presencelike_data(distributor, user, content): - return distributor.fire("collect_presencelike_data", user, content) - - def user_left_room(distributor, user, room_id): return preserve_context_over_fn( distributor.fire, @@ -81,20 +76,20 @@ class RoomCreationHandler(BaseHandler): } @defer.inlineCallbacks - def create_room(self, user_id, room_id, config): + def create_room(self, requester, config): """ Creates a new room. Args: - user_id (str): The ID of the user creating the new room. - room_id (str): The proposed ID for the new room. Can be None, in - which case one will be created for you. + requester (Requester): The user who requested the room creation. config (dict) : A dict of configuration options. Returns: The new room ID. Raises: - SynapseError if the room ID was taken, couldn't be stored, or - something went horribly wrong. + SynapseError if the room ID couldn't be stored, or something went + horribly wrong. """ + user_id = requester.user.to_string() + self.ratelimit(user_id) if "room_alias_name" in config: @@ -126,40 +121,28 @@ class RoomCreationHandler(BaseHandler): is_public = config.get("visibility", None) == "public" - if room_id: - # Ensure room_id is the correct type - room_id_obj = RoomID.from_string(room_id) - if not self.hs.is_mine(room_id_obj): - raise SynapseError(400, "Room id must be local") - - yield self.store.store_room( - room_id=room_id, - room_creator_user_id=user_id, - is_public=is_public - ) - else: - # autogen room IDs and try to create it. We may clash, so just - # try a few times till one goes through, giving up eventually. 
- attempts = 0 - room_id = None - while attempts < 5: - try: - random_string = stringutils.random_string(18) - gen_room_id = RoomID.create( - random_string, - self.hs.hostname, - ) - yield self.store.store_room( - room_id=gen_room_id.to_string(), - room_creator_user_id=user_id, - is_public=is_public - ) - room_id = gen_room_id.to_string() - break - except StoreError: - attempts += 1 - if not room_id: - raise StoreError(500, "Couldn't generate a room ID.") + # autogen room IDs and try to create it. We may clash, so just + # try a few times till one goes through, giving up eventually. + attempts = 0 + room_id = None + while attempts < 5: + try: + random_string = stringutils.random_string(18) + gen_room_id = RoomID.create( + random_string, + self.hs.hostname, + ) + yield self.store.store_room( + room_id=gen_room_id.to_string(), + room_creator_user_id=user_id, + is_public=is_public + ) + room_id = gen_room_id.to_string() + break + except StoreError: + attempts += 1 + if not room_id: + raise StoreError(500, "Couldn't generate a room ID.") if room_alias: directory_handler = self.hs.get_handlers().directory_handler @@ -185,9 +168,14 @@ class RoomCreationHandler(BaseHandler): creation_content = config.get("creation_content", {}) - user = UserID.from_string(user_id) - creation_events = self._create_events_for_new_room( - user, room_id, + msg_handler = self.hs.get_handlers().message_handler + room_member_handler = self.hs.get_handlers().room_member_handler + + yield self._send_events_for_new_room( + requester, + room_id, + msg_handler, + room_member_handler, preset_config=preset_config, invite_list=invite_list, initial_state=initial_state, @@ -195,14 +183,9 @@ class RoomCreationHandler(BaseHandler): room_alias=room_alias, ) - msg_handler = self.hs.get_handlers().message_handler - - for event in creation_events: - yield msg_handler.create_and_send_event(event, ratelimit=False) - if "name" in config: name = config["name"] - yield msg_handler.create_and_send_event({ + yield msg_handler.create_and_send_nonmember_event({ "type": EventTypes.Name, "room_id": room_id, "sender": user_id, @@ -212,7 +195,7 @@ class RoomCreationHandler(BaseHandler): if "topic" in config: topic = config["topic"] - yield msg_handler.create_and_send_event({ + yield msg_handler.create_and_send_nonmember_event({ "type": EventTypes.Topic, "room_id": room_id, "sender": user_id, @@ -221,13 +204,13 @@ class RoomCreationHandler(BaseHandler): }, ratelimit=False) for invitee in invite_list: - yield msg_handler.create_and_send_event({ - "type": EventTypes.Member, - "state_key": invitee, - "room_id": room_id, - "sender": user_id, - "content": {"membership": Membership.INVITE}, - }, ratelimit=False) + room_member_handler.update_membership( + requester, + UserID.from_string(invitee), + room_id, + "invite", + ratelimit=False, + ) for invite_3pid in invite_3pid_list: id_server = invite_3pid["id_server"] @@ -235,11 +218,11 @@ class RoomCreationHandler(BaseHandler): medium = invite_3pid["medium"] yield self.hs.get_handlers().room_member_handler.do_3pid_invite( room_id, - user, + requester.user, medium, address, id_server, - token_id=None, + requester, txn_id=None, ) @@ -253,19 +236,19 @@ class RoomCreationHandler(BaseHandler): defer.returnValue(result) - def _create_events_for_new_room(self, creator, room_id, preset_config, - invite_list, initial_state, creation_content, - room_alias): - config = RoomCreationHandler.PRESETS_DICT[preset_config] - - creator_id = creator.to_string() - - event_keys = { - "room_id": room_id, - "sender": creator_id, - 
"state_key": "", - } - + @defer.inlineCallbacks + def _send_events_for_new_room( + self, + creator, # A Requester object. + room_id, + msg_handler, + room_member_handler, + preset_config, + invite_list, + initial_state, + creation_content, + room_alias + ): def create(etype, content, **kwargs): e = { "type": etype, @@ -277,26 +260,39 @@ class RoomCreationHandler(BaseHandler): return e - creation_content.update({"creator": creator.to_string()}) - creation_event = create( + @defer.inlineCallbacks + def send(etype, content, **kwargs): + event = create(etype, content, **kwargs) + yield msg_handler.create_and_send_nonmember_event(event, ratelimit=False) + + config = RoomCreationHandler.PRESETS_DICT[preset_config] + + creator_id = creator.user.to_string() + + event_keys = { + "room_id": room_id, + "sender": creator_id, + "state_key": "", + } + + creation_content.update({"creator": creator_id}) + yield send( etype=EventTypes.Create, content=creation_content, ) - join_event = create( - etype=EventTypes.Member, - state_key=creator_id, - content={ - "membership": Membership.JOIN, - }, + yield room_member_handler.update_membership( + creator, + creator.user, + room_id, + "join", + ratelimit=False, ) - returned_events = [creation_event, join_event] - if (EventTypes.PowerLevels, '') not in initial_state: power_level_content = { "users": { - creator.to_string(): 100, + creator_id: 100, }, "users_default": 0, "events": { @@ -318,45 +314,35 @@ class RoomCreationHandler(BaseHandler): for invitee in invite_list: power_level_content["users"][invitee] = 100 - power_levels_event = create( + yield send( etype=EventTypes.PowerLevels, content=power_level_content, ) - returned_events.append(power_levels_event) - if room_alias and (EventTypes.CanonicalAlias, '') not in initial_state: - room_alias_event = create( + yield send( etype=EventTypes.CanonicalAlias, content={"alias": room_alias.to_string()}, ) - returned_events.append(room_alias_event) - if (EventTypes.JoinRules, '') not in initial_state: - join_rules_event = create( + yield send( etype=EventTypes.JoinRules, content={"join_rule": config["join_rules"]}, ) - returned_events.append(join_rules_event) - if (EventTypes.RoomHistoryVisibility, '') not in initial_state: - history_event = create( + yield send( etype=EventTypes.RoomHistoryVisibility, content={"history_visibility": config["history_visibility"]} ) - returned_events.append(history_event) - for (etype, state_key), content in initial_state.items(): - returned_events.append(create( + yield send( etype=etype, state_key=state_key, content=content, - )) - - return returned_events + ) class RoomMemberHandler(BaseHandler): @@ -404,16 +390,35 @@ class RoomMemberHandler(BaseHandler): remotedomains.add(member.domain) @defer.inlineCallbacks - def update_membership(self, requester, target, room_id, action, txn_id=None): + def update_membership( + self, + requester, + target, + room_id, + action, + txn_id=None, + remote_room_hosts=None, + third_party_signed=None, + ratelimit=True, + ): effective_membership_state = action if action in ["kick", "unban"]: effective_membership_state = "leave" elif action == "forget": effective_membership_state = "leave" + if third_party_signed is not None: + replication = self.hs.get_replication_layer() + yield replication.exchange_third_party_invite( + third_party_signed["sender"], + target.to_string(), + room_id, + third_party_signed, + ) + msg_handler = self.hs.get_handlers().message_handler - content = {"membership": unicode(effective_membership_state)} + content = {"membership": 
effective_membership_state}
 if requester.is_guest:
 content["kind"] = "guest"
@@ -424,6 +429,9 @@
 "room_id": room_id,
 "sender": requester.user.to_string(),
 "state_key": target.to_string(),
+
+ # For backwards compatibility:
+ "membership": effective_membership_state,
 },
 token_id=requester.access_token_id,
 txn_id=txn_id,
@@ -444,202 +452,181 @@
 errcode=Codes.BAD_STATE
 )
- yield msg_handler.send_event(
+ member_handler = self.hs.get_handlers().room_member_handler
+ yield member_handler.send_membership_event(
 event,
 context,
- ratelimit=True,
- is_guest=requester.is_guest
+ is_guest=requester.is_guest,
+ ratelimit=ratelimit,
+ remote_room_hosts=remote_room_hosts,
+ from_client=True,
 )
 if action == "forget":
 yield self.forget(requester.user, room_id)
 @defer.inlineCallbacks
- def send_membership_event(self, event, context, is_guest=False):
- """ Change the membership status of a user in a room.
+ def send_membership_event(
+ self,
+ event,
+ context,
+ is_guest=False,
+ remote_room_hosts=None,
+ ratelimit=True,
+ from_client=True,
+ ):
+ """
+ Change the membership status of a user in a room.
 Args:
- event (SynapseEvent): The membership event
+ event (SynapseEvent): The membership event.
+ context: The context of the event.
+ is_guest (bool): Whether the sender is a guest.
+ remote_room_hosts ([str]): Homeservers which are likely to already be
+ in the room, and which this homeserver can dance with in order
+ to join the room for the first time.
+ ratelimit (bool): Whether to rate limit this request.
+ from_client (bool): Whether this request is the result of a local
+ client request (rather than over federation). If so, we will
+ perform extra checks, like that this homeserver can act as this
+ client.
 Raises:
 SynapseError if there was a problem changing the membership.
 """
- target_user_id = event.state_key
+ target_user = UserID.from_string(event.state_key)
+ room_id = event.room_id
- prev_state = context.current_state.get(
- (EventTypes.Member, target_user_id),
- None
- )
+ if from_client:
+ sender = UserID.from_string(event.sender)
+ assert self.hs.is_mine(sender), "Sender must be our own: %s" % (sender,)
- room_id = event.room_id
+ message_handler = self.hs.get_handlers().message_handler
+ prev_event = message_handler.deduplicate_state_event(event, context)
+ if prev_event is not None:
+ return
- # If we're trying to join a room then we have to do this differently
- # if this HS is not currently in the room, i.e. we have to do the
- # invite/join dance.
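
The rewritten `send_membership_event` that follows routes each membership event down one of three paths. A condensed view of the routing (the real code also collects candidate remote hosts and vets guest joins first):

    def pick_action(membership, host_in_room):
        if membership == "join" and not host_in_room:
            return "remote_join"    # do the invite/join dance via another server
        if membership == "leave" and not host_in_room:
            return "remote_reject"  # rejecting an invite we only hold remotely
        return "send"               # normal local send

    assert pick_action("join", host_in_room=False) == "remote_join"
    assert pick_action("leave", host_in_room=True) == "send"
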
- if event.membership == Membership.JOIN: - if is_guest: - guest_access = context.current_state.get( - (EventTypes.GuestAccess, ""), - None - ) - is_guest_access_allowed = ( - guest_access - and guest_access.content - and "guest_access" in guest_access.content - and guest_access.content["guest_access"] == "can_join" - ) - if not is_guest_access_allowed: - raise AuthError(403, "Guest access not allowed") + action = "send" - yield self._do_join(event, context) - else: - if event.membership == Membership.LEAVE: - is_host_in_room = yield self.is_host_in_room(room_id, context) - if not is_host_in_room: - # Rejecting an invite, rather than leaving a joined room - handler = self.hs.get_handlers().federation_handler - inviter = yield self.get_inviter(event) - if not inviter: - # return the same error as join_room_alias does - raise SynapseError(404, "No known servers") - yield handler.do_remotely_reject_invite( - [inviter.domain], - room_id, - event.user_id - ) - defer.returnValue({"room_id": room_id}) - return - - # FIXME: This isn't idempotency. - if prev_state and prev_state.membership == event.membership: - # double same action, treat this event as a NOOP. - defer.returnValue({}) - return - - yield self._do_local_membership_update( - event, - context=context, + if event.membership == Membership.JOIN: + if is_guest and not self._can_guest_join(context.current_state): + # This should be an auth check, but guests are a local concept, + # so don't really fit into the general auth process. + raise AuthError(403, "Guest access not allowed") + do_remote_join_dance, remote_room_hosts = self._should_do_dance( + context, + (self.get_inviter(event.state_key, context.current_state)), + remote_room_hosts, ) + if do_remote_join_dance: + action = "remote_join" + elif event.membership == Membership.LEAVE: + is_host_in_room = self.is_host_in_room(context.current_state) + if not is_host_in_room: + action = "remote_reject" - if prev_state and prev_state.membership == Membership.JOIN: - user = UserID.from_string(event.user_id) - user_left_room(self.distributor, user, event.room_id) + federation_handler = self.hs.get_handlers().federation_handler - defer.returnValue({"room_id": room_id}) + if action == "remote_join": + if len(remote_room_hosts) == 0: + raise SynapseError(404, "No known servers") - @defer.inlineCallbacks - def join_room_alias(self, joinee, room_alias, content={}): - directory_handler = self.hs.get_handlers().directory_handler - mapping = yield directory_handler.get_association(room_alias) + # We don't do an auth check if we are doing an invite + # join dance for now, since we're kinda implicitly checking + # that we are allowed to join when we decide whether or not we + # need to do the invite/join dance. 
+ yield federation_handler.do_invite_join(
+ remote_room_hosts,
+ event.room_id,
+ event.user_id,
+ event.content,
+ )
+ elif action == "remote_reject":
+ inviter = self.get_inviter(target_user.to_string(), context.current_state)
+ if not inviter:
+ raise SynapseError(404, "No known servers")
+ yield federation_handler.do_remotely_reject_invite(
+ [inviter.domain],
+ room_id,
+ event.user_id
+ )
+ else:
+ yield self.handle_new_client_event(
+ event,
+ context,
+ extra_users=[target_user],
+ ratelimit=ratelimit,
+ )
+ prev_member_event = context.current_state.get(
+ (EventTypes.Member, target_user.to_string()),
+ None
+ )
+ if event.membership == Membership.JOIN:
+ if not prev_member_event or prev_member_event.membership != Membership.JOIN:
+ # Only fire user_joined_room if the user has actually joined the
+ # room. Don't bother if the user is just changing their profile
+ # info.
+ yield user_joined_room(self.distributor, target_user, room_id)
+ elif event.membership == Membership.LEAVE:
+ if prev_member_event and prev_member_event.membership == Membership.JOIN:
+ user_left_room(self.distributor, target_user, room_id)
+
+ def _can_guest_join(self, current_state):
+ """
+ Returns whether a guest can join a room based on its current state.
+ """
+ guest_access = current_state.get((EventTypes.GuestAccess, ""), None)
+ return (
+ guest_access
+ and guest_access.content
+ and "guest_access" in guest_access.content
+ and guest_access.content["guest_access"] == "can_join"
+ )
- # If event doesn't include a display name, add one.
- yield collect_presencelike_data(self.distributor, joinee, content)
+ def _should_do_dance(self, context, inviter, room_hosts=None):
+ # TODO: Shouldn't this be remote_room_host?
+ room_hosts = room_hosts or []
- content.update({"membership": Membership.JOIN})
- builder = self.event_builder_factory.new({
- "type": EventTypes.Member,
- "state_key": joinee.to_string(),
- "room_id": room_id,
- "sender": joinee.to_string(),
- "membership": Membership.JOIN,
- "content": content,
- })
- event, context = yield self._create_new_client_event(builder)
+ is_host_in_room = self.is_host_in_room(context.current_state)
+ if is_host_in_room:
+ return False, room_hosts
- yield self._do_join(event, context, room_hosts=hosts)
+ if inviter and not self.hs.is_mine(inviter):
+ room_hosts.append(inviter.domain)
- defer.returnValue({"room_id": room_id})
+ return True, room_hosts
 @defer.inlineCallbacks
- def _do_join(self, event, context, room_hosts=None):
- room_id = event.room_id
-
- # XXX: We don't do an auth check if we are doing an invite
- # join dance for now, since we're kinda implicitly checking
- # that we are allowed to join when we decide whether or not we
- # need to do the invite/join dance.
-
- is_host_in_room = yield self.is_host_in_room(room_id, context)
- if is_host_in_room:
- should_do_dance = False
- elif room_hosts: # TODO: Shouldn't this be remote_room_host?
- should_do_dance = True
- else:
- inviter = yield self.get_inviter(event)
- if not inviter:
- # return the same error as join_room_alias does
- raise SynapseError(404, "No known servers")
- should_do_dance = not self.hs.is_mine(inviter)
- room_hosts = [inviter.domain]
+ def lookup_room_alias(self, room_alias):
+ """
+ Get the room ID associated with a room alias.
- if should_do_dance: - handler = self.hs.get_handlers().federation_handler - yield handler.do_invite_join( - room_hosts, - room_id, - event.user_id, - event.content, - ) - else: - logger.debug("Doing normal join") + Args: + room_alias (RoomAlias): The alias to look up. + Returns: + A tuple of: + The room ID as a RoomID object. + Hosts likely to be participating in the room ([str]). + Raises: + SynapseError if room alias could not be found. + """ + directory_handler = self.hs.get_handlers().directory_handler + mapping = yield directory_handler.get_association(room_alias) - yield self._do_local_membership_update( - event, - context=context, - ) + if not mapping: + raise SynapseError(404, "No such room alias") - prev_state = context.current_state.get((event.type, event.state_key)) - if not prev_state or prev_state.membership != Membership.JOIN: - # Only fire user_joined_room if the user has acutally joined the - # room. Don't bother if the user is just changing their profile - # info. - user = UserID.from_string(event.user_id) - yield user_joined_room(self.distributor, user, room_id) + room_id = mapping["room_id"] + servers = mapping["servers"] - @defer.inlineCallbacks - def get_inviter(self, event): - # TODO(markjh): get prev_state from snapshot - prev_state = yield self.store.get_room_member( - event.user_id, event.room_id - ) + defer.returnValue((RoomID.from_string(room_id), servers)) + def get_inviter(self, user_id, current_state): + prev_state = current_state.get((EventTypes.Member, user_id)) if prev_state and prev_state.membership == Membership.INVITE: - defer.returnValue(UserID.from_string(prev_state.user_id)) - return - elif "third_party_invite" in event.content: - if "sender" in event.content["third_party_invite"]: - inviter = UserID.from_string( - event.content["third_party_invite"]["sender"] - ) - defer.returnValue(inviter) - defer.returnValue(None) - - @defer.inlineCallbacks - def is_host_in_room(self, room_id, context): - is_host_in_room = yield self.auth.check_host_in_room( - room_id, - self.hs.hostname - ) - if not is_host_in_room: - # is *anyone* in the room? - room_member_keys = [ - v for (k, v) in context.current_state.keys() if ( - k == "m.room.member" - ) - ] - if len(room_member_keys) == 0: - # has the room been created so we can join it? - create_event = context.current_state.get(("m.room.create", "")) - if create_event: - is_host_in_room = True - defer.returnValue(is_host_in_room) + return UserID.from_string(prev_state.user_id) + return None @defer.inlineCallbacks def get_joined_rooms_for_user(self, user): @@ -657,18 +644,6 @@ class RoomMemberHandler(BaseHandler): defer.returnValue(room_ids) @defer.inlineCallbacks - def _do_local_membership_update(self, event, context): - yield run_on_reactor() - - target_user = UserID.from_string(event.state_key) - - yield self.handle_new_client_event( - event, - context, - extra_users=[target_user], - ) - - @defer.inlineCallbacks def do_3pid_invite( self, room_id, @@ -676,7 +651,7 @@ class RoomMemberHandler(BaseHandler): medium, address, id_server, - token_id, + requester, txn_id ): invitee = yield self._lookup_3pid( @@ -684,19 +659,12 @@ class RoomMemberHandler(BaseHandler): ) if invitee: - # make sure it looks like a user ID; it'll throw if it's invalid. 
- UserID.from_string(invitee)
- yield self.hs.get_handlers().message_handler.create_and_send_event(
- {
- "type": EventTypes.Member,
- "content": {
- "membership": unicode("invite")
- },
- "room_id": room_id,
- "sender": inviter.to_string(),
- "state_key": invitee,
- },
- token_id=token_id,
+ handler = self.hs.get_handlers().room_member_handler
+ yield handler.update_membership(
+ requester,
+ UserID.from_string(invitee),
+ room_id,
+ "invite",
 txn_id=txn_id,
 )
 else:
@@ -706,7 +674,7 @@
 address,
 room_id,
 inviter,
- token_id,
+ requester.access_token_id,
 txn_id=txn_id
 )
@@ -801,7 +769,7 @@
 if room_avatar_event:
 room_avatar_url = room_avatar_event.content.get("url", "")
- token, public_key, key_validity_url, display_name = (
+ token, public_keys, fallback_public_key, display_name = (
 yield self._ask_id_server_for_third_party_invite(
 id_server=id_server,
 medium=medium,
@@ -816,14 +784,18 @@
 inviter_avatar_url=inviter_avatar_url
 )
 )
+
 msg_handler = self.hs.get_handlers().message_handler
- yield msg_handler.create_and_send_event(
+ yield msg_handler.create_and_send_nonmember_event(
 {
 "type": EventTypes.ThirdPartyInvite,
 "content": {
 "display_name": display_name,
- "key_validity_url": key_validity_url,
- "public_key": public_key,
+ "public_keys": public_keys,
+
+ # For backwards compatibility:
+ "key_validity_url": fallback_public_key["key_validity_url"],
+ "public_key": fallback_public_key["public_key"],
 },
 "room_id": room_id,
 "sender": user.to_string(),
@@ -848,6 +820,41 @@
 inviter_display_name,
 inviter_avatar_url
 ):
+ """
+ Asks an identity server for a third party invite.
+
+ :param id_server (str): hostname + optional port for the identity server.
+ :param medium (str): The literal string "email".
+ :param address (str): The third party address being invited.
+ :param room_id (str): The ID of the room to which the user is invited.
+ :param inviter_user_id (str): The user ID of the inviter.
+ :param room_alias (str): An alias for the room, for cosmetic
+ notifications.
+ :param room_avatar_url (str): The URL of the room's avatar, for cosmetic
+ notifications.
+ :param room_join_rules (str): The join rules of the room
+ (e.g. "public").
+ :param room_name (str): The m.room.name of the room.
+ :param inviter_display_name (str): The current display name of the
+ inviter.
+ :param inviter_avatar_url (str): The URL of the inviter's avatar.
+
+ :return: A deferred tuple containing:
+ token (str): The token which must be signed to prove authenticity.
+ public_keys ([{"public_key": str, "key_validity_url": str}]):
+ public_key is a base64-encoded ed25519 public key.
+ fallback_public_key: One element from public_keys.
+ display_name (str): A user-friendly name to represent the invited
+ user.
+ """ + + registration_handler = self.hs.get_handlers().registration_handler + guest_access_token = yield registration_handler.guest_access_token_for( + medium=medium, + address=address, + inviter_user_id=inviter_user_id, + ) + is_url = "%s%s/_matrix/identity/api/v1/store-invite" % ( id_server_scheme, id_server, ) @@ -864,16 +871,26 @@ class RoomMemberHandler(BaseHandler): "sender": inviter_user_id, "sender_display_name": inviter_display_name, "sender_avatar_url": inviter_avatar_url, + "guest_access_token": guest_access_token, } ) # TODO: Check for success token = data["token"] - public_key = data["public_key"] + public_keys = data.get("public_keys", []) + if "public_key" in data: + fallback_public_key = { + "public_key": data["public_key"], + "key_validity_url": "%s%s/_matrix/identity/api/v1/pubkey/isvalid" % ( + id_server_scheme, id_server, + ), + } + else: + fallback_public_key = public_keys[0] + + if not public_keys: + public_keys.append(fallback_public_key) display_name = data["display_name"] - key_validity_url = "%s%s/_matrix/identity/api/v1/pubkey/isvalid" % ( - id_server_scheme, id_server, - ) - defer.returnValue((token, public_key, key_validity_url, display_name)) + defer.returnValue((token, public_keys, fallback_public_key, display_name)) def forget(self, user, room_id): return self.store.forget(user.to_string(), room_id) @@ -1061,6 +1078,7 @@ class RoomEventSource(object): from_key=from_key, to_key=to_key, limit=limit or 10, + order='ASC', ) events = list(room_events) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 1d0f0058a2..fded6e4009 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -121,7 +121,11 @@ class SyncResult(collections.namedtuple("SyncResult", [ events. """ return bool( - self.presence or self.joined or self.invited or self.archived + self.presence or + self.joined or + self.invited or + self.archived or + self.account_data ) @@ -582,6 +586,28 @@ class SyncHandler(BaseHandler): if room_sync: joined.append(room_sync) + # For each newly joined room, we want to send down presence of + # existing users. + presence_handler = self.hs.get_handlers().presence_handler + extra_presence_users = set() + for room_id in newly_joined_rooms: + users = yield self.store.get_users_in_room(event.room_id) + extra_presence_users.update(users) + + # For each new member, send down presence. 
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 1d0f0058a2..fded6e4009 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -121,7 +121,11 @@ class SyncResult(collections.namedtuple("SyncResult", [
         events.
         """
         return bool(
-            self.presence or self.joined or self.invited or self.archived
+            self.presence or
+            self.joined or
+            self.invited or
+            self.archived or
+            self.account_data
         )
@@ -582,6 +586,28 @@ class SyncHandler(BaseHandler):
             if room_sync:
                 joined.append(room_sync)

+        # For each newly joined room, we want to send down presence of
+        # existing users.
+        presence_handler = self.hs.get_handlers().presence_handler
+        extra_presence_users = set()
+        for room_id in newly_joined_rooms:
+            users = yield self.store.get_users_in_room(room_id)
+            extra_presence_users.update(users)
+
+        # For each new member, send down presence.
+        for joined_sync in joined:
+            it = itertools.chain(joined_sync.timeline.events, joined_sync.state.values())
+            for event in it:
+                if event.type == EventTypes.Member:
+                    if event.membership == Membership.JOIN:
+                        extra_presence_users.add(event.state_key)
+
+        states = yield presence_handler.get_states(
+            [u for u in extra_presence_users if u != user_id],
+            as_event=True,
+        )
+        presence.extend(states)
+
         account_data_for_user = sync_config.filter_collection.filter_account_data(
             self.account_data_for_user(account_data)
         )
@@ -623,7 +649,6 @@ class SyncHandler(BaseHandler):
             recents = yield self._filter_events_for_client(
                 sync_config.user.to_string(),
                 recents,
-                is_peeking=sync_config.is_guest,
             )
         else:
             recents = []
@@ -645,7 +670,6 @@ class SyncHandler(BaseHandler):
             loaded_recents = yield self._filter_events_for_client(
                 sync_config.user.to_string(),
                 loaded_recents,
-                is_peeking=sync_config.is_guest,
             )
             loaded_recents.extend(recents)
             recents = loaded_recents
@@ -825,14 +849,20 @@ class SyncHandler(BaseHandler):
         with Measure(self.clock, "compute_state_delta"):
             if full_state:
                 if batch:
+                    current_state = yield self.store.get_state_for_event(
+                        batch.events[-1].event_id
+                    )
+
                     state = yield self.store.get_state_for_event(
                         batch.events[0].event_id
                     )
                 else:
-                    state = yield self.get_state_at(
+                    current_state = yield self.get_state_at(
                         room_id, stream_position=now_token
                     )
+                    state = current_state
+
                 timeline_state = {
                     (event.type, event.state_key): event
                     for event in batch.events if event.is_state()
@@ -842,12 +872,17 @@ class SyncHandler(BaseHandler):
                     timeline_contains=timeline_state,
                     timeline_start=state,
                     previous={},
+                    current=current_state,
                 )
             elif batch.limited:
                 state_at_previous_sync = yield self.get_state_at(
                     room_id, stream_position=since_token
                 )

+                current_state = yield self.store.get_state_for_event(
+                    batch.events[-1].event_id
+                )
+
                 state_at_timeline_start = yield self.store.get_state_for_event(
                     batch.events[0].event_id
                 )
@@ -861,6 +896,7 @@ class SyncHandler(BaseHandler):
                     timeline_contains=timeline_state,
                     timeline_start=state_at_timeline_start,
                     previous=state_at_previous_sync,
+                    current=current_state,
                 )
             else:
                 state = {}
@@ -920,7 +956,7 @@ def _action_has_highlight(actions):
     return False

-def _calculate_state(timeline_contains, timeline_start, previous):
+def _calculate_state(timeline_contains, timeline_start, previous, current):
     """Works out what state to include in a sync response.

     Args:
@@ -928,6 +964,7 @@ def _calculate_state(timeline_contains, timeline_start, previous):
         timeline_start (dict): state at the start of the timeline
         previous (dict): state at the end of the previous sync (or empty dict
             if this is an initial sync)
+        current (dict): state at the end of the timeline

     Returns:
         dict
@@ -938,14 +975,16 @@ def _calculate_state(timeline_contains, timeline_start, previous):
             timeline_contains.values(),
             previous.values(),
             timeline_start.values(),
+            current.values(),
         )
     }

+    c_ids = set(e.event_id for e in current.values())
     tc_ids = set(e.event_id for e in timeline_contains.values())
     p_ids = set(e.event_id for e in previous.values())
     ts_ids = set(e.event_id for e in timeline_start.values())

-    state_ids = (ts_ids - p_ids) - tc_ids
+    state_ids = ((c_ids | ts_ids) - p_ids) - tc_ids

     evs = (event_id_to_state[e] for e in state_ids)
     return {
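The new state_ids expression is the core of the sync change: the client should receive every state event that is either current at the end of the timeline or was in force at its start, except those it saw in the previous sync and those the timeline itself delivers. A toy run with made-up event IDs:

    c_ids = {"$power", "$topic_v2"}   # current state at the end of the timeline
    ts_ids = {"$power", "$topic_v1"}  # state at the start of the timeline
    p_ids = {"$power"}                # state already sent in the previous sync
    tc_ids = {"$topic_v2"}            # state events inside the timeline itself

    state_ids = ((c_ids | ts_ids) - p_ids) - tc_ids
    print(state_ids)  # {'$topic_v1'}: the only event the client still lacks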
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index b16d0017df..8ce27f49ec 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -25,6 +25,7 @@ from synapse.types import UserID
 import logging
 from collections import namedtuple
+import ujson as json

 logger = logging.getLogger(__name__)
@@ -219,6 +220,19 @@ class TypingNotificationHandler(BaseHandler):
             "typing_key", self._latest_room_serial, rooms=[room_id]
         )

+    def get_all_typing_updates(self, last_id, current_id):
+        # TODO: Work out a way to do this without scanning the entire state.
+        rows = []
+        for room_id, serial in self._room_serials.items():
+            if last_id < serial and serial <= current_id:
+                typing = self._room_typing[room_id]
+                typing_bytes = json.dumps([
+                    u.to_string() for u in typing
+                ], ensure_ascii=False)
+                rows.append((serial, room_id, typing_bytes))
+        rows.sort()
+        return rows
+

 class TypingNotificationEventSource(object):
     def __init__(self, hs):
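get_all_typing_updates feeds the replication stream: for every room whose typing serial falls in the half-open interval (last_id, current_id], it emits a (serial, room_id, JSON user list) row, sorted ascending by serial. A toy consumer, assuming nothing beyond the row shape above (apply_typing_rows is hypothetical; the stdlib json module would work in place of ujson):

    import ujson as json

    def apply_typing_rows(rows, room_typing):
        # Each row carries the full typing set for its room at that serial,
        # so applying rows in ascending order converges on current state.
        max_serial = 0
        for serial, room_id, typing_bytes in rows:
            room_typing[room_id] = set(json.loads(typing_bytes))
            max_serial = max(max_serial, serial)
        return max_serial  # pass back as last_id on the next poll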