author     Mark Haines <mark.haines@matrix.org>    2015-10-21 15:48:34 +0100
committer  Mark Haines <mark.haines@matrix.org>    2015-10-21 15:48:34 +0100
commit     5201c661082cb66e544c968ab3d5d97278509774 (patch)
tree       1a23e19f9be1c6139fbff4052b416261d6aadcd6 /synapse/handlers
parent     Doc string for the SyncHandler.typing_by_room method (diff)
parent     Merge pull request #316 from matrix-org/markjh/v2_sync_archived (diff)
download   synapse-5201c661082cb66e544c968ab3d5d97278509774.tar.xz
Merge branch 'develop' into markjh/v2_sync_typing
Conflicts:
	synapse/handlers/sync.py
Diffstat (limited to 'synapse/handlers')
-rw-r--r--  synapse/handlers/__init__.py   |   2
-rw-r--r--  synapse/handlers/federation.py | 209
-rw-r--r--  synapse/handlers/room.py       | 102
-rw-r--r--  synapse/handlers/search.py     |  93
-rw-r--r--  synapse/handlers/sync.py       | 130
5 files changed, 451 insertions, 85 deletions
diff --git a/synapse/handlers/__init__.py b/synapse/handlers/__init__.py
index 8725c3c420..87b4d381c7 100644
--- a/synapse/handlers/__init__.py
+++ b/synapse/handlers/__init__.py
@@ -32,6 +32,7 @@ from .sync import SyncHandler
 from .auth import AuthHandler
 from .identity import IdentityHandler
 from .receipts import ReceiptsHandler
+from .search import SearchHandler
 
 
 class Handlers(object):
@@ -68,3 +69,4 @@ class Handlers(object):
         self.sync_handler = SyncHandler(hs)
         self.auth_handler = AuthHandler(hs)
         self.identity_handler = IdentityHandler(hs)
+        self.search_handler = SearchHandler(hs)
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 946ff97c7d..ae9d227586 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -565,7 +565,7 @@ class FederationHandler(BaseHandler):
 
     @log_function
     @defer.inlineCallbacks
-    def do_invite_join(self, target_hosts, room_id, joinee, content, snapshot):
+    def do_invite_join(self, target_hosts, room_id, joinee, content):
         """ Attempts to join the `joinee` to the room `room_id` via the
         server `target_host`.
 
@@ -581,50 +581,19 @@ class FederationHandler(BaseHandler):
 
         yield self.store.clean_room_for_join(room_id)
 
-        origin, pdu = yield self.replication_layer.make_join(
+        origin, event = yield self._make_and_verify_event(
             target_hosts,
             room_id,
             joinee,
+            "join",
             content
         )
 
-        logger.debug("Got response to make_join: %s", pdu)
-
-        event = pdu
-
-        # We should assert some things.
-        # FIXME: Do this in a nicer way
-        assert(event.type == EventTypes.Member)
-        assert(event.user_id == joinee)
-        assert(event.state_key == joinee)
-        assert(event.room_id == room_id)
-
-        event.internal_metadata.outlier = False
-
         self.room_queues[room_id] = []
-
-        builder = self.event_builder_factory.new(
-            unfreeze(event.get_pdu_json())
-        )
-
         handled_events = set()
 
         try:
-            builder.event_id = self.event_builder_factory.create_event_id()
-            builder.origin = self.hs.hostname
-            builder.content = content
-
-            if not hasattr(event, "signatures"):
-                builder.signatures = {}
-
-            add_hashes_and_signatures(
-                builder,
-                self.hs.hostname,
-                self.hs.config.signing_key[0],
-            )
-
-            new_event = builder.build()
-
+            new_event = self._sign_event(event)
             # Try the host we successfully got a response to /make_join/
             # request first.
             try:
@@ -632,11 +601,7 @@ class FederationHandler(BaseHandler):
                 target_hosts.insert(0, origin)
             except ValueError:
                 pass
-
-            ret = yield self.replication_layer.send_join(
-                target_hosts,
-                new_event
-            )
+            ret = yield self.replication_layer.send_join(target_hosts, new_event)
 
             origin = ret["origin"]
             state = ret["state"]
@@ -700,7 +665,7 @@ class FederationHandler(BaseHandler):
     @log_function
     def on_make_join_request(self, room_id, user_id, query):
         """ We've received a /make_join/ request, so we create a partial
-        join event for the room and return that. We don *not* persist or
+        join event for the room and return that. We do *not* persist or
         process it until the other server has signed it and sent it back.
         """
         event_content = {"membership": Membership.JOIN}
@@ -860,6 +825,168 @@ class FederationHandler(BaseHandler):
         defer.returnValue(event)
 
     @defer.inlineCallbacks
+    def do_remotely_reject_invite(self, target_hosts, room_id, user_id):
+        origin, event = yield self._make_and_verify_event(
+            target_hosts,
+            room_id,
+            user_id,
+            "leave",
+            {}
+        )
+        signed_event = self._sign_event(event)
+
+        # Try the host we successfully got a response to /make_leave/
+        # request first.
+        try:
+            target_hosts.remove(origin)
+            target_hosts.insert(0, origin)
+        except ValueError:
+            pass
+
+        yield self.replication_layer.send_leave(
+            target_hosts,
+            signed_event
+        )
+        defer.returnValue(None)
+
+    @defer.inlineCallbacks
+    def _make_and_verify_event(self, target_hosts, room_id, user_id, membership, content):
+        origin, pdu = yield self.replication_layer.make_membership_event(
+            target_hosts,
+            room_id,
+            user_id,
+            membership,
+            content
+        )
+
+        logger.debug("Got response to make_%s: %s", membership, pdu)
+
+        event = pdu
+
+        # We should assert some things.
+        # FIXME: Do this in a nicer way
+        assert(event.type == EventTypes.Member)
+        assert(event.user_id == user_id)
+        assert(event.state_key == user_id)
+        assert(event.room_id == room_id)
+        defer.returnValue((origin, event))
+
+    def _sign_event(self, event):
+        event.internal_metadata.outlier = False
+
+        builder = self.event_builder_factory.new(
+            unfreeze(event.get_pdu_json())
+        )
+
+        builder.event_id = self.event_builder_factory.create_event_id()
+        builder.origin = self.hs.hostname
+
+        if not hasattr(event, "signatures"):
+            builder.signatures = {}
+
+        add_hashes_and_signatures(
+            builder,
+            self.hs.hostname,
+            self.hs.config.signing_key[0],
+        )
+
+        return builder.build()
+
+    @defer.inlineCallbacks
+    @log_function
+    def on_make_leave_request(self, room_id, user_id):
+        """ We've received a /make_leave/ request, so we create a partial
+        leave event for the room and return that. We do *not* persist or
+        process it until the other server has signed it and sent it back.
+        """
+        builder = self.event_builder_factory.new({
+            "type": EventTypes.Member,
+            "content": {"membership": Membership.LEAVE},
+            "room_id": room_id,
+            "sender": user_id,
+            "state_key": user_id,
+        })
+
+        event, context = yield self._create_new_client_event(
+            builder=builder,
+        )
+
+        self.auth.check(event, auth_events=context.current_state)
+
+        defer.returnValue(event)
+
+    @defer.inlineCallbacks
+    @log_function
+    def on_send_leave_request(self, origin, pdu):
+        """ We have received a leave event for a room. Fully process it."""
+        event = pdu
+
+        logger.debug(
+            "on_send_leave_request: Got event: %s, signatures: %s",
+            event.event_id,
+            event.signatures,
+        )
+
+        event.internal_metadata.outlier = False
+
+        context, event_stream_id, max_stream_id = yield self._handle_new_event(
+            origin, event
+        )
+
+        logger.debug(
+            "on_send_leave_request: After _handle_new_event: %s, sigs: %s",
+            event.event_id,
+            event.signatures,
+        )
+
+        extra_users = []
+        if event.type == EventTypes.Member:
+            target_user_id = event.state_key
+            target_user = UserID.from_string(target_user_id)
+            extra_users.append(target_user)
+
+        with PreserveLoggingContext():
+            d = self.notifier.on_new_room_event(
+                event, event_stream_id, max_stream_id, extra_users=extra_users
+            )
+
+        def log_failure(f):
+            logger.warn(
+                "Failed to notify about %s: %s",
+                event.event_id, f.value
+            )
+
+        d.addErrback(log_failure)
+
+        new_pdu = event
+
+        destinations = set()
+
+        for k, s in context.current_state.items():
+            try:
+                if k[0] == EventTypes.Member:
+                    if s.content["membership"] == Membership.LEAVE:
+                        destinations.add(
+                            UserID.from_string(s.state_key).domain
+                        )
+            except:
+                logger.warn(
+                    "Failed to get destination from event %s", s.event_id
+                )
+
+        destinations.discard(origin)
+
+        logger.debug(
+            "on_send_leave_request: Sending event: %s, signatures: %s",
+            event.event_id,
+            event.signatures,
+        )
+
+        self.replication_layer.send_pdu(new_pdu, destinations)
+
+        defer.returnValue(None)
+
+    @defer.inlineCallbacks
     def get_state_for_pdu(self, origin, room_id, event_id, do_auth=True):
         yield run_on_reactor()
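
The new leave-handling methods above reuse the two-phase membership exchange that joins already follow: a partial event is templated by a remote server (/make_leave/), signed locally, and only then sent back for full processing (/send_leave/). Below is a minimal sketch of that client-side ordering, with the replication layer stubbed out and all concrete values invented; it illustrates the sequence of calls only, not real federation traffic.

# Illustrative sketch: mirrors do_remotely_reject_invite with a stubbed
# replication layer. Host names, room id and user id are made up.
class StubReplicationLayer:
    def make_membership_event(self, hosts, room_id, user_id, membership, content):
        # A real remote server builds a partial membership event for us.
        return hosts[0], {"type": "m.room.member", "room_id": room_id,
                          "state_key": user_id, "membership": membership}

    def send_leave(self, hosts, signed_event):
        print("sending signed leave event to %r" % (hosts,))

def remotely_reject_invite(replication, target_hosts, room_id, user_id):
    # Step 1: ask a remote server to template the leave event (/make_leave/).
    origin, event = replication.make_membership_event(
        target_hosts, room_id, user_id, "leave", {}
    )
    # Step 2: sign the event locally (the diff does this in _sign_event) ...
    signed_event = dict(event, signatures={"local.example": "..."})
    # ... then prefer the host that answered /make_leave/ when sending it back.
    hosts = [origin] + [h for h in target_hosts if h != origin]
    replication.send_leave(hosts, signed_event)

remotely_reject_invite(
    StubReplicationLayer(),
    ["remote.example"], "!abc123:remote.example", "@alice:local.example",
)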
 
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 3f0cde56f0..60f9fa58b0 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -389,7 +389,22 @@ class RoomMemberHandler(BaseHandler):
         if event.membership == Membership.JOIN:
             yield self._do_join(event, context, do_auth=do_auth)
         else:
-            # This is not a JOIN, so we can handle it normally.
+            if event.membership == Membership.LEAVE:
+                is_host_in_room = yield self.is_host_in_room(room_id, context)
+                if not is_host_in_room:
+                    # Rejecting an invite, rather than leaving a joined room
+                    handler = self.hs.get_handlers().federation_handler
+                    inviter = yield self.get_inviter(event)
+                    if not inviter:
+                        # return the same error as join_room_alias does
+                        raise SynapseError(404, "No known servers")
+                    yield handler.do_remotely_reject_invite(
+                        [inviter.domain],
+                        room_id,
+                        event.user_id
+                    )
+                    defer.returnValue({"room_id": room_id})
+                    return
 
             # FIXME: This isn't idempotency.
             if prev_state and prev_state.membership == event.membership:
@@ -413,7 +428,7 @@ class RoomMemberHandler(BaseHandler):
         defer.returnValue({"room_id": room_id})
 
     @defer.inlineCallbacks
-    def join_room_alias(self, joinee, room_alias, do_auth=True, content={}):
+    def join_room_alias(self, joinee, room_alias, content={}):
         directory_handler = self.hs.get_handlers().directory_handler
         mapping = yield directory_handler.get_association(room_alias)
 
@@ -447,8 +462,6 @@ class RoomMemberHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def _do_join(self, event, context, room_hosts=None, do_auth=True):
-        joinee = UserID.from_string(event.state_key)
-        # room_id = RoomID.from_string(event.room_id, self.hs)
         room_id = event.room_id
 
         # XXX: We don't do an auth check if we are doing an invite
@@ -456,48 +469,18 @@ class RoomMemberHandler(BaseHandler):
         # that we are allowed to join when we decide whether or not we
         # need to do the invite/join dance.
 
-        is_host_in_room = yield self.auth.check_host_in_room(
-            event.room_id,
-            self.hs.hostname
-        )
-        if not is_host_in_room:
-            # is *anyone* in the room?
-            room_member_keys = [
-                v for (k, v) in context.current_state.keys() if (
-                    k == "m.room.member"
-                )
-            ]
-            if len(room_member_keys) == 0:
-                # has the room been created so we can join it?
-                create_event = context.current_state.get(("m.room.create", ""))
-                if create_event:
-                    is_host_in_room = True
-
+        is_host_in_room = yield self.is_host_in_room(room_id, context)
         if is_host_in_room:
             should_do_dance = False
         elif room_hosts:  # TODO: Shouldn't this be remote_room_host?
             should_do_dance = True
         else:
-            # TODO(markjh): get prev_state from snapshot
-            prev_state = yield self.store.get_room_member(
-                joinee.to_string(), room_id
-            )
-
-            if prev_state and prev_state.membership == Membership.INVITE:
-                inviter = UserID.from_string(prev_state.user_id)
-
-                should_do_dance = not self.hs.is_mine(inviter)
-                room_hosts = [inviter.domain]
-            elif "third_party_invite" in event.content:
-                if "sender" in event.content["third_party_invite"]:
-                    inviter = UserID.from_string(
-                        event.content["third_party_invite"]["sender"]
-                    )
-                    should_do_dance = not self.hs.is_mine(inviter)
-                    room_hosts = [inviter.domain]
-            else:
+            inviter = yield self.get_inviter(event)
+            if not inviter:
                 # return the same error as join_room_alias does
                 raise SynapseError(404, "No known servers")
+            should_do_dance = not self.hs.is_mine(inviter)
+            room_hosts = [inviter.domain]
 
         if should_do_dance:
             handler = self.hs.get_handlers().federation_handler
@@ -505,8 +488,7 @@ class RoomMemberHandler(BaseHandler):
                 room_hosts,
                 room_id,
                 event.user_id,
-                event.content,  # FIXME To get a non-frozen dict
-                context
+                event.content  # FIXME To get a non-frozen dict
             )
         else:
             logger.debug("Doing normal join")
@@ -524,6 +506,44 @@ class RoomMemberHandler(BaseHandler):
         )
 
     @defer.inlineCallbacks
+    def get_inviter(self, event):
+        # TODO(markjh): get prev_state from snapshot
+        prev_state = yield self.store.get_room_member(
+            event.user_id, event.room_id
+        )
+
+        if prev_state and prev_state.membership == Membership.INVITE:
+            defer.returnValue(UserID.from_string(prev_state.user_id))
+            return
+        elif "third_party_invite" in event.content:
+            if "sender" in event.content["third_party_invite"]:
+                inviter = UserID.from_string(
+                    event.content["third_party_invite"]["sender"]
+                )
+                defer.returnValue(inviter)
+        defer.returnValue(None)
+
+    @defer.inlineCallbacks
+    def is_host_in_room(self, room_id, context):
+        is_host_in_room = yield self.auth.check_host_in_room(
+            room_id,
+            self.hs.hostname
+        )
+        if not is_host_in_room:
+            # is *anyone* in the room?
+            room_member_keys = [
+                v for (k, v) in context.current_state.keys() if (
+                    k == "m.room.member"
+                )
+            ]
+            if len(room_member_keys) == 0:
+                # has the room been created so we can join it?
+                create_event = context.current_state.get(("m.room.create", ""))
+                if create_event:
+                    is_host_in_room = True
+        defer.returnValue(is_host_in_room)
+
+    @defer.inlineCallbacks
     def get_joined_rooms_for_user(self, user):
         """Returns a list of roomids that the user has any of the given
         membership states in."""
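
The leave branch added to RoomMemberHandler above makes one decision: if this homeserver is already in the room, the leave is an ordinary local membership change; if not, the event is really an invite rejection and has to be carried out remotely, normally via the inviter's server. A toy sketch of that routing, with hypothetical helper names (only the branching mirrors the diff):

def route_leave(is_host_in_room, inviter_domain):
    if not is_host_in_room:
        # Rejecting an invite rather than leaving a joined room: a remote
        # server has to accept the leave on our behalf.
        if inviter_domain is None:
            raise RuntimeError("No known servers")  # the diff raises SynapseError(404, ...)
        return ("remote_reject", [inviter_domain])
    # Normal case: we are in the room, so handle the leave locally.
    return ("local_leave", None)

print(route_leave(False, "inviter.example"))  # ('remote_reject', ['inviter.example'])
print(route_leave(True, None))                # ('local_leave', None)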
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
new file mode 100644
index 0000000000..22808b9c07
--- /dev/null
+++ b/synapse/handlers/search.py
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from ._base import BaseHandler
+
+from synapse.api.constants import Membership
+from synapse.api.errors import SynapseError
+from synapse.events.utils import serialize_event
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class SearchHandler(BaseHandler):
+
+    def __init__(self, hs):
+        super(SearchHandler, self).__init__(hs)
+
+    @defer.inlineCallbacks
+    def search(self, user, content):
+        """Performs a full text search for a user.
+
+        Args:
+            user (UserID)
+            content (dict): Search parameters
+
+        Returns:
+            dict to be returned to the client with results of search
+        """
+
+        try:
+            search_term = content["search_categories"]["room_events"]["search_term"]
+            keys = content["search_categories"]["room_events"].get("keys", [
+                "content.body", "content.name", "content.topic",
+            ])
+        except KeyError:
+            raise SynapseError(400, "Invalid search query")
+
+        # TODO: Search through left rooms too
+        rooms = yield self.store.get_rooms_for_user_where_membership_is(
+            user.to_string(),
+            membership_list=[Membership.JOIN],
+            # membership_list=[Membership.JOIN, Membership.LEAVE, Membership.BAN],
+        )
+        room_ids = set(r.room_id for r in rooms)
+
+        # TODO: Apply room filter to rooms list
+
+        rank_map, event_map = yield self.store.search_msgs(room_ids, search_term, keys)
+
+        allowed_events = yield self._filter_events_for_client(
+            user.to_string(), event_map.values()
+        )
+
+        # TODO: Filter allowed_events
+        # TODO: Add a limit
+
+        time_now = self.clock.time_msec()
+
+        results = {
+            e.event_id: {
+                "rank": rank_map[e.event_id],
+                "result": serialize_event(e, time_now)
+            }
+            for e in allowed_events
+        }
+
+        logger.info("Found %d results", len(results))
+
+        defer.returnValue({
+            "search_categories": {
+                "room_events": {
+                    "results": results,
+                    "count": len(results)
+                }
+            }
+        })
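
The request body the new SearchHandler.search expects and the shape of the dict it returns can be read off the code above; the values below are invented for illustration.

# Example search request body (illustrative values).
request = {
    "search_categories": {
        "room_events": {
            "search_term": "pizza",
            # "keys" is optional; the handler defaults to these three.
            "keys": ["content.body", "content.name", "content.topic"],
        }
    }
}

# Shape of the dict returned at the end of SearchHandler.search.
response = {
    "search_categories": {
        "room_events": {
            "results": {
                "$143273582443PhrSn:example.org": {
                    "rank": 0.73,                          # from rank_map
                    "result": {"type": "m.room.message"},  # serialize_event() output, truncated
                },
            },
            "count": 1,
        }
    }
}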
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index e651b49987..b8e2c81969 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -61,18 +61,37 @@ class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [
         return bool(self.timeline or self.state or self.ephemeral)
 
 
+class ArchivedSyncResult(collections.namedtuple("ArchivedSyncResult", [
+    "room_id",
+    "timeline",
+    "state",
+])):
+    __slots__ = []
+
+    def __nonzero__(self):
+        """Make the result appear empty if there are no updates. This is used
+        to tell if the room needs to be part of the sync result.
+        """
+        return bool(self.timeline or self.state)
+
+
 class InvitedSyncResult(collections.namedtuple("InvitedSyncResult", [
     "room_id",
     "invite",
 ])):
     __slots__ = []
 
+    def __nonzero__(self):
+        """Invited rooms should always be reported to the client"""
+        return True
+
 
 class SyncResult(collections.namedtuple("SyncResult", [
     "next_batch",  # Token for the next sync
     "presence",  # List of presence events for the user.
     "joined",  # JoinedSyncResult for each joined room.
     "invited",  # InvitedSyncResult for each invited room.
+    "archived",  # ArchivedSyncResult for each archived room.
 ])):
     __slots__ = []
 
@@ -160,11 +179,17 @@ class SyncHandler(BaseHandler):
         )
         room_list = yield self.store.get_rooms_for_user_where_membership_is(
             user_id=sync_config.user.to_string(),
-            membership_list=[Membership.INVITE, Membership.JOIN]
+            membership_list=(
+                Membership.INVITE,
+                Membership.JOIN,
+                Membership.LEAVE,
+                Membership.BAN
+            )
         )
 
         joined = []
         invited = []
+        archived = []
         for event in room_list:
             if event.membership == Membership.JOIN:
                 room_sync = yield self.initial_sync_for_joined_room(
@@ -177,11 +202,23 @@ class SyncHandler(BaseHandler):
                     room_id=event.room_id,
                     invite=invite,
                 ))
+            elif event.membership in (Membership.LEAVE, Membership.BAN):
+                leave_token = now_token.copy_and_replace(
+                    "room_key", "s%d" % (event.stream_ordering,)
+                )
+                room_sync = yield self.initial_sync_for_archived_room(
+                    sync_config=sync_config,
+                    room_id=event.room_id,
+                    leave_event_id=event.event_id,
+                    leave_token=leave_token,
+                )
+                archived.append(room_sync)
 
         defer.returnValue(SyncResult(
             presence=presence,
             joined=joined,
             invited=invited,
+            archived=archived,
             next_batch=now_token,
         ))
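
The archived branch above derives a pagination token that stops at the user's leave event by swapping the room_key of the current token for the leave event's stream ordering. A toy illustration of that substitution follows; FakeToken and the stream ordering are stand-ins for the real token object, which only needs to provide copy_and_replace here.

class FakeToken:
    def __init__(self, room_key):
        self.room_key = room_key

    def copy_and_replace(self, key, value):
        # The real token replaces one component and keeps the rest.
        assert key == "room_key"
        return FakeToken(value)

now_token = FakeToken("s100")
leave_stream_ordering = 42  # invented value for the user's leave event

# Mirrors the diff: recents for an archived room are loaded only up to
# the point at which the user left (or was banned).
leave_token = now_token.copy_and_replace(
    "room_key", "s%d" % (leave_stream_ordering,)
)
print(leave_token.room_key)  # "s42"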
 
@@ -241,6 +278,28 @@ class SyncHandler(BaseHandler):
         defer.returnValue((now_token, typing_by_room))
 
     @defer.inlineCallbacks
+    def initial_sync_for_archived_room(self, room_id, sync_config,
+                                       leave_event_id, leave_token):
+        """Sync a room for a client which is starting without any state
+        Returns:
+            A Deferred JoinedSyncResult.
+        """
+
+        batch = yield self.load_filtered_recents(
+            room_id, sync_config, leave_token,
+        )
+
+        leave_state = yield self.store.get_state_for_events(
+            [leave_event_id], None
+        )
+
+        defer.returnValue(ArchivedSyncResult(
+            room_id=room_id,
+            timeline=batch,
+            state=leave_state[leave_event_id].values(),
+        ))
+
+    @defer.inlineCallbacks
     def incremental_sync_with_gap(self, sync_config, since_token):
         """ Get the incremental delta needed to bring the client up to
         date with the server.
@@ -284,18 +343,22 @@ class SyncHandler(BaseHandler):
         )
 
         joined = []
+        archived = []
         if len(room_events) <= timeline_limit:
             # There is no gap in any of the rooms. Therefore we can just
             # partition the new events by room and return them.
             invite_events = []
+            leave_events = []
             events_by_room_id = {}
             for event in room_events:
                 events_by_room_id.setdefault(event.room_id, []).append(event)
                 if event.room_id not in joined_room_ids:
                     if (event.type == EventTypes.Member
-                            and event.membership == Membership.INVITE
                             and event.state_key == sync_config.user.to_string()):
-                        invite_events.append(event)
+                        if event.membership == Membership.INVITE:
+                            invite_events.append(event)
+                        elif event.membership in (Membership.LEAVE, Membership.BAN):
+                            leave_events.append(event)
 
             for room_id in joined_room_ids:
                 recents = events_by_room_id.get(room_id, [])
@@ -323,11 +386,16 @@ class SyncHandler(BaseHandler):
                 )
                 if room_sync:
                     joined.append(room_sync)
+
         else:
             invite_events = yield self.store.get_invites_for_user(
                 sync_config.user.to_string()
             )
 
+            leave_events = yield self.store.get_leave_and_ban_events_for_user(
+                sync_config.user.to_string()
+            )
+
             for room_id in joined_room_ids:
                 room_sync = yield self.incremental_sync_with_gap_for_room(
                     room_id, sync_config, since_token, now_token,
@@ -336,6 +404,12 @@ class SyncHandler(BaseHandler):
                 if room_sync:
                     joined.append(room_sync)
 
+        for leave_event in leave_events:
+            room_sync = yield self.incremental_sync_for_archived_room(
+                sync_config, leave_event, since_token
+            )
+            archived.append(room_sync)
+
         invited = [
             InvitedSyncResult(room_id=event.room_id, invite=event)
             for event in invite_events
@@ -345,6 +419,7 @@ class SyncHandler(BaseHandler):
             presence=presence,
             joined=joined,
             invited=invited,
+            archived=archived,
             next_batch=now_token,
         ))
 
@@ -444,6 +519,55 @@ class SyncHandler(BaseHandler):
         defer.returnValue(room_sync)
 
     @defer.inlineCallbacks
+    def incremental_sync_for_archived_room(self, sync_config, leave_event,
+                                           since_token):
+        """ Get the incremental delta needed to bring the client up to date for
+        the archived room.
+        Returns:
+            A Deferred ArchivedSyncResult
+        """
+
+        stream_token = yield self.store.get_stream_token_for_event(
+            leave_event.event_id
+        )
+
+        leave_token = since_token.copy_and_replace("room_key", stream_token)
+
+        batch = yield self.load_filtered_recents(
+            leave_event.room_id, sync_config, leave_token, since_token,
+        )
+
+        logging.debug("Recents %r", batch)
+
+        # TODO(mjark): This seems racy since this isn't being passed a
+        # token to indicate what point in the stream this is
+        leave_state = yield self.store.get_state_for_events(
+            [leave_event.event_id], None
+        )
+
+        state_events_at_leave = leave_state[leave_event.event_id].values()
+
+        state_at_previous_sync = yield self.get_state_at_previous_sync(
+            leave_event.room_id, since_token=since_token
+        )
+
+        state_events_delta = yield self.compute_state_delta(
+            since_token=since_token,
+            previous_state=state_at_previous_sync,
+            current_state=state_events_at_leave,
+        )
+
+        room_sync = ArchivedSyncResult(
+            room_id=leave_event.room_id,
+            timeline=batch,
+            state=state_events_delta,
+        )
+
+        logging.debug("Room sync: %r", room_sync)
+
+        defer.returnValue(room_sync)
+
+    @defer.inlineCallbacks
     def get_state_at_previous_sync(self, room_id, since_token):
         """ Get the room state at the previous sync the client made.
         Returns: