Diffstat (limited to 'synapse/handlers')
-rw-r--r--  synapse/handlers/_base.py             |  18
-rw-r--r--  synapse/handlers/events.py            |  10
-rw-r--r--  synapse/handlers/federation.py        |  99
-rw-r--r--  synapse/handlers/message.py           |  47
-rw-r--r--  synapse/handlers/presence.py          |   4
-rw-r--r--  synapse/handlers/private_user_data.py |   2
-rw-r--r--  synapse/handlers/receipts.py          |   6
-rw-r--r--  synapse/handlers/room.py              |  30
-rw-r--r--  synapse/handlers/search.py            | 201
-rw-r--r--  synapse/handlers/sync.py              |  20
-rw-r--r--  synapse/handlers/typing.py            |  11
11 files changed, 357 insertions, 91 deletions
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index 6873a4575d..eef325a94b 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -21,7 +21,6 @@ from synapse.api.constants import Membership, EventTypes
 from synapse.types import UserID, RoomAlias
 
 from synapse.util.logcontext import PreserveLoggingContext
-from synapse.util import third_party_invites
 
 import logging
 
@@ -47,7 +46,8 @@ class BaseHandler(object):
         self.event_builder_factory = hs.get_event_builder_factory()
 
     @defer.inlineCallbacks
-    def _filter_events_for_client(self, user_id, events, is_guest=False):
+    def _filter_events_for_client(self, user_id, events, is_guest=False,
+                                  require_all_visible_for_guests=True):
         # Assumes that user has at some point joined the room if not is_guest.
 
         def allowed(event, membership, visibility):
@@ -100,7 +100,9 @@ class BaseHandler(object):
             if should_include:
                 events_to_return.append(event)
 
-        if is_guest and len(events_to_return) < len(events):
+        if (require_all_visible_for_guests
+                and is_guest
+                and len(events_to_return) < len(events)):
             # This indicates that some events in the requested range were not
             # visible to guest users. To be safe, we reject the entire request,
             # so that we don't have to worry about interpreting visibility
@@ -189,16 +191,6 @@ class BaseHandler(object):
                         )
                     )
 
-        if (
-            event.type == EventTypes.Member and
-            event.content["membership"] == Membership.JOIN and
-            third_party_invites.join_has_third_party_invite(event.content)
-        ):
-            yield third_party_invites.check_key_valid(
-                self.hs.get_simple_http_client(),
-                event
-            )
-
         federation_handler = self.hs.get_handlers().federation_handler
 
         if event.type == EventTypes.Member:
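The new require_all_visible_for_guests flag controls how partial visibility is handled for guest users. A minimal sketch of the two behaviours from a hypothetical caller (the surrounding handler code is assumed, not shown in this diff):

    # Default: if any event in the requested range is not visible to the
    # guest, the whole request is rejected.
    events = yield self._filter_events_for_client(
        user_id, events, is_guest=True,
    )

    # A caller that can tolerate partial results opts out and simply gets
    # back the subset of events the guest is allowed to see.
    events = yield self._filter_events_for_client(
        user_id, events, is_guest=True,
        require_all_visible_for_guests=False,
    )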
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index 53c8ca3a26..0e4c0d4d06 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -100,7 +100,7 @@ class EventStreamHandler(BaseHandler):
     @log_function
     def get_stream(self, auth_user_id, pagin_config, timeout=0,
                    as_client_event=True, affect_presence=True,
-                   only_room_events=False):
+                   only_room_events=False, room_id=None, is_guest=False):
         """Fetches the events stream for a given user.
 
         If `only_room_events` is `True` only room events will be returned.
@@ -119,9 +119,15 @@ class EventStreamHandler(BaseHandler):
                 # thundering herds on restart.
                 timeout = random.randint(int(timeout*0.9), int(timeout*1.1))
 
+            if is_guest:
+                yield self.distributor.fire(
+                    "user_joined_room", user=auth_user, room_id=room_id
+                )
+
             events, tokens = yield self.notifier.get_events_for(
                 auth_user, pagin_config, timeout,
-                only_room_events=only_room_events
+                only_room_events=only_room_events,
+                is_guest=is_guest, guest_room_id=room_id
             )
 
             time_now = self.clock.time_msec()
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index b2395b28d1..872051b8b9 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -21,6 +21,7 @@ from synapse.api.errors import (
     AuthError, FederationError, StoreError, CodeMessageException, SynapseError,
 )
 from synapse.api.constants import EventTypes, Membership, RejectedReason
+from synapse.events.validator import EventValidator
 from synapse.util import unwrapFirstError
 from synapse.util.logcontext import PreserveLoggingContext
 from synapse.util.logutils import log_function
@@ -39,7 +40,6 @@ from twisted.internet import defer
 
 import itertools
 import logging
-from synapse.util import third_party_invites
 
 logger = logging.getLogger(__name__)
 
@@ -58,6 +58,8 @@ class FederationHandler(BaseHandler):
     def __init__(self, hs):
         super(FederationHandler, self).__init__(hs)
 
+        self.hs = hs
+
         self.distributor.observe(
             "user_joined_room",
             self._on_user_joined
@@ -68,7 +70,6 @@ class FederationHandler(BaseHandler):
         self.store = hs.get_datastore()
         self.replication_layer = hs.get_replication_layer()
         self.state_handler = hs.get_state_handler()
-        # self.auth_handler = gs.get_auth_handler()
         self.server_name = hs.hostname
         self.keyring = hs.get_keyring()
 
@@ -563,7 +564,7 @@ class FederationHandler(BaseHandler):
 
     @log_function
     @defer.inlineCallbacks
-    def do_invite_join(self, target_hosts, room_id, joinee, content):
+    def do_invite_join(self, target_hosts, room_id, joinee):
         """ Attempts to join the `joinee` to the room `room_id` via the
         server `target_host`.
 
@@ -583,8 +584,7 @@ class FederationHandler(BaseHandler):
             target_hosts,
             room_id,
             joinee,
-            "join",
-            content
+            "join"
         )
 
         self.room_queues[room_id] = []
@@ -661,16 +661,12 @@ class FederationHandler(BaseHandler):
 
     @defer.inlineCallbacks
     @log_function
-    def on_make_join_request(self, room_id, user_id, query):
+    def on_make_join_request(self, room_id, user_id):
         """ We've received a /make_join/ request, so we create a partial
         join event for the room and return that. We do *not* persist or
         process it until the other server has signed it and sent it back.
         """
         event_content = {"membership": Membership.JOIN}
-        if third_party_invites.has_join_keys(query):
-            event_content["third_party_invite"] = (
-                third_party_invites.extract_join_keys(query)
-            )
 
         builder = self.event_builder_factory.new({
             "type": EventTypes.Member,
@@ -686,9 +682,6 @@ class FederationHandler(BaseHandler):
 
         self.auth.check(event, auth_events=context.current_state)
 
-        if third_party_invites.join_has_third_party_invite(event.content):
-            third_party_invites.check_key_valid(self.hs.get_simple_http_client(), event)
-
         defer.returnValue(event)
 
     @defer.inlineCallbacks
@@ -828,8 +821,7 @@ class FederationHandler(BaseHandler):
             target_hosts,
             room_id,
             user_id,
-            "leave",
-            {}
+            "leave"
         )
         signed_event = self._sign_event(event)
 
@@ -848,13 +840,12 @@ class FederationHandler(BaseHandler):
         defer.returnValue(None)
 
     @defer.inlineCallbacks
-    def _make_and_verify_event(self, target_hosts, room_id, user_id, membership, content):
+    def _make_and_verify_event(self, target_hosts, room_id, user_id, membership):
         origin, pdu = yield self.replication_layer.make_membership_event(
             target_hosts,
             room_id,
             user_id,
-            membership,
-            content
+            membership
         )
 
         logger.debug("Got response to make_%s: %s", membership, pdu)
@@ -1647,3 +1638,75 @@ class FederationHandler(BaseHandler):
             },
             "missing": [e.event_id for e in missing_locals],
         })
+
+    @defer.inlineCallbacks
+    @log_function
+    def exchange_third_party_invite(self, invite):
+        sender = invite["sender"]
+        room_id = invite["room_id"]
+
+        event_dict = {
+            "type": EventTypes.Member,
+            "content": {
+                "membership": Membership.INVITE,
+                "third_party_invite": invite,
+            },
+            "room_id": room_id,
+            "sender": sender,
+            "state_key": invite["mxid"],
+        }
+
+        if (yield self.auth.check_host_in_room(room_id, self.hs.hostname)):
+            builder = self.event_builder_factory.new(event_dict)
+            EventValidator().validate_new(builder)
+            event, context = yield self._create_new_client_event(builder=builder)
+            self.auth.check(event, context.current_state)
+            yield self._validate_keyserver(event, auth_events=context.current_state)
+            member_handler = self.hs.get_handlers().room_member_handler
+            yield member_handler.change_membership(event, context)
+        else:
+            destinations = set([x.split(":", 1)[-1] for x in (sender, room_id)])
+            yield self.replication_layer.forward_third_party_invite(
+                destinations,
+                room_id,
+                event_dict,
+            )
+
+    @defer.inlineCallbacks
+    @log_function
+    def on_exchange_third_party_invite_request(self, origin, room_id, event_dict):
+        builder = self.event_builder_factory.new(event_dict)
+
+        event, context = yield self._create_new_client_event(
+            builder=builder,
+        )
+
+        self.auth.check(event, auth_events=context.current_state)
+        yield self._validate_keyserver(event, auth_events=context.current_state)
+
+        returned_invite = yield self.send_invite(origin, event)
+        # TODO: Make sure the signatures actually are correct.
+        event.signatures.update(returned_invite.signatures)
+        member_handler = self.hs.get_handlers().room_member_handler
+        yield member_handler.change_membership(event, context)
+
+    @defer.inlineCallbacks
+    def _validate_keyserver(self, event, auth_events):
+        token = event.content["third_party_invite"]["signed"]["token"]
+
+        invite_event = auth_events.get(
+            (EventTypes.ThirdPartyInvite, token,)
+        )
+
+        try:
+            response = yield self.hs.get_simple_http_client().get_json(
+                invite_event.content["key_validity_url"],
+                {"public_key": invite_event.content["public_key"]}
+            )
+        except Exception:
+            raise SynapseError(
+                502,
+                "Third party certificate could not be checked"
+            )
+        if "valid" not in response or not response["valid"]:
+            raise AuthError(403, "Third party certificate was invalid")
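The _validate_keyserver check amounts to a single GET against the invite's key_validity_url. A standalone sketch of the same check, using requests instead of Synapse's SimpleHttpClient (the function name and error types here are illustrative):

    import requests

    def check_third_party_key(key_validity_url, public_key):
        # Ask the identity server whether the public key that signed the
        # third-party invite is still valid.
        try:
            response = requests.get(
                key_validity_url, params={"public_key": public_key}
            ).json()
        except Exception:
            # Mirrors the 502 raised when the keyserver cannot be reached.
            raise RuntimeError("502: Third party certificate could not be checked")
        if not response.get("valid"):
            # Mirrors the 403 AuthError for an invalid certificate.
            raise RuntimeError("403: Third party certificate was invalid")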
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 687e1527f7..654ecd2b37 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -16,7 +16,7 @@
 from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, Membership
-from synapse.api.errors import SynapseError
+from synapse.api.errors import SynapseError, AuthError, Codes
 from synapse.streams.config import PaginationConfig
 from synapse.events.utils import serialize_event
 from synapse.events.validator import EventValidator
@@ -229,7 +229,7 @@ class MessageHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def get_room_data(self, user_id=None, room_id=None,
-                      event_type=None, state_key=""):
+                      event_type=None, state_key="", is_guest=False):
         """ Get data from a room.
 
         Args:
@@ -239,23 +239,42 @@ class MessageHandler(BaseHandler):
         Raises:
             SynapseError if something went wrong.
         """
-        member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
+        membership, membership_event_id = yield self._check_in_room_or_world_readable(
+            room_id, user_id, is_guest
+        )
 
-        if member_event.membership == Membership.JOIN:
+        if membership == Membership.JOIN:
             data = yield self.state_handler.get_current_state(
                 room_id, event_type, state_key
             )
-        elif member_event.membership == Membership.LEAVE:
+        elif membership == Membership.LEAVE:
             key = (event_type, state_key)
             room_state = yield self.store.get_state_for_events(
-                [member_event.event_id], [key]
+                [membership_event_id], [key]
             )
-            data = room_state[member_event.event_id].get(key)
+            data = room_state[membership_event_id].get(key)
 
         defer.returnValue(data)
 
     @defer.inlineCallbacks
-    def get_state_events(self, user_id, room_id):
+    def _check_in_room_or_world_readable(self, room_id, user_id, is_guest):
+        if is_guest:
+            visibility = yield self.state_handler.get_current_state(
+                room_id, EventTypes.RoomHistoryVisibility, ""
+            )
+            if visibility.content["history_visibility"] == "world_readable":
+                defer.returnValue((Membership.JOIN, None))
+                return
+            else:
+                raise AuthError(
+                    403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
+                )
+        else:
+            member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
+            defer.returnValue((member_event.membership, member_event.event_id))
+
+    @defer.inlineCallbacks
+    def get_state_events(self, user_id, room_id, is_guest=False):
         """Retrieve all state events for a given room. If the user is
         joined to the room then return the current state. If the user has
         left the room return the state events from when they left.
@@ -266,15 +285,17 @@ class MessageHandler(BaseHandler):
         Returns:
             A list of dicts representing state events. [{}, {}, {}]
         """
-        member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
+        membership, membership_event_id = yield self._check_in_room_or_world_readable(
+            room_id, user_id, is_guest
+        )
 
-        if member_event.membership == Membership.JOIN:
+        if membership == Membership.JOIN:
             room_state = yield self.state_handler.get_current_state(room_id)
-        elif member_event.membership == Membership.LEAVE:
+        elif membership == Membership.LEAVE:
             room_state = yield self.store.get_state_for_events(
-                [member_event.event_id], None
+                [membership_event_id], None
             )
-            room_state = room_state[member_event.event_id]
+            room_state = room_state[membership_event_id]
 
         now = self.clock.time_msec()
         defer.returnValue(
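The rule enforced by _check_in_room_or_world_readable can be condensed to a few lines. A standalone sketch (string membership values instead of the Membership constants; not code from this patch):

    def may_read_room_state(is_guest, history_visibility, membership):
        if is_guest:
            # Guests may only read rooms whose history is world readable;
            # anything else is rejected with errcode GUEST_ACCESS_FORBIDDEN.
            return history_visibility == "world_readable"
        # Non-guests must have been in the room at some point: state is
        # served from the current state (joined) or from the point they left.
        return membership in ("join", "leave")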
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index ce60642127..0b780cd528 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -1142,8 +1142,9 @@ class PresenceEventSource(object):
 
     @defer.inlineCallbacks
     @log_function
-    def get_new_events_for_user(self, user, from_key, limit):
+    def get_new_events(self, user, from_key, room_ids=None, **kwargs):
         from_key = int(from_key)
+        room_ids = room_ids or []
 
         presence = self.hs.get_handlers().presence_handler
         cachemap = presence._user_cachemap
@@ -1161,7 +1162,6 @@ class PresenceEventSource(object):
             user_ids_to_check |= set(
                 UserID.from_string(p["observed_user_id"]) for p in presence_list
             )
-        room_ids = yield presence.get_joined_rooms_for_user(user)
         for room_id in set(room_ids) & set(presence._room_serials):
             if presence._room_serials[room_id] > from_key:
                 joined = yield presence.get_joined_users_for_room_id(room_id)
diff --git a/synapse/handlers/private_user_data.py b/synapse/handlers/private_user_data.py
index 1778c71325..1abe45ed7b 100644
--- a/synapse/handlers/private_user_data.py
+++ b/synapse/handlers/private_user_data.py
@@ -24,7 +24,7 @@ class PrivateUserDataEventSource(object):
         return self.store.get_max_private_user_data_stream_id()
 
     @defer.inlineCallbacks
-    def get_new_events_for_user(self, user, from_key, limit):
+    def get_new_events(self, user, from_key, **kwargs):
         user_id = user.to_string()
         last_stream_id = from_key
 
diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py
index a47ae3df42..973f4d5cae 100644
--- a/synapse/handlers/receipts.py
+++ b/synapse/handlers/receipts.py
@@ -164,17 +164,15 @@ class ReceiptEventSource(object):
         self.store = hs.get_datastore()
 
     @defer.inlineCallbacks
-    def get_new_events_for_user(self, user, from_key, limit):
+    def get_new_events(self, from_key, room_ids, **kwargs):
         from_key = int(from_key)
         to_key = yield self.get_current_key()
 
         if from_key == to_key:
             defer.returnValue(([], to_key))
 
-        rooms = yield self.store.get_rooms_for_user(user.to_string())
-        rooms = [room.room_id for room in rooms]
         events = yield self.store.get_linearized_receipts_for_rooms(
-            rooms,
+            room_ids,
             from_key=from_key,
             to_key=to_key,
         )
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 9184dcd048..8cce8d0e99 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -38,6 +38,8 @@ import string
 
 logger = logging.getLogger(__name__)
 
+id_server_scheme = "https://"
+
 
 class RoomCreationHandler(BaseHandler):
 
@@ -488,8 +490,7 @@ class RoomMemberHandler(BaseHandler):
             yield handler.do_invite_join(
                 room_hosts,
                 room_id,
-                event.user_id,
-                event.content  # FIXME To get a non-frozen dict
+                event.user_id
             )
         else:
             logger.debug("Doing normal join")
@@ -632,7 +633,7 @@ class RoomMemberHandler(BaseHandler):
         """
         try:
             data = yield self.hs.get_simple_http_client().get_json(
-                "https://%s/_matrix/identity/api/v1/lookup" % (id_server,),
+                "%s%s/_matrix/identity/api/v1/lookup" % (id_server_scheme, id_server,),
                 {
                     "medium": medium,
                     "address": address,
@@ -655,8 +656,8 @@ class RoomMemberHandler(BaseHandler):
             raise AuthError(401, "No signature from server %s" % (server_hostname,))
         for key_name, signature in data["signatures"][server_hostname].items():
             key_data = yield self.hs.get_simple_http_client().get_json(
-                "https://%s/_matrix/identity/api/v1/pubkey/%s" %
-                (server_hostname, key_name,),
+                "%s%s/_matrix/identity/api/v1/pubkey/%s" %
+                (id_server_scheme, server_hostname, key_name,),
             )
             if "public_key" not in key_data:
                 raise AuthError(401, "No public key named %s from %s" %
@@ -709,7 +710,9 @@ class RoomMemberHandler(BaseHandler):
     @defer.inlineCallbacks
     def _ask_id_server_for_third_party_invite(
             self, id_server, medium, address, room_id, sender):
-        is_url = "https://%s/_matrix/identity/api/v1/store-invite" % (id_server,)
+        is_url = "%s%s/_matrix/identity/api/v1/store-invite" % (
+            id_server_scheme, id_server,
+        )
         data = yield self.hs.get_simple_http_client().post_urlencoded_get_json(
             is_url,
             {
@@ -722,8 +725,8 @@ class RoomMemberHandler(BaseHandler):
         # TODO: Check for success
         token = data["token"]
         public_key = data["public_key"]
-        key_validity_url = "https://%s/_matrix/identity/api/v1/pubkey/isvalid" % (
-            id_server,
+        key_validity_url = "%s%s/_matrix/identity/api/v1/pubkey/isvalid" % (
+            id_server_scheme, id_server,
         )
         defer.returnValue((token, public_key, key_validity_url))
 
@@ -807,7 +810,14 @@ class RoomEventSource(object):
         self.store = hs.get_datastore()
 
     @defer.inlineCallbacks
-    def get_new_events_for_user(self, user, from_key, limit):
+    def get_new_events(
+            self,
+            user,
+            from_key,
+            limit,
+            room_ids,
+            is_guest,
+    ):
         # We just ignore the key for now.
 
         to_key = yield self.get_current_key()
@@ -828,6 +838,8 @@ class RoomEventSource(object):
                 from_key=from_key,
                 to_key=to_key,
                 limit=limit,
+                room_ids=room_ids,
+                is_guest=is_guest,
             )
 
         defer.returnValue((events, end_key))
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index 2718e9482e..b7545c111f 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -22,6 +22,8 @@ from synapse.api.filtering import Filter
 from synapse.api.errors import SynapseError
 from synapse.events.utils import serialize_event
 
+from unpaddedbase64 import decode_base64, encode_base64
+
 import logging
 
 
@@ -34,27 +36,59 @@ class SearchHandler(BaseHandler):
         super(SearchHandler, self).__init__(hs)
 
     @defer.inlineCallbacks
-    def search(self, user, content):
+    def search(self, user, content, batch=None):
         """Performs a full text search for a user.
 
         Args:
             user (UserID)
             content (dict): Search parameters
+            batch (str): The next_batch parameter. Used for pagination.
 
         Returns:
             dict to be returned to the client with results of search
         """
 
+        batch_group = None
+        batch_group_key = None
+        batch_token = None
+        if batch:
+            try:
+                b = decode_base64(batch)
+                batch_group, batch_group_key, batch_token = b.split("\n")
+
+                assert batch_group is not None
+                assert batch_group_key is not None
+                assert batch_token is not None
+            except:
+                raise SynapseError(400, "Invalid batch")
+
         try:
-            search_term = content["search_categories"]["room_events"]["search_term"]
-            keys = content["search_categories"]["room_events"].get("keys", [
+            room_cat = content["search_categories"]["room_events"]
+
+            # The actual thing to query in FTS
+            search_term = room_cat["search_term"]
+
+            # Which "keys" to search over in FTS query
+            keys = room_cat.get("keys", [
                 "content.body", "content.name", "content.topic",
             ])
-            filter_dict = content["search_categories"]["room_events"].get("filter", {})
-            event_context = content["search_categories"]["room_events"].get(
+
+            # Filter to apply to results
+            filter_dict = room_cat.get("filter", {})
+
+            # What to order results by (impacts whether pagination can be done)
+            order_by = room_cat.get("order_by", "rank")
+
+            # Include context around each event?
+            event_context = room_cat.get(
                 "event_context", None
             )
 
+            # Group results together? May allow clients to paginate within a
+            # group
+            group_by = room_cat.get("groupings", {}).get("group_by", {})
+            group_keys = [g["key"] for g in group_by]
+
             if event_context is not None:
                 before_limit = int(event_context.get(
                     "before_limit", 5
@@ -65,6 +99,15 @@ class SearchHandler(BaseHandler):
         except KeyError:
             raise SynapseError(400, "Invalid search query")
 
+        if order_by not in ("rank", "recent"):
+            raise SynapseError(400, "Invalid order by: %r" % (order_by,))
+
+        if set(group_keys) - {"room_id", "sender"}:
+            raise SynapseError(
+                400,
+                "Invalid group by keys: %r" % (set(group_keys) - {"room_id", "sender"},)
+            )
+
         search_filter = Filter(filter_dict)
 
         # TODO: Search through left rooms too
@@ -77,19 +120,130 @@ class SearchHandler(BaseHandler):
 
         room_ids = search_filter.filter_rooms(room_ids)
 
-        rank_map, event_map, _ = yield self.store.search_msgs(
-            room_ids, search_term, keys
-        )
+        if batch_group == "room_id":
+            room_ids.intersection_update({batch_group_key})
 
-        filtered_events = search_filter.filter(event_map.values())
+        rank_map = {}  # event_id -> rank of event
+        allowed_events = []
+        room_groups = {}  # Holds result of grouping by room, if applicable
+        sender_group = {}  # Holds result of grouping by sender, if applicable
 
-        allowed_events = yield self._filter_events_for_client(
-            user.to_string(), filtered_events
-        )
+        # Holds the next_batch for the entire result set if one of those exists
+        global_next_batch = None
 
-        allowed_events.sort(key=lambda e: -rank_map[e.event_id])
-        allowed_events = allowed_events[:search_filter.limit()]
+        if order_by == "rank":
+            results = yield self.store.search_msgs(
+                room_ids, search_term, keys
+            )
+
+            results_map = {r["event"].event_id: r for r in results}
+
+            rank_map.update({r["event"].event_id: r["rank"] for r in results})
+
+            filtered_events = search_filter.filter([r["event"] for r in results])
+
+            events = yield self._filter_events_for_client(
+                user.to_string(), filtered_events
+            )
+
+            events.sort(key=lambda e: -rank_map[e.event_id])
+            allowed_events = events[:search_filter.limit()]
+
+            for e in allowed_events:
+                rm = room_groups.setdefault(e.room_id, {
+                    "results": [],
+                    "order": rank_map[e.event_id],
+                })
+                rm["results"].append(e.event_id)
+
+                s = sender_group.setdefault(e.sender, {
+                    "results": [],
+                    "order": rank_map[e.event_id],
+                })
+                s["results"].append(e.event_id)
+
+        elif order_by == "recent":
+            # In this case we specifically loop through each room as the given
+            # limit applies to each room, rather than a global list.
+            # This is not necessarily a good idea.
+            for room_id in room_ids:
+                room_events = []
+                if batch_group == "room_id" and batch_group_key == room_id:
+                    pagination_token = batch_token
+                else:
+                    pagination_token = None
+                i = 0
+
+                # We keep looping and we keep filtering until we reach the limit
+                # or we run out of things.
+                # But only go around 5 times since otherwise synapse will be sad.
+                while len(room_events) < search_filter.limit() and i < 5:
+                    i += 1
+                    results = yield self.store.search_room(
+                        room_id, search_term, keys, search_filter.limit() * 2,
+                        pagination_token=pagination_token,
+                    )
+
+                    results_map = {r["event"].event_id: r for r in results}
+
+                    rank_map.update({r["event"].event_id: r["rank"] for r in results})
+
+                    filtered_events = search_filter.filter([
+                        r["event"] for r in results
+                    ])
+
+                    events = yield self._filter_events_for_client(
+                        user.to_string(), filtered_events
+                    )
+
+                    room_events.extend(events)
+                    room_events = room_events[:search_filter.limit()]
+
+                    if len(results) < search_filter.limit() * 2:
+                        pagination_token = None
+                        break
+                    else:
+                        pagination_token = results[-1]["pagination_token"]
+
+                if room_events:
+                    res = results_map[room_events[-1].event_id]
+                    pagination_token = res["pagination_token"]
+
+                    group = room_groups.setdefault(room_id, {})
+                    if pagination_token:
+                        next_batch = encode_base64("%s\n%s\n%s" % (
+                            "room_id", room_id, pagination_token
+                        ))
+                        group["next_batch"] = next_batch
+
+                        if batch_token:
+                            global_next_batch = next_batch
+
+                    group["results"] = [e.event_id for e in room_events]
+                    group["order"] = max(
+                        e.origin_server_ts/1000 for e in room_events
+                        if hasattr(e, "origin_server_ts")
+                    )
+
+                allowed_events.extend(room_events)
+
+            # Normalize the group orders
+            if room_groups:
+                if len(room_groups) > 1:
+                    mx = max(g["order"] for g in room_groups.values())
+                    mn = min(g["order"] for g in room_groups.values())
+
+                    for g in room_groups.values():
+                        g["order"] = (g["order"] - mn) * 1.0 / (mx - mn)
+                else:
+                    room_groups.values()[0]["order"] = 1
 
+        else:
+            # We should never get here due to the guard earlier.
+            raise NotImplementedError()
+
+        # If client has asked for "context" for each event (i.e. some surrounding
+        # events and state), fetch that
         if event_context is not None:
             now_token = yield self.hs.get_event_sources().get_current_token()
 
@@ -144,11 +298,22 @@ class SearchHandler(BaseHandler):
 
         logger.info("Found %d results", len(results))
 
+        rooms_cat_res = {
+            "results": results,
+            "count": len(results)
+        }
+
+        if room_groups and "room_id" in group_keys:
+            rooms_cat_res.setdefault("groups", {})["room_id"] = room_groups
+
+        if sender_group and "sender" in group_keys:
+            rooms_cat_res.setdefault("groups", {})["sender"] = sender_group
+
+        if global_next_batch:
+            rooms_cat_res["next_batch"] = global_next_batch
+
         defer.returnValue({
             "search_categories": {
-                "room_events": {
-                    "results": results,
-                    "count": len(results)
-                }
+                "room_events": rooms_cat_res
             }
         })
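The next_batch tokens introduced here are just the unpadded-base64 encoding of "<group>\n<group key>\n<pagination token>". A self-contained sketch of the round trip using the standard base64 module (the room id and pagination token values are made up):

    import base64

    def encode_batch(group, group_key, pagination_token):
        raw = "%s\n%s\n%s" % (group, group_key, pagination_token)
        return base64.b64encode(raw.encode()).decode().rstrip("=")

    def decode_batch(batch):
        padded = batch + "=" * (-len(batch) % 4)
        group, group_key, pagination_token = (
            base64.b64decode(padded).decode().split("\n")
        )
        return group, group_key, pagination_token

    # e.g. a per-room continuation token for "recent" ordering:
    token = encode_batch("room_id", "!abc:example.com", "t123-456")
    assert decode_batch(token) == ("room_id", "!abc:example.com", "t123-456")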
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 1c1ee34b1e..5294d96466 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -295,11 +295,16 @@ class SyncHandler(BaseHandler):
 
         typing_key = since_token.typing_key if since_token else "0"
 
+        rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
+        room_ids = [room.room_id for room in rooms]
+
         typing_source = self.event_sources.sources["typing"]
-        typing, typing_key = yield typing_source.get_new_events_for_user(
+        typing, typing_key = yield typing_source.get_new_events(
             user=sync_config.user,
             from_key=typing_key,
             limit=sync_config.filter.ephemeral_limit(),
+            room_ids=room_ids,
+            is_guest=False,
         )
         now_token = now_token.copy_and_replace("typing_key", typing_key)
 
@@ -312,10 +317,13 @@ class SyncHandler(BaseHandler):
         receipt_key = since_token.receipt_key if since_token else "0"
 
         receipt_source = self.event_sources.sources["receipt"]
-        receipts, receipt_key = yield receipt_source.get_new_events_for_user(
+        receipts, receipt_key = yield receipt_source.get_new_events(
             user=sync_config.user,
             from_key=receipt_key,
             limit=sync_config.filter.ephemeral_limit(),
+            room_ids=room_ids,
+            # /sync doesn't support guest access, so guests can't get to this point in the code
+            is_guest=False,
         )
         now_token = now_token.copy_and_replace("receipt_key", receipt_key)
 
@@ -360,11 +368,17 @@ class SyncHandler(BaseHandler):
         """
         now_token = yield self.event_sources.get_current_token()
 
+        rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
+        room_ids = [room.room_id for room in rooms]
+
         presence_source = self.event_sources.sources["presence"]
-        presence, presence_key = yield presence_source.get_new_events_for_user(
+        presence, presence_key = yield presence_source.get_new_events(
             user=sync_config.user,
             from_key=since_token.presence_key,
             limit=sync_config.filter.presence_limit(),
+            room_ids=room_ids,
+            # /sync doesn't support guest access, so guests can't get to this point in the code
+            is_guest=False,
         )
         now_token = now_token.copy_and_replace("presence_key", presence_key)
 
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index d7096aab8c..2846f3e6e8 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -246,17 +246,12 @@ class TypingNotificationEventSource(object):
             },
         }
 
-    @defer.inlineCallbacks
-    def get_new_events_for_user(self, user, from_key, limit):
+    def get_new_events(self, from_key, room_ids, **kwargs):
         from_key = int(from_key)
         handler = self.handler()
 
-        joined_room_ids = (
-            yield self.room_member_handler().get_joined_rooms_for_user(user)
-        )
-
         events = []
-        for room_id in joined_room_ids:
+        for room_id in room_ids:
             if room_id not in handler._room_serials:
                 continue
             if handler._room_serials[room_id] <= from_key:
@@ -264,7 +259,7 @@ class TypingNotificationEventSource(object):
 
             events.append(self._make_event_for(room_id))
 
-        defer.returnValue((events, handler._latest_room_serial))
+        return events, handler._latest_room_serial
 
     def get_current_key(self):
         return self.handler()._latest_room_serial
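Taken together, these changes move every event source from get_new_events_for_user(user, from_key, limit) to a common get_new_events signature in which the caller supplies the rooms to consider. A sketch of the assumed shared shape, reconstructed from the call sites above (no actual base class is added by this diff):

    class EventSource(object):
        def get_new_events(self, user, from_key, limit=None, room_ids=None,
                           is_guest=False, **kwargs):
            """Return (events, next_key).

            Callers such as the sync handler now look up the user's rooms
            once and pass them in as room_ids, instead of each source
            calling get_rooms_for_user / get_joined_rooms_for_user itself.
            """
            raise NotImplementedError()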