-rw-r--r--  .circleci/config.yml | 66
-rwxr-xr-x  .circleci/merge_base_branch.sh | 27
-rw-r--r--  changelog.d/3704.misc | 1
-rw-r--r--  changelog.d/3822.misc | 1
-rw-r--r--  changelog.d/3823.misc | 1
-rw-r--r--  changelog.d/3824.bugfix | 1
-rw-r--r--  changelog.d/3826.misc | 1
-rw-r--r--  changelog.d/3827.misc | 1
-rw-r--r--  changelog.d/3835.bugfix | 1
-rw-r--r--  changelog.d/3840.misc | 1
-rw-r--r--  changelog.d/3845.bugfix | 1
-rw-r--r--  changelog.d/3847.misc | 1
-rw-r--r--  synapse/config/homeserver.py | 2
-rw-r--r--  synapse/config/jwt_config.py (renamed from synapse/config/jwt.py) | 0
-rw-r--r--  synapse/crypto/context_factory.py | 2
-rw-r--r--  synapse/crypto/keyclient.py | 8
-rw-r--r--  synapse/crypto/keyring.py | 33
-rw-r--r--  synapse/federation/federation_base.py | 34
-rw-r--r--  synapse/federation/federation_client.py | 8
-rw-r--r--  synapse/federation/transport/client.py | 5
-rw-r--r--  synapse/federation/transport/server.py | 24
-rw-r--r--  synapse/handlers/sync.py | 118
-rw-r--r--  synapse/http/matrixfederationclient.py | 52
-rw-r--r--  synapse/rest/client/v1/admin.py | 9
-rw-r--r--  synapse/rest/client/v1/events.py | 12
-rw-r--r--  synapse/rest/client/v1/initial_sync.py | 2
-rw-r--r--  synapse/rest/client/v1/login.py | 44
-rw-r--r--  synapse/rest/client/v1/push_rule.py | 24
-rw-r--r--  synapse/rest/client/v1/pusher.py | 4
-rw-r--r--  synapse/rest/client/v1/room.py | 14
-rw-r--r--  synapse/rest/client/v1/voip.py | 6
-rw-r--r--  synapse/rest/client/v2_alpha/sync.py | 2
-rw-r--r--  synapse/rest/client/v2_alpha/thirdparty.py | 4
-rw-r--r--  synapse/rest/key/v1/server_key_resource.py | 2
-rw-r--r--  synapse/rest/key/v2/__init__.py | 4
-rw-r--r--  synapse/rest/key/v2/remote_key_resource.py | 6
-rw-r--r--  synapse/rest/media/v0/content_repository.py | 4
-rw-r--r--  synapse/rest/media/v1/_base.py | 30
-rw-r--r--  synapse/rest/media/v1/download_resource.py | 12
-rw-r--r--  synapse/rest/media/v1/media_repository.py | 29
-rw-r--r--  synapse/rest/media/v1/preview_url_resource.py | 10
-rw-r--r--  synapse/storage/events.py | 4
-rw-r--r--  synapse/storage/keys.py | 1
-rw-r--r--  synapse/storage/roommember.py | 65
44 files changed, 500 insertions, 177 deletions
diff --git a/.circleci/config.yml b/.circleci/config.yml
index e03f01b837..5266544f3c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -9,6 +9,8 @@ jobs:
       - store_artifacts:
           path: ~/project/logs
           destination: logs
+      - store_test_results:
+          path: logs
   sytestpy2postgres:
     machine: true
     steps:
@@ -18,6 +20,34 @@ jobs:
       - store_artifacts:
           path: ~/project/logs
           destination: logs
+      - store_test_results:
+          path: logs
+  sytestpy2merged:
+    machine: true
+    steps:
+      - checkout
+      - run: bash .circleci/merge_base_branch.sh
+      - run: docker pull matrixdotorg/sytest-synapsepy2
+      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy2
+      - store_artifacts:
+          path: ~/project/logs
+          destination: logs
+      - store_test_results:
+          path: logs
+
+  sytestpy2postgresmerged:
+    machine: true
+    steps:
+      - checkout
+      - run: bash .circleci/merge_base_branch.sh
+      - run: docker pull matrixdotorg/sytest-synapsepy2
+      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy2
+      - store_artifacts:
+          path: ~/project/logs
+          destination: logs
+      - store_test_results:
+          path: logs
+
   sytestpy3:
     machine: true
     steps:
@@ -27,6 +57,8 @@ jobs:
       - store_artifacts:
           path: ~/project/logs
           destination: logs
+      - store_test_results:
+          path: logs
   sytestpy3postgres:
     machine: true
     steps:
@@ -36,6 +68,32 @@ jobs:
       - store_artifacts:
           path: ~/project/logs
           destination: logs
+      - store_test_results:
+          path: logs
+  sytestpy3merged:
+    machine: true
+    steps:
+      - checkout
+      - run: bash .circleci/merge_base_branch.sh
+      - run: docker pull matrixdotorg/sytest-synapsepy3
+      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy3
+      - store_artifacts:
+          path: ~/project/logs
+          destination: logs
+      - store_test_results:
+          path: logs
+  sytestpy3postgresmerged:
+    machine: true
+    steps:
+      - checkout
+      - run: bash .circleci/merge_base_branch.sh
+      - run: docker pull matrixdotorg/sytest-synapsepy3
+      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy3
+      - store_artifacts:
+          path: ~/project/logs
+          destination: logs
+      - store_test_results:
+          path: logs
 
 workflows:
   version: 2
@@ -43,6 +101,14 @@ workflows:
     jobs:
       - sytestpy2
       - sytestpy2postgres
+      - sytestpy2merged:
+          filters:
+            branches:
+              ignore: /develop|master/
+      - sytestpy2postgresmerged:
+          filters:
+            branches:
+              ignore: /develop|master/
 # Currently broken while the Python 3 port is incomplete
 #      - sytestpy3
 #      - sytestpy3postgres
diff --git a/.circleci/merge_base_branch.sh b/.circleci/merge_base_branch.sh
new file mode 100755
index 0000000000..2d700dbf11
--- /dev/null
+++ b/.circleci/merge_base_branch.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+
+set -e
+
+# CircleCI doesn't give CIRCLE_PR_NUMBER in the environment for non-forked PRs. Wonderful.
+# In this case, we just need to do some ~shell magic~ to strip it out of the PULL_REQUEST URL.
+echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> "$BASH_ENV"
+source $BASH_ENV
+
+if [[ -z "${CIRCLE_PR_NUMBER}" ]]
+then
+    echo "Can't figure out what the PR number is!"
+    exit 1
+fi
+
+# Get the reference, using the GitHub API
+GITBASE=`curl -q https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'`
+
+# Show what we are before
+git show -s
+
+# Fetch and merge. If it doesn't work, it will raise due to set -e.
+git fetch -u origin $GITBASE
+git merge --no-edit origin/$GITBASE
+
+# Show what we are after.
+git show -s
\ No newline at end of file
diff --git a/changelog.d/3704.misc b/changelog.d/3704.misc
new file mode 100644
index 0000000000..aaae0fbd63
--- /dev/null
+++ b/changelog.d/3704.misc
@@ -0,0 +1 @@
+CircleCI tests now run on the potential merge of a PR.
diff --git a/changelog.d/3822.misc b/changelog.d/3822.misc
new file mode 100644
index 0000000000..5250f31896
--- /dev/null
+++ b/changelog.d/3822.misc
@@ -0,0 +1 @@
+crypto/ is now ported to Python 3.
diff --git a/changelog.d/3823.misc b/changelog.d/3823.misc
new file mode 100644
index 0000000000..0da491ddaa
--- /dev/null
+++ b/changelog.d/3823.misc
@@ -0,0 +1 @@
+rest/ is now ported to Python 3.
diff --git a/changelog.d/3824.bugfix b/changelog.d/3824.bugfix
new file mode 100644
index 0000000000..99f199dcc6
--- /dev/null
+++ b/changelog.d/3824.bugfix
@@ -0,0 +1 @@
+Fix the JWT import check.
\ No newline at end of file
diff --git a/changelog.d/3826.misc b/changelog.d/3826.misc
new file mode 100644
index 0000000000..a4d9a012f9
--- /dev/null
+++ b/changelog.d/3826.misc
@@ -0,0 +1 @@
+Add some logging for the keyring queue.
diff --git a/changelog.d/3827.misc b/changelog.d/3827.misc
new file mode 100644
index 0000000000..bc294706cf
--- /dev/null
+++ b/changelog.d/3827.misc
@@ -0,0 +1 @@
+Speed up lazy loading by 2-3x.
diff --git a/changelog.d/3835.bugfix b/changelog.d/3835.bugfix
new file mode 100644
index 0000000000..00dbcbc8dc
--- /dev/null
+++ b/changelog.d/3835.bugfix
@@ -0,0 +1 @@
+Fix VoIP crashes under Python 3 (#3821).
diff --git a/changelog.d/3840.misc b/changelog.d/3840.misc
new file mode 100644
index 0000000000..b9585ae9be
--- /dev/null
+++ b/changelog.d/3840.misc
@@ -0,0 +1 @@
+Disable lazy loading for incremental syncs for now.
diff --git a/changelog.d/3845.bugfix b/changelog.d/3845.bugfix
new file mode 100644
index 0000000000..5b7e8f1934
--- /dev/null
+++ b/changelog.d/3845.bugfix
@@ -0,0 +1 @@
+Fix outbound requests occasionally wedging, which can result in federation breaking between servers.
diff --git a/changelog.d/3847.misc b/changelog.d/3847.misc
new file mode 100644
index 0000000000..bf8b5afea4
--- /dev/null
+++ b/changelog.d/3847.misc
@@ -0,0 +1 @@
+federation/ is now ported to Python 3.
\ No newline at end of file
diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py
index 2fd9c48abf..b8d5690f2b 100644
--- a/synapse/config/homeserver.py
+++ b/synapse/config/homeserver.py
@@ -21,7 +21,7 @@ from .consent_config import ConsentConfig
 from .database import DatabaseConfig
 from .emailconfig import EmailConfig
 from .groups import GroupsConfig
-from .jwt import JWTConfig
+from .jwt_config import JWTConfig
 from .key import KeyConfig
 from .logger import LoggingConfig
 from .metrics import MetricsConfig
diff --git a/synapse/config/jwt.py b/synapse/config/jwt_config.py
index 51e7f7e003..51e7f7e003 100644
--- a/synapse/config/jwt.py
+++ b/synapse/config/jwt_config.py
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index 1a391adec1..02b76dfcfb 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -123,6 +123,6 @@ class ClientTLSOptionsFactory(object):
 
     def get_options(self, host):
         return ClientTLSOptions(
-            host.decode('utf-8'),
+            host,
             CertificateOptions(verify=False).getContext()
         )
diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py
index e94400b8e2..57d4665e84 100644
--- a/synapse/crypto/keyclient.py
+++ b/synapse/crypto/keyclient.py
@@ -50,7 +50,7 @@ def fetch_server_key(server_name, tls_client_options_factory, path=KEY_API_V1):
                 defer.returnValue((server_response, server_certificate))
         except SynapseKeyClientError as e:
             logger.warn("Error getting key for %r: %s", server_name, e)
-            if e.status.startswith("4"):
+            if e.status.startswith(b"4"):
                 # Don't retry for 4xx responses.
                 raise IOError("Cannot get key for %r" % server_name)
         except (ConnectError, DomainError) as e:
@@ -82,6 +82,12 @@ class SynapseKeyClientProtocol(HTTPClient):
         self._peer = self.transport.getPeer()
         logger.debug("Connected to %s", self._peer)
 
+        if not isinstance(self.path, bytes):
+            self.path = self.path.encode('ascii')
+
+        if not isinstance(self.host, bytes):
+            self.host = self.host.encode('ascii')
+
         self.sendCommand(b"GET", self.path)
         if self.host:
             self.sendHeader(b"Host", self.host)
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index 30e2742102..d89f94c219 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -16,9 +16,10 @@
 
 import hashlib
 import logging
-import urllib
 from collections import namedtuple
 
+from six.moves import urllib
+
 from signedjson.key import (
     decode_verify_key_bytes,
     encode_verify_key_base64,
@@ -40,6 +41,7 @@ from synapse.api.errors import Codes, SynapseError
 from synapse.crypto.keyclient import fetch_server_key
 from synapse.util import logcontext, unwrapFirstError
 from synapse.util.logcontext import (
+    LoggingContext,
     PreserveLoggingContext,
     preserve_fn,
     run_in_background,
@@ -216,23 +218,34 @@ class Keyring(object):
             servers have completed. Follows the synapse rules of logcontext
             preservation.
         """
+        loop_count = 1
         while True:
             wait_on = [
-                self.key_downloads[server_name]
+                (server_name, self.key_downloads[server_name])
                 for server_name in server_names
                 if server_name in self.key_downloads
             ]
-            if wait_on:
-                with PreserveLoggingContext():
-                    yield defer.DeferredList(wait_on)
-            else:
+            if not wait_on:
                 break
+            logger.info(
+                "Waiting for existing lookups for %s to complete [loop %i]",
+                [w[0] for w in wait_on], loop_count,
+            )
+            with PreserveLoggingContext():
+                yield defer.DeferredList((w[1] for w in wait_on))
+
+            loop_count += 1
+
+        ctx = LoggingContext.current_context()
 
         def rm(r, server_name_):
-            self.key_downloads.pop(server_name_, None)
+            with PreserveLoggingContext(ctx):
+                logger.debug("Releasing key lookup lock on %s", server_name_)
+                self.key_downloads.pop(server_name_, None)
             return r
 
         for server_name, deferred in server_to_deferred.items():
+            logger.debug("Got key lookup lock on %s", server_name)
             self.key_downloads[server_name] = deferred
             deferred.addBoth(rm, server_name)
 
@@ -432,7 +445,7 @@ class Keyring(object):
         # an incoming request.
         query_response = yield self.client.post_json(
             destination=perspective_name,
-            path=b"/_matrix/key/v2/query",
+            path="/_matrix/key/v2/query",
             data={
                 u"server_keys": {
                     server_name: {
@@ -513,8 +526,8 @@ class Keyring(object):
 
             (response, tls_certificate) = yield fetch_server_key(
                 server_name, self.hs.tls_client_options_factory,
-                path=(b"/_matrix/key/v2/server/%s" % (
-                    urllib.quote(requested_key_id),
+                path=("/_matrix/key/v2/server/%s" % (
+                    urllib.parse.quote(requested_key_id),
                 )).encode("ascii"),
             )
 
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
index 5be8e66fb8..61782ae1c0 100644
--- a/synapse/federation/federation_base.py
+++ b/synapse/federation/federation_base.py
@@ -143,11 +143,31 @@ class FederationBase(object):
         def callback(_, pdu):
             with logcontext.PreserveLoggingContext(ctx):
                 if not check_event_content_hash(pdu):
-                    logger.warn(
-                        "Event content has been tampered, redacting %s: %s",
-                        pdu.event_id, pdu.get_pdu_json()
-                    )
-                    return prune_event(pdu)
+                    # let's try to distinguish between failures because the event was
+                    # redacted (which are somewhat expected) vs actual ball-tampering
+                    # incidents.
+                    #
+                    # This is just a heuristic, so we just assume that if the keys are
+                    # about the same between the redacted and received events, then the
+                    # received event was probably a redacted copy (but we then use our
+                    # *actual* redacted copy to be on the safe side.)
+                    redacted_event = prune_event(pdu)
+                    if (
+                        set(six.iterkeys(redacted_event)) == set(six.iterkeys(pdu)) and
+                        set(six.iterkeys(redacted_event.content))
+                            == set(six.iterkeys(pdu.content))
+                    ):
+                        logger.info(
+                            "Event %s seems to have been redacted; using our redacted "
+                            "copy",
+                            pdu.event_id,
+                        )
+                    else:
+                        logger.warning(
+                            "Event %s content has been tampered, redacting",
+                            pdu.event_id,
+                        )
+                    return redacted_event
 
                 if self.spam_checker.check_event_for_spam(pdu):
                     logger.warn(
@@ -162,8 +182,8 @@ class FederationBase(object):
             failure.trap(SynapseError)
             with logcontext.PreserveLoggingContext(ctx):
                 logger.warn(
-                    "Signature check failed for %s",
-                    pdu.event_id,
+                    "Signature check failed for %s: %s",
+                    pdu.event_id, failure.getErrorMessage(),
                 )
             return failure
 
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index c9f3c2d352..fe67b2ff42 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -271,10 +271,10 @@ class FederationClient(FederationBase):
                     event_id, destination, e,
                 )
             except NotRetryingDestination as e:
-                logger.info(e.message)
+                logger.info(str(e))
                 continue
             except FederationDeniedError as e:
-                logger.info(e.message)
+                logger.info(str(e))
                 continue
             except Exception as e:
                 pdu_attempts[destination] = now
@@ -510,7 +510,7 @@ class FederationClient(FederationBase):
                 else:
                     logger.warn(
                         "Failed to %s via %s: %i %s",
-                        description, destination, e.code, e.message,
+                        description, destination, e.code, e.args[0],
                     )
             except Exception:
                 logger.warn(
@@ -875,7 +875,7 @@ class FederationClient(FederationBase):
             except Exception as e:
                 logger.exception(
                     "Failed to send_third_party_invite via %s: %s",
-                    destination, e.message
+                    destination, str(e)
                 )
 
         raise RuntimeError("Failed to send to any server.")
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index 1054441ca5..2ab973d6c8 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -15,7 +15,8 @@
 # limitations under the License.
 
 import logging
-import urllib
+
+from six.moves import urllib
 
 from twisted.internet import defer
 
@@ -951,4 +952,4 @@ def _create_path(prefix, path, *args):
     Returns:
         str
     """
-    return prefix + path % tuple(urllib.quote(arg, "") for arg in args)
+    return prefix + path % tuple(urllib.parse.quote(arg, "") for arg in args)
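
Note: the six.moves.urllib import used here and in keyring.py gives the same urllib.parse namespace on both Python 2 and Python 3. A minimal, standalone sketch of the pattern (the key id is just an example value):

    from six.moves import urllib

    # quote() percent-encodes the ':' in a key id, so the resulting path is
    # safe to embed in a URL on either Python version.
    path = "/_matrix/key/v2/server/%s" % (urllib.parse.quote("ed25519:auto"),)
    print(path)  # /_matrix/key/v2/server/ed25519%3Aauto
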
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 3972922ff9..2f874b4838 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -90,8 +90,8 @@ class Authenticator(object):
     @defer.inlineCallbacks
     def authenticate_request(self, request, content):
         json_request = {
-            "method": request.method,
-            "uri": request.uri,
+            "method": request.method.decode('ascii'),
+            "uri": request.uri.decode('ascii'),
             "destination": self.server_name,
             "signatures": {},
         }
@@ -252,7 +252,7 @@ class BaseFederationServlet(object):
                     by the callback method. None if the request has already been handled.
             """
             content = None
-            if request.method in ["PUT", "POST"]:
+            if request.method in [b"PUT", b"POST"]:
                 # TODO: Handle other method types? other content types?
                 content = parse_json_object_from_request(request)
 
@@ -386,7 +386,7 @@ class FederationStateServlet(BaseFederationServlet):
         return self.handler.on_context_state_request(
             origin,
             context,
-            query.get("event_id", [None])[0],
+            parse_string_from_args(query, "event_id", None),
         )
 
 
@@ -397,7 +397,7 @@ class FederationStateIdsServlet(BaseFederationServlet):
         return self.handler.on_state_ids_request(
             origin,
             room_id,
-            query.get("event_id", [None])[0],
+            parse_string_from_args(query, "event_id", None),
         )
 
 
@@ -405,14 +405,12 @@ class FederationBackfillServlet(BaseFederationServlet):
     PATH = "/backfill/(?P<context>[^/]*)/"
 
     def on_GET(self, origin, content, query, context):
-        versions = query["v"]
-        limits = query["limit"]
+        versions = [x.decode('ascii') for x in query[b"v"]]
+        limit = parse_integer_from_args(query, "limit", None)
 
-        if not limits:
+        if not limit:
             return defer.succeed((400, {"error": "Did not include limit param"}))
 
-        limit = int(limits[-1])
-
         return self.handler.on_backfill_request(origin, context, versions, limit)
 
 
@@ -423,7 +421,7 @@ class FederationQueryServlet(BaseFederationServlet):
     def on_GET(self, origin, content, query, query_type):
         return self.handler.on_query_request(
             query_type,
-            {k: v[0].decode("utf-8") for k, v in query.items()}
+            {k.decode('utf8'): v[0].decode("utf-8") for k, v in query.items()}
         )
 
 
@@ -630,14 +628,14 @@ class OpenIdUserInfo(BaseFederationServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, origin, content, query):
-        token = query.get("access_token", [None])[0]
+        token = query.get(b"access_token", [None])[0]
         if token is None:
             defer.returnValue((401, {
                 "errcode": "M_MISSING_TOKEN", "error": "Access Token required"
             }))
             return
 
-        user_id = yield self.handler.on_openid_userinfo(token)
+        user_id = yield self.handler.on_openid_userinfo(token.decode('ascii'))
 
         if user_id is None:
             defer.returnValue((401, {
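
Note: the b"..." keys and .decode() calls in these servlets reflect how Twisted presents query parameters on Python 3: request.args is a dict keyed and valued by bytes. A small illustration using a hand-built dict in place of a real request (the values are made up):

    # Stand-in for what Twisted's request.args looks like under Python 3.
    query = {b"access_token": [b"some_token"], b"v": [b"$event1", b"$event2"]}

    token = query.get(b"access_token", [None])[0]
    versions = [x.decode("ascii") for x in query[b"v"]]
    print(token.decode("ascii"), versions)
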
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 7eed2fcc9b..23983a51ab 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -24,6 +24,7 @@ from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, Membership
 from synapse.push.clientformat import format_push_rules_for_user
+from synapse.storage.roommember import MemberSummary
 from synapse.types import RoomStreamToken
 from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.expiringcache import ExpiringCache
@@ -525,6 +526,8 @@ class SyncHandler(object):
              A deferred dict describing the room summary
         """
 
+        # FIXME: we could/should get this from room_stats when matthew/stats lands
+
         # FIXME: this promulgates https://github.com/matrix-org/synapse/issues/3305
         last_events, _ = yield self.store.get_recent_event_ids_for_room(
             room_id, end_token=now_token.room_key, limit=1,
@@ -537,44 +540,54 @@ class SyncHandler(object):
         last_event = last_events[-1]
         state_ids = yield self.store.get_state_ids_for_event(
             last_event.event_id, [
-                (EventTypes.Member, None),
                 (EventTypes.Name, ''),
                 (EventTypes.CanonicalAlias, ''),
             ]
         )
 
-        member_ids = {
-            state_key: event_id
-            for (t, state_key), event_id in iteritems(state_ids)
-            if t == EventTypes.Member
-        }
+        # this is heavily cached, thus: fast.
+        details = yield self.store.get_room_summary(room_id)
+
         name_id = state_ids.get((EventTypes.Name, ''))
         canonical_alias_id = state_ids.get((EventTypes.CanonicalAlias, ''))
 
         summary = {}
-
-        # FIXME: it feels very heavy to load up every single membership event
-        # just to calculate the counts.
-        member_events = yield self.store.get_events(member_ids.values())
-
-        joined_user_ids = []
-        invited_user_ids = []
-
-        for ev in member_events.values():
-            if ev.content.get("membership") == Membership.JOIN:
-                joined_user_ids.append(ev.state_key)
-            elif ev.content.get("membership") == Membership.INVITE:
-                invited_user_ids.append(ev.state_key)
+        empty_ms = MemberSummary([], 0)
 
         # TODO: only send these when they change.
-        summary["m.joined_member_count"] = len(joined_user_ids)
-        summary["m.invited_member_count"] = len(invited_user_ids)
+        summary["m.joined_member_count"] = (
+            details.get(Membership.JOIN, empty_ms).count
+        )
+        summary["m.invited_member_count"] = (
+            details.get(Membership.INVITE, empty_ms).count
+        )
 
         if name_id or canonical_alias_id:
             defer.returnValue(summary)
 
-        # FIXME: order by stream ordering, not alphabetic
+        joined_user_ids = [
+            r[0] for r in details.get(Membership.JOIN, empty_ms).members
+        ]
+        invited_user_ids = [
+            r[0] for r in details.get(Membership.INVITE, empty_ms).members
+        ]
+        gone_user_ids = (
+            [r[0] for r in details.get(Membership.LEAVE, empty_ms).members] +
+            [r[0] for r in details.get(Membership.BAN, empty_ms).members]
+        )
+
+        # FIXME: only build up a member_ids list for our heroes
+        member_ids = {}
+        for membership in (
+            Membership.JOIN,
+            Membership.INVITE,
+            Membership.LEAVE,
+            Membership.BAN
+        ):
+            for user_id, event_id in details.get(membership, empty_ms).members:
+                member_ids[user_id] = event_id
 
+        # FIXME: order by stream ordering rather than as returned by SQL
         me = sync_config.user.to_string()
         if (joined_user_ids or invited_user_ids):
             summary['m.heroes'] = sorted(
@@ -586,7 +599,11 @@ class SyncHandler(object):
             )[0:5]
         else:
             summary['m.heroes'] = sorted(
-                [user_id for user_id in member_ids.keys() if user_id != me]
+                [
+                    user_id
+                    for user_id in gone_user_ids
+                    if user_id != me
+                ]
             )[0:5]
 
         if not sync_config.filter_collection.lazy_load_members():
@@ -719,6 +736,26 @@ class SyncHandler(object):
                     lazy_load_members=lazy_load_members,
                 )
             elif batch.limited:
+                state_at_timeline_start = yield self.store.get_state_ids_for_event(
+                    batch.events[0].event_id, types=types,
+                    filtered_types=filtered_types,
+                )
+
+                # for now, we disable LL for gappy syncs - see
+                # https://github.com/vector-im/riot-web/issues/7211#issuecomment-419976346
+                # N.B. this slows down incr syncs as we are now processing way
+                # more state in the server than if we were LLing.
+                #
+                # We still have to filter timeline_start to LL entries (above) in order
+                # for _calculate_state's LL logic to work, as we have to include LL
+                # members for timeline senders in case they weren't loaded in the initial
+                # sync.  We do this (counterintuitively) by filtering timeline_start
+                # members to just be ones which were timeline senders, which then ensures
+                # all of the rest get included in the state block (if we need to know
+                # about them).
+                types = None
+                filtered_types = None
+
                 state_at_previous_sync = yield self.get_state_at(
                     room_id, stream_position=since_token, types=types,
                     filtered_types=filtered_types,
@@ -729,24 +766,21 @@ class SyncHandler(object):
                     filtered_types=filtered_types,
                 )
 
-                state_at_timeline_start = yield self.store.get_state_ids_for_event(
-                    batch.events[0].event_id, types=types,
-                    filtered_types=filtered_types,
-                )
-
                 state_ids = _calculate_state(
                     timeline_contains=timeline_state,
                     timeline_start=state_at_timeline_start,
                     previous=state_at_previous_sync,
                     current=current_state_ids,
+                    # we have to include LL members in case LL initial sync missed them
                     lazy_load_members=lazy_load_members,
                 )
             else:
                 state_ids = {}
                 if lazy_load_members:
                     if types:
-                        # We're returning an incremental sync, with no "gap" since
-                        # the previous sync, so normally there would be no state to return
+                        # We're returning an incremental sync, with no
+                        # "gap" since the previous sync, so normally there would be
+                        # no state to return.
                         # But we're lazy-loading, so the client might need some more
                         # member events to understand the events in this timeline.
                         # So we fish out all the member events corresponding to the
@@ -1616,10 +1650,24 @@ class SyncHandler(object):
         )
 
         summary = {}
+
+        # we include a summary in room responses when we're lazy loading
+        # members (as the client otherwise doesn't have enough info to form
+        # the name itself).
         if (
             sync_config.filter_collection.lazy_load_members() and
             (
+                # we recalculate the summary:
+                #   if there are membership changes in the timeline, or
+                #   if membership has changed during a gappy sync, or
+                #   if this is an initial sync.
                 any(ev.type == EventTypes.Member for ev in batch.events) or
+                (
+                    # XXX: this may include false positives in the form of LL
+                    # members which have snuck into state
+                    batch.limited and
+                    any(t == EventTypes.Member for (t, k) in state)
+                ) or
                 since_token is None
             )
         ):
@@ -1649,6 +1697,16 @@ class SyncHandler(object):
                     unread_notifications["highlight_count"] = notifs["highlight_count"]
 
                 sync_result_builder.joined.append(room_sync)
+
+            if batch.limited:
+                user_id = sync_result_builder.sync_config.user.to_string()
+                logger.info(
+                    "Incremental syncing room %s for user %s with %d state events" % (
+                        room_id,
+                        user_id,
+                        len(state),
+                    )
+                )
         elif room_builder.rtype == "archived":
             room_sync = ArchivedSyncResult(
                 room_id=room_id,
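
Note: the details/empty_ms pattern above reads per-membership counts out of get_room_summary() with a zero-valued default. A minimal stand-in to illustrate the shape (this MemberSummary namedtuple is an assumption mirroring the fields used in the diff, not the actual synapse.storage.roommember class):

    from collections import namedtuple

    # Assumed shape: a list of (user_id, event_id) pairs plus a total count.
    MemberSummary = namedtuple("MemberSummary", ("members", "count"))
    empty_ms = MemberSummary([], 0)

    details = {"join": MemberSummary([("@a:hs", "$e1"), ("@b:hs", "$e2")], 2)}
    joined = details.get("join", empty_ms).count     # 2
    invited = details.get("invite", empty_ms).count  # 0, falls back to empty_ms
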
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 6a1fc8ca55..f9a1fbf95d 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -280,7 +280,10 @@ class MatrixFederationHttpClient(object):
                 # :'(
                 # Update transactions table?
                 with logcontext.PreserveLoggingContext():
-                    body = yield treq.content(response)
+                    body = yield self._timeout_deferred(
+                        treq.content(response),
+                        timeout,
+                    )
                 raise HttpResponseException(
                     response.code, response.phrase, body
                 )
@@ -394,7 +397,10 @@ class MatrixFederationHttpClient(object):
             check_content_type_is_json(response.headers)
 
         with logcontext.PreserveLoggingContext():
-            body = yield treq.json_content(response)
+            body = yield self._timeout_deferred(
+                treq.json_content(response),
+                timeout,
+            )
         defer.returnValue(body)
 
     @defer.inlineCallbacks
@@ -444,7 +450,10 @@ class MatrixFederationHttpClient(object):
             check_content_type_is_json(response.headers)
 
         with logcontext.PreserveLoggingContext():
-            body = yield treq.json_content(response)
+            body = yield self._timeout_deferred(
+                treq.json_content(response),
+                timeout,
+            )
 
         defer.returnValue(body)
 
@@ -496,7 +505,10 @@ class MatrixFederationHttpClient(object):
             check_content_type_is_json(response.headers)
 
         with logcontext.PreserveLoggingContext():
-            body = yield treq.json_content(response)
+            body = yield self._timeout_deferred(
+                treq.json_content(response),
+                timeout,
+            )
 
         defer.returnValue(body)
 
@@ -543,7 +555,10 @@ class MatrixFederationHttpClient(object):
             check_content_type_is_json(response.headers)
 
         with logcontext.PreserveLoggingContext():
-            body = yield treq.json_content(response)
+            body = yield self._timeout_deferred(
+                treq.json_content(response),
+                timeout,
+            )
 
         defer.returnValue(body)
 
@@ -585,8 +600,10 @@ class MatrixFederationHttpClient(object):
 
         try:
             with logcontext.PreserveLoggingContext():
-                length = yield _readBodyToFile(
-                    response, output_stream, max_size
+                length = yield self._timeout_deferred(
+                    _readBodyToFile(
+                        response, output_stream, max_size
+                    ),
                 )
         except Exception:
             logger.exception("Failed to download body")
@@ -594,6 +611,27 @@ class MatrixFederationHttpClient(object):
 
         defer.returnValue((length, headers))
 
+    def _timeout_deferred(self, deferred, timeout_ms=None):
+        """Times the deferred out after `timeout_ms` ms
+
+        Args:
+            deferred (Deferred)
+            timeout_ms (int|None): Timeout in milliseconds. If None defaults
+                to 60 seconds.
+
+        Returns:
+            Deferred
+        """
+
+        add_timeout_to_deferred(
+            deferred,
+            timeout_ms / 1000. if timeout_ms else 60,
+            self.hs.get_reactor(),
+            cancelled_to_request_timed_out_error,
+        )
+
+        return deferred
+
 
 class _ReadBodyToFileProtocol(protocol.Protocol):
     def __init__(self, stream, deferred, max_size):
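
Note: the new _timeout_deferred helper above wraps Synapse's add_timeout_to_deferred utility. The same general idea can be expressed with plain Twisted's Deferred.addTimeout; this is only an illustrative sketch, not what Synapse itself does:

    from twisted.internet import defer, task

    clock = task.Clock()        # a fake reactor, so the example is deterministic
    d = defer.Deferred()
    d.addTimeout(60, clock)     # errbacks with defer.TimeoutError after 60s
    d.addErrback(lambda f: f.trap(defer.TimeoutError))

    clock.advance(61)           # simulate a minute passing; d is now timed out
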
diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py
index ad536ab570..41534b8c2a 100644
--- a/synapse/rest/client/v1/admin.py
+++ b/synapse/rest/client/v1/admin.py
@@ -101,7 +101,7 @@ class UserRegisterServlet(ClientV1RestServlet):
 
         nonce = self.hs.get_secrets().token_hex(64)
         self.nonces[nonce] = int(self.reactor.seconds())
-        return (200, {"nonce": nonce.encode('ascii')})
+        return (200, {"nonce": nonce})
 
     @defer.inlineCallbacks
     def on_POST(self, request):
@@ -164,7 +164,7 @@ class UserRegisterServlet(ClientV1RestServlet):
             key=self.hs.config.registration_shared_secret.encode(),
             digestmod=hashlib.sha1,
         )
-        want_mac.update(nonce)
+        want_mac.update(nonce.encode('utf8'))
         want_mac.update(b"\x00")
         want_mac.update(username)
         want_mac.update(b"\x00")
@@ -173,7 +173,10 @@ class UserRegisterServlet(ClientV1RestServlet):
         want_mac.update(b"admin" if admin else b"notadmin")
         want_mac = want_mac.hexdigest()
 
-        if not hmac.compare_digest(want_mac, got_mac.encode('ascii')):
+        if not hmac.compare_digest(
+                want_mac.encode('ascii'),
+                got_mac.encode('ascii')
+        ):
             raise SynapseError(403, "HMAC incorrect")
 
         # Reuse the parts of RegisterRestServlet to reduce code duplication
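
Note: the extra encode here matters because, on Python 3, hmac.compare_digest requires both arguments to be of the same type (both bytes, or both ASCII str); comparing a str digest against encoded client input raises TypeError. A tiny sketch (the secret and message are placeholders):

    import hashlib
    import hmac

    secret = b"registration-shared-secret"    # placeholder value
    want_mac = hmac.new(secret, b"nonce\x00user", hashlib.sha1).hexdigest()
    got_mac = want_mac                         # pretend the client sent this

    # Mixing str and bytes here would raise TypeError on Python 3:
    assert hmac.compare_digest(want_mac.encode("ascii"), got_mac.encode("ascii"))
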
diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py
index 0f3a2e8b51..cd9b3bdbd1 100644
--- a/synapse/rest/client/v1/events.py
+++ b/synapse/rest/client/v1/events.py
@@ -45,20 +45,20 @@ class EventStreamRestServlet(ClientV1RestServlet):
         is_guest = requester.is_guest
         room_id = None
         if is_guest:
-            if "room_id" not in request.args:
+            if b"room_id" not in request.args:
                 raise SynapseError(400, "Guest users must specify room_id param")
-        if "room_id" in request.args:
-            room_id = request.args["room_id"][0]
+        if b"room_id" in request.args:
+            room_id = request.args[b"room_id"][0].decode('ascii')
 
         pagin_config = PaginationConfig.from_request(request)
         timeout = EventStreamRestServlet.DEFAULT_LONGPOLL_TIME_MS
-        if "timeout" in request.args:
+        if b"timeout" in request.args:
             try:
-                timeout = int(request.args["timeout"][0])
+                timeout = int(request.args[b"timeout"][0])
             except ValueError:
                 raise SynapseError(400, "timeout must be in milliseconds.")
 
-        as_client_event = "raw" not in request.args
+        as_client_event = b"raw" not in request.args
 
         chunk = yield self.event_stream_handler.get_stream(
             requester.user.to_string(),
diff --git a/synapse/rest/client/v1/initial_sync.py b/synapse/rest/client/v1/initial_sync.py
index fd5f85b53e..3ead75cb77 100644
--- a/synapse/rest/client/v1/initial_sync.py
+++ b/synapse/rest/client/v1/initial_sync.py
@@ -32,7 +32,7 @@ class InitialSyncRestServlet(ClientV1RestServlet):
     @defer.inlineCallbacks
     def on_GET(self, request):
         requester = yield self.auth.get_user_by_req(request)
-        as_client_event = "raw" not in request.args
+        as_client_event = b"raw" not in request.args
         pagination_config = PaginationConfig.from_request(request)
         include_archived = parse_boolean(request, "archived", default=False)
         content = yield self.initial_sync_handler.snapshot_all_rooms(
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index cb85fa1436..0010699d31 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -14,10 +14,9 @@
 # limitations under the License.
 
 import logging
-import urllib
 import xml.etree.ElementTree as ET
 
-from six.moves.urllib import parse as urlparse
+from six.moves import urllib
 
 from canonicaljson import json
 from saml2 import BINDING_HTTP_POST, config
@@ -134,7 +133,7 @@ class LoginRestServlet(ClientV1RestServlet):
                                        LoginRestServlet.SAML2_TYPE):
                 relay_state = ""
                 if "relay_state" in login_submission:
-                    relay_state = "&RelayState=" + urllib.quote(
+                    relay_state = "&RelayState=" + urllib.parse.quote(
                                   login_submission["relay_state"])
                 result = {
                     "uri": "%s%s" % (self.idp_redirect_url, relay_state)
@@ -366,7 +365,7 @@ class SAML2RestServlet(ClientV1RestServlet):
             (user_id, token) = yield handler.register_saml2(username)
             # Forward to the RelayState callback along with ava
             if 'RelayState' in request.args:
-                request.redirect(urllib.unquote(
+                request.redirect(urllib.parse.unquote(
                                  request.args['RelayState'][0]) +
                                  '?status=authenticated&access_token=' +
                                  token + '&user_id=' + user_id + '&ava=' +
@@ -377,7 +376,7 @@ class SAML2RestServlet(ClientV1RestServlet):
                                      "user_id": user_id, "token": token,
                                      "ava": saml2_auth.ava}))
         elif 'RelayState' in request.args:
-            request.redirect(urllib.unquote(
+            request.redirect(urllib.parse.unquote(
                              request.args['RelayState'][0]) +
                              '?status=not_authenticated')
             finish_request(request)
@@ -390,21 +389,22 @@ class CasRedirectServlet(ClientV1RestServlet):
 
     def __init__(self, hs):
         super(CasRedirectServlet, self).__init__(hs)
-        self.cas_server_url = hs.config.cas_server_url
-        self.cas_service_url = hs.config.cas_service_url
+        self.cas_server_url = hs.config.cas_server_url.encode('ascii')
+        self.cas_service_url = hs.config.cas_service_url.encode('ascii')
 
     def on_GET(self, request):
         args = request.args
-        if "redirectUrl" not in args:
+        if b"redirectUrl" not in args:
             return (400, "Redirect URL not specified for CAS auth")
-        client_redirect_url_param = urllib.urlencode({
-            "redirectUrl": args["redirectUrl"][0]
-        })
-        hs_redirect_url = self.cas_service_url + "/_matrix/client/api/v1/login/cas/ticket"
-        service_param = urllib.urlencode({
-            "service": "%s?%s" % (hs_redirect_url, client_redirect_url_param)
-        })
-        request.redirect("%s/login?%s" % (self.cas_server_url, service_param))
+        client_redirect_url_param = urllib.parse.urlencode({
+            b"redirectUrl": args[b"redirectUrl"][0]
+        }).encode('ascii')
+        hs_redirect_url = (self.cas_service_url +
+                           b"/_matrix/client/api/v1/login/cas/ticket")
+        service_param = urllib.parse.urlencode({
+            b"service": b"%s?%s" % (hs_redirect_url, client_redirect_url_param)
+        }).encode('ascii')
+        request.redirect(b"%s/login?%s" % (self.cas_server_url, service_param))
         finish_request(request)
 
 
@@ -422,11 +422,11 @@ class CasTicketServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request):
-        client_redirect_url = request.args["redirectUrl"][0]
+        client_redirect_url = request.args[b"redirectUrl"][0]
         http_client = self.hs.get_simple_http_client()
         uri = self.cas_server_url + "/proxyValidate"
         args = {
-            "ticket": request.args["ticket"],
+            "ticket": request.args[b"ticket"][0].decode('ascii'),
             "service": self.cas_service_url
         }
         try:
@@ -471,11 +471,11 @@ class CasTicketServlet(ClientV1RestServlet):
         finish_request(request)
 
     def add_login_token_to_redirect_url(self, url, token):
-        url_parts = list(urlparse.urlparse(url))
-        query = dict(urlparse.parse_qsl(url_parts[4]))
+        url_parts = list(urllib.parse.urlparse(url))
+        query = dict(urllib.parse.parse_qsl(url_parts[4]))
         query.update({"loginToken": token})
-        url_parts[4] = urllib.urlencode(query)
-        return urlparse.urlunparse(url_parts)
+        url_parts[4] = urllib.parse.urlencode(query).encode('ascii')
+        return urllib.parse.urlunparse(url_parts)
 
     def parse_cas_response(self, cas_response_body):
         user = None
diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py
index 6e95d9bec2..9382b1f124 100644
--- a/synapse/rest/client/v1/push_rule.py
+++ b/synapse/rest/client/v1/push_rule.py
@@ -46,7 +46,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
         try:
             priority_class = _priority_class_from_spec(spec)
         except InvalidRuleException as e:
-            raise SynapseError(400, e.message)
+            raise SynapseError(400, str(e))
 
         requester = yield self.auth.get_user_by_req(request)
 
@@ -73,7 +73,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
                 content,
             )
         except InvalidRuleException as e:
-            raise SynapseError(400, e.message)
+            raise SynapseError(400, str(e))
 
         before = parse_string(request, "before")
         if before:
@@ -95,9 +95,9 @@ class PushRuleRestServlet(ClientV1RestServlet):
             )
             self.notify_user(user_id)
         except InconsistentRuleException as e:
-            raise SynapseError(400, e.message)
+            raise SynapseError(400, str(e))
         except RuleNotFoundException as e:
-            raise SynapseError(400, e.message)
+            raise SynapseError(400, str(e))
 
         defer.returnValue((200, {}))
 
@@ -142,10 +142,10 @@ class PushRuleRestServlet(ClientV1RestServlet):
                 PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR
             )
 
-        if path[0] == '':
+        if path[0] == b'':
             defer.returnValue((200, rules))
-        elif path[0] == 'global':
-            path = path[1:]
+        elif path[0] == b'global':
+            path = [x.decode('ascii') for x in path[1:]]
             result = _filter_ruleset_with_path(rules['global'], path)
             defer.returnValue((200, result))
         else:
@@ -192,10 +192,10 @@ class PushRuleRestServlet(ClientV1RestServlet):
 def _rule_spec_from_path(path):
     if len(path) < 2:
         raise UnrecognizedRequestError()
-    if path[0] != 'pushrules':
+    if path[0] != b'pushrules':
         raise UnrecognizedRequestError()
 
-    scope = path[1]
+    scope = path[1].decode('ascii')
     path = path[2:]
     if scope != 'global':
         raise UnrecognizedRequestError()
@@ -203,13 +203,13 @@ def _rule_spec_from_path(path):
     if len(path) == 0:
         raise UnrecognizedRequestError()
 
-    template = path[0]
+    template = path[0].decode('ascii')
     path = path[1:]
 
     if len(path) == 0 or len(path[0]) == 0:
         raise UnrecognizedRequestError()
 
-    rule_id = path[0]
+    rule_id = path[0].decode('ascii')
 
     spec = {
         'scope': scope,
@@ -220,7 +220,7 @@ def _rule_spec_from_path(path):
     path = path[1:]
 
     if len(path) > 0 and len(path[0]) > 0:
-        spec['attr'] = path[0]
+        spec['attr'] = path[0].decode('ascii')
 
     return spec
 
diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py
index 182a68b1e2..b84f0260f2 100644
--- a/synapse/rest/client/v1/pusher.py
+++ b/synapse/rest/client/v1/pusher.py
@@ -59,7 +59,7 @@ class PushersRestServlet(ClientV1RestServlet):
         ]
 
         for p in pushers:
-            for k, v in p.items():
+            for k, v in list(p.items()):
                 if k not in allowed_keys:
                     del p[k]
 
@@ -126,7 +126,7 @@ class PushersSetRestServlet(ClientV1RestServlet):
                 profile_tag=content.get('profile_tag', ""),
             )
         except PusherConfigException as pce:
-            raise SynapseError(400, "Config Error: " + pce.message,
+            raise SynapseError(400, "Config Error: " + str(pce),
                                errcode=Codes.MISSING_PARAM)
 
         self.notifier.on_new_replication_data()
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index 976d98387d..663934efd0 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -207,7 +207,7 @@ class RoomSendEventRestServlet(ClientV1RestServlet):
             "sender": requester.user.to_string(),
         }
 
-        if 'ts' in request.args and requester.app_service:
+        if b'ts' in request.args and requester.app_service:
             event_dict['origin_server_ts'] = parse_integer(request, "ts", 0)
 
         event = yield self.event_creation_hander.create_and_send_nonmember_event(
@@ -255,7 +255,9 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
         if RoomID.is_valid(room_identifier):
             room_id = room_identifier
             try:
-                remote_room_hosts = request.args["server_name"]
+                remote_room_hosts = [
+                    x.decode('ascii') for x in request.args[b"server_name"]
+                ]
             except Exception:
                 remote_room_hosts = None
         elif RoomAlias.is_valid(room_identifier):
@@ -461,10 +463,10 @@ class RoomMessageListRestServlet(ClientV1RestServlet):
         pagination_config = PaginationConfig.from_request(
             request, default_limit=10,
         )
-        as_client_event = "raw" not in request.args
-        filter_bytes = parse_string(request, "filter")
+        as_client_event = b"raw" not in request.args
+        filter_bytes = parse_string(request, b"filter", encoding=None)
         if filter_bytes:
-            filter_json = urlparse.unquote(filter_bytes).decode("UTF-8")
+            filter_json = urlparse.unquote(filter_bytes.decode("UTF-8"))
             event_filter = Filter(json.loads(filter_json))
         else:
             event_filter = None
@@ -560,7 +562,7 @@ class RoomEventContextServlet(ClientV1RestServlet):
         # picking the API shape for symmetry with /messages
         filter_bytes = parse_string(request, "filter")
         if filter_bytes:
-            filter_json = urlparse.unquote(filter_bytes).decode("UTF-8")
+            filter_json = urlparse.unquote(filter_bytes)
             event_filter = Filter(json.loads(filter_json))
         else:
             event_filter = None
diff --git a/synapse/rest/client/v1/voip.py b/synapse/rest/client/v1/voip.py
index 62f4c3d93e..53da905eea 100644
--- a/synapse/rest/client/v1/voip.py
+++ b/synapse/rest/client/v1/voip.py
@@ -42,7 +42,11 @@ class VoipRestServlet(ClientV1RestServlet):
             expiry = (self.hs.get_clock().time_msec() + userLifetime) / 1000
             username = "%d:%s" % (expiry, requester.user.to_string())
 
-            mac = hmac.new(turnSecret, msg=username, digestmod=hashlib.sha1)
+            mac = hmac.new(
+                turnSecret.encode(),
+                msg=username.encode(),
+                digestmod=hashlib.sha1
+            )
             # We need to use standard padded base64 encoding here
             # encode_base64 because we need to add the standard padding to get the
             # same result as the TURN server.
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 263d8eb73e..0251146722 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -89,7 +89,7 @@ class SyncRestServlet(RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request):
-        if "from" in request.args:
+        if b"from" in request.args:
             # /events used to use 'from', but /sync uses 'since'.
             # Lets be helpful and whine if we see a 'from'.
             raise SynapseError(
diff --git a/synapse/rest/client/v2_alpha/thirdparty.py b/synapse/rest/client/v2_alpha/thirdparty.py
index d9d379182e..b9b5d07677 100644
--- a/synapse/rest/client/v2_alpha/thirdparty.py
+++ b/synapse/rest/client/v2_alpha/thirdparty.py
@@ -79,7 +79,7 @@ class ThirdPartyUserServlet(RestServlet):
         yield self.auth.get_user_by_req(request, allow_guest=True)
 
         fields = request.args
-        fields.pop("access_token", None)
+        fields.pop(b"access_token", None)
 
         results = yield self.appservice_handler.query_3pe(
             ThirdPartyEntityKind.USER, protocol, fields
@@ -102,7 +102,7 @@ class ThirdPartyLocationServlet(RestServlet):
         yield self.auth.get_user_by_req(request, allow_guest=True)
 
         fields = request.args
-        fields.pop("access_token", None)
+        fields.pop(b"access_token", None)
 
         results = yield self.appservice_handler.query_3pe(
             ThirdPartyEntityKind.LOCATION, protocol, fields
diff --git a/synapse/rest/key/v1/server_key_resource.py b/synapse/rest/key/v1/server_key_resource.py
index b9ee6e1c13..38eb2ee23f 100644
--- a/synapse/rest/key/v1/server_key_resource.py
+++ b/synapse/rest/key/v1/server_key_resource.py
@@ -88,5 +88,5 @@ class LocalKey(Resource):
         )
 
     def getChild(self, name, request):
-        if name == '':
+        if name == b'':
             return self
diff --git a/synapse/rest/key/v2/__init__.py b/synapse/rest/key/v2/__init__.py
index 3491fd2118..cb5abcf826 100644
--- a/synapse/rest/key/v2/__init__.py
+++ b/synapse/rest/key/v2/__init__.py
@@ -22,5 +22,5 @@ from .remote_key_resource import RemoteKey
 class KeyApiV2Resource(Resource):
     def __init__(self, hs):
         Resource.__init__(self)
-        self.putChild("server", LocalKey(hs))
-        self.putChild("query", RemoteKey(hs))
+        self.putChild(b"server", LocalKey(hs))
+        self.putChild(b"query", RemoteKey(hs))
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index 7d67e4b064..eb8782aa6e 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -103,7 +103,7 @@ class RemoteKey(Resource):
     def async_render_GET(self, request):
         if len(request.postpath) == 1:
             server, = request.postpath
-            query = {server: {}}
+            query = {server.decode('ascii'): {}}
         elif len(request.postpath) == 2:
             server, key_id = request.postpath
             minimum_valid_until_ts = parse_integer(
@@ -112,11 +112,12 @@ class RemoteKey(Resource):
             arguments = {}
             if minimum_valid_until_ts is not None:
                 arguments["minimum_valid_until_ts"] = minimum_valid_until_ts
-            query = {server: {key_id: arguments}}
+            query = {server.decode('ascii'): {key_id.decode('ascii'): arguments}}
         else:
             raise SynapseError(
                 404, "Not found %r" % request.postpath, Codes.NOT_FOUND
             )
+
         yield self.query_keys(request, query, query_remote_on_cache_miss=True)
 
     def render_POST(self, request):
@@ -135,6 +136,7 @@ class RemoteKey(Resource):
     @defer.inlineCallbacks
     def query_keys(self, request, query, query_remote_on_cache_miss=False):
         logger.info("Handling query for keys %r", query)
+
         store_queries = []
         for server_name, key_ids in query.items():
             if (
diff --git a/synapse/rest/media/v0/content_repository.py b/synapse/rest/media/v0/content_repository.py
index f255f2883f..5a426ff2f6 100644
--- a/synapse/rest/media/v0/content_repository.py
+++ b/synapse/rest/media/v0/content_repository.py
@@ -56,7 +56,7 @@ class ContentRepoResource(resource.Resource):
         # servers.
 
         # TODO: A little crude here, we could do this better.
-        filename = request.path.split('/')[-1]
+        filename = request.path.decode('ascii').split('/')[-1]
         # be paranoid
         filename = re.sub("[^0-9A-z.-_]", "", filename)
 
@@ -78,7 +78,7 @@ class ContentRepoResource(resource.Resource):
             # select private. don't bother setting Expires as all our matrix
             # clients are smart enough to be happy with Cache-Control (right?)
             request.setHeader(
-                "Cache-Control", "public,max-age=86400,s-maxage=86400"
+                b"Cache-Control", b"public,max-age=86400,s-maxage=86400"
             )
 
             d = FileSender().beginFileTransfer(f, request)
diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py
index 65f4bd2910..76e479afa3 100644
--- a/synapse/rest/media/v1/_base.py
+++ b/synapse/rest/media/v1/_base.py
@@ -15,9 +15,8 @@
 
 import logging
 import os
-import urllib
 
-from six.moves.urllib import parse as urlparse
+from six.moves import urllib
 
 from twisted.internet import defer
 from twisted.protocols.basic import FileSender
@@ -35,10 +34,15 @@ def parse_media_id(request):
         # This allows users to append e.g. /test.png to the URL. Useful for
         # clients that parse the URL to see content type.
         server_name, media_id = request.postpath[:2]
+
+        if isinstance(server_name, bytes):
+            server_name = server_name.decode('utf-8')
+            media_id = media_id.decode('utf8')
+
         file_name = None
         if len(request.postpath) > 2:
             try:
-                file_name = urlparse.unquote(request.postpath[-1]).decode("utf-8")
+                file_name = urllib.parse.unquote(request.postpath[-1].decode("utf-8"))
             except UnicodeDecodeError:
                 pass
         return server_name, media_id, file_name
@@ -93,22 +97,18 @@ def add_file_headers(request, media_type, file_size, upload_name):
         file_size (int): Size in bytes of the media, if known.
         upload_name (str): The name of the requested file, if any.
     """
+    def _quote(x):
+        return urllib.parse.quote(x.encode("utf-8"))
+
     request.setHeader(b"Content-Type", media_type.encode("UTF-8"))
     if upload_name:
         if is_ascii(upload_name):
-            request.setHeader(
-                b"Content-Disposition",
-                b"inline; filename=%s" % (
-                    urllib.quote(upload_name.encode("utf-8")),
-                ),
-            )
+            disposition = ("inline; filename=%s" % (_quote(upload_name),)).encode("ascii")
         else:
-            request.setHeader(
-                b"Content-Disposition",
-                b"inline; filename*=utf-8''%s" % (
-                    urllib.quote(upload_name.encode("utf-8")),
-                ),
-            )
+            disposition = (
+                "inline; filename*=utf-8''%s" % (_quote(upload_name),)).encode("ascii")
+
+        request.setHeader(b"Content-Disposition", disposition)
 
     # cache for at least a day.
     # XXX: we might want to turn this off for data we don't want to
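The rewritten header logic above builds the Content-Disposition value as text and encodes it once: the plain filename= form when the upload name is ASCII, and the RFC 5987 filename*=utf-8''... form otherwise. A Python 3-only sketch of the two shapes (the helper mirrors the _quote pattern above but is not the Synapse function, and str.isascii() stands in for is_ascii()):

import urllib.parse


def _quote(x):
    # percent-encode a text filename as UTF-8
    return urllib.parse.quote(x.encode("utf-8"))


def content_disposition(upload_name):
    if upload_name.isascii():
        value = "inline; filename=%s" % (_quote(upload_name),)
    else:
        value = "inline; filename*=utf-8''%s" % (_quote(upload_name),)
    return value.encode("ascii")


print(content_disposition("cat.png"))         # b'inline; filename=cat.png'
print(content_disposition(u"caf\u00e9.png"))  # b"inline; filename*=utf-8''caf%C3%A9.png"
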
diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py
index fbfa85f74f..ca90964d1d 100644
--- a/synapse/rest/media/v1/download_resource.py
+++ b/synapse/rest/media/v1/download_resource.py
@@ -47,12 +47,12 @@ class DownloadResource(Resource):
     def _async_render_GET(self, request):
         set_cors_headers(request)
         request.setHeader(
-            "Content-Security-Policy",
-            "default-src 'none';"
-            " script-src 'none';"
-            " plugin-types application/pdf;"
-            " style-src 'unsafe-inline';"
-            " object-src 'self';"
+            b"Content-Security-Policy",
+            b"default-src 'none';"
+            b" script-src 'none';"
+            b" plugin-types application/pdf;"
+            b" style-src 'unsafe-inline';"
+            b" object-src 'self';"
         )
         server_name, media_id, name = parse_media_id(request)
         if server_name == self.server_name:
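Only the literal prefixes change in this hunk: the adjacent bytes literals are concatenated at compile time, so the Content-Security-Policy header is still sent as a single value. For illustration:

csp = (
    b"default-src 'none';"
    b" script-src 'none';"
    b" plugin-types application/pdf;"
    b" style-src 'unsafe-inline';"
    b" object-src 'self';"
)
assert csp.count(b";") == 5 and csp.startswith(b"default-src")
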
diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index 241c972070..a828ff4438 100644
--- a/synapse/rest/media/v1/media_repository.py
+++ b/synapse/rest/media/v1/media_repository.py
@@ -20,7 +20,7 @@ import logging
 import os
 import shutil
 
-from six import iteritems
+from six import PY3, iteritems
 from six.moves.urllib import parse as urlparse
 
 import twisted.internet.error
@@ -397,13 +397,13 @@ class MediaRepository(object):
 
             yield finish()
 
-        media_type = headers["Content-Type"][0]
+        media_type = headers[b"Content-Type"][0].decode('ascii')
 
         time_now_ms = self.clock.time_msec()
 
-        content_disposition = headers.get("Content-Disposition", None)
+        content_disposition = headers.get(b"Content-Disposition", None)
         if content_disposition:
-            _, params = cgi.parse_header(content_disposition[0],)
+            _, params = cgi.parse_header(content_disposition[0].decode('ascii'),)
             upload_name = None
 
             # First check if there is a valid UTF-8 filename
@@ -419,9 +419,13 @@ class MediaRepository(object):
                     upload_name = upload_name_ascii
 
             if upload_name:
-                upload_name = urlparse.unquote(upload_name)
+                if PY3:
+                    upload_name = urlparse.unquote(upload_name)
+                else:
+                    upload_name = urlparse.unquote(upload_name.encode('ascii'))
                 try:
-                    upload_name = upload_name.decode("utf-8")
+                    if isinstance(upload_name, bytes):
+                        upload_name = upload_name.decode("utf-8")
                 except UnicodeDecodeError:
                     upload_name = None
         else:
@@ -755,14 +759,15 @@ class MediaRepositoryResource(Resource):
         Resource.__init__(self)
 
         media_repo = hs.get_media_repository()
-        self.putChild("upload", UploadResource(hs, media_repo))
-        self.putChild("download", DownloadResource(hs, media_repo))
-        self.putChild("thumbnail", ThumbnailResource(
+
+        self.putChild(b"upload", UploadResource(hs, media_repo))
+        self.putChild(b"download", DownloadResource(hs, media_repo))
+        self.putChild(b"thumbnail", ThumbnailResource(
             hs, media_repo, media_repo.media_storage,
         ))
-        self.putChild("identicon", IdenticonResource())
+        self.putChild(b"identicon", IdenticonResource())
         if hs.config.url_preview_enabled:
-            self.putChild("preview_url", PreviewUrlResource(
+            self.putChild(b"preview_url", PreviewUrlResource(
                 hs, media_repo, media_repo.media_storage,
             ))
-        self.putChild("config", MediaConfigResource(hs))
+        self.putChild(b"config", MediaConfigResource(hs))
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index 778ef97337..cad2dec33a 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -261,7 +261,7 @@ class PreviewUrlResource(Resource):
 
         logger.debug("Calculated OG for %s as %s" % (url, og))
 
-        jsonog = json.dumps(og)
+        jsonog = json.dumps(og).encode('utf8')
 
         # store OG in history-aware DB cache
         yield self.store.store_url_cache(
@@ -301,20 +301,20 @@ class PreviewUrlResource(Resource):
                 logger.warn("Error downloading %s: %r", url, e)
                 raise SynapseError(
                     500, "Failed to download content: %s" % (
-                        traceback.format_exception_only(sys.exc_type, e),
+                        traceback.format_exception_only(sys.exc_info()[0], e),
                     ),
                     Codes.UNKNOWN,
                 )
             yield finish()
 
         try:
-            if "Content-Type" in headers:
-                media_type = headers["Content-Type"][0]
+            if b"Content-Type" in headers:
+                media_type = headers[b"Content-Type"][0].decode('ascii')
             else:
                 media_type = "application/octet-stream"
             time_now_ms = self.clock.time_msec()
 
-            content_disposition = headers.get("Content-Disposition", None)
+            content_disposition = headers.get(b"Content-Disposition", None)
             if content_disposition:
                 _, params = cgi.parse_header(content_disposition[0],)
                 download_name = None
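The sys.exc_type fix above is a straight Python 3 compatibility change: sys.exc_type was a Python 2-only alias and no longer exists, while sys.exc_info() returns the (type, value, traceback) triple inside an except block. A tiny sketch:

import sys
import traceback

try:
    raise ValueError("boom")
except Exception as e:
    exc_type = sys.exc_info()[0]
    print(traceback.format_exception_only(exc_type, e))  # ['ValueError: boom\n']
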
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index 8bf87f38f7..30ff87a4c4 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -930,6 +930,10 @@ class EventsStore(EventFederationStore, EventsWorkerStore, BackgroundUpdateStore
                 )
 
                 self._invalidate_cache_and_stream(
+                    txn, self.get_room_summary, (room_id,)
+                )
+
+                self._invalidate_cache_and_stream(
                     txn, self.get_current_state_ids, (room_id,)
                 )
 
diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py
index f547977600..a1331c1a61 100644
--- a/synapse/storage/keys.py
+++ b/synapse/storage/keys.py
@@ -134,6 +134,7 @@ class KeyStore(SQLBaseStore):
         """
         key_id = "%s:%s" % (verify_key.alg, verify_key.version)
 
+        # XXX fix this to not need a lock (#3819)
         def _txn(txn):
             self._simple_upsert_txn(
                 txn,
diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py
index 9b4e6d6aa8..0707f9a86a 100644
--- a/synapse/storage/roommember.py
+++ b/synapse/storage/roommember.py
@@ -51,6 +51,12 @@ ProfileInfo = namedtuple(
     "ProfileInfo", ("avatar_url", "display_name")
 )
 
+# "members" points to a truncated list of (user_id, event_id) tuples for users of
+# a given membership type, suitable for use in calculating heroes for a room.
+# "count" points to the total numberr of users of a given membership type.
+MemberSummary = namedtuple(
+    "MemberSummary", ("members", "count")
+)
 
 _MEMBERSHIP_PROFILE_UPDATE_NAME = "room_membership_profile_update"
 
@@ -82,6 +88,65 @@ class RoomMemberWorkerStore(EventsWorkerStore):
             return [to_ascii(r[0]) for r in txn]
         return self.runInteraction("get_users_in_room", f)
 
+    @cached(max_entries=100000)
+    def get_room_summary(self, room_id):
+        """ Get the details of a room roughly suitable for use by the room
+        summary extension to /sync. Useful when lazy loading room members.
+        Args:
+            room_id (str): The room ID to query
+        Returns:
+            Deferred[dict[str, MemberSummary]]:
+                dict of membership states, pointing to a MemberSummary named tuple.
+        """
+
+        def _get_room_summary_txn(txn):
+            # first get counts.
+            # We do this all in one transaction to keep the cache small.
+            # FIXME: get rid of this when we have room_stats
+            sql = """
+                SELECT count(*), m.membership FROM room_memberships as m
+                 INNER JOIN current_state_events as c
+                 ON m.event_id = c.event_id
+                 AND m.room_id = c.room_id
+                 AND m.user_id = c.state_key
+                 WHERE c.type = 'm.room.member' AND c.room_id = ?
+                 GROUP BY m.membership
+            """
+
+            txn.execute(sql, (room_id,))
+            res = {}
+            for count, membership in txn:
+                summary = res.setdefault(to_ascii(membership), MemberSummary([], count))
+
+            # we order by membership and then fairly arbitrarily by event_id so
+            # heroes are consistent
+            sql = """
+                SELECT m.user_id, m.membership, m.event_id
+                FROM room_memberships as m
+                 INNER JOIN current_state_events as c
+                 ON m.event_id = c.event_id
+                 AND m.room_id = c.room_id
+                 AND m.user_id = c.state_key
+                 WHERE c.type = 'm.room.member' AND c.room_id = ?
+                 ORDER BY
+                    CASE m.membership WHEN ? THEN 1 WHEN ? THEN 2 ELSE 3 END ASC,
+                    m.event_id ASC
+                 LIMIT ?
+            """
+
+            # 6 is 5 (number of heroes) plus 1, in case one of them is the calling user.
+            txn.execute(sql, (room_id, Membership.JOIN, Membership.INVITE, 6))
+            for user_id, membership, event_id in txn:
+                summary = res[to_ascii(membership)]
+                # we will always have a summary for this membership type at this
+                # point given the summary currently contains the counts.
+                members = summary.members
+                members.append((to_ascii(user_id), to_ascii(event_id)))
+
+            return res
+
+        return self.runInteraction("get_room_summary", _get_room_summary_txn)
+
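To make the shape of the new summary concrete, here is a hedged sketch of how a consumer such as the sync handler might pick "heroes" from a get_room_summary() result: joined members first, then invited, skipping the requesting user and capping at five (which is why the query above fetches six rows). This mirrors the intent of the comments above, not the exact code in synapse/handlers/sync.py.

from collections import namedtuple

MemberSummary = namedtuple("MemberSummary", ("members", "count"))


def pick_heroes(summary, me, limit=5):
    heroes = []
    # joined members first, then invited, in the order the query returned them
    for membership in ("join", "invite"):
        for user_id, _event_id in summary.get(membership, MemberSummary([], 0)).members:
            if user_id == me:
                continue  # the extra sixth row covers exactly this case
            heroes.append(user_id)
            if len(heroes) >= limit:
                return heroes
    return heroes


summary = {
    "join": MemberSummary([("@me:hs", "$a"), ("@bob:hs", "$b")], 2),
    "invite": MemberSummary([("@eve:hs", "$c")], 1),
}
print(pick_heroes(summary, "@me:hs"))  # ['@bob:hs', '@eve:hs']
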
     @cached()
     def get_invited_rooms_for_user(self, user_id):
         """ Get all the rooms the user is invited to