Diffstat (limited to 'synapse')
-rwxr-xr-x  synapse/app/homeserver.py                      3
-rw-r--r--  synapse/config/ldap.py                       102
-rw-r--r--  synapse/config/server.py                      20
-rw-r--r--  synapse/federation/federation_server.py      143
-rw-r--r--  synapse/federation/transport/server.py         2
-rw-r--r--  synapse/handlers/auth.py                     203
-rw-r--r--  synapse/python_dependencies.py                 3
-rw-r--r--  synapse/rest/media/v0/content_repository.py  112
-rw-r--r--  synapse/storage/event_push_actions.py         19
9 files changed, 344 insertions, 263 deletions
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 40ffd9bf0d..9c2dd32953 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -147,7 +147,7 @@ class SynapseHomeServer(HomeServer):
                         MEDIA_PREFIX: media_repo,
                         LEGACY_MEDIA_PREFIX: media_repo,
                         CONTENT_REPO_PREFIX: ContentRepoResource(
-                            self, self.config.uploads_path, self.auth, self.content_addr
+                            self, self.config.uploads_path
                         ),
                     })
 
@@ -301,7 +301,6 @@ def setup(config_options):
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
         config=config,
-        content_addr=config.content_addr,
         version_string=version_string,
         database_engine=database_engine,
     )
diff --git a/synapse/config/ldap.py b/synapse/config/ldap.py
index 9c14593a99..d83c2230be 100644
--- a/synapse/config/ldap.py
+++ b/synapse/config/ldap.py
@@ -13,40 +13,88 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from ._base import Config
+from ._base import Config, ConfigError
+
+
+MISSING_LDAP3 = (
+    "Missing ldap3 library. This is required for LDAP Authentication."
+)
+
+
+class LDAPMode(object):
+    SIMPLE = "simple"
+    SEARCH = "search"
+
+    LIST = (SIMPLE, SEARCH)
 
 
 class LDAPConfig(Config):
     def read_config(self, config):
-        ldap_config = config.get("ldap_config", None)
-        if ldap_config:
-            self.ldap_enabled = ldap_config.get("enabled", False)
-            self.ldap_server = ldap_config["server"]
-            self.ldap_port = ldap_config["port"]
-            self.ldap_tls = ldap_config.get("tls", False)
-            self.ldap_search_base = ldap_config["search_base"]
-            self.ldap_search_property = ldap_config["search_property"]
-            self.ldap_email_property = ldap_config["email_property"]
-            self.ldap_full_name_property = ldap_config["full_name_property"]
-        else:
-            self.ldap_enabled = False
-            self.ldap_server = None
-            self.ldap_port = None
-            self.ldap_tls = False
-            self.ldap_search_base = None
-            self.ldap_search_property = None
-            self.ldap_email_property = None
-            self.ldap_full_name_property = None
+        ldap_config = config.get("ldap_config", {})
+
+        self.ldap_enabled = ldap_config.get("enabled", False)
+
+        if self.ldap_enabled:
+            # verify dependencies are available
+            try:
+                import ldap3
+                ldap3  # to stop unused lint
+            except ImportError:
+                raise ConfigError(MISSING_LDAP3)
+
+            self.ldap_mode = LDAPMode.SIMPLE
+
+            # verify config sanity
+            self.require_keys(ldap_config, [
+                "uri",
+                "base",
+                "attributes",
+            ])
+
+            self.ldap_uri = ldap_config["uri"]
+            self.ldap_start_tls = ldap_config.get("start_tls", False)
+            self.ldap_base = ldap_config["base"]
+            self.ldap_attributes = ldap_config["attributes"]
+
+            if "bind_dn" in ldap_config:
+                self.ldap_mode = LDAPMode.SEARCH
+                self.require_keys(ldap_config, [
+                    "bind_dn",
+                    "bind_password",
+                ])
+
+                self.ldap_bind_dn = ldap_config["bind_dn"]
+                self.ldap_bind_password = ldap_config["bind_password"]
+                self.ldap_filter = ldap_config.get("filter", None)
+
+            # verify attribute lookup
+            self.require_keys(ldap_config['attributes'], [
+                "uid",
+                "name",
+                "mail",
+            ])
+
+    def require_keys(self, config, required):
+        missing = [key for key in required if key not in config]
+        if missing:
+            raise ConfigError(
+                "LDAP enabled but missing required config values: {}".format(
+                    ", ".join(missing)
+                )
+            )
 
     def default_config(self, **kwargs):
         return """\
         # ldap_config:
         #   enabled: true
-        #   server: "ldap://localhost"
-        #   port: 389
-        #   tls: false
-        #   search_base: "ou=Users,dc=example,dc=com"
-        #   search_property: "cn"
-        #   email_property: "email"
-        #   full_name_property: "givenName"
+        #   uri: "ldap://ldap.example.com:389"
+        #   start_tls: true
+        #   base: "ou=users,dc=example,dc=com"
+        #   attributes:
+        #      uid: "cn"
+        #      mail: "email"
+        #      name: "givenName"
+        #   #bind_dn:
+        #   #bind_password:
+        #   #filter: "(objectClass=posixAccount)"
         """
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 7840dc3ad6..d7e6f20518 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -107,26 +107,6 @@ class ServerConfig(Config):
                 ]
             })
 
-        # Attempt to guess the content_addr for the v0 content repostitory
-        content_addr = config.get("content_addr")
-        if not content_addr:
-            for listener in self.listeners:
-                if listener["type"] == "http" and not listener.get("tls", False):
-                    unsecure_port = listener["port"]
-                    break
-            else:
-                raise RuntimeError("Could not determine 'content_addr'")
-
-            host = self.server_name
-            if ':' not in host:
-                host = "%s:%d" % (host, unsecure_port)
-            else:
-                host = host.split(':')[0]
-                host = "%s:%d" % (host, unsecure_port)
-            content_addr = "http://%s" % (host,)
-
-        self.content_addr = content_addr
-
     def default_config(self, server_name, **kwargs):
         if ":" in server_name:
             bind_port = int(server_name.split(":")[1])
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 2a589524a4..85f5e752fe 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -49,6 +49,7 @@ class FederationServer(FederationBase):
         super(FederationServer, self).__init__(hs)
 
         self._room_pdu_linearizer = Linearizer()
+        self._server_linearizer = Linearizer()
 
     def set_handler(self, handler):
         """Sets the handler that the replication layer will use to communicate
@@ -89,11 +90,14 @@ class FederationServer(FederationBase):
     @defer.inlineCallbacks
     @log_function
     def on_backfill_request(self, origin, room_id, versions, limit):
-        pdus = yield self.handler.on_backfill_request(
-            origin, room_id, versions, limit
-        )
+        with (yield self._server_linearizer.queue((origin, room_id))):
+            pdus = yield self.handler.on_backfill_request(
+                origin, room_id, versions, limit
+            )
+
+            res = self._transaction_from_pdus(pdus).get_dict()
 
-        defer.returnValue((200, self._transaction_from_pdus(pdus).get_dict()))
+        defer.returnValue((200, res))
 
     @defer.inlineCallbacks
     @log_function
@@ -184,27 +188,28 @@ class FederationServer(FederationBase):
     @defer.inlineCallbacks
     @log_function
     def on_context_state_request(self, origin, room_id, event_id):
-        if event_id:
-            pdus = yield self.handler.get_state_for_pdu(
-                origin, room_id, event_id,
-            )
-            auth_chain = yield self.store.get_auth_chain(
-                [pdu.event_id for pdu in pdus]
-            )
+        with (yield self._server_linearizer.queue((origin, room_id))):
+            if event_id:
+                pdus = yield self.handler.get_state_for_pdu(
+                    origin, room_id, event_id,
+                )
+                auth_chain = yield self.store.get_auth_chain(
+                    [pdu.event_id for pdu in pdus]
+                )
 
-            for event in auth_chain:
-                # We sign these again because there was a bug where we
-                # incorrectly signed things the first time round
-                if self.hs.is_mine_id(event.event_id):
-                    event.signatures.update(
-                        compute_event_signature(
-                            event,
-                            self.hs.hostname,
-                            self.hs.config.signing_key[0]
+                for event in auth_chain:
+                    # We sign these again because there was a bug where we
+                    # incorrectly signed things the first time round
+                    if self.hs.is_mine_id(event.event_id):
+                        event.signatures.update(
+                            compute_event_signature(
+                                event,
+                                self.hs.hostname,
+                                self.hs.config.signing_key[0]
+                            )
                         )
-                    )
-        else:
-            raise NotImplementedError("Specify an event")
+            else:
+                raise NotImplementedError("Specify an event")
 
         defer.returnValue((200, {
             "pdus": [pdu.get_pdu_json() for pdu in pdus],
@@ -283,14 +288,16 @@ class FederationServer(FederationBase):
 
     @defer.inlineCallbacks
     def on_event_auth(self, origin, room_id, event_id):
-        time_now = self._clock.time_msec()
-        auth_pdus = yield self.handler.on_event_auth(event_id)
-        defer.returnValue((200, {
-            "auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus],
-        }))
+        with (yield self._server_linearizer.queue((origin, room_id))):
+            time_now = self._clock.time_msec()
+            auth_pdus = yield self.handler.on_event_auth(event_id)
+            res = {
+                "auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus],
+            }
+        defer.returnValue((200, res))
 
     @defer.inlineCallbacks
-    def on_query_auth_request(self, origin, content, event_id):
+    def on_query_auth_request(self, origin, content, room_id, event_id):
         """
         Content is a dict with keys::
             auth_chain (list): A list of events that give the auth chain.
@@ -309,32 +316,33 @@ class FederationServer(FederationBase):
         Returns:
             Deferred: Results in `dict` with the same format as `content`
         """
-        auth_chain = [
-            self.event_from_pdu_json(e)
-            for e in content["auth_chain"]
-        ]
-
-        signed_auth = yield self._check_sigs_and_hash_and_fetch(
-            origin, auth_chain, outlier=True
-        )
+        with (yield self._server_linearizer.queue((origin, room_id))):
+            auth_chain = [
+                self.event_from_pdu_json(e)
+                for e in content["auth_chain"]
+            ]
+
+            signed_auth = yield self._check_sigs_and_hash_and_fetch(
+                origin, auth_chain, outlier=True
+            )
 
-        ret = yield self.handler.on_query_auth(
-            origin,
-            event_id,
-            signed_auth,
-            content.get("rejects", []),
-            content.get("missing", []),
-        )
+            ret = yield self.handler.on_query_auth(
+                origin,
+                event_id,
+                signed_auth,
+                content.get("rejects", []),
+                content.get("missing", []),
+            )
 
-        time_now = self._clock.time_msec()
-        send_content = {
-            "auth_chain": [
-                e.get_pdu_json(time_now)
-                for e in ret["auth_chain"]
-            ],
-            "rejects": ret.get("rejects", []),
-            "missing": ret.get("missing", []),
-        }
+            time_now = self._clock.time_msec()
+            send_content = {
+                "auth_chain": [
+                    e.get_pdu_json(time_now)
+                    for e in ret["auth_chain"]
+                ],
+                "rejects": ret.get("rejects", []),
+                "missing": ret.get("missing", []),
+            }
 
         defer.returnValue(
             (200, send_content)
@@ -386,21 +394,24 @@ class FederationServer(FederationBase):
     @log_function
     def on_get_missing_events(self, origin, room_id, earliest_events,
                               latest_events, limit, min_depth):
-        logger.info(
-            "on_get_missing_events: earliest_events: %r, latest_events: %r,"
-            " limit: %d, min_depth: %d",
-            earliest_events, latest_events, limit, min_depth
-        )
-        missing_events = yield self.handler.on_get_missing_events(
-            origin, room_id, earliest_events, latest_events, limit, min_depth
-        )
+        with (yield self._server_linearizer.queue((origin, room_id))):
+            logger.info(
+                "on_get_missing_events: earliest_events: %r, latest_events: %r,"
+                " limit: %d, min_depth: %d",
+                earliest_events, latest_events, limit, min_depth
+            )
+            missing_events = yield self.handler.on_get_missing_events(
+                origin, room_id, earliest_events, latest_events, limit, min_depth
+            )
 
-        if len(missing_events) < 5:
-            logger.info("Returning %d events: %r", len(missing_events), missing_events)
-        else:
-            logger.info("Returning %d events", len(missing_events))
+            if len(missing_events) < 5:
+                logger.info(
+                    "Returning %d events: %r", len(missing_events), missing_events
+                )
+            else:
+                logger.info("Returning %d events", len(missing_events))
 
-        time_now = self._clock.time_msec()
+            time_now = self._clock.time_msec()
 
         defer.returnValue({
             "events": [ev.get_pdu_json(time_now) for ev in missing_events],
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 8a1965f45a..26fa88ae84 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -388,7 +388,7 @@ class FederationQueryAuthServlet(BaseFederationServlet):
     @defer.inlineCallbacks
     def on_POST(self, origin, content, query, context, event_id):
         new_content = yield self.handler.on_query_auth_request(
-            origin, content, event_id
+            origin, content, context, event_id
         )
 
         defer.returnValue((200, new_content))
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index b38f81e999..968095c141 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -20,6 +20,7 @@ from synapse.api.constants import LoginType
 from synapse.types import UserID
 from synapse.api.errors import AuthError, LoginError, Codes, StoreError, SynapseError
 from synapse.util.async import run_on_reactor
+from synapse.config.ldap import LDAPMode
 
 from twisted.web.client import PartialDownloadError
 
@@ -28,6 +29,12 @@ import bcrypt
 import pymacaroons
 import simplejson
 
+try:
+    import ldap3
+except ImportError:
+    ldap3 = None
+    pass
+
 import synapse.util.stringutils as stringutils
 
 
@@ -50,17 +57,20 @@ class AuthHandler(BaseHandler):
         self.INVALID_TOKEN_HTTP_STATUS = 401
 
         self.ldap_enabled = hs.config.ldap_enabled
-        self.ldap_server = hs.config.ldap_server
-        self.ldap_port = hs.config.ldap_port
-        self.ldap_tls = hs.config.ldap_tls
-        self.ldap_search_base = hs.config.ldap_search_base
-        self.ldap_search_property = hs.config.ldap_search_property
-        self.ldap_email_property = hs.config.ldap_email_property
-        self.ldap_full_name_property = hs.config.ldap_full_name_property
-
-        if self.ldap_enabled is True:
-            import ldap
-            logger.info("Import ldap version: %s", ldap.__version__)
+        if self.ldap_enabled:
+            if not ldap3:
+                raise RuntimeError(
+                    'Missing ldap3 library. This is required for LDAP Authentication.'
+                )
+            self.ldap_mode = hs.config.ldap_mode
+            self.ldap_uri = hs.config.ldap_uri
+            self.ldap_start_tls = hs.config.ldap_start_tls
+            self.ldap_base = hs.config.ldap_base
+            self.ldap_filter = hs.config.ldap_filter
+            self.ldap_attributes = hs.config.ldap_attributes
+            if self.ldap_mode == LDAPMode.SEARCH:
+                self.ldap_bind_dn = hs.config.ldap_bind_dn
+                self.ldap_bind_password = hs.config.ldap_bind_password
 
         self.hs = hs  # FIXME better possibility to access registrationHandler later?
 
@@ -452,40 +462,167 @@ class AuthHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def _check_ldap_password(self, user_id, password):
-        if not self.ldap_enabled:
-            logger.debug("LDAP not configured")
+        """ Attempt to authenticate a user against an LDAP Server
+            and register an account if none exists.
+
+            Returns:
+                True if authentication against LDAP was successful
+        """
+
+        if not ldap3 or not self.ldap_enabled:
             defer.returnValue(False)
 
-        import ldap
+        if self.ldap_mode not in LDAPMode.LIST:
+            raise RuntimeError(
+                'Invalid ldap mode specified: {mode}'.format(
+                    mode=self.ldap_mode
+                )
+            )
 
-        logger.info("Authenticating %s with LDAP" % user_id)
         try:
-            ldap_url = "%s:%s" % (self.ldap_server, self.ldap_port)
-            logger.debug("Connecting LDAP server at %s" % ldap_url)
-            l = ldap.initialize(ldap_url)
-            if self.ldap_tls:
-                logger.debug("Initiating TLS")
-                self._connection.start_tls_s()
+            server = ldap3.Server(self.ldap_uri)
+            logger.debug(
+                "Attempting ldap connection with %s",
+                self.ldap_uri
+            )
 
-            local_name = UserID.from_string(user_id).localpart
+            localpart = UserID.from_string(user_id).localpart
+            if self.ldap_mode == LDAPMode.SIMPLE:
+                # bind with the local user's ldap credentials
+                bind_dn = "{prop}={value},{base}".format(
+                    prop=self.ldap_attributes['uid'],
+                    value=localpart,
+                    base=self.ldap_base
+                )
+                conn = ldap3.Connection(server, bind_dn, password)
+                logger.debug(
+                    "Established ldap connection in simple mode: %s",
+                    conn
+                )
 
-            dn = "%s=%s, %s" % (
-                self.ldap_search_property,
-                local_name,
-                self.ldap_search_base)
-            logger.debug("DN for LDAP authentication: %s" % dn)
+                if self.ldap_start_tls:
+                    conn.start_tls()
+                    logger.debug(
+                        "Upgraded ldap connection in simple mode through StartTLS: %s",
+                        conn
+                    )
+
+                conn.bind()
+
+            elif self.ldap_mode == LDAPMode.SEARCH:
+                # connect with preconfigured credentials and search for local user
+                conn = ldap3.Connection(
+                    server,
+                    self.ldap_bind_dn,
+                    self.ldap_bind_password
+                )
+                logger.debug(
+                    "Established ldap connection in search mode: %s",
+                    conn
+                )
+
+                if self.ldap_start_tls:
+                    conn.start_tls()
+                    logger.debug(
+                        "Upgraded ldap connection in search mode through StartTLS: %s",
+                        conn
+                    )
 
-            l.simple_bind_s(dn.encode('utf-8'), password.encode('utf-8'))
+                conn.bind()
 
+                # find matching dn
+                query = "({prop}={value})".format(
+                    prop=self.ldap_attributes['uid'],
+                    value=localpart
+                )
+                if self.ldap_filter:
+                    query = "(&{query}{filter})".format(
+                        query=query,
+                        filter=self.ldap_filter
+                    )
+                logger.debug("ldap search filter: %s", query)
+                result = conn.search(self.ldap_base, query)
+
+                if result and len(conn.response) == 1:
+                    # found exactly one result
+                    user_dn = conn.response[0]['dn']
+                    logger.debug('ldap search found dn: %s', user_dn)
+
+                    # unbind and reconnect, rebind with found dn
+                    conn.unbind()
+                    conn = ldap3.Connection(
+                        server,
+                        user_dn,
+                        password,
+                        auto_bind=True
+                    )
+                else:
+                    # found 0 or > 1 results, abort!
+                    logger.warn(
+                        "ldap search returned unexpected (%d!=1) number of results",
+                        len(conn.response)
+                    )
+                    defer.returnValue(False)
+
+            logger.info(
+                "User authenticated against ldap server: %s",
+                conn
+            )
+
+            # check for an existing account; if none exists, create one
             if not (yield self.does_user_exist(user_id)):
-                handler = self.hs.get_handlers().registration_handler
-                user_id, access_token = (
-                    yield handler.register(localpart=local_name)
+                # query user metadata for account creation
+                query = "({prop}={value})".format(
+                    prop=self.ldap_attributes['uid'],
+                    value=localpart
+                )
+
+                if self.ldap_mode == LDAPMode.SEARCH and self.ldap_filter:
+                    query = "(&{filter}{user_filter})".format(
+                        filter=query,
+                        user_filter=self.ldap_filter
+                    )
+                logger.debug("ldap registration filter: %s", query)
+
+                result = conn.search(
+                    search_base=self.ldap_base,
+                    search_filter=query,
+                    attributes=[
+                        self.ldap_attributes['name'],
+                        self.ldap_attributes['mail']
+                    ]
                 )
 
+                if len(conn.response) == 1:
+                    attrs = conn.response[0]['attributes']
+                    mail = attrs[self.ldap_attributes['mail']][0]
+                    name = attrs[self.ldap_attributes['name']][0]
+
+                    # create account
+                    registration_handler = self.hs.get_handlers().registration_handler
+                    user_id, access_token = (
+                        yield registration_handler.register(localpart=localpart)
+                    )
+
+                    # TODO: bind email, set displayname with data from ldap directory
+
+                    logger.info(
+                        "ldap registration successful: %s: %s (%s, %s)",
+                        user_id,
+                        localpart,
+                        name,
+                        mail
+                    )
+                else:
+                    logger.warn(
+                        "ldap registration failed: unexpected (%d!=1) number of results",
+                        len(conn.response)
+                    )
+                    defer.returnValue(False)
+
             defer.returnValue(True)
-        except ldap.LDAPError, e:
-            logger.warn("LDAP error: %s", e)
+        except ldap3.core.exceptions.LDAPException as e:
+            logger.warn("Error during ldap authentication: %s", e)
             defer.returnValue(False)
 
     @defer.inlineCallbacks
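
The rewritten LDAP check supports two modes. In simple mode the handler binds directly
as the user, composing the bind DN from the configured uid attribute, the Matrix
localpart and the base DN; in search mode it first binds with the configured service
credentials, searches for the user (optionally AND-ed with ldap_filter), and then
rebinds with the DN it found. A short sketch of the DN and filter construction used
above, with made-up attribute names and base DN:

    # Example values only; the string formats match those used in auth.py above.
    ldap_attributes = {"uid": "cn", "mail": "email", "name": "givenName"}
    ldap_base = "ou=users,dc=example,dc=com"
    ldap_filter = "(objectClass=posixAccount)"
    localpart = "alice"

    # Simple mode: bind directly as the user.
    bind_dn = "{prop}={value},{base}".format(
        prop=ldap_attributes["uid"], value=localpart, base=ldap_base,
    )
    print(bind_dn)  # cn=alice,ou=users,dc=example,dc=com

    # Search mode: look the user up first, then rebind with the DN found.
    query = "({prop}={value})".format(
        prop=ldap_attributes["uid"], value=localpart,
    )
    if ldap_filter:
        query = "(&{query}{filter})".format(query=query, filter=ldap_filter)
    print(query)  # (&(cn=alice)(objectClass=posixAccount))
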
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index e0a7a19777..e024cec0a2 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -48,6 +48,9 @@ CONDITIONAL_REQUIREMENTS = {
         "Jinja2>=2.8": ["Jinja2>=2.8"],
         "bleach>=1.4.2": ["bleach>=1.4.2"],
     },
+    "ldap": {
+        "ldap3>=1.0": ["ldap3>=1.0"],
+    },
 }
 
 
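
ldap3 is declared as a conditional requirement rather than a hard dependency: LDAPConfig
refuses to enable LDAP without it, and the auth handler imports it defensively so the
rest of the server is unaffected when it is absent. A minimal sketch of that
guarded-import pattern, mirroring the try/except added in synapse/handlers/auth.py
above:

    # Optional dependency guard; ldap3 >= 1.0 must be installed separately
    # before ldap_config can be enabled.
    try:
        import ldap3
    except ImportError:
        ldap3 = None

    if ldap3 is None:
        print("ldap3 not installed; LDAP authentication unavailable")
    else:
        print("ldap3 available; LDAP authentication can be enabled")
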
diff --git a/synapse/rest/media/v0/content_repository.py b/synapse/rest/media/v0/content_repository.py
index d9fc045fc6..956bd5da75 100644
--- a/synapse/rest/media/v0/content_repository.py
+++ b/synapse/rest/media/v0/content_repository.py
@@ -15,14 +15,12 @@
 
 from synapse.http.server import respond_with_json_bytes, finish_request
 
-from synapse.util.stringutils import random_string
 from synapse.api.errors import (
-    cs_exception, SynapseError, CodeMessageException, Codes, cs_error
+    Codes, cs_error
 )
 
 from twisted.protocols.basic import FileSender
 from twisted.web import server, resource
-from twisted.internet import defer
 
 import base64
 import simplejson as json
@@ -50,64 +48,10 @@ class ContentRepoResource(resource.Resource):
     """
     isLeaf = True
 
-    def __init__(self, hs, directory, auth, external_addr):
+    def __init__(self, hs, directory):
         resource.Resource.__init__(self)
         self.hs = hs
         self.directory = directory
-        self.auth = auth
-        self.external_addr = external_addr.rstrip('/')
-        self.max_upload_size = hs.config.max_upload_size
-
-        if not os.path.isdir(self.directory):
-            os.mkdir(self.directory)
-            logger.info("ContentRepoResource : Created %s directory.",
-                        self.directory)
-
-    @defer.inlineCallbacks
-    def map_request_to_name(self, request):
-        # auth the user
-        requester = yield self.auth.get_user_by_req(request)
-
-        # namespace all file uploads on the user
-        prefix = base64.urlsafe_b64encode(
-            requester.user.to_string()
-        ).replace('=', '')
-
-        # use a random string for the main portion
-        main_part = random_string(24)
-
-        # suffix with a file extension if we can make one. This is nice to
-        # provide a hint to clients on the file information. We will also reuse
-        # this info to spit back the content type to the client.
-        suffix = ""
-        if request.requestHeaders.hasHeader("Content-Type"):
-            content_type = request.requestHeaders.getRawHeaders(
-                "Content-Type")[0]
-            suffix = "." + base64.urlsafe_b64encode(content_type)
-            if (content_type.split("/")[0].lower() in
-                    ["image", "video", "audio"]):
-                file_ext = content_type.split("/")[-1]
-                # be a little paranoid and only allow a-z
-                file_ext = re.sub("[^a-z]", "", file_ext)
-                suffix += "." + file_ext
-
-        file_name = prefix + main_part + suffix
-        file_path = os.path.join(self.directory, file_name)
-        logger.info("User %s is uploading a file to path %s",
-                    request.user.user_id.to_string(),
-                    file_path)
-
-        # keep trying to make a non-clashing file, with a sensible max attempts
-        attempts = 0
-        while os.path.exists(file_path):
-            main_part = random_string(24)
-            file_name = prefix + main_part + suffix
-            file_path = os.path.join(self.directory, file_name)
-            attempts += 1
-            if attempts > 25:  # really? Really?
-                raise SynapseError(500, "Unable to create file.")
-
-        defer.returnValue(file_path)
 
     def render_GET(self, request):
         # no auth here on purpose, to allow anyone to view, even across home
@@ -155,58 +99,6 @@ class ContentRepoResource(resource.Resource):
 
         return server.NOT_DONE_YET
 
-    def render_POST(self, request):
-        self._async_render(request)
-        return server.NOT_DONE_YET
-
     def render_OPTIONS(self, request):
         respond_with_json_bytes(request, 200, {}, send_cors=True)
         return server.NOT_DONE_YET
-
-    @defer.inlineCallbacks
-    def _async_render(self, request):
-        try:
-            # TODO: The checks here are a bit late. The content will have
-            # already been uploaded to a tmp file at this point
-            content_length = request.getHeader("Content-Length")
-            if content_length is None:
-                raise SynapseError(
-                    msg="Request must specify a Content-Length", code=400
-                )
-            if int(content_length) > self.max_upload_size:
-                raise SynapseError(
-                    msg="Upload request body is too large",
-                    code=413,
-                )
-
-            fname = yield self.map_request_to_name(request)
-
-            # TODO I have a suspicious feeling this is just going to block
-            with open(fname, "wb") as f:
-                f.write(request.content.read())
-
-            # FIXME (erikj): These should use constants.
-            file_name = os.path.basename(fname)
-            # FIXME: we can't assume what the repo's public mounted path is
-            # ...plus self-signed SSL won't work to remote clients anyway
-            # ...and we can't assume that it's SSL anyway, as we might want to
-            # serve it via the non-SSL listener...
-            url = "%s/_matrix/content/%s" % (
-                self.external_addr, file_name
-            )
-
-            respond_with_json_bytes(request, 200,
-                                    json.dumps({"content_token": url}),
-                                    send_cors=True)
-
-        except CodeMessageException as e:
-            logger.exception(e)
-            respond_with_json_bytes(request, e.code,
-                                    json.dumps(cs_exception(e)))
-        except Exception as e:
-            logger.error("Failed to store file: %s" % e)
-            respond_with_json_bytes(
-                request,
-                500,
-                json.dumps({"error": "Internal server error"}),
-                send_cors=True)
diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py
index 940e11d7a2..5f1b6f63a9 100644
--- a/synapse/storage/event_push_actions.py
+++ b/synapse/storage/event_push_actions.py
@@ -152,7 +152,7 @@ class EventPushActionsStore(SQLBaseStore):
             if max_stream_ordering is not None:
                 sql += " AND ep.stream_ordering <= ?"
                 args.append(max_stream_ordering)
-            sql += " ORDER BY ep.stream_ordering ASC LIMIT ?"
+            sql += " ORDER BY ep.stream_ordering DESC LIMIT ?"
             args.append(limit)
             txn.execute(sql, args)
             return txn.fetchall()
@@ -176,14 +176,16 @@ class EventPushActionsStore(SQLBaseStore):
             if max_stream_ordering is not None:
                 sql += " AND ep.stream_ordering <= ?"
                 args.append(max_stream_ordering)
-            sql += " ORDER BY ep.stream_ordering ASC"
+            sql += " ORDER BY ep.stream_ordering DESC LIMIT ?"
+            args.append(limit)
             txn.execute(sql, args)
             return txn.fetchall()
         no_read_receipt = yield self.runInteraction(
             "get_unread_push_actions_for_user_in_range", get_no_receipt
         )
 
-        defer.returnValue([
+        # Make a list of dicts from the two sets of results.
+        notifs = [
             {
                 "event_id": row[0],
                 "room_id": row[1],
@@ -191,7 +193,16 @@ class EventPushActionsStore(SQLBaseStore):
                 "actions": json.loads(row[3]),
                 "received_ts": row[4],
             } for row in after_read_receipt + no_read_receipt
-        ])
+        ]
+
+        # Now sort it so it's ordered correctly, since currently it will
+        # contain results from the first query, correctly ordered, followed
+        # by results from the second query, but we want them all ordered
+        # by received_ts
+        notifs.sort(key=lambda r: -(r['received_ts'] or 0))
+
+        # Now return the first `limit`
+        defer.returnValue(notifs[:limit])
 
     @defer.inlineCallbacks
     def get_time_of_last_push_action_before(self, stream_ordering):
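
Both queries now return at most the requested number of rows, ordered by stream
ordering descending; the results are then merged in Python, re-sorted by received_ts
(newest first, with NULLs last) and truncated to the limit. A standalone sketch of that
merge step, with made-up rows in the (event_id, room_id, stream_ordering, actions,
received_ts) shape returned by the queries:

    import json

    limit = 3
    after_read_receipt = [
        ("$e1", "!a:hs", 10, json.dumps(["notify"]), 1000),
        ("$e2", "!a:hs", 9, json.dumps(["notify"]), 900),
    ]
    no_read_receipt = [
        ("$e3", "!b:hs", 12, json.dumps(["notify"]), 1200),
        ("$e4", "!b:hs", 11, json.dumps(["notify"]), None),
    ]

    notifs = [
        {
            "event_id": row[0],
            "room_id": row[1],
            "stream_ordering": row[2],
            "actions": json.loads(row[3]),
            "received_ts": row[4],
        } for row in after_read_receipt + no_read_receipt
    ]
    # Newest first; a missing received_ts sorts as 0 (i.e. last).
    notifs.sort(key=lambda r: -(r["received_ts"] or 0))
    print([n["event_id"] for n in notifs[:limit]])  # ['$e3', '$e1', '$e2']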