From a0fc256d6570652cedf9a68606e7849ff233e4e4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 9 Apr 2019 17:03:35 +0100 Subject: Limit in flight DNS requests This is to work around a bug in twisted where a large number of concurrent DNS requests cause it to tight loop forever. c.f. https://twistedmatrix.com/trac/ticket/9620#ticket --- synapse/app/_base.py | 83 +++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 82 insertions(+), 1 deletion(-) diff --git a/synapse/app/_base.py b/synapse/app/_base.py index d4c6c4c8e2..08199a5e8d 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -22,13 +22,14 @@ import traceback import psutil from daemonize import Daemonize -from twisted.internet import error, reactor +from twisted.internet import defer, error, reactor from twisted.protocols.tls import TLSMemoryBIOFactory import synapse from synapse.app import check_bind_error from synapse.crypto import context_factory from synapse.util import PreserveLoggingContext +from synapse.util.async_helpers import Linearizer from synapse.util.rlimit import change_resource_limit from synapse.util.versionstring import get_version_string @@ -99,6 +100,8 @@ def start_reactor( logger (logging.Logger): logger instance to pass to Daemonize """ + install_dns_limiter(reactor) + def run(): # make sure that we run the reactor with the sentinel log context, # otherwise other PreserveLoggingContext instances will get confused @@ -312,3 +315,81 @@ def setup_sentry(hs): name = hs.config.worker_name if hs.config.worker_name else "master" scope.set_tag("worker_app", app) scope.set_tag("worker_name", name) + + +def install_dns_limiter(reactor, max_dns_requests_in_flight=100): + """Replaces the resolver with one that limits the number of in flight DNS + requests. + + This is to workaround https://twistedmatrix.com/trac/ticket/9620, where we + can run out of file descriptors and infinite loop if we attempt to do too + many DNS queries at once + """ + new_resolver = _LimitedHostnameResolver( + reactor.nameResolver, max_dns_requests_in_flight, + ) + + reactor.installNameResolver(new_resolver) + + +class _LimitedHostnameResolver(object): + """Wraps a IHostnameResolver, limiting the number of in-flight DNS lookups. + """ + + def __init__(self, resolver, max_dns_requests_in_flight): + self._resolver = resolver + self._limiter = Linearizer( + name="dns_client_limiter", max_count=max_dns_requests_in_flight, + ) + + def resolveHostName(self, resolutionReceiver, hostName, portNumber=0, + addressTypes=None, transportSemantics='TCP'): + # Note this is happening deep within the reactor, so we don't need to + # worry about log contexts. + + # We need this function to return `resolutionReceiver` so we do all the + # actual logic involving deferreds in a separate function. + self._resolve( + resolutionReceiver, hostName, portNumber, + addressTypes, transportSemantics, + ) + + return resolutionReceiver + + @defer.inlineCallbacks + def _resolve(self, resolutionReceiver, hostName, portNumber=0, + addressTypes=None, transportSemantics='TCP'): + + with (yield self._limiter.queue(())): + # resolveHostName doesn't return a Deferred, so we need to hook into + # the receiver interface to get told when resolution has finished. 
+ + deferred = defer.Deferred() + receiver = _DeferredResolutionReceiver(resolutionReceiver, deferred) + + self._resolver.resolveHostName( + receiver, hostName, portNumber, + addressTypes, transportSemantics, + ) + + yield deferred + + +class _DeferredResolutionReceiver(object): + """Wraps a IResolutionReceiver and simply resolves the given deferred when + resolution is complete + """ + + def __init__(self, receiver, deferred): + self._receiver = receiver + self._deferred = deferred + + def resolutionBegan(self, resolutionInProgress): + self._receiver.resolutionBegan(resolutionInProgress) + + def addressResolved(self, address): + self._receiver.addressResolved(address) + + def resolutionComplete(self): + self._deferred.callback(()) + self._receiver.resolutionComplete() -- cgit 1.5.1 From 02491e009d64b0458abcde188e021bbf5a288053 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 9 Apr 2019 17:18:21 +0100 Subject: Newsfile --- changelog.d/5037.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5037.bugfix diff --git a/changelog.d/5037.bugfix b/changelog.d/5037.bugfix new file mode 100644 index 0000000000..c3af418ddf --- /dev/null +++ b/changelog.d/5037.bugfix @@ -0,0 +1 @@ +Workaround bug in twisted where attempting too many concurrent DNS requests could cause it to hang due to running out of file descriptors. -- cgit 1.5.1 From d5adf297e6d6688d9410e39d7d90a0f9f01173eb Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 15 Apr 2019 17:13:16 +0100 Subject: Move some rest endpoints to client reader --- docs/workers.rst | 3 +++ synapse/app/client_reader.py | 11 +++++++---- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/docs/workers.rst b/docs/workers.rst index d80fc04d2e..d9d545610b 100644 --- a/docs/workers.rst +++ b/docs/workers.rst @@ -227,6 +227,9 @@ following regular expressions:: ^/_matrix/client/(api/v1|r0|unstable)/account/3pid$ ^/_matrix/client/(api/v1|r0|unstable)/keys/query$ ^/_matrix/client/(api/v1|r0|unstable)/keys/changes$ + ^/_matrix/client/versions$ + ^/_matrix/client/(api/v1|r0|unstable)/voip/turnServer$ + ^/_matrix/client/(api/v1|r0|unstable)/pushrules/.*$ Additionally, the following REST endpoints can be handled, but all requests must be routed to the same instance:: diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index beaea64a61..1e9e686107 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -45,6 +45,7 @@ from synapse.replication.slave.storage.room import RoomStore from synapse.replication.slave.storage.transactions import SlavedTransactionStore from synapse.replication.tcp.client import ReplicationClientHandler from synapse.rest.client.v1.login import LoginRestServlet +from synapse.rest.client.v1.push_rule import PushRuleRestServlet from synapse.rest.client.v1.room import ( JoinedRoomMemberListRestServlet, PublicRoomListRestServlet, @@ -52,9 +53,11 @@ from synapse.rest.client.v1.room import ( RoomMemberListRestServlet, RoomStateRestServlet, ) +from synapse.rest.client.v1.voip import VoipRestServlet from synapse.rest.client.v2_alpha.account import ThreepidRestServlet from synapse.rest.client.v2_alpha.keys import KeyChangesServlet, KeyQueryServlet from synapse.rest.client.v2_alpha.register import RegisterRestServlet +from synapse.rest.client.versions import VersionsRestServlet from synapse.server import HomeServer from synapse.storage.engines import create_engine from synapse.util.httpresourcetree import create_resource_tree @@ -109,12 +112,12 @@ class 
ClientReaderServer(HomeServer):
                     ThreepidRestServlet(self).register(resource)
                     KeyQueryServlet(self).register(resource)
                     KeyChangesServlet(self).register(resource)
+                    VoipRestServlet(self).register(resource)
+                    PushRuleRestServlet(self).register(resource)
+                    VersionsRestServlet(self).register(resource)
 
                     resources.update({
-                        "/_matrix/client/r0": resource,
-                        "/_matrix/client/unstable": resource,
-                        "/_matrix/client/v2_alpha": resource,
-                        "/_matrix/client/api/v1": resource,
+                        "/_matrix/client": resource,
                     })
 
         root_resource = create_resource_tree(resources, NoResource())
-- cgit 1.5.1


From 208251956dcc2aa18ec72c5a74d5d2e67546a68e Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Mon, 15 Apr 2019 17:17:31 +0100
Subject: Newsfile

---
 changelog.d/5063.feature | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/5063.feature

diff --git a/changelog.d/5063.feature b/changelog.d/5063.feature
new file mode 100644
index 0000000000..fd7b80018e
--- /dev/null
+++ b/changelog.d/5063.feature
@@ -0,0 +1 @@
+Add support for handling /versions, /voip and /push_rules client endpoints to client_reader worker.
-- cgit 1.5.1


From ec638a16022d29cbab32ac8ddb4cddb7f99d6495 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Mon, 15 Apr 2019 18:40:15 +0100
Subject: Only handle GET requests for /push_rules

---
 docs/workers.rst                    | 3 +++
 synapse/rest/client/v1/push_rule.py | 7 +++++++
 2 files changed, 10 insertions(+)

diff --git a/docs/workers.rst b/docs/workers.rst
index d9d545610b..aa4e7a120b 100644
--- a/docs/workers.rst
+++ b/docs/workers.rst
@@ -229,6 +229,9 @@ following regular expressions::
     ^/_matrix/client/(api/v1|r0|unstable)/keys/changes$
     ^/_matrix/client/versions$
     ^/_matrix/client/(api/v1|r0|unstable)/voip/turnServer$
+
+Additionally, the following REST endpoints can be handled for GET requests::
+
     ^/_matrix/client/(api/v1|r0|unstable)/pushrules/.*$
 
 Additionally, the following REST endpoints can be handled, but all requests must
diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py
index c654f9b5f0..7e9d95de26 100644
--- a/synapse/rest/client/v1/push_rule.py
+++ b/synapse/rest/client/v1/push_rule.py
@@ -39,9 +39,13 @@ class PushRuleRestServlet(ClientV1RestServlet):
         super(PushRuleRestServlet, self).__init__(hs)
         self.store = hs.get_datastore()
         self.notifier = hs.get_notifier()
+        self._is_worker = hs.config.worker_app is not None
 
     @defer.inlineCallbacks
     def on_PUT(self, request):
+        if self._is_worker:
+            raise Exception("Cannot handle PUT /push_rules on worker")
+
         spec = _rule_spec_from_path([x.decode('utf8') for x in request.postpath])
         try:
             priority_class = _priority_class_from_spec(spec)
@@ -103,6 +107,9 @@ class PushRuleRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_DELETE(self, request):
+        if self._is_worker:
+            raise Exception("Cannot handle DELETE /push_rules on worker")
+
         spec = _rule_spec_from_path([x.decode('utf8') for x in request.postpath])
 
         requester = yield self.auth.get_user_by_req(request)
-- cgit 1.5.1


From 38642614cffb107bb69abf215eb5144681bc4491 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Mon, 15 Apr 2019 19:21:32 +0100
Subject: VersionRestServlet doesn't take a param

---
 synapse/app/client_reader.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py
index 1e9e686107..864f1eac48 100644
--- a/synapse/app/client_reader.py
+++ b/synapse/app/client_reader.py
@@ -114,7 +114,7 @@ class ClientReaderServer(HomeServer):
                     KeyChangesServlet(self).register(resource)
VoipRestServlet(self).register(resource) PushRuleRestServlet(self).register(resource) - VersionsRestServlet(self).register(resource) + VersionsRestServlet().register(resource) resources.update({ "/_matrix/client": resource, -- cgit 1.5.1 From 468b2bcb2ee42e877f6610e2704ef7faa73df344 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 15 Apr 2019 19:41:25 +0100 Subject: Newsfile --- changelog.d/5065.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5065.feature diff --git a/changelog.d/5065.feature b/changelog.d/5065.feature new file mode 100644 index 0000000000..fd7b80018e --- /dev/null +++ b/changelog.d/5065.feature @@ -0,0 +1 @@ +Add support for handling /verions, /voip and /push_rules client endpoints to client_reader worker. -- cgit 1.5.1 From a137f4eac028fefda0787a168dd8853653155bc3 Mon Sep 17 00:00:00 2001 From: Silke Hofstra Date: Tue, 16 Apr 2019 12:41:17 +0200 Subject: Add systemd-python to optional dependencies (#4339) Using systemd-python allows for logging to the systemd journal, as is documented in: `synapse/contrib/systemd/log_config.yaml`. Signed-off-by: Silke Hofstra --- changelog.d/4339.feature | 1 + docker/Dockerfile-dhvirtualenv | 2 ++ setup.py | 6 +----- synapse/python_dependencies.py | 14 +++++++++----- 4 files changed, 13 insertions(+), 10 deletions(-) create mode 100644 changelog.d/4339.feature diff --git a/changelog.d/4339.feature b/changelog.d/4339.feature new file mode 100644 index 0000000000..cecff97b80 --- /dev/null +++ b/changelog.d/4339.feature @@ -0,0 +1 @@ +Add systemd-python to the optional dependencies to enable logging to the systemd journal. Install with `pip install matrix-synapse[systemd]`. diff --git a/docker/Dockerfile-dhvirtualenv b/docker/Dockerfile-dhvirtualenv index 224c92352d..9c4c9a5d80 100644 --- a/docker/Dockerfile-dhvirtualenv +++ b/docker/Dockerfile-dhvirtualenv @@ -50,7 +50,9 @@ RUN apt-get update -qq -o Acquire::Languages=none \ debhelper \ devscripts \ dh-systemd \ + libsystemd-dev \ lsb-release \ + pkg-config \ python3-dev \ python3-pip \ python3-setuptools \ diff --git a/setup.py b/setup.py index 55b1b10a77..55663e9cac 100755 --- a/setup.py +++ b/setup.py @@ -86,13 +86,9 @@ long_description = read_file(("README.rst",)) REQUIREMENTS = dependencies['REQUIREMENTS'] CONDITIONAL_REQUIREMENTS = dependencies['CONDITIONAL_REQUIREMENTS'] +ALL_OPTIONAL_REQUIREMENTS = dependencies['ALL_OPTIONAL_REQUIREMENTS'] # Make `pip install matrix-synapse[all]` install all the optional dependencies. -ALL_OPTIONAL_REQUIREMENTS = set() - -for optional_deps in CONDITIONAL_REQUIREMENTS.values(): - ALL_OPTIONAL_REQUIREMENTS = set(optional_deps) | ALL_OPTIONAL_REQUIREMENTS - CONDITIONAL_REQUIREMENTS["all"] = list(ALL_OPTIONAL_REQUIREMENTS) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 62c1748665..779f36dbed 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -86,18 +86,22 @@ CONDITIONAL_REQUIREMENTS = { "acme": ["txacme>=0.9.2"], "saml2": ["pysaml2>=4.5.0"], + "systemd": ["systemd-python>=231"], "url_preview": ["lxml>=3.5.0"], "test": ["mock>=2.0", "parameterized"], "sentry": ["sentry-sdk>=0.7.2"], } +ALL_OPTIONAL_REQUIREMENTS = set() + +for name, optional_deps in CONDITIONAL_REQUIREMENTS.items(): + # Exclude systemd as it's a system-based requirement. 
+ if name not in ["systemd"]: + ALL_OPTIONAL_REQUIREMENTS = set(optional_deps) | ALL_OPTIONAL_REQUIREMENTS -def list_requirements(): - deps = set(REQUIREMENTS) - for opt in CONDITIONAL_REQUIREMENTS.values(): - deps = set(opt) | deps - return list(deps) +def list_requirements(): + return list(set(REQUIREMENTS) | ALL_OPTIONAL_REQUIREMENTS) class DependencyException(Exception): -- cgit 1.5.1 From 3f22e993f096ceec21027cc52b93cefa5979382c Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Tue, 16 Apr 2019 08:31:59 -0600 Subject: Use packages.matrix.org for packages (#5067) * Use packages.matrix.org for packages See https://github.com/vector-im/riot-web/issues/9497 (applies to more than just Olm) * changelog --- INSTALL.md | 6 +++--- changelog.d/5067.misc | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/5067.misc diff --git a/INSTALL.md b/INSTALL.md index a5c3c6efaa..d0e74be18c 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -257,13 +257,13 @@ https://github.com/spantaleev/matrix-docker-ansible-deploy #### Matrix.org packages Matrix.org provides Debian/Ubuntu packages of the latest stable version of -Synapse via https://matrix.org/packages/debian/. To use them: +Synapse via https://packages.matrix.org/debian/. To use them: ``` sudo apt install -y lsb-release curl apt-transport-https -echo "deb https://matrix.org/packages/debian `lsb_release -cs` main" | +echo "deb https://packages.matrix.org/debian `lsb_release -cs` main" | sudo tee /etc/apt/sources.list.d/matrix-org.list -curl "https://matrix.org/packages/debian/repo-key.asc" | +curl "https://packages.matrix.org/debian/repo-key.asc" | sudo apt-key add - sudo apt update sudo apt install matrix-synapse-py3 diff --git a/changelog.d/5067.misc b/changelog.d/5067.misc new file mode 100644 index 0000000000..bbb4337dbf --- /dev/null +++ b/changelog.d/5067.misc @@ -0,0 +1 @@ +Update documentation for where to get Synapse packages. -- cgit 1.5.1 From ad010f630616fd0211706293e49fed38d037ceb5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 16 Apr 2019 17:42:50 +0100 Subject: Remove usage of request.postpath This is an undocumented variable in twisted, and relies on the servlet being mounted in the right way. This also breaks getting push rules on workers. 
---
 synapse/rest/client/v1/push_rule.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py
index 7e9d95de26..506ec95ddd 100644
--- a/synapse/rest/client/v1/push_rule.py
+++ b/synapse/rest/client/v1/push_rule.py
@@ -31,7 +31,7 @@ from .base import ClientV1RestServlet, client_path_patterns
 
 
 class PushRuleRestServlet(ClientV1RestServlet):
-    PATTERNS = client_path_patterns("/pushrules/.*$")
+    PATTERNS = client_path_patterns("/(?P<path>pushrules/.*)$")
     SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR = (
         "Unrecognised request: You probably wanted a trailing slash")
 
@@ -42,11 +42,11 @@ class PushRuleRestServlet(ClientV1RestServlet):
         self._is_worker = hs.config.worker_app is not None
 
     @defer.inlineCallbacks
-    def on_PUT(self, request):
+    def on_PUT(self, request, path):
         if self._is_worker:
             raise Exception("Cannot handle PUT /push_rules on worker")
 
-        spec = _rule_spec_from_path([x.decode('utf8') for x in request.postpath])
+        spec = _rule_spec_from_path([x for x in path.split("/")])
         try:
             priority_class = _priority_class_from_spec(spec)
         except InvalidRuleException as e:
@@ -106,11 +106,11 @@ class PushRuleRestServlet(ClientV1RestServlet):
         defer.returnValue((200, {}))
 
     @defer.inlineCallbacks
-    def on_DELETE(self, request):
+    def on_DELETE(self, request, path):
         if self._is_worker:
             raise Exception("Cannot handle DELETE /push_rules on worker")
 
-        spec = _rule_spec_from_path([x.decode('utf8') for x in request.postpath])
+        spec = _rule_spec_from_path([x for x in path.split("/")])
 
         requester = yield self.auth.get_user_by_req(request)
         user_id = requester.user.to_string()
@@ -130,7 +130,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
             raise
 
     @defer.inlineCallbacks
-    def on_GET(self, request):
+    def on_GET(self, request, path):
         requester = yield self.auth.get_user_by_req(request)
         user_id = requester.user.to_string()
 
@@ -141,7 +141,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
 
         rules = format_push_rules_for_user(requester.user, rules)
 
-        path = [x.decode('utf8') for x in request.postpath][1:]
+        path = [x for x in path.split("/")][1:]
 
         if path == []:
             # we're a reference impl: pedantry is our job.
@@ -157,7 +157,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
         else:
             raise UnrecognizedRequestError()
 
-    def on_OPTIONS(self, _):
+    def on_OPTIONS(self, request, path):
         return 200, {}
 
     def notify_user(self, user_id):
-- cgit 1.5.1


From 14d5ad7d2b0d4163dae62de7a5699838b1ffdcbe Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Tue, 16 Apr 2019 17:52:00 +0100
Subject: Newsfile

---
 changelog.d/5070.feature | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/5070.feature

diff --git a/changelog.d/5070.feature b/changelog.d/5070.feature
new file mode 100644
index 0000000000..fd7b80018e
--- /dev/null
+++ b/changelog.d/5070.feature
@@ -0,0 +1 @@
+Add support for handling /versions, /voip and /push_rules client endpoints to client_reader worker.
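
Illustrative note, not part of the patch series: the sketch below shows what the ``path`` keyword argument captured by the new ``(?P<path>pushrules/.*)`` group contains, and how the handlers above split it. The request path used here is a made-up example::

    # Hypothetical request: GET /_matrix/client/r0/pushrules/global/override/.m.rule.master
    # The named group captures everything after the client API prefix:
    path = "pushrules/global/override/.m.rule.master"

    # on_PUT/on_DELETE hand the full split list to _rule_spec_from_path():
    full_segments = [x for x in path.split("/")]
    print(full_segments)   # ['pushrules', 'global', 'override', '.m.rule.master']

    # on_GET drops the leading "pushrules" component before interpreting the rest:
    get_segments = [x for x in path.split("/")][1:]
    print(get_segments)    # ['global', 'override', '.m.rule.master']
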
-- cgit 1.5.1 From 600ec04739a3fd7a2697a837f6e232c970bd97d3 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 17 Apr 2019 12:01:59 +0100 Subject: Make sure we're not registering the same 3pid twice --- changelog.d/5071.bugfix | 1 + synapse/rest/client/v2_alpha/register.py | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+) create mode 100644 changelog.d/5071.bugfix diff --git a/changelog.d/5071.bugfix b/changelog.d/5071.bugfix new file mode 100644 index 0000000000..ddf7ab5fa8 --- /dev/null +++ b/changelog.d/5071.bugfix @@ -0,0 +1 @@ +Make sure we're not registering the same 3pid twice on registration. diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 6d235262c8..dc3e265bcd 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -391,6 +391,13 @@ class RegisterRestServlet(RestServlet): # the user-facing checks will probably already have happened in # /register/email/requestToken when we requested a 3pid, but that's not # guaranteed. + # + # Also check that we're not trying to register a 3pid that's already + # been registered. + # + # This has probably happened in /register/email/requestToken as well, + # but if a user hits this endpoint twice then clicks on each link from + # the two activation emails, they would register the same 3pid twice. if auth_result: for login_type in [LoginType.EMAIL_IDENTITY, LoginType.MSISDN]: @@ -406,6 +413,17 @@ class RegisterRestServlet(RestServlet): Codes.THREEPID_DENIED, ) + existingUid = yield self.store.get_user_id_by_threepid( + medium, address, + ) + + if existingUid is not None: + raise SynapseError( + 400, + "%s is already in use" % medium, + Codes.THREEPID_IN_USE, + ) + if registered_user_id is not None: logger.info( "Already registered user ID %r for this session", -- cgit 1.5.1 From 20f0617e87924c929f0db0c06d30de0c8d15081c Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 10 Apr 2019 17:58:47 +0100 Subject: Send out emails with links to extend an account's validity period --- changelog.d/5047.feature | 1 + docs/sample_config.yaml | 29 ++- synapse/api/auth.py | 5 +- synapse/config/emailconfig.py | 7 +- synapse/config/registration.py | 52 ++++- synapse/handlers/account_validity.py | 228 +++++++++++++++++++++ synapse/push/mailer.py | 14 +- synapse/push/pusher.py | 6 +- synapse/res/templates/mail-expiry.css | 4 + synapse/res/templates/notice_expiry.html | 43 ++++ synapse/res/templates/notice_expiry.txt | 7 + synapse/rest/__init__.py | 2 + synapse/rest/client/v2_alpha/account_validity.py | 62 ++++++ synapse/server.py | 5 + synapse/storage/registration.py | 168 +++++++++++++-- .../storage/schema/delta/54/account_validity.sql | 9 +- tests/rest/client/v2_alpha/test_register.py | 100 ++++++++- 17 files changed, 699 insertions(+), 43 deletions(-) create mode 100644 changelog.d/5047.feature create mode 100644 synapse/handlers/account_validity.py create mode 100644 synapse/res/templates/mail-expiry.css create mode 100644 synapse/res/templates/notice_expiry.html create mode 100644 synapse/res/templates/notice_expiry.txt create mode 100644 synapse/rest/client/v2_alpha/account_validity.py diff --git a/changelog.d/5047.feature b/changelog.d/5047.feature new file mode 100644 index 0000000000..12766a82a7 --- /dev/null +++ b/changelog.d/5047.feature @@ -0,0 +1 @@ +Add time-based account expiration. 
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 5594c8b9af..8bbd437239 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -643,11 +643,31 @@ uploads_path: "DATADIR/uploads" # #enable_registration: false -# Optional account validity parameter. This allows for, e.g., accounts to -# be denied any request after a given period. +# Optional account validity configuration. This allows for accounts to be denied +# any request after a given period. +# +# ``enabled`` defines whether the account validity feature is enabled. Defaults +# to False. +# +# ``period`` allows setting the period after which an account is valid +# after its registration. When renewing the account, its validity period +# will be extended by this amount of time. This parameter is required when using +# the account validity feature. +# +# ``renew_at`` is the amount of time before an account's expiry date at which +# Synapse will send an email to the account's email address with a renewal link. +# This needs the ``email`` and ``public_baseurl`` configuration sections to be +# filled. +# +# ``renew_email_subject`` is the subject of the email sent out with the renewal +# link. ``%(app)s`` can be used as a placeholder for the ``app_name`` parameter +# from the ``email`` section. # #account_validity: +# enabled: True # period: 6w +# renew_at: 1w +# renew_email_subject: "Renew your %(app)s account" # The user must provide all of the below types of 3PID when registering. # @@ -890,7 +910,7 @@ password_config: -# Enable sending emails for notification events +# Enable sending emails for notification events or expiry notices # Defining a custom URL for Riot is only needed if email notifications # should contain links to a self-hosted installation of Riot; when set # the "app_name" setting is ignored. @@ -912,6 +932,9 @@ password_config: # #template_dir: res/templates # notif_template_html: notif_mail.html # notif_template_text: notif_mail.txt +# # Templates for account expiry notices. +# expiry_template_html: notice_expiry.html +# expiry_template_text: notice_expiry.txt # notif_for_new_users: True # riot_base_url: "http://localhost/riot" diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 976e0dd18b..4482962510 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -230,8 +230,9 @@ class Auth(object): # Deny the request if the user account has expired. 
if self._account_validity.enabled: - expiration_ts = yield self.store.get_expiration_ts_for_user(user) - if self.clock.time_msec() >= expiration_ts: + user_id = user.to_string() + expiration_ts = yield self.store.get_expiration_ts_for_user(user_id) + if expiration_ts and self.clock.time_msec() >= expiration_ts: raise AuthError( 403, "User account has expired", diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index 93d70cff14..60827be72f 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -71,6 +71,8 @@ class EmailConfig(Config): self.email_notif_from = email_config["notif_from"] self.email_notif_template_html = email_config["notif_template_html"] self.email_notif_template_text = email_config["notif_template_text"] + self.email_expiry_template_html = email_config["expiry_template_html"] + self.email_expiry_template_text = email_config["expiry_template_text"] template_dir = email_config.get("template_dir") # we need an absolute path, because we change directory after starting (and @@ -120,7 +122,7 @@ class EmailConfig(Config): def default_config(self, config_dir_path, server_name, **kwargs): return """ - # Enable sending emails for notification events + # Enable sending emails for notification events or expiry notices # Defining a custom URL for Riot is only needed if email notifications # should contain links to a self-hosted installation of Riot; when set # the "app_name" setting is ignored. @@ -142,6 +144,9 @@ class EmailConfig(Config): # #template_dir: res/templates # notif_template_html: notif_mail.html # notif_template_text: notif_mail.txt + # # Templates for account expiry notices. + # expiry_template_html: notice_expiry.html + # expiry_template_text: notice_expiry.txt # notif_for_new_users: True # riot_base_url: "http://localhost/riot" """ diff --git a/synapse/config/registration.py b/synapse/config/registration.py index b7a7b4f1cf..129c208204 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -21,12 +21,26 @@ from synapse.util.stringutils import random_string_with_symbols class AccountValidityConfig(Config): - def __init__(self, config): - self.enabled = (len(config) > 0) + def __init__(self, config, synapse_config): + self.enabled = config.get("enabled", False) + self.renew_by_email_enabled = ("renew_at" in config) - period = config.get("period", None) - if period: - self.period = self.parse_duration(period) + if self.enabled: + if "period" in config: + self.period = self.parse_duration(config["period"]) + else: + raise ConfigError("'period' is required when using account validity") + + if "renew_at" in config: + self.renew_at = self.parse_duration(config["renew_at"]) + + if "renew_email_subject" in config: + self.renew_email_subject = config["renew_email_subject"] + else: + self.renew_email_subject = "Renew your %(app)s account" + + if self.renew_by_email_enabled and "public_baseurl" not in synapse_config: + raise ConfigError("Can't send renewal emails without 'public_baseurl'") class RegistrationConfig(Config): @@ -40,7 +54,9 @@ class RegistrationConfig(Config): strtobool(str(config["disable_registration"])) ) - self.account_validity = AccountValidityConfig(config.get("account_validity", {})) + self.account_validity = AccountValidityConfig( + config.get("account_validity", {}), config, + ) self.registrations_require_3pid = config.get("registrations_require_3pid", []) self.allowed_local_3pids = config.get("allowed_local_3pids", []) @@ -86,11 +102,31 @@ class RegistrationConfig(Config): # 
#enable_registration: false - # Optional account validity parameter. This allows for, e.g., accounts to - # be denied any request after a given period. + # Optional account validity configuration. This allows for accounts to be denied + # any request after a given period. + # + # ``enabled`` defines whether the account validity feature is enabled. Defaults + # to False. + # + # ``period`` allows setting the period after which an account is valid + # after its registration. When renewing the account, its validity period + # will be extended by this amount of time. This parameter is required when using + # the account validity feature. + # + # ``renew_at`` is the amount of time before an account's expiry date at which + # Synapse will send an email to the account's email address with a renewal link. + # This needs the ``email`` and ``public_baseurl`` configuration sections to be + # filled. + # + # ``renew_email_subject`` is the subject of the email sent out with the renewal + # link. ``%%(app)s`` can be used as a placeholder for the ``app_name`` parameter + # from the ``email`` section. # #account_validity: + # enabled: True # period: 6w + # renew_at: 1w + # renew_email_subject: "Renew your %%(app)s account" # The user must provide all of the below types of 3PID when registering. # diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py new file mode 100644 index 0000000000..e82049e42d --- /dev/null +++ b/synapse/handlers/account_validity.py @@ -0,0 +1,228 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import email.mime.multipart +import email.utils +import logging +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText + +from twisted.internet import defer + +from synapse.api.errors import StoreError +from synapse.types import UserID +from synapse.util import stringutils +from synapse.util.logcontext import make_deferred_yieldable + +try: + from synapse.push.mailer import load_jinja2_templates +except ImportError: + load_jinja2_templates = None + +logger = logging.getLogger(__name__) + + +class AccountValidityHandler(object): + def __init__(self, hs): + self.hs = hs + self.store = self.hs.get_datastore() + self.sendmail = self.hs.get_sendmail() + self.clock = self.hs.get_clock() + + self._account_validity = self.hs.config.account_validity + + if self._account_validity.renew_by_email_enabled and load_jinja2_templates: + # Don't do email-specific configuration if renewal by email is disabled. + try: + app_name = self.hs.config.email_app_name + + self._subject = self._account_validity.renew_email_subject % { + "app": app_name, + } + + self._from_string = self.hs.config.email_notif_from % { + "app": app_name, + } + except Exception: + # If substitution failed, fall back to the bare strings. 
+ self._subject = self._account_validity.renew_email_subject + self._from_string = self.hs.config.email_notif_from + + self._raw_from = email.utils.parseaddr(self._from_string)[1] + + self._template_html, self._template_text = load_jinja2_templates( + config=self.hs.config, + template_html_name=self.hs.config.email_expiry_template_html, + template_text_name=self.hs.config.email_expiry_template_text, + ) + + # Check the renewal emails to send and send them every 30min. + self.clock.looping_call( + self.send_renewal_emails, + 30 * 60 * 1000, + ) + + @defer.inlineCallbacks + def send_renewal_emails(self): + """Gets the list of users whose account is expiring in the amount of time + configured in the ``renew_at`` parameter from the ``account_validity`` + configuration, and sends renewal emails to all of these users as long as they + have an email 3PID attached to their account. + """ + expiring_users = yield self.store.get_users_expiring_soon() + + if expiring_users: + for user in expiring_users: + yield self._send_renewal_email( + user_id=user["user_id"], + expiration_ts=user["expiration_ts_ms"], + ) + + @defer.inlineCallbacks + def _send_renewal_email(self, user_id, expiration_ts): + """Sends out a renewal email to every email address attached to the given user + with a unique link allowing them to renew their account. + + Args: + user_id (str): ID of the user to send email(s) to. + expiration_ts (int): Timestamp in milliseconds for the expiration date of + this user's account (used in the email templates). + """ + addresses = yield self._get_email_addresses_for_user(user_id) + + # Stop right here if the user doesn't have at least one email address. + # In this case, they will have to ask their server admin to renew their + # account manually. + if not addresses: + return + + try: + user_display_name = yield self.store.get_profile_displayname( + UserID.from_string(user_id).localpart + ) + if user_display_name is None: + user_display_name = user_id + except StoreError: + user_display_name = user_id + + renewal_token = yield self._get_renewal_token(user_id) + url = "%s_matrix/client/unstable/account_validity/renew?token=%s" % ( + self.hs.config.public_baseurl, + renewal_token, + ) + + template_vars = { + "display_name": user_display_name, + "expiration_ts": expiration_ts, + "url": url, + } + + html_text = self._template_html.render(**template_vars) + html_part = MIMEText(html_text, "html", "utf8") + + plain_text = self._template_text.render(**template_vars) + text_part = MIMEText(plain_text, "plain", "utf8") + + for address in addresses: + raw_to = email.utils.parseaddr(address)[1] + + multipart_msg = MIMEMultipart('alternative') + multipart_msg['Subject'] = self._subject + multipart_msg['From'] = self._from_string + multipart_msg['To'] = address + multipart_msg['Date'] = email.utils.formatdate() + multipart_msg['Message-ID'] = email.utils.make_msgid() + multipart_msg.attach(text_part) + multipart_msg.attach(html_part) + + logger.info("Sending renewal email to %s", address) + + yield make_deferred_yieldable(self.sendmail( + self.hs.config.email_smtp_host, + self._raw_from, raw_to, multipart_msg.as_string().encode('utf8'), + reactor=self.hs.get_reactor(), + port=self.hs.config.email_smtp_port, + requireAuthentication=self.hs.config.email_smtp_user is not None, + username=self.hs.config.email_smtp_user, + password=self.hs.config.email_smtp_pass, + requireTransportSecurity=self.hs.config.require_transport_security + )) + + yield self.store.set_renewal_mail_status( + user_id=user_id, + 
email_sent=True, + ) + + @defer.inlineCallbacks + def _get_email_addresses_for_user(self, user_id): + """Retrieve the list of email addresses attached to a user's account. + + Args: + user_id (str): ID of the user to lookup email addresses for. + + Returns: + defer.Deferred[list[str]]: Email addresses for this account. + """ + threepids = yield self.store.user_get_threepids(user_id) + + addresses = [] + for threepid in threepids: + if threepid["medium"] == "email": + addresses.append(threepid["address"]) + + defer.returnValue(addresses) + + @defer.inlineCallbacks + def _get_renewal_token(self, user_id): + """Generates a 32-byte long random string that will be inserted into the + user's renewal email's unique link, then saves it into the database. + + Args: + user_id (str): ID of the user to generate a string for. + + Returns: + defer.Deferred[str]: The generated string. + + Raises: + StoreError(500): Couldn't generate a unique string after 5 attempts. + """ + attempts = 0 + while attempts < 5: + try: + renewal_token = stringutils.random_string(32) + yield self.store.set_renewal_token_for_user(user_id, renewal_token) + defer.returnValue(renewal_token) + except StoreError: + attempts += 1 + raise StoreError(500, "Couldn't generate a unique string as refresh string.") + + @defer.inlineCallbacks + def renew_account(self, renewal_token): + """Renews the account attached to a given renewal token by pushing back the + expiration date by the current validity period in the server's configuration. + + Args: + renewal_token (str): Token sent with the renewal request. + """ + user_id = yield self.store.get_user_from_renewal_token(renewal_token) + + logger.debug("Renewing an account for user %s", user_id) + + new_expiration_date = self.clock.time_msec() + self._account_validity.period + + yield self.store.renew_account_for_user( + user_id=user_id, + new_expiration_ts=new_expiration_date, + ) diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index 1eb5be0957..c269bcf4a4 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -521,11 +521,11 @@ def format_ts_filter(value, format): return time.strftime(format, time.localtime(value / 1000)) -def load_jinja2_templates(config): +def load_jinja2_templates(config, template_html_name, template_text_name): """Load the jinja2 email templates from disk Returns: - (notif_template_html, notif_template_text) + (template_html, template_text) """ logger.info("loading email templates from '%s'", config.email_template_dir) loader = jinja2.FileSystemLoader(config.email_template_dir) @@ -533,14 +533,10 @@ def load_jinja2_templates(config): env.filters["format_ts"] = format_ts_filter env.filters["mxc_to_http"] = _create_mxc_to_http_filter(config) - notif_template_html = env.get_template( - config.email_notif_template_html - ) - notif_template_text = env.get_template( - config.email_notif_template_text - ) + template_html = env.get_template(template_html_name) + template_text = env.get_template(template_text_name) - return notif_template_html, notif_template_text + return template_html, template_text def _create_mxc_to_http_filter(config): diff --git a/synapse/push/pusher.py b/synapse/push/pusher.py index b33f2a357b..14bc7823cf 100644 --- a/synapse/push/pusher.py +++ b/synapse/push/pusher.py @@ -44,7 +44,11 @@ class PusherFactory(object): if hs.config.email_enable_notifs: self.mailers = {} # app_name -> Mailer - templates = load_jinja2_templates(hs.config) + templates = load_jinja2_templates( + config=hs.config, + 
template_html_name=hs.config.email_notif_template_html,
+                template_text_name=hs.config.email_notif_template_text,
+            )
             self.notif_template_html, self.notif_template_text = templates
 
             self.pusher_types["email"] = self._create_email_pusher
diff --git a/synapse/res/templates/mail-expiry.css b/synapse/res/templates/mail-expiry.css
new file mode 100644
index 0000000000..3dea486467
--- /dev/null
+++ b/synapse/res/templates/mail-expiry.css
@@ -0,0 +1,4 @@
+.noticetext {
+    margin-top: 10px;
+    margin-bottom: 10px;
+}
diff --git a/synapse/res/templates/notice_expiry.html b/synapse/res/templates/notice_expiry.html
new file mode 100644
index 0000000000..f0d7c66e1b
--- /dev/null
+++ b/synapse/res/templates/notice_expiry.html
@@ -0,0 +1,43 @@
+<!doctype html>
+<html lang="en">
+    <head>
+        <style type="text/css">
+            {% include 'mail.css' without context %}
+            {% include "mail-%s.css" % app_name ignore missing without context %}
+            {% include 'mail-expiry.css' without context %}
+        </style>
+    </head>
+    <body>
+        <div class="salutation">Hi {{ display_name }},</div>
+        <div class="noticetext">
+            Your account will expire on {{ expiration_ts|format_ts("%d-%m-%Y") }}. This means that you will lose access to your account after this date.
+        </div>
+        <div class="noticetext">
+            To extend the validity of your account, please click on the link below (or copy and paste it into a new browser tab):
+        </div>
+        <div class="noticetext">
+            <a href="{{ url }}">{{ url }}</a>
+        </div>
+    </body>
+</html>
diff --git a/synapse/res/templates/notice_expiry.txt b/synapse/res/templates/notice_expiry.txt
new file mode 100644
index 0000000000..41f1c4279c
--- /dev/null
+++ b/synapse/res/templates/notice_expiry.txt
@@ -0,0 +1,7 @@
+Hi {{ display_name }},
+
+Your account will expire on {{ expiration_ts|format_ts("%d-%m-%Y") }}. This means that you will lose access to your account after this date.
+
+To extend the validity of your account, please click on the link below (or copy and paste it into a new browser tab):
+
+{{ url }}
diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py
index 91f5247d52..a66885d349 100644
--- a/synapse/rest/__init__.py
+++ b/synapse/rest/__init__.py
@@ -33,6 +33,7 @@ from synapse.rest.client.v1 import (
 from synapse.rest.client.v2_alpha import (
     account,
     account_data,
+    account_validity,
     auth,
     capabilities,
     devices,
@@ -109,3 +110,4 @@ class ClientRestResource(JsonResource):
         groups.register_servlets(hs, client_resource)
         room_upgrade_rest_servlet.register_servlets(hs, client_resource)
         capabilities.register_servlets(hs, client_resource)
+        account_validity.register_servlets(hs, client_resource)
diff --git a/synapse/rest/client/v2_alpha/account_validity.py b/synapse/rest/client/v2_alpha/account_validity.py
new file mode 100644
index 0000000000..1ff6a6b638
--- /dev/null
+++ b/synapse/rest/client/v2_alpha/account_validity.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from twisted.internet import defer
+
+from synapse.api.errors import SynapseError
+from synapse.http.server import finish_request
+from synapse.http.servlet import RestServlet
+
+from ._base import client_v2_patterns
+
+logger = logging.getLogger(__name__)
+
+
+class AccountValidityRenewServlet(RestServlet):
+    PATTERNS = client_v2_patterns("/account_validity/renew$")
+    SUCCESS_HTML = b"Your account has been successfully renewed."
+ + def __init__(self, hs): + """ + Args: + hs (synapse.server.HomeServer): server + """ + super(AccountValidityRenewServlet, self).__init__() + + self.hs = hs + self.account_activity_handler = hs.get_account_validity_handler() + + @defer.inlineCallbacks + def on_GET(self, request): + if b"token" not in request.args: + raise SynapseError(400, "Missing renewal token") + renewal_token = request.args[b"token"][0] + + yield self.account_activity_handler.renew_account(renewal_token.decode('utf8')) + + request.setResponseCode(200) + request.setHeader(b"Content-Type", b"text/html; charset=utf-8") + request.setHeader(b"Content-Length", b"%d" % ( + len(AccountValidityRenewServlet.SUCCESS_HTML), + )) + request.write(AccountValidityRenewServlet.SUCCESS_HTML) + finish_request(request) + defer.returnValue(None) + + +def register_servlets(hs, http_server): + AccountValidityRenewServlet(hs).register(http_server) diff --git a/synapse/server.py b/synapse/server.py index dc8f1ccb8c..8c30ac2fa5 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -47,6 +47,7 @@ from synapse.federation.transport.client import TransportLayerClient from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer from synapse.groups.groups_server import GroupsServerHandler from synapse.handlers import Handlers +from synapse.handlers.account_validity import AccountValidityHandler from synapse.handlers.acme import AcmeHandler from synapse.handlers.appservice import ApplicationServicesHandler from synapse.handlers.auth import AuthHandler, MacaroonGenerator @@ -183,6 +184,7 @@ class HomeServer(object): 'room_context_handler', 'sendmail', 'registration_handler', + 'account_validity_handler', ] REQUIRED_ON_MASTER_STARTUP = [ @@ -506,6 +508,9 @@ class HomeServer(object): def build_registration_handler(self): return RegistrationHandler(self) + def build_account_validity_handler(self): + return AccountValidityHandler(self) + def remove_pusher(self, app_id, push_key, user_id): return self.get_pusherpool().remove_pusher(app_id, push_key, user_id) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index eede8ae4d2..a78850259f 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -32,6 +32,7 @@ class RegistrationWorkerStore(SQLBaseStore): super(RegistrationWorkerStore, self).__init__(db_conn, hs) self.config = hs.config + self.clock = hs.get_clock() @cached() def get_user_by_id(self, user_id): @@ -87,25 +88,156 @@ class RegistrationWorkerStore(SQLBaseStore): ) @cachedInlineCallbacks() - def get_expiration_ts_for_user(self, user): + def get_expiration_ts_for_user(self, user_id): """Get the expiration timestamp for the account bearing a given user ID. Args: - user (str): The ID of the user. + user_id (str): The ID of the user. Returns: defer.Deferred: None, if the account has no expiration timestamp, - otherwise int representation of the timestamp (as a number of - milliseconds since epoch). + otherwise int representation of the timestamp (as a number of + milliseconds since epoch). 
""" res = yield self._simple_select_one_onecol( table="account_validity", - keyvalues={"user_id": user.to_string()}, + keyvalues={"user_id": user_id}, retcol="expiration_ts_ms", allow_none=True, - desc="get_expiration_date_for_user", + desc="get_expiration_ts_for_user", + ) + defer.returnValue(res) + + @defer.inlineCallbacks + def renew_account_for_user(self, user_id, new_expiration_ts): + """Updates the account validity table with a new timestamp for a given + user, removes the existing renewal token from this user, and unsets the + flag indicating that an email has been sent for renewing this account. + + Args: + user_id (str): ID of the user whose account validity to renew. + new_expiration_ts: New expiration date, as a timestamp in milliseconds + since epoch. + """ + def renew_account_for_user_txn(txn): + self._simple_update_txn( + txn=txn, + table="account_validity", + keyvalues={"user_id": user_id}, + updatevalues={ + "expiration_ts_ms": new_expiration_ts, + "email_sent": False, + "renewal_token": None, + }, + ) + self._invalidate_cache_and_stream( + txn, self.get_expiration_ts_for_user, (user_id,), + ) + + yield self.runInteraction( + "renew_account_for_user", + renew_account_for_user_txn, + ) + + @defer.inlineCallbacks + def set_renewal_token_for_user(self, user_id, renewal_token): + """Defines a renewal token for a given user. + + Args: + user_id (str): ID of the user to set the renewal token for. + renewal_token (str): Random unique string that will be used to renew the + user's account. + + Raises: + StoreError: The provided token is already set for another user. + """ + yield self._simple_update_one( + table="account_validity", + keyvalues={"user_id": user_id}, + updatevalues={"renewal_token": renewal_token}, + desc="set_renewal_token_for_user", + ) + + @defer.inlineCallbacks + def get_user_from_renewal_token(self, renewal_token): + """Get a user ID from a renewal token. + + Args: + renewal_token (str): The renewal token to perform the lookup with. + + Returns: + defer.Deferred[str]: The ID of the user to which the token belongs. + """ + res = yield self._simple_select_one_onecol( + table="account_validity", + keyvalues={"renewal_token": renewal_token}, + retcol="user_id", + desc="get_user_from_renewal_token", + ) + + defer.returnValue(res) + + @defer.inlineCallbacks + def get_renewal_token_for_user(self, user_id): + """Get the renewal token associated with a given user ID. + + Args: + user_id (str): The user ID to lookup a token for. + + Returns: + defer.Deferred[str]: The renewal token associated with this user ID. + """ + res = yield self._simple_select_one_onecol( + table="account_validity", + keyvalues={"user_id": user_id}, + retcol="renewal_token", + desc="get_renewal_token_for_user", ) + defer.returnValue(res) + @defer.inlineCallbacks + def get_users_expiring_soon(self): + """Selects users whose account will expire in the [now, now + renew_at] time + window (see configuration for account_validity for information on what renew_at + refers to). + + Returns: + Deferred: Resolves to a list[dict[user_id (str), expiration_ts_ms (int)]] + """ + def select_users_txn(txn, now_ms, renew_at): + sql = ( + "SELECT user_id, expiration_ts_ms FROM account_validity" + " WHERE email_sent = ? AND (expiration_ts_ms - ?) <= ?" 
+ ) + values = [False, now_ms, renew_at] + txn.execute(sql, values) + return self.cursor_to_dict(txn) + + res = yield self.runInteraction( + "get_users_expiring_soon", + select_users_txn, + self.clock.time_msec(), self.config.account_validity.renew_at, + ) + + defer.returnValue(res) + + @defer.inlineCallbacks + def set_renewal_mail_status(self, user_id, email_sent): + """Sets or unsets the flag that indicates whether a renewal email has been sent + to the user (and the user hasn't renewed their account yet). + + Args: + user_id (str): ID of the user to set/unset the flag for. + email_sent (bool): Flag which indicates whether a renewal email has been sent + to this user. + """ + yield self._simple_update_one( + table="account_validity", + keyvalues={"user_id": user_id}, + updatevalues={"email_sent": email_sent}, + desc="set_renewal_mail_status", + ) + @defer.inlineCallbacks def is_server_admin(self, user): res = yield self._simple_select_one_onecol( @@ -508,22 +640,24 @@ class RegistrationStore(RegistrationWorkerStore, } ) - if self._account_validity.enabled: - now_ms = self.clock.time_msec() - expiration_ts = now_ms + self._account_validity.period - self._simple_insert_txn( - txn, - "account_validity", - values={ - "user_id": user_id, - "expiration_ts_ms": expiration_ts, - } - ) except self.database_engine.module.IntegrityError: raise StoreError( 400, "User ID already taken.", errcode=Codes.USER_IN_USE ) + if self._account_validity.enabled: + now_ms = self.clock.time_msec() + expiration_ts = now_ms + self._account_validity.period + self._simple_insert_txn( + txn, + "account_validity", + values={ + "user_id": user_id, + "expiration_ts_ms": expiration_ts, + "email_sent": False, + } + ) + if token: # it's possible for this to get a conflict, but only for a single user # since tokens are namespaced based on their user ID diff --git a/synapse/storage/schema/delta/54/account_validity.sql b/synapse/storage/schema/delta/54/account_validity.sql index 57249262d7..2357626000 100644 --- a/synapse/storage/schema/delta/54/account_validity.sql +++ b/synapse/storage/schema/delta/54/account_validity.sql @@ -13,8 +13,15 @@ * limitations under the License. */ +DROP TABLE IF EXISTS account_validity; + -- Track what users are in public rooms. 
CREATE TABLE IF NOT EXISTS account_validity ( user_id TEXT PRIMARY KEY, - expiration_ts_ms BIGINT NOT NULL + expiration_ts_ms BIGINT NOT NULL, + email_sent BOOLEAN NOT NULL, + renewal_token TEXT ); + +CREATE INDEX account_validity_email_sent_idx ON account_validity(email_sent, expiration_ts_ms) +CREATE UNIQUE INDEX account_validity_renewal_string_idx ON account_validity(renewal_token) diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index d3611ed21f..8fb5140a05 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -1,14 +1,22 @@ import datetime import json +import os + +import pkg_resources from synapse.api.constants import LoginType from synapse.api.errors import Codes from synapse.appservice import ApplicationService from synapse.rest.client.v1 import admin, login -from synapse.rest.client.v2_alpha import register, sync +from synapse.rest.client.v2_alpha import account_validity, register, sync from tests import unittest +try: + from synapse.push.mailer import load_jinja2_templates +except ImportError: + load_jinja2_templates = None + class RegisterRestServletTestCase(unittest.HomeserverTestCase): @@ -197,6 +205,7 @@ class AccountValidityTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor, clock): config = self.default_config() + # Test for account expiring after a week. config.enable_registration = True config.account_validity.enabled = True config.account_validity.period = 604800000 # Time in ms for 1 week @@ -228,3 +237,92 @@ class AccountValidityTestCase(unittest.HomeserverTestCase): self.assertEquals( channel.json_body["errcode"], Codes.EXPIRED_ACCOUNT, channel.result, ) + + +class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): + + skip = "No Jinja installed" if not load_jinja2_templates else None + servlets = [ + register.register_servlets, + admin.register_servlets, + login.register_servlets, + sync.register_servlets, + account_validity.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + config = self.default_config() + # Test for account expiring after a week and renewal emails being sent 2 + # days before expiry. + config.enable_registration = True + config.account_validity.enabled = True + config.account_validity.renew_by_email_enabled = True + config.account_validity.period = 604800000 # Time in ms for 1 week + config.account_validity.renew_at = 172800000 # Time in ms for 2 days + config.account_validity.renew_email_subject = "Renew your account" + + # Email config. + self.email_attempts = [] + + def sendmail(*args, **kwargs): + self.email_attempts.append((args, kwargs)) + return + + config.email_template_dir = os.path.abspath( + pkg_resources.resource_filename('synapse', 'res/templates') + ) + config.email_expiry_template_html = "notice_expiry.html" + config.email_expiry_template_text = "notice_expiry.txt" + config.email_smtp_host = "127.0.0.1" + config.email_smtp_port = 20 + config.require_transport_security = False + config.email_smtp_user = None + config.email_smtp_pass = None + config.email_notif_from = "test@example.com" + + self.hs = self.setup_test_homeserver(config=config, sendmail=sendmail) + + self.store = self.hs.get_datastore() + + return self.hs + + def test_renewal_email(self): + user_id = self.register_user("kermit", "monkey") + tok = self.login("kermit", "monkey") + # We need to manually add an email address otherwise the handler will do + # nothing. 
+ now = self.hs.clock.time_msec() + self.get_success(self.store.user_add_threepid( + user_id=user_id, medium="email", address="kermit@example.com", + validated_at=now, added_at=now, + )) + + # The specific endpoint doesn't matter, all we need is an authenticated + # endpoint. + request, channel = self.make_request( + b"GET", "/sync", access_token=tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"200", channel.result) + + # Move 6 days forward. This should trigger a renewal email to be sent. + self.reactor.advance(datetime.timedelta(days=6).total_seconds()) + self.assertEqual(len(self.email_attempts), 1) + + # Retrieving the URL from the email is too much pain for now, so we + # retrieve the token from the DB. + renewal_token = self.get_success(self.store.get_renewal_token_for_user(user_id)) + url = "/_matrix/client/unstable/account_validity/renew?token=%s" % renewal_token + request, channel = self.make_request(b"GET", url) + self.render(request) + self.assertEquals(channel.result["code"], b"200", channel.result) + + # Move 3 days forward. If the renewal failed, every authed request with + # our access token should be denied from now, otherwise they should + # succeed. + self.reactor.advance(datetime.timedelta(days=3).total_seconds()) + request, channel = self.make_request( + b"GET", "/sync", access_token=tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"200", channel.result) -- cgit 1.5.1 From eaf41a943b2cd3f7f32d142c9552d558eb37a074 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 16 Apr 2019 20:13:59 +0100 Subject: Add management endpoints for account validity --- changelog.d/5073.feature | 1 + docs/admin_api/account_validity.rst | 42 +++++++++++ synapse/api/auth.py | 2 +- synapse/handlers/account_validity.py | 33 +++++++- synapse/rest/client/v1/admin.py | 39 ++++++++++ synapse/rest/client/v2_alpha/account_validity.py | 31 +++++++- synapse/storage/registration.py | 29 +++++--- tests/rest/client/v2_alpha/test_register.py | 95 ++++++++++++++++++++++-- 8 files changed, 246 insertions(+), 26 deletions(-) create mode 100644 changelog.d/5073.feature create mode 100644 docs/admin_api/account_validity.rst diff --git a/changelog.d/5073.feature b/changelog.d/5073.feature new file mode 100644 index 0000000000..12766a82a7 --- /dev/null +++ b/changelog.d/5073.feature @@ -0,0 +1 @@ +Add time-based account expiration. diff --git a/docs/admin_api/account_validity.rst b/docs/admin_api/account_validity.rst new file mode 100644 index 0000000000..980ea23605 --- /dev/null +++ b/docs/admin_api/account_validity.rst @@ -0,0 +1,42 @@ +Account validity API +==================== + +This API allows a server administrator to manage the validity of an account. To +use it, you must enable the account validity feature (under +``account_validity``) in Synapse's configuration. + +Renew account +------------- + +This API extends the validity of an account by as much time as configured in the +``period`` parameter from the ``account_validity`` configuration. + +The API is:: + + POST /_matrix/client/unstable/account_validity/send_mail + +with the following body: + +.. code:: json + + { + "user_id": "", + "expiration_ts": 0, + "enable_renewal_emails": true + } + + +``expiration_ts`` is an optional parameter and overrides the expiration date, +which otherwise defaults to now + validity period. + +``enable_renewal_emails`` is also an optional parameter and enables/disables +sending renewal emails to the user. Defaults to true. 
+ +The API returns with the new expiration date for this account, as a timestamp in +milliseconds since epoch: + +.. code:: json + + { + "expiration_ts": 0 + } diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 4482962510..960e66dbdc 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -232,7 +232,7 @@ class Auth(object): if self._account_validity.enabled: user_id = user.to_string() expiration_ts = yield self.store.get_expiration_ts_for_user(user_id) - if expiration_ts and self.clock.time_msec() >= expiration_ts: + if expiration_ts is not None and self.clock.time_msec() >= expiration_ts: raise AuthError( 403, "User account has expired", diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py index e82049e42d..261446517d 100644 --- a/synapse/handlers/account_validity.py +++ b/synapse/handlers/account_validity.py @@ -90,6 +90,11 @@ class AccountValidityHandler(object): expiration_ts=user["expiration_ts_ms"], ) + @defer.inlineCallbacks + def send_renewal_email_to_user(self, user_id): + expiration_ts = yield self.store.get_expiration_ts_for_user(user_id) + yield self._send_renewal_email(user_id, expiration_ts) + @defer.inlineCallbacks def _send_renewal_email(self, user_id, expiration_ts): """Sends out a renewal email to every email address attached to the given user @@ -217,12 +222,32 @@ class AccountValidityHandler(object): renewal_token (str): Token sent with the renewal request. """ user_id = yield self.store.get_user_from_renewal_token(renewal_token) - logger.debug("Renewing an account for user %s", user_id) + yield self.renew_account_for_user(user_id) - new_expiration_date = self.clock.time_msec() + self._account_validity.period + @defer.inlineCallbacks + def renew_account_for_user(self, user_id, expiration_ts=None, email_sent=False): + """Renews the account attached to a given user by pushing back the + expiration date by the current validity period in the server's + configuration. - yield self.store.renew_account_for_user( + Args: + renewal_token (str): Token sent with the renewal request. + expiration_ts (int): New expiration date. Defaults to now + validity period. + email_sent (bool): Whether an email has been sent for this validity period. + Defaults to False. + + Returns: + defer.Deferred[int]: New expiration date for this account, as a timestamp + in milliseconds since epoch. 
+ """ + if expiration_ts is None: + expiration_ts = self.clock.time_msec() + self._account_validity.period + + yield self.store.set_account_validity_for_user( user_id=user_id, - new_expiration_ts=new_expiration_date, + expiration_ts=expiration_ts, + email_sent=email_sent, ) + + defer.returnValue(expiration_ts) diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index e788769639..d27472c538 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -786,6 +786,44 @@ class SearchUsersRestServlet(ClientV1RestServlet): defer.returnValue((200, ret)) +class AccountValidityRenewServlet(ClientV1RestServlet): + PATTERNS = client_path_patterns("/admin/account_validity/validity$") + + def __init__(self, hs): + """ + Args: + hs (synapse.server.HomeServer): server + """ + super(AccountValidityRenewServlet, self).__init__(hs) + + self.hs = hs + self.account_activity_handler = hs.get_account_validity_handler() + self.auth = hs.get_auth() + + @defer.inlineCallbacks + def on_POST(self, request): + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + body = parse_json_object_from_request(request) + + if "user_id" not in body: + raise SynapseError(400, "Missing property 'user_id' in the request body") + + expiration_ts = yield self.account_activity_handler.renew_account_for_user( + body["user_id"], body.get("expiration_ts"), + not body.get("enable_renewal_emails", True), + ) + + res = { + "expiration_ts": expiration_ts, + } + defer.returnValue((200, res)) + + def register_servlets(hs, http_server): WhoisRestServlet(hs).register(http_server) PurgeMediaCacheRestServlet(hs).register(http_server) @@ -801,3 +839,4 @@ def register_servlets(hs, http_server): ListMediaInRoom(hs).register(http_server) UserRegisterServlet(hs).register(http_server) VersionServlet(hs).register(http_server) + AccountValidityRenewServlet(hs).register(http_server) diff --git a/synapse/rest/client/v2_alpha/account_validity.py b/synapse/rest/client/v2_alpha/account_validity.py index 1ff6a6b638..fc8dbeb617 100644 --- a/synapse/rest/client/v2_alpha/account_validity.py +++ b/synapse/rest/client/v2_alpha/account_validity.py @@ -17,7 +17,7 @@ import logging from twisted.internet import defer -from synapse.api.errors import SynapseError +from synapse.api.errors import AuthError, SynapseError from synapse.http.server import finish_request from synapse.http.servlet import RestServlet @@ -39,6 +39,7 @@ class AccountValidityRenewServlet(RestServlet): self.hs = hs self.account_activity_handler = hs.get_account_validity_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request): @@ -58,5 +59,33 @@ class AccountValidityRenewServlet(RestServlet): defer.returnValue(None) +class AccountValiditySendMailServlet(RestServlet): + PATTERNS = client_v2_patterns("/account_validity/send_mail$") + + def __init__(self, hs): + """ + Args: + hs (synapse.server.HomeServer): server + """ + super(AccountValiditySendMailServlet, self).__init__() + + self.hs = hs + self.account_activity_handler = hs.get_account_validity_handler() + self.auth = hs.get_auth() + self.account_validity = self.hs.config.account_validity + + @defer.inlineCallbacks + def on_POST(self, request): + if not self.account_validity.renew_by_email_enabled: + raise AuthError(403, "Account renewal via email is disabled on this server.") + + requester = yield self.auth.get_user_by_req(request) 
+ user_id = requester.user.to_string() + yield self.account_activity_handler.send_renewal_email_to_user(user_id) + + defer.returnValue((200, {})) + + def register_servlets(hs, http_server): AccountValidityRenewServlet(hs).register(http_server) + AccountValiditySendMailServlet(hs).register(http_server) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index a78850259f..dfdb4e7e34 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -108,25 +108,30 @@ class RegistrationWorkerStore(SQLBaseStore): defer.returnValue(res) @defer.inlineCallbacks - def renew_account_for_user(self, user_id, new_expiration_ts): - """Updates the account validity table with a new timestamp for a given - user, removes the existing renewal token from this user, and unsets the - flag indicating that an email has been sent for renewing this account. + def set_account_validity_for_user(self, user_id, expiration_ts, email_sent, + renewal_token=None): + """Updates the account validity properties of the given account, with the + given values. Args: - user_id (str): ID of the user whose account validity to renew. - new_expiration_ts: New expiration date, as a timestamp in milliseconds + user_id (str): ID of the account to update properties for. + expiration_ts (int): New expiration date, as a timestamp in milliseconds since epoch. + email_sent (bool): True means a renewal email has been sent for this + account and there's no need to send another one for the current validity + period. + renewal_token (str): Renewal token the user can use to extend the validity + of their account. Defaults to no token. """ - def renew_account_for_user_txn(txn): + def set_account_validity_for_user_txn(txn): self._simple_update_txn( txn=txn, table="account_validity", keyvalues={"user_id": user_id}, updatevalues={ - "expiration_ts_ms": new_expiration_ts, - "email_sent": False, - "renewal_token": None, + "expiration_ts_ms": expiration_ts, + "email_sent": email_sent, + "renewal_token": renewal_token, }, ) self._invalidate_cache_and_stream( @@ -134,8 +139,8 @@ class RegistrationWorkerStore(SQLBaseStore): ) yield self.runInteraction( - "renew_account_for_user", - renew_account_for_user_txn, + "set_account_validity_for_user", + set_account_validity_for_user_txn, ) @defer.inlineCallbacks diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index 8fb5140a05..3d44667489 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -201,6 +201,7 @@ class AccountValidityTestCase(unittest.HomeserverTestCase): admin.register_servlets, login.register_servlets, sync.register_servlets, + account_validity.register_servlets, ] def make_homeserver(self, reactor, clock): @@ -238,6 +239,68 @@ class AccountValidityTestCase(unittest.HomeserverTestCase): channel.json_body["errcode"], Codes.EXPIRED_ACCOUNT, channel.result, ) + def test_manual_renewal(self): + user_id = self.register_user("kermit", "monkey") + tok = self.login("kermit", "monkey") + + self.reactor.advance(datetime.timedelta(weeks=1).total_seconds()) + + # If we register the admin user at the beginning of the test, it will + # expire at the same time as the normal user and the renewal request + # will be denied. 
+ self.register_user("admin", "adminpassword", admin=True) + admin_tok = self.login("admin", "adminpassword") + + url = "/_matrix/client/unstable/admin/account_validity/validity" + params = { + "user_id": user_id, + } + request_data = json.dumps(params) + request, channel = self.make_request( + b"POST", url, request_data, access_token=admin_tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"200", channel.result) + + # The specific endpoint doesn't matter, all we need is an authenticated + # endpoint. + request, channel = self.make_request( + b"GET", "/sync", access_token=tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"200", channel.result) + + def test_manual_expire(self): + user_id = self.register_user("kermit", "monkey") + tok = self.login("kermit", "monkey") + + self.register_user("admin", "adminpassword", admin=True) + admin_tok = self.login("admin", "adminpassword") + + url = "/_matrix/client/unstable/admin/account_validity/validity" + params = { + "user_id": user_id, + "expiration_ts": 0, + "enable_renewal_emails": False, + } + request_data = json.dumps(params) + request, channel = self.make_request( + b"POST", url, request_data, access_token=admin_tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"200", channel.result) + + # The specific endpoint doesn't matter, all we need is an authenticated + # endpoint. + request, channel = self.make_request( + b"GET", "/sync", access_token=tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"403", channel.result) + self.assertEquals( + channel.json_body["errcode"], Codes.EXPIRED_ACCOUNT, channel.result, + ) + class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): @@ -287,6 +350,8 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): return self.hs def test_renewal_email(self): + self.email_attempts = [] + user_id = self.register_user("kermit", "monkey") tok = self.login("kermit", "monkey") # We need to manually add an email address otherwise the handler will do @@ -297,14 +362,6 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): validated_at=now, added_at=now, )) - # The specific endpoint doesn't matter, all we need is an authenticated - # endpoint. - request, channel = self.make_request( - b"GET", "/sync", access_token=tok, - ) - self.render(request) - self.assertEquals(channel.result["code"], b"200", channel.result) - # Move 6 days forward. This should trigger a renewal email to be sent. self.reactor.advance(datetime.timedelta(days=6).total_seconds()) self.assertEqual(len(self.email_attempts), 1) @@ -326,3 +383,25 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): ) self.render(request) self.assertEquals(channel.result["code"], b"200", channel.result) + + def test_manual_email_send(self): + self.email_attempts = [] + + user_id = self.register_user("kermit", "monkey") + tok = self.login("kermit", "monkey") + # We need to manually add an email address otherwise the handler will do + # nothing. 
+ now = self.hs.clock.time_msec() + self.get_success(self.store.user_add_threepid( + user_id=user_id, medium="email", address="kermit@example.com", + validated_at=now, added_at=now, + )) + + request, channel = self.make_request( + b"POST", "/_matrix/client/unstable/account_validity/send_mail", + access_token=tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"200", channel.result) + + self.assertEqual(len(self.email_attempts), 1) -- cgit 1.5.1 From 95c603ae6f0f1639f6299fff62d7a75002221276 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 17 Apr 2019 23:52:00 +0100 Subject: Update debian install docs for new key and repo (#5074) --- INSTALL.md | 30 +++++++++++++++++++++++++----- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/INSTALL.md b/INSTALL.md index a5c3c6efaa..d55a1f89ac 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -257,18 +257,38 @@ https://github.com/spantaleev/matrix-docker-ansible-deploy #### Matrix.org packages Matrix.org provides Debian/Ubuntu packages of the latest stable version of -Synapse via https://matrix.org/packages/debian/. To use them: +Synapse via https://packages.matrix.org/debian/. To use them: +For Debian 9 (Stretch), Ubuntu 16.04 (Xenial), and later: + +``` +sudo apt install -y lsb-release wget apt-transport-https +sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg +echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" | + sudo tee /etc/apt/sources.list.d/matrix-org.list +sudo apt update +sudo apt install matrix-synapse-py3 ``` -sudo apt install -y lsb-release curl apt-transport-https -echo "deb https://matrix.org/packages/debian `lsb_release -cs` main" | + +For Debian 8 (Jessie): + +``` +sudo apt install -y lsb-release wget apt-transport-https +sudo wget -O /etc/apt/trusted.gpg.d/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg +echo "deb [signed-by=5586CCC0CBBBEFC7A25811ADF473DD4473365DE1] https://packages.matrix.org/debian/ $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/matrix-org.list -curl "https://matrix.org/packages/debian/repo-key.asc" | - sudo apt-key add - sudo apt update sudo apt install matrix-synapse-py3 ``` +**Note**: if you followed a previous version of these instructions which +recommended using `apt-key add` to add an old key from +`https://matrix.org/packages/debian/`, you should note that this key has been +revoked. You should remove the old key with `sudo apt-key remove +C35EB17E1EAE708E6603A9B3AD0592FE47F0DF61`, and follow the above instructions to +update your configuration. + + #### Downstream Debian/Ubuntu packages For `buster` and `sid`, Synapse is available in the Debian repositories and -- cgit 1.5.1 From f8826d31cdc974552351bfa7d14ad40bb225d3e7 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 18 Apr 2019 14:46:08 +0100 Subject: Don't crash on lack of expiry templates --- changelog.d/5077.bugfix | 1 + synapse/config/emailconfig.py | 8 ++++++-- 2 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 changelog.d/5077.bugfix diff --git a/changelog.d/5077.bugfix b/changelog.d/5077.bugfix new file mode 100644 index 0000000000..f3345a6353 --- /dev/null +++ b/changelog.d/5077.bugfix @@ -0,0 +1 @@ +Don't crash on lack of expiry templates. 
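The emailconfig.py change that follows swaps mandatory dict lookups for .get() with bundled defaults. A minimal, self-contained illustration of the difference (plain dict semantics, not Synapse code):

# Without the expiry templates configured, indexing the config dict raises
# KeyError at startup; .get() falls back to the bundled template names.
email_config = {
    "notif_from": "test@example.com",
    # note: no "expiry_template_html" / "expiry_template_text" keys set
}

try:
    email_config["expiry_template_html"]
except KeyError:
    print("old behaviour: startup crashes with KeyError")

html_template = email_config.get("expiry_template_html", "notice_expiry.html")
text_template = email_config.get("expiry_template_text", "notice_expiry.txt")
print(html_template, text_template)  # notice_expiry.html notice_expiry.txt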
diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index 60827be72f..342a6ce5fd 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -71,8 +71,12 @@ class EmailConfig(Config): self.email_notif_from = email_config["notif_from"] self.email_notif_template_html = email_config["notif_template_html"] self.email_notif_template_text = email_config["notif_template_text"] - self.email_expiry_template_html = email_config["expiry_template_html"] - self.email_expiry_template_text = email_config["expiry_template_text"] + self.email_expiry_template_html = email_config.get( + "expiry_template_html", "notice_expiry.html", + ) + self.email_expiry_template_text = email_config.get( + "expiry_template_text", "notice_expiry.txt", + ) template_dir = email_config.get("template_dir") # we need an absolute path, because we change directory after starting (and -- cgit 1.5.1 From 3d26eae14a69c5a3b11c6f25b81c883dcdb2098d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 22 Apr 2019 14:36:51 +0100 Subject: add gpg key fingerprint --- INSTALL.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/INSTALL.md b/INSTALL.md index d55a1f89ac..1032a2c68e 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -281,6 +281,8 @@ sudo apt update sudo apt install matrix-synapse-py3 ``` +The fingerprint of the repository signing key is AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058. + **Note**: if you followed a previous version of these instructions which recommended using `apt-key add` to add an old key from `https://matrix.org/packages/debian/`, you should note that this key has been -- cgit 1.5.1 From 6b2b9a58c44c9c2f7efc45e8b588cde17fcdb0d4 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 25 Apr 2019 02:37:33 +1000 Subject: Prevent "producer not unregistered" message (#5009) --- changelog.d/5009.bugfix | 1 + synapse/rest/media/v1/_base.py | 4 ++++ 2 files changed, 5 insertions(+) create mode 100644 changelog.d/5009.bugfix diff --git a/changelog.d/5009.bugfix b/changelog.d/5009.bugfix new file mode 100644 index 0000000000..e66d3dd1aa --- /dev/null +++ b/changelog.d/5009.bugfix @@ -0,0 +1 @@ +Clients timing out/disappearing while downloading from the media repository will now no longer log a spurious "Producer was not unregistered" message. \ No newline at end of file diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py index e2b5df701d..2dcc8f74d6 100644 --- a/synapse/rest/media/v1/_base.py +++ b/synapse/rest/media/v1/_base.py @@ -191,6 +191,10 @@ def respond_with_responder(request, responder, media_type, file_size, upload_nam # in that case. logger.warning("Failed to write to consumer: %s %s", type(e), e) + # Unregister the producer, if it has one, so Twisted doesn't complain + if request.producer: + request.unregisterProducer() + finish_request(request) -- cgit 1.5.1 From 788163e2048b560e62005ad3da5ba274ba2726e9 Mon Sep 17 00:00:00 2001 From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com> Date: Wed, 24 Apr 2019 17:44:06 +0100 Subject: Remove log error for .well-known/matrix/client (#4972) --- changelog.d/4972.misc | 1 + synapse/rest/well_known.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/4972.misc diff --git a/changelog.d/4972.misc b/changelog.d/4972.misc new file mode 100644 index 0000000000..e104a9bb34 --- /dev/null +++ b/changelog.d/4972.misc @@ -0,0 +1 @@ +Reduce log level of .well-known/matrix/client responses. 
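For context on the resource whose response logging is quietened in the change below: clients fetch /.well-known/matrix/client during server discovery and fall back if it is absent. A hedged, self-contained sketch of that client-side lookup (hostname and fallback shape are assumptions based on the Matrix client-server discovery format, not taken from this patch):

import json
import requests

def discover_client_api(server_name):
    try:
        resp = requests.get(
            "https://%s/.well-known/matrix/client" % server_name, timeout=10,
        )
        resp.raise_for_status()
        return json.loads(resp.text)
    except (requests.RequestException, ValueError):
        # No usable .well-known: fall back to the server name itself.
        return {"m.homeserver": {"base_url": "https://%s" % server_name}}

print(discover_client_api("example.com"))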
diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py index ab901e63f2..a7fa4f39af 100644 --- a/synapse/rest/well_known.py +++ b/synapse/rest/well_known.py @@ -68,6 +68,6 @@ class WellKnownResource(Resource): request.setHeader(b"Content-Type", b"text/plain") return b'.well-known not available' - logger.error("returning: %s", r) + logger.debug("returning: %s", r) request.setHeader(b"Content-Type", b"application/json") return json.dumps(r).encode("utf-8") -- cgit 1.5.1 From 6824ddd93df1cfc347e4c8f423d54fab5bb732fb Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 25 Apr 2019 06:22:49 -0700 Subject: Config option for verifying federation certificates (MSC 1711) (#4967) --- changelog.d/4967.feature | 1 + docs/MSC1711_certificates_FAQ.md | 1 - docs/sample_config.yaml | 34 ++++++++ synapse/config/server.py | 6 +- synapse/config/tls.py | 95 ++++++++++++++++++++-- synapse/crypto/context_factory.py | 33 ++++++-- synapse/http/federation/matrix_federation_agent.py | 2 +- .../federation/test_matrix_federation_agent.py | 3 +- 8 files changed, 158 insertions(+), 17 deletions(-) create mode 100644 changelog.d/4967.feature diff --git a/changelog.d/4967.feature b/changelog.d/4967.feature new file mode 100644 index 0000000000..7f9f81f849 --- /dev/null +++ b/changelog.d/4967.feature @@ -0,0 +1 @@ +Implementation of [MSC1711](https://github.com/matrix-org/matrix-doc/pull/1711) including config options for requiring valid TLS certificates for federation traffic, the ability to disable TLS validation for specific domains, and the ability to specify your own list of CA certificates. diff --git a/docs/MSC1711_certificates_FAQ.md b/docs/MSC1711_certificates_FAQ.md index 8eb22656db..ebfb20f5c8 100644 --- a/docs/MSC1711_certificates_FAQ.md +++ b/docs/MSC1711_certificates_FAQ.md @@ -177,7 +177,6 @@ You can do this with a `.well-known` file as follows: on `customer.example.net:8000` it correctly handles HTTP requests with Host header set to `customer.example.net:8000`. - ## FAQ ### Synapse 0.99.0 has just been released, what do I need to do right now? diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index ab02e8f20e..a7f6bf31ac 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -260,6 +260,40 @@ listeners: # #tls_private_key_path: "CONFDIR/SERVERNAME.tls.key" +# Whether to verify TLS certificates when sending federation traffic. +# +# This currently defaults to `false`, however this will change in +# Synapse 1.0 when valid federation certificates will be required. +# +#federation_verify_certificates: true + +# Skip federation certificate verification on the following whitelist +# of domains. +# +# This setting should only be used in very specific cases, such as +# federation over Tor hidden services and similar. For private networks +# of homeservers, you likely want to use a private CA instead. +# +# Only effective if federation_verify_certicates is `true`. +# +#federation_certificate_verification_whitelist: +# - lon.example.com +# - *.domain.com +# - *.onion + +# List of custom certificate authorities for federation traffic. +# +# This setting should only normally be used within a private network of +# homeservers. +# +# Note that this list will replace those that are provided by your +# operating environment. Certificates must be in PEM format. 
+# +#federation_custom_ca_list: +# - myCA1.pem +# - myCA2.pem +# - myCA3.pem + # ACME support: This will configure Synapse to request a valid TLS certificate # for your configured `server_name` via Let's Encrypt. # diff --git a/synapse/config/server.py b/synapse/config/server.py index c5e5679d52..cdf1e4d286 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -114,11 +114,13 @@ class ServerConfig(Config): # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( - "federation_domain_whitelist", None + "federation_domain_whitelist", None, ) - # turn the whitelist into a hash for speed of lookup + if federation_domain_whitelist is not None: + # turn the whitelist into a hash for speed of lookup self.federation_domain_whitelist = {} + for domain in federation_domain_whitelist: self.federation_domain_whitelist[domain] = True diff --git a/synapse/config/tls.py b/synapse/config/tls.py index f0014902da..72dd5926f9 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -24,8 +24,10 @@ import six from unpaddedbase64 import encode_base64 from OpenSSL import crypto +from twisted.internet._sslverify import Certificate, trustRootFromCertificates from synapse.config._base import Config, ConfigError +from synapse.util import glob_to_regex logger = logging.getLogger(__name__) @@ -70,6 +72,53 @@ class TlsConfig(Config): self.tls_fingerprints = list(self._original_tls_fingerprints) + # Whether to verify certificates on outbound federation traffic + self.federation_verify_certificates = config.get( + "federation_verify_certificates", False, + ) + + # Whitelist of domains to not verify certificates for + fed_whitelist_entries = config.get( + "federation_certificate_verification_whitelist", [], + ) + + # Support globs (*) in whitelist values + self.federation_certificate_verification_whitelist = [] + for entry in fed_whitelist_entries: + # Convert globs to regex + entry_regex = glob_to_regex(entry) + self.federation_certificate_verification_whitelist.append(entry_regex) + + # List of custom certificate authorities for federation traffic validation + custom_ca_list = config.get( + "federation_custom_ca_list", None, + ) + + # Read in and parse custom CA certificates + self.federation_ca_trust_root = None + if custom_ca_list is not None: + if len(custom_ca_list) == 0: + # A trustroot cannot be generated without any CA certificates. + # Raise an error if this option has been specified without any + # corresponding certificates. + raise ConfigError("federation_custom_ca_list specified without " + "any certificate files") + + certs = [] + for ca_file in custom_ca_list: + logger.debug("Reading custom CA certificate file: %s", ca_file) + content = self.read_file(ca_file) + + # Parse the CA certificates + try: + cert_base = Certificate.loadPEM(content) + certs.append(cert_base) + except Exception as e: + raise ConfigError("Error parsing custom CA certificate file %s: %s" + % (ca_file, e)) + + self.federation_ca_trust_root = trustRootFromCertificates(certs) + # This config option applies to non-federation HTTP clients # (e.g. 
for talking to recaptcha, identity servers, and such) # It should never be used in production, and is intended for @@ -99,15 +148,15 @@ class TlsConfig(Config): try: with open(self.tls_certificate_file, 'rb') as f: cert_pem = f.read() - except Exception: - logger.exception("Failed to read existing certificate off disk!") - raise + except Exception as e: + raise ConfigError("Failed to read existing certificate file %s: %s" + % (self.tls_certificate_file, e)) try: tls_certificate = crypto.load_certificate(crypto.FILETYPE_PEM, cert_pem) - except Exception: - logger.exception("Failed to parse existing certificate off disk!") - raise + except Exception as e: + raise ConfigError("Failed to parse existing certificate file %s: %s" + % (self.tls_certificate_file, e)) if not allow_self_signed: if tls_certificate.get_subject() == tls_certificate.get_issuer(): @@ -192,6 +241,40 @@ class TlsConfig(Config): # #tls_private_key_path: "%(tls_private_key_path)s" + # Whether to verify TLS certificates when sending federation traffic. + # + # This currently defaults to `false`, however this will change in + # Synapse 1.0 when valid federation certificates will be required. + # + #federation_verify_certificates: true + + # Skip federation certificate verification on the following whitelist + # of domains. + # + # This setting should only be used in very specific cases, such as + # federation over Tor hidden services and similar. For private networks + # of homeservers, you likely want to use a private CA instead. + # + # Only effective if federation_verify_certicates is `true`. + # + #federation_certificate_verification_whitelist: + # - lon.example.com + # - *.domain.com + # - *.onion + + # List of custom certificate authorities for federation traffic. + # + # This setting should only normally be used within a private network of + # homeservers. + # + # Note that this list will replace those that are provided by your + # operating environment. Certificates must be in PEM format. + # + #federation_custom_ca_list: + # - myCA1.pem + # - myCA2.pem + # - myCA3.pem + # ACME support: This will configure Synapse to request a valid TLS certificate # for your configured `server_name` via Let's Encrypt. # diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 49cbc7098f..59ea087e66 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -18,10 +18,10 @@ import logging from zope.interface import implementer from OpenSSL import SSL, crypto -from twisted.internet._sslverify import _defaultCurveName +from twisted.internet._sslverify import ClientTLSOptions, _defaultCurveName from twisted.internet.abstract import isIPAddress, isIPv6Address from twisted.internet.interfaces import IOpenSSLClientConnectionCreator -from twisted.internet.ssl import CertificateOptions, ContextFactory +from twisted.internet.ssl import CertificateOptions, ContextFactory, platformTrust from twisted.python.failure import Failure logger = logging.getLogger(__name__) @@ -90,7 +90,7 @@ def _tolerateErrors(wrapped): @implementer(IOpenSSLClientConnectionCreator) -class ClientTLSOptions(object): +class ClientTLSOptionsNoVerify(object): """ Client creator for TLS without certificate identity verification. 
This is a copy of twisted.internet._sslverify.ClientTLSOptions with the identity @@ -127,9 +127,30 @@ class ClientTLSOptionsFactory(object): to remote servers for federation.""" def __init__(self, config): - # We don't use config options yet - self._options = CertificateOptions(verify=False) + self._config = config + self._options_noverify = CertificateOptions() + + # Check if we're using a custom list of a CA certificates + trust_root = config.federation_ca_trust_root + if trust_root is None: + # Use CA root certs provided by OpenSSL + trust_root = platformTrust() + + self._options_verify = CertificateOptions(trustRoot=trust_root) def get_options(self, host): # Use _makeContext so that we get a fresh OpenSSL CTX each time. - return ClientTLSOptions(host, self._options._makeContext()) + + # Check if certificate verification has been enabled + should_verify = self._config.federation_verify_certificates + + # Check if we've disabled certificate verification for this host + if should_verify: + for regex in self._config.federation_certificate_verification_whitelist: + if regex.match(host): + should_verify = False + break + + if should_verify: + return ClientTLSOptions(host, self._options_verify._makeContext()) + return ClientTLSOptionsNoVerify(host, self._options_noverify._makeContext()) diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index 1334c630cc..b4cbe97b41 100644 --- a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -149,7 +149,7 @@ class MatrixFederationAgent(object): tls_options = None else: tls_options = self._tls_client_options_factory.get_options( - res.tls_server_name.decode("ascii") + res.tls_server_name.decode("ascii"), ) # make sure that the Host header is set correctly diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index dcf184d3cf..e9eb662c4c 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -39,6 +39,7 @@ from synapse.util.logcontext import LoggingContext from tests.http import ServerTLSContext from tests.server import FakeTransport, ThreadedMemoryReactorClock from tests.unittest import TestCase +from tests.utils import default_config logger = logging.getLogger(__name__) @@ -53,7 +54,7 @@ class MatrixFederationAgentTests(TestCase): self.agent = MatrixFederationAgent( reactor=self.reactor, - tls_client_options_factory=ClientTLSOptionsFactory(None), + tls_client_options_factory=ClientTLSOptionsFactory(default_config("test")), _well_known_tls_policy=TrustingTLSPolicyForHTTPS(), _srv_resolver=self.mock_resolver, _well_known_cache=self.well_known_cache, -- cgit 1.5.1 From 4a9a118a94748ce287741f207f2077c6595ff178 Mon Sep 17 00:00:00 2001 From: *=0=1=4=* <21157384+f35f0ef9d0e827dae86552d3899f78fc@users.noreply.github.com> Date: Thu, 25 Apr 2019 08:47:22 -0500 Subject: Fix handling of SYNAPSE_NO_TLS in docker image (#5005) --- changelog.d/5005.misc | 1 + docker/README.md | 5 +++-- docker/start.py | 12 ++++++++++++ 3 files changed, 16 insertions(+), 2 deletions(-) create mode 100644 changelog.d/5005.misc diff --git a/changelog.d/5005.misc b/changelog.d/5005.misc new file mode 100644 index 0000000000..f74147b352 --- /dev/null +++ b/changelog.d/5005.misc @@ -0,0 +1 @@ +Convert SYNAPSE_NO_TLS Docker variable to boolean for user friendliness. Contributed by Gabriel Eckerson. 
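Stepping back to the federation certificate-verification whitelist handled by get_options() a couple of commits above: Synapse converts each whitelist entry with its own glob_to_regex helper. The sketch below uses fnmatch.translate as a stand-in so it runs on its own; it is an illustration of the decision logic, not the actual implementation.

import fnmatch
import re

whitelist = ["lon.example.com", "*.domain.com", "*.onion"]
whitelist_regexes = [re.compile(fnmatch.translate(entry)) for entry in whitelist]

def should_verify(host, verify_certificates=True):
    # Mirrors the shape of get_options(): skip verification if the feature is
    # off or if the host matches a whitelist glob.
    if not verify_certificates:
        return False
    return not any(regex.match(host) for regex in whitelist_regexes)

print(should_verify("matrix.org"))              # True: verify as normal
print(should_verify("abcdefghijklmnop.onion"))  # False: whitelisted, skip checks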
diff --git a/docker/README.md b/docker/README.md index b48d74e09c..b27a692d5b 100644 --- a/docker/README.md +++ b/docker/README.md @@ -102,8 +102,9 @@ when ``SYNAPSE_CONFIG_PATH`` is not set. * ``SYNAPSE_SERVER_NAME`` (mandatory), the server public hostname. * ``SYNAPSE_REPORT_STATS``, (mandatory, ``yes`` or ``no``), enable anonymous statistics reporting back to the Matrix project which helps us to get funding. -* ``SYNAPSE_NO_TLS``, set this variable to disable TLS in Synapse (use this if - you run your own TLS-capable reverse proxy). +* `SYNAPSE_NO_TLS`, (accepts `true`, `false`, `on`, `off`, `1`, `0`, `yes`, `no`]): disable + TLS in Synapse (use this if you run your own TLS-capable reverse proxy). Defaults + to `false` (ie, TLS is enabled by default). * ``SYNAPSE_ENABLE_REGISTRATION``, set this variable to enable registration on the Synapse instance. * ``SYNAPSE_ALLOW_GUEST``, set this variable to allow guest joining this server. diff --git a/docker/start.py b/docker/start.py index 941d9996a8..2da555272a 100755 --- a/docker/start.py +++ b/docker/start.py @@ -59,6 +59,18 @@ else: if not os.path.exists("/compiled"): os.mkdir("/compiled") config_path = "/compiled/homeserver.yaml" + + # Convert SYNAPSE_NO_TLS to boolean if exists + if "SYNAPSE_NO_TLS" in environ: + tlsanswerstring = str.lower(environ["SYNAPSE_NO_TLS"]) + if tlsanswerstring in ("true", "on", "1", "yes"): + environ["SYNAPSE_NO_TLS"] = True + else: + if tlsanswerstring in ("false", "off", "0", "no"): + environ["SYNAPSE_NO_TLS"] = False + else: + print("Environment variable \"SYNAPSE_NO_TLS\" found but value \"" + tlsanswerstring + "\" unrecognized; exiting.") + sys.exit(2) convert("/conf/homeserver.yaml", config_path, environ) convert("/conf/log.config", "/compiled/log.config", environ) -- cgit 1.5.1 From 00714e5102523829eb20129567152667df154d5b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 25 Apr 2019 14:52:19 +0100 Subject: set PIP_USE_PEP517 = False for tests pip 19.1 otherwise complains about "editable mode is not supported for pyproject.toml-style projects" --- tox.ini | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tox.ini b/tox.ini index ef543890f9..d0e519ce46 100644 --- a/tox.ini +++ b/tox.ini @@ -24,6 +24,11 @@ deps = pip>=10 setenv = + # we have a pyproject.toml, but don't want pip to use it for building. + # (otherwise we get an error about 'editable mode is not supported for + # pyproject.toml-style projects'). + PIP_USE_PEP517 = false + PYTHONDONTWRITEBYTECODE = no_byte_code COVERAGE_PROCESS_START = {toxinidir}/.coveragerc -- cgit 1.5.1 From afe560b07235ef002461893dc467c80260e85c56 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 25 Apr 2019 14:54:54 +0100 Subject: Add --no-pep-517 to README instructions --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 24afb93d7d..5409f0c563 100644 --- a/README.rst +++ b/README.rst @@ -173,7 +173,7 @@ Synapse offers two database engines: * `PostgreSQL `_ By default Synapse uses SQLite in and doing so trades performance for convenience. -SQLite is only recommended in Synapse for testing purposes or for servers with +SQLite is only recommended in Synapse for testing purposes or for servers with light workloads. Almost all installations should opt to use PostreSQL. 
Advantages include: @@ -272,7 +272,7 @@ to install using pip and a virtualenv:: virtualenv -p python3 env source env/bin/activate - python -m pip install -e .[all] + python -m pip install --no-pep-517 -e .[all] This will run a process of downloading and installing all the needed dependencies into a virtual env. -- cgit 1.5.1 From e86d74d748af614c3a7e4d8e9eb7968c50e37aa8 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 25 Apr 2019 14:55:33 +0100 Subject: Changelog --- changelog.d/5098.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5098.misc diff --git a/changelog.d/5098.misc b/changelog.d/5098.misc new file mode 100644 index 0000000000..9cd83bf226 --- /dev/null +++ b/changelog.d/5098.misc @@ -0,0 +1 @@ +Add workarounds for pep-517 install errors. -- cgit 1.5.1 From 7ca638c76135d7a0f86f6aa7981554bbe0b7a335 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 25 Apr 2019 20:53:10 +0100 Subject: Clarify logging when PDU signature checking fails --- synapse/crypto/keyring.py | 4 +--- synapse/federation/federation_base.py | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index ed2e994437..b6d1b4cf0b 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -120,11 +120,9 @@ class Keyring(object): key_ids = signature_ids(json_object, server_name) if not key_ids: - logger.warn("Request from %s: no supported signature keys", - server_name) deferred = defer.fail(SynapseError( 400, - "Not signed with a supported algorithm", + "Not signed by %s" % (server_name, ), Codes.UNAUTHORIZED, )) else: diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index dfe6b4aa5c..1d641337da 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -269,7 +269,17 @@ def _check_sigs_on_pdus(keyring, room_version, pdus): for p in pdus_to_check_sender ]) + def sender_err(e, pdu_to_check): + logger.warning( + "event id %s: unable to verify signature for sender %s: %s", + pdu_to_check.pdu.event_id, + pdu_to_check.sender_domain, + e, + ) + return e + for p, d in zip(pdus_to_check_sender, more_deferreds): + d.addErrback(sender_err, p) p.deferreds.append(d) # now let's look for events where the sender's domain is different to the @@ -291,7 +301,16 @@ def _check_sigs_on_pdus(keyring, room_version, pdus): for p in pdus_to_check_event_id ]) + def event_err(e, pdu_to_check): + logger.warning( + "event id %s: unable to verify signature for event id domain: %s", + pdu_to_check.pdu.event_id, + e, + ) + return e + for p, d in zip(pdus_to_check_event_id, more_deferreds): + d.addErrback(event_err, p) p.deferreds.append(d) # replace lists of deferreds with single Deferreds -- cgit 1.5.1 From fd8fb32bdd70b0ad68666e0f39a95ac90f9b1c27 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 25 Apr 2019 21:08:12 +0100 Subject: remove extraneous exception logging --- synapse/crypto/keyring.py | 48 +++++++++++++++++++++++------------ synapse/federation/federation_base.py | 4 +-- 2 files changed, 34 insertions(+), 18 deletions(-) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index b6d1b4cf0b..d8ba870cca 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -114,38 +114,54 @@ class Keyring(object): server_name. The deferreds run their callbacks in the sentinel logcontext. 
""" + # a list of VerifyKeyRequests verify_requests = [] + handle = preserve_fn(_handle_key_deferred) - for server_name, json_object in server_and_json: + def process(server_name, json_object): + """Process an entry in the request list + Given a (server_name, json_object) pair from the request list, + adds a key request to verify_requests, and returns a deferred which will + complete or fail (in the sentinel context) when verification completes. + """ key_ids = signature_ids(json_object, server_name) + if not key_ids: - deferred = defer.fail(SynapseError( - 400, - "Not signed by %s" % (server_name, ), - Codes.UNAUTHORIZED, - )) - else: - deferred = defer.Deferred() + return defer.fail( + SynapseError( + 400, + "Not signed by %s" % (server_name,), + Codes.UNAUTHORIZED, + ) + ) logger.debug("Verifying for %s with key_ids %s", server_name, key_ids) + # add the key request to the queue, but don't start it off yet. verify_request = VerifyKeyRequest( - server_name, key_ids, json_object, deferred + server_name, key_ids, json_object, defer.Deferred(), ) - verify_requests.append(verify_request) - run_in_background(self._start_key_lookups, verify_requests) + # now run _handle_key_deferred, which will wait for the key request + # to complete and then do the verification. + # + # We want _handle_key_request to log to the right context, so we + # wrap it with preserve_fn (aka run_in_background) + return handle(verify_request) - # Pass those keys to handle_key_deferred so that the json object - # signatures can be verified - handle = preserve_fn(_handle_key_deferred) - return [ - handle(rq) for rq in verify_requests + results = [ + process(server_name, json_object) + for server_name, json_object in server_and_json ] + if verify_requests: + run_in_background(self._start_key_lookups, verify_requests) + + return results + @defer.inlineCallbacks def _start_key_lookups(self, verify_requests): """Sets off the key fetches for each verify request diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index 1d641337da..832e2bdb9b 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -274,7 +274,7 @@ def _check_sigs_on_pdus(keyring, room_version, pdus): "event id %s: unable to verify signature for sender %s: %s", pdu_to_check.pdu.event_id, pdu_to_check.sender_domain, - e, + e.getErrorMessage(), ) return e @@ -305,7 +305,7 @@ def _check_sigs_on_pdus(keyring, room_version, pdus): logger.warning( "event id %s: unable to verify signature for event id domain: %s", pdu_to_check.pdu.event_id, - e, + e.getErrorMessage(), ) return e -- cgit 1.5.1 From 837d7f85a9d0a479487cd6205a3982f3981e4276 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 25 Apr 2019 22:17:59 +0100 Subject: more logging improvements --- synapse/federation/federation_base.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index 832e2bdb9b..cffa831d80 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -270,13 +270,14 @@ def _check_sigs_on_pdus(keyring, room_version, pdus): ]) def sender_err(e, pdu_to_check): - logger.warning( - "event id %s: unable to verify signature for sender %s: %s", + errmsg = "event id %s: unable to verify signature for sender %s: %s" % ( pdu_to_check.pdu.event_id, pdu_to_check.sender_domain, e.getErrorMessage(), ) - return e + # XX not really sure if these are the right codes, 
but they are what + # we've done for ages + raise SynapseError(400, errmsg, Codes.UNAUTHORIZED) for p, d in zip(pdus_to_check_sender, more_deferreds): d.addErrback(sender_err, p) @@ -302,12 +303,14 @@ def _check_sigs_on_pdus(keyring, room_version, pdus): ]) def event_err(e, pdu_to_check): - logger.warning( - "event id %s: unable to verify signature for event id domain: %s", - pdu_to_check.pdu.event_id, - e.getErrorMessage(), + errmsg = ( + "event id %s: unable to verify signature for event id domain: %s" % ( + pdu_to_check.pdu.event_id, + e.getErrorMessage(), + ) ) - return e + # XX as above: not really sure if these are the right codes + raise SynapseError(400, errmsg, Codes.UNAUTHORIZED) for p, d in zip(pdus_to_check_event_id, more_deferreds): d.addErrback(event_err, p) -- cgit 1.5.1 From 0962d3cdffd91dec7472661d81b21a35800eb8b5 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 25 Apr 2019 23:05:06 +0100 Subject: changelog --- changelog.d/5100.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5100.misc diff --git a/changelog.d/5100.misc b/changelog.d/5100.misc new file mode 100644 index 0000000000..db5eb1b156 --- /dev/null +++ b/changelog.d/5100.misc @@ -0,0 +1 @@ +Improve logging when event-signature checks fail. -- cgit 1.5.1 From bd0d45ca69587f4f258b738dfa3a55704838081e Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 26 Apr 2019 11:13:16 +0100 Subject: Fix infinite loop in presence handler Fixes #5102 --- changelog.d/5103.bugfix | 1 + synapse/handlers/presence.py | 5 +++++ synapse/storage/state_deltas.py | 18 ++++++++++++++++++ 3 files changed, 24 insertions(+) create mode 100644 changelog.d/5103.bugfix diff --git a/changelog.d/5103.bugfix b/changelog.d/5103.bugfix new file mode 100644 index 0000000000..590d80d58f --- /dev/null +++ b/changelog.d/5103.bugfix @@ -0,0 +1 @@ +Fix bug where presence updates were sent to all servers in a room when a new server joined, rather than to just the new server. diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index bd1285b15c..59d53f1050 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -828,6 +828,11 @@ class PresenceHandler(object): if typ != EventTypes.Member: continue + if event_id is None: + # state has been deleted, so this is not a join. We only care about + # joins. + continue + event = yield self.store.get_event(event_id) if event.content.get("membership") != Membership.JOIN: # We only care about joins diff --git a/synapse/storage/state_deltas.py b/synapse/storage/state_deltas.py index 56e42f583d..31a0279b18 100644 --- a/synapse/storage/state_deltas.py +++ b/synapse/storage/state_deltas.py @@ -22,6 +22,24 @@ logger = logging.getLogger(__name__) class StateDeltasStore(SQLBaseStore): def get_current_state_deltas(self, prev_stream_id): + """Fetch a list of room state changes since the given stream id + + Each entry in the result contains the following fields: + - stream_id (int) + - room_id (str) + - type (str): event type + - state_key (str): + - event_id (str|None): new event_id for this state key. None if the + state has been deleted. + - prev_event_id (str|None): previous event_id for this state key. None + if it's new state. 
+ + Args: + prev_stream_id (int): point to get changes since (exclusive) + + Returns: + Deferred[list[dict]]: results + """ prev_stream_id = int(prev_stream_id) if not self._curr_state_delta_stream_cache.has_any_entity_changed( prev_stream_id -- cgit 1.5.1 From 28a81ed62fdf489001385514e666729d85deb4bc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 26 Apr 2019 18:06:25 +0100 Subject: Ratelimit 3pid invites We do ratelimit sending the 3PID invite events, but that happens after spamming the identity server. --- synapse/handlers/room_member.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index e432740832..53e0103b5b 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -33,6 +33,8 @@ from synapse.types import RoomID, UserID from synapse.util.async_helpers import Linearizer from synapse.util.distributor import user_joined_room, user_left_room +from ._base import BaseHandler + logger = logging.getLogger(__name__) id_server_scheme = "https://" @@ -71,6 +73,11 @@ class RoomMemberHandler(object): self.spam_checker = hs.get_spam_checker() self._server_notices_mxid = self.config.server_notices_mxid + # This is only used to get at ratelimit function, and + # maybe_kick_guest_users. It's fine there are multiple of these as + # it doesn't store state. + self.base_handler = BaseHandler(hs) + @abc.abstractmethod def _remote_join(self, requester, remote_room_hosts, room_id, user, content): """Try and join a room that this server is not in @@ -702,6 +709,9 @@ class RoomMemberHandler(object): Codes.FORBIDDEN, ) + # Check whether we'll be ratelimited + yield self.base_handler.ratelimit(requester, update=False) + invitee = yield self._lookup_3pid( id_server, medium, address ) -- cgit 1.5.1 From 19f0722b2cd4e1cc09602c04721f143c878ed974 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 26 Apr 2019 18:08:33 +0100 Subject: Newsfile --- changelog.d/5104.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5104.bugfix diff --git a/changelog.d/5104.bugfix b/changelog.d/5104.bugfix new file mode 100644 index 0000000000..f88aca8a40 --- /dev/null +++ b/changelog.d/5104.bugfix @@ -0,0 +1 @@ +Fix the ratelimting on third party invites. -- cgit 1.5.1 From 8c5b1e30d454f87fba22d16b59b7ff0a76cb4ca4 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 29 Apr 2019 15:40:31 -0600 Subject: Add a default .m.rule.tombstone push rule (#4867) * Add a default .m.rule.tombstone push rule In support of MSC1930: https://github.com/matrix-org/matrix-doc/pull/1930 * changelog * Appease the changelog linter --- changelog.d/4867.feature | 1 + synapse/push/baserules.py | 17 +++++++++++++++++ 2 files changed, 18 insertions(+) create mode 100644 changelog.d/4867.feature diff --git a/changelog.d/4867.feature b/changelog.d/4867.feature new file mode 100644 index 0000000000..f5f9030e22 --- /dev/null +++ b/changelog.d/4867.feature @@ -0,0 +1 @@ +Add a default .m.rule.tombstone push rule. 
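Looking back at the get_current_state_deltas contract documented above and the presence-handler change that motivated it: consumers of the delta stream now have to tolerate rows whose event_id is None (deleted state) when hunting for new joins. A small self-contained sketch of that consumer-side check, using made-up rows rather than real database output:

deltas = [
    {"stream_id": 10, "room_id": "!a:hs", "type": "m.room.member",
     "state_key": "@alice:hs", "event_id": "$join:hs", "prev_event_id": None},
    {"stream_id": 11, "room_id": "!a:hs", "type": "m.room.member",
     "state_key": "@bob:hs", "event_id": None, "prev_event_id": "$old:hs"},
]

possible_joins = []
for delta in deltas:
    if delta["type"] != "m.room.member":
        continue
    if delta["event_id"] is None:
        # State has been deleted, so this cannot be a join.
        continue
    possible_joins.append((delta["room_id"], delta["state_key"]))

print(possible_joins)  # [('!a:hs', '@alice:hs')]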
diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 8f0682c948..3523a40108 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -261,6 +261,23 @@ BASE_APPEND_OVERRIDE_RULES = [ 'value': True, } ] + }, + { + 'rule_id': 'global/override/.m.rule.tombstone', + 'conditions': [ + { + 'kind': 'event_match', + 'key': 'type', + 'pattern': 'm.room.tombstone', + '_id': '_tombstone', + } + ], + 'actions': [ + 'notify', { + 'set_tweak': 'highlight', + 'value': True, + } + ] } ] -- cgit 1.5.1 From d8e357b7cfc80de824ab10167c7472098506549f Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 1 May 2019 11:34:22 +0100 Subject: Fix typo in account validity admin route --- docs/admin_api/account_validity.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/admin_api/account_validity.rst b/docs/admin_api/account_validity.rst index 980ea23605..47439460fb 100644 --- a/docs/admin_api/account_validity.rst +++ b/docs/admin_api/account_validity.rst @@ -13,7 +13,7 @@ This API extends the validity of an account by as much time as configured in the The API is:: - POST /_matrix/client/unstable/account_validity/send_mail + POST /_matrix/client/unstable/account_validity/validity with the following body: -- cgit 1.5.1 From 031919dafbc1c4e8a4060e498c4585ee95b91397 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 1 May 2019 11:38:27 +0100 Subject: Fix whole path for admin route --- docs/admin_api/account_validity.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/admin_api/account_validity.rst b/docs/admin_api/account_validity.rst index 47439460fb..c26353752f 100644 --- a/docs/admin_api/account_validity.rst +++ b/docs/admin_api/account_validity.rst @@ -13,7 +13,7 @@ This API extends the validity of an account by as much time as configured in the The API is:: - POST /_matrix/client/unstable/account_validity/validity + POST /_matrix/client/unstable/admin/account_validity/validity with the following body: -- cgit 1.5.1 From 803a28fd1d2a72a7757a88e80d8c588fb65bba9e Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 1 May 2019 11:43:31 +0100 Subject: Add changelog --- changelog.d/5116.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5116.misc diff --git a/changelog.d/5116.misc b/changelog.d/5116.misc new file mode 100644 index 0000000000..dcbf7c1fb8 --- /dev/null +++ b/changelog.d/5116.misc @@ -0,0 +1 @@ + Add time-based account expiration. -- cgit 1.5.1 From 6aad81ec0c0c411978980eadd2f8a2ebc0f75cb1 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 1 May 2019 11:50:15 +0100 Subject: Rename changelog file --- changelog.d/5116.feature | 1 + changelog.d/5116.misc | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 changelog.d/5116.feature delete mode 100644 changelog.d/5116.misc diff --git a/changelog.d/5116.feature b/changelog.d/5116.feature new file mode 100644 index 0000000000..dcbf7c1fb8 --- /dev/null +++ b/changelog.d/5116.feature @@ -0,0 +1 @@ + Add time-based account expiration. diff --git a/changelog.d/5116.misc b/changelog.d/5116.misc deleted file mode 100644 index dcbf7c1fb8..0000000000 --- a/changelog.d/5116.misc +++ /dev/null @@ -1 +0,0 @@ - Add time-based account expiration. 
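To make the effect of the .m.rule.tombstone override rule added in the baserules.py change above concrete, here is a rough, trimmed sketch of how such a rule evaluates against an event: an exact event_match on the type, yielding a notify plus highlight. This is not Synapse's push-rule evaluator, just the idea behind it.

TOMBSTONE_RULE = {
    "rule_id": "global/override/.m.rule.tombstone",
    "conditions": [
        {"kind": "event_match", "key": "type", "pattern": "m.room.tombstone"},
    ],
    "actions": ["notify", {"set_tweak": "highlight", "value": True}],
}

def rule_matches(rule, event):
    for cond in rule["conditions"]:
        if cond["kind"] != "event_match":
            return False  # sketch: only event_match is handled here
        if event.get(cond["key"]) != cond["pattern"]:
            return False
    return True

event = {"type": "m.room.tombstone", "room_id": "!old:hs",
         "content": {"replacement_room": "!new:hs"}}
if rule_matches(TOMBSTONE_RULE, event):
    print(TOMBSTONE_RULE["actions"])  # ['notify', {'set_tweak': 'highlight', ...}]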
-- cgit 1.5.1 From 579b637b6c52991f59bbcf7d836587218e42e744 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 May 2019 10:40:33 +0100 Subject: Move admin API away from ClientV1RestServlet --- synapse/rest/client/v1/admin.py | 66 ++++++++++++++++++++--------------------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index 0a1e233b23..8efa457d15 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -28,6 +28,7 @@ import synapse from synapse.api.constants import Membership, UserTypes from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError from synapse.http.servlet import ( + RestServlet, assert_params_in_dict, parse_integer, parse_json_object_from_request, @@ -36,16 +37,17 @@ from synapse.http.servlet import ( from synapse.types import UserID, create_requester from synapse.util.versionstring import get_version_string -from .base import ClientV1RestServlet, client_path_patterns +from .base import client_path_patterns logger = logging.getLogger(__name__) -class UsersRestServlet(ClientV1RestServlet): +class UsersRestServlet(RestServlet): PATTERNS = client_path_patterns("/admin/users/(?P[^/]*)") def __init__(self, hs): - super(UsersRestServlet, self).__init__(hs) + self.hs = hs + self.auth = hs.get_auth() self.handlers = hs.get_handlers() @defer.inlineCallbacks @@ -69,9 +71,12 @@ class UsersRestServlet(ClientV1RestServlet): defer.returnValue((200, ret)) -class VersionServlet(ClientV1RestServlet): +class VersionServlet(RestServlet): PATTERNS = client_path_patterns("/admin/server_version") + def __init__(self, hs): + self.auth = hs.get_auth() + @defer.inlineCallbacks def on_GET(self, request): requester = yield self.auth.get_user_by_req(request) @@ -88,7 +93,7 @@ class VersionServlet(ClientV1RestServlet): defer.returnValue((200, ret)) -class UserRegisterServlet(ClientV1RestServlet): +class UserRegisterServlet(RestServlet): """ Attributes: NONCE_TIMEOUT (int): Seconds until a generated nonce won't be accepted @@ -99,7 +104,6 @@ class UserRegisterServlet(ClientV1RestServlet): NONCE_TIMEOUT = 60 def __init__(self, hs): - super(UserRegisterServlet, self).__init__(hs) self.handlers = hs.get_handlers() self.reactor = hs.get_reactor() self.nonces = {} @@ -226,11 +230,12 @@ class UserRegisterServlet(ClientV1RestServlet): defer.returnValue((200, result)) -class WhoisRestServlet(ClientV1RestServlet): +class WhoisRestServlet(RestServlet): PATTERNS = client_path_patterns("/admin/whois/(?P[^/]*)") def __init__(self, hs): - super(WhoisRestServlet, self).__init__(hs) + self.hs = hs + self.auth = hs.get_auth() self.handlers = hs.get_handlers() @defer.inlineCallbacks @@ -251,12 +256,12 @@ class WhoisRestServlet(ClientV1RestServlet): defer.returnValue((200, ret)) -class PurgeMediaCacheRestServlet(ClientV1RestServlet): +class PurgeMediaCacheRestServlet(RestServlet): PATTERNS = client_path_patterns("/admin/purge_media_cache") def __init__(self, hs): self.media_repository = hs.get_media_repository() - super(PurgeMediaCacheRestServlet, self).__init__(hs) + self.auth = hs.get_auth() @defer.inlineCallbacks def on_POST(self, request): @@ -274,7 +279,7 @@ class PurgeMediaCacheRestServlet(ClientV1RestServlet): defer.returnValue((200, ret)) -class PurgeHistoryRestServlet(ClientV1RestServlet): +class PurgeHistoryRestServlet(RestServlet): PATTERNS = client_path_patterns( "/admin/purge_history/(?P[^/]*)(/(?P[^/]+))?" 
) @@ -285,9 +290,9 @@ class PurgeHistoryRestServlet(ClientV1RestServlet): Args: hs (synapse.server.HomeServer) """ - super(PurgeHistoryRestServlet, self).__init__(hs) self.pagination_handler = hs.get_pagination_handler() self.store = hs.get_datastore() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_POST(self, request, room_id, event_id): @@ -371,7 +376,7 @@ class PurgeHistoryRestServlet(ClientV1RestServlet): })) -class PurgeHistoryStatusRestServlet(ClientV1RestServlet): +class PurgeHistoryStatusRestServlet(RestServlet): PATTERNS = client_path_patterns( "/admin/purge_history_status/(?P[^/]+)" ) @@ -382,8 +387,8 @@ class PurgeHistoryStatusRestServlet(ClientV1RestServlet): Args: hs (synapse.server.HomeServer) """ - super(PurgeHistoryStatusRestServlet, self).__init__(hs) self.pagination_handler = hs.get_pagination_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, purge_id): @@ -400,12 +405,12 @@ class PurgeHistoryStatusRestServlet(ClientV1RestServlet): defer.returnValue((200, purge_status.asdict())) -class DeactivateAccountRestServlet(ClientV1RestServlet): +class DeactivateAccountRestServlet(RestServlet): PATTERNS = client_path_patterns("/admin/deactivate/(?P[^/]*)") def __init__(self, hs): - super(DeactivateAccountRestServlet, self).__init__(hs) self._deactivate_account_handler = hs.get_deactivate_account_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_POST(self, request, target_user_id): @@ -438,7 +443,7 @@ class DeactivateAccountRestServlet(ClientV1RestServlet): })) -class ShutdownRoomRestServlet(ClientV1RestServlet): +class ShutdownRoomRestServlet(RestServlet): """Shuts down a room by removing all local users from the room and blocking all future invites and joins to the room. Any local aliases will be repointed to a new room created by `new_room_user_id` and kicked users will be auto @@ -452,12 +457,13 @@ class ShutdownRoomRestServlet(ClientV1RestServlet): ) def __init__(self, hs): - super(ShutdownRoomRestServlet, self).__init__(hs) + self.hs = hs self.store = hs.get_datastore() self.state = hs.get_state_handler() self._room_creation_handler = hs.get_room_creation_handler() self.event_creation_handler = hs.get_event_creation_handler() self.room_member_handler = hs.get_room_member_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_POST(self, request, room_id): @@ -564,15 +570,15 @@ class ShutdownRoomRestServlet(ClientV1RestServlet): })) -class QuarantineMediaInRoom(ClientV1RestServlet): +class QuarantineMediaInRoom(RestServlet): """Quarantines all media in a room so that no one can download it via this server. """ PATTERNS = client_path_patterns("/admin/quarantine_media/(?P[^/]+)") def __init__(self, hs): - super(QuarantineMediaInRoom, self).__init__(hs) self.store = hs.get_datastore() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_POST(self, request, room_id): @@ -588,13 +594,12 @@ class QuarantineMediaInRoom(ClientV1RestServlet): defer.returnValue((200, {"num_quarantined": num_quarantined})) -class ListMediaInRoom(ClientV1RestServlet): +class ListMediaInRoom(RestServlet): """Lists all of the media in a given room. 
""" PATTERNS = client_path_patterns("/admin/room/(?P[^/]+)/media") def __init__(self, hs): - super(ListMediaInRoom, self).__init__(hs) self.store = hs.get_datastore() @defer.inlineCallbacks @@ -609,7 +614,7 @@ class ListMediaInRoom(ClientV1RestServlet): defer.returnValue((200, {"local": local_mxcs, "remote": remote_mxcs})) -class ResetPasswordRestServlet(ClientV1RestServlet): +class ResetPasswordRestServlet(RestServlet): """Post request to allow an administrator reset password for a user. This needs user to have administrator access in Synapse. Example: @@ -626,7 +631,6 @@ class ResetPasswordRestServlet(ClientV1RestServlet): def __init__(self, hs): self.store = hs.get_datastore() - super(ResetPasswordRestServlet, self).__init__(hs) self.hs = hs self.auth = hs.get_auth() self._set_password_handler = hs.get_set_password_handler() @@ -653,7 +657,7 @@ class ResetPasswordRestServlet(ClientV1RestServlet): defer.returnValue((200, {})) -class GetUsersPaginatedRestServlet(ClientV1RestServlet): +class GetUsersPaginatedRestServlet(RestServlet): """Get request to get specific number of users from Synapse. This needs user to have administrator access in Synapse. Example: @@ -666,7 +670,6 @@ class GetUsersPaginatedRestServlet(ClientV1RestServlet): def __init__(self, hs): self.store = hs.get_datastore() - super(GetUsersPaginatedRestServlet, self).__init__(hs) self.hs = hs self.auth = hs.get_auth() self.handlers = hs.get_handlers() @@ -736,7 +739,7 @@ class GetUsersPaginatedRestServlet(ClientV1RestServlet): defer.returnValue((200, ret)) -class SearchUsersRestServlet(ClientV1RestServlet): +class SearchUsersRestServlet(RestServlet): """Get request to search user table for specific users according to search term. This needs user to have administrator access in Synapse. 
@@ -750,7 +753,6 @@ class SearchUsersRestServlet(ClientV1RestServlet): def __init__(self, hs): self.store = hs.get_datastore() - super(SearchUsersRestServlet, self).__init__(hs) self.hs = hs self.auth = hs.get_auth() self.handlers = hs.get_handlers() @@ -784,15 +786,15 @@ class SearchUsersRestServlet(ClientV1RestServlet): defer.returnValue((200, ret)) -class DeleteGroupAdminRestServlet(ClientV1RestServlet): +class DeleteGroupAdminRestServlet(RestServlet): """Allows deleting of local groups """ PATTERNS = client_path_patterns("/admin/delete_group/(?P[^/]*)") def __init__(self, hs): - super(DeleteGroupAdminRestServlet, self).__init__(hs) self.group_server = hs.get_groups_server_handler() self.is_mine_id = hs.is_mine_id + self.auth = hs.get_auth() @defer.inlineCallbacks def on_POST(self, request, group_id): @@ -809,7 +811,7 @@ class DeleteGroupAdminRestServlet(ClientV1RestServlet): defer.returnValue((200, {})) -class AccountValidityRenewServlet(ClientV1RestServlet): +class AccountValidityRenewServlet(RestServlet): PATTERNS = client_path_patterns("/admin/account_validity/validity$") def __init__(self, hs): @@ -817,8 +819,6 @@ class AccountValidityRenewServlet(ClientV1RestServlet): Args: hs (synapse.server.HomeServer): server """ - super(AccountValidityRenewServlet, self).__init__(hs) - self.hs = hs self.account_activity_handler = hs.get_account_validity_handler() self.auth = hs.get_auth() -- cgit 1.5.1 From 8e9ca8353730949290460f9183c5ee48941d1f75 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 May 2019 15:18:58 +0100 Subject: Move admin API to a new prefix --- docs/admin_api/account_validity.rst | 2 +- docs/admin_api/delete_group.md | 2 +- docs/admin_api/media_admin_api.md | 2 +- docs/admin_api/purge_history_api.rst | 4 +- docs/admin_api/purge_remote_media.rst | 2 +- docs/admin_api/register_api.rst | 4 +- docs/admin_api/user_admin_api.rst | 6 +-- docs/admin_api/version_api.rst | 2 +- synapse/app/homeserver.py | 2 + synapse/config/server.py | 4 +- synapse/rest/__init__.py | 12 +++++- synapse/rest/client/v1/admin.py | 70 +++++++++++++++++++++++++---------- 12 files changed, 76 insertions(+), 36 deletions(-) diff --git a/docs/admin_api/account_validity.rst b/docs/admin_api/account_validity.rst index c26353752f..7559de4c57 100644 --- a/docs/admin_api/account_validity.rst +++ b/docs/admin_api/account_validity.rst @@ -13,7 +13,7 @@ This API extends the validity of an account by as much time as configured in the The API is:: - POST /_matrix/client/unstable/admin/account_validity/validity + POST /_synapse/admin/v1/account_validity/validity with the following body: diff --git a/docs/admin_api/delete_group.md b/docs/admin_api/delete_group.md index d703d108b0..1710488ea8 100644 --- a/docs/admin_api/delete_group.md +++ b/docs/admin_api/delete_group.md @@ -8,7 +8,7 @@ being deleted. The API is: ``` -POST /_matrix/client/r0/admin/delete_group/ +POST /_synapse/admin/v1/delete_group/ ``` including an `access_token` of a server admin. diff --git a/docs/admin_api/media_admin_api.md b/docs/admin_api/media_admin_api.md index abdbc1ea86..5e9f8e5d84 100644 --- a/docs/admin_api/media_admin_api.md +++ b/docs/admin_api/media_admin_api.md @@ -4,7 +4,7 @@ This API gets a list of known media in a room. The API is: ``` -GET /_matrix/client/r0/admin/room//media +GET /_synapse/admin/v1/room//media ``` including an `access_token` of a server admin. 
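(For readers following along, a minimal client-side sketch of calling the relocated media-admin endpoint is shown below. This is illustrative only and not part of the patch: the homeserver URL, room ID and access token are placeholders, and the `local`/`remote` keys in the response come from the `ListMediaInRoom` servlet changed later in this commit.)

```
import requests

HOMESERVER = "http://localhost:8008"   # assumed local homeserver (placeholder)
ROOM_ID = "!somewhere:example.com"     # hypothetical room ID
ADMIN_TOKEN = "admin_access_token"     # hypothetical admin user's access token

# The endpoint now lives under the new prefix: /_synapse/admin/v1/room/<room_id>/media
resp = requests.get(
    HOMESERVER + "/_synapse/admin/v1/room/" + ROOM_ID + "/media",
    params={"access_token": ADMIN_TOKEN},
)
resp.raise_for_status()

body = resp.json()
# ListMediaInRoom (below) responds with {"local": [...], "remote": [...]}
print(len(body["local"]), "local and", len(body["remote"]), "remote media items")
```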
diff --git a/docs/admin_api/purge_history_api.rst b/docs/admin_api/purge_history_api.rst index a5c3dc8149..f7be226fd9 100644 --- a/docs/admin_api/purge_history_api.rst +++ b/docs/admin_api/purge_history_api.rst @@ -10,7 +10,7 @@ paginate further back in the room from the point being purged from. The API is: -``POST /_matrix/client/r0/admin/purge_history/[/]`` +``POST /_synapse/admin/v1/purge_history/[/]`` including an ``access_token`` of a server admin. @@ -49,7 +49,7 @@ Purge status query It is possible to poll for updates on recent purges with a second API; -``GET /_matrix/client/r0/admin/purge_history_status/`` +``GET /_synapse/admin/v1/purge_history_status/`` (again, with a suitable ``access_token``). This API returns a JSON body like the following: diff --git a/docs/admin_api/purge_remote_media.rst b/docs/admin_api/purge_remote_media.rst index 5deb02a3df..dacd5bc8fb 100644 --- a/docs/admin_api/purge_remote_media.rst +++ b/docs/admin_api/purge_remote_media.rst @@ -6,7 +6,7 @@ media. The API is:: - POST /_matrix/client/r0/admin/purge_media_cache?before_ts=&access_token= + POST /_synapse/admin/v1/purge_media_cache?before_ts=&access_token= {} diff --git a/docs/admin_api/register_api.rst b/docs/admin_api/register_api.rst index 084e74ebf5..3a63109aa0 100644 --- a/docs/admin_api/register_api.rst +++ b/docs/admin_api/register_api.rst @@ -12,7 +12,7 @@ is not enabled. To fetch the nonce, you need to request one from the API:: - > GET /_matrix/client/r0/admin/register + > GET /_synapse/admin/v1/register < {"nonce": "thisisanonce"} @@ -22,7 +22,7 @@ body containing the nonce, username, password, whether they are an admin As an example:: - > POST /_matrix/client/r0/admin/register + > POST /_synapse/admin/v1/register > { "nonce": "thisisanonce", "username": "pepper_roni", diff --git a/docs/admin_api/user_admin_api.rst b/docs/admin_api/user_admin_api.rst index d17121a188..8aca4f158d 100644 --- a/docs/admin_api/user_admin_api.rst +++ b/docs/admin_api/user_admin_api.rst @@ -5,7 +5,7 @@ This API returns information about a specific user account. The api is:: - GET /_matrix/client/r0/admin/whois/ + GET /_synapse/admin/v1/whois/ including an ``access_token`` of a server admin. @@ -50,7 +50,7 @@ references to it). The api is:: - POST /_matrix/client/r0/admin/deactivate/ + POST /_synapse/admin/v1/deactivate/ with a body of: @@ -73,7 +73,7 @@ Changes the password of another user. The api is:: - POST /_matrix/client/r0/admin/reset_password/ + POST /_synapse/admin/v1/reset_password/ with a body of: diff --git a/docs/admin_api/version_api.rst b/docs/admin_api/version_api.rst index 30a91b5f43..6d66543b96 100644 --- a/docs/admin_api/version_api.rst +++ b/docs/admin_api/version_api.rst @@ -8,7 +8,7 @@ contains Synapse version information). The api is:: - GET /_matrix/client/r0/admin/server_version + GET /_synapse/admin/v1/server_version including an ``access_token`` of a server admin. 
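(As a worked example of the relocated register API: the `mac` field is an HMAC-SHA1 over `nonce\0username\0password\0admin-flag`, keyed with the homeserver's `registration_shared_secret`, exactly as `UserRegisterServlet` computes it later in this series. The sketch below is illustrative only; the homeserver URL, shared secret and credentials are placeholders.)

```
import hashlib
import hmac

import requests

HOMESERVER = "http://localhost:8008"            # assumed local homeserver (placeholder)
SHARED_SECRET = b"registration_shared_secret"   # from homeserver.yaml (placeholder)

# Step 1: fetch a nonce from the new admin prefix
nonce = requests.get(HOMESERVER + "/_synapse/admin/v1/register").json()["nonce"]

# Step 2: build the MAC the same way UserRegisterServlet verifies it
mac = hmac.new(key=SHARED_SECRET, digestmod=hashlib.sha1)
mac.update(nonce.encode("utf8"))
mac.update(b"\x00")
mac.update(b"pepper_roni")   # username
mac.update(b"\x00")
mac.update(b"pizza")         # password
mac.update(b"\x00")
mac.update(b"admin")         # or b"notadmin" for a regular account

# Step 3: register the account
resp = requests.post(
    HOMESERVER + "/_synapse/admin/v1/register",
    json={
        "nonce": nonce,
        "username": "pepper_roni",
        "password": "pizza",
        "admin": True,
        "mac": mac.hexdigest(),
    },
)
print(resp.json())
```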
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 79be977ea6..3d7db61d14 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -62,6 +62,7 @@ from synapse.python_dependencies import check_requirements from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory from synapse.rest import ClientRestResource +from synapse.rest.client.v1.admin import AdminRestResource from synapse.rest.key.v2 import KeyApiV2Resource from synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.rest.well_known import WellKnownResource @@ -180,6 +181,7 @@ class SynapseHomeServer(HomeServer): "/_matrix/client/v2_alpha": client_resource, "/_matrix/client/versions": client_resource, "/.well-known/matrix/client": WellKnownResource(self), + "/_synapse/admin": AdminRestResource(self), }) if self.get_config().saml2_enabled: diff --git a/synapse/config/server.py b/synapse/config/server.py index cdf1e4d286..cc232422b9 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -388,8 +388,8 @@ class ServerConfig(Config): # # Valid resource names are: # - # client: the client-server API (/_matrix/client). Also implies 'media' and - # 'static'. + # client: the client-server API (/_matrix/client), and the synapse admin + # API (/_synapse/admin). Also implies 'media' and 'static'. # # consent: user consent forms (/_matrix/consent). See # docs/consent_tracking.md. diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index a66885d349..6bc50f78e1 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -58,8 +58,14 @@ from synapse.rest.client.v2_alpha import ( class ClientRestResource(JsonResource): - """A resource for version 1 of the matrix client API.""" + """Matrix Client API REST resource. 
+ This gets mounted at various points under /_matrix/client, including: + * /_matrix/client/r0 + * /_matrix/client/api/v1 + * /_matrix/client/unstable + * etc + """ def __init__(self, hs): JsonResource.__init__(self, hs, canonical_json=False) self.register_servlets(self, hs) @@ -82,7 +88,6 @@ class ClientRestResource(JsonResource): presence.register_servlets(hs, client_resource) directory.register_servlets(hs, client_resource) voip.register_servlets(hs, client_resource) - admin.register_servlets(hs, client_resource) pusher.register_servlets(hs, client_resource) push_rule.register_servlets(hs, client_resource) logout.register_servlets(hs, client_resource) @@ -111,3 +116,6 @@ class ClientRestResource(JsonResource): room_upgrade_rest_servlet.register_servlets(hs, client_resource) capabilities.register_servlets(hs, client_resource) account_validity.register_servlets(hs, client_resource) + + # moving to /_synapse/admin + admin.register_servlets(hs, client_resource) diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index 8efa457d15..d4f83e4ae8 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -18,6 +18,7 @@ import hashlib import hmac import logging import platform +import re from six import text_type from six.moves import http_client @@ -27,6 +28,7 @@ from twisted.internet import defer import synapse from synapse.api.constants import Membership, UserTypes from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError +from synapse.http.server import JsonResource from synapse.http.servlet import ( RestServlet, assert_params_in_dict, @@ -37,13 +39,33 @@ from synapse.http.servlet import ( from synapse.types import UserID, create_requester from synapse.util.versionstring import get_version_string -from .base import client_path_patterns - logger = logging.getLogger(__name__) +def historical_admin_path_patterns(path_regex): + """Returns the list of patterns for an admin endpoint, including historical ones + + This is a backwards-compatibility hack. Previously, the Admin API was exposed at + various paths under /_matrix/client. This function returns a list of patterns + matching those paths (as well as the new one), so that existing scripts which rely + on the endpoints being available there are not broken. + + Note that this should only be used for existing endpoints: new ones should just + register for the /_synapse/admin path. + """ + return list( + re.compile(prefix + path_regex) + for prefix in ( + "^/_synapse/admin/v1", + "^/_matrix/client/api/v1/admin", + "^/_matrix/client/unstable/admin", + "^/_matrix/client/r0/admin" + ) + ) + + class UsersRestServlet(RestServlet): - PATTERNS = client_path_patterns("/admin/users/(?P[^/]*)") + PATTERNS = historical_admin_path_patterns("/users/(?P[^/]*)") def __init__(self, hs): self.hs = hs @@ -72,7 +94,7 @@ class UsersRestServlet(RestServlet): class VersionServlet(RestServlet): - PATTERNS = client_path_patterns("/admin/server_version") + PATTERNS = historical_admin_path_patterns("/server_version") def __init__(self, hs): self.auth = hs.get_auth() @@ -100,7 +122,7 @@ class UserRegisterServlet(RestServlet): nonces (dict[str, int]): The nonces that we will accept. A dict of nonce to the time it was generated, in int seconds. 
""" - PATTERNS = client_path_patterns("/admin/register") + PATTERNS = historical_admin_path_patterns("/register") NONCE_TIMEOUT = 60 def __init__(self, hs): @@ -231,7 +253,7 @@ class UserRegisterServlet(RestServlet): class WhoisRestServlet(RestServlet): - PATTERNS = client_path_patterns("/admin/whois/(?P[^/]*)") + PATTERNS = historical_admin_path_patterns("/whois/(?P[^/]*)") def __init__(self, hs): self.hs = hs @@ -257,7 +279,7 @@ class WhoisRestServlet(RestServlet): class PurgeMediaCacheRestServlet(RestServlet): - PATTERNS = client_path_patterns("/admin/purge_media_cache") + PATTERNS = historical_admin_path_patterns("/purge_media_cache") def __init__(self, hs): self.media_repository = hs.get_media_repository() @@ -280,8 +302,8 @@ class PurgeMediaCacheRestServlet(RestServlet): class PurgeHistoryRestServlet(RestServlet): - PATTERNS = client_path_patterns( - "/admin/purge_history/(?P[^/]*)(/(?P[^/]+))?" + PATTERNS = historical_admin_path_patterns( + "/purge_history/(?P[^/]*)(/(?P[^/]+))?" ) def __init__(self, hs): @@ -377,8 +399,8 @@ class PurgeHistoryRestServlet(RestServlet): class PurgeHistoryStatusRestServlet(RestServlet): - PATTERNS = client_path_patterns( - "/admin/purge_history_status/(?P[^/]+)" + PATTERNS = historical_admin_path_patterns( + "/purge_history_status/(?P[^/]+)" ) def __init__(self, hs): @@ -406,7 +428,7 @@ class PurgeHistoryStatusRestServlet(RestServlet): class DeactivateAccountRestServlet(RestServlet): - PATTERNS = client_path_patterns("/admin/deactivate/(?P[^/]*)") + PATTERNS = historical_admin_path_patterns("/deactivate/(?P[^/]*)") def __init__(self, hs): self._deactivate_account_handler = hs.get_deactivate_account_handler() @@ -449,7 +471,7 @@ class ShutdownRoomRestServlet(RestServlet): to a new room created by `new_room_user_id` and kicked users will be auto joined to the new room. """ - PATTERNS = client_path_patterns("/admin/shutdown_room/(?P[^/]+)") + PATTERNS = historical_admin_path_patterns("/shutdown_room/(?P[^/]+)") DEFAULT_MESSAGE = ( "Sharing illegal content on this server is not permitted and rooms in" @@ -574,7 +596,7 @@ class QuarantineMediaInRoom(RestServlet): """Quarantines all media in a room so that no one can download it via this server. """ - PATTERNS = client_path_patterns("/admin/quarantine_media/(?P[^/]+)") + PATTERNS = historical_admin_path_patterns("/quarantine_media/(?P[^/]+)") def __init__(self, hs): self.store = hs.get_datastore() @@ -597,7 +619,7 @@ class QuarantineMediaInRoom(RestServlet): class ListMediaInRoom(RestServlet): """Lists all of the media in a given room. """ - PATTERNS = client_path_patterns("/admin/room/(?P[^/]+)/media") + PATTERNS = historical_admin_path_patterns("/room/(?P[^/]+)/media") def __init__(self, hs): self.store = hs.get_datastore() @@ -627,7 +649,7 @@ class ResetPasswordRestServlet(RestServlet): Returns: 200 OK with empty object if success otherwise an error. """ - PATTERNS = client_path_patterns("/admin/reset_password/(?P[^/]*)") + PATTERNS = historical_admin_path_patterns("/reset_password/(?P[^/]*)") def __init__(self, hs): self.store = hs.get_datastore() @@ -666,7 +688,7 @@ class GetUsersPaginatedRestServlet(RestServlet): Returns: 200 OK with json object {list[dict[str, Any]], count} or empty object. 
""" - PATTERNS = client_path_patterns("/admin/users_paginate/(?P[^/]*)") + PATTERNS = historical_admin_path_patterns("/users_paginate/(?P[^/]*)") def __init__(self, hs): self.store = hs.get_datastore() @@ -749,7 +771,7 @@ class SearchUsersRestServlet(RestServlet): Returns: 200 OK with json object {list[dict[str, Any]], count} or empty object. """ - PATTERNS = client_path_patterns("/admin/search_users/(?P[^/]*)") + PATTERNS = historical_admin_path_patterns("/search_users/(?P[^/]*)") def __init__(self, hs): self.store = hs.get_datastore() @@ -789,7 +811,7 @@ class SearchUsersRestServlet(RestServlet): class DeleteGroupAdminRestServlet(RestServlet): """Allows deleting of local groups """ - PATTERNS = client_path_patterns("/admin/delete_group/(?P[^/]*)") + PATTERNS = historical_admin_path_patterns("/delete_group/(?P[^/]*)") def __init__(self, hs): self.group_server = hs.get_groups_server_handler() @@ -812,7 +834,7 @@ class DeleteGroupAdminRestServlet(RestServlet): class AccountValidityRenewServlet(RestServlet): - PATTERNS = client_path_patterns("/admin/account_validity/validity$") + PATTERNS = historical_admin_path_patterns("/account_validity/validity$") def __init__(self, hs): """ @@ -847,6 +869,14 @@ class AccountValidityRenewServlet(RestServlet): defer.returnValue((200, res)) +class AdminRestResource(JsonResource): + """The REST resource which gets mounted at /_synapse/admin""" + + def __init__(self, hs): + JsonResource.__init__(self, hs, canonical_json=False) + register_servlets(hs, self) + + def register_servlets(hs, http_server): WhoisRestServlet(hs).register(http_server) PurgeMediaCacheRestServlet(hs).register(http_server) -- cgit 1.5.1 From 40e576e29cf6f06d6b5244c5d1df34cf33b1f556 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 May 2019 15:32:38 +0100 Subject: Move admin api impl to its own package It doesn't really belong under rest/client/v1 any more. 
--- synapse/app/homeserver.py | 2 +- synapse/rest/__init__.py | 5 +- synapse/rest/admin/__init__.py | 896 ++++++++++++++++++++++++ synapse/rest/client/v1/admin.py | 896 ------------------------ tests/handlers/test_user_directory.py | 7 +- tests/push/test_email.py | 5 +- tests/push/test_http.py | 5 +- tests/rest/admin/__init__.py | 14 + tests/rest/admin/test_admin.py | 617 ++++++++++++++++ tests/rest/client/test_consent.py | 5 +- tests/rest/client/test_identity.py | 5 +- tests/rest/client/v1/test_admin.py | 616 ---------------- tests/rest/client/v1/test_events.py | 5 +- tests/rest/client/v1/test_login.py | 5 +- tests/rest/client/v1/test_rooms.py | 5 +- tests/rest/client/v2_alpha/test_auth.py | 4 +- tests/rest/client/v2_alpha/test_capabilities.py | 6 +- tests/rest/client/v2_alpha/test_register.py | 7 +- tests/rest/client/v2_alpha/test_sync.py | 5 +- tests/server_notices/test_consent.py | 6 +- tests/storage/test_client_ips.py | 5 +- 21 files changed, 1573 insertions(+), 1548 deletions(-) create mode 100644 synapse/rest/admin/__init__.py delete mode 100644 synapse/rest/client/v1/admin.py create mode 100644 tests/rest/admin/__init__.py create mode 100644 tests/rest/admin/test_admin.py delete mode 100644 tests/rest/client/v1/test_admin.py diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3d7db61d14..1045d28949 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -62,7 +62,7 @@ from synapse.python_dependencies import check_requirements from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory from synapse.rest import ClientRestResource -from synapse.rest.client.v1.admin import AdminRestResource +from synapse.rest.admin import AdminRestResource from synapse.rest.key.v2 import KeyApiV2Resource from synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.rest.well_known import WellKnownResource diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index 6bc50f78e1..e8e1bcddea 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -13,11 +13,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +import synapse.rest.admin from synapse.http.server import JsonResource from synapse.rest.client import versions from synapse.rest.client.v1 import ( - admin, directory, events, initial_sync, @@ -118,4 +117,4 @@ class ClientRestResource(JsonResource): account_validity.register_servlets(hs, client_resource) # moving to /_synapse/admin - admin.register_servlets(hs, client_resource) + synapse.rest.admin.register_servlets(hs, client_resource) diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py new file mode 100644 index 0000000000..ddf9ced1a3 --- /dev/null +++ b/synapse/rest/admin/__init__.py @@ -0,0 +1,896 @@ +# -*- coding: utf-8 -*- +# Copyright 2014-2016 OpenMarket Ltd +# Copyright 2018-2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import hashlib +import hmac +import logging +import platform +import re + +from six import text_type +from six.moves import http_client + +from twisted.internet import defer + +import synapse +from synapse.api.constants import Membership, UserTypes +from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError +from synapse.http.server import JsonResource +from synapse.http.servlet import ( + RestServlet, + assert_params_in_dict, + parse_integer, + parse_json_object_from_request, + parse_string, +) +from synapse.types import UserID, create_requester +from synapse.util.versionstring import get_version_string + +logger = logging.getLogger(__name__) + + +def historical_admin_path_patterns(path_regex): + """Returns the list of patterns for an admin endpoint, including historical ones + + This is a backwards-compatibility hack. Previously, the Admin API was exposed at + various paths under /_matrix/client. This function returns a list of patterns + matching those paths (as well as the new one), so that existing scripts which rely + on the endpoints being available there are not broken. + + Note that this should only be used for existing endpoints: new ones should just + register for the /_synapse/admin path. + """ + return list( + re.compile(prefix + path_regex) + for prefix in ( + "^/_synapse/admin/v1", + "^/_matrix/client/api/v1/admin", + "^/_matrix/client/unstable/admin", + "^/_matrix/client/r0/admin" + ) + ) + + +class UsersRestServlet(RestServlet): + PATTERNS = historical_admin_path_patterns("/users/(?P[^/]*)") + + def __init__(self, hs): + self.hs = hs + self.auth = hs.get_auth() + self.handlers = hs.get_handlers() + + @defer.inlineCallbacks + def on_GET(self, request, user_id): + target_user = UserID.from_string(user_id) + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + # To allow all users to get the users list + # if not is_admin and target_user != auth_user: + # raise AuthError(403, "You are not a server admin") + + if not self.hs.is_mine(target_user): + raise SynapseError(400, "Can only users a local user") + + ret = yield self.handlers.admin_handler.get_users() + + defer.returnValue((200, ret)) + + +class VersionServlet(RestServlet): + PATTERNS = historical_admin_path_patterns("/server_version") + + def __init__(self, hs): + self.auth = hs.get_auth() + + @defer.inlineCallbacks + def on_GET(self, request): + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + ret = { + 'server_version': get_version_string(synapse), + 'python_version': platform.python_version(), + } + + defer.returnValue((200, ret)) + + +class UserRegisterServlet(RestServlet): + """ + Attributes: + NONCE_TIMEOUT (int): Seconds until a generated nonce won't be accepted + nonces (dict[str, int]): The nonces that we will accept. A dict of + nonce to the time it was generated, in int seconds. + """ + PATTERNS = historical_admin_path_patterns("/register") + NONCE_TIMEOUT = 60 + + def __init__(self, hs): + self.handlers = hs.get_handlers() + self.reactor = hs.get_reactor() + self.nonces = {} + self.hs = hs + + def _clear_old_nonces(self): + """ + Clear out old nonces that are older than NONCE_TIMEOUT. 
+ """ + now = int(self.reactor.seconds()) + + for k, v in list(self.nonces.items()): + if now - v > self.NONCE_TIMEOUT: + del self.nonces[k] + + def on_GET(self, request): + """ + Generate a new nonce. + """ + self._clear_old_nonces() + + nonce = self.hs.get_secrets().token_hex(64) + self.nonces[nonce] = int(self.reactor.seconds()) + return (200, {"nonce": nonce}) + + @defer.inlineCallbacks + def on_POST(self, request): + self._clear_old_nonces() + + if not self.hs.config.registration_shared_secret: + raise SynapseError(400, "Shared secret registration is not enabled") + + body = parse_json_object_from_request(request) + + if "nonce" not in body: + raise SynapseError( + 400, "nonce must be specified", errcode=Codes.BAD_JSON, + ) + + nonce = body["nonce"] + + if nonce not in self.nonces: + raise SynapseError( + 400, "unrecognised nonce", + ) + + # Delete the nonce, so it can't be reused, even if it's invalid + del self.nonces[nonce] + + if "username" not in body: + raise SynapseError( + 400, "username must be specified", errcode=Codes.BAD_JSON, + ) + else: + if ( + not isinstance(body['username'], text_type) + or len(body['username']) > 512 + ): + raise SynapseError(400, "Invalid username") + + username = body["username"].encode("utf-8") + if b"\x00" in username: + raise SynapseError(400, "Invalid username") + + if "password" not in body: + raise SynapseError( + 400, "password must be specified", errcode=Codes.BAD_JSON, + ) + else: + if ( + not isinstance(body['password'], text_type) + or len(body['password']) > 512 + ): + raise SynapseError(400, "Invalid password") + + password = body["password"].encode("utf-8") + if b"\x00" in password: + raise SynapseError(400, "Invalid password") + + admin = body.get("admin", None) + user_type = body.get("user_type", None) + + if user_type is not None and user_type not in UserTypes.ALL_USER_TYPES: + raise SynapseError(400, "Invalid user type") + + got_mac = body["mac"] + + want_mac = hmac.new( + key=self.hs.config.registration_shared_secret.encode(), + digestmod=hashlib.sha1, + ) + want_mac.update(nonce.encode('utf8')) + want_mac.update(b"\x00") + want_mac.update(username) + want_mac.update(b"\x00") + want_mac.update(password) + want_mac.update(b"\x00") + want_mac.update(b"admin" if admin else b"notadmin") + if user_type: + want_mac.update(b"\x00") + want_mac.update(user_type.encode('utf8')) + want_mac = want_mac.hexdigest() + + if not hmac.compare_digest( + want_mac.encode('ascii'), + got_mac.encode('ascii') + ): + raise SynapseError(403, "HMAC incorrect") + + # Reuse the parts of RegisterRestServlet to reduce code duplication + from synapse.rest.client.v2_alpha.register import RegisterRestServlet + + register = RegisterRestServlet(self.hs) + + (user_id, _) = yield register.registration_handler.register( + localpart=body['username'].lower(), + password=body["password"], + admin=bool(admin), + generate_token=False, + user_type=user_type, + ) + + result = yield register._create_registration_details(user_id, body) + defer.returnValue((200, result)) + + +class WhoisRestServlet(RestServlet): + PATTERNS = historical_admin_path_patterns("/whois/(?P[^/]*)") + + def __init__(self, hs): + self.hs = hs + self.auth = hs.get_auth() + self.handlers = hs.get_handlers() + + @defer.inlineCallbacks + def on_GET(self, request, user_id): + target_user = UserID.from_string(user_id) + requester = yield self.auth.get_user_by_req(request) + auth_user = requester.user + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin and target_user != auth_user: 
+ raise AuthError(403, "You are not a server admin") + + if not self.hs.is_mine(target_user): + raise SynapseError(400, "Can only whois a local user") + + ret = yield self.handlers.admin_handler.get_whois(target_user) + + defer.returnValue((200, ret)) + + +class PurgeMediaCacheRestServlet(RestServlet): + PATTERNS = historical_admin_path_patterns("/purge_media_cache") + + def __init__(self, hs): + self.media_repository = hs.get_media_repository() + self.auth = hs.get_auth() + + @defer.inlineCallbacks + def on_POST(self, request): + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + before_ts = parse_integer(request, "before_ts", required=True) + logger.info("before_ts: %r", before_ts) + + ret = yield self.media_repository.delete_old_remote_media(before_ts) + + defer.returnValue((200, ret)) + + +class PurgeHistoryRestServlet(RestServlet): + PATTERNS = historical_admin_path_patterns( + "/purge_history/(?P[^/]*)(/(?P[^/]+))?" + ) + + def __init__(self, hs): + """ + + Args: + hs (synapse.server.HomeServer) + """ + self.pagination_handler = hs.get_pagination_handler() + self.store = hs.get_datastore() + self.auth = hs.get_auth() + + @defer.inlineCallbacks + def on_POST(self, request, room_id, event_id): + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + body = parse_json_object_from_request(request, allow_empty_body=True) + + delete_local_events = bool(body.get("delete_local_events", False)) + + # establish the topological ordering we should keep events from. The + # user can provide an event_id in the URL or the request body, or can + # provide a timestamp in the request body. 
+ if event_id is None: + event_id = body.get('purge_up_to_event_id') + + if event_id is not None: + event = yield self.store.get_event(event_id) + + if event.room_id != room_id: + raise SynapseError(400, "Event is for wrong room.") + + token = yield self.store.get_topological_token_for_event(event_id) + + logger.info( + "[purge] purging up to token %s (event_id %s)", + token, event_id, + ) + elif 'purge_up_to_ts' in body: + ts = body['purge_up_to_ts'] + if not isinstance(ts, int): + raise SynapseError( + 400, "purge_up_to_ts must be an int", + errcode=Codes.BAD_JSON, + ) + + stream_ordering = ( + yield self.store.find_first_stream_ordering_after_ts(ts) + ) + + r = ( + yield self.store.get_room_event_after_stream_ordering( + room_id, stream_ordering, + ) + ) + if not r: + logger.warn( + "[purge] purging events not possible: No event found " + "(received_ts %i => stream_ordering %i)", + ts, stream_ordering, + ) + raise SynapseError( + 404, + "there is no event to be purged", + errcode=Codes.NOT_FOUND, + ) + (stream, topo, _event_id) = r + token = "t%d-%d" % (topo, stream) + logger.info( + "[purge] purging up to token %s (received_ts %i => " + "stream_ordering %i)", + token, ts, stream_ordering, + ) + else: + raise SynapseError( + 400, + "must specify purge_up_to_event_id or purge_up_to_ts", + errcode=Codes.BAD_JSON, + ) + + purge_id = yield self.pagination_handler.start_purge_history( + room_id, token, + delete_local_events=delete_local_events, + ) + + defer.returnValue((200, { + "purge_id": purge_id, + })) + + +class PurgeHistoryStatusRestServlet(RestServlet): + PATTERNS = historical_admin_path_patterns( + "/purge_history_status/(?P[^/]+)" + ) + + def __init__(self, hs): + """ + + Args: + hs (synapse.server.HomeServer) + """ + self.pagination_handler = hs.get_pagination_handler() + self.auth = hs.get_auth() + + @defer.inlineCallbacks + def on_GET(self, request, purge_id): + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + purge_status = self.pagination_handler.get_purge_status(purge_id) + if purge_status is None: + raise NotFoundError("purge id '%s' not found" % purge_id) + + defer.returnValue((200, purge_status.asdict())) + + +class DeactivateAccountRestServlet(RestServlet): + PATTERNS = historical_admin_path_patterns("/deactivate/(?P[^/]*)") + + def __init__(self, hs): + self._deactivate_account_handler = hs.get_deactivate_account_handler() + self.auth = hs.get_auth() + + @defer.inlineCallbacks + def on_POST(self, request, target_user_id): + body = parse_json_object_from_request(request, allow_empty_body=True) + erase = body.get("erase", False) + if not isinstance(erase, bool): + raise SynapseError( + http_client.BAD_REQUEST, + "Param 'erase' must be a boolean, if given", + Codes.BAD_JSON, + ) + + UserID.from_string(target_user_id) + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + result = yield self._deactivate_account_handler.deactivate_account( + target_user_id, erase, + ) + if result: + id_server_unbind_result = "success" + else: + id_server_unbind_result = "no-support" + + defer.returnValue((200, { + "id_server_unbind_result": id_server_unbind_result, + })) + + +class ShutdownRoomRestServlet(RestServlet): + """Shuts down a room by removing all local users from the room and blocking + all future invites 
and joins to the room. Any local aliases will be repointed + to a new room created by `new_room_user_id` and kicked users will be auto + joined to the new room. + """ + PATTERNS = historical_admin_path_patterns("/shutdown_room/(?P[^/]+)") + + DEFAULT_MESSAGE = ( + "Sharing illegal content on this server is not permitted and rooms in" + " violation will be blocked." + ) + + def __init__(self, hs): + self.hs = hs + self.store = hs.get_datastore() + self.state = hs.get_state_handler() + self._room_creation_handler = hs.get_room_creation_handler() + self.event_creation_handler = hs.get_event_creation_handler() + self.room_member_handler = hs.get_room_member_handler() + self.auth = hs.get_auth() + + @defer.inlineCallbacks + def on_POST(self, request, room_id): + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + if not is_admin: + raise AuthError(403, "You are not a server admin") + + content = parse_json_object_from_request(request) + assert_params_in_dict(content, ["new_room_user_id"]) + new_room_user_id = content["new_room_user_id"] + + room_creator_requester = create_requester(new_room_user_id) + + message = content.get("message", self.DEFAULT_MESSAGE) + room_name = content.get("room_name", "Content Violation Notification") + + info = yield self._room_creation_handler.create_room( + room_creator_requester, + config={ + "preset": "public_chat", + "name": room_name, + "power_level_content_override": { + "users_default": -10, + }, + }, + ratelimit=False, + ) + new_room_id = info["room_id"] + + requester_user_id = requester.user.to_string() + + logger.info( + "Shutting down room %r, joining to new room: %r", + room_id, new_room_id, + ) + + # This will work even if the room is already blocked, but that is + # desirable in case the first attempt at blocking the room failed below. 
+ yield self.store.block_room(room_id, requester_user_id) + + users = yield self.state.get_current_users_in_room(room_id) + kicked_users = [] + failed_to_kick_users = [] + for user_id in users: + if not self.hs.is_mine_id(user_id): + continue + + logger.info("Kicking %r from %r...", user_id, room_id) + + try: + target_requester = create_requester(user_id) + yield self.room_member_handler.update_membership( + requester=target_requester, + target=target_requester.user, + room_id=room_id, + action=Membership.LEAVE, + content={}, + ratelimit=False, + require_consent=False, + ) + + yield self.room_member_handler.forget(target_requester.user, room_id) + + yield self.room_member_handler.update_membership( + requester=target_requester, + target=target_requester.user, + room_id=new_room_id, + action=Membership.JOIN, + content={}, + ratelimit=False, + require_consent=False, + ) + + kicked_users.append(user_id) + except Exception: + logger.exception( + "Failed to leave old room and join new room for %r", user_id, + ) + failed_to_kick_users.append(user_id) + + yield self.event_creation_handler.create_and_send_nonmember_event( + room_creator_requester, + { + "type": "m.room.message", + "content": {"body": message, "msgtype": "m.text"}, + "room_id": new_room_id, + "sender": new_room_user_id, + }, + ratelimit=False, + ) + + aliases_for_room = yield self.store.get_aliases_for_room(room_id) + + yield self.store.update_aliases_for_room( + room_id, new_room_id, requester_user_id + ) + + defer.returnValue((200, { + "kicked_users": kicked_users, + "failed_to_kick_users": failed_to_kick_users, + "local_aliases": aliases_for_room, + "new_room_id": new_room_id, + })) + + +class QuarantineMediaInRoom(RestServlet): + """Quarantines all media in a room so that no one can download it via + this server. + """ + PATTERNS = historical_admin_path_patterns("/quarantine_media/(?P[^/]+)") + + def __init__(self, hs): + self.store = hs.get_datastore() + self.auth = hs.get_auth() + + @defer.inlineCallbacks + def on_POST(self, request, room_id): + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + if not is_admin: + raise AuthError(403, "You are not a server admin") + + num_quarantined = yield self.store.quarantine_media_ids_in_room( + room_id, requester.user.to_string(), + ) + + defer.returnValue((200, {"num_quarantined": num_quarantined})) + + +class ListMediaInRoom(RestServlet): + """Lists all of the media in a given room. + """ + PATTERNS = historical_admin_path_patterns("/room/(?P[^/]+)/media") + + def __init__(self, hs): + self.store = hs.get_datastore() + + @defer.inlineCallbacks + def on_GET(self, request, room_id): + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + if not is_admin: + raise AuthError(403, "You are not a server admin") + + local_mxcs, remote_mxcs = yield self.store.get_media_mxcs_in_room(room_id) + + defer.returnValue((200, {"local": local_mxcs, "remote": remote_mxcs})) + + +class ResetPasswordRestServlet(RestServlet): + """Post request to allow an administrator reset password for a user. + This needs user to have administrator access in Synapse. + Example: + http://localhost:8008/_matrix/client/api/v1/admin/reset_password/ + @user:to_reset_password?access_token=admin_access_token + JsonBodyToSend: + { + "new_password": "secret" + } + Returns: + 200 OK with empty object if success otherwise an error. 
+ """ + PATTERNS = historical_admin_path_patterns("/reset_password/(?P[^/]*)") + + def __init__(self, hs): + self.store = hs.get_datastore() + self.hs = hs + self.auth = hs.get_auth() + self._set_password_handler = hs.get_set_password_handler() + + @defer.inlineCallbacks + def on_POST(self, request, target_user_id): + """Post request to allow an administrator reset password for a user. + This needs user to have administrator access in Synapse. + """ + UserID.from_string(target_user_id) + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + params = parse_json_object_from_request(request) + assert_params_in_dict(params, ["new_password"]) + new_password = params['new_password'] + + yield self._set_password_handler.set_password( + target_user_id, new_password, requester + ) + defer.returnValue((200, {})) + + +class GetUsersPaginatedRestServlet(RestServlet): + """Get request to get specific number of users from Synapse. + This needs user to have administrator access in Synapse. + Example: + http://localhost:8008/_matrix/client/api/v1/admin/users_paginate/ + @admin:user?access_token=admin_access_token&start=0&limit=10 + Returns: + 200 OK with json object {list[dict[str, Any]], count} or empty object. + """ + PATTERNS = historical_admin_path_patterns("/users_paginate/(?P[^/]*)") + + def __init__(self, hs): + self.store = hs.get_datastore() + self.hs = hs + self.auth = hs.get_auth() + self.handlers = hs.get_handlers() + + @defer.inlineCallbacks + def on_GET(self, request, target_user_id): + """Get request to get specific number of users from Synapse. + This needs user to have administrator access in Synapse. + """ + target_user = UserID.from_string(target_user_id) + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + # To allow all users to get the users list + # if not is_admin and target_user != auth_user: + # raise AuthError(403, "You are not a server admin") + + if not self.hs.is_mine(target_user): + raise SynapseError(400, "Can only users a local user") + + order = "name" # order by name in user table + start = parse_integer(request, "start", required=True) + limit = parse_integer(request, "limit", required=True) + + logger.info("limit: %s, start: %s", limit, start) + + ret = yield self.handlers.admin_handler.get_users_paginate( + order, start, limit + ) + defer.returnValue((200, ret)) + + @defer.inlineCallbacks + def on_POST(self, request, target_user_id): + """Post request to get specific number of users from Synapse.. + This needs user to have administrator access in Synapse. + Example: + http://localhost:8008/_matrix/client/api/v1/admin/users_paginate/ + @admin:user?access_token=admin_access_token + JsonBodyToSend: + { + "start": "0", + "limit": "10 + } + Returns: + 200 OK with json object {list[dict[str, Any]], count} or empty object. 
+ """ + UserID.from_string(target_user_id) + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + order = "name" # order by name in user table + params = parse_json_object_from_request(request) + assert_params_in_dict(params, ["limit", "start"]) + limit = params['limit'] + start = params['start'] + logger.info("limit: %s, start: %s", limit, start) + + ret = yield self.handlers.admin_handler.get_users_paginate( + order, start, limit + ) + defer.returnValue((200, ret)) + + +class SearchUsersRestServlet(RestServlet): + """Get request to search user table for specific users according to + search term. + This needs user to have administrator access in Synapse. + Example: + http://localhost:8008/_matrix/client/api/v1/admin/search_users/ + @admin:user?access_token=admin_access_token&term=alice + Returns: + 200 OK with json object {list[dict[str, Any]], count} or empty object. + """ + PATTERNS = historical_admin_path_patterns("/search_users/(?P[^/]*)") + + def __init__(self, hs): + self.store = hs.get_datastore() + self.hs = hs + self.auth = hs.get_auth() + self.handlers = hs.get_handlers() + + @defer.inlineCallbacks + def on_GET(self, request, target_user_id): + """Get request to search user table for specific users according to + search term. + This needs user to have a administrator access in Synapse. + """ + target_user = UserID.from_string(target_user_id) + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + # To allow all users to get the users list + # if not is_admin and target_user != auth_user: + # raise AuthError(403, "You are not a server admin") + + if not self.hs.is_mine(target_user): + raise SynapseError(400, "Can only users a local user") + + term = parse_string(request, "term", required=True) + logger.info("term: %s ", term) + + ret = yield self.handlers.admin_handler.search_users( + term + ) + defer.returnValue((200, ret)) + + +class DeleteGroupAdminRestServlet(RestServlet): + """Allows deleting of local groups + """ + PATTERNS = historical_admin_path_patterns("/delete_group/(?P[^/]*)") + + def __init__(self, hs): + self.group_server = hs.get_groups_server_handler() + self.is_mine_id = hs.is_mine_id + self.auth = hs.get_auth() + + @defer.inlineCallbacks + def on_POST(self, request, group_id): + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + if not self.is_mine_id(group_id): + raise SynapseError(400, "Can only delete local groups") + + yield self.group_server.delete_group(group_id, requester.user.to_string()) + defer.returnValue((200, {})) + + +class AccountValidityRenewServlet(RestServlet): + PATTERNS = historical_admin_path_patterns("/account_validity/validity$") + + def __init__(self, hs): + """ + Args: + hs (synapse.server.HomeServer): server + """ + self.hs = hs + self.account_activity_handler = hs.get_account_validity_handler() + self.auth = hs.get_auth() + + @defer.inlineCallbacks + def on_POST(self, request): + requester = yield self.auth.get_user_by_req(request) + is_admin = yield self.auth.is_server_admin(requester.user) + + if not is_admin: + raise AuthError(403, "You are not a server admin") + + body = parse_json_object_from_request(request) + + if 
"user_id" not in body: + raise SynapseError(400, "Missing property 'user_id' in the request body") + + expiration_ts = yield self.account_activity_handler.renew_account_for_user( + body["user_id"], body.get("expiration_ts"), + not body.get("enable_renewal_emails", True), + ) + + res = { + "expiration_ts": expiration_ts, + } + defer.returnValue((200, res)) + + +class AdminRestResource(JsonResource): + """The REST resource which gets mounted at /_synapse/admin""" + + def __init__(self, hs): + JsonResource.__init__(self, hs, canonical_json=False) + register_servlets(hs, self) + + +def register_servlets(hs, http_server): + WhoisRestServlet(hs).register(http_server) + PurgeMediaCacheRestServlet(hs).register(http_server) + PurgeHistoryStatusRestServlet(hs).register(http_server) + DeactivateAccountRestServlet(hs).register(http_server) + PurgeHistoryRestServlet(hs).register(http_server) + UsersRestServlet(hs).register(http_server) + ResetPasswordRestServlet(hs).register(http_server) + GetUsersPaginatedRestServlet(hs).register(http_server) + SearchUsersRestServlet(hs).register(http_server) + ShutdownRoomRestServlet(hs).register(http_server) + QuarantineMediaInRoom(hs).register(http_server) + ListMediaInRoom(hs).register(http_server) + UserRegisterServlet(hs).register(http_server) + VersionServlet(hs).register(http_server) + DeleteGroupAdminRestServlet(hs).register(http_server) + AccountValidityRenewServlet(hs).register(http_server) diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py deleted file mode 100644 index d4f83e4ae8..0000000000 --- a/synapse/rest/client/v1/admin.py +++ /dev/null @@ -1,896 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2014-2016 OpenMarket Ltd -# Copyright 2018 New Vector Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import hashlib -import hmac -import logging -import platform -import re - -from six import text_type -from six.moves import http_client - -from twisted.internet import defer - -import synapse -from synapse.api.constants import Membership, UserTypes -from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError -from synapse.http.server import JsonResource -from synapse.http.servlet import ( - RestServlet, - assert_params_in_dict, - parse_integer, - parse_json_object_from_request, - parse_string, -) -from synapse.types import UserID, create_requester -from synapse.util.versionstring import get_version_string - -logger = logging.getLogger(__name__) - - -def historical_admin_path_patterns(path_regex): - """Returns the list of patterns for an admin endpoint, including historical ones - - This is a backwards-compatibility hack. Previously, the Admin API was exposed at - various paths under /_matrix/client. This function returns a list of patterns - matching those paths (as well as the new one), so that existing scripts which rely - on the endpoints being available there are not broken. - - Note that this should only be used for existing endpoints: new ones should just - register for the /_synapse/admin path. 
- """ - return list( - re.compile(prefix + path_regex) - for prefix in ( - "^/_synapse/admin/v1", - "^/_matrix/client/api/v1/admin", - "^/_matrix/client/unstable/admin", - "^/_matrix/client/r0/admin" - ) - ) - - -class UsersRestServlet(RestServlet): - PATTERNS = historical_admin_path_patterns("/users/(?P[^/]*)") - - def __init__(self, hs): - self.hs = hs - self.auth = hs.get_auth() - self.handlers = hs.get_handlers() - - @defer.inlineCallbacks - def on_GET(self, request, user_id): - target_user = UserID.from_string(user_id) - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") - - # To allow all users to get the users list - # if not is_admin and target_user != auth_user: - # raise AuthError(403, "You are not a server admin") - - if not self.hs.is_mine(target_user): - raise SynapseError(400, "Can only users a local user") - - ret = yield self.handlers.admin_handler.get_users() - - defer.returnValue((200, ret)) - - -class VersionServlet(RestServlet): - PATTERNS = historical_admin_path_patterns("/server_version") - - def __init__(self, hs): - self.auth = hs.get_auth() - - @defer.inlineCallbacks - def on_GET(self, request): - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") - - ret = { - 'server_version': get_version_string(synapse), - 'python_version': platform.python_version(), - } - - defer.returnValue((200, ret)) - - -class UserRegisterServlet(RestServlet): - """ - Attributes: - NONCE_TIMEOUT (int): Seconds until a generated nonce won't be accepted - nonces (dict[str, int]): The nonces that we will accept. A dict of - nonce to the time it was generated, in int seconds. - """ - PATTERNS = historical_admin_path_patterns("/register") - NONCE_TIMEOUT = 60 - - def __init__(self, hs): - self.handlers = hs.get_handlers() - self.reactor = hs.get_reactor() - self.nonces = {} - self.hs = hs - - def _clear_old_nonces(self): - """ - Clear out old nonces that are older than NONCE_TIMEOUT. - """ - now = int(self.reactor.seconds()) - - for k, v in list(self.nonces.items()): - if now - v > self.NONCE_TIMEOUT: - del self.nonces[k] - - def on_GET(self, request): - """ - Generate a new nonce. 
- """ - self._clear_old_nonces() - - nonce = self.hs.get_secrets().token_hex(64) - self.nonces[nonce] = int(self.reactor.seconds()) - return (200, {"nonce": nonce}) - - @defer.inlineCallbacks - def on_POST(self, request): - self._clear_old_nonces() - - if not self.hs.config.registration_shared_secret: - raise SynapseError(400, "Shared secret registration is not enabled") - - body = parse_json_object_from_request(request) - - if "nonce" not in body: - raise SynapseError( - 400, "nonce must be specified", errcode=Codes.BAD_JSON, - ) - - nonce = body["nonce"] - - if nonce not in self.nonces: - raise SynapseError( - 400, "unrecognised nonce", - ) - - # Delete the nonce, so it can't be reused, even if it's invalid - del self.nonces[nonce] - - if "username" not in body: - raise SynapseError( - 400, "username must be specified", errcode=Codes.BAD_JSON, - ) - else: - if ( - not isinstance(body['username'], text_type) - or len(body['username']) > 512 - ): - raise SynapseError(400, "Invalid username") - - username = body["username"].encode("utf-8") - if b"\x00" in username: - raise SynapseError(400, "Invalid username") - - if "password" not in body: - raise SynapseError( - 400, "password must be specified", errcode=Codes.BAD_JSON, - ) - else: - if ( - not isinstance(body['password'], text_type) - or len(body['password']) > 512 - ): - raise SynapseError(400, "Invalid password") - - password = body["password"].encode("utf-8") - if b"\x00" in password: - raise SynapseError(400, "Invalid password") - - admin = body.get("admin", None) - user_type = body.get("user_type", None) - - if user_type is not None and user_type not in UserTypes.ALL_USER_TYPES: - raise SynapseError(400, "Invalid user type") - - got_mac = body["mac"] - - want_mac = hmac.new( - key=self.hs.config.registration_shared_secret.encode(), - digestmod=hashlib.sha1, - ) - want_mac.update(nonce.encode('utf8')) - want_mac.update(b"\x00") - want_mac.update(username) - want_mac.update(b"\x00") - want_mac.update(password) - want_mac.update(b"\x00") - want_mac.update(b"admin" if admin else b"notadmin") - if user_type: - want_mac.update(b"\x00") - want_mac.update(user_type.encode('utf8')) - want_mac = want_mac.hexdigest() - - if not hmac.compare_digest( - want_mac.encode('ascii'), - got_mac.encode('ascii') - ): - raise SynapseError(403, "HMAC incorrect") - - # Reuse the parts of RegisterRestServlet to reduce code duplication - from synapse.rest.client.v2_alpha.register import RegisterRestServlet - - register = RegisterRestServlet(self.hs) - - (user_id, _) = yield register.registration_handler.register( - localpart=body['username'].lower(), - password=body["password"], - admin=bool(admin), - generate_token=False, - user_type=user_type, - ) - - result = yield register._create_registration_details(user_id, body) - defer.returnValue((200, result)) - - -class WhoisRestServlet(RestServlet): - PATTERNS = historical_admin_path_patterns("/whois/(?P[^/]*)") - - def __init__(self, hs): - self.hs = hs - self.auth = hs.get_auth() - self.handlers = hs.get_handlers() - - @defer.inlineCallbacks - def on_GET(self, request, user_id): - target_user = UserID.from_string(user_id) - requester = yield self.auth.get_user_by_req(request) - auth_user = requester.user - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin and target_user != auth_user: - raise AuthError(403, "You are not a server admin") - - if not self.hs.is_mine(target_user): - raise SynapseError(400, "Can only whois a local user") - - ret = yield 
self.handlers.admin_handler.get_whois(target_user) - - defer.returnValue((200, ret)) - - -class PurgeMediaCacheRestServlet(RestServlet): - PATTERNS = historical_admin_path_patterns("/purge_media_cache") - - def __init__(self, hs): - self.media_repository = hs.get_media_repository() - self.auth = hs.get_auth() - - @defer.inlineCallbacks - def on_POST(self, request): - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") - - before_ts = parse_integer(request, "before_ts", required=True) - logger.info("before_ts: %r", before_ts) - - ret = yield self.media_repository.delete_old_remote_media(before_ts) - - defer.returnValue((200, ret)) - - -class PurgeHistoryRestServlet(RestServlet): - PATTERNS = historical_admin_path_patterns( - "/purge_history/(?P[^/]*)(/(?P[^/]+))?" - ) - - def __init__(self, hs): - """ - - Args: - hs (synapse.server.HomeServer) - """ - self.pagination_handler = hs.get_pagination_handler() - self.store = hs.get_datastore() - self.auth = hs.get_auth() - - @defer.inlineCallbacks - def on_POST(self, request, room_id, event_id): - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") - - body = parse_json_object_from_request(request, allow_empty_body=True) - - delete_local_events = bool(body.get("delete_local_events", False)) - - # establish the topological ordering we should keep events from. The - # user can provide an event_id in the URL or the request body, or can - # provide a timestamp in the request body. - if event_id is None: - event_id = body.get('purge_up_to_event_id') - - if event_id is not None: - event = yield self.store.get_event(event_id) - - if event.room_id != room_id: - raise SynapseError(400, "Event is for wrong room.") - - token = yield self.store.get_topological_token_for_event(event_id) - - logger.info( - "[purge] purging up to token %s (event_id %s)", - token, event_id, - ) - elif 'purge_up_to_ts' in body: - ts = body['purge_up_to_ts'] - if not isinstance(ts, int): - raise SynapseError( - 400, "purge_up_to_ts must be an int", - errcode=Codes.BAD_JSON, - ) - - stream_ordering = ( - yield self.store.find_first_stream_ordering_after_ts(ts) - ) - - r = ( - yield self.store.get_room_event_after_stream_ordering( - room_id, stream_ordering, - ) - ) - if not r: - logger.warn( - "[purge] purging events not possible: No event found " - "(received_ts %i => stream_ordering %i)", - ts, stream_ordering, - ) - raise SynapseError( - 404, - "there is no event to be purged", - errcode=Codes.NOT_FOUND, - ) - (stream, topo, _event_id) = r - token = "t%d-%d" % (topo, stream) - logger.info( - "[purge] purging up to token %s (received_ts %i => " - "stream_ordering %i)", - token, ts, stream_ordering, - ) - else: - raise SynapseError( - 400, - "must specify purge_up_to_event_id or purge_up_to_ts", - errcode=Codes.BAD_JSON, - ) - - purge_id = yield self.pagination_handler.start_purge_history( - room_id, token, - delete_local_events=delete_local_events, - ) - - defer.returnValue((200, { - "purge_id": purge_id, - })) - - -class PurgeHistoryStatusRestServlet(RestServlet): - PATTERNS = historical_admin_path_patterns( - "/purge_history_status/(?P[^/]+)" - ) - - def __init__(self, hs): - """ - - Args: - hs (synapse.server.HomeServer) - """ - self.pagination_handler = hs.get_pagination_handler() - self.auth = hs.get_auth() 
-
-    @defer.inlineCallbacks
-    def on_GET(self, request, purge_id):
-        requester = yield self.auth.get_user_by_req(request)
-        is_admin = yield self.auth.is_server_admin(requester.user)
-
-        if not is_admin:
-            raise AuthError(403, "You are not a server admin")
-
-        purge_status = self.pagination_handler.get_purge_status(purge_id)
-        if purge_status is None:
-            raise NotFoundError("purge id '%s' not found" % purge_id)
-
-        defer.returnValue((200, purge_status.asdict()))
-
-
-class DeactivateAccountRestServlet(RestServlet):
-    PATTERNS = historical_admin_path_patterns("/deactivate/(?P<target_user_id>[^/]*)")
-
-    def __init__(self, hs):
-        self._deactivate_account_handler = hs.get_deactivate_account_handler()
-        self.auth = hs.get_auth()
-
-    @defer.inlineCallbacks
-    def on_POST(self, request, target_user_id):
-        body = parse_json_object_from_request(request, allow_empty_body=True)
-        erase = body.get("erase", False)
-        if not isinstance(erase, bool):
-            raise SynapseError(
-                http_client.BAD_REQUEST,
-                "Param 'erase' must be a boolean, if given",
-                Codes.BAD_JSON,
-            )
-
-        UserID.from_string(target_user_id)
-        requester = yield self.auth.get_user_by_req(request)
-        is_admin = yield self.auth.is_server_admin(requester.user)
-
-        if not is_admin:
-            raise AuthError(403, "You are not a server admin")
-
-        result = yield self._deactivate_account_handler.deactivate_account(
-            target_user_id, erase,
-        )
-        if result:
-            id_server_unbind_result = "success"
-        else:
-            id_server_unbind_result = "no-support"
-
-        defer.returnValue((200, {
-            "id_server_unbind_result": id_server_unbind_result,
-        }))
-
-
-class ShutdownRoomRestServlet(RestServlet):
-    """Shuts down a room by removing all local users from the room and blocking
-    all future invites and joins to the room. Any local aliases will be repointed
-    to a new room created by `new_room_user_id` and kicked users will be auto
-    joined to the new room.
-    """
-    PATTERNS = historical_admin_path_patterns("/shutdown_room/(?P<room_id>[^/]+)")
-
-    DEFAULT_MESSAGE = (
-        "Sharing illegal content on this server is not permitted and rooms in"
-        " violation will be blocked."
- ) - - def __init__(self, hs): - self.hs = hs - self.store = hs.get_datastore() - self.state = hs.get_state_handler() - self._room_creation_handler = hs.get_room_creation_handler() - self.event_creation_handler = hs.get_event_creation_handler() - self.room_member_handler = hs.get_room_member_handler() - self.auth = hs.get_auth() - - @defer.inlineCallbacks - def on_POST(self, request, room_id): - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - if not is_admin: - raise AuthError(403, "You are not a server admin") - - content = parse_json_object_from_request(request) - assert_params_in_dict(content, ["new_room_user_id"]) - new_room_user_id = content["new_room_user_id"] - - room_creator_requester = create_requester(new_room_user_id) - - message = content.get("message", self.DEFAULT_MESSAGE) - room_name = content.get("room_name", "Content Violation Notification") - - info = yield self._room_creation_handler.create_room( - room_creator_requester, - config={ - "preset": "public_chat", - "name": room_name, - "power_level_content_override": { - "users_default": -10, - }, - }, - ratelimit=False, - ) - new_room_id = info["room_id"] - - requester_user_id = requester.user.to_string() - - logger.info( - "Shutting down room %r, joining to new room: %r", - room_id, new_room_id, - ) - - # This will work even if the room is already blocked, but that is - # desirable in case the first attempt at blocking the room failed below. - yield self.store.block_room(room_id, requester_user_id) - - users = yield self.state.get_current_users_in_room(room_id) - kicked_users = [] - failed_to_kick_users = [] - for user_id in users: - if not self.hs.is_mine_id(user_id): - continue - - logger.info("Kicking %r from %r...", user_id, room_id) - - try: - target_requester = create_requester(user_id) - yield self.room_member_handler.update_membership( - requester=target_requester, - target=target_requester.user, - room_id=room_id, - action=Membership.LEAVE, - content={}, - ratelimit=False, - require_consent=False, - ) - - yield self.room_member_handler.forget(target_requester.user, room_id) - - yield self.room_member_handler.update_membership( - requester=target_requester, - target=target_requester.user, - room_id=new_room_id, - action=Membership.JOIN, - content={}, - ratelimit=False, - require_consent=False, - ) - - kicked_users.append(user_id) - except Exception: - logger.exception( - "Failed to leave old room and join new room for %r", user_id, - ) - failed_to_kick_users.append(user_id) - - yield self.event_creation_handler.create_and_send_nonmember_event( - room_creator_requester, - { - "type": "m.room.message", - "content": {"body": message, "msgtype": "m.text"}, - "room_id": new_room_id, - "sender": new_room_user_id, - }, - ratelimit=False, - ) - - aliases_for_room = yield self.store.get_aliases_for_room(room_id) - - yield self.store.update_aliases_for_room( - room_id, new_room_id, requester_user_id - ) - - defer.returnValue((200, { - "kicked_users": kicked_users, - "failed_to_kick_users": failed_to_kick_users, - "local_aliases": aliases_for_room, - "new_room_id": new_room_id, - })) - - -class QuarantineMediaInRoom(RestServlet): - """Quarantines all media in a room so that no one can download it via - this server. 
-    """
-    PATTERNS = historical_admin_path_patterns("/quarantine_media/(?P<room_id>[^/]+)")
-
-    def __init__(self, hs):
-        self.store = hs.get_datastore()
-        self.auth = hs.get_auth()
-
-    @defer.inlineCallbacks
-    def on_POST(self, request, room_id):
-        requester = yield self.auth.get_user_by_req(request)
-        is_admin = yield self.auth.is_server_admin(requester.user)
-        if not is_admin:
-            raise AuthError(403, "You are not a server admin")
-
-        num_quarantined = yield self.store.quarantine_media_ids_in_room(
-            room_id, requester.user.to_string(),
-        )
-
-        defer.returnValue((200, {"num_quarantined": num_quarantined}))
-
-
-class ListMediaInRoom(RestServlet):
-    """Lists all of the media in a given room.
-    """
-    PATTERNS = historical_admin_path_patterns("/room/(?P<room_id>[^/]+)/media")
-
-    def __init__(self, hs):
-        self.store = hs.get_datastore()
-
-    @defer.inlineCallbacks
-    def on_GET(self, request, room_id):
-        requester = yield self.auth.get_user_by_req(request)
-        is_admin = yield self.auth.is_server_admin(requester.user)
-        if not is_admin:
-            raise AuthError(403, "You are not a server admin")
-
-        local_mxcs, remote_mxcs = yield self.store.get_media_mxcs_in_room(room_id)
-
-        defer.returnValue((200, {"local": local_mxcs, "remote": remote_mxcs}))
-
-
-class ResetPasswordRestServlet(RestServlet):
-    """Post request to allow an administrator reset password for a user.
-    This needs user to have administrator access in Synapse.
-        Example:
-            http://localhost:8008/_matrix/client/api/v1/admin/reset_password/
-            @user:to_reset_password?access_token=admin_access_token
-        JsonBodyToSend:
-            {
-                "new_password": "secret"
-            }
-        Returns:
-            200 OK with empty object if success otherwise an error.
-    """
-    PATTERNS = historical_admin_path_patterns("/reset_password/(?P<target_user_id>[^/]*)")
-
-    def __init__(self, hs):
-        self.store = hs.get_datastore()
-        self.hs = hs
-        self.auth = hs.get_auth()
-        self._set_password_handler = hs.get_set_password_handler()
-
-    @defer.inlineCallbacks
-    def on_POST(self, request, target_user_id):
-        """Post request to allow an administrator reset password for a user.
-        This needs user to have administrator access in Synapse.
-        """
-        UserID.from_string(target_user_id)
-        requester = yield self.auth.get_user_by_req(request)
-        is_admin = yield self.auth.is_server_admin(requester.user)
-
-        if not is_admin:
-            raise AuthError(403, "You are not a server admin")
-
-        params = parse_json_object_from_request(request)
-        assert_params_in_dict(params, ["new_password"])
-        new_password = params['new_password']
-
-        yield self._set_password_handler.set_password(
-            target_user_id, new_password, requester
-        )
-        defer.returnValue((200, {}))
-
-
-class GetUsersPaginatedRestServlet(RestServlet):
-    """Get request to get specific number of users from Synapse.
-    This needs user to have administrator access in Synapse.
-        Example:
-            http://localhost:8008/_matrix/client/api/v1/admin/users_paginate/
-            @admin:user?access_token=admin_access_token&start=0&limit=10
-        Returns:
-            200 OK with json object {list[dict[str, Any]], count} or empty object.
-    """
-    PATTERNS = historical_admin_path_patterns("/users_paginate/(?P<target_user_id>[^/]*)")
-
-    def __init__(self, hs):
-        self.store = hs.get_datastore()
-        self.hs = hs
-        self.auth = hs.get_auth()
-        self.handlers = hs.get_handlers()
-
-    @defer.inlineCallbacks
-    def on_GET(self, request, target_user_id):
-        """Get request to get specific number of users from Synapse.
-        This needs user to have administrator access in Synapse.
-        """
-        target_user = UserID.from_string(target_user_id)
-        requester = yield self.auth.get_user_by_req(request)
-        is_admin = yield self.auth.is_server_admin(requester.user)
-
-        if not is_admin:
-            raise AuthError(403, "You are not a server admin")
-
-        # To allow all users to get the users list
-        # if not is_admin and target_user != auth_user:
-        #     raise AuthError(403, "You are not a server admin")
-
-        if not self.hs.is_mine(target_user):
-            raise SynapseError(400, "Can only users a local user")
-
-        order = "name"  # order by name in user table
-        start = parse_integer(request, "start", required=True)
-        limit = parse_integer(request, "limit", required=True)
-
-        logger.info("limit: %s, start: %s", limit, start)
-
-        ret = yield self.handlers.admin_handler.get_users_paginate(
-            order, start, limit
-        )
-        defer.returnValue((200, ret))
-
-    @defer.inlineCallbacks
-    def on_POST(self, request, target_user_id):
-        """Post request to get specific number of users from Synapse..
-        This needs user to have administrator access in Synapse.
-        Example:
-            http://localhost:8008/_matrix/client/api/v1/admin/users_paginate/
-            @admin:user?access_token=admin_access_token
-        JsonBodyToSend:
-            {
-                "start": "0",
-                "limit": "10
-            }
-        Returns:
-            200 OK with json object {list[dict[str, Any]], count} or empty object.
-        """
-        UserID.from_string(target_user_id)
-        requester = yield self.auth.get_user_by_req(request)
-        is_admin = yield self.auth.is_server_admin(requester.user)
-
-        if not is_admin:
-            raise AuthError(403, "You are not a server admin")
-
-        order = "name"  # order by name in user table
-        params = parse_json_object_from_request(request)
-        assert_params_in_dict(params, ["limit", "start"])
-        limit = params['limit']
-        start = params['start']
-        logger.info("limit: %s, start: %s", limit, start)
-
-        ret = yield self.handlers.admin_handler.get_users_paginate(
-            order, start, limit
-        )
-        defer.returnValue((200, ret))
-
-
-class SearchUsersRestServlet(RestServlet):
-    """Get request to search user table for specific users according to
-    search term.
-    This needs user to have administrator access in Synapse.
-        Example:
-            http://localhost:8008/_matrix/client/api/v1/admin/search_users/
-            @admin:user?access_token=admin_access_token&term=alice
-        Returns:
-            200 OK with json object {list[dict[str, Any]], count} or empty object.
-    """
-    PATTERNS = historical_admin_path_patterns("/search_users/(?P<target_user_id>[^/]*)")
-
-    def __init__(self, hs):
-        self.store = hs.get_datastore()
-        self.hs = hs
-        self.auth = hs.get_auth()
-        self.handlers = hs.get_handlers()
-
-    @defer.inlineCallbacks
-    def on_GET(self, request, target_user_id):
-        """Get request to search user table for specific users according to
-        search term.
-        This needs user to have a administrator access in Synapse.
-        """
-        target_user = UserID.from_string(target_user_id)
-        requester = yield self.auth.get_user_by_req(request)
-        is_admin = yield self.auth.is_server_admin(requester.user)
-
-        if not is_admin:
-            raise AuthError(403, "You are not a server admin")
-
-        # To allow all users to get the users list
-        # if not is_admin and target_user != auth_user:
-        #     raise AuthError(403, "You are not a server admin")
-
-        if not self.hs.is_mine(target_user):
-            raise SynapseError(400, "Can only users a local user")
-
-        term = parse_string(request, "term", required=True)
-        logger.info("term: %s ", term)
-
-        ret = yield self.handlers.admin_handler.search_users(
-            term
-        )
-        defer.returnValue((200, ret))
-
-
-class DeleteGroupAdminRestServlet(RestServlet):
-    """Allows deleting of local groups
-    """
-    PATTERNS = historical_admin_path_patterns("/delete_group/(?P<group_id>[^/]*)")
-
-    def __init__(self, hs):
-        self.group_server = hs.get_groups_server_handler()
-        self.is_mine_id = hs.is_mine_id
-        self.auth = hs.get_auth()
-
-    @defer.inlineCallbacks
-    def on_POST(self, request, group_id):
-        requester = yield self.auth.get_user_by_req(request)
-        is_admin = yield self.auth.is_server_admin(requester.user)
-
-        if not is_admin:
-            raise AuthError(403, "You are not a server admin")
-
-        if not self.is_mine_id(group_id):
-            raise SynapseError(400, "Can only delete local groups")
-
-        yield self.group_server.delete_group(group_id, requester.user.to_string())
-        defer.returnValue((200, {}))
-
-
-class AccountValidityRenewServlet(RestServlet):
-    PATTERNS = historical_admin_path_patterns("/account_validity/validity$")
-
-    def __init__(self, hs):
-        """
-        Args:
-            hs (synapse.server.HomeServer): server
-        """
-        self.hs = hs
-        self.account_activity_handler = hs.get_account_validity_handler()
-        self.auth = hs.get_auth()
-
-    @defer.inlineCallbacks
-    def on_POST(self, request):
-        requester = yield self.auth.get_user_by_req(request)
-        is_admin = yield self.auth.is_server_admin(requester.user)
-
-        if not is_admin:
-            raise AuthError(403, "You are not a server admin")
-
-        body = parse_json_object_from_request(request)
-
-        if "user_id" not in body:
-            raise SynapseError(400, "Missing property 'user_id' in the request body")
-
-        expiration_ts = yield self.account_activity_handler.renew_account_for_user(
-            body["user_id"], body.get("expiration_ts"),
-            not body.get("enable_renewal_emails", True),
-        )
-
-        res = {
-            "expiration_ts": expiration_ts,
-        }
-        defer.returnValue((200, res))
-
-
-class AdminRestResource(JsonResource):
-    """The REST resource which gets mounted at /_synapse/admin"""
-
-    def __init__(self, hs):
-        JsonResource.__init__(self, hs, canonical_json=False)
-        register_servlets(hs, self)
-
-
-def register_servlets(hs, http_server):
-    WhoisRestServlet(hs).register(http_server)
-    PurgeMediaCacheRestServlet(hs).register(http_server)
-    PurgeHistoryStatusRestServlet(hs).register(http_server)
-    DeactivateAccountRestServlet(hs).register(http_server)
-    PurgeHistoryRestServlet(hs).register(http_server)
-    UsersRestServlet(hs).register(http_server)
-    ResetPasswordRestServlet(hs).register(http_server)
-    GetUsersPaginatedRestServlet(hs).register(http_server)
-    SearchUsersRestServlet(hs).register(http_server)
-    ShutdownRoomRestServlet(hs).register(http_server)
-    QuarantineMediaInRoom(hs).register(http_server)
-    ListMediaInRoom(hs).register(http_server)
-    UserRegisterServlet(hs).register(http_server)
-    VersionServlet(hs).register(http_server)
-    DeleteGroupAdminRestServlet(hs).register(http_server)
-    AccountValidityRenewServlet(hs).register(http_server)
diff --git
a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py index f1d0aa42b6..32ef83e9c0 100644 --- a/tests/handlers/test_user_directory.py +++ b/tests/handlers/test_user_directory.py @@ -14,8 +14,9 @@ # limitations under the License. from mock import Mock +import synapse.rest.admin from synapse.api.constants import UserTypes -from synapse.rest.client.v1 import admin, login, room +from synapse.rest.client.v1 import login, room from synapse.rest.client.v2_alpha import user_directory from synapse.storage.roommember import ProfileInfo @@ -29,7 +30,7 @@ class UserDirectoryTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, - admin.register_servlets, + synapse.rest.admin.register_servlets, room.register_servlets, ] @@ -327,7 +328,7 @@ class TestUserDirSearchDisabled(unittest.HomeserverTestCase): user_directory.register_servlets, room.register_servlets, login.register_servlets, - admin.register_servlets, + synapse.rest.admin.register_servlets, ] def make_homeserver(self, reactor, clock): diff --git a/tests/push/test_email.py b/tests/push/test_email.py index be3fed8de3..e29bd18ad7 100644 --- a/tests/push/test_email.py +++ b/tests/push/test_email.py @@ -19,7 +19,8 @@ import pkg_resources from twisted.internet.defer import Deferred -from synapse.rest.client.v1 import admin, login, room +import synapse.rest.admin +from synapse.rest.client.v1 import login, room from tests.unittest import HomeserverTestCase @@ -33,7 +34,7 @@ class EmailPusherTests(HomeserverTestCase): skip = "No Jinja installed" if not load_jinja2_templates else None servlets = [ - admin.register_servlets, + synapse.rest.admin.register_servlets, room.register_servlets, login.register_servlets, ] diff --git a/tests/push/test_http.py b/tests/push/test_http.py index 6dc45e8506..3f9f56bb79 100644 --- a/tests/push/test_http.py +++ b/tests/push/test_http.py @@ -17,7 +17,8 @@ from mock import Mock from twisted.internet.defer import Deferred -from synapse.rest.client.v1 import admin, login, room +import synapse.rest.admin +from synapse.rest.client.v1 import login, room from synapse.util.logcontext import make_deferred_yieldable from tests.unittest import HomeserverTestCase @@ -32,7 +33,7 @@ class HTTPPusherTests(HomeserverTestCase): skip = "No Jinja installed" if not load_jinja2_templates else None servlets = [ - admin.register_servlets, + synapse.rest.admin.register_servlets, room.register_servlets, login.register_servlets, ] diff --git a/tests/rest/admin/__init__.py b/tests/rest/admin/__init__.py new file mode 100644 index 0000000000..1453d04571 --- /dev/null +++ b/tests/rest/admin/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
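The admin servlets shown in the hunks above all declare their PATTERNS through historical_admin_path_patterns, whose body is not included in this patch. A minimal sketch of such a helper -- assuming it simply registers each endpoint suffix under the new /_synapse/admin/v1 prefix as well as the legacy /_matrix/client/{api/v1,r0,unstable}/admin prefixes that the changelog below says are retained for backwards compatibility -- could look roughly like this (illustrative only, not necessarily the exact implementation):

    # Sketch only: assumes the helper maps a suffix onto both the new and
    # the historical admin URL prefixes.
    import re


    def historical_admin_path_patterns(path_regex):
        """Return compiled URL patterns for an admin endpoint, covering the
        new /_synapse/admin/v1 location plus the historical client-API ones.
        """
        return [
            re.compile(prefix + path_regex)
            for prefix in (
                "^/_synapse/admin/v1",
                "^/_matrix/client/api/v1/admin",
                "^/_matrix/client/unstable/admin",
                "^/_matrix/client/r0/admin",
            )
        ]

Keeping the historical prefixes means tooling that still posts to the old /_matrix/client/.../admin paths keeps working, while new deployments can target /_synapse/admin/v1 directly.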
diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py new file mode 100644 index 0000000000..42858b5fea --- /dev/null +++ b/tests/rest/admin/test_admin.py @@ -0,0 +1,617 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import hashlib +import hmac +import json + +from mock import Mock + +import synapse.rest.admin +from synapse.api.constants import UserTypes +from synapse.rest.client.v1 import events, login, room +from synapse.rest.client.v2_alpha import groups + +from tests import unittest + + +class VersionTestCase(unittest.HomeserverTestCase): + + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + ] + + url = '/_matrix/client/r0/admin/server_version' + + def test_version_string(self): + self.register_user("admin", "pass", admin=True) + self.admin_token = self.login("admin", "pass") + + request, channel = self.make_request("GET", self.url, + access_token=self.admin_token) + self.render(request) + + self.assertEqual(200, int(channel.result["code"]), + msg=channel.result["body"]) + self.assertEqual({'server_version', 'python_version'}, + set(channel.json_body.keys())) + + def test_inaccessible_to_non_admins(self): + self.register_user("unprivileged-user", "pass", admin=False) + user_token = self.login("unprivileged-user", "pass") + + request, channel = self.make_request("GET", self.url, + access_token=user_token) + self.render(request) + + self.assertEqual(403, int(channel.result['code']), + msg=channel.result['body']) + + +class UserRegisterTestCase(unittest.HomeserverTestCase): + + servlets = [synapse.rest.admin.register_servlets] + + def make_homeserver(self, reactor, clock): + + self.url = "/_matrix/client/r0/admin/register" + + self.registration_handler = Mock() + self.identity_handler = Mock() + self.login_handler = Mock() + self.device_handler = Mock() + self.device_handler.check_device_registered = Mock(return_value="FAKE") + + self.datastore = Mock(return_value=Mock()) + self.datastore.get_current_state_deltas = Mock(return_value=[]) + + self.secrets = Mock() + + self.hs = self.setup_test_homeserver() + + self.hs.config.registration_shared_secret = u"shared" + + self.hs.get_media_repository = Mock() + self.hs.get_deactivate_account_handler = Mock() + + return self.hs + + def test_disabled(self): + """ + If there is no shared secret, registration through this method will be + prevented. + """ + self.hs.config.registration_shared_secret = None + + request, channel = self.make_request("POST", self.url, b'{}') + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual( + 'Shared secret registration is not enabled', channel.json_body["error"] + ) + + def test_get_nonce(self): + """ + Calling GET on the endpoint will return a randomised nonce, using the + homeserver's secrets provider. 
+ """ + secrets = Mock() + secrets.token_hex = Mock(return_value="abcd") + + self.hs.get_secrets = Mock(return_value=secrets) + + request, channel = self.make_request("GET", self.url) + self.render(request) + + self.assertEqual(channel.json_body, {"nonce": "abcd"}) + + def test_expired_nonce(self): + """ + Calling GET on the endpoint will return a randomised nonce, which will + only last for SALT_TIMEOUT (60s). + """ + request, channel = self.make_request("GET", self.url) + self.render(request) + nonce = channel.json_body["nonce"] + + # 59 seconds + self.reactor.advance(59) + + body = json.dumps({"nonce": nonce}) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('username must be specified', channel.json_body["error"]) + + # 61 seconds + self.reactor.advance(2) + + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('unrecognised nonce', channel.json_body["error"]) + + def test_register_incorrect_nonce(self): + """ + Only the provided nonce can be used, as it's checked in the MAC. + """ + request, channel = self.make_request("GET", self.url) + self.render(request) + nonce = channel.json_body["nonce"] + + want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1) + want_mac.update(b"notthenonce\x00bob\x00abc123\x00admin") + want_mac = want_mac.hexdigest() + + body = json.dumps( + { + "nonce": nonce, + "username": "bob", + "password": "abc123", + "admin": True, + "mac": want_mac, + } + ) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual("HMAC incorrect", channel.json_body["error"]) + + def test_register_correct_nonce(self): + """ + When the correct nonce is provided, and the right key is provided, the + user is registered. + """ + request, channel = self.make_request("GET", self.url) + self.render(request) + nonce = channel.json_body["nonce"] + + want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1) + want_mac.update( + nonce.encode('ascii') + b"\x00bob\x00abc123\x00admin\x00support" + ) + want_mac = want_mac.hexdigest() + + body = json.dumps( + { + "nonce": nonce, + "username": "bob", + "password": "abc123", + "admin": True, + "user_type": UserTypes.SUPPORT, + "mac": want_mac, + } + ) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual("@bob:test", channel.json_body["user_id"]) + + def test_nonce_reuse(self): + """ + A valid unrecognised nonce. 
+ """ + request, channel = self.make_request("GET", self.url) + self.render(request) + nonce = channel.json_body["nonce"] + + want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1) + want_mac.update( + nonce.encode('ascii') + b"\x00bob\x00abc123\x00admin" + ) + want_mac = want_mac.hexdigest() + + body = json.dumps( + { + "nonce": nonce, + "username": "bob", + "password": "abc123", + "admin": True, + "mac": want_mac, + } + ) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual("@bob:test", channel.json_body["user_id"]) + + # Now, try and reuse it + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('unrecognised nonce', channel.json_body["error"]) + + def test_missing_parts(self): + """ + Synapse will complain if you don't give nonce, username, password, and + mac. Admin and user_types are optional. Additional checks are done for length + and type. + """ + + def nonce(): + request, channel = self.make_request("GET", self.url) + self.render(request) + return channel.json_body["nonce"] + + # + # Nonce check + # + + # Must be present + body = json.dumps({}) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('nonce must be specified', channel.json_body["error"]) + + # + # Username checks + # + + # Must be present + body = json.dumps({"nonce": nonce()}) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('username must be specified', channel.json_body["error"]) + + # Must be a string + body = json.dumps({"nonce": nonce(), "username": 1234}) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('Invalid username', channel.json_body["error"]) + + # Must not have null bytes + body = json.dumps({"nonce": nonce(), "username": u"abcd\u0000"}) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('Invalid username', channel.json_body["error"]) + + # Must not have null bytes + body = json.dumps({"nonce": nonce(), "username": "a" * 1000}) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('Invalid username', channel.json_body["error"]) + + # + # Password checks + # + + # Must be present + body = json.dumps({"nonce": nonce(), "username": "a"}) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('password must be specified', channel.json_body["error"]) + + # Must be a string + body = json.dumps({"nonce": nonce(), "username": "a", "password": 1234}) + request, channel = self.make_request("POST", self.url, 
body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('Invalid password', channel.json_body["error"]) + + # Must not have null bytes + body = json.dumps( + {"nonce": nonce(), "username": "a", "password": u"abcd\u0000"} + ) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('Invalid password', channel.json_body["error"]) + + # Super long + body = json.dumps({"nonce": nonce(), "username": "a", "password": "A" * 1000}) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('Invalid password', channel.json_body["error"]) + + # + # user_type check + # + + # Invalid user_type + body = json.dumps({ + "nonce": nonce(), + "username": "a", + "password": "1234", + "user_type": "invalid"} + ) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('Invalid user type', channel.json_body["error"]) + + +class ShutdownRoomTestCase(unittest.HomeserverTestCase): + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + events.register_servlets, + room.register_servlets, + room.register_deprecated_servlets, + ] + + def prepare(self, reactor, clock, hs): + self.event_creation_handler = hs.get_event_creation_handler() + hs.config.user_consent_version = "1" + + consent_uri_builder = Mock() + consent_uri_builder.build_user_consent_uri.return_value = ( + "http://example.com" + ) + self.event_creation_handler._consent_uri_builder = consent_uri_builder + + self.store = hs.get_datastore() + + self.admin_user = self.register_user("admin", "pass", admin=True) + self.admin_user_tok = self.login("admin", "pass") + + self.other_user = self.register_user("user", "pass") + self.other_user_token = self.login("user", "pass") + + # Mark the admin user as having consented + self.get_success( + self.store.user_set_consent_version(self.admin_user, "1"), + ) + + def test_shutdown_room_consent(self): + """Test that we can shutdown rooms with local users who have not + yet accepted the privacy policy. This used to fail when we tried to + force part the user from the old room. 
+ """ + self.event_creation_handler._block_events_without_consent_error = None + + room_id = self.helper.create_room_as(self.other_user, tok=self.other_user_token) + + # Assert one user in room + users_in_room = self.get_success( + self.store.get_users_in_room(room_id), + ) + self.assertEqual([self.other_user], users_in_room) + + # Enable require consent to send events + self.event_creation_handler._block_events_without_consent_error = "Error" + + # Assert that the user is getting consent error + self.helper.send( + room_id, + body="foo", tok=self.other_user_token, expect_code=403, + ) + + # Test that the admin can still send shutdown + url = "admin/shutdown_room/" + room_id + request, channel = self.make_request( + "POST", + url.encode('ascii'), + json.dumps({"new_room_user_id": self.admin_user}), + access_token=self.admin_user_tok, + ) + self.render(request) + + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + + # Assert there is now no longer anyone in the room + users_in_room = self.get_success( + self.store.get_users_in_room(room_id), + ) + self.assertEqual([], users_in_room) + + @unittest.DEBUG + def test_shutdown_room_block_peek(self): + """Test that a world_readable room can no longer be peeked into after + it has been shut down. + """ + + self.event_creation_handler._block_events_without_consent_error = None + + room_id = self.helper.create_room_as(self.other_user, tok=self.other_user_token) + + # Enable world readable + url = "rooms/%s/state/m.room.history_visibility" % (room_id,) + request, channel = self.make_request( + "PUT", + url.encode('ascii'), + json.dumps({"history_visibility": "world_readable"}), + access_token=self.other_user_token, + ) + self.render(request) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + + # Test that the admin can still send shutdown + url = "admin/shutdown_room/" + room_id + request, channel = self.make_request( + "POST", + url.encode('ascii'), + json.dumps({"new_room_user_id": self.admin_user}), + access_token=self.admin_user_tok, + ) + self.render(request) + + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + + # Assert we can no longer peek into the room + self._assert_peek(room_id, expect_code=403) + + def _assert_peek(self, room_id, expect_code): + """Assert that the admin user can (or cannot) peek into the room. 
+ """ + + url = "rooms/%s/initialSync" % (room_id,) + request, channel = self.make_request( + "GET", + url.encode('ascii'), + access_token=self.admin_user_tok, + ) + self.render(request) + self.assertEqual( + expect_code, int(channel.result["code"]), msg=channel.result["body"], + ) + + url = "events?timeout=0&room_id=" + room_id + request, channel = self.make_request( + "GET", + url.encode('ascii'), + access_token=self.admin_user_tok, + ) + self.render(request) + self.assertEqual( + expect_code, int(channel.result["code"]), msg=channel.result["body"], + ) + + +class DeleteGroupTestCase(unittest.HomeserverTestCase): + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + groups.register_servlets, + ] + + def prepare(self, reactor, clock, hs): + self.store = hs.get_datastore() + + self.admin_user = self.register_user("admin", "pass", admin=True) + self.admin_user_tok = self.login("admin", "pass") + + self.other_user = self.register_user("user", "pass") + self.other_user_token = self.login("user", "pass") + + def test_delete_group(self): + # Create a new group + request, channel = self.make_request( + "POST", + "/create_group".encode('ascii'), + access_token=self.admin_user_tok, + content={ + "localpart": "test", + } + ) + + self.render(request) + self.assertEqual( + 200, int(channel.result["code"]), msg=channel.result["body"], + ) + + group_id = channel.json_body["group_id"] + + self._check_group(group_id, expect_code=200) + + # Invite/join another user + + url = "/groups/%s/admin/users/invite/%s" % (group_id, self.other_user) + request, channel = self.make_request( + "PUT", + url.encode('ascii'), + access_token=self.admin_user_tok, + content={} + ) + self.render(request) + self.assertEqual( + 200, int(channel.result["code"]), msg=channel.result["body"], + ) + + url = "/groups/%s/self/accept_invite" % (group_id,) + request, channel = self.make_request( + "PUT", + url.encode('ascii'), + access_token=self.other_user_token, + content={} + ) + self.render(request) + self.assertEqual( + 200, int(channel.result["code"]), msg=channel.result["body"], + ) + + # Check other user knows they're in the group + self.assertIn(group_id, self._get_groups_user_is_in(self.admin_user_tok)) + self.assertIn(group_id, self._get_groups_user_is_in(self.other_user_token)) + + # Now delete the group + url = "/admin/delete_group/" + group_id + request, channel = self.make_request( + "POST", + url.encode('ascii'), + access_token=self.admin_user_tok, + content={ + "localpart": "test", + } + ) + + self.render(request) + self.assertEqual( + 200, int(channel.result["code"]), msg=channel.result["body"], + ) + + # Check group returns 404 + self._check_group(group_id, expect_code=404) + + # Check users don't think they're in the group + self.assertNotIn(group_id, self._get_groups_user_is_in(self.admin_user_tok)) + self.assertNotIn(group_id, self._get_groups_user_is_in(self.other_user_token)) + + def _check_group(self, group_id, expect_code): + """Assert that trying to fetch the given group results in the given + HTTP status code + """ + + url = "/groups/%s/profile" % (group_id,) + request, channel = self.make_request( + "GET", + url.encode('ascii'), + access_token=self.admin_user_tok, + ) + + self.render(request) + self.assertEqual( + expect_code, int(channel.result["code"]), msg=channel.result["body"], + ) + + def _get_groups_user_is_in(self, access_token): + """Returns the list of groups the user is in (given their access token) + """ + request, channel = self.make_request( + "GET", + 
"/joined_groups".encode('ascii'), + access_token=access_token, + ) + + self.render(request) + self.assertEqual( + 200, int(channel.result["code"]), msg=channel.result["body"], + ) + + return channel.json_body["groups"] diff --git a/tests/rest/client/test_consent.py b/tests/rest/client/test_consent.py index 4294bbec2a..36e6c1c67d 100644 --- a/tests/rest/client/test_consent.py +++ b/tests/rest/client/test_consent.py @@ -15,8 +15,9 @@ import os +import synapse.rest.admin from synapse.api.urls import ConsentURIBuilder -from synapse.rest.client.v1 import admin, login, room +from synapse.rest.client.v1 import login, room from synapse.rest.consent import consent_resource from tests import unittest @@ -31,7 +32,7 @@ except Exception: class ConsentResourceTestCase(unittest.HomeserverTestCase): skip = "No Jinja installed" if not load_jinja2_templates else None servlets = [ - admin.register_servlets, + synapse.rest.admin.register_servlets, room.register_servlets, login.register_servlets, ] diff --git a/tests/rest/client/test_identity.py b/tests/rest/client/test_identity.py index ca63b2e6ed..d4fe0aee7d 100644 --- a/tests/rest/client/test_identity.py +++ b/tests/rest/client/test_identity.py @@ -15,7 +15,8 @@ import json -from synapse.rest.client.v1 import admin, login, room +import synapse.rest.admin +from synapse.rest.client.v1 import login, room from tests import unittest @@ -23,7 +24,7 @@ from tests import unittest class IdentityTestCase(unittest.HomeserverTestCase): servlets = [ - admin.register_servlets, + synapse.rest.admin.register_servlets, room.register_servlets, login.register_servlets, ] diff --git a/tests/rest/client/v1/test_admin.py b/tests/rest/client/v1/test_admin.py deleted file mode 100644 index c00ef21d75..0000000000 --- a/tests/rest/client/v1/test_admin.py +++ /dev/null @@ -1,616 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2018 New Vector Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import hashlib -import hmac -import json - -from mock import Mock - -from synapse.api.constants import UserTypes -from synapse.rest.client.v1 import admin, events, login, room -from synapse.rest.client.v2_alpha import groups - -from tests import unittest - - -class VersionTestCase(unittest.HomeserverTestCase): - - servlets = [ - admin.register_servlets, - login.register_servlets, - ] - - url = '/_matrix/client/r0/admin/server_version' - - def test_version_string(self): - self.register_user("admin", "pass", admin=True) - self.admin_token = self.login("admin", "pass") - - request, channel = self.make_request("GET", self.url, - access_token=self.admin_token) - self.render(request) - - self.assertEqual(200, int(channel.result["code"]), - msg=channel.result["body"]) - self.assertEqual({'server_version', 'python_version'}, - set(channel.json_body.keys())) - - def test_inaccessible_to_non_admins(self): - self.register_user("unprivileged-user", "pass", admin=False) - user_token = self.login("unprivileged-user", "pass") - - request, channel = self.make_request("GET", self.url, - access_token=user_token) - self.render(request) - - self.assertEqual(403, int(channel.result['code']), - msg=channel.result['body']) - - -class UserRegisterTestCase(unittest.HomeserverTestCase): - - servlets = [admin.register_servlets] - - def make_homeserver(self, reactor, clock): - - self.url = "/_matrix/client/r0/admin/register" - - self.registration_handler = Mock() - self.identity_handler = Mock() - self.login_handler = Mock() - self.device_handler = Mock() - self.device_handler.check_device_registered = Mock(return_value="FAKE") - - self.datastore = Mock(return_value=Mock()) - self.datastore.get_current_state_deltas = Mock(return_value=[]) - - self.secrets = Mock() - - self.hs = self.setup_test_homeserver() - - self.hs.config.registration_shared_secret = u"shared" - - self.hs.get_media_repository = Mock() - self.hs.get_deactivate_account_handler = Mock() - - return self.hs - - def test_disabled(self): - """ - If there is no shared secret, registration through this method will be - prevented. - """ - self.hs.config.registration_shared_secret = None - - request, channel = self.make_request("POST", self.url, b'{}') - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual( - 'Shared secret registration is not enabled', channel.json_body["error"] - ) - - def test_get_nonce(self): - """ - Calling GET on the endpoint will return a randomised nonce, using the - homeserver's secrets provider. - """ - secrets = Mock() - secrets.token_hex = Mock(return_value="abcd") - - self.hs.get_secrets = Mock(return_value=secrets) - - request, channel = self.make_request("GET", self.url) - self.render(request) - - self.assertEqual(channel.json_body, {"nonce": "abcd"}) - - def test_expired_nonce(self): - """ - Calling GET on the endpoint will return a randomised nonce, which will - only last for SALT_TIMEOUT (60s). 
- """ - request, channel = self.make_request("GET", self.url) - self.render(request) - nonce = channel.json_body["nonce"] - - # 59 seconds - self.reactor.advance(59) - - body = json.dumps({"nonce": nonce}) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('username must be specified', channel.json_body["error"]) - - # 61 seconds - self.reactor.advance(2) - - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('unrecognised nonce', channel.json_body["error"]) - - def test_register_incorrect_nonce(self): - """ - Only the provided nonce can be used, as it's checked in the MAC. - """ - request, channel = self.make_request("GET", self.url) - self.render(request) - nonce = channel.json_body["nonce"] - - want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1) - want_mac.update(b"notthenonce\x00bob\x00abc123\x00admin") - want_mac = want_mac.hexdigest() - - body = json.dumps( - { - "nonce": nonce, - "username": "bob", - "password": "abc123", - "admin": True, - "mac": want_mac, - } - ) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual("HMAC incorrect", channel.json_body["error"]) - - def test_register_correct_nonce(self): - """ - When the correct nonce is provided, and the right key is provided, the - user is registered. - """ - request, channel = self.make_request("GET", self.url) - self.render(request) - nonce = channel.json_body["nonce"] - - want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1) - want_mac.update( - nonce.encode('ascii') + b"\x00bob\x00abc123\x00admin\x00support" - ) - want_mac = want_mac.hexdigest() - - body = json.dumps( - { - "nonce": nonce, - "username": "bob", - "password": "abc123", - "admin": True, - "user_type": UserTypes.SUPPORT, - "mac": want_mac, - } - ) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual("@bob:test", channel.json_body["user_id"]) - - def test_nonce_reuse(self): - """ - A valid unrecognised nonce. 
- """ - request, channel = self.make_request("GET", self.url) - self.render(request) - nonce = channel.json_body["nonce"] - - want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1) - want_mac.update( - nonce.encode('ascii') + b"\x00bob\x00abc123\x00admin" - ) - want_mac = want_mac.hexdigest() - - body = json.dumps( - { - "nonce": nonce, - "username": "bob", - "password": "abc123", - "admin": True, - "mac": want_mac, - } - ) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual("@bob:test", channel.json_body["user_id"]) - - # Now, try and reuse it - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('unrecognised nonce', channel.json_body["error"]) - - def test_missing_parts(self): - """ - Synapse will complain if you don't give nonce, username, password, and - mac. Admin and user_types are optional. Additional checks are done for length - and type. - """ - - def nonce(): - request, channel = self.make_request("GET", self.url) - self.render(request) - return channel.json_body["nonce"] - - # - # Nonce check - # - - # Must be present - body = json.dumps({}) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('nonce must be specified', channel.json_body["error"]) - - # - # Username checks - # - - # Must be present - body = json.dumps({"nonce": nonce()}) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('username must be specified', channel.json_body["error"]) - - # Must be a string - body = json.dumps({"nonce": nonce(), "username": 1234}) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('Invalid username', channel.json_body["error"]) - - # Must not have null bytes - body = json.dumps({"nonce": nonce(), "username": u"abcd\u0000"}) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('Invalid username', channel.json_body["error"]) - - # Must not have null bytes - body = json.dumps({"nonce": nonce(), "username": "a" * 1000}) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('Invalid username', channel.json_body["error"]) - - # - # Password checks - # - - # Must be present - body = json.dumps({"nonce": nonce(), "username": "a"}) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('password must be specified', channel.json_body["error"]) - - # Must be a string - body = json.dumps({"nonce": nonce(), "username": "a", "password": 1234}) - request, channel = self.make_request("POST", self.url, 
body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('Invalid password', channel.json_body["error"]) - - # Must not have null bytes - body = json.dumps( - {"nonce": nonce(), "username": "a", "password": u"abcd\u0000"} - ) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('Invalid password', channel.json_body["error"]) - - # Super long - body = json.dumps({"nonce": nonce(), "username": "a", "password": "A" * 1000}) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('Invalid password', channel.json_body["error"]) - - # - # user_type check - # - - # Invalid user_type - body = json.dumps({ - "nonce": nonce(), - "username": "a", - "password": "1234", - "user_type": "invalid"} - ) - request, channel = self.make_request("POST", self.url, body.encode('utf8')) - self.render(request) - - self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual('Invalid user type', channel.json_body["error"]) - - -class ShutdownRoomTestCase(unittest.HomeserverTestCase): - servlets = [ - admin.register_servlets, - login.register_servlets, - events.register_servlets, - room.register_servlets, - room.register_deprecated_servlets, - ] - - def prepare(self, reactor, clock, hs): - self.event_creation_handler = hs.get_event_creation_handler() - hs.config.user_consent_version = "1" - - consent_uri_builder = Mock() - consent_uri_builder.build_user_consent_uri.return_value = ( - "http://example.com" - ) - self.event_creation_handler._consent_uri_builder = consent_uri_builder - - self.store = hs.get_datastore() - - self.admin_user = self.register_user("admin", "pass", admin=True) - self.admin_user_tok = self.login("admin", "pass") - - self.other_user = self.register_user("user", "pass") - self.other_user_token = self.login("user", "pass") - - # Mark the admin user as having consented - self.get_success( - self.store.user_set_consent_version(self.admin_user, "1"), - ) - - def test_shutdown_room_consent(self): - """Test that we can shutdown rooms with local users who have not - yet accepted the privacy policy. This used to fail when we tried to - force part the user from the old room. 
- """ - self.event_creation_handler._block_events_without_consent_error = None - - room_id = self.helper.create_room_as(self.other_user, tok=self.other_user_token) - - # Assert one user in room - users_in_room = self.get_success( - self.store.get_users_in_room(room_id), - ) - self.assertEqual([self.other_user], users_in_room) - - # Enable require consent to send events - self.event_creation_handler._block_events_without_consent_error = "Error" - - # Assert that the user is getting consent error - self.helper.send( - room_id, - body="foo", tok=self.other_user_token, expect_code=403, - ) - - # Test that the admin can still send shutdown - url = "admin/shutdown_room/" + room_id - request, channel = self.make_request( - "POST", - url.encode('ascii'), - json.dumps({"new_room_user_id": self.admin_user}), - access_token=self.admin_user_tok, - ) - self.render(request) - - self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) - - # Assert there is now no longer anyone in the room - users_in_room = self.get_success( - self.store.get_users_in_room(room_id), - ) - self.assertEqual([], users_in_room) - - @unittest.DEBUG - def test_shutdown_room_block_peek(self): - """Test that a world_readable room can no longer be peeked into after - it has been shut down. - """ - - self.event_creation_handler._block_events_without_consent_error = None - - room_id = self.helper.create_room_as(self.other_user, tok=self.other_user_token) - - # Enable world readable - url = "rooms/%s/state/m.room.history_visibility" % (room_id,) - request, channel = self.make_request( - "PUT", - url.encode('ascii'), - json.dumps({"history_visibility": "world_readable"}), - access_token=self.other_user_token, - ) - self.render(request) - self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) - - # Test that the admin can still send shutdown - url = "admin/shutdown_room/" + room_id - request, channel = self.make_request( - "POST", - url.encode('ascii'), - json.dumps({"new_room_user_id": self.admin_user}), - access_token=self.admin_user_tok, - ) - self.render(request) - - self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) - - # Assert we can no longer peek into the room - self._assert_peek(room_id, expect_code=403) - - def _assert_peek(self, room_id, expect_code): - """Assert that the admin user can (or cannot) peek into the room. 
- """ - - url = "rooms/%s/initialSync" % (room_id,) - request, channel = self.make_request( - "GET", - url.encode('ascii'), - access_token=self.admin_user_tok, - ) - self.render(request) - self.assertEqual( - expect_code, int(channel.result["code"]), msg=channel.result["body"], - ) - - url = "events?timeout=0&room_id=" + room_id - request, channel = self.make_request( - "GET", - url.encode('ascii'), - access_token=self.admin_user_tok, - ) - self.render(request) - self.assertEqual( - expect_code, int(channel.result["code"]), msg=channel.result["body"], - ) - - -class DeleteGroupTestCase(unittest.HomeserverTestCase): - servlets = [ - admin.register_servlets, - login.register_servlets, - groups.register_servlets, - ] - - def prepare(self, reactor, clock, hs): - self.store = hs.get_datastore() - - self.admin_user = self.register_user("admin", "pass", admin=True) - self.admin_user_tok = self.login("admin", "pass") - - self.other_user = self.register_user("user", "pass") - self.other_user_token = self.login("user", "pass") - - def test_delete_group(self): - # Create a new group - request, channel = self.make_request( - "POST", - "/create_group".encode('ascii'), - access_token=self.admin_user_tok, - content={ - "localpart": "test", - } - ) - - self.render(request) - self.assertEqual( - 200, int(channel.result["code"]), msg=channel.result["body"], - ) - - group_id = channel.json_body["group_id"] - - self._check_group(group_id, expect_code=200) - - # Invite/join another user - - url = "/groups/%s/admin/users/invite/%s" % (group_id, self.other_user) - request, channel = self.make_request( - "PUT", - url.encode('ascii'), - access_token=self.admin_user_tok, - content={} - ) - self.render(request) - self.assertEqual( - 200, int(channel.result["code"]), msg=channel.result["body"], - ) - - url = "/groups/%s/self/accept_invite" % (group_id,) - request, channel = self.make_request( - "PUT", - url.encode('ascii'), - access_token=self.other_user_token, - content={} - ) - self.render(request) - self.assertEqual( - 200, int(channel.result["code"]), msg=channel.result["body"], - ) - - # Check other user knows they're in the group - self.assertIn(group_id, self._get_groups_user_is_in(self.admin_user_tok)) - self.assertIn(group_id, self._get_groups_user_is_in(self.other_user_token)) - - # Now delete the group - url = "/admin/delete_group/" + group_id - request, channel = self.make_request( - "POST", - url.encode('ascii'), - access_token=self.admin_user_tok, - content={ - "localpart": "test", - } - ) - - self.render(request) - self.assertEqual( - 200, int(channel.result["code"]), msg=channel.result["body"], - ) - - # Check group returns 404 - self._check_group(group_id, expect_code=404) - - # Check users don't think they're in the group - self.assertNotIn(group_id, self._get_groups_user_is_in(self.admin_user_tok)) - self.assertNotIn(group_id, self._get_groups_user_is_in(self.other_user_token)) - - def _check_group(self, group_id, expect_code): - """Assert that trying to fetch the given group results in the given - HTTP status code - """ - - url = "/groups/%s/profile" % (group_id,) - request, channel = self.make_request( - "GET", - url.encode('ascii'), - access_token=self.admin_user_tok, - ) - - self.render(request) - self.assertEqual( - expect_code, int(channel.result["code"]), msg=channel.result["body"], - ) - - def _get_groups_user_is_in(self, access_token): - """Returns the list of groups the user is in (given their access token) - """ - request, channel = self.make_request( - "GET", - 
"/joined_groups".encode('ascii'), - access_token=access_token, - ) - - self.render(request) - self.assertEqual( - 200, int(channel.result["code"]), msg=channel.result["body"], - ) - - return channel.json_body["groups"] diff --git a/tests/rest/client/v1/test_events.py b/tests/rest/client/v1/test_events.py index 36d8547275..5cb1c1ae9f 100644 --- a/tests/rest/client/v1/test_events.py +++ b/tests/rest/client/v1/test_events.py @@ -17,7 +17,8 @@ from mock import Mock, NonCallableMock -from synapse.rest.client.v1 import admin, events, login, room +import synapse.rest.admin +from synapse.rest.client.v1 import events, login, room from tests import unittest @@ -28,7 +29,7 @@ class EventStreamPermissionsTestCase(unittest.HomeserverTestCase): servlets = [ events.register_servlets, room.register_servlets, - admin.register_servlets, + synapse.rest.admin.register_servlets, login.register_servlets, ] diff --git a/tests/rest/client/v1/test_login.py b/tests/rest/client/v1/test_login.py index 86312f1096..8d9ef877f6 100644 --- a/tests/rest/client/v1/test_login.py +++ b/tests/rest/client/v1/test_login.py @@ -1,6 +1,7 @@ import json -from synapse.rest.client.v1 import admin, login +import synapse.rest.admin +from synapse.rest.client.v1 import login from tests import unittest @@ -10,7 +11,7 @@ LOGIN_URL = b"/_matrix/client/r0/login" class LoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ - admin.register_servlets, + synapse.rest.admin.register_servlets, login.register_servlets, ] diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py index 015c144248..1a34924f3e 100644 --- a/tests/rest/client/v1/test_rooms.py +++ b/tests/rest/client/v1/test_rooms.py @@ -22,8 +22,9 @@ from six.moves.urllib import parse as urlparse from twisted.internet import defer +import synapse.rest.admin from synapse.api.constants import Membership -from synapse.rest.client.v1 import admin, login, room +from synapse.rest.client.v1 import login, room from tests import unittest @@ -803,7 +804,7 @@ class RoomMessageListTestCase(RoomBase): class RoomSearchTestCase(unittest.HomeserverTestCase): servlets = [ - admin.register_servlets, + synapse.rest.admin.register_servlets, room.register_servlets, login.register_servlets, ] diff --git a/tests/rest/client/v2_alpha/test_auth.py b/tests/rest/client/v2_alpha/test_auth.py index 7fa120a10f..67021185d0 100644 --- a/tests/rest/client/v2_alpha/test_auth.py +++ b/tests/rest/client/v2_alpha/test_auth.py @@ -16,8 +16,8 @@ from twisted.internet.defer import succeed +import synapse.rest.admin from synapse.api.constants import LoginType -from synapse.rest.client.v1 import admin from synapse.rest.client.v2_alpha import auth, register from tests import unittest @@ -27,7 +27,7 @@ class FallbackAuthTests(unittest.HomeserverTestCase): servlets = [ auth.register_servlets, - admin.register_servlets, + synapse.rest.admin.register_servlets, register.register_servlets, ] hijack_auth = False diff --git a/tests/rest/client/v2_alpha/test_capabilities.py b/tests/rest/client/v2_alpha/test_capabilities.py index bbfc77e829..8134163e20 100644 --- a/tests/rest/client/v2_alpha/test_capabilities.py +++ b/tests/rest/client/v2_alpha/test_capabilities.py @@ -12,9 +12,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- +import synapse.rest.admin from synapse.api.room_versions import DEFAULT_ROOM_VERSION, KNOWN_ROOM_VERSIONS -from synapse.rest.client.v1 import admin, login +from synapse.rest.client.v1 import login from synapse.rest.client.v2_alpha import capabilities from tests import unittest @@ -23,7 +23,7 @@ from tests import unittest class CapabilitiesTestCase(unittest.HomeserverTestCase): servlets = [ - admin.register_servlets, + synapse.rest.admin.register_servlets, capabilities.register_servlets, login.register_servlets, ] diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index 3d44667489..4d698af03a 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -4,10 +4,11 @@ import os import pkg_resources +import synapse.rest.admin from synapse.api.constants import LoginType from synapse.api.errors import Codes from synapse.appservice import ApplicationService -from synapse.rest.client.v1 import admin, login +from synapse.rest.client.v1 import login from synapse.rest.client.v2_alpha import account_validity, register, sync from tests import unittest @@ -198,7 +199,7 @@ class AccountValidityTestCase(unittest.HomeserverTestCase): servlets = [ register.register_servlets, - admin.register_servlets, + synapse.rest.admin.register_servlets, login.register_servlets, sync.register_servlets, account_validity.register_servlets, @@ -307,7 +308,7 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): skip = "No Jinja installed" if not load_jinja2_templates else None servlets = [ register.register_servlets, - admin.register_servlets, + synapse.rest.admin.register_servlets, login.register_servlets, sync.register_servlets, account_validity.register_servlets, diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py index 99b716f00a..65fac1d5ce 100644 --- a/tests/rest/client/v2_alpha/test_sync.py +++ b/tests/rest/client/v2_alpha/test_sync.py @@ -15,7 +15,8 @@ from mock import Mock -from synapse.rest.client.v1 import admin, login, room +import synapse.rest.admin +from synapse.rest.client.v1 import login, room from synapse.rest.client.v2_alpha import sync from tests import unittest @@ -72,7 +73,7 @@ class FilterTestCase(unittest.HomeserverTestCase): class SyncTypingTests(unittest.HomeserverTestCase): servlets = [ - admin.register_servlets, + synapse.rest.admin.register_servlets, room.register_servlets, login.register_servlets, sync.register_servlets, diff --git a/tests/server_notices/test_consent.py b/tests/server_notices/test_consent.py index 95badc985e..e8b8ac5725 100644 --- a/tests/server_notices/test_consent.py +++ b/tests/server_notices/test_consent.py @@ -12,8 +12,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- -from synapse.rest.client.v1 import admin, login, room +import synapse.rest.admin +from synapse.rest.client.v1 import login, room from synapse.rest.client.v2_alpha import sync from tests import unittest @@ -23,7 +23,7 @@ class ConsentNoticesTests(unittest.HomeserverTestCase): servlets = [ sync.register_servlets, - admin.register_servlets, + synapse.rest.admin.register_servlets, login.register_servlets, room.register_servlets, ] diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index 858efe4992..b0f6fd34d8 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -18,8 +18,9 @@ from mock import Mock from twisted.internet import defer +import synapse.rest.admin from synapse.http.site import XForwardedForRequest -from synapse.rest.client.v1 import admin, login +from synapse.rest.client.v1 import login from tests import unittest @@ -205,7 +206,7 @@ class ClientIpStoreTestCase(unittest.HomeserverTestCase): class ClientIpAuthTestCase(unittest.HomeserverTestCase): - servlets = [admin.register_servlets, login.register_servlets] + servlets = [synapse.rest.admin.register_servlets, login.register_servlets] def make_homeserver(self, reactor, clock): hs = self.setup_test_homeserver() -- cgit 1.5.1 From 03ad6bd4832ce23ac8d2f5d1308cb2caefc651e2 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 May 2019 15:34:38 +0100 Subject: changelog --- changelog.d/5119.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5119.feature diff --git a/changelog.d/5119.feature b/changelog.d/5119.feature new file mode 100644 index 0000000000..a3a73f09d3 --- /dev/null +++ b/changelog.d/5119.feature @@ -0,0 +1 @@ +Move admin APIs to `/_synapse/admin/v1`. (The old paths are retained for backwards-compatibility, for now). \ No newline at end of file -- cgit 1.5.1 From cc4bd762df35dfceb8250d85f400f9f691477eff Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 May 2019 16:48:23 +0100 Subject: Fix sample config --- docs/sample_config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index a7f6bf31ac..61be7502bc 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -136,8 +136,8 @@ pid_file: DATADIR/homeserver.pid # # Valid resource names are: # -# client: the client-server API (/_matrix/client). Also implies 'media' and -# 'static'. +# client: the client-server API (/_matrix/client), and the synapse admin +# API (/_synapse/admin). Also implies 'media' and 'static'. # # consent: user consent forms (/_matrix/consent). See # docs/consent_tracking.md. -- cgit 1.5.1 From f203c98794aa5344c1cbc26e526bd334fd80a1c2 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 May 2019 17:49:11 +0100 Subject: fix examples --- synapse/rest/admin/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index ddf9ced1a3..afac8b1724 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -640,7 +640,7 @@ class ResetPasswordRestServlet(RestServlet): """Post request to allow an administrator reset password for a user. This needs user to have administrator access in Synapse. 
Example: - http://localhost:8008/_matrix/client/api/v1/admin/reset_password/ + http://localhost:8008/_synapse/admin/v1/reset_password/ @user:to_reset_password?access_token=admin_access_token JsonBodyToSend: { @@ -683,7 +683,7 @@ class GetUsersPaginatedRestServlet(RestServlet): """Get request to get specific number of users from Synapse. This needs user to have administrator access in Synapse. Example: - http://localhost:8008/_matrix/client/api/v1/admin/users_paginate/ + http://localhost:8008/_synapse/admin/v1/users_paginate/ @admin:user?access_token=admin_access_token&start=0&limit=10 Returns: 200 OK with json object {list[dict[str, Any]], count} or empty object. @@ -731,7 +731,7 @@ class GetUsersPaginatedRestServlet(RestServlet): """Post request to get specific number of users from Synapse.. This needs user to have administrator access in Synapse. Example: - http://localhost:8008/_matrix/client/api/v1/admin/users_paginate/ + http://localhost:8008/_synapse/admin/v1/users_paginate/ @admin:user?access_token=admin_access_token JsonBodyToSend: { @@ -766,7 +766,7 @@ class SearchUsersRestServlet(RestServlet): search term. This needs user to have administrator access in Synapse. Example: - http://localhost:8008/_matrix/client/api/v1/admin/search_users/ + http://localhost:8008/_synapse/admin/v1/search_users/ @admin:user?access_token=admin_access_token&term=alice Returns: 200 OK with json object {list[dict[str, Any]], count} or empty object. -- cgit 1.5.1 From 0836cbb9f5aaf9260161fa998b2fe4c0ea7e692b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 2 May 2019 10:45:52 +0100 Subject: Factor out an "assert_requester_is_admin" function (#5120) Rather than copying-and-pasting the same four lines hundreds of times --- changelog.d/5120.misc | 1 + synapse/api/auth.py | 2 +- synapse/rest/admin/__init__.py | 95 +++++++++--------------------------------- synapse/rest/admin/_base.py | 59 ++++++++++++++++++++++++++ 4 files changed, 81 insertions(+), 76 deletions(-) create mode 100644 changelog.d/5120.misc create mode 100644 synapse/rest/admin/_base.py diff --git a/changelog.d/5120.misc b/changelog.d/5120.misc new file mode 100644 index 0000000000..6575f29322 --- /dev/null +++ b/changelog.d/5120.misc @@ -0,0 +1 @@ +Factor out an "assert_requester_is_admin" function. diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 960e66dbdc..0c6c93a87b 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -556,7 +556,7 @@ class Auth(object): """ Check if the given user is a local server admin. 
Args: - user (str): mxid of user to check + user (UserID): user to check Returns: bool: True if the user is an admin diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index afac8b1724..d02f5198b8 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -36,6 +36,7 @@ from synapse.http.servlet import ( parse_json_object_from_request, parse_string, ) +from synapse.rest.admin._base import assert_requester_is_admin, assert_user_is_admin from synapse.types import UserID, create_requester from synapse.util.versionstring import get_version_string @@ -75,15 +76,7 @@ class UsersRestServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, user_id): target_user = UserID.from_string(user_id) - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") - - # To allow all users to get the users list - # if not is_admin and target_user != auth_user: - # raise AuthError(403, "You are not a server admin") + yield assert_requester_is_admin(self.auth, request) if not self.hs.is_mine(target_user): raise SynapseError(400, "Can only users a local user") @@ -101,11 +94,7 @@ class VersionServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request): - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") + yield assert_requester_is_admin(self.auth, request) ret = { 'server_version': get_version_string(synapse), @@ -265,10 +254,9 @@ class WhoisRestServlet(RestServlet): target_user = UserID.from_string(user_id) requester = yield self.auth.get_user_by_req(request) auth_user = requester.user - is_admin = yield self.auth.is_server_admin(requester.user) - if not is_admin and target_user != auth_user: - raise AuthError(403, "You are not a server admin") + if target_user != auth_user: + yield assert_user_is_admin(self.auth, auth_user) if not self.hs.is_mine(target_user): raise SynapseError(400, "Can only whois a local user") @@ -287,11 +275,7 @@ class PurgeMediaCacheRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request): - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") + yield assert_requester_is_admin(self.auth, request) before_ts = parse_integer(request, "before_ts", required=True) logger.info("before_ts: %r", before_ts) @@ -318,11 +302,7 @@ class PurgeHistoryRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request, room_id, event_id): - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") + yield assert_requester_is_admin(self.auth, request) body = parse_json_object_from_request(request, allow_empty_body=True) @@ -414,11 +394,7 @@ class PurgeHistoryStatusRestServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, purge_id): - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") + yield assert_requester_is_admin(self.auth, request) purge_status = self.pagination_handler.get_purge_status(purge_id) if purge_status is None: @@ 
-436,6 +412,7 @@ class DeactivateAccountRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request, target_user_id): + yield assert_requester_is_admin(self.auth, request) body = parse_json_object_from_request(request, allow_empty_body=True) erase = body.get("erase", False) if not isinstance(erase, bool): @@ -446,11 +423,6 @@ class DeactivateAccountRestServlet(RestServlet): ) UserID.from_string(target_user_id) - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") result = yield self._deactivate_account_handler.deactivate_account( target_user_id, erase, @@ -490,9 +462,7 @@ class ShutdownRoomRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request, room_id): requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - if not is_admin: - raise AuthError(403, "You are not a server admin") + yield assert_user_is_admin(self.auth, requester.user) content = parse_json_object_from_request(request) assert_params_in_dict(content, ["new_room_user_id"]) @@ -605,9 +575,7 @@ class QuarantineMediaInRoom(RestServlet): @defer.inlineCallbacks def on_POST(self, request, room_id): requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - if not is_admin: - raise AuthError(403, "You are not a server admin") + yield assert_user_is_admin(self.auth, requester.user) num_quarantined = yield self.store.quarantine_media_ids_in_room( room_id, requester.user.to_string(), @@ -662,12 +630,10 @@ class ResetPasswordRestServlet(RestServlet): """Post request to allow an administrator reset password for a user. This needs user to have administrator access in Synapse. """ - UserID.from_string(target_user_id) requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) + yield assert_user_is_admin(self.auth, requester.user) - if not is_admin: - raise AuthError(403, "You are not a server admin") + UserID.from_string(target_user_id) params = parse_json_object_from_request(request) assert_params_in_dict(params, ["new_password"]) @@ -701,16 +667,9 @@ class GetUsersPaginatedRestServlet(RestServlet): """Get request to get specific number of users from Synapse. This needs user to have administrator access in Synapse. """ - target_user = UserID.from_string(target_user_id) - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) + yield assert_requester_is_admin(self.auth, request) - if not is_admin: - raise AuthError(403, "You are not a server admin") - - # To allow all users to get the users list - # if not is_admin and target_user != auth_user: - # raise AuthError(403, "You are not a server admin") + target_user = UserID.from_string(target_user_id) if not self.hs.is_mine(target_user): raise SynapseError(400, "Can only users a local user") @@ -741,12 +700,8 @@ class GetUsersPaginatedRestServlet(RestServlet): Returns: 200 OK with json object {list[dict[str, Any]], count} or empty object. 
""" + yield assert_requester_is_admin(self.auth, request) UserID.from_string(target_user_id) - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") order = "name" # order by name in user table params = parse_json_object_from_request(request) @@ -785,12 +740,9 @@ class SearchUsersRestServlet(RestServlet): search term. This needs user to have a administrator access in Synapse. """ - target_user = UserID.from_string(target_user_id) - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) + yield assert_requester_is_admin(self.auth, request) - if not is_admin: - raise AuthError(403, "You are not a server admin") + target_user = UserID.from_string(target_user_id) # To allow all users to get the users list # if not is_admin and target_user != auth_user: @@ -821,10 +773,7 @@ class DeleteGroupAdminRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request, group_id): requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") + yield assert_user_is_admin(self.auth, requester.user) if not self.is_mine_id(group_id): raise SynapseError(400, "Can only delete local groups") @@ -847,11 +796,7 @@ class AccountValidityRenewServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request): - requester = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(requester.user) - - if not is_admin: - raise AuthError(403, "You are not a server admin") + yield assert_requester_is_admin(self.auth, request) body = parse_json_object_from_request(request) diff --git a/synapse/rest/admin/_base.py b/synapse/rest/admin/_base.py new file mode 100644 index 0000000000..881d67b89c --- /dev/null +++ b/synapse/rest/admin/_base.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from twisted.internet import defer + +from synapse.api.errors import AuthError + + +@defer.inlineCallbacks +def assert_requester_is_admin(auth, request): + """Verify that the requester is an admin user + + WARNING: MAKE SURE YOU YIELD ON THE RESULT! + + Args: + auth (synapse.api.auth.Auth): + request (twisted.web.server.Request): incoming request + + Returns: + Deferred + + Raises: + AuthError if the requester is not an admin + """ + requester = yield auth.get_user_by_req(request) + yield assert_user_is_admin(auth, requester.user) + + +@defer.inlineCallbacks +def assert_user_is_admin(auth, user_id): + """Verify that the given user is an admin user + + WARNING: MAKE SURE YOU YIELD ON THE RESULT! 
+ + Args: + auth (synapse.api.auth.Auth): + user_id (UserID): + + Returns: + Deferred + + Raises: + AuthError if the user is not an admin + """ + + is_admin = yield auth.is_server_admin(user_id) + if not is_admin: + raise AuthError(403, "You are not a server admin") -- cgit 1.5.1 From 84196cb2310ca55100f32cfecbc62810085418e5 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 2 May 2019 09:21:29 +0100 Subject: Add some limitations to alias creation --- changelog.d/5124.bugfix | 1 + docs/sample_config.yaml | 5 +++++ synapse/config/server.py | 11 +++++++++++ synapse/handlers/directory.py | 22 +++++++++++++++++++++- synapse/handlers/room.py | 3 ++- 5 files changed, 40 insertions(+), 2 deletions(-) create mode 100644 changelog.d/5124.bugfix diff --git a/changelog.d/5124.bugfix b/changelog.d/5124.bugfix new file mode 100644 index 0000000000..46df1e9fd5 --- /dev/null +++ b/changelog.d/5124.bugfix @@ -0,0 +1 @@ +Add some missing limitations to room alias creation. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index a7f6bf31ac..ec18516b5c 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -239,6 +239,11 @@ listeners: # Used by phonehome stats to group together related servers. #server_context: context +# Whether to require a user to be in the room to add an alias to it. +# Defaults to 'true'. +# +#require_membership_for_aliases: false + ## TLS ## diff --git a/synapse/config/server.py b/synapse/config/server.py index cdf1e4d286..b1c9d25c71 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -134,6 +134,12 @@ class ServerConfig(Config): # sending out any replication updates. self.replication_torture_level = config.get("replication_torture_level") + # Whether to require a user to be in the room to add an alias to it. + # Defaults to True. + self.require_membership_for_aliases = config.get( + "require_membership_for_aliases", True, + ) + self.listeners = [] for listener in config.get("listeners", []): if not isinstance(listener.get("port", None), int): @@ -490,6 +496,11 @@ class ServerConfig(Config): # Used by phonehome stats to group together related servers. #server_context: context + + # Whether to require a user to be in the room to add an alias to it. + # Defaults to 'true'. 
+ # + #require_membership_for_aliases: false """ % locals() def read_arguments(self, args): diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 27bd06df5d..50c587aa61 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -36,6 +36,7 @@ logger = logging.getLogger(__name__) class DirectoryHandler(BaseHandler): + MAX_ALIAS_LENGTH = 255 def __init__(self, hs): super(DirectoryHandler, self).__init__(hs) @@ -43,8 +44,10 @@ class DirectoryHandler(BaseHandler): self.state = hs.get_state_handler() self.appservice_handler = hs.get_application_service_handler() self.event_creation_handler = hs.get_event_creation_handler() + self.store = hs.get_datastore() self.config = hs.config self.enable_room_list_search = hs.config.enable_room_list_search + self.require_membership = hs.config.require_membership_for_aliases self.federation = hs.get_federation_client() hs.get_federation_registry().register_query_handler( @@ -83,7 +86,7 @@ class DirectoryHandler(BaseHandler): @defer.inlineCallbacks def create_association(self, requester, room_alias, room_id, servers=None, - send_event=True): + send_event=True, check_membership=True): """Attempt to create a new alias Args: @@ -93,6 +96,8 @@ class DirectoryHandler(BaseHandler): servers (list[str]|None): List of servers that others servers should try and join via send_event (bool): Whether to send an updated m.room.aliases event + check_membership (bool): Whether to check if the user is in the room + before the alias can be set (if the server's config requires it). Returns: Deferred @@ -100,6 +105,13 @@ class DirectoryHandler(BaseHandler): user_id = requester.user.to_string() + if len(room_alias.to_string()) > self.MAX_ALIAS_LENGTH: + raise SynapseError( + 400, + "Can't create aliases longer than %s characters" % self.MAX_ALIAS_LENGTH, + Codes.INVALID_PARAM, + ) + service = requester.app_service if service: if not service.is_interested_in_alias(room_alias.to_string()): @@ -108,6 +120,14 @@ class DirectoryHandler(BaseHandler): " this kind of alias.", errcode=Codes.EXCLUSIVE ) else: + if self.require_membership and check_membership: + rooms_for_user = yield self.store.get_rooms_for_user(user_id) + if room_id not in rooms_for_user: + raise AuthError( + 403, + "You must be in the room to create an alias for it", + ) + if not self.spam_checker.user_may_create_room_alias(user_id, room_alias): raise AuthError( 403, "This user is not permitted to create this alias", diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 17628e2684..e37ae96899 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -402,7 +402,7 @@ class RoomCreationHandler(BaseHandler): yield directory_handler.create_association( requester, RoomAlias.from_string(alias), new_room_id, servers=(self.hs.hostname, ), - send_event=False, + send_event=False, check_membership=False, ) logger.info("Moved alias %s to new room", alias) except SynapseError as e: @@ -538,6 +538,7 @@ class RoomCreationHandler(BaseHandler): room_alias=room_alias, servers=[self.hs.hostname], send_event=False, + check_membership=False, ) preset_config = config.get( -- cgit 1.5.1 From 12f9d51e826058998cb11759e068de8977ddd3d5 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 2 May 2019 11:59:16 +0100 Subject: Add admin api for sending server_notices (#5121) --- changelog.d/5121.feature | 1 + docs/admin_api/server_notices.md | 48 ++++++++++++ docs/server_notices.md | 25 ++---- synapse/rest/__init__.py | 
4 +- synapse/rest/admin/__init__.py | 17 +++- synapse/rest/admin/server_notice_servlet.py | 100 ++++++++++++++++++++++++ tests/handlers/test_user_directory.py | 4 +- tests/push/test_email.py | 2 +- tests/push/test_http.py | 2 +- tests/rest/admin/test_admin.py | 8 +- tests/rest/client/test_consent.py | 2 +- tests/rest/client/test_identity.py | 2 +- tests/rest/client/v1/test_events.py | 2 +- tests/rest/client/v1/test_login.py | 2 +- tests/rest/client/v1/test_rooms.py | 2 +- tests/rest/client/v2_alpha/test_auth.py | 2 +- tests/rest/client/v2_alpha/test_capabilities.py | 2 +- tests/rest/client/v2_alpha/test_register.py | 4 +- tests/rest/client/v2_alpha/test_sync.py | 2 +- tests/server_notices/test_consent.py | 2 +- tests/storage/test_client_ips.py | 5 +- 21 files changed, 196 insertions(+), 42 deletions(-) create mode 100644 changelog.d/5121.feature create mode 100644 docs/admin_api/server_notices.md create mode 100644 synapse/rest/admin/server_notice_servlet.py diff --git a/changelog.d/5121.feature b/changelog.d/5121.feature new file mode 100644 index 0000000000..54b228680d --- /dev/null +++ b/changelog.d/5121.feature @@ -0,0 +1 @@ +Implement an admin API for sending server notices. Many thanks to @krombel who provided a foundation for this work. diff --git a/docs/admin_api/server_notices.md b/docs/admin_api/server_notices.md new file mode 100644 index 0000000000..5ddd21cfb2 --- /dev/null +++ b/docs/admin_api/server_notices.md @@ -0,0 +1,48 @@ +# Server Notices + +The API to send notices is as follows: + +``` +POST /_synapse/admin/v1/send_server_notice +``` + +or: + +``` +PUT /_synapse/admin/v1/send_server_notice/{txnId} +``` + +You will need to authenticate with an access token for an admin user. + +When using the `PUT` form, retransmissions with the same transaction ID will be +ignored in the same way as with `PUT +/_matrix/client/r0/rooms/{roomId}/send/{eventType}/{txnId}`. + +The request body should look something like the following: + +```json +{ + "user_id": "@target_user:server_name", + "content": { + "msgtype": "m.text", + "body": "This is my message" + } +} +``` + +You can optionally include the following additional parameters: + +* `type`: the type of event. Defaults to `m.room.message`. +* `state_key`: Setting this will result in a state event being sent. + + +Once the notice has been sent, the APU will return the following response: + +```json +{ + "event_id": "" +} +``` + +Note that server notices must be enabled in `homeserver.yaml` before this API +can be used. See [server_notices.md](../server_notices.md) for more information. diff --git a/docs/server_notices.md b/docs/server_notices.md index 58f8776319..950a6608e9 100644 --- a/docs/server_notices.md +++ b/docs/server_notices.md @@ -1,5 +1,4 @@ -Server Notices -============== +# Server Notices 'Server Notices' are a new feature introduced in Synapse 0.30. They provide a channel whereby server administrators can send messages to users on the server. @@ -11,8 +10,7 @@ they may also find a use for features such as "Message of the day". This is a feature specific to Synapse, but it uses standard Matrix communication mechanisms, so should work with any Matrix client. -User experience ---------------- +## User experience When the user is first sent a server notice, they will get an invitation to a room (typically called 'Server Notices', though this is configurable in @@ -29,8 +27,7 @@ levels. Having joined the room, the user can leave the room if they want. Subsequent server notices will then cause a new room to be created. 
-Synapse configuration ---------------------- +## Synapse configuration Server notices come from a specific user id on the server. Server administrators are free to choose the user id - something like `server` is @@ -58,17 +55,7 @@ room which will be created. `system_mxid_display_name` and `system_mxid_avatar_url` can be used to set the displayname and avatar of the Server Notices user. -Sending notices ---------------- +## Sending notices -As of the current version of synapse, there is no convenient interface for -sending notices (other than the automated ones sent as part of consent -tracking). - -In the meantime, it is possible to test this feature using the manhole. Having -gone into the manhole as described in [manhole.md](manhole.md), a notice can be -sent with something like: - -``` ->>> hs.get_server_notices_manager().send_notice('@user:server.com', {'msgtype':'m.text', 'body':'foo'}) -``` +To send server notices to users you can use the +[admin_api](admin_api/server_notices.md). diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index e8e1bcddea..3a24d31d1b 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -117,4 +117,6 @@ class ClientRestResource(JsonResource): account_validity.register_servlets(hs, client_resource) # moving to /_synapse/admin - synapse.rest.admin.register_servlets(hs, client_resource) + synapse.rest.admin.register_servlets_for_client_rest_resource( + hs, client_resource + ) diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index d02f5198b8..0ce89741f0 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -37,6 +37,7 @@ from synapse.http.servlet import ( parse_string, ) from synapse.rest.admin._base import assert_requester_is_admin, assert_user_is_admin +from synapse.rest.admin.server_notice_servlet import SendServerNoticeServlet from synapse.types import UserID, create_requester from synapse.util.versionstring import get_version_string @@ -813,16 +814,26 @@ class AccountValidityRenewServlet(RestServlet): } defer.returnValue((200, res)) +######################################################################################## +# +# please don't add more servlets here: this file is already long and unwieldy. Put +# them in separate files within the 'admin' package. +# +######################################################################################## + class AdminRestResource(JsonResource): """The REST resource which gets mounted at /_synapse/admin""" def __init__(self, hs): JsonResource.__init__(self, hs, canonical_json=False) - register_servlets(hs, self) + + register_servlets_for_client_rest_resource(hs, self) + SendServerNoticeServlet(hs).register(self) -def register_servlets(hs, http_server): +def register_servlets_for_client_rest_resource(hs, http_server): + """Register only the servlets which need to be exposed on /_matrix/client/xxx""" WhoisRestServlet(hs).register(http_server) PurgeMediaCacheRestServlet(hs).register(http_server) PurgeHistoryStatusRestServlet(hs).register(http_server) @@ -839,3 +850,5 @@ def register_servlets(hs, http_server): VersionServlet(hs).register(http_server) DeleteGroupAdminRestServlet(hs).register(http_server) AccountValidityRenewServlet(hs).register(http_server) + # don't add more things here: new servlets should only be exposed on + # /_synapse/admin so should not go here. Instead register them in AdminRestResource. 
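
For illustration only — not part of the patch: a minimal sketch of how the `send_server_notice` endpoint introduced by this commit might be exercised from a standalone script. It assumes a homeserver listening on `localhost:8008` with server notices enabled in `homeserver.yaml`, the `requests` library, and a placeholder admin access token passed via the `access_token` query parameter used in the document's other admin examples; the request and response bodies mirror the examples in the `docs/admin_api/server_notices.md` file added above.

```python
# Illustrative sketch, not part of the patch. Assumes a local homeserver on
# port 8008, server notices enabled, and a valid admin access token.
import requests

admin_access_token = "ADMIN_ACCESS_TOKEN"  # placeholder, not a real token

response = requests.post(
    "http://localhost:8008/_synapse/admin/v1/send_server_notice",
    params={"access_token": admin_access_token},
    json={
        "user_id": "@target_user:server_name",
        "content": {
            "msgtype": "m.text",
            "body": "This is my message",
        },
    },
)
response.raise_for_status()

# On success the servlet returns the event ID of the notice that was sent.
print(response.json()["event_id"])
```
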
diff --git a/synapse/rest/admin/server_notice_servlet.py b/synapse/rest/admin/server_notice_servlet.py new file mode 100644 index 0000000000..ae5aca9dac --- /dev/null +++ b/synapse/rest/admin/server_notice_servlet.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import re + +from twisted.internet import defer + +from synapse.api.constants import EventTypes +from synapse.api.errors import SynapseError +from synapse.http.servlet import ( + RestServlet, + assert_params_in_dict, + parse_json_object_from_request, +) +from synapse.rest.admin import assert_requester_is_admin +from synapse.rest.client.transactions import HttpTransactionCache +from synapse.types import UserID + + +class SendServerNoticeServlet(RestServlet): + """Servlet which will send a server notice to a given user + + POST /_synapse/admin/v1/send_server_notice + { + "user_id": "@target_user:server_name", + "content": { + "msgtype": "m.text", + "body": "This is my message" + } + } + + returns: + + { + "event_id": "$1895723857jgskldgujpious" + } + """ + def __init__(self, hs): + """ + Args: + hs (synapse.server.HomeServer): server + """ + self.hs = hs + self.auth = hs.get_auth() + self.txns = HttpTransactionCache(hs) + self.snm = hs.get_server_notices_manager() + + def register(self, json_resource): + PATTERN = "^/_synapse/admin/v1/send_server_notice" + json_resource.register_paths( + "POST", + (re.compile(PATTERN + "$"), ), + self.on_POST, + ) + json_resource.register_paths( + "PUT", + (re.compile(PATTERN + "/(?P[^/]*)$",), ), + self.on_PUT, + ) + + @defer.inlineCallbacks + def on_POST(self, request, txn_id=None): + yield assert_requester_is_admin(self.auth, request) + body = parse_json_object_from_request(request) + assert_params_in_dict(body, ("user_id", "content")) + event_type = body.get("type", EventTypes.Message) + state_key = body.get("state_key") + + if not self.snm.is_enabled(): + raise SynapseError(400, "Server notices are not enabled on this server") + + user_id = body["user_id"] + UserID.from_string(user_id) + if not self.hs.is_mine_id(user_id): + raise SynapseError(400, "Server notices can only be sent to local users") + + event = yield self.snm.send_notice( + user_id=body["user_id"], + type=event_type, + state_key=state_key, + event_content=body["content"], + ) + + defer.returnValue((200, {"event_id": event.event_id})) + + def on_PUT(self, request, txn_id): + return self.txns.fetch_or_execute_request( + request, self.on_POST, request, txn_id, + ) diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py index 32ef83e9c0..7dd1a1daf8 100644 --- a/tests/handlers/test_user_directory.py +++ b/tests/handlers/test_user_directory.py @@ -30,7 +30,7 @@ class UserDirectoryTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, ] @@ -328,7 +328,7 @@ class 
TestUserDirSearchDisabled(unittest.HomeserverTestCase): user_directory.register_servlets, room.register_servlets, login.register_servlets, - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, ] def make_homeserver(self, reactor, clock): diff --git a/tests/push/test_email.py b/tests/push/test_email.py index e29bd18ad7..325ea449ae 100644 --- a/tests/push/test_email.py +++ b/tests/push/test_email.py @@ -34,7 +34,7 @@ class EmailPusherTests(HomeserverTestCase): skip = "No Jinja installed" if not load_jinja2_templates else None servlets = [ - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, login.register_servlets, ] diff --git a/tests/push/test_http.py b/tests/push/test_http.py index 3f9f56bb79..13bd2c8688 100644 --- a/tests/push/test_http.py +++ b/tests/push/test_http.py @@ -33,7 +33,7 @@ class HTTPPusherTests(HomeserverTestCase): skip = "No Jinja installed" if not load_jinja2_templates else None servlets = [ - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, login.register_servlets, ] diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py index 42858b5fea..db4cfd8550 100644 --- a/tests/rest/admin/test_admin.py +++ b/tests/rest/admin/test_admin.py @@ -30,7 +30,7 @@ from tests import unittest class VersionTestCase(unittest.HomeserverTestCase): servlets = [ - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, ] @@ -63,7 +63,7 @@ class VersionTestCase(unittest.HomeserverTestCase): class UserRegisterTestCase(unittest.HomeserverTestCase): - servlets = [synapse.rest.admin.register_servlets] + servlets = [synapse.rest.admin.register_servlets_for_client_rest_resource] def make_homeserver(self, reactor, clock): @@ -359,7 +359,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase): class ShutdownRoomTestCase(unittest.HomeserverTestCase): servlets = [ - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, events.register_servlets, room.register_servlets, @@ -496,7 +496,7 @@ class ShutdownRoomTestCase(unittest.HomeserverTestCase): class DeleteGroupTestCase(unittest.HomeserverTestCase): servlets = [ - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, groups.register_servlets, ] diff --git a/tests/rest/client/test_consent.py b/tests/rest/client/test_consent.py index 36e6c1c67d..5528971190 100644 --- a/tests/rest/client/test_consent.py +++ b/tests/rest/client/test_consent.py @@ -32,7 +32,7 @@ except Exception: class ConsentResourceTestCase(unittest.HomeserverTestCase): skip = "No Jinja installed" if not load_jinja2_templates else None servlets = [ - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, login.register_servlets, ] diff --git a/tests/rest/client/test_identity.py b/tests/rest/client/test_identity.py index d4fe0aee7d..2e51ffa418 100644 --- a/tests/rest/client/test_identity.py +++ b/tests/rest/client/test_identity.py @@ -24,7 +24,7 @@ from tests import unittest class IdentityTestCase(unittest.HomeserverTestCase): servlets = [ - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, 
login.register_servlets, ] diff --git a/tests/rest/client/v1/test_events.py b/tests/rest/client/v1/test_events.py index 5cb1c1ae9f..8a9a55a527 100644 --- a/tests/rest/client/v1/test_events.py +++ b/tests/rest/client/v1/test_events.py @@ -29,7 +29,7 @@ class EventStreamPermissionsTestCase(unittest.HomeserverTestCase): servlets = [ events.register_servlets, room.register_servlets, - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, ] diff --git a/tests/rest/client/v1/test_login.py b/tests/rest/client/v1/test_login.py index 8d9ef877f6..9ebd91f678 100644 --- a/tests/rest/client/v1/test_login.py +++ b/tests/rest/client/v1/test_login.py @@ -11,7 +11,7 @@ LOGIN_URL = b"/_matrix/client/r0/login" class LoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, ] diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py index 1a34924f3e..521ac80f9a 100644 --- a/tests/rest/client/v1/test_rooms.py +++ b/tests/rest/client/v1/test_rooms.py @@ -804,7 +804,7 @@ class RoomMessageListTestCase(RoomBase): class RoomSearchTestCase(unittest.HomeserverTestCase): servlets = [ - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, login.register_servlets, ] diff --git a/tests/rest/client/v2_alpha/test_auth.py b/tests/rest/client/v2_alpha/test_auth.py index 67021185d0..0ca3c4657b 100644 --- a/tests/rest/client/v2_alpha/test_auth.py +++ b/tests/rest/client/v2_alpha/test_auth.py @@ -27,7 +27,7 @@ class FallbackAuthTests(unittest.HomeserverTestCase): servlets = [ auth.register_servlets, - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, register.register_servlets, ] hijack_auth = False diff --git a/tests/rest/client/v2_alpha/test_capabilities.py b/tests/rest/client/v2_alpha/test_capabilities.py index 8134163e20..f3ef977404 100644 --- a/tests/rest/client/v2_alpha/test_capabilities.py +++ b/tests/rest/client/v2_alpha/test_capabilities.py @@ -23,7 +23,7 @@ from tests import unittest class CapabilitiesTestCase(unittest.HomeserverTestCase): servlets = [ - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, capabilities.register_servlets, login.register_servlets, ] diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index 4d698af03a..1c3a621d26 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -199,7 +199,7 @@ class AccountValidityTestCase(unittest.HomeserverTestCase): servlets = [ register.register_servlets, - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, sync.register_servlets, account_validity.register_servlets, @@ -308,7 +308,7 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): skip = "No Jinja installed" if not load_jinja2_templates else None servlets = [ register.register_servlets, - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, sync.register_servlets, account_validity.register_servlets, diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py index 
65fac1d5ce..71895094bd 100644 --- a/tests/rest/client/v2_alpha/test_sync.py +++ b/tests/rest/client/v2_alpha/test_sync.py @@ -73,7 +73,7 @@ class FilterTestCase(unittest.HomeserverTestCase): class SyncTypingTests(unittest.HomeserverTestCase): servlets = [ - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, login.register_servlets, sync.register_servlets, diff --git a/tests/server_notices/test_consent.py b/tests/server_notices/test_consent.py index e8b8ac5725..e0b4e0eb63 100644 --- a/tests/server_notices/test_consent.py +++ b/tests/server_notices/test_consent.py @@ -23,7 +23,7 @@ class ConsentNoticesTests(unittest.HomeserverTestCase): servlets = [ sync.register_servlets, - synapse.rest.admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, room.register_servlets, ] diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index b0f6fd34d8..b62eae7abc 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -206,7 +206,10 @@ class ClientIpStoreTestCase(unittest.HomeserverTestCase): class ClientIpAuthTestCase(unittest.HomeserverTestCase): - servlets = [synapse.rest.admin.register_servlets, login.register_servlets] + servlets = [ + synapse.rest.admin.register_servlets_for_client_rest_resource, + login.register_servlets, + ] def make_homeserver(self, reactor, clock): hs = self.setup_test_homeserver() -- cgit 1.5.1 From 176f31c2e3e048353c4382cf5d1a34a1359f48b1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 2 May 2019 15:23:08 +0100 Subject: Rate limit early --- synapse/handlers/room_member.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index ad3df7cc7d..3e86b9c690 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -710,8 +710,9 @@ class RoomMemberHandler(object): Codes.FORBIDDEN, ) - # Check whether we'll be ratelimited - yield self.base_handler.ratelimit(requester, update=False) + # We need to rate limit *before* we send out any 3PID invites, so we + # can't just rely on the standard ratelimiting of events. + yield self.base_handler.ratelimit(requester) invitee = yield self._lookup_3pid( id_server, medium, address -- cgit 1.5.1 From 247dc1bd0bd9ee2b9525495c0dbd819baf10ec1f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 3 May 2019 12:38:03 +0100 Subject: Use SystemRandom for token generation --- changelog.d/5133.bugfix | 1 + synapse/util/stringutils.py | 9 +++++++-- 2 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 changelog.d/5133.bugfix diff --git a/changelog.d/5133.bugfix b/changelog.d/5133.bugfix new file mode 100644 index 0000000000..12a32a906b --- /dev/null +++ b/changelog.d/5133.bugfix @@ -0,0 +1 @@ +Switch to using a cryptographically-secure random number generator for token strings, ensuring they cannot be predicted by an attacker. Thanks to @opnsec for for identifying and responsibly disclosing this issue! diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py index fdcb375f95..69dffd8244 100644 --- a/synapse/util/stringutils.py +++ b/synapse/util/stringutils.py @@ -24,14 +24,19 @@ _string_with_symbols = ( string.digits + string.ascii_letters + ".,;:^&*-_+=#~@" ) +# random_string and random_string_with_symbols are used for a range of things, +# some cryptographically important, some less so. 
We use SystemRandom to make sure +# we get cryptographically-secure randoms. +rand = random.SystemRandom() + def random_string(length): - return ''.join(random.choice(string.ascii_letters) for _ in range(length)) + return ''.join(rand.choice(string.ascii_letters) for _ in range(length)) def random_string_with_symbols(length): return ''.join( - random.choice(_string_with_symbols) for _ in range(length) + rand.choice(_string_with_symbols) for _ in range(length) ) -- cgit 1.5.1 From 60c3635f0507699ccb63115ff974f54d45c90c83 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 May 2019 14:40:15 +0100 Subject: typo --- changelog.d/5133.bugfix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.d/5133.bugfix b/changelog.d/5133.bugfix index 12a32a906b..be6474a692 100644 --- a/changelog.d/5133.bugfix +++ b/changelog.d/5133.bugfix @@ -1 +1 @@ -Switch to using a cryptographically-secure random number generator for token strings, ensuring they cannot be predicted by an attacker. Thanks to @opnsec for for identifying and responsibly disclosing this issue! +Switch to using a cryptographically-secure random number generator for token strings, ensuring they cannot be predicted by an attacker. Thanks to @opnsec for identifying and responsibly disclosing this issue! -- cgit 1.5.1 From 1a7104fde3abc5392b90ca084efa896d46e24f91 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 3 May 2019 13:46:50 +0100 Subject: Blacklist 0.0.0.0 and :: by default for URL previews --- changelog.d/5134.bugfix | 1 + docs/sample_config.yaml | 14 +++++++++----- synapse/config/repository.py | 28 ++++++++++++++++++---------- 3 files changed, 28 insertions(+), 15 deletions(-) create mode 100644 changelog.d/5134.bugfix diff --git a/changelog.d/5134.bugfix b/changelog.d/5134.bugfix new file mode 100644 index 0000000000..684d48c53a --- /dev/null +++ b/changelog.d/5134.bugfix @@ -0,0 +1 @@ +Blacklist 0.0.0.0 and :: by default for URL previews. Thanks to @opnsec for identifying and responsibly disclosing this issue too! diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 4ada0fba0e..0589734b8a 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -506,11 +506,12 @@ uploads_path: "DATADIR/uploads" # height: 600 # method: scale -# Is the preview URL API enabled? If enabled, you *must* specify -# an explicit url_preview_ip_range_blacklist of IPs that the spider is -# denied from accessing. +# Is the preview URL API enabled? # -#url_preview_enabled: false +# 'false' by default: uncomment the following to enable it (and specify a +# url_preview_ip_range_blacklist blacklist). +# +#url_preview_enabled: true # List of IP address CIDR ranges that the URL preview spider is denied # from accessing. There are no defaults: you must explicitly @@ -520,6 +521,9 @@ uploads_path: "DATADIR/uploads" # synapse to issue arbitrary GET requests to your internal services, # causing serious security issues. # +# This must be specified if url_preview_enabled. It is recommended that you +# uncomment the following list as a starting point. +# #url_preview_ip_range_blacklist: # - '127.0.0.0/8' # - '10.0.0.0/8' @@ -530,7 +534,7 @@ uploads_path: "DATADIR/uploads" # - '::1/128' # - 'fe80::/64' # - 'fc00::/7' -# + # List of IP address CIDR ranges that the URL preview spider is allowed # to access even if they are specified in url_preview_ip_range_blacklist. 
# This is useful for specifying exceptions to wide-ranging blacklisted diff --git a/synapse/config/repository.py b/synapse/config/repository.py index 3f34ad9b2a..d155d69d8a 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -186,17 +186,21 @@ class ContentRepositoryConfig(Config): except ImportError: raise ConfigError(MISSING_NETADDR) - if "url_preview_ip_range_blacklist" in config: - self.url_preview_ip_range_blacklist = IPSet( - config["url_preview_ip_range_blacklist"] - ) - else: + if "url_preview_ip_range_blacklist" not in config: raise ConfigError( "For security, you must specify an explicit target IP address " "blacklist in url_preview_ip_range_blacklist for url previewing " "to work" ) + self.url_preview_ip_range_blacklist = IPSet( + config["url_preview_ip_range_blacklist"] + ) + + # we always blacklist '0.0.0.0' and '::', which are supposed to be + # unroutable addresses. + self.url_preview_ip_range_blacklist.update(['0.0.0.0', '::']) + self.url_preview_ip_range_whitelist = IPSet( config.get("url_preview_ip_range_whitelist", ()) ) @@ -260,11 +264,12 @@ class ContentRepositoryConfig(Config): #thumbnail_sizes: %(formatted_thumbnail_sizes)s - # Is the preview URL API enabled? If enabled, you *must* specify - # an explicit url_preview_ip_range_blacklist of IPs that the spider is - # denied from accessing. + # Is the preview URL API enabled? # - #url_preview_enabled: false + # 'false' by default: uncomment the following to enable it (and specify a + # url_preview_ip_range_blacklist blacklist). + # + #url_preview_enabled: true # List of IP address CIDR ranges that the URL preview spider is denied # from accessing. There are no defaults: you must explicitly @@ -274,6 +279,9 @@ class ContentRepositoryConfig(Config): # synapse to issue arbitrary GET requests to your internal services, # causing serious security issues. # + # This must be specified if url_preview_enabled. It is recommended that you + # uncomment the following list as a starting point. + # #url_preview_ip_range_blacklist: # - '127.0.0.0/8' # - '10.0.0.0/8' @@ -284,7 +292,7 @@ class ContentRepositoryConfig(Config): # - '::1/128' # - 'fe80::/64' # - 'fc00::/7' - # + # List of IP address CIDR ranges that the URL preview spider is allowed # to access even if they are specified in url_preview_ip_range_blacklist. # This is useful for specifying exceptions to wide-ranging blacklisted -- cgit 1.5.1 From 1565ebec2c7aa9f6f2a8b60227b405cae12e7170 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 3 May 2019 15:50:59 +0100 Subject: more config comment updates --- docs/sample_config.yaml | 7 +++++-- synapse/config/repository.py | 7 +++++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 0589734b8a..6ed75ff764 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -521,8 +521,11 @@ uploads_path: "DATADIR/uploads" # synapse to issue arbitrary GET requests to your internal services, # causing serious security issues. # -# This must be specified if url_preview_enabled. It is recommended that you -# uncomment the following list as a starting point. +# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly +# listed here, since they correspond to unroutable addresses.) +# +# This must be specified if url_preview_enabled is set. It is recommended that +# you uncomment the following list as a starting point. 
# #url_preview_ip_range_blacklist: # - '127.0.0.0/8' diff --git a/synapse/config/repository.py b/synapse/config/repository.py index d155d69d8a..fbfcecc240 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -279,8 +279,11 @@ class ContentRepositoryConfig(Config): # synapse to issue arbitrary GET requests to your internal services, # causing serious security issues. # - # This must be specified if url_preview_enabled. It is recommended that you - # uncomment the following list as a starting point. + # (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly + # listed here, since they correspond to unroutable addresses.) + # + # This must be specified if url_preview_enabled is set. It is recommended that + # you uncomment the following list as a starting point. # #url_preview_ip_range_blacklist: # - '127.0.0.0/8' -- cgit 1.5.1 From 863ec0962210fcb946e68caa7431f69583814c73 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 3 May 2019 16:03:24 +0100 Subject: 0.99.3.1 --- CHANGES.md | 9 +++++++++ changelog.d/5133.bugfix | 1 - changelog.d/5134.bugfix | 1 - debian/changelog | 6 ++++++ synapse/__init__.py | 2 +- 5 files changed, 16 insertions(+), 3 deletions(-) delete mode 100644 changelog.d/5133.bugfix delete mode 100644 changelog.d/5134.bugfix diff --git a/CHANGES.md b/CHANGES.md index 490c2021e0..d8eba2ec60 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +Synapse 0.99.3.1 (2019-05-03) +============================= + +Bugfixes +-------- + +- Switch to using a cryptographically-secure random number generator for token strings, ensuring they cannot be predicted by an attacker. Thanks to @opnsec for identifying and responsibly disclosing this issue! ([\#5133](https://github.com/matrix-org/synapse/issues/5133)) +- Blacklist 0.0.0.0 and :: by default for URL previews. Thanks to @opnsec for identifying and responsibly disclosing this issue too! ([\#5134](https://github.com/matrix-org/synapse/issues/5134)) + Synapse 0.99.3 (2019-04-01) =========================== diff --git a/changelog.d/5133.bugfix b/changelog.d/5133.bugfix deleted file mode 100644 index be6474a692..0000000000 --- a/changelog.d/5133.bugfix +++ /dev/null @@ -1 +0,0 @@ -Switch to using a cryptographically-secure random number generator for token strings, ensuring they cannot be predicted by an attacker. Thanks to @opnsec for identifying and responsibly disclosing this issue! diff --git a/changelog.d/5134.bugfix b/changelog.d/5134.bugfix deleted file mode 100644 index 684d48c53a..0000000000 --- a/changelog.d/5134.bugfix +++ /dev/null @@ -1 +0,0 @@ -Blacklist 0.0.0.0 and :: by default for URL previews. Thanks to @opnsec for identifying and responsibly disclosing this issue too! diff --git a/debian/changelog b/debian/changelog index 03df2e1c00..ea712f0dab 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (0.99.3.1) stable; urgency=medium + + * New synapse release 0.99.3.1. 
+ + -- Synapse Packaging team Fri, 03 May 2019 16:02:43 +0100 + matrix-synapse-py3 (0.99.3) stable; urgency=medium [ Richard van der Hoff ] diff --git a/synapse/__init__.py b/synapse/__init__.py index 6bb5a8b24d..8959312cb0 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -27,4 +27,4 @@ try: except ImportError: pass -__version__ = "0.99.3" +__version__ = "0.99.3.1" -- cgit 1.5.1 From f73f18fe7baba7e6f442e308112eeba91e4b8a61 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 3 May 2019 16:09:34 +0100 Subject: changelog tweaks --- CHANGES.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index d8eba2ec60..4b84e20823 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,8 +1,10 @@ Synapse 0.99.3.1 (2019-05-03) ============================= -Bugfixes --------- +Security update +--------------- + +This release includes two security fixes: - Switch to using a cryptographically-secure random number generator for token strings, ensuring they cannot be predicted by an attacker. Thanks to @opnsec for identifying and responsibly disclosing this issue! ([\#5133](https://github.com/matrix-org/synapse/issues/5133)) - Blacklist 0.0.0.0 and :: by default for URL previews. Thanks to @opnsec for identifying and responsibly disclosing this issue too! ([\#5134](https://github.com/matrix-org/synapse/issues/5134)) -- cgit 1.5.1 From e3281d7d261ee533818ae1638014295bbb4b98af Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 3 May 2019 18:30:13 +0100 Subject: pin urllib3 to <1.25 --- synapse/python_dependencies.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index f71e21ff4d..c75119a030 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -69,6 +69,14 @@ REQUIREMENTS = [ "attrs>=17.4.0", "netaddr>=0.7.18", + + # requests is a transitive dep of treq, and urlib3 is a transitive dep + # of requests, as well as of sentry-sdk. + # + # As of requests 2.21, requests does not yet support urllib3 1.25. + # (If we do not pin it here, pip will give us the latest urllib3 + # due to the dep via sentry-sdk.) + "urllib3<1.25", ] CONDITIONAL_REQUIREMENTS = { -- cgit 1.5.1 From ecc09673156c340e390c9c8a19af80133622fd4f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 3 May 2019 18:30:35 +0100 Subject: Debian: we now need libpq-dev. psycopg 2.8 is now out, which means that the C library gets built from source, so we now need libpq-dev when building. Turns out the need for this package is already documented in docs/postgres.rst. 
--- docker/Dockerfile-dhvirtualenv | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/Dockerfile-dhvirtualenv b/docker/Dockerfile-dhvirtualenv index 224c92352d..3de032cf8c 100644 --- a/docker/Dockerfile-dhvirtualenv +++ b/docker/Dockerfile-dhvirtualenv @@ -55,7 +55,8 @@ RUN apt-get update -qq -o Acquire::Languages=none \ python3-pip \ python3-setuptools \ python3-venv \ - sqlite3 + sqlite3 \ + libpq-dev COPY --from=builder /dh-virtualenv_1.1-1_all.deb / -- cgit 1.5.1 From 5485852b43a9eb5e8dbe892ee66790237c7e4db9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 3 May 2019 18:36:55 +0100 Subject: changelog --- changelog.d/5135.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5135.misc diff --git a/changelog.d/5135.misc b/changelog.d/5135.misc new file mode 100644 index 0000000000..eab06dd91d --- /dev/null +++ b/changelog.d/5135.misc @@ -0,0 +1 @@ +Ensure that we have `urllib3` <1.25, to resolve incompatibility with `requests`. \ No newline at end of file -- cgit 1.5.1 From 0b5cf9560775297f31fb4afa384fc37be74d7490 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 3 May 2019 18:44:14 +0100 Subject: include disco in deb build target list --- scripts-dev/build_debian_packages | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages index 6b9be99060..93305ee9b1 100755 --- a/scripts-dev/build_debian_packages +++ b/scripts-dev/build_debian_packages @@ -24,6 +24,7 @@ DISTS = ( "ubuntu:xenial", "ubuntu:bionic", "ubuntu:cosmic", + "ubuntu:disco", ) DESC = '''\ -- cgit 1.5.1 From fa21455e08d4115cd14f6c4ca34a1b432d924b4f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 3 May 2019 18:56:24 +0100 Subject: 0.99.3.2 --- CHANGES.md | 9 +++++++++ changelog.d/5135.misc | 1 - debian/changelog | 6 ++++++ synapse/__init__.py | 2 +- 4 files changed, 16 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/5135.misc diff --git a/CHANGES.md b/CHANGES.md index 4b84e20823..d8cfbbebef 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +Synapse 0.99.3.2 (2019-05-03) +============================= + +Internal Changes +---------------- + +- Ensure that we have `urllib3` <1.25, to resolve incompatibility with `requests`. ([\#5135](https://github.com/matrix-org/synapse/issues/5135)) + + Synapse 0.99.3.1 (2019-05-03) ============================= diff --git a/changelog.d/5135.misc b/changelog.d/5135.misc deleted file mode 100644 index eab06dd91d..0000000000 --- a/changelog.d/5135.misc +++ /dev/null @@ -1 +0,0 @@ -Ensure that we have `urllib3` <1.25, to resolve incompatibility with `requests`. \ No newline at end of file diff --git a/debian/changelog b/debian/changelog index ea712f0dab..c25425bf26 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (0.99.3.2) stable; urgency=medium + + * New synapse release 0.99.3.2. + + -- Synapse Packaging team Fri, 03 May 2019 18:56:20 +0100 + matrix-synapse-py3 (0.99.3.1) stable; urgency=medium * New synapse release 0.99.3.1. diff --git a/synapse/__init__.py b/synapse/__init__.py index 8959312cb0..315fa96551 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -27,4 +27,4 @@ try: except ImportError: pass -__version__ = "0.99.3.1" +__version__ = "0.99.3.2" -- cgit 1.5.1 From 4804206dbe292ca4887fe972fe15e972d53a4c73 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 6 May 2019 22:13:35 +0100 Subject: Fix sample config ... after it got broken in 1565ebec2c. 
--- docs/sample_config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 84337a7c72..b6b9da6e41 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -566,7 +566,7 @@ uploads_path: "DATADIR/uploads" # (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly # listed here, since they correspond to unroutable addresses.) # -# This must be specified if url_preview_enabled is set. It is recommended that +# This must be specified if url_preview_enabled is set. It is recommended that # you uncomment the following list as a starting point. # #url_preview_ip_range_blacklist: -- cgit 1.5.1 From 3fdff1420790a29331608ca33ce2ace67dbaa4cc Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 6 May 2019 15:15:02 -0600 Subject: Fix spelling in server notices admin API docs (#5142) --- changelog.d/5142.feature | 1 + docs/admin_api/server_notices.md | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/5142.feature diff --git a/changelog.d/5142.feature b/changelog.d/5142.feature new file mode 100644 index 0000000000..54b228680d --- /dev/null +++ b/changelog.d/5142.feature @@ -0,0 +1 @@ +Implement an admin API for sending server notices. Many thanks to @krombel who provided a foundation for this work. diff --git a/docs/admin_api/server_notices.md b/docs/admin_api/server_notices.md index 5ddd21cfb2..858b052b84 100644 --- a/docs/admin_api/server_notices.md +++ b/docs/admin_api/server_notices.md @@ -36,7 +36,7 @@ You can optionally include the following additional parameters: * `state_key`: Setting this will result in a state event being sent. -Once the notice has been sent, the APU will return the following response: +Once the notice has been sent, the API will return the following response: ```json { -- cgit 1.5.1 From 59e2d2694deec13aaa0062e04b5460f978967dc1 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 7 May 2019 09:29:30 +0100 Subject: Remove the requirement to authenticate for /admin/server_version. (#5122) This endpoint isn't much use for its intended purpose if you first need to get yourself an admin's auth token. I've restricted it to the `/_synapse/admin` path to make it a bit easier to lock down for those concerned about exposing this information. I don't imagine anyone is using it in anger currently. --- changelog.d/5122.misc | 1 + docs/admin_api/version_api.rst | 2 -- synapse/rest/admin/__init__.py | 15 +++++---------- tests/rest/admin/test_admin.py | 30 ++++++++---------------------- tests/unittest.py | 22 ++++++++++++++++++---- 5 files changed, 32 insertions(+), 38 deletions(-) create mode 100644 changelog.d/5122.misc diff --git a/changelog.d/5122.misc b/changelog.d/5122.misc new file mode 100644 index 0000000000..e1be8a6210 --- /dev/null +++ b/changelog.d/5122.misc @@ -0,0 +1 @@ +Remove the requirement to authenticate for /admin/server_version. diff --git a/docs/admin_api/version_api.rst b/docs/admin_api/version_api.rst index 6d66543b96..833d9028be 100644 --- a/docs/admin_api/version_api.rst +++ b/docs/admin_api/version_api.rst @@ -10,8 +10,6 @@ The api is:: GET /_synapse/admin/v1/server_version -including an ``access_token`` of a server admin. - It returns a JSON body like the following: .. 
code:: json diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 0ce89741f0..744d85594f 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -88,21 +88,16 @@ class UsersRestServlet(RestServlet): class VersionServlet(RestServlet): - PATTERNS = historical_admin_path_patterns("/server_version") + PATTERNS = (re.compile("^/_synapse/admin/v1/server_version$"), ) def __init__(self, hs): - self.auth = hs.get_auth() - - @defer.inlineCallbacks - def on_GET(self, request): - yield assert_requester_is_admin(self.auth, request) - - ret = { + self.res = { 'server_version': get_version_string(synapse), 'python_version': platform.python_version(), } - defer.returnValue((200, ret)) + def on_GET(self, request): + return 200, self.res class UserRegisterServlet(RestServlet): @@ -830,6 +825,7 @@ class AdminRestResource(JsonResource): register_servlets_for_client_rest_resource(hs, self) SendServerNoticeServlet(hs).register(self) + VersionServlet(hs).register(self) def register_servlets_for_client_rest_resource(hs, http_server): @@ -847,7 +843,6 @@ def register_servlets_for_client_rest_resource(hs, http_server): QuarantineMediaInRoom(hs).register(http_server) ListMediaInRoom(hs).register(http_server) UserRegisterServlet(hs).register(http_server) - VersionServlet(hs).register(http_server) DeleteGroupAdminRestServlet(hs).register(http_server) AccountValidityRenewServlet(hs).register(http_server) # don't add more things here: new servlets should only be exposed on diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py index db4cfd8550..da19a83918 100644 --- a/tests/rest/admin/test_admin.py +++ b/tests/rest/admin/test_admin.py @@ -21,6 +21,8 @@ from mock import Mock import synapse.rest.admin from synapse.api.constants import UserTypes +from synapse.http.server import JsonResource +from synapse.rest.admin import VersionServlet from synapse.rest.client.v1 import events, login, room from synapse.rest.client.v2_alpha import groups @@ -28,20 +30,15 @@ from tests import unittest class VersionTestCase(unittest.HomeserverTestCase): + url = '/_synapse/admin/v1/server_version' - servlets = [ - synapse.rest.admin.register_servlets_for_client_rest_resource, - login.register_servlets, - ] - - url = '/_matrix/client/r0/admin/server_version' + def create_test_json_resource(self): + resource = JsonResource(self.hs) + VersionServlet(self.hs).register(resource) + return resource def test_version_string(self): - self.register_user("admin", "pass", admin=True) - self.admin_token = self.login("admin", "pass") - - request, channel = self.make_request("GET", self.url, - access_token=self.admin_token) + request, channel = self.make_request("GET", self.url, shorthand=False) self.render(request) self.assertEqual(200, int(channel.result["code"]), @@ -49,17 +46,6 @@ class VersionTestCase(unittest.HomeserverTestCase): self.assertEqual({'server_version', 'python_version'}, set(channel.json_body.keys())) - def test_inaccessible_to_non_admins(self): - self.register_user("unprivileged-user", "pass", admin=False) - user_token = self.login("unprivileged-user", "pass") - - request, channel = self.make_request("GET", self.url, - access_token=user_token) - self.render(request) - - self.assertEqual(403, int(channel.result['code']), - msg=channel.result['body']) - class UserRegisterTestCase(unittest.HomeserverTestCase): diff --git a/tests/unittest.py b/tests/unittest.py index 8c65736a51..029a88d770 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -181,10 +181,7 @@ 
class HomeserverTestCase(TestCase): raise Exception("A homeserver wasn't returned, but %r" % (self.hs,)) # Register the resources - self.resource = JsonResource(self.hs) - - for servlet in self.servlets: - servlet(self.hs, self.resource) + self.resource = self.create_test_json_resource() from tests.rest.client.v1.utils import RestHelper @@ -230,6 +227,23 @@ class HomeserverTestCase(TestCase): hs = self.setup_test_homeserver() return hs + def create_test_json_resource(self): + """ + Create a test JsonResource, with the relevant servlets registerd to it + + The default implementation calls each function in `servlets` to do the + registration. + + Returns: + JsonResource: + """ + resource = JsonResource(self.hs) + + for servlet in self.servlets: + servlet(self.hs, resource) + + return resource + def default_config(self, name="test"): """ Get a default HomeServer config object. -- cgit 1.5.1 From 1473058b5eb14b5128c0b6ee6e88e89602ad96c5 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 8 May 2019 17:01:30 +0100 Subject: Do checks on aliases for incoming m.room.aliases events (#5128) Follow-up to #5124 Also added a bunch of checks to make sure everything (both the stuff added on #5124 and this PR) works as intended. --- changelog.d/5128.bugfix | 1 + synapse/api/constants.py | 3 + synapse/events/snapshot.py | 8 +- synapse/events/validator.py | 15 ++- synapse/handlers/directory.py | 7 +- synapse/handlers/message.py | 30 ++++++ tests/rest/client/v1/test_directory.py | 169 +++++++++++++++++++++++++++++++++ 7 files changed, 225 insertions(+), 8 deletions(-) create mode 100644 changelog.d/5128.bugfix create mode 100644 tests/rest/client/v1/test_directory.py diff --git a/changelog.d/5128.bugfix b/changelog.d/5128.bugfix new file mode 100644 index 0000000000..46df1e9fd5 --- /dev/null +++ b/changelog.d/5128.bugfix @@ -0,0 +1 @@ +Add some missing limitations to room alias creation. diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 0860b75905..8547a63535 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -20,6 +20,9 @@ # the "depth" field on events is limited to 2**63 - 1 MAX_DEPTH = 2**63 - 1 +# the maximum length for a room alias is 255 characters +MAX_ALIAS_LENGTH = 255 + class Membership(object): diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py index 368b5f6ae4..fa09c132a0 100644 --- a/synapse/events/snapshot.py +++ b/synapse/events/snapshot.py @@ -187,7 +187,9 @@ class EventContext(object): Returns: Deferred[dict[(str, str), str]|None]: Returns None if state_group - is None, which happens when the associated event is an outlier. + is None, which happens when the associated event is an outlier. + Maps a (type, state_key) to the event ID of the state event matching + this tuple. """ if not self._fetching_state_deferred: @@ -205,7 +207,9 @@ class EventContext(object): Returns: Deferred[dict[(str, str), str]|None]: Returns None if state_group - is None, which happens when the associated event is an outlier. + is None, which happens when the associated event is an outlier. + Maps a (type, state_key) to the event ID of the state event matching + this tuple. 
""" if not self._fetching_state_deferred: diff --git a/synapse/events/validator.py b/synapse/events/validator.py index 514273c792..711af512b2 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -15,8 +15,8 @@ from six import string_types -from synapse.api.constants import EventTypes, Membership -from synapse.api.errors import SynapseError +from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes, Membership +from synapse.api.errors import Codes, SynapseError from synapse.api.room_versions import EventFormatVersions from synapse.types import EventID, RoomID, UserID @@ -56,6 +56,17 @@ class EventValidator(object): if not isinstance(getattr(event, s), string_types): raise SynapseError(400, "'%s' not a string type" % (s,)) + if event.type == EventTypes.Aliases: + if "aliases" in event.content: + for alias in event.content["aliases"]: + if len(alias) > MAX_ALIAS_LENGTH: + raise SynapseError( + 400, + ("Can't create aliases longer than" + " %d characters" % (MAX_ALIAS_LENGTH,)), + Codes.INVALID_PARAM, + ) + def validate_builder(self, event): """Validates that the builder/event has roughly the right format. Only checks values that we expect a proto event to have, rather than all the diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 50c587aa61..a12f9508d8 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -19,7 +19,7 @@ import string from twisted.internet import defer -from synapse.api.constants import EventTypes +from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes from synapse.api.errors import ( AuthError, CodeMessageException, @@ -36,7 +36,6 @@ logger = logging.getLogger(__name__) class DirectoryHandler(BaseHandler): - MAX_ALIAS_LENGTH = 255 def __init__(self, hs): super(DirectoryHandler, self).__init__(hs) @@ -105,10 +104,10 @@ class DirectoryHandler(BaseHandler): user_id = requester.user.to_string() - if len(room_alias.to_string()) > self.MAX_ALIAS_LENGTH: + if len(room_alias.to_string()) > MAX_ALIAS_LENGTH: raise SynapseError( 400, - "Can't create aliases longer than %s characters" % self.MAX_ALIAS_LENGTH, + "Can't create aliases longer than %s characters" % MAX_ALIAS_LENGTH, Codes.INVALID_PARAM, ) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 224d34ef3a..e5afeadf68 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -228,6 +228,7 @@ class EventCreationHandler(object): self.ratelimiter = hs.get_ratelimiter() self.notifier = hs.get_notifier() self.config = hs.config + self.require_membership_for_aliases = hs.config.require_membership_for_aliases self.send_event_to_master = ReplicationSendEventRestServlet.make_client(hs) @@ -336,6 +337,35 @@ class EventCreationHandler(object): prev_events_and_hashes=prev_events_and_hashes, ) + # In an ideal world we wouldn't need the second part of this condition. However, + # this behaviour isn't spec'd yet, meaning we should be able to deactivate this + # behaviour. Another reason is that this code is also evaluated each time a new + # m.room.aliases event is created, which includes hitting a /directory route. + # Therefore not including this condition here would render the similar one in + # synapse.handlers.directory pointless. + if builder.type == EventTypes.Aliases and self.require_membership_for_aliases: + # Ideally we'd do the membership check in event_auth.check(), which + # describes a spec'd algorithm for authenticating events received over + # federation as well as those created locally. 
As of room v3, aliases events + # can be created by users that are not in the room, therefore we have to + # tolerate them in event_auth.check(). + prev_state_ids = yield context.get_prev_state_ids(self.store) + prev_event_id = prev_state_ids.get((EventTypes.Member, event.sender)) + prev_event = yield self.store.get_event(prev_event_id, allow_none=True) + if not prev_event or prev_event.membership != Membership.JOIN: + logger.warning( + ("Attempt to send `m.room.aliases` in room %s by user %s but" + " membership is %s"), + event.room_id, + event.sender, + prev_event.membership if prev_event else None, + ) + + raise AuthError( + 403, + "You must be in the room to create an alias for it", + ) + self.validator.validate_new(event) defer.returnValue((event, context)) diff --git a/tests/rest/client/v1/test_directory.py b/tests/rest/client/v1/test_directory.py new file mode 100644 index 0000000000..4804000ec7 --- /dev/null +++ b/tests/rest/client/v1/test_directory.py @@ -0,0 +1,169 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json + +from synapse.rest.admin import register_servlets +from synapse.rest.client.v1 import directory, login, room +from synapse.types import RoomAlias +from synapse.util.stringutils import random_string + +from tests import unittest + + +class DirectoryTestCase(unittest.HomeserverTestCase): + + servlets = [ + register_servlets, + directory.register_servlets, + login.register_servlets, + room.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + config = self.default_config() + config.require_membership_for_aliases = True + + self.hs = self.setup_test_homeserver(config=config) + + return self.hs + + def prepare(self, reactor, clock, homeserver): + self.room_owner = self.register_user("room_owner", "test") + self.room_owner_tok = self.login("room_owner", "test") + + self.room_id = self.helper.create_room_as( + self.room_owner, tok=self.room_owner_tok, + ) + + self.user = self.register_user("user", "test") + self.user_tok = self.login("user", "test") + + def test_state_event_not_in_room(self): + self.ensure_user_left_room() + self.set_alias_via_state_event(403) + + def test_directory_endpoint_not_in_room(self): + self.ensure_user_left_room() + self.set_alias_via_directory(403) + + def test_state_event_in_room_too_long(self): + self.ensure_user_joined_room() + self.set_alias_via_state_event(400, alias_length=256) + + def test_directory_in_room_too_long(self): + self.ensure_user_joined_room() + self.set_alias_via_directory(400, alias_length=256) + + def test_state_event_in_room(self): + self.ensure_user_joined_room() + self.set_alias_via_state_event(200) + + def test_directory_in_room(self): + self.ensure_user_joined_room() + self.set_alias_via_directory(200) + + def test_room_creation_too_long(self): + url = "/_matrix/client/r0/createRoom" + + # We use deliberately a localpart under the length threshold so + # that we can make sure that the check is done on the whole alias. 
+ data = { + "room_alias_name": random_string(256 - len(self.hs.hostname)), + } + request_data = json.dumps(data) + request, channel = self.make_request( + "POST", url, request_data, access_token=self.user_tok, + ) + self.render(request) + self.assertEqual(channel.code, 400, channel.result) + + def test_room_creation(self): + url = "/_matrix/client/r0/createRoom" + + # Check with an alias of allowed length. There should already be + # a test that ensures it works in test_register.py, but let's be + # as cautious as possible here. + data = { + "room_alias_name": random_string(5), + } + request_data = json.dumps(data) + request, channel = self.make_request( + "POST", url, request_data, access_token=self.user_tok, + ) + self.render(request) + self.assertEqual(channel.code, 200, channel.result) + + def set_alias_via_state_event(self, expected_code, alias_length=5): + url = ("/_matrix/client/r0/rooms/%s/state/m.room.aliases/%s" + % (self.room_id, self.hs.hostname)) + + data = { + "aliases": [ + self.random_alias(alias_length), + ], + } + request_data = json.dumps(data) + + request, channel = self.make_request( + "PUT", url, request_data, access_token=self.user_tok, + ) + self.render(request) + self.assertEqual(channel.code, expected_code, channel.result) + + def set_alias_via_directory(self, expected_code, alias_length=5): + url = "/_matrix/client/r0/directory/room/%s" % self.random_alias(alias_length) + data = { + "room_id": self.room_id, + } + request_data = json.dumps(data) + + request, channel = self.make_request( + "PUT", url, request_data, access_token=self.user_tok, + ) + self.render(request) + self.assertEqual(channel.code, expected_code, channel.result) + + def random_alias(self, length): + return RoomAlias( + random_string(length), + self.hs.hostname, + ).to_string() + + def ensure_user_left_room(self): + self.ensure_membership("leave") + + def ensure_user_joined_room(self): + self.ensure_membership("join") + + def ensure_membership(self, membership): + try: + if membership == "leave": + self.helper.leave( + room=self.room_id, + user=self.user, + tok=self.user_tok, + ) + if membership == "join": + self.helper.join( + room=self.room_id, + user=self.user, + tok=self.user_tok, + ) + except AssertionError: + # We don't care whether the leave request didn't return a 200 (e.g. + # if the user isn't already in the room), because we only want to + # make sure the user isn't in the room. + pass -- cgit 1.5.1 From c0e0740bef0db661abce352afaf6c958e276f11d Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Wed, 8 May 2019 18:26:56 +0100 Subject: add options to require an access_token to GET /profile and /publicRooms on CS API (#5083) This commit adds two config options: * `restrict_public_rooms_to_local_users` Requires auth to fetch the public rooms directory through the CS API and disables fetching it through the federation API. * `require_auth_for_profile_requests` When set to `true`, requires that requests to `/profile` over the CS API are authenticated, and only returns the user's profile if the requester shares a room with the profile's owner, as per MSC1301. MSC1301 also specifies a behaviour for federation (only returning the profile if the server asking for it shares a room with the profile's owner), but that's currently really non-trivial to do in a not too expensive way. Next step is writing down a MSC that allows a HS to specify which user sent the profile query. 
In this implementation, Synapse won't send a profile query over federation if it doesn't believe it already shares a room with the profile's owner, though. Groups have been intentionally omitted from this commit. --- changelog.d/5083.feature | 1 + docs/sample_config.yaml | 14 ++++++ synapse/config/server.py | 27 ++++++++++ synapse/federation/transport/server.py | 10 ++++ synapse/handlers/profile.py | 43 ++++++++++++++++ synapse/rest/client/v1/profile.py | 40 ++++++++++----- synapse/rest/client/v1/room.py | 6 +++ tests/rest/client/v1/test_profile.py | 92 +++++++++++++++++++++++++++++++++- tests/rest/client/v1/test_rooms.py | 32 ++++++++++++ 9 files changed, 252 insertions(+), 13 deletions(-) create mode 100644 changelog.d/5083.feature diff --git a/changelog.d/5083.feature b/changelog.d/5083.feature new file mode 100644 index 0000000000..55d114b3fe --- /dev/null +++ b/changelog.d/5083.feature @@ -0,0 +1 @@ +Add an configuration option to require authentication on /publicRooms and /profile endpoints. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index b6b9da6e41..bdfc34c6bd 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -69,6 +69,20 @@ pid_file: DATADIR/homeserver.pid # #use_presence: false +# Whether to require authentication to retrieve profile data (avatars, +# display names) of other users through the client API. Defaults to +# 'false'. Note that profile data is also available via the federation +# API, so this setting is of limited value if federation is enabled on +# the server. +# +#require_auth_for_profile_requests: true + +# If set to 'true', requires authentication to access the server's +# public rooms directory through the client API, and forbids any other +# homeserver to fetch it via federation. Defaults to 'false'. +# +#restrict_public_rooms_to_local_users: true + # The GC threshold parameters to pass to `gc.set_threshold`, if defined # #gc_thresholds: [700, 10, 10] diff --git a/synapse/config/server.py b/synapse/config/server.py index 147a976485..8dce75c56a 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -72,6 +72,19 @@ class ServerConfig(Config): # master, potentially causing inconsistency. self.enable_media_repo = config.get("enable_media_repo", True) + # Whether to require authentication to retrieve profile data (avatars, + # display names) of other users through the client API. + self.require_auth_for_profile_requests = config.get( + "require_auth_for_profile_requests", False, + ) + + # If set to 'True', requires authentication to access the server's + # public rooms directory through the client API, and forbids any other + # homeserver to fetch it via federation. + self.restrict_public_rooms_to_local_users = config.get( + "restrict_public_rooms_to_local_users", False, + ) + # whether to enable search. If disabled, new entries will not be inserted # into the search tables and they will not be indexed. Users will receive # errors when attempting to search for messages. @@ -327,6 +340,20 @@ class ServerConfig(Config): # #use_presence: false + # Whether to require authentication to retrieve profile data (avatars, + # display names) of other users through the client API. Defaults to + # 'false'. Note that profile data is also available via the federation + # API, so this setting is of limited value if federation is enabled on + # the server. 
+ # + #require_auth_for_profile_requests: true + + # If set to 'true', requires authentication to access the server's + # public rooms directory through the client API, and forbids any other + # homeserver to fetch it via federation. Defaults to 'false'. + # + #restrict_public_rooms_to_local_users: true + # The GC threshold parameters to pass to `gc.set_threshold`, if defined # #gc_thresholds: [700, 10, 10] diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 452599e1a1..9030eb18c5 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -716,8 +716,17 @@ class PublicRoomList(BaseFederationServlet): PATH = "/publicRooms" + def __init__(self, handler, authenticator, ratelimiter, server_name, deny_access): + super(PublicRoomList, self).__init__( + handler, authenticator, ratelimiter, server_name, + ) + self.deny_access = deny_access + @defer.inlineCallbacks def on_GET(self, origin, content, query): + if self.deny_access: + raise FederationDeniedError(origin) + limit = parse_integer_from_args(query, "limit", 0) since_token = parse_string_from_args(query, "since", None) include_all_networks = parse_boolean_from_args( @@ -1417,6 +1426,7 @@ def register_servlets(hs, resource, authenticator, ratelimiter, servlet_groups=N authenticator=authenticator, ratelimiter=ratelimiter, server_name=hs.hostname, + deny_access=hs.config.restrict_public_rooms_to_local_users, ).register(resource) if "group_server" in servlet_groups: diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index a65c98ff5c..91fc718ff8 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -53,6 +53,7 @@ class BaseProfileHandler(BaseHandler): @defer.inlineCallbacks def get_profile(self, user_id): target_user = UserID.from_string(user_id) + if self.hs.is_mine(target_user): try: displayname = yield self.store.get_profile_displayname( @@ -283,6 +284,48 @@ class BaseProfileHandler(BaseHandler): room_id, str(e) ) + @defer.inlineCallbacks + def check_profile_query_allowed(self, target_user, requester=None): + """Checks whether a profile query is allowed. If the + 'require_auth_for_profile_requests' config flag is set to True and a + 'requester' is provided, the query is only allowed if the two users + share a room. + + Args: + target_user (UserID): The owner of the queried profile. + requester (None|UserID): The user querying for the profile. + + Raises: + SynapseError(403): The two users share no room, or ne user couldn't + be found to be in any room the server is in, and therefore the query + is denied. + """ + # Implementation of MSC1301: don't allow looking up profiles if the + # requester isn't in the same room as the target. We expect requester to + # be None when this function is called outside of a profile query, e.g. + # when building a membership event. In this case, we must allow the + # lookup. + if not self.hs.config.require_auth_for_profile_requests or not requester: + return + + try: + requester_rooms = yield self.store.get_rooms_for_user( + requester.to_string() + ) + target_user_rooms = yield self.store.get_rooms_for_user( + target_user.to_string(), + ) + + # Check if the room lists have no elements in common. + if requester_rooms.isdisjoint(target_user_rooms): + raise SynapseError(403, "Profile isn't available", Codes.FORBIDDEN) + except StoreError as e: + if e.code == 404: + # This likely means that one of the users doesn't exist, + # so we act as if we couldn't find the profile. 
+ raise SynapseError(403, "Profile isn't available", Codes.FORBIDDEN) + raise + class MasterProfileHandler(BaseProfileHandler): PROFILE_UPDATE_MS = 60 * 1000 diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py index a23edd8fe5..eac1966c5e 100644 --- a/synapse/rest/client/v1/profile.py +++ b/synapse/rest/client/v1/profile.py @@ -31,11 +31,17 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, user_id): + requester_user = None + + if self.hs.config.require_auth_for_profile_requests: + requester = yield self.auth.get_user_by_req(request) + requester_user = requester.user + user = UserID.from_string(user_id) - displayname = yield self.profile_handler.get_displayname( - user, - ) + yield self.profile_handler.check_profile_query_allowed(user, requester_user) + + displayname = yield self.profile_handler.get_displayname(user) ret = {} if displayname is not None: @@ -74,11 +80,17 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, user_id): + requester_user = None + + if self.hs.config.require_auth_for_profile_requests: + requester = yield self.auth.get_user_by_req(request) + requester_user = requester.user + user = UserID.from_string(user_id) - avatar_url = yield self.profile_handler.get_avatar_url( - user, - ) + yield self.profile_handler.check_profile_query_allowed(user, requester_user) + + avatar_url = yield self.profile_handler.get_avatar_url(user) ret = {} if avatar_url is not None: @@ -116,14 +128,18 @@ class ProfileRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, user_id): + requester_user = None + + if self.hs.config.require_auth_for_profile_requests: + requester = yield self.auth.get_user_by_req(request) + requester_user = requester.user + user = UserID.from_string(user_id) - displayname = yield self.profile_handler.get_displayname( - user, - ) - avatar_url = yield self.profile_handler.get_avatar_url( - user, - ) + yield self.profile_handler.check_profile_query_allowed(user, requester_user) + + displayname = yield self.profile_handler.get_displayname(user) + avatar_url = yield self.profile_handler.get_avatar_url(user) ret = {} if displayname is not None: diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index 48da4d557f..fab04965cb 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -301,6 +301,12 @@ class PublicRoomListRestServlet(ClientV1RestServlet): try: yield self.auth.get_user_by_req(request, allow_guest=True) except AuthError as e: + # Option to allow servers to require auth when accessing + # /publicRooms via CS API. This is especially helpful in private + # federations. + if self.hs.config.restrict_public_rooms_to_local_users: + raise + # We allow people to not be authed if they're just looking at our # room list, but require auth when we proxy the request. 
# In both cases we call the auth function, as that has the side diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py index 1eab9c3bdb..5d13de11e6 100644 --- a/tests/rest/client/v1/test_profile.py +++ b/tests/rest/client/v1/test_profile.py @@ -20,7 +20,7 @@ from twisted.internet import defer import synapse.types from synapse.api.errors import AuthError, SynapseError -from synapse.rest.client.v1 import profile +from synapse.rest.client.v1 import admin, login, profile, room from tests import unittest @@ -42,6 +42,7 @@ class ProfileTestCase(unittest.TestCase): "set_displayname", "get_avatar_url", "set_avatar_url", + "check_profile_query_allowed", ] ) @@ -155,3 +156,92 @@ class ProfileTestCase(unittest.TestCase): self.assertEquals(mocked_set.call_args[0][0].localpart, "1234ABCD") self.assertEquals(mocked_set.call_args[0][1].user.localpart, "1234ABCD") self.assertEquals(mocked_set.call_args[0][2], "http://my.server/pic.gif") + + +class ProfilesRestrictedTestCase(unittest.HomeserverTestCase): + + servlets = [ + admin.register_servlets, + login.register_servlets, + profile.register_servlets, + room.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + + config = self.default_config() + config.require_auth_for_profile_requests = True + self.hs = self.setup_test_homeserver(config=config) + + return self.hs + + def prepare(self, reactor, clock, hs): + # User owning the requested profile. + self.owner = self.register_user("owner", "pass") + self.owner_tok = self.login("owner", "pass") + self.profile_url = "/profile/%s" % (self.owner) + + # User requesting the profile. + self.requester = self.register_user("requester", "pass") + self.requester_tok = self.login("requester", "pass") + + self.room_id = self.helper.create_room_as(self.owner, tok=self.owner_tok) + + def test_no_auth(self): + self.try_fetch_profile(401) + + def test_not_in_shared_room(self): + self.ensure_requester_left_room() + + self.try_fetch_profile(403, access_token=self.requester_tok) + + def test_in_shared_room(self): + self.ensure_requester_left_room() + + self.helper.join( + room=self.room_id, + user=self.requester, + tok=self.requester_tok, + ) + + self.try_fetch_profile(200, self.requester_tok) + + def try_fetch_profile(self, expected_code, access_token=None): + self.request_profile( + expected_code, + access_token=access_token + ) + + self.request_profile( + expected_code, + url_suffix="/displayname", + access_token=access_token, + ) + + self.request_profile( + expected_code, + url_suffix="/avatar_url", + access_token=access_token, + ) + + def request_profile(self, expected_code, url_suffix="", access_token=None): + request, channel = self.make_request( + "GET", + self.profile_url + url_suffix, + access_token=access_token, + ) + self.render(request) + self.assertEqual(channel.code, expected_code, channel.result) + + def ensure_requester_left_room(self): + try: + self.helper.leave( + room=self.room_id, + user=self.requester, + tok=self.requester_tok, + ) + except AssertionError: + # We don't care whether the leave request didn't return a 200 (e.g. + # if the user isn't already in the room), because we only want to + # make sure the user isn't in the room. 
+ pass diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py index 521ac80f9a..28fbf6ae52 100644 --- a/tests/rest/client/v1/test_rooms.py +++ b/tests/rest/client/v1/test_rooms.py @@ -904,3 +904,35 @@ class RoomSearchTestCase(unittest.HomeserverTestCase): self.assertEqual( context["profile_info"][self.other_user_id]["displayname"], "otheruser" ) + + +class PublicRoomsRestrictedTestCase(unittest.HomeserverTestCase): + + servlets = [ + admin.register_servlets, + room.register_servlets, + login.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + + self.url = b"/_matrix/client/r0/publicRooms" + + config = self.default_config() + config.restrict_public_rooms_to_local_users = True + self.hs = self.setup_test_homeserver(config=config) + + return self.hs + + def test_restricted_no_auth(self): + request, channel = self.make_request("GET", self.url) + self.render(request) + self.assertEqual(channel.code, 401, channel.result) + + def test_restricted_auth(self): + self.register_user("user", "pass") + tok = self.login("user", "pass") + + request, channel = self.make_request("GET", self.url, access_token=tok) + self.render(request) + self.assertEqual(channel.code, 200, channel.result) -- cgit 1.5.1 From d216a36b3703e42906e2242526784598642c8505 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 8 May 2019 21:57:03 +0100 Subject: Fix bogus imports in tests (#5154) --- changelog.d/5154.bugfix | 1 + tests/rest/client/v1/test_directory.py | 4 ++-- tests/rest/client/v1/test_profile.py | 5 +++-- tests/rest/client/v1/test_rooms.py | 2 +- 4 files changed, 7 insertions(+), 5 deletions(-) create mode 100644 changelog.d/5154.bugfix diff --git a/changelog.d/5154.bugfix b/changelog.d/5154.bugfix new file mode 100644 index 0000000000..c7bd5ee6cf --- /dev/null +++ b/changelog.d/5154.bugfix @@ -0,0 +1 @@ +Fix bogus imports in unit tests. 
diff --git a/tests/rest/client/v1/test_directory.py b/tests/rest/client/v1/test_directory.py index 4804000ec7..f63c68e7ed 100644 --- a/tests/rest/client/v1/test_directory.py +++ b/tests/rest/client/v1/test_directory.py @@ -15,7 +15,7 @@ import json -from synapse.rest.admin import register_servlets +from synapse.rest import admin from synapse.rest.client.v1 import directory, login, room from synapse.types import RoomAlias from synapse.util.stringutils import random_string @@ -26,7 +26,7 @@ from tests import unittest class DirectoryTestCase(unittest.HomeserverTestCase): servlets = [ - register_servlets, + admin.register_servlets_for_client_rest_resource, directory.register_servlets, login.register_servlets, room.register_servlets, diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py index 5d13de11e6..7306e61b7c 100644 --- a/tests/rest/client/v1/test_profile.py +++ b/tests/rest/client/v1/test_profile.py @@ -20,7 +20,8 @@ from twisted.internet import defer import synapse.types from synapse.api.errors import AuthError, SynapseError -from synapse.rest.client.v1 import admin, login, profile, room +from synapse.rest import admin +from synapse.rest.client.v1 import login, profile, room from tests import unittest @@ -161,7 +162,7 @@ class ProfileTestCase(unittest.TestCase): class ProfilesRestrictedTestCase(unittest.HomeserverTestCase): servlets = [ - admin.register_servlets, + admin.register_servlets_for_client_rest_resource, login.register_servlets, profile.register_servlets, room.register_servlets, diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py index 28fbf6ae52..9b191436cc 100644 --- a/tests/rest/client/v1/test_rooms.py +++ b/tests/rest/client/v1/test_rooms.py @@ -909,7 +909,7 @@ class RoomSearchTestCase(unittest.HomeserverTestCase): class PublicRoomsRestrictedTestCase(unittest.HomeserverTestCase): servlets = [ - admin.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, login.register_servlets, ] -- cgit 1.5.1 From 11ea16777f52264f0a1d6f4db5b7db5c6c147523 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 9 May 2019 12:01:41 +0200 Subject: Limit the number of EDUs in transactions to 100 as expected by receiver (#5138) Fixes #3951. --- changelog.d/5138.bugfix | 1 + synapse/federation/sender/per_destination_queue.py | 56 ++++++++++++---------- synapse/storage/deviceinbox.py | 2 +- 3 files changed, 32 insertions(+), 27 deletions(-) create mode 100644 changelog.d/5138.bugfix diff --git a/changelog.d/5138.bugfix b/changelog.d/5138.bugfix new file mode 100644 index 0000000000..c1945a8eb2 --- /dev/null +++ b/changelog.d/5138.bugfix @@ -0,0 +1 @@ +Limit the number of EDUs in transactions to 100 as expected by synapse. Thanks to @superboum for this work! diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py index be99211003..4d96f026c6 100644 --- a/synapse/federation/sender/per_destination_queue.py +++ b/synapse/federation/sender/per_destination_queue.py @@ -33,6 +33,9 @@ from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage import UserPresenceState from synapse.util.retryutils import NotRetryingDestination, get_retry_limiter +# This is defined in the Matrix spec and enforced by the receiver. 
+MAX_EDUS_PER_TRANSACTION = 100 + logger = logging.getLogger(__name__) @@ -197,7 +200,8 @@ class PerDestinationQueue(object): pending_pdus = [] while True: device_message_edus, device_stream_id, dev_list_id = ( - yield self._get_new_device_messages() + # We have to keep 2 free slots for presence and rr_edus + yield self._get_new_device_messages(MAX_EDUS_PER_TRANSACTION - 2) ) # BEGIN CRITICAL SECTION @@ -216,19 +220,9 @@ class PerDestinationQueue(object): pending_edus = [] - pending_edus.extend(self._get_rr_edus(force_flush=False)) - # We can only include at most 100 EDUs per transactions - pending_edus.extend(self._pop_pending_edus(100 - len(pending_edus))) - - pending_edus.extend( - self._pending_edus_keyed.values() - ) - - self._pending_edus_keyed = {} - - pending_edus.extend(device_message_edus) - + # rr_edus and pending_presence take at most one slot each + pending_edus.extend(self._get_rr_edus(force_flush=False)) pending_presence = self._pending_presence self._pending_presence = {} if pending_presence: @@ -248,6 +242,12 @@ class PerDestinationQueue(object): ) ) + pending_edus.extend(device_message_edus) + pending_edus.extend(self._pop_pending_edus(MAX_EDUS_PER_TRANSACTION - len(pending_edus))) + while len(pending_edus) < MAX_EDUS_PER_TRANSACTION and self._pending_edus_keyed: + _, val = self._pending_edus_keyed.popitem() + pending_edus.append(val) + if pending_pdus: logger.debug("TX [%s] len(pending_pdus_by_dest[dest]) = %d", self._destination, len(pending_pdus)) @@ -259,7 +259,7 @@ class PerDestinationQueue(object): # if we've decided to send a transaction anyway, and we have room, we # may as well send any pending RRs - if len(pending_edus) < 100: + if len(pending_edus) < MAX_EDUS_PER_TRANSACTION: pending_edus.extend(self._get_rr_edus(force_flush=True)) # END CRITICAL SECTION @@ -346,33 +346,37 @@ class PerDestinationQueue(object): return pending_edus @defer.inlineCallbacks - def _get_new_device_messages(self): - last_device_stream_id = self._last_device_stream_id - to_device_stream_id = self._store.get_to_device_stream_token() - contents, stream_id = yield self._store.get_new_device_msgs_for_remote( - self._destination, last_device_stream_id, to_device_stream_id + def _get_new_device_messages(self, limit): + last_device_list = self._last_device_list_stream_id + # Will return at most 20 entries + now_stream_id, results = yield self._store.get_devices_by_remote( + self._destination, last_device_list ) edus = [ Edu( origin=self._server_name, destination=self._destination, - edu_type="m.direct_to_device", + edu_type="m.device_list_update", content=content, ) - for content in contents + for content in results ] - last_device_list = self._last_device_list_stream_id - now_stream_id, results = yield self._store.get_devices_by_remote( - self._destination, last_device_list + assert len(edus) <= limit, "get_devices_by_remote returned too many EDUs" + + last_device_stream_id = self._last_device_stream_id + to_device_stream_id = self._store.get_to_device_stream_token() + contents, stream_id = yield self._store.get_new_device_msgs_for_remote( + self._destination, last_device_stream_id, to_device_stream_id, limit - len(edus) ) edus.extend( Edu( origin=self._server_name, destination=self._destination, - edu_type="m.device_list_update", + edu_type="m.direct_to_device", content=content, ) - for content in results + for content in contents ) + defer.returnValue((edus, stream_id, now_stream_id)) diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py index fed4ea3610..9b0a99cb49 
100644 --- a/synapse/storage/deviceinbox.py +++ b/synapse/storage/deviceinbox.py @@ -118,7 +118,7 @@ class DeviceInboxWorkerStore(SQLBaseStore): defer.returnValue(count) def get_new_device_msgs_for_remote( - self, destination, last_stream_id, current_stream_id, limit=100 + self, destination, last_stream_id, current_stream_id, limit ): """ Args: -- cgit 1.5.1 From 130f932cbc5ca5477af263c5290e0c9a51b5150c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 9 May 2019 16:27:02 +0100 Subject: Run `black` on per_destination_queue ... mostly to fix pep8 fails --- synapse/federation/sender/per_destination_queue.py | 74 ++++++++++++---------- 1 file changed, 39 insertions(+), 35 deletions(-) diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py index 4d96f026c6..fae8bea392 100644 --- a/synapse/federation/sender/per_destination_queue.py +++ b/synapse/federation/sender/per_destination_queue.py @@ -40,8 +40,7 @@ logger = logging.getLogger(__name__) sent_edus_counter = Counter( - "synapse_federation_client_sent_edus", - "Total number of EDUs successfully sent", + "synapse_federation_client_sent_edus", "Total number of EDUs successfully sent" ) sent_edus_by_type = Counter( @@ -61,6 +60,7 @@ class PerDestinationQueue(object): destination (str): the server_name of the destination that we are managing transmission for. """ + def __init__(self, hs, transaction_manager, destination): self._server_name = hs.hostname self._clock = hs.get_clock() @@ -71,17 +71,17 @@ class PerDestinationQueue(object): self.transmission_loop_running = False # a list of tuples of (pending pdu, order) - self._pending_pdus = [] # type: list[tuple[EventBase, int]] - self._pending_edus = [] # type: list[Edu] + self._pending_pdus = [] # type: list[tuple[EventBase, int]] + self._pending_edus = [] # type: list[Edu] # Pending EDUs by their "key". Keyed EDUs are EDUs that get clobbered # based on their key (e.g. typing events by room_id) # Map of (edu_type, key) -> Edu - self._pending_edus_keyed = {} # type: dict[tuple[str, str], Edu] + self._pending_edus_keyed = {} # type: dict[tuple[str, str], Edu] # Map of user_id -> UserPresenceState of pending presence to be sent to this # destination - self._pending_presence = {} # type: dict[str, UserPresenceState] + self._pending_presence = {} # type: dict[str, UserPresenceState] # room_id -> receipt_type -> user_id -> receipt_dict self._pending_rrs = {} @@ -123,9 +123,7 @@ class PerDestinationQueue(object): Args: states (iterable[UserPresenceState]): presence to send """ - self._pending_presence.update({ - state.user_id: state for state in states - }) + self._pending_presence.update({state.user_id: state for state in states}) self.attempt_new_transaction() def queue_read_receipt(self, receipt): @@ -135,14 +133,9 @@ class PerDestinationQueue(object): Args: receipt (synapse.api.receipt_info.ReceiptInfo): receipt to be queued """ - self._pending_rrs.setdefault( - receipt.room_id, {}, - ).setdefault( + self._pending_rrs.setdefault(receipt.room_id, {}).setdefault( receipt.receipt_type, {} - )[receipt.user_id] = { - "event_ids": receipt.event_ids, - "data": receipt.data, - } + )[receipt.user_id] = {"event_ids": receipt.event_ids, "data": receipt.data} def flush_read_receipts_for_room(self, room_id): # if we don't have any read-receipts for this room, it may be that we've already @@ -173,10 +166,7 @@ class PerDestinationQueue(object): # request at which point pending_pdus just keeps growing. 
# we need application-layer timeouts of some flavour of these # requests - logger.debug( - "TX [%s] Transaction already in progress", - self._destination - ) + logger.debug("TX [%s] Transaction already in progress", self._destination) return logger.debug("TX [%s] Starting transaction loop", self._destination) @@ -243,14 +233,22 @@ class PerDestinationQueue(object): ) pending_edus.extend(device_message_edus) - pending_edus.extend(self._pop_pending_edus(MAX_EDUS_PER_TRANSACTION - len(pending_edus))) - while len(pending_edus) < MAX_EDUS_PER_TRANSACTION and self._pending_edus_keyed: + pending_edus.extend( + self._pop_pending_edus(MAX_EDUS_PER_TRANSACTION - len(pending_edus)) + ) + while ( + len(pending_edus) < MAX_EDUS_PER_TRANSACTION + and self._pending_edus_keyed + ): _, val = self._pending_edus_keyed.popitem() pending_edus.append(val) if pending_pdus: - logger.debug("TX [%s] len(pending_pdus_by_dest[dest]) = %d", - self._destination, len(pending_pdus)) + logger.debug( + "TX [%s] len(pending_pdus_by_dest[dest]) = %d", + self._destination, + len(pending_pdus), + ) if not pending_pdus and not pending_edus: logger.debug("TX [%s] Nothing to send", self._destination) @@ -303,22 +301,25 @@ class PerDestinationQueue(object): except HttpResponseException as e: logger.warning( "TX [%s] Received %d response to transaction: %s", - self._destination, e.code, e, + self._destination, + e.code, + e, ) except RequestSendFailed as e: - logger.warning("TX [%s] Failed to send transaction: %s", self._destination, e) + logger.warning( + "TX [%s] Failed to send transaction: %s", self._destination, e + ) for p, _ in pending_pdus: - logger.info("Failed to send event %s to %s", p.event_id, - self._destination) + logger.info( + "Failed to send event %s to %s", p.event_id, self._destination + ) except Exception: - logger.exception( - "TX [%s] Failed to send transaction", - self._destination, - ) + logger.exception("TX [%s] Failed to send transaction", self._destination) for p, _ in pending_pdus: - logger.info("Failed to send event %s to %s", p.event_id, - self._destination) + logger.info( + "Failed to send event %s to %s", p.event_id, self._destination + ) finally: # We want to be *very* sure we clear this after we stop processing self.transmission_loop_running = False @@ -367,7 +368,10 @@ class PerDestinationQueue(object): last_device_stream_id = self._last_device_stream_id to_device_stream_id = self._store.get_to_device_stream_token() contents, stream_id = yield self._store.get_new_device_msgs_for_remote( - self._destination, last_device_stream_id, to_device_stream_id, limit - len(edus) + self._destination, + last_device_stream_id, + to_device_stream_id, + limit - len(edus), ) edus.extend( Edu( -- cgit 1.5.1 From 84cebb89cc90bc5f03ce0f586d457f1a2226b34c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 9 May 2019 22:53:46 +0100 Subject: remove instructions for jessie installation (#5164) We don't ship jessie packages, so these were a bit misleading. --- INSTALL.md | 21 +++++---------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/INSTALL.md b/INSTALL.md index 1032a2c68e..b88d826f6c 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -257,9 +257,8 @@ https://github.com/spantaleev/matrix-docker-ansible-deploy #### Matrix.org packages Matrix.org provides Debian/Ubuntu packages of the latest stable version of -Synapse via https://packages.matrix.org/debian/. 
To use them: - -For Debian 9 (Stretch), Ubuntu 16.04 (Xenial), and later: +Synapse via https://packages.matrix.org/debian/. They are available for Debian +9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them: ``` sudo apt install -y lsb-release wget apt-transport-https @@ -270,19 +269,6 @@ sudo apt update sudo apt install matrix-synapse-py3 ``` -For Debian 8 (Jessie): - -``` -sudo apt install -y lsb-release wget apt-transport-https -sudo wget -O /etc/apt/trusted.gpg.d/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg -echo "deb [signed-by=5586CCC0CBBBEFC7A25811ADF473DD4473365DE1] https://packages.matrix.org/debian/ $(lsb_release -cs) main" | - sudo tee /etc/apt/sources.list.d/matrix-org.list -sudo apt update -sudo apt install matrix-synapse-py3 -``` - -The fingerprint of the repository signing key is AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058. - **Note**: if you followed a previous version of these instructions which recommended using `apt-key add` to add an old key from `https://matrix.org/packages/debian/`, you should note that this key has been @@ -290,6 +276,9 @@ revoked. You should remove the old key with `sudo apt-key remove C35EB17E1EAE708E6603A9B3AD0592FE47F0DF61`, and follow the above instructions to update your configuration. +The fingerprint of the repository signing key (as shown by `gpg +/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is +`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`. #### Downstream Debian/Ubuntu packages -- cgit 1.5.1 From d9a02d1201d030dc846e194ecb6d4c5e4619ed83 Mon Sep 17 00:00:00 2001 From: colonelkrud Date: Thu, 9 May 2019 18:27:04 -0400 Subject: Add AllowEncodedSlashes to apache (#5068) * Add AllowEncodedSlashes to apache Add `AllowEncodedSlashes On` to apache config to support encoding for v3 rooms. "The AllowEncodedSlashes setting is not inherited by virtual hosts, and virtual hosts are used in many default Apache configurations, such as the one in Ubuntu. The workaround is to add the AllowEncodedSlashes setting inside a container (/etc/apache2/sites-available/default in Ubuntu)." 
Source: https://stackoverflow.com/questions/4390436/need-to-allow-encoded-slashes-on-apache * change allowencodedslashes to nodecode --- docs/reverse_proxy.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reverse_proxy.rst b/docs/reverse_proxy.rst index cc81ceb84b..7619b1097b 100644 --- a/docs/reverse_proxy.rst +++ b/docs/reverse_proxy.rst @@ -69,6 +69,7 @@ Let's assume that we expect clients to connect to our server at SSLEngine on ServerName matrix.example.com; + AllowEncodedSlashes NoDecode ProxyPass /_matrix http://127.0.0.1:8008/_matrix nocanon ProxyPassReverse /_matrix http://127.0.0.1:8008/_matrix @@ -77,6 +78,7 @@ Let's assume that we expect clients to connect to our server at SSLEngine on ServerName example.com; + AllowEncodedSlashes NoDecode ProxyPass /_matrix http://127.0.0.1:8008/_matrix nocanon ProxyPassReverse /_matrix http://127.0.0.1:8008/_matrix -- cgit 1.5.1 From b36c82576e3bb7ea72600ecf0e80c904ccf47d1d Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 10 May 2019 00:12:11 -0500 Subject: Run Black on the tests again (#5170) --- changelog.d/5170.misc | 1 + tests/api/test_filtering.py | 1 - tests/api/test_ratelimiting.py | 6 +- tests/app/test_openid_listener.py | 46 ++-- tests/config/test_generate.py | 8 +- tests/config/test_room_directory.py | 178 +++++++------ tests/config/test_server.py | 1 - tests/config/test_tls.py | 11 +- tests/crypto/test_keyring.py | 3 +- tests/federation/test_federation_sender.py | 113 +++++---- tests/handlers/test_directory.py | 24 +- tests/handlers/test_e2e_room_keys.py | 277 +++++++++++---------- tests/handlers/test_presence.py | 14 +- tests/handlers/test_typing.py | 152 +++++------ tests/handlers/test_user_directory.py | 8 +- tests/http/__init__.py | 6 +- .../federation/test_matrix_federation_agent.py | 212 ++++++---------- tests/http/federation/test_srv_resolver.py | 36 +-- tests/http/test_fedclient.py | 31 +-- tests/patch_inline_callbacks.py | 16 +- tests/replication/slave/storage/_base.py | 15 +- tests/replication/slave/storage/test_events.py | 19 +- tests/replication/tcp/streams/_base.py | 6 +- tests/rest/admin/test_admin.py | 103 +++----- tests/rest/client/test_identity.py | 8 +- tests/rest/client/v1/test_directory.py | 53 ++-- tests/rest/client/v1/test_login.py | 36 +-- tests/rest/client/v1/test_profile.py | 27 +- tests/rest/client/v2_alpha/test_register.py | 82 +++--- tests/rest/media/v1/test_base.py | 14 +- tests/rest/test_well_known.py | 13 +- tests/server.py | 7 +- .../test_resource_limits_server_notices.py | 1 - tests/state/test_v2.py | 204 +++------------ tests/storage/test_background_update.py | 4 +- tests/storage/test_base.py | 5 +- tests/storage/test_end_to_end_keys.py | 1 - tests/storage/test_monthly_active_users.py | 17 +- tests/storage/test_redaction.py | 6 +- tests/storage/test_registration.py | 2 +- tests/storage/test_roommember.py | 2 +- tests/storage/test_state.py | 83 +++--- tests/storage/test_user_directory.py | 4 +- tests/test_event_auth.py | 8 +- tests/test_federation.py | 1 - tests/test_mau.py | 8 +- tests/test_metrics.py | 56 +++-- tests/test_terms_auth.py | 8 +- tests/test_types.py | 6 +- tests/test_utils/logging_setup.py | 4 +- tests/test_visibility.py | 6 +- tests/unittest.py | 13 +- tests/util/test_async_utils.py | 23 +- tests/utils.py | 9 +- 54 files changed, 829 insertions(+), 1169 deletions(-) create mode 100644 changelog.d/5170.misc diff --git a/changelog.d/5170.misc b/changelog.d/5170.misc new file mode 100644 index 0000000000..7919dac555 --- /dev/null +++ b/changelog.d/5170.misc @@ -0,0 +1 
@@ +Run `black` on the tests directory. diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index 2a7044801a..6ba623de13 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -109,7 +109,6 @@ class FilteringTestCase(unittest.TestCase): "event_format": "client", "event_fields": ["type", "content", "sender"], }, - # a single backslash should be permitted (though it is debatable whether # it should be permitted before anything other than `.`, and what that # actually means) diff --git a/tests/api/test_ratelimiting.py b/tests/api/test_ratelimiting.py index 30a255d441..dbdd427cac 100644 --- a/tests/api/test_ratelimiting.py +++ b/tests/api/test_ratelimiting.py @@ -10,19 +10,19 @@ class TestRatelimiter(unittest.TestCase): key="test_id", time_now_s=0, rate_hz=0.1, burst_count=1 ) self.assertTrue(allowed) - self.assertEquals(10., time_allowed) + self.assertEquals(10.0, time_allowed) allowed, time_allowed = limiter.can_do_action( key="test_id", time_now_s=5, rate_hz=0.1, burst_count=1 ) self.assertFalse(allowed) - self.assertEquals(10., time_allowed) + self.assertEquals(10.0, time_allowed) allowed, time_allowed = limiter.can_do_action( key="test_id", time_now_s=10, rate_hz=0.1, burst_count=1 ) self.assertTrue(allowed) - self.assertEquals(20., time_allowed) + self.assertEquals(20.0, time_allowed) def test_pruning(self): limiter = Ratelimiter() diff --git a/tests/app/test_openid_listener.py b/tests/app/test_openid_listener.py index 590abc1e92..48792d1480 100644 --- a/tests/app/test_openid_listener.py +++ b/tests/app/test_openid_listener.py @@ -25,16 +25,18 @@ from tests.unittest import HomeserverTestCase class FederationReaderOpenIDListenerTests(HomeserverTestCase): def make_homeserver(self, reactor, clock): hs = self.setup_test_homeserver( - http_client=None, homeserverToUse=FederationReaderServer, + http_client=None, homeserverToUse=FederationReaderServer ) return hs - @parameterized.expand([ - (["federation"], "auth_fail"), - ([], "no_resource"), - (["openid", "federation"], "auth_fail"), - (["openid"], "auth_fail"), - ]) + @parameterized.expand( + [ + (["federation"], "auth_fail"), + ([], "no_resource"), + (["openid", "federation"], "auth_fail"), + (["openid"], "auth_fail"), + ] + ) def test_openid_listener(self, names, expectation): """ Test different openid listener configurations. 
@@ -53,17 +55,14 @@ class FederationReaderOpenIDListenerTests(HomeserverTestCase): # Grab the resource from the site that was told to listen site = self.reactor.tcpServers[0][1] try: - self.resource = ( - site.resource.children[b"_matrix"].children[b"federation"] - ) + self.resource = site.resource.children[b"_matrix"].children[b"federation"] except KeyError: if expectation == "no_resource": return raise request, channel = self.make_request( - "GET", - "/_matrix/federation/v1/openid/userinfo", + "GET", "/_matrix/federation/v1/openid/userinfo" ) self.render(request) @@ -74,16 +73,18 @@ class FederationReaderOpenIDListenerTests(HomeserverTestCase): class SynapseHomeserverOpenIDListenerTests(HomeserverTestCase): def make_homeserver(self, reactor, clock): hs = self.setup_test_homeserver( - http_client=None, homeserverToUse=SynapseHomeServer, + http_client=None, homeserverToUse=SynapseHomeServer ) return hs - @parameterized.expand([ - (["federation"], "auth_fail"), - ([], "no_resource"), - (["openid", "federation"], "auth_fail"), - (["openid"], "auth_fail"), - ]) + @parameterized.expand( + [ + (["federation"], "auth_fail"), + ([], "no_resource"), + (["openid", "federation"], "auth_fail"), + (["openid"], "auth_fail"), + ] + ) def test_openid_listener(self, names, expectation): """ Test different openid listener configurations. @@ -102,17 +103,14 @@ class SynapseHomeserverOpenIDListenerTests(HomeserverTestCase): # Grab the resource from the site that was told to listen site = self.reactor.tcpServers[0][1] try: - self.resource = ( - site.resource.children[b"_matrix"].children[b"federation"] - ) + self.resource = site.resource.children[b"_matrix"].children[b"federation"] except KeyError: if expectation == "no_resource": return raise request, channel = self.make_request( - "GET", - "/_matrix/federation/v1/openid/userinfo", + "GET", "/_matrix/federation/v1/openid/userinfo" ) self.render(request) diff --git a/tests/config/test_generate.py b/tests/config/test_generate.py index 795b4c298d..5017cbce85 100644 --- a/tests/config/test_generate.py +++ b/tests/config/test_generate.py @@ -45,13 +45,7 @@ class ConfigGenerationTestCase(unittest.TestCase): ) self.assertSetEqual( - set( - [ - "homeserver.yaml", - "lemurs.win.log.config", - "lemurs.win.signing.key", - ] - ), + set(["homeserver.yaml", "lemurs.win.log.config", "lemurs.win.signing.key"]), set(os.listdir(self.dir)), ) diff --git a/tests/config/test_room_directory.py b/tests/config/test_room_directory.py index 47fffcfeb2..0ec10019b3 100644 --- a/tests/config/test_room_directory.py +++ b/tests/config/test_room_directory.py @@ -22,7 +22,8 @@ from tests import unittest class RoomDirectoryConfigTestCase(unittest.TestCase): def test_alias_creation_acl(self): - config = yaml.safe_load(""" + config = yaml.safe_load( + """ alias_creation_rules: - user_id: "*bob*" alias: "*" @@ -38,43 +39,49 @@ class RoomDirectoryConfigTestCase(unittest.TestCase): action: "allow" room_list_publication_rules: [] - """) + """ + ) rd_config = RoomDirectoryConfig() rd_config.read_config(config) - self.assertFalse(rd_config.is_alias_creation_allowed( - user_id="@bob:example.com", - room_id="!test", - alias="#test:example.com", - )) - - self.assertTrue(rd_config.is_alias_creation_allowed( - user_id="@test:example.com", - room_id="!test", - alias="#unofficial_st:example.com", - )) - - self.assertTrue(rd_config.is_alias_creation_allowed( - user_id="@foobar:example.com", - room_id="!test", - alias="#test:example.com", - )) - - self.assertTrue(rd_config.is_alias_creation_allowed( - 
user_id="@gah:example.com", - room_id="!test", - alias="#goo:example.com", - )) - - self.assertFalse(rd_config.is_alias_creation_allowed( - user_id="@test:example.com", - room_id="!test", - alias="#test:example.com", - )) + self.assertFalse( + rd_config.is_alias_creation_allowed( + user_id="@bob:example.com", room_id="!test", alias="#test:example.com" + ) + ) + + self.assertTrue( + rd_config.is_alias_creation_allowed( + user_id="@test:example.com", + room_id="!test", + alias="#unofficial_st:example.com", + ) + ) + + self.assertTrue( + rd_config.is_alias_creation_allowed( + user_id="@foobar:example.com", + room_id="!test", + alias="#test:example.com", + ) + ) + + self.assertTrue( + rd_config.is_alias_creation_allowed( + user_id="@gah:example.com", room_id="!test", alias="#goo:example.com" + ) + ) + + self.assertFalse( + rd_config.is_alias_creation_allowed( + user_id="@test:example.com", room_id="!test", alias="#test:example.com" + ) + ) def test_room_publish_acl(self): - config = yaml.safe_load(""" + config = yaml.safe_load( + """ alias_creation_rules: [] room_list_publication_rules: @@ -92,55 +99,66 @@ class RoomDirectoryConfigTestCase(unittest.TestCase): action: "allow" - room_id: "!test-deny" action: "deny" - """) + """ + ) rd_config = RoomDirectoryConfig() rd_config.read_config(config) - self.assertFalse(rd_config.is_publishing_room_allowed( - user_id="@bob:example.com", - room_id="!test", - aliases=["#test:example.com"], - )) - - self.assertTrue(rd_config.is_publishing_room_allowed( - user_id="@test:example.com", - room_id="!test", - aliases=["#unofficial_st:example.com"], - )) - - self.assertTrue(rd_config.is_publishing_room_allowed( - user_id="@foobar:example.com", - room_id="!test", - aliases=[], - )) - - self.assertTrue(rd_config.is_publishing_room_allowed( - user_id="@gah:example.com", - room_id="!test", - aliases=["#goo:example.com"], - )) - - self.assertFalse(rd_config.is_publishing_room_allowed( - user_id="@test:example.com", - room_id="!test", - aliases=["#test:example.com"], - )) - - self.assertTrue(rd_config.is_publishing_room_allowed( - user_id="@foobar:example.com", - room_id="!test-deny", - aliases=[], - )) - - self.assertFalse(rd_config.is_publishing_room_allowed( - user_id="@gah:example.com", - room_id="!test-deny", - aliases=[], - )) - - self.assertTrue(rd_config.is_publishing_room_allowed( - user_id="@test:example.com", - room_id="!test", - aliases=["#unofficial_st:example.com", "#blah:example.com"], - )) + self.assertFalse( + rd_config.is_publishing_room_allowed( + user_id="@bob:example.com", + room_id="!test", + aliases=["#test:example.com"], + ) + ) + + self.assertTrue( + rd_config.is_publishing_room_allowed( + user_id="@test:example.com", + room_id="!test", + aliases=["#unofficial_st:example.com"], + ) + ) + + self.assertTrue( + rd_config.is_publishing_room_allowed( + user_id="@foobar:example.com", room_id="!test", aliases=[] + ) + ) + + self.assertTrue( + rd_config.is_publishing_room_allowed( + user_id="@gah:example.com", + room_id="!test", + aliases=["#goo:example.com"], + ) + ) + + self.assertFalse( + rd_config.is_publishing_room_allowed( + user_id="@test:example.com", + room_id="!test", + aliases=["#test:example.com"], + ) + ) + + self.assertTrue( + rd_config.is_publishing_room_allowed( + user_id="@foobar:example.com", room_id="!test-deny", aliases=[] + ) + ) + + self.assertFalse( + rd_config.is_publishing_room_allowed( + user_id="@gah:example.com", room_id="!test-deny", aliases=[] + ) + ) + + self.assertTrue( + rd_config.is_publishing_room_allowed( + 
user_id="@test:example.com", + room_id="!test", + aliases=["#unofficial_st:example.com", "#blah:example.com"], + ) + ) diff --git a/tests/config/test_server.py b/tests/config/test_server.py index f5836d73ac..de64965a60 100644 --- a/tests/config/test_server.py +++ b/tests/config/test_server.py @@ -19,7 +19,6 @@ from tests import unittest class ServerConfigTestCase(unittest.TestCase): - def test_is_threepid_reserved(self): user1 = {'medium': 'email', 'address': 'user1@example.com'} user2 = {'medium': 'email', 'address': 'user2@example.com'} diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py index c260d3359f..40ca428778 100644 --- a/tests/config/test_tls.py +++ b/tests/config/test_tls.py @@ -26,7 +26,6 @@ class TestConfig(TlsConfig): class TLSConfigTests(TestCase): - def test_warn_self_signed(self): """ Synapse will give a warning when it loads a self-signed certificate. @@ -34,7 +33,8 @@ class TLSConfigTests(TestCase): config_dir = self.mktemp() os.mkdir(config_dir) with open(os.path.join(config_dir, "cert.pem"), 'w') as f: - f.write("""-----BEGIN CERTIFICATE----- + f.write( + """-----BEGIN CERTIFICATE----- MIID6DCCAtACAws9CjANBgkqhkiG9w0BAQUFADCBtzELMAkGA1UEBhMCVFIxDzAN BgNVBAgMBsOHb3J1bTEUMBIGA1UEBwwLQmHFn21ha8OnxLExEjAQBgNVBAMMCWxv Y2FsaG9zdDEcMBoGA1UECgwTVHdpc3RlZCBNYXRyaXggTGFiczEkMCIGA1UECwwb @@ -56,11 +56,12 @@ I8OtG1xGwcok53lyDuuUUDexnK4O5BkjKiVlNPg4HPim5Kuj2hRNFfNt/F2BVIlj iZupikC5MT1LQaRwidkSNxCku1TfAyueiBwhLnFwTmIGNnhuDCutEVAD9kFmcJN2 SznugAcPk4doX2+rL+ila+ThqgPzIkwTUHtnmjI0TI6xsDUlXz5S3UyudrE2Qsfz s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg= ------END CERTIFICATE-----""") +-----END CERTIFICATE-----""" + ) config = { "tls_certificate_path": os.path.join(config_dir, "cert.pem"), - "tls_fingerprints": [] + "tls_fingerprints": [], } t = TestConfig() @@ -75,5 +76,5 @@ s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg= "Self-signed TLS certificates will not be accepted by " "Synapse 1.0. Please either provide a valid certificate, " "or use Synapse's ACME support to provision one." - ) + ), ) diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index f5bd7a1aa1..3c79d4afe7 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -169,7 +169,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): self.http_client.post_json.return_value = defer.Deferred() res_deferreds_2 = kr.verify_json_objects_for_server( - [("server10", json1, )] + [("server10", json1)] ) res_deferreds_2[0].addBoth(self.check_context, None) yield logcontext.make_deferred_yieldable(res_deferreds_2[0]) @@ -345,6 +345,7 @@ def _verify_json_for_server(keyring, server_name, json_object): """thin wrapper around verify_json_for_server which makes sure it is wrapped with the patched defer.inlineCallbacks. 
""" + @defer.inlineCallbacks def v(): rv1 = yield keyring.verify_json_for_server(server_name, json_object) diff --git a/tests/federation/test_federation_sender.py b/tests/federation/test_federation_sender.py index 28e7e27416..7bb106b5f7 100644 --- a/tests/federation/test_federation_sender.py +++ b/tests/federation/test_federation_sender.py @@ -33,11 +33,15 @@ class FederationSenderTestCases(HomeserverTestCase): mock_state_handler = self.hs.get_state_handler() mock_state_handler.get_current_hosts_in_room.return_value = ["test", "host2"] - mock_send_transaction = self.hs.get_federation_transport_client().send_transaction + mock_send_transaction = ( + self.hs.get_federation_transport_client().send_transaction + ) mock_send_transaction.return_value = defer.succeed({}) sender = self.hs.get_federation_sender() - receipt = ReadReceipt("room_id", "m.read", "user_id", ["event_id"], {"ts": 1234}) + receipt = ReadReceipt( + "room_id", "m.read", "user_id", ["event_id"], {"ts": 1234} + ) self.successResultOf(sender.send_read_receipt(receipt)) self.pump() @@ -46,21 +50,24 @@ class FederationSenderTestCases(HomeserverTestCase): mock_send_transaction.assert_called_once() json_cb = mock_send_transaction.call_args[0][1] data = json_cb() - self.assertEqual(data['edus'], [ - { - 'edu_type': 'm.receipt', - 'content': { - 'room_id': { - 'm.read': { - 'user_id': { - 'event_ids': ['event_id'], - 'data': {'ts': 1234}, - }, - }, + self.assertEqual( + data['edus'], + [ + { + 'edu_type': 'm.receipt', + 'content': { + 'room_id': { + 'm.read': { + 'user_id': { + 'event_ids': ['event_id'], + 'data': {'ts': 1234}, + } + } + } }, - }, - }, - ]) + } + ], + ) def test_send_receipts_with_backoff(self): """Send two receipts in quick succession; the second should be flushed, but @@ -68,11 +75,15 @@ class FederationSenderTestCases(HomeserverTestCase): mock_state_handler = self.hs.get_state_handler() mock_state_handler.get_current_hosts_in_room.return_value = ["test", "host2"] - mock_send_transaction = self.hs.get_federation_transport_client().send_transaction + mock_send_transaction = ( + self.hs.get_federation_transport_client().send_transaction + ) mock_send_transaction.return_value = defer.succeed({}) sender = self.hs.get_federation_sender() - receipt = ReadReceipt("room_id", "m.read", "user_id", ["event_id"], {"ts": 1234}) + receipt = ReadReceipt( + "room_id", "m.read", "user_id", ["event_id"], {"ts": 1234} + ) self.successResultOf(sender.send_read_receipt(receipt)) self.pump() @@ -81,25 +92,30 @@ class FederationSenderTestCases(HomeserverTestCase): mock_send_transaction.assert_called_once() json_cb = mock_send_transaction.call_args[0][1] data = json_cb() - self.assertEqual(data['edus'], [ - { - 'edu_type': 'm.receipt', - 'content': { - 'room_id': { - 'm.read': { - 'user_id': { - 'event_ids': ['event_id'], - 'data': {'ts': 1234}, - }, - }, + self.assertEqual( + data['edus'], + [ + { + 'edu_type': 'm.receipt', + 'content': { + 'room_id': { + 'm.read': { + 'user_id': { + 'event_ids': ['event_id'], + 'data': {'ts': 1234}, + } + } + } }, - }, - }, - ]) + } + ], + ) mock_send_transaction.reset_mock() # send the second RR - receipt = ReadReceipt("room_id", "m.read", "user_id", ["other_id"], {"ts": 1234}) + receipt = ReadReceipt( + "room_id", "m.read", "user_id", ["other_id"], {"ts": 1234} + ) self.successResultOf(sender.send_read_receipt(receipt)) self.pump() mock_send_transaction.assert_not_called() @@ -111,18 +127,21 @@ class FederationSenderTestCases(HomeserverTestCase): mock_send_transaction.assert_called_once() json_cb = 
mock_send_transaction.call_args[0][1] data = json_cb() - self.assertEqual(data['edus'], [ - { - 'edu_type': 'm.receipt', - 'content': { - 'room_id': { - 'm.read': { - 'user_id': { - 'event_ids': ['other_id'], - 'data': {'ts': 1234}, - }, - }, + self.assertEqual( + data['edus'], + [ + { + 'edu_type': 'm.receipt', + 'content': { + 'room_id': { + 'm.read': { + 'user_id': { + 'event_ids': ['other_id'], + 'data': {'ts': 1234}, + } + } + } }, - }, - }, - ]) + } + ], + ) diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index 5b2105bc76..917548bb31 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -115,11 +115,7 @@ class TestCreateAliasACL(unittest.HomeserverTestCase): # We cheekily override the config to add custom alias creation rules config = {} config["alias_creation_rules"] = [ - { - "user_id": "*", - "alias": "#unofficial_*", - "action": "allow", - } + {"user_id": "*", "alias": "#unofficial_*", "action": "allow"} ] config["room_list_publication_rules"] = [] @@ -162,9 +158,7 @@ class TestRoomListSearchDisabled(unittest.HomeserverTestCase): room_id = self.helper.create_room_as(self.user_id) request, channel = self.make_request( - "PUT", - b"directory/list/room/%s" % (room_id.encode('ascii'),), - b'{}', + "PUT", b"directory/list/room/%s" % (room_id.encode('ascii'),), b'{}' ) self.render(request) self.assertEquals(200, channel.code, channel.result) @@ -179,10 +173,7 @@ class TestRoomListSearchDisabled(unittest.HomeserverTestCase): self.directory_handler.enable_room_list_search = True # Room list is enabled so we should get some results - request, channel = self.make_request( - "GET", - b"publicRooms", - ) + request, channel = self.make_request("GET", b"publicRooms") self.render(request) self.assertEquals(200, channel.code, channel.result) self.assertTrue(len(channel.json_body["chunk"]) > 0) @@ -191,10 +182,7 @@ class TestRoomListSearchDisabled(unittest.HomeserverTestCase): self.directory_handler.enable_room_list_search = False # Room list disabled so we should get no results - request, channel = self.make_request( - "GET", - b"publicRooms", - ) + request, channel = self.make_request("GET", b"publicRooms") self.render(request) self.assertEquals(200, channel.code, channel.result) self.assertTrue(len(channel.json_body["chunk"]) == 0) @@ -202,9 +190,7 @@ class TestRoomListSearchDisabled(unittest.HomeserverTestCase): # Room list disabled so we shouldn't be allowed to publish rooms room_id = self.helper.create_room_as(self.user_id) request, channel = self.make_request( - "PUT", - b"directory/list/room/%s" % (room_id.encode('ascii'),), - b'{}', + "PUT", b"directory/list/room/%s" % (room_id.encode('ascii'),), b'{}' ) self.render(request) self.assertEquals(403, channel.code, channel.result) diff --git a/tests/handlers/test_e2e_room_keys.py b/tests/handlers/test_e2e_room_keys.py index 1c49bbbc3c..2e72a1dd23 100644 --- a/tests/handlers/test_e2e_room_keys.py +++ b/tests/handlers/test_e2e_room_keys.py @@ -36,7 +36,7 @@ room_keys = { "first_message_index": 1, "forwarded_count": 1, "is_verified": False, - "session_data": "SSBBTSBBIEZJU0gK" + "session_data": "SSBBTSBBIEZJU0gK", } } } @@ -47,15 +47,13 @@ room_keys = { class E2eRoomKeysHandlerTestCase(unittest.TestCase): def __init__(self, *args, **kwargs): super(E2eRoomKeysHandlerTestCase, self).__init__(*args, **kwargs) - self.hs = None # type: synapse.server.HomeServer + self.hs = None # type: synapse.server.HomeServer self.handler = None # type: synapse.handlers.e2e_keys.E2eRoomKeysHandler 
@defer.inlineCallbacks def setUp(self): self.hs = yield utils.setup_test_homeserver( - self.addCleanup, - handlers=None, - replication_layer=mock.Mock(), + self.addCleanup, handlers=None, replication_layer=mock.Mock() ) self.handler = synapse.handlers.e2e_room_keys.E2eRoomKeysHandler(self.hs) self.local_user = "@boris:" + self.hs.hostname @@ -88,67 +86,86 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): def test_create_version(self): """Check that we can create and then retrieve versions. """ - res = yield self.handler.create_version(self.local_user, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "first_version_auth_data", - }) + res = yield self.handler.create_version( + self.local_user, + {"algorithm": "m.megolm_backup.v1", "auth_data": "first_version_auth_data"}, + ) self.assertEqual(res, "1") # check we can retrieve it as the current version res = yield self.handler.get_version_info(self.local_user) - self.assertDictEqual(res, { - "version": "1", - "algorithm": "m.megolm_backup.v1", - "auth_data": "first_version_auth_data", - }) + self.assertDictEqual( + res, + { + "version": "1", + "algorithm": "m.megolm_backup.v1", + "auth_data": "first_version_auth_data", + }, + ) # check we can retrieve it as a specific version res = yield self.handler.get_version_info(self.local_user, "1") - self.assertDictEqual(res, { - "version": "1", - "algorithm": "m.megolm_backup.v1", - "auth_data": "first_version_auth_data", - }) + self.assertDictEqual( + res, + { + "version": "1", + "algorithm": "m.megolm_backup.v1", + "auth_data": "first_version_auth_data", + }, + ) # upload a new one... - res = yield self.handler.create_version(self.local_user, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "second_version_auth_data", - }) + res = yield self.handler.create_version( + self.local_user, + { + "algorithm": "m.megolm_backup.v1", + "auth_data": "second_version_auth_data", + }, + ) self.assertEqual(res, "2") # check we can retrieve it as the current version res = yield self.handler.get_version_info(self.local_user) - self.assertDictEqual(res, { - "version": "2", - "algorithm": "m.megolm_backup.v1", - "auth_data": "second_version_auth_data", - }) + self.assertDictEqual( + res, + { + "version": "2", + "algorithm": "m.megolm_backup.v1", + "auth_data": "second_version_auth_data", + }, + ) @defer.inlineCallbacks def test_update_version(self): """Check that we can update versions. 
""" - version = yield self.handler.create_version(self.local_user, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "first_version_auth_data", - }) + version = yield self.handler.create_version( + self.local_user, + {"algorithm": "m.megolm_backup.v1", "auth_data": "first_version_auth_data"}, + ) self.assertEqual(version, "1") - res = yield self.handler.update_version(self.local_user, version, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "revised_first_version_auth_data", - "version": version - }) + res = yield self.handler.update_version( + self.local_user, + version, + { + "algorithm": "m.megolm_backup.v1", + "auth_data": "revised_first_version_auth_data", + "version": version, + }, + ) self.assertDictEqual(res, {}) # check we can retrieve it as the current version res = yield self.handler.get_version_info(self.local_user) - self.assertDictEqual(res, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "revised_first_version_auth_data", - "version": version - }) + self.assertDictEqual( + res, + { + "algorithm": "m.megolm_backup.v1", + "auth_data": "revised_first_version_auth_data", + "version": version, + }, + ) @defer.inlineCallbacks def test_update_missing_version(self): @@ -156,11 +173,15 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): """ res = None try: - yield self.handler.update_version(self.local_user, "1", { - "algorithm": "m.megolm_backup.v1", - "auth_data": "revised_first_version_auth_data", - "version": "1" - }) + yield self.handler.update_version( + self.local_user, + "1", + { + "algorithm": "m.megolm_backup.v1", + "auth_data": "revised_first_version_auth_data", + "version": "1", + }, + ) except errors.SynapseError as e: res = e.code self.assertEqual(res, 404) @@ -170,29 +191,37 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): """Check that we get a 400 if the version in the body is missing or doesn't match """ - version = yield self.handler.create_version(self.local_user, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "first_version_auth_data", - }) + version = yield self.handler.create_version( + self.local_user, + {"algorithm": "m.megolm_backup.v1", "auth_data": "first_version_auth_data"}, + ) self.assertEqual(version, "1") res = None try: - yield self.handler.update_version(self.local_user, version, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "revised_first_version_auth_data" - }) + yield self.handler.update_version( + self.local_user, + version, + { + "algorithm": "m.megolm_backup.v1", + "auth_data": "revised_first_version_auth_data", + }, + ) except errors.SynapseError as e: res = e.code self.assertEqual(res, 400) res = None try: - yield self.handler.update_version(self.local_user, version, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "revised_first_version_auth_data", - "version": "incorrect" - }) + yield self.handler.update_version( + self.local_user, + version, + { + "algorithm": "m.megolm_backup.v1", + "auth_data": "revised_first_version_auth_data", + "version": "incorrect", + }, + ) except errors.SynapseError as e: res = e.code self.assertEqual(res, 400) @@ -223,10 +252,10 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): def test_delete_version(self): """Check that we can create and then delete versions. 
""" - res = yield self.handler.create_version(self.local_user, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "first_version_auth_data", - }) + res = yield self.handler.create_version( + self.local_user, + {"algorithm": "m.megolm_backup.v1", "auth_data": "first_version_auth_data"}, + ) self.assertEqual(res, "1") # check we can delete it @@ -255,16 +284,14 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): def test_get_missing_room_keys(self): """Check we get an empty response from an empty backup """ - version = yield self.handler.create_version(self.local_user, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "first_version_auth_data", - }) + version = yield self.handler.create_version( + self.local_user, + {"algorithm": "m.megolm_backup.v1", "auth_data": "first_version_auth_data"}, + ) self.assertEqual(version, "1") res = yield self.handler.get_room_keys(self.local_user, version) - self.assertDictEqual(res, { - "rooms": {} - }) + self.assertDictEqual(res, {"rooms": {}}) # TODO: test the locking semantics when uploading room_keys, # although this is probably best done in sytest @@ -275,7 +302,9 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): """ res = None try: - yield self.handler.upload_room_keys(self.local_user, "no_version", room_keys) + yield self.handler.upload_room_keys( + self.local_user, "no_version", room_keys + ) except errors.SynapseError as e: res = e.code self.assertEqual(res, 404) @@ -285,10 +314,10 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): """Check that we get a 404 on uploading keys when an nonexistent version is specified """ - version = yield self.handler.create_version(self.local_user, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "first_version_auth_data", - }) + version = yield self.handler.create_version( + self.local_user, + {"algorithm": "m.megolm_backup.v1", "auth_data": "first_version_auth_data"}, + ) self.assertEqual(version, "1") res = None @@ -304,16 +333,19 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): def test_upload_room_keys_wrong_version(self): """Check that we get a 403 on uploading keys for an old version """ - version = yield self.handler.create_version(self.local_user, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "first_version_auth_data", - }) + version = yield self.handler.create_version( + self.local_user, + {"algorithm": "m.megolm_backup.v1", "auth_data": "first_version_auth_data"}, + ) self.assertEqual(version, "1") - version = yield self.handler.create_version(self.local_user, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "second_version_auth_data", - }) + version = yield self.handler.create_version( + self.local_user, + { + "algorithm": "m.megolm_backup.v1", + "auth_data": "second_version_auth_data", + }, + ) self.assertEqual(version, "2") res = None @@ -327,10 +359,10 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): def test_upload_room_keys_insert(self): """Check that we can insert and retrieve keys for a session """ - version = yield self.handler.create_version(self.local_user, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "first_version_auth_data", - }) + version = yield self.handler.create_version( + self.local_user, + {"algorithm": "m.megolm_backup.v1", "auth_data": "first_version_auth_data"}, + ) self.assertEqual(version, "1") yield self.handler.upload_room_keys(self.local_user, version, room_keys) @@ -340,18 +372,13 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): # check getting room_keys for a given room res = yield 
self.handler.get_room_keys( - self.local_user, - version, - room_id="!abc:matrix.org" + self.local_user, version, room_id="!abc:matrix.org" ) self.assertDictEqual(res, room_keys) # check getting room_keys for a given session_id res = yield self.handler.get_room_keys( - self.local_user, - version, - room_id="!abc:matrix.org", - session_id="c0ff33", + self.local_user, version, room_id="!abc:matrix.org", session_id="c0ff33" ) self.assertDictEqual(res, room_keys) @@ -359,10 +386,10 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): def test_upload_room_keys_merge(self): """Check that we can upload a new room_key for an existing session and have it correctly merged""" - version = yield self.handler.create_version(self.local_user, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "first_version_auth_data", - }) + version = yield self.handler.create_version( + self.local_user, + {"algorithm": "m.megolm_backup.v1", "auth_data": "first_version_auth_data"}, + ) self.assertEqual(version, "1") yield self.handler.upload_room_keys(self.local_user, version, room_keys) @@ -378,7 +405,7 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): res = yield self.handler.get_room_keys(self.local_user, version) self.assertEqual( res['rooms']['!abc:matrix.org']['sessions']['c0ff33']['session_data'], - "SSBBTSBBIEZJU0gK" + "SSBBTSBBIEZJU0gK", ) # test that marking the session as verified however /does/ replace it @@ -387,8 +414,7 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): res = yield self.handler.get_room_keys(self.local_user, version) self.assertEqual( - res['rooms']['!abc:matrix.org']['sessions']['c0ff33']['session_data'], - "new" + res['rooms']['!abc:matrix.org']['sessions']['c0ff33']['session_data'], "new" ) # test that a session with a higher forwarded_count doesn't replace one @@ -399,8 +425,7 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): res = yield self.handler.get_room_keys(self.local_user, version) self.assertEqual( - res['rooms']['!abc:matrix.org']['sessions']['c0ff33']['session_data'], - "new" + res['rooms']['!abc:matrix.org']['sessions']['c0ff33']['session_data'], "new" ) # TODO: check edge cases as well as the common variations here @@ -409,56 +434,36 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): def test_delete_room_keys(self): """Check that we can insert and delete keys for a session """ - version = yield self.handler.create_version(self.local_user, { - "algorithm": "m.megolm_backup.v1", - "auth_data": "first_version_auth_data", - }) + version = yield self.handler.create_version( + self.local_user, + {"algorithm": "m.megolm_backup.v1", "auth_data": "first_version_auth_data"}, + ) self.assertEqual(version, "1") # check for bulk-delete yield self.handler.upload_room_keys(self.local_user, version, room_keys) yield self.handler.delete_room_keys(self.local_user, version) res = yield self.handler.get_room_keys( - self.local_user, - version, - room_id="!abc:matrix.org", - session_id="c0ff33", + self.local_user, version, room_id="!abc:matrix.org", session_id="c0ff33" ) - self.assertDictEqual(res, { - "rooms": {} - }) + self.assertDictEqual(res, {"rooms": {}}) # check for bulk-delete per room yield self.handler.upload_room_keys(self.local_user, version, room_keys) yield self.handler.delete_room_keys( - self.local_user, - version, - room_id="!abc:matrix.org", + self.local_user, version, room_id="!abc:matrix.org" ) res = yield self.handler.get_room_keys( - self.local_user, - version, - room_id="!abc:matrix.org", - session_id="c0ff33", + self.local_user, version, 
room_id="!abc:matrix.org", session_id="c0ff33" ) - self.assertDictEqual(res, { - "rooms": {} - }) + self.assertDictEqual(res, {"rooms": {}}) # check for bulk-delete per session yield self.handler.upload_room_keys(self.local_user, version, room_keys) yield self.handler.delete_room_keys( - self.local_user, - version, - room_id="!abc:matrix.org", - session_id="c0ff33", + self.local_user, version, room_id="!abc:matrix.org", session_id="c0ff33" ) res = yield self.handler.get_room_keys( - self.local_user, - version, - room_id="!abc:matrix.org", - session_id="c0ff33", + self.local_user, version, room_id="!abc:matrix.org", session_id="c0ff33" ) - self.assertDictEqual(res, { - "rooms": {} - }) + self.assertDictEqual(res, {"rooms": {}}) diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index 94c6080e34..f70c6e7d65 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -424,8 +424,7 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor, clock): hs = self.setup_test_homeserver( - "server", http_client=None, - federation_sender=Mock(), + "server", http_client=None, federation_sender=Mock() ) return hs @@ -457,7 +456,7 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): # Mark test2 as online, test will be offline with a last_active of 0 self.presence_handler.set_state( - UserID.from_string("@test2:server"), {"presence": PresenceState.ONLINE}, + UserID.from_string("@test2:server"), {"presence": PresenceState.ONLINE} ) self.reactor.pump([0]) # Wait for presence updates to be handled @@ -506,13 +505,13 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): # Mark test as online self.presence_handler.set_state( - UserID.from_string("@test:server"), {"presence": PresenceState.ONLINE}, + UserID.from_string("@test:server"), {"presence": PresenceState.ONLINE} ) # Mark test2 as online, test will be offline with a last_active of 0. 
# Note we don't join them to the room yet self.presence_handler.set_state( - UserID.from_string("@test2:server"), {"presence": PresenceState.ONLINE}, + UserID.from_string("@test2:server"), {"presence": PresenceState.ONLINE} ) # Add servers to the room @@ -541,8 +540,7 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): ) self.assertEqual(expected_state.state, PresenceState.ONLINE) self.federation_sender.send_presence_to_destinations.assert_called_once_with( - destinations=set(("server2", "server3")), - states=[expected_state] + destinations=set(("server2", "server3")), states=[expected_state] ) def _add_new_user(self, room_id, user_id): @@ -565,7 +563,7 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): type=EventTypes.Member, sender=user_id, state_key=user_id, - content={"membership": Membership.JOIN} + content={"membership": Membership.JOIN}, ) prev_event_ids = self.get_success( diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index 5a0b6c201c..cb8b4d2913 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -64,20 +64,22 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase): mock_federation_client.put_json.return_value = defer.succeed((200, "OK")) hs = self.setup_test_homeserver( - datastore=(Mock( - spec=[ - # Bits that Federation needs - "prep_send_transaction", - "delivered_txn", - "get_received_txn_response", - "set_received_txn_response", - "get_destination_retry_timings", - "get_devices_by_remote", - # Bits that user_directory needs - "get_user_directory_stream_pos", - "get_current_state_deltas", - ] - )), + datastore=( + Mock( + spec=[ + # Bits that Federation needs + "prep_send_transaction", + "delivered_txn", + "get_received_txn_response", + "set_received_txn_response", + "get_destination_retry_timings", + "get_devices_by_remote", + # Bits that user_directory needs + "get_user_directory_stream_pos", + "get_current_state_deltas", + ] + ) + ), notifier=Mock(), http_client=mock_federation_client, keyring=mock_keyring, @@ -87,7 +89,7 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase): def prepare(self, reactor, clock, hs): # the tests assume that we are starting at unix time 1000 - reactor.pump((1000, )) + reactor.pump((1000,)) mock_notifier = hs.get_notifier() self.on_new_event = mock_notifier.on_new_event @@ -114,6 +116,7 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase): def check_joined_room(room_id, user_id): if user_id not in [u.to_string() for u in self.room_members]: raise AuthError(401, "User is not in the room") + hs.get_auth().check_joined_room = check_joined_room def get_joined_hosts_for_room(room_id): @@ -123,6 +126,7 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase): def get_current_users_in_room(room_id): return set(str(u) for u in self.room_members) + hs.get_state_handler().get_current_users_in_room = get_current_users_in_room self.datastore.get_user_directory_stream_pos.return_value = ( @@ -141,21 +145,16 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase): self.assertEquals(self.event_source.get_current_key(), 0) - self.successResultOf(self.handler.started_typing( - target_user=U_APPLE, - auth_user=U_APPLE, - room_id=ROOM_ID, - timeout=20000, - )) - - self.on_new_event.assert_has_calls( - [call('typing_key', 1, rooms=[ROOM_ID])] + self.successResultOf( + self.handler.started_typing( + target_user=U_APPLE, auth_user=U_APPLE, room_id=ROOM_ID, timeout=20000 + ) ) + self.on_new_event.assert_has_calls([call('typing_key', 1, 
rooms=[ROOM_ID])]) + self.assertEquals(self.event_source.get_current_key(), 1) - events = self.event_source.get_new_events( - room_ids=[ROOM_ID], from_key=0 - ) + events = self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0) self.assertEquals( events[0], [ @@ -170,12 +169,11 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase): def test_started_typing_remote_send(self): self.room_members = [U_APPLE, U_ONION] - self.successResultOf(self.handler.started_typing( - target_user=U_APPLE, - auth_user=U_APPLE, - room_id=ROOM_ID, - timeout=20000, - )) + self.successResultOf( + self.handler.started_typing( + target_user=U_APPLE, auth_user=U_APPLE, room_id=ROOM_ID, timeout=20000 + ) + ) put_json = self.hs.get_http_client().put_json put_json.assert_called_once_with( @@ -216,14 +214,10 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase): self.render(request) self.assertEqual(channel.code, 200) - self.on_new_event.assert_has_calls( - [call('typing_key', 1, rooms=[ROOM_ID])] - ) + self.on_new_event.assert_has_calls([call('typing_key', 1, rooms=[ROOM_ID])]) self.assertEquals(self.event_source.get_current_key(), 1) - events = self.event_source.get_new_events( - room_ids=[ROOM_ID], from_key=0 - ) + events = self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0) self.assertEquals( events[0], [ @@ -247,14 +241,14 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase): self.assertEquals(self.event_source.get_current_key(), 0) - self.successResultOf(self.handler.stopped_typing( - target_user=U_APPLE, auth_user=U_APPLE, room_id=ROOM_ID - )) - - self.on_new_event.assert_has_calls( - [call('typing_key', 1, rooms=[ROOM_ID])] + self.successResultOf( + self.handler.stopped_typing( + target_user=U_APPLE, auth_user=U_APPLE, room_id=ROOM_ID + ) ) + self.on_new_event.assert_has_calls([call('typing_key', 1, rooms=[ROOM_ID])]) + put_json = self.hs.get_http_client().put_json put_json.assert_called_once_with( "farm", @@ -274,18 +268,10 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase): ) self.assertEquals(self.event_source.get_current_key(), 1) - events = self.event_source.get_new_events( - room_ids=[ROOM_ID], from_key=0 - ) + events = self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0) self.assertEquals( events[0], - [ - { - "type": "m.typing", - "room_id": ROOM_ID, - "content": {"user_ids": []}, - } - ], + [{"type": "m.typing", "room_id": ROOM_ID, "content": {"user_ids": []}}], ) def test_typing_timeout(self): @@ -293,22 +279,17 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase): self.assertEquals(self.event_source.get_current_key(), 0) - self.successResultOf(self.handler.started_typing( - target_user=U_APPLE, - auth_user=U_APPLE, - room_id=ROOM_ID, - timeout=10000, - )) - - self.on_new_event.assert_has_calls( - [call('typing_key', 1, rooms=[ROOM_ID])] + self.successResultOf( + self.handler.started_typing( + target_user=U_APPLE, auth_user=U_APPLE, room_id=ROOM_ID, timeout=10000 + ) ) + + self.on_new_event.assert_has_calls([call('typing_key', 1, rooms=[ROOM_ID])]) self.on_new_event.reset_mock() self.assertEquals(self.event_source.get_current_key(), 1) - events = self.event_source.get_new_events( - room_ids=[ROOM_ID], from_key=0 - ) + events = self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0) self.assertEquals( events[0], [ @@ -320,45 +301,30 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase): ], ) - self.reactor.pump([16, ]) + self.reactor.pump([16]) - self.on_new_event.assert_has_calls( - 
[call('typing_key', 2, rooms=[ROOM_ID])] - ) + self.on_new_event.assert_has_calls([call('typing_key', 2, rooms=[ROOM_ID])]) self.assertEquals(self.event_source.get_current_key(), 2) - events = self.event_source.get_new_events( - room_ids=[ROOM_ID], from_key=1 - ) + events = self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=1) self.assertEquals( events[0], - [ - { - "type": "m.typing", - "room_id": ROOM_ID, - "content": {"user_ids": []}, - } - ], + [{"type": "m.typing", "room_id": ROOM_ID, "content": {"user_ids": []}}], ) # SYN-230 - see if we can still set after timeout - self.successResultOf(self.handler.started_typing( - target_user=U_APPLE, - auth_user=U_APPLE, - room_id=ROOM_ID, - timeout=10000, - )) - - self.on_new_event.assert_has_calls( - [call('typing_key', 3, rooms=[ROOM_ID])] + self.successResultOf( + self.handler.started_typing( + target_user=U_APPLE, auth_user=U_APPLE, room_id=ROOM_ID, timeout=10000 + ) ) + + self.on_new_event.assert_has_calls([call('typing_key', 3, rooms=[ROOM_ID])]) self.on_new_event.reset_mock() self.assertEquals(self.event_source.get_current_key(), 3) - events = self.event_source.get_new_events( - room_ids=[ROOM_ID], from_key=0 - ) + events = self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0) self.assertEquals( events[0], [ diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py index 7dd1a1daf8..44468f5382 100644 --- a/tests/handlers/test_user_directory.py +++ b/tests/handlers/test_user_directory.py @@ -352,9 +352,7 @@ class TestUserDirSearchDisabled(unittest.HomeserverTestCase): # Assert user directory is not empty request, channel = self.make_request( - "POST", - b"user_directory/search", - b'{"search_term":"user2"}', + "POST", b"user_directory/search", b'{"search_term":"user2"}' ) self.render(request) self.assertEquals(200, channel.code, channel.result) @@ -363,9 +361,7 @@ class TestUserDirSearchDisabled(unittest.HomeserverTestCase): # Disable user directory and check search returns nothing self.config.user_directory_search_enabled = False request, channel = self.make_request( - "POST", - b"user_directory/search", - b'{"search_term":"user2"}', + "POST", b"user_directory/search", b'{"search_term":"user2"}' ) self.render(request) self.assertEquals(200, channel.code, channel.result) diff --git a/tests/http/__init__.py b/tests/http/__init__.py index ee8010f598..851fc0eb33 100644 --- a/tests/http/__init__.py +++ b/tests/http/__init__.py @@ -24,14 +24,12 @@ def get_test_cert_file(): # # openssl req -x509 -newkey rsa:4096 -keyout server.pem -out server.pem -days 36500 \ # -nodes -subj '/CN=testserv' - return os.path.join( - os.path.dirname(__file__), - 'server.pem', - ) + return os.path.join(os.path.dirname(__file__), 'server.pem') class ServerTLSContext(object): """A TLS Context which presents our test cert.""" + def __init__(self): self.filename = get_test_cert_file() diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index e9eb662c4c..7036615041 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -79,12 +79,12 @@ class MatrixFederationAgentTests(TestCase): # stubbing that out here. 
client_protocol = client_factory.buildProtocol(None) client_protocol.makeConnection( - FakeTransport(server_tls_protocol, self.reactor, client_protocol), + FakeTransport(server_tls_protocol, self.reactor, client_protocol) ) # tell the server tls protocol to send its stuff back to the client, too server_tls_protocol.makeConnection( - FakeTransport(client_protocol, self.reactor, server_tls_protocol), + FakeTransport(client_protocol, self.reactor, server_tls_protocol) ) # give the reactor a pump to get the TLS juices flowing. @@ -125,7 +125,7 @@ class MatrixFederationAgentTests(TestCase): _check_logcontext(context) def _handle_well_known_connection( - self, client_factory, expected_sni, content, response_headers={}, + self, client_factory, expected_sni, content, response_headers={} ): """Handle an outgoing HTTPs connection: wire it up to a server, check that the request is for a .well-known, and send the response. @@ -139,8 +139,7 @@ class MatrixFederationAgentTests(TestCase): """ # make the connection for .well-known well_known_server = self._make_connection( - client_factory, - expected_sni=expected_sni, + client_factory, expected_sni=expected_sni ) # check the .well-known request and send a response self.assertEqual(len(well_known_server.requests), 1) @@ -154,17 +153,14 @@ class MatrixFederationAgentTests(TestCase): """ self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/.well-known/matrix/server') - self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'testserv'], - ) + self.assertEqual(request.requestHeaders.getRawHeaders(b'host'), [b'testserv']) # send back a response for k, v in headers.items(): request.setHeader(k, v) request.write(content) request.finish() - self.reactor.pump((0.1, )) + self.reactor.pump((0.1,)) def test_get(self): """ @@ -184,18 +180,14 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(port, 8448) # make a test server, and wire up the client - http_server = self._make_connection( - client_factory, - expected_sni=b"testserv", - ) + http_server = self._make_connection(client_factory, expected_sni=b"testserv") self.assertEqual(len(http_server.requests), 1) request = http_server.requests[0] self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/foo/bar') self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'testserv:8448'] + request.requestHeaders.getRawHeaders(b'host'), [b'testserv:8448'] ) content = request.content.read() self.assertEqual(content, b'') @@ -244,19 +236,13 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(port, 8448) # make a test server, and wire up the client - http_server = self._make_connection( - client_factory, - expected_sni=None, - ) + http_server = self._make_connection(client_factory, expected_sni=None) self.assertEqual(len(http_server.requests), 1) request = http_server.requests[0] self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/foo/bar') - self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'1.2.3.4'], - ) + self.assertEqual(request.requestHeaders.getRawHeaders(b'host'), [b'1.2.3.4']) # finish the request request.finish() @@ -285,19 +271,13 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(port, 8448) # make a test server, and wire up the client - http_server = self._make_connection( - client_factory, - expected_sni=None, - ) + http_server = self._make_connection(client_factory, expected_sni=None) self.assertEqual(len(http_server.requests), 1) request = http_server.requests[0] 
self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/foo/bar') - self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'[::1]'], - ) + self.assertEqual(request.requestHeaders.getRawHeaders(b'host'), [b'[::1]']) # finish the request request.finish() @@ -326,19 +306,13 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(port, 80) # make a test server, and wire up the client - http_server = self._make_connection( - client_factory, - expected_sni=None, - ) + http_server = self._make_connection(client_factory, expected_sni=None) self.assertEqual(len(http_server.requests), 1) request = http_server.requests[0] self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/foo/bar') - self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'[::1]:80'], - ) + self.assertEqual(request.requestHeaders.getRawHeaders(b'host'), [b'[::1]:80']) # finish the request request.finish() @@ -377,7 +351,7 @@ class MatrixFederationAgentTests(TestCase): # now there should be a SRV lookup self.mock_resolver.resolve_service.assert_called_once_with( - b"_matrix._tcp.testserv", + b"_matrix._tcp.testserv" ) # we should fall back to a direct connection @@ -387,19 +361,13 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(port, 8448) # make a test server, and wire up the client - http_server = self._make_connection( - client_factory, - expected_sni=b'testserv', - ) + http_server = self._make_connection(client_factory, expected_sni=b'testserv') self.assertEqual(len(http_server.requests), 1) request = http_server.requests[0] self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/foo/bar') - self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'testserv'], - ) + self.assertEqual(request.requestHeaders.getRawHeaders(b'host'), [b'testserv']) # finish the request request.finish() @@ -427,13 +395,14 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(port, 443) self._handle_well_known_connection( - client_factory, expected_sni=b"testserv", + client_factory, + expected_sni=b"testserv", content=b'{ "m.server": "target-server" }', ) # there should be a SRV lookup self.mock_resolver.resolve_service.assert_called_once_with( - b"_matrix._tcp.target-server", + b"_matrix._tcp.target-server" ) # now we should get a connection to the target server @@ -444,8 +413,7 @@ class MatrixFederationAgentTests(TestCase): # make a test server, and wire up the client http_server = self._make_connection( - client_factory, - expected_sni=b'target-server', + client_factory, expected_sni=b'target-server' ) self.assertEqual(len(http_server.requests), 1) @@ -453,8 +421,7 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/foo/bar') self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'target-server'], + request.requestHeaders.getRawHeaders(b'host'), [b'target-server'] ) # finish the request @@ -490,8 +457,7 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(port, 443) redirect_server = self._make_connection( - client_factory, - expected_sni=b"testserv", + client_factory, expected_sni=b"testserv" ) # send a 302 redirect @@ -500,7 +466,7 @@ class MatrixFederationAgentTests(TestCase): request.redirect(b'https://testserv/even_better_known') request.finish() - self.reactor.pump((0.1, )) + self.reactor.pump((0.1,)) # now there should be another connection clients = self.reactor.tcpClients @@ -510,8 +476,7 @@ class 
MatrixFederationAgentTests(TestCase): self.assertEqual(port, 443) well_known_server = self._make_connection( - client_factory, - expected_sni=b"testserv", + client_factory, expected_sni=b"testserv" ) self.assertEqual(len(well_known_server.requests), 1, "No request after 302") @@ -521,11 +486,11 @@ class MatrixFederationAgentTests(TestCase): request.write(b'{ "m.server": "target-server" }') request.finish() - self.reactor.pump((0.1, )) + self.reactor.pump((0.1,)) # there should be a SRV lookup self.mock_resolver.resolve_service.assert_called_once_with( - b"_matrix._tcp.target-server", + b"_matrix._tcp.target-server" ) # now we should get a connection to the target server @@ -536,8 +501,7 @@ class MatrixFederationAgentTests(TestCase): # make a test server, and wire up the client http_server = self._make_connection( - client_factory, - expected_sni=b'target-server', + client_factory, expected_sni=b'target-server' ) self.assertEqual(len(http_server.requests), 1) @@ -545,8 +509,7 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/foo/bar') self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'target-server'], + request.requestHeaders.getRawHeaders(b'host'), [b'target-server'] ) # finish the request @@ -585,12 +548,12 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(port, 443) self._handle_well_known_connection( - client_factory, expected_sni=b"testserv", content=b'NOT JSON', + client_factory, expected_sni=b"testserv", content=b'NOT JSON' ) # now there should be a SRV lookup self.mock_resolver.resolve_service.assert_called_once_with( - b"_matrix._tcp.testserv", + b"_matrix._tcp.testserv" ) # we should fall back to a direct connection @@ -600,19 +563,13 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(port, 8448) # make a test server, and wire up the client - http_server = self._make_connection( - client_factory, - expected_sni=b'testserv', - ) + http_server = self._make_connection(client_factory, expected_sni=b'testserv') self.assertEqual(len(http_server.requests), 1) request = http_server.requests[0] self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/foo/bar') - self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'testserv'], - ) + self.assertEqual(request.requestHeaders.getRawHeaders(b'host'), [b'testserv']) # finish the request request.finish() @@ -635,7 +592,7 @@ class MatrixFederationAgentTests(TestCase): # the request for a .well-known will have failed with a DNS lookup error. 
self.mock_resolver.resolve_service.assert_called_once_with( - b"_matrix._tcp.testserv", + b"_matrix._tcp.testserv" ) # Make sure treq is trying to connect @@ -646,19 +603,13 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(port, 8443) # make a test server, and wire up the client - http_server = self._make_connection( - client_factory, - expected_sni=b'testserv', - ) + http_server = self._make_connection(client_factory, expected_sni=b'testserv') self.assertEqual(len(http_server.requests), 1) request = http_server.requests[0] self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/foo/bar') - self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'testserv'], - ) + self.assertEqual(request.requestHeaders.getRawHeaders(b'host'), [b'testserv']) # finish the request request.finish() @@ -685,17 +636,18 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(port, 443) self.mock_resolver.resolve_service.side_effect = lambda _: [ - Server(host=b"srvtarget", port=8443), + Server(host=b"srvtarget", port=8443) ] self._handle_well_known_connection( - client_factory, expected_sni=b"testserv", + client_factory, + expected_sni=b"testserv", content=b'{ "m.server": "target-server" }', ) # there should be a SRV lookup self.mock_resolver.resolve_service.assert_called_once_with( - b"_matrix._tcp.target-server", + b"_matrix._tcp.target-server" ) # now we should get a connection to the target of the SRV record @@ -706,8 +658,7 @@ class MatrixFederationAgentTests(TestCase): # make a test server, and wire up the client http_server = self._make_connection( - client_factory, - expected_sni=b'target-server', + client_factory, expected_sni=b'target-server' ) self.assertEqual(len(http_server.requests), 1) @@ -715,8 +666,7 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/foo/bar') self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'target-server'], + request.requestHeaders.getRawHeaders(b'host'), [b'target-server'] ) # finish the request @@ -757,7 +707,7 @@ class MatrixFederationAgentTests(TestCase): # now there should have been a SRV lookup self.mock_resolver.resolve_service.assert_called_once_with( - b"_matrix._tcp.xn--bcher-kva.com", + b"_matrix._tcp.xn--bcher-kva.com" ) # We should fall back to port 8448 @@ -769,8 +719,7 @@ class MatrixFederationAgentTests(TestCase): # make a test server, and wire up the client http_server = self._make_connection( - client_factory, - expected_sni=b'xn--bcher-kva.com', + client_factory, expected_sni=b'xn--bcher-kva.com' ) self.assertEqual(len(http_server.requests), 1) @@ -778,8 +727,7 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/foo/bar') self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'xn--bcher-kva.com'], + request.requestHeaders.getRawHeaders(b'host'), [b'xn--bcher-kva.com'] ) # finish the request @@ -801,7 +749,7 @@ class MatrixFederationAgentTests(TestCase): self.assertNoResult(test_d) self.mock_resolver.resolve_service.assert_called_once_with( - b"_matrix._tcp.xn--bcher-kva.com", + b"_matrix._tcp.xn--bcher-kva.com" ) # Make sure treq is trying to connect @@ -813,8 +761,7 @@ class MatrixFederationAgentTests(TestCase): # make a test server, and wire up the client http_server = self._make_connection( - client_factory, - expected_sni=b'xn--bcher-kva.com', + client_factory, expected_sni=b'xn--bcher-kva.com' ) 
self.assertEqual(len(http_server.requests), 1) @@ -822,8 +769,7 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(request.method, b'GET') self.assertEqual(request.path, b'/foo/bar') self.assertEqual( - request.requestHeaders.getRawHeaders(b'host'), - [b'xn--bcher-kva.com'], + request.requestHeaders.getRawHeaders(b'host'), [b'xn--bcher-kva.com'] ) # finish the request @@ -897,67 +843,70 @@ class TestCachePeriodFromHeaders(TestCase): # uppercase self.assertEqual( _cache_period_from_headers( - Headers({b'Cache-Control': [b'foo, Max-Age = 100, bar']}), - ), 100, + Headers({b'Cache-Control': [b'foo, Max-Age = 100, bar']}) + ), + 100, ) # missing value - self.assertIsNone(_cache_period_from_headers( - Headers({b'Cache-Control': [b'max-age=, bar']}), - )) + self.assertIsNone( + _cache_period_from_headers(Headers({b'Cache-Control': [b'max-age=, bar']})) + ) # hackernews: bogus due to semicolon - self.assertIsNone(_cache_period_from_headers( - Headers({b'Cache-Control': [b'private; max-age=0']}), - )) + self.assertIsNone( + _cache_period_from_headers( + Headers({b'Cache-Control': [b'private; max-age=0']}) + ) + ) # github self.assertEqual( _cache_period_from_headers( - Headers({b'Cache-Control': [b'max-age=0, private, must-revalidate']}), - ), 0, + Headers({b'Cache-Control': [b'max-age=0, private, must-revalidate']}) + ), + 0, ) # google self.assertEqual( _cache_period_from_headers( - Headers({b'cache-control': [b'private, max-age=0']}), - ), 0, + Headers({b'cache-control': [b'private, max-age=0']}) + ), + 0, ) def test_expires(self): self.assertEqual( _cache_period_from_headers( Headers({b'Expires': [b'Wed, 30 Jan 2019 07:35:33 GMT']}), - time_now=lambda: 1548833700 - ), 33, + time_now=lambda: 1548833700, + ), + 33, ) # cache-control overrides expires self.assertEqual( _cache_period_from_headers( - Headers({ - b'cache-control': [b'max-age=10'], - b'Expires': [b'Wed, 30 Jan 2019 07:35:33 GMT'] - }), - time_now=lambda: 1548833700 - ), 10, + Headers( + { + b'cache-control': [b'max-age=10'], + b'Expires': [b'Wed, 30 Jan 2019 07:35:33 GMT'], + } + ), + time_now=lambda: 1548833700, + ), + 10, ) # invalid expires means immediate expiry - self.assertEqual( - _cache_period_from_headers( - Headers({b'Expires': [b'0']}), - ), 0, - ) + self.assertEqual(_cache_period_from_headers(Headers({b'Expires': [b'0']})), 0) def _check_logcontext(context): current = LoggingContext.current_context() if current is not context: - raise AssertionError( - "Expected logcontext %s but was %s" % (context, current), - ) + raise AssertionError("Expected logcontext %s but was %s" % (context, current)) def _build_test_server(): @@ -973,7 +922,7 @@ def _build_test_server(): server_factory.log = _log_request server_tls_factory = TLSMemoryBIOFactory( - ServerTLSContext(), isClient=False, wrappedFactory=server_factory, + ServerTLSContext(), isClient=False, wrappedFactory=server_factory ) return server_tls_factory.buildProtocol(None) @@ -987,6 +936,7 @@ def _log_request(request): @implementer(IPolicyForHTTPS) class TrustingTLSPolicyForHTTPS(object): """An IPolicyForHTTPS which doesn't do any certificate verification""" + def creatorForNetloc(self, hostname, port): certificateOptions = OpenSSLCertificateOptions() return ClientTLSOptions(hostname, certificateOptions.getContext()) diff --git a/tests/http/federation/test_srv_resolver.py b/tests/http/federation/test_srv_resolver.py index a872e2441e..034c0db8d2 100644 --- a/tests/http/federation/test_srv_resolver.py +++ b/tests/http/federation/test_srv_resolver.py @@ -68,9 +68,7 
@@ class SrvResolverTestCase(unittest.TestCase): dns_client_mock.lookupService.assert_called_once_with(service_name) - result_deferred.callback( - ([answer_srv], None, None) - ) + result_deferred.callback(([answer_srv], None, None)) servers = self.successResultOf(test_d) @@ -112,7 +110,7 @@ class SrvResolverTestCase(unittest.TestCase): cache = {service_name: [entry]} resolver = SrvResolver( - dns_client=dns_client_mock, cache=cache, get_time=clock.time, + dns_client=dns_client_mock, cache=cache, get_time=clock.time ) servers = yield resolver.resolve_service(service_name) @@ -168,11 +166,13 @@ class SrvResolverTestCase(unittest.TestCase): self.assertNoResult(resolve_d) # returning a single "." should make the lookup fail with a ConenctError - lookup_deferred.callback(( - [dns.RRHeader(type=dns.SRV, payload=dns.Record_SRV(target=b"."))], - None, - None, - )) + lookup_deferred.callback( + ( + [dns.RRHeader(type=dns.SRV, payload=dns.Record_SRV(target=b"."))], + None, + None, + ) + ) self.failureResultOf(resolve_d, ConnectError) @@ -191,14 +191,16 @@ class SrvResolverTestCase(unittest.TestCase): resolve_d = resolver.resolve_service(service_name) self.assertNoResult(resolve_d) - lookup_deferred.callback(( - [ - dns.RRHeader(type=dns.A, payload=dns.Record_A()), - dns.RRHeader(type=dns.SRV, payload=dns.Record_SRV(target=b"host")), - ], - None, - None, - )) + lookup_deferred.callback( + ( + [ + dns.RRHeader(type=dns.A, payload=dns.Record_A()), + dns.RRHeader(type=dns.SRV, payload=dns.Record_SRV(target=b"host")), + ], + None, + None, + ) + ) servers = self.successResultOf(resolve_d) diff --git a/tests/http/test_fedclient.py b/tests/http/test_fedclient.py index cd8e086f86..279e456614 100644 --- a/tests/http/test_fedclient.py +++ b/tests/http/test_fedclient.py @@ -36,9 +36,7 @@ from tests.unittest import HomeserverTestCase def check_logcontext(context): current = LoggingContext.current_context() if current is not context: - raise AssertionError( - "Expected logcontext %s but was %s" % (context, current), - ) + raise AssertionError("Expected logcontext %s but was %s" % (context, current)) class FederationClientTests(HomeserverTestCase): @@ -54,6 +52,7 @@ class FederationClientTests(HomeserverTestCase): """ happy-path test of a GET request """ + @defer.inlineCallbacks def do_request(): with LoggingContext("one") as context: @@ -175,8 +174,7 @@ class FederationClientTests(HomeserverTestCase): self.assertIsInstance(f.value, RequestSendFailed) self.assertIsInstance( - f.value.inner_exception, - (ConnectingCancelledError, TimeoutError), + f.value.inner_exception, (ConnectingCancelledError, TimeoutError) ) def test_client_connect_no_response(self): @@ -216,9 +214,7 @@ class FederationClientTests(HomeserverTestCase): Once the client gets the headers, _request returns successfully. """ request = MatrixFederationRequest( - method="GET", - destination="testserv:8008", - path="foo/bar", + method="GET", destination="testserv:8008", path="foo/bar" ) d = self.cl._send_request(request, timeout=10000) @@ -258,8 +254,10 @@ class FederationClientTests(HomeserverTestCase): # Send it the HTTP response client.dataReceived( - (b"HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n" - b"Server: Fake\r\n\r\n") + ( + b"HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n" + b"Server: Fake\r\n\r\n" + ) ) # Push by enough to time it out @@ -274,9 +272,7 @@ class FederationClientTests(HomeserverTestCase): requiring a trailing slash. We need to retry the request with a trailing slash. 
Workaround for Synapse <= v0.99.3, explained in #3622. """ - d = self.cl.get_json( - "testserv:8008", "foo/bar", try_trailing_slash_on_400=True, - ) + d = self.cl.get_json("testserv:8008", "foo/bar", try_trailing_slash_on_400=True) # Send the request self.pump() @@ -329,9 +325,7 @@ class FederationClientTests(HomeserverTestCase): See test_client_requires_trailing_slashes() for context. """ - d = self.cl.get_json( - "testserv:8008", "foo/bar", try_trailing_slash_on_400=True, - ) + d = self.cl.get_json("testserv:8008", "foo/bar", try_trailing_slash_on_400=True) # Send the request self.pump() @@ -368,10 +362,7 @@ class FederationClientTests(HomeserverTestCase): self.failureResultOf(d) def test_client_sends_body(self): - self.cl.post_json( - "testserv:8008", "foo/bar", timeout=10000, - data={"a": "b"} - ) + self.cl.post_json("testserv:8008", "foo/bar", timeout=10000, data={"a": "b"}) self.pump() diff --git a/tests/patch_inline_callbacks.py b/tests/patch_inline_callbacks.py index 0f613945c8..ee0add3455 100644 --- a/tests/patch_inline_callbacks.py +++ b/tests/patch_inline_callbacks.py @@ -45,7 +45,9 @@ def do_patch(): except Exception: if LoggingContext.current_context() != start_context: err = "%s changed context from %s to %s on exception" % ( - f, start_context, LoggingContext.current_context() + f, + start_context, + LoggingContext.current_context(), ) print(err, file=sys.stderr) raise Exception(err) @@ -54,7 +56,9 @@ def do_patch(): if not isinstance(res, Deferred) or res.called: if LoggingContext.current_context() != start_context: err = "%s changed context from %s to %s" % ( - f, start_context, LoggingContext.current_context() + f, + start_context, + LoggingContext.current_context(), ) # print the error to stderr because otherwise all we # see in travis-ci is the 500 error @@ -66,9 +70,7 @@ def do_patch(): err = ( "%s returned incomplete deferred in non-sentinel context " "%s (start was %s)" - ) % ( - f, LoggingContext.current_context(), start_context, - ) + ) % (f, LoggingContext.current_context(), start_context) print(err, file=sys.stderr) raise Exception(err) @@ -76,7 +78,9 @@ def do_patch(): if LoggingContext.current_context() != start_context: err = "%s completion of %s changed context from %s to %s" % ( "Failure" if isinstance(r, Failure) else "Success", - f, start_context, LoggingContext.current_context(), + f, + start_context, + LoggingContext.current_context(), ) print(err, file=sys.stderr) raise Exception(err) diff --git a/tests/replication/slave/storage/_base.py b/tests/replication/slave/storage/_base.py index 1f72a2a04f..104349cdbd 100644 --- a/tests/replication/slave/storage/_base.py +++ b/tests/replication/slave/storage/_base.py @@ -74,21 +74,18 @@ class BaseSlavedStoreTestCase(unittest.HomeserverTestCase): self.assertEqual( master_result, expected_result, - "Expected master result to be %r but was %r" % ( - expected_result, master_result - ), + "Expected master result to be %r but was %r" + % (expected_result, master_result), ) self.assertEqual( slaved_result, expected_result, - "Expected slave result to be %r but was %r" % ( - expected_result, slaved_result - ), + "Expected slave result to be %r but was %r" + % (expected_result, slaved_result), ) self.assertEqual( master_result, slaved_result, - "Slave result %r does not match master result %r" % ( - slaved_result, master_result - ), + "Slave result %r does not match master result %r" + % (slaved_result, master_result), ) diff --git a/tests/replication/slave/storage/test_events.py 
b/tests/replication/slave/storage/test_events.py index 65ecff3bd6..a368117b43 100644 --- a/tests/replication/slave/storage/test_events.py +++ b/tests/replication/slave/storage/test_events.py @@ -234,10 +234,7 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase): type="m.room.member", sender=USER_ID_2, key=USER_ID_2, membership="join" ) msg, msgctx = self.build_event() - self.get_success(self.master_store.persist_events([ - (j2, j2ctx), - (msg, msgctx), - ])) + self.get_success(self.master_store.persist_events([(j2, j2ctx), (msg, msgctx)])) self.replicate() event_source = RoomEventSource(self.hs) @@ -257,15 +254,13 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase): # # First, we get a list of the rooms we are joined to joined_rooms = self.get_success( - self.slaved_store.get_rooms_for_user_with_stream_ordering( - USER_ID_2, - ), + self.slaved_store.get_rooms_for_user_with_stream_ordering(USER_ID_2) ) # Then, we get a list of the events since the last sync membership_changes = self.get_success( self.slaved_store.get_membership_changes_for_user( - USER_ID_2, prev_token, current_token, + USER_ID_2, prev_token, current_token ) ) @@ -298,9 +293,7 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase): self.master_store.persist_events([(event, context)], backfilled=True) ) else: - self.get_success( - self.master_store.persist_event(event, context) - ) + self.get_success(self.master_store.persist_event(event, context)) return event @@ -359,9 +352,7 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase): ) else: state_handler = self.hs.get_state_handler() - context = self.get_success(state_handler.compute_event_context( - event - )) + context = self.get_success(state_handler.compute_event_context(event)) self.master_store.add_push_actions_to_staging( event.event_id, {user_id: actions for user_id, actions in push_actions} diff --git a/tests/replication/tcp/streams/_base.py b/tests/replication/tcp/streams/_base.py index 38b368a972..ce3835ae6a 100644 --- a/tests/replication/tcp/streams/_base.py +++ b/tests/replication/tcp/streams/_base.py @@ -22,6 +22,7 @@ from tests.server import FakeTransport class BaseStreamTestCase(unittest.HomeserverTestCase): """Base class for tests of the replication streams""" + def prepare(self, reactor, clock, hs): # build a replication server server_factory = ReplicationStreamProtocolFactory(self.hs) @@ -52,6 +53,7 @@ class BaseStreamTestCase(unittest.HomeserverTestCase): class TestReplicationClientHandler(object): """Drop-in for ReplicationClientHandler which just collects RDATA rows""" + def __init__(self): self.received_rdata_rows = [] @@ -69,6 +71,4 @@ class TestReplicationClientHandler(object): def on_rdata(self, stream_name, token, rows): for r in rows: - self.received_rdata_rows.append( - (stream_name, token, r) - ) + self.received_rdata_rows.append((stream_name, token, r)) diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py index da19a83918..ee5f09041f 100644 --- a/tests/rest/admin/test_admin.py +++ b/tests/rest/admin/test_admin.py @@ -41,10 +41,10 @@ class VersionTestCase(unittest.HomeserverTestCase): request, channel = self.make_request("GET", self.url, shorthand=False) self.render(request) - self.assertEqual(200, int(channel.result["code"]), - msg=channel.result["body"]) - self.assertEqual({'server_version', 'python_version'}, - set(channel.json_body.keys())) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual( + {'server_version', 'python_version'}, 
set(channel.json_body.keys()) + ) class UserRegisterTestCase(unittest.HomeserverTestCase): @@ -200,9 +200,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase): nonce = channel.json_body["nonce"] want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1) - want_mac.update( - nonce.encode('ascii') + b"\x00bob\x00abc123\x00admin" - ) + want_mac.update(nonce.encode('ascii') + b"\x00bob\x00abc123\x00admin") want_mac = want_mac.hexdigest() body = json.dumps( @@ -330,11 +328,13 @@ class UserRegisterTestCase(unittest.HomeserverTestCase): # # Invalid user_type - body = json.dumps({ - "nonce": nonce(), - "username": "a", - "password": "1234", - "user_type": "invalid"} + body = json.dumps( + { + "nonce": nonce(), + "username": "a", + "password": "1234", + "user_type": "invalid", + } ) request, channel = self.make_request("POST", self.url, body.encode('utf8')) self.render(request) @@ -357,9 +357,7 @@ class ShutdownRoomTestCase(unittest.HomeserverTestCase): hs.config.user_consent_version = "1" consent_uri_builder = Mock() - consent_uri_builder.build_user_consent_uri.return_value = ( - "http://example.com" - ) + consent_uri_builder.build_user_consent_uri.return_value = "http://example.com" self.event_creation_handler._consent_uri_builder = consent_uri_builder self.store = hs.get_datastore() @@ -371,9 +369,7 @@ class ShutdownRoomTestCase(unittest.HomeserverTestCase): self.other_user_token = self.login("user", "pass") # Mark the admin user as having consented - self.get_success( - self.store.user_set_consent_version(self.admin_user, "1"), - ) + self.get_success(self.store.user_set_consent_version(self.admin_user, "1")) def test_shutdown_room_consent(self): """Test that we can shutdown rooms with local users who have not @@ -385,9 +381,7 @@ class ShutdownRoomTestCase(unittest.HomeserverTestCase): room_id = self.helper.create_room_as(self.other_user, tok=self.other_user_token) # Assert one user in room - users_in_room = self.get_success( - self.store.get_users_in_room(room_id), - ) + users_in_room = self.get_success(self.store.get_users_in_room(room_id)) self.assertEqual([self.other_user], users_in_room) # Enable require consent to send events @@ -395,8 +389,7 @@ class ShutdownRoomTestCase(unittest.HomeserverTestCase): # Assert that the user is getting consent error self.helper.send( - room_id, - body="foo", tok=self.other_user_token, expect_code=403, + room_id, body="foo", tok=self.other_user_token, expect_code=403 ) # Test that the admin can still send shutdown @@ -412,9 +405,7 @@ class ShutdownRoomTestCase(unittest.HomeserverTestCase): self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) # Assert there is now no longer anyone in the room - users_in_room = self.get_success( - self.store.get_users_in_room(room_id), - ) + users_in_room = self.get_success(self.store.get_users_in_room(room_id)) self.assertEqual([], users_in_room) @unittest.DEBUG @@ -459,24 +450,20 @@ class ShutdownRoomTestCase(unittest.HomeserverTestCase): url = "rooms/%s/initialSync" % (room_id,) request, channel = self.make_request( - "GET", - url.encode('ascii'), - access_token=self.admin_user_tok, + "GET", url.encode('ascii'), access_token=self.admin_user_tok ) self.render(request) self.assertEqual( - expect_code, int(channel.result["code"]), msg=channel.result["body"], + expect_code, int(channel.result["code"]), msg=channel.result["body"] ) url = "events?timeout=0&room_id=" + room_id request, channel = self.make_request( - "GET", - url.encode('ascii'), - access_token=self.admin_user_tok, + "GET", 
url.encode('ascii'), access_token=self.admin_user_tok ) self.render(request) self.assertEqual( - expect_code, int(channel.result["code"]), msg=channel.result["body"], + expect_code, int(channel.result["code"]), msg=channel.result["body"] ) @@ -502,15 +489,11 @@ class DeleteGroupTestCase(unittest.HomeserverTestCase): "POST", "/create_group".encode('ascii'), access_token=self.admin_user_tok, - content={ - "localpart": "test", - } + content={"localpart": "test"}, ) self.render(request) - self.assertEqual( - 200, int(channel.result["code"]), msg=channel.result["body"], - ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) group_id = channel.json_body["group_id"] @@ -520,27 +503,17 @@ class DeleteGroupTestCase(unittest.HomeserverTestCase): url = "/groups/%s/admin/users/invite/%s" % (group_id, self.other_user) request, channel = self.make_request( - "PUT", - url.encode('ascii'), - access_token=self.admin_user_tok, - content={} + "PUT", url.encode('ascii'), access_token=self.admin_user_tok, content={} ) self.render(request) - self.assertEqual( - 200, int(channel.result["code"]), msg=channel.result["body"], - ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) url = "/groups/%s/self/accept_invite" % (group_id,) request, channel = self.make_request( - "PUT", - url.encode('ascii'), - access_token=self.other_user_token, - content={} + "PUT", url.encode('ascii'), access_token=self.other_user_token, content={} ) self.render(request) - self.assertEqual( - 200, int(channel.result["code"]), msg=channel.result["body"], - ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) # Check other user knows they're in the group self.assertIn(group_id, self._get_groups_user_is_in(self.admin_user_tok)) @@ -552,15 +525,11 @@ class DeleteGroupTestCase(unittest.HomeserverTestCase): "POST", url.encode('ascii'), access_token=self.admin_user_tok, - content={ - "localpart": "test", - } + content={"localpart": "test"}, ) self.render(request) - self.assertEqual( - 200, int(channel.result["code"]), msg=channel.result["body"], - ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) # Check group returns 404 self._check_group(group_id, expect_code=404) @@ -576,28 +545,22 @@ class DeleteGroupTestCase(unittest.HomeserverTestCase): url = "/groups/%s/profile" % (group_id,) request, channel = self.make_request( - "GET", - url.encode('ascii'), - access_token=self.admin_user_tok, + "GET", url.encode('ascii'), access_token=self.admin_user_tok ) self.render(request) self.assertEqual( - expect_code, int(channel.result["code"]), msg=channel.result["body"], + expect_code, int(channel.result["code"]), msg=channel.result["body"] ) def _get_groups_user_is_in(self, access_token): """Returns the list of groups the user is in (given their access token) """ request, channel = self.make_request( - "GET", - "/joined_groups".encode('ascii'), - access_token=access_token, + "GET", "/joined_groups".encode('ascii'), access_token=access_token ) self.render(request) - self.assertEqual( - 200, int(channel.result["code"]), msg=channel.result["body"], - ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) return channel.json_body["groups"] diff --git a/tests/rest/client/test_identity.py b/tests/rest/client/test_identity.py index 2e51ffa418..1a714ff58a 100644 --- a/tests/rest/client/test_identity.py +++ b/tests/rest/client/test_identity.py @@ -44,7 +44,7 @@ class IdentityTestCase(unittest.HomeserverTestCase): tok 
= self.login("kermit", "monkey") request, channel = self.make_request( - b"POST", "/createRoom", b"{}", access_token=tok, + b"POST", "/createRoom", b"{}", access_token=tok ) self.render(request) self.assertEquals(channel.result["code"], b"200", channel.result) @@ -56,11 +56,9 @@ class IdentityTestCase(unittest.HomeserverTestCase): "address": "test@example.com", } request_data = json.dumps(params) - request_url = ( - "/rooms/%s/invite" % (room_id) - ).encode('ascii') + request_url = ("/rooms/%s/invite" % (room_id)).encode('ascii') request, channel = self.make_request( - b"POST", request_url, request_data, access_token=tok, + b"POST", request_url, request_data, access_token=tok ) self.render(request) self.assertEquals(channel.result["code"], b"403", channel.result) diff --git a/tests/rest/client/v1/test_directory.py b/tests/rest/client/v1/test_directory.py index f63c68e7ed..73c5b44b46 100644 --- a/tests/rest/client/v1/test_directory.py +++ b/tests/rest/client/v1/test_directory.py @@ -45,7 +45,7 @@ class DirectoryTestCase(unittest.HomeserverTestCase): self.room_owner_tok = self.login("room_owner", "test") self.room_id = self.helper.create_room_as( - self.room_owner, tok=self.room_owner_tok, + self.room_owner, tok=self.room_owner_tok ) self.user = self.register_user("user", "test") @@ -80,12 +80,10 @@ class DirectoryTestCase(unittest.HomeserverTestCase): # We use deliberately a localpart under the length threshold so # that we can make sure that the check is done on the whole alias. - data = { - "room_alias_name": random_string(256 - len(self.hs.hostname)), - } + data = {"room_alias_name": random_string(256 - len(self.hs.hostname))} request_data = json.dumps(data) request, channel = self.make_request( - "POST", url, request_data, access_token=self.user_tok, + "POST", url, request_data, access_token=self.user_tok ) self.render(request) self.assertEqual(channel.code, 400, channel.result) @@ -96,51 +94,42 @@ class DirectoryTestCase(unittest.HomeserverTestCase): # Check with an alias of allowed length. There should already be # a test that ensures it works in test_register.py, but let's be # as cautious as possible here. 
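# [Editor's aside, not part of the patch] The directory tests in this hunk rely on
# the alias length check applying to the full "#localpart:server" string rather
# than just the localpart, which is why the failing case builds a localpart of
# random_string(256 - len(self.hs.hostname)). A minimal sketch of that check,
# assuming the 255-character limit on room aliases these tests appear to exercise:
MAX_ALIAS_LENGTH_SKETCH = 255  # assumed limit, for illustration only

def _alias_too_long_sketch(localpart, server_name):
    alias = "#%s:%s" % (localpart, server_name)
    return len(alias) > MAX_ALIAS_LENGTH_SKETCH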
- data = { - "room_alias_name": random_string(5), - } + data = {"room_alias_name": random_string(5)} request_data = json.dumps(data) request, channel = self.make_request( - "POST", url, request_data, access_token=self.user_tok, + "POST", url, request_data, access_token=self.user_tok ) self.render(request) self.assertEqual(channel.code, 200, channel.result) def set_alias_via_state_event(self, expected_code, alias_length=5): - url = ("/_matrix/client/r0/rooms/%s/state/m.room.aliases/%s" - % (self.room_id, self.hs.hostname)) - - data = { - "aliases": [ - self.random_alias(alias_length), - ], - } + url = "/_matrix/client/r0/rooms/%s/state/m.room.aliases/%s" % ( + self.room_id, + self.hs.hostname, + ) + + data = {"aliases": [self.random_alias(alias_length)]} request_data = json.dumps(data) request, channel = self.make_request( - "PUT", url, request_data, access_token=self.user_tok, + "PUT", url, request_data, access_token=self.user_tok ) self.render(request) self.assertEqual(channel.code, expected_code, channel.result) def set_alias_via_directory(self, expected_code, alias_length=5): url = "/_matrix/client/r0/directory/room/%s" % self.random_alias(alias_length) - data = { - "room_id": self.room_id, - } + data = {"room_id": self.room_id} request_data = json.dumps(data) request, channel = self.make_request( - "PUT", url, request_data, access_token=self.user_tok, + "PUT", url, request_data, access_token=self.user_tok ) self.render(request) self.assertEqual(channel.code, expected_code, channel.result) def random_alias(self, length): - return RoomAlias( - random_string(length), - self.hs.hostname, - ).to_string() + return RoomAlias(random_string(length), self.hs.hostname).to_string() def ensure_user_left_room(self): self.ensure_membership("leave") @@ -151,17 +140,9 @@ class DirectoryTestCase(unittest.HomeserverTestCase): def ensure_membership(self, membership): try: if membership == "leave": - self.helper.leave( - room=self.room_id, - user=self.user, - tok=self.user_tok, - ) + self.helper.leave(room=self.room_id, user=self.user, tok=self.user_tok) if membership == "join": - self.helper.join( - room=self.room_id, - user=self.user, - tok=self.user_tok, - ) + self.helper.join(room=self.room_id, user=self.user, tok=self.user_tok) except AssertionError: # We don't care whether the leave request didn't return a 200 (e.g. # if the user isn't already in the room), because we only want to diff --git a/tests/rest/client/v1/test_login.py b/tests/rest/client/v1/test_login.py index 9ebd91f678..0397f91a9e 100644 --- a/tests/rest/client/v1/test_login.py +++ b/tests/rest/client/v1/test_login.py @@ -37,10 +37,7 @@ class LoginRestServletTestCase(unittest.HomeserverTestCase): for i in range(0, 6): params = { "type": "m.login.password", - "identifier": { - "type": "m.id.user", - "user": "kermit" + str(i), - }, + "identifier": {"type": "m.id.user", "user": "kermit" + str(i)}, "password": "monkey", } request_data = json.dumps(params) @@ -57,14 +54,11 @@ class LoginRestServletTestCase(unittest.HomeserverTestCase): # than 1min. self.assertTrue(retry_after_ms < 6000) - self.reactor.advance(retry_after_ms / 1000.) 
+ self.reactor.advance(retry_after_ms / 1000.0) params = { "type": "m.login.password", - "identifier": { - "type": "m.id.user", - "user": "kermit" + str(i), - }, + "identifier": {"type": "m.id.user", "user": "kermit" + str(i)}, "password": "monkey", } request_data = json.dumps(params) @@ -82,10 +76,7 @@ class LoginRestServletTestCase(unittest.HomeserverTestCase): for i in range(0, 6): params = { "type": "m.login.password", - "identifier": { - "type": "m.id.user", - "user": "kermit", - }, + "identifier": {"type": "m.id.user", "user": "kermit"}, "password": "monkey", } request_data = json.dumps(params) @@ -102,14 +93,11 @@ class LoginRestServletTestCase(unittest.HomeserverTestCase): # than 1min. self.assertTrue(retry_after_ms < 6000) - self.reactor.advance(retry_after_ms / 1000.) + self.reactor.advance(retry_after_ms / 1000.0) params = { "type": "m.login.password", - "identifier": { - "type": "m.id.user", - "user": "kermit", - }, + "identifier": {"type": "m.id.user", "user": "kermit"}, "password": "monkey", } request_data = json.dumps(params) @@ -127,10 +115,7 @@ class LoginRestServletTestCase(unittest.HomeserverTestCase): for i in range(0, 6): params = { "type": "m.login.password", - "identifier": { - "type": "m.id.user", - "user": "kermit", - }, + "identifier": {"type": "m.id.user", "user": "kermit"}, "password": "notamonkey", } request_data = json.dumps(params) @@ -147,14 +132,11 @@ class LoginRestServletTestCase(unittest.HomeserverTestCase): # than 1min. self.assertTrue(retry_after_ms < 6000) - self.reactor.advance(retry_after_ms / 1000.) + self.reactor.advance(retry_after_ms / 1000.0) params = { "type": "m.login.password", - "identifier": { - "type": "m.id.user", - "user": "kermit", - }, + "identifier": {"type": "m.id.user", "user": "kermit"}, "password": "notamonkey", } request_data = json.dumps(params) diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py index 7306e61b7c..ed034879cf 100644 --- a/tests/rest/client/v1/test_profile.py +++ b/tests/rest/client/v1/test_profile.py @@ -199,37 +199,24 @@ class ProfilesRestrictedTestCase(unittest.HomeserverTestCase): def test_in_shared_room(self): self.ensure_requester_left_room() - self.helper.join( - room=self.room_id, - user=self.requester, - tok=self.requester_tok, - ) + self.helper.join(room=self.room_id, user=self.requester, tok=self.requester_tok) self.try_fetch_profile(200, self.requester_tok) def try_fetch_profile(self, expected_code, access_token=None): - self.request_profile( - expected_code, - access_token=access_token - ) + self.request_profile(expected_code, access_token=access_token) self.request_profile( - expected_code, - url_suffix="/displayname", - access_token=access_token, + expected_code, url_suffix="/displayname", access_token=access_token ) self.request_profile( - expected_code, - url_suffix="/avatar_url", - access_token=access_token, + expected_code, url_suffix="/avatar_url", access_token=access_token ) def request_profile(self, expected_code, url_suffix="", access_token=None): request, channel = self.make_request( - "GET", - self.profile_url + url_suffix, - access_token=access_token, + "GET", self.profile_url + url_suffix, access_token=access_token ) self.render(request) self.assertEqual(channel.code, expected_code, channel.result) @@ -237,9 +224,7 @@ class ProfilesRestrictedTestCase(unittest.HomeserverTestCase): def ensure_requester_left_room(self): try: self.helper.leave( - room=self.room_id, - user=self.requester, - tok=self.requester_tok, + room=self.room_id, user=self.requester, 
tok=self.requester_tok ) except AssertionError: # We don't care whether the leave request didn't return a 200 (e.g. diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index 1c3a621d26..be95dc592d 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -41,11 +41,10 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): as_token = "i_am_an_app_service" appservice = ApplicationService( - as_token, self.hs.config.server_name, + as_token, + self.hs.config.server_name, id="1234", - namespaces={ - "users": [{"regex": r"@as_user.*", "exclusive": True}], - }, + namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, ) self.hs.get_datastore().services_cache.append(appservice) @@ -57,10 +56,7 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): self.render(request) self.assertEquals(channel.result["code"], b"200", channel.result) - det_data = { - "user_id": user_id, - "home_server": self.hs.hostname, - } + det_data = {"user_id": user_id, "home_server": self.hs.hostname} self.assertDictContainsSubset(det_data, channel.json_body) def test_POST_appservice_registration_invalid(self): @@ -128,10 +124,7 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): request, channel = self.make_request(b"POST", self.url + b"?kind=guest", b"{}") self.render(request) - det_data = { - "home_server": self.hs.hostname, - "device_id": "guest_device", - } + det_data = {"home_server": self.hs.hostname, "device_id": "guest_device"} self.assertEquals(channel.result["code"], b"200", channel.result) self.assertDictContainsSubset(det_data, channel.json_body) @@ -159,7 +152,7 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): else: self.assertEquals(channel.result["code"], b"200", channel.result) - self.reactor.advance(retry_after_ms / 1000.) + self.reactor.advance(retry_after_ms / 1000.0) request, channel = self.make_request(b"POST", self.url + b"?kind=guest", b"{}") self.render(request) @@ -187,7 +180,7 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): else: self.assertEquals(channel.result["code"], b"200", channel.result) - self.reactor.advance(retry_after_ms / 1000.) + self.reactor.advance(retry_after_ms / 1000.0) request, channel = self.make_request(b"POST", self.url + b"?kind=guest", b"{}") self.render(request) @@ -221,23 +214,19 @@ class AccountValidityTestCase(unittest.HomeserverTestCase): # The specific endpoint doesn't matter, all we need is an authenticated # endpoint. 
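# [Editor's aside, not part of the patch] The account-validity tests below boil
# down to a simple predicate: once the configured validity period has elapsed
# without a renewal (manual, via the admin endpoint, or via the renewal email),
# authenticated requests are rejected with a 403 and errcode Codes.EXPIRED_ACCOUNT.
# A minimal sketch, with expiration_ts_ms standing in for the per-user value the
# store keeps:
def _account_expired_sketch(now_ms, expiration_ts_ms):
    # a successful renewal pushes expiration_ts_ms forward, clearing the 403s
    return now_ms > expiration_ts_ms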
- request, channel = self.make_request( - b"GET", "/sync", access_token=tok, - ) + request, channel = self.make_request(b"GET", "/sync", access_token=tok) self.render(request) self.assertEquals(channel.result["code"], b"200", channel.result) self.reactor.advance(datetime.timedelta(weeks=1).total_seconds()) - request, channel = self.make_request( - b"GET", "/sync", access_token=tok, - ) + request, channel = self.make_request(b"GET", "/sync", access_token=tok) self.render(request) self.assertEquals(channel.result["code"], b"403", channel.result) self.assertEquals( - channel.json_body["errcode"], Codes.EXPIRED_ACCOUNT, channel.result, + channel.json_body["errcode"], Codes.EXPIRED_ACCOUNT, channel.result ) def test_manual_renewal(self): @@ -253,21 +242,17 @@ class AccountValidityTestCase(unittest.HomeserverTestCase): admin_tok = self.login("admin", "adminpassword") url = "/_matrix/client/unstable/admin/account_validity/validity" - params = { - "user_id": user_id, - } + params = {"user_id": user_id} request_data = json.dumps(params) request, channel = self.make_request( - b"POST", url, request_data, access_token=admin_tok, + b"POST", url, request_data, access_token=admin_tok ) self.render(request) self.assertEquals(channel.result["code"], b"200", channel.result) # The specific endpoint doesn't matter, all we need is an authenticated # endpoint. - request, channel = self.make_request( - b"GET", "/sync", access_token=tok, - ) + request, channel = self.make_request(b"GET", "/sync", access_token=tok) self.render(request) self.assertEquals(channel.result["code"], b"200", channel.result) @@ -286,20 +271,18 @@ class AccountValidityTestCase(unittest.HomeserverTestCase): } request_data = json.dumps(params) request, channel = self.make_request( - b"POST", url, request_data, access_token=admin_tok, + b"POST", url, request_data, access_token=admin_tok ) self.render(request) self.assertEquals(channel.result["code"], b"200", channel.result) # The specific endpoint doesn't matter, all we need is an authenticated # endpoint. - request, channel = self.make_request( - b"GET", "/sync", access_token=tok, - ) + request, channel = self.make_request(b"GET", "/sync", access_token=tok) self.render(request) self.assertEquals(channel.result["code"], b"403", channel.result) self.assertEquals( - channel.json_body["errcode"], Codes.EXPIRED_ACCOUNT, channel.result, + channel.json_body["errcode"], Codes.EXPIRED_ACCOUNT, channel.result ) @@ -358,10 +341,15 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): # We need to manually add an email address otherwise the handler will do # nothing. now = self.hs.clock.time_msec() - self.get_success(self.store.user_add_threepid( - user_id=user_id, medium="email", address="kermit@example.com", - validated_at=now, added_at=now, - )) + self.get_success( + self.store.user_add_threepid( + user_id=user_id, + medium="email", + address="kermit@example.com", + validated_at=now, + added_at=now, + ) + ) # Move 6 days forward. This should trigger a renewal email to be sent. self.reactor.advance(datetime.timedelta(days=6).total_seconds()) @@ -379,9 +367,7 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): # our access token should be denied from now, otherwise they should # succeed. 
self.reactor.advance(datetime.timedelta(days=3).total_seconds()) - request, channel = self.make_request( - b"GET", "/sync", access_token=tok, - ) + request, channel = self.make_request(b"GET", "/sync", access_token=tok) self.render(request) self.assertEquals(channel.result["code"], b"200", channel.result) @@ -393,13 +379,19 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): # We need to manually add an email address otherwise the handler will do # nothing. now = self.hs.clock.time_msec() - self.get_success(self.store.user_add_threepid( - user_id=user_id, medium="email", address="kermit@example.com", - validated_at=now, added_at=now, - )) + self.get_success( + self.store.user_add_threepid( + user_id=user_id, + medium="email", + address="kermit@example.com", + validated_at=now, + added_at=now, + ) + ) request, channel = self.make_request( - b"POST", "/_matrix/client/unstable/account_validity/send_mail", + b"POST", + "/_matrix/client/unstable/account_validity/send_mail", access_token=tok, ) self.render(request) diff --git a/tests/rest/media/v1/test_base.py b/tests/rest/media/v1/test_base.py index af8f74eb42..00688a7325 100644 --- a/tests/rest/media/v1/test_base.py +++ b/tests/rest/media/v1/test_base.py @@ -26,20 +26,14 @@ class GetFileNameFromHeadersTests(unittest.TestCase): b'inline; filename="aze%20rty"': u"aze%20rty", b'inline; filename="aze\"rty"': u'aze"rty', b'inline; filename="azer;ty"': u"azer;ty", - b"inline; filename*=utf-8''foo%C2%A3bar": u"foo£bar", } def tests(self): for hdr, expected in self.TEST_CASES.items(): - res = get_filename_from_headers( - { - b'Content-Disposition': [hdr], - }, - ) + res = get_filename_from_headers({b'Content-Disposition': [hdr]}) self.assertEqual( - res, expected, - "expected output for %s to be %s but was %s" % ( - hdr, expected, res, - ) + res, + expected, + "expected output for %s to be %s but was %s" % (hdr, expected, res), ) diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py index 8d8f03e005..b090bb974c 100644 --- a/tests/rest/test_well_known.py +++ b/tests/rest/test_well_known.py @@ -31,27 +31,24 @@ class WellKnownTests(unittest.HomeserverTestCase): self.hs.config.default_identity_server = "https://testis" request, channel = self.make_request( - "GET", - "/.well-known/matrix/client", - shorthand=False, + "GET", "/.well-known/matrix/client", shorthand=False ) self.render(request) self.assertEqual(request.code, 200) self.assertEqual( - channel.json_body, { + channel.json_body, + { "m.homeserver": {"base_url": "https://tesths"}, "m.identity_server": {"base_url": "https://testis"}, - } + }, ) def test_well_known_no_public_baseurl(self): self.hs.config.public_baseurl = None request, channel = self.make_request( - "GET", - "/.well-known/matrix/client", - shorthand=False, + "GET", "/.well-known/matrix/client", shorthand=False ) self.render(request) diff --git a/tests/server.py b/tests/server.py index 8f89f4a83d..fc41345488 100644 --- a/tests/server.py +++ b/tests/server.py @@ -182,7 +182,8 @@ def make_request( if federation_auth_origin is not None: req.requestHeaders.addRawHeader( - b"Authorization", b"X-Matrix origin=%s,key=,sig=" % (federation_auth_origin,) + b"Authorization", + b"X-Matrix origin=%s,key=,sig=" % (federation_auth_origin,), ) if content: @@ -233,7 +234,7 @@ class ThreadedMemoryReactorClock(MemoryReactorClock): class FakeResolver(object): def getHostByName(self, name, timeout=None): if name not in lookups: - return fail(DNSLookupError("OH NO: unknown %s" % (name, ))) + return 
fail(DNSLookupError("OH NO: unknown %s" % (name,))) return succeed(lookups[name]) self.nameResolver = SimpleResolverComplexifier(FakeResolver()) @@ -454,6 +455,6 @@ class FakeTransport(object): logger.warning("Exception writing to protocol: %s", e) return - self.buffer = self.buffer[len(to_write):] + self.buffer = self.buffer[len(to_write) :] if self.buffer and self.autoflush: self._reactor.callLater(0.0, self.flush) diff --git a/tests/server_notices/test_resource_limits_server_notices.py b/tests/server_notices/test_resource_limits_server_notices.py index be73e718c2..a490b81ed4 100644 --- a/tests/server_notices/test_resource_limits_server_notices.py +++ b/tests/server_notices/test_resource_limits_server_notices.py @@ -27,7 +27,6 @@ from tests import unittest class TestResourceLimitsServerNotices(unittest.HomeserverTestCase): - def make_homeserver(self, reactor, clock): hs_config = self.default_config("test") hs_config.server_notices_mxid = "@server:test" diff --git a/tests/state/test_v2.py b/tests/state/test_v2.py index f448b01326..9c5311d916 100644 --- a/tests/state/test_v2.py +++ b/tests/state/test_v2.py @@ -50,6 +50,7 @@ class FakeEvent(object): refer to events. The event_id has node_id as localpart and example.com as domain. """ + def __init__(self, id, sender, type, state_key, content): self.node_id = id self.event_id = EventID(id, "example.com").to_string() @@ -142,24 +143,14 @@ INITIAL_EVENTS = [ content=MEMBERSHIP_CONTENT_JOIN, ), FakeEvent( - id="START", - sender=ZARA, - type=EventTypes.Message, - state_key=None, - content={}, + id="START", sender=ZARA, type=EventTypes.Message, state_key=None, content={} ), FakeEvent( - id="END", - sender=ZARA, - type=EventTypes.Message, - state_key=None, - content={}, + id="END", sender=ZARA, type=EventTypes.Message, state_key=None, content={} ), ] -INITIAL_EDGES = [ - "START", "IMZ", "IMC", "IMB", "IJR", "IPOWER", "IMA", "CREATE", -] +INITIAL_EDGES = ["START", "IMZ", "IMC", "IMB", "IJR", "IPOWER", "IMA", "CREATE"] class StateTestCase(unittest.TestCase): @@ -170,12 +161,7 @@ class StateTestCase(unittest.TestCase): sender=ALICE, type=EventTypes.PowerLevels, state_key="", - content={ - "users": { - ALICE: 100, - BOB: 50, - } - }, + content={"users": {ALICE: 100, BOB: 50}}, ), FakeEvent( id="MA", @@ -196,19 +182,11 @@ class StateTestCase(unittest.TestCase): sender=BOB, type=EventTypes.PowerLevels, state_key='', - content={ - "users": { - ALICE: 100, - BOB: 50, - }, - }, + content={"users": {ALICE: 100, BOB: 50}}, ), ] - edges = [ - ["END", "MB", "MA", "PA", "START"], - ["END", "PB", "PA"], - ] + edges = [["END", "MB", "MA", "PA", "START"], ["END", "PB", "PA"]] expected_state_ids = ["PA", "MA", "MB"] @@ -232,10 +210,7 @@ class StateTestCase(unittest.TestCase): ), ] - edges = [ - ["END", "JR", "START"], - ["END", "ME", "START"], - ] + edges = [["END", "JR", "START"], ["END", "ME", "START"]] expected_state_ids = ["JR"] @@ -248,45 +223,25 @@ class StateTestCase(unittest.TestCase): sender=ALICE, type=EventTypes.PowerLevels, state_key="", - content={ - "users": { - ALICE: 100, - BOB: 50, - } - }, + content={"users": {ALICE: 100, BOB: 50}}, ), FakeEvent( id="PB", sender=BOB, type=EventTypes.PowerLevels, state_key='', - content={ - "users": { - ALICE: 100, - BOB: 50, - CHARLIE: 50, - }, - }, + content={"users": {ALICE: 100, BOB: 50, CHARLIE: 50}}, ), FakeEvent( id="PC", sender=CHARLIE, type=EventTypes.PowerLevels, state_key='', - content={ - "users": { - ALICE: 100, - BOB: 50, - CHARLIE: 0, - }, - }, + content={"users": {ALICE: 100, BOB: 50, CHARLIE: 0}}, 
), ] - edges = [ - ["END", "PC", "PB", "PA", "START"], - ["END", "PA"], - ] + edges = [["END", "PC", "PB", "PA", "START"], ["END", "PA"]] expected_state_ids = ["PC"] @@ -295,68 +250,38 @@ class StateTestCase(unittest.TestCase): def test_topic_basic(self): events = [ FakeEvent( - id="T1", - sender=ALICE, - type=EventTypes.Topic, - state_key="", - content={}, + id="T1", sender=ALICE, type=EventTypes.Topic, state_key="", content={} ), FakeEvent( id="PA1", sender=ALICE, type=EventTypes.PowerLevels, state_key='', - content={ - "users": { - ALICE: 100, - BOB: 50, - }, - }, + content={"users": {ALICE: 100, BOB: 50}}, ), FakeEvent( - id="T2", - sender=ALICE, - type=EventTypes.Topic, - state_key="", - content={}, + id="T2", sender=ALICE, type=EventTypes.Topic, state_key="", content={} ), FakeEvent( id="PA2", sender=ALICE, type=EventTypes.PowerLevels, state_key='', - content={ - "users": { - ALICE: 100, - BOB: 0, - }, - }, + content={"users": {ALICE: 100, BOB: 0}}, ), FakeEvent( id="PB", sender=BOB, type=EventTypes.PowerLevels, state_key='', - content={ - "users": { - ALICE: 100, - BOB: 50, - }, - }, + content={"users": {ALICE: 100, BOB: 50}}, ), FakeEvent( - id="T3", - sender=BOB, - type=EventTypes.Topic, - state_key="", - content={}, + id="T3", sender=BOB, type=EventTypes.Topic, state_key="", content={} ), ] - edges = [ - ["END", "PA2", "T2", "PA1", "T1", "START"], - ["END", "T3", "PB", "PA1"], - ] + edges = [["END", "PA2", "T2", "PA1", "T1", "START"], ["END", "T3", "PB", "PA1"]] expected_state_ids = ["PA2", "T2"] @@ -365,30 +290,17 @@ class StateTestCase(unittest.TestCase): def test_topic_reset(self): events = [ FakeEvent( - id="T1", - sender=ALICE, - type=EventTypes.Topic, - state_key="", - content={}, + id="T1", sender=ALICE, type=EventTypes.Topic, state_key="", content={} ), FakeEvent( id="PA", sender=ALICE, type=EventTypes.PowerLevels, state_key='', - content={ - "users": { - ALICE: 100, - BOB: 50, - }, - }, + content={"users": {ALICE: 100, BOB: 50}}, ), FakeEvent( - id="T2", - sender=BOB, - type=EventTypes.Topic, - state_key="", - content={}, + id="T2", sender=BOB, type=EventTypes.Topic, state_key="", content={} ), FakeEvent( id="MB", @@ -399,10 +311,7 @@ class StateTestCase(unittest.TestCase): ), ] - edges = [ - ["END", "MB", "T2", "PA", "T1", "START"], - ["END", "T1"], - ] + edges = [["END", "MB", "T2", "PA", "T1", "START"], ["END", "T1"]] expected_state_ids = ["T1", "MB", "PA"] @@ -411,61 +320,34 @@ class StateTestCase(unittest.TestCase): def test_topic(self): events = [ FakeEvent( - id="T1", - sender=ALICE, - type=EventTypes.Topic, - state_key="", - content={}, + id="T1", sender=ALICE, type=EventTypes.Topic, state_key="", content={} ), FakeEvent( id="PA1", sender=ALICE, type=EventTypes.PowerLevels, state_key='', - content={ - "users": { - ALICE: 100, - BOB: 50, - }, - }, + content={"users": {ALICE: 100, BOB: 50}}, ), FakeEvent( - id="T2", - sender=ALICE, - type=EventTypes.Topic, - state_key="", - content={}, + id="T2", sender=ALICE, type=EventTypes.Topic, state_key="", content={} ), FakeEvent( id="PA2", sender=ALICE, type=EventTypes.PowerLevels, state_key='', - content={ - "users": { - ALICE: 100, - BOB: 0, - }, - }, + content={"users": {ALICE: 100, BOB: 0}}, ), FakeEvent( id="PB", sender=BOB, type=EventTypes.PowerLevels, state_key='', - content={ - "users": { - ALICE: 100, - BOB: 50, - }, - }, + content={"users": {ALICE: 100, BOB: 50}}, ), FakeEvent( - id="T3", - sender=BOB, - type=EventTypes.Topic, - state_key="", - content={}, + id="T3", sender=BOB, type=EventTypes.Topic, 
state_key="", content={} ), FakeEvent( id="MZ1", @@ -475,11 +357,7 @@ class StateTestCase(unittest.TestCase): content={}, ), FakeEvent( - id="T4", - sender=ALICE, - type=EventTypes.Topic, - state_key="", - content={}, + id="T4", sender=ALICE, type=EventTypes.Topic, state_key="", content={} ), ] @@ -587,13 +465,7 @@ class StateTestCase(unittest.TestCase): class LexicographicalTestCase(unittest.TestCase): def test_simple(self): - graph = { - "l": {"o"}, - "m": {"n", "o"}, - "n": {"o"}, - "o": set(), - "p": {"o"}, - } + graph = {"l": {"o"}, "m": {"n", "o"}, "n": {"o"}, "o": set(), "p": {"o"}} res = list(lexicographical_topological_sort(graph, key=lambda x: x)) @@ -680,7 +552,13 @@ class SimpleParamStateTestCase(unittest.TestCase): self.expected_combined_state = { (e.type, e.state_key): e.event_id - for e in [create_event, alice_member, join_rules, bob_member, charlie_member] + for e in [ + create_event, + alice_member, + join_rules, + bob_member, + charlie_member, + ] } def test_event_map_none(self): @@ -720,11 +598,7 @@ class TestStateResolutionStore(object): Deferred[dict[str, FrozenEvent]]: Dict from event_id to event. """ - return { - eid: self.event_map[eid] - for eid in event_ids - if eid in self.event_map - } + return {eid: self.event_map[eid] for eid in event_ids if eid in self.event_map} def get_auth_chain(self, event_ids): """Gets the full auth chain for a set of events (including rejected diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py index 5568a607c7..fbb9302694 100644 --- a/tests/storage/test_background_update.py +++ b/tests/storage/test_background_update.py @@ -9,9 +9,7 @@ from tests.utils import setup_test_homeserver class BackgroundUpdateTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - hs = yield setup_test_homeserver( - self.addCleanup - ) + hs = yield setup_test_homeserver(self.addCleanup) self.store = hs.get_datastore() self.clock = hs.get_clock() diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py index f18db8c384..c778de1f0c 100644 --- a/tests/storage/test_base.py +++ b/tests/storage/test_base.py @@ -56,10 +56,7 @@ class SQLBaseStoreTestCase(unittest.TestCase): fake_engine = Mock(wraps=engine) fake_engine.can_native_upsert = False hs = TestHomeServer( - "test", - db_pool=self.db_pool, - config=config, - database_engine=fake_engine, + "test", db_pool=self.db_pool, config=config, database_engine=fake_engine ) self.datastore = SQLBaseStore(None, hs) diff --git a/tests/storage/test_end_to_end_keys.py b/tests/storage/test_end_to_end_keys.py index 11fb8c0c19..cd2bcd4ca3 100644 --- a/tests/storage/test_end_to_end_keys.py +++ b/tests/storage/test_end_to_end_keys.py @@ -20,7 +20,6 @@ import tests.utils class EndToEndKeyStoreTestCase(tests.unittest.TestCase): - @defer.inlineCallbacks def setUp(self): hs = yield tests.utils.setup_test_homeserver(self.addCleanup) diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index d6569a82bb..f458c03054 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -56,8 +56,7 @@ class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase): self.store.register(user_id=user1, token="123", password_hash=None) self.store.register(user_id=user2, token="456", password_hash=None) self.store.register( - user_id=user3, token="789", - password_hash=None, user_type=UserTypes.SUPPORT + user_id=user3, token="789", password_hash=None, user_type=UserTypes.SUPPORT ) self.pump() @@ 
-173,9 +172,7 @@ class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase): def test_populate_monthly_users_should_update(self): self.store.upsert_monthly_active_user = Mock() - self.store.is_trial_user = Mock( - return_value=defer.succeed(False) - ) + self.store.is_trial_user = Mock(return_value=defer.succeed(False)) self.store.user_last_seen_monthly_active = Mock( return_value=defer.succeed(None) @@ -187,13 +184,9 @@ class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase): def test_populate_monthly_users_should_not_update(self): self.store.upsert_monthly_active_user = Mock() - self.store.is_trial_user = Mock( - return_value=defer.succeed(False) - ) + self.store.is_trial_user = Mock(return_value=defer.succeed(False)) self.store.user_last_seen_monthly_active = Mock( - return_value=defer.succeed( - self.hs.get_clock().time_msec() - ) + return_value=defer.succeed(self.hs.get_clock().time_msec()) ) self.store.populate_monthly_active_users('user_id') self.pump() @@ -243,7 +236,7 @@ class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase): user_id=support_user_id, token="123", password_hash=None, - user_type=UserTypes.SUPPORT + user_type=UserTypes.SUPPORT, ) self.store.upsert_monthly_active_user(support_user_id) diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py index 0fc5019e9f..4823d44dec 100644 --- a/tests/storage/test_redaction.py +++ b/tests/storage/test_redaction.py @@ -60,7 +60,7 @@ class RedactionTestCase(unittest.TestCase): "state_key": user.to_string(), "room_id": room.to_string(), "content": content, - } + }, ) event, context = yield self.event_creation_handler.create_new_client_event( @@ -83,7 +83,7 @@ class RedactionTestCase(unittest.TestCase): "state_key": user.to_string(), "room_id": room.to_string(), "content": {"body": body, "msgtype": u"message"}, - } + }, ) event, context = yield self.event_creation_handler.create_new_client_event( @@ -105,7 +105,7 @@ class RedactionTestCase(unittest.TestCase): "room_id": room.to_string(), "content": {"reason": reason}, "redacts": event_id, - } + }, ) event, context = yield self.event_creation_handler.create_new_client_event( diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py index cb3cc4d2e5..c0e0155bb4 100644 --- a/tests/storage/test_registration.py +++ b/tests/storage/test_registration.py @@ -116,7 +116,7 @@ class RegistrationStoreTestCase(unittest.TestCase): user_id=SUPPORT_USER, token="456", password_hash=None, - user_type=UserTypes.SUPPORT + user_type=UserTypes.SUPPORT, ) res = yield self.store.is_support_user(SUPPORT_USER) self.assertTrue(res) diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py index 063387863e..73ed943f5a 100644 --- a/tests/storage/test_roommember.py +++ b/tests/storage/test_roommember.py @@ -58,7 +58,7 @@ class RoomMemberStoreTestCase(unittest.TestCase): "state_key": user.to_string(), "room_id": room.to_string(), "content": {"membership": membership}, - } + }, ) event, context = yield self.event_creation_handler.create_new_client_event( diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index 78e260a7fa..b6169436de 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -29,7 +29,6 @@ logger = logging.getLogger(__name__) class StateStoreTestCase(tests.unittest.TestCase): - @defer.inlineCallbacks def setUp(self): hs = yield tests.utils.setup_test_homeserver(self.addCleanup) @@ -57,7 +56,7 @@ class StateStoreTestCase(tests.unittest.TestCase): "state_key": state_key, 
"room_id": room.to_string(), "content": content, - } + }, ) event, context = yield self.event_creation_handler.create_new_client_event( @@ -83,15 +82,14 @@ class StateStoreTestCase(tests.unittest.TestCase): self.room, self.u_alice, EventTypes.Name, '', {"name": "test room"} ) - state_group_map = yield self.store.get_state_groups_ids(self.room, [e2.event_id]) + state_group_map = yield self.store.get_state_groups_ids( + self.room, [e2.event_id] + ) self.assertEqual(len(state_group_map), 1) state_map = list(state_group_map.values())[0] self.assertDictEqual( state_map, - { - (EventTypes.Create, ''): e1.event_id, - (EventTypes.Name, ''): e2.event_id, - }, + {(EventTypes.Create, ''): e1.event_id, (EventTypes.Name, ''): e2.event_id}, ) @defer.inlineCallbacks @@ -103,15 +101,11 @@ class StateStoreTestCase(tests.unittest.TestCase): self.room, self.u_alice, EventTypes.Name, '', {"name": "test room"} ) - state_group_map = yield self.store.get_state_groups( - self.room, [e2.event_id]) + state_group_map = yield self.store.get_state_groups(self.room, [e2.event_id]) self.assertEqual(len(state_group_map), 1) state_list = list(state_group_map.values())[0] - self.assertEqual( - {ev.event_id for ev in state_list}, - {e1.event_id, e2.event_id}, - ) + self.assertEqual({ev.event_id for ev in state_list}, {e1.event_id, e2.event_id}) @defer.inlineCallbacks def test_get_state_for_event(self): @@ -147,9 +141,7 @@ class StateStoreTestCase(tests.unittest.TestCase): ) # check we get the full state as of the final event - state = yield self.store.get_state_for_event( - e5.event_id, - ) + state = yield self.store.get_state_for_event(e5.event_id) self.assertIsNotNone(e4) @@ -194,7 +186,7 @@ class StateStoreTestCase(tests.unittest.TestCase): state_filter=StateFilter( types={EventTypes.Member: {self.u_alice.to_string()}}, include_others=True, - ) + ), ) self.assertStateMapEqual( @@ -208,9 +200,9 @@ class StateStoreTestCase(tests.unittest.TestCase): # check that we can grab everything except members state = yield self.store.get_state_for_event( - e5.event_id, state_filter=StateFilter( - types={EventTypes.Member: set()}, - include_others=True, + e5.event_id, + state_filter=StateFilter( + types={EventTypes.Member: set()}, include_others=True ), ) @@ -229,10 +221,10 @@ class StateStoreTestCase(tests.unittest.TestCase): # test _get_state_for_group_using_cache correctly filters out members # with types=[] (state_dict, is_all) = yield self.store._get_state_for_group_using_cache( - self.store._state_group_cache, group, + self.store._state_group_cache, + group, state_filter=StateFilter( - types={EventTypes.Member: set()}, - include_others=True, + types={EventTypes.Member: set()}, include_others=True ), ) @@ -249,8 +241,7 @@ class StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_members_cache, group, state_filter=StateFilter( - types={EventTypes.Member: set()}, - include_others=True, + types={EventTypes.Member: set()}, include_others=True ), ) @@ -263,8 +254,7 @@ class StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_cache, group, state_filter=StateFilter( - types={EventTypes.Member: None}, - include_others=True, + types={EventTypes.Member: None}, include_others=True ), ) @@ -281,8 +271,7 @@ class StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_members_cache, group, state_filter=StateFilter( - types={EventTypes.Member: None}, - include_others=True, + types={EventTypes.Member: None}, include_others=True ), ) @@ -302,8 +291,7 @@ class 
StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_cache, group, state_filter=StateFilter( - types={EventTypes.Member: {e5.state_key}}, - include_others=True, + types={EventTypes.Member: {e5.state_key}}, include_others=True ), ) @@ -320,8 +308,7 @@ class StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_members_cache, group, state_filter=StateFilter( - types={EventTypes.Member: {e5.state_key}}, - include_others=True, + types={EventTypes.Member: {e5.state_key}}, include_others=True ), ) @@ -334,8 +321,7 @@ class StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_members_cache, group, state_filter=StateFilter( - types={EventTypes.Member: {e5.state_key}}, - include_others=False, + types={EventTypes.Member: {e5.state_key}}, include_others=False ), ) @@ -384,10 +370,10 @@ class StateStoreTestCase(tests.unittest.TestCase): # with types=[] room_id = self.room.to_string() (state_dict, is_all) = yield self.store._get_state_for_group_using_cache( - self.store._state_group_cache, group, + self.store._state_group_cache, + group, state_filter=StateFilter( - types={EventTypes.Member: set()}, - include_others=True, + types={EventTypes.Member: set()}, include_others=True ), ) @@ -399,8 +385,7 @@ class StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_members_cache, group, state_filter=StateFilter( - types={EventTypes.Member: set()}, - include_others=True, + types={EventTypes.Member: set()}, include_others=True ), ) @@ -413,8 +398,7 @@ class StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_cache, group, state_filter=StateFilter( - types={EventTypes.Member: None}, - include_others=True, + types={EventTypes.Member: None}, include_others=True ), ) @@ -425,8 +409,7 @@ class StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_members_cache, group, state_filter=StateFilter( - types={EventTypes.Member: None}, - include_others=True, + types={EventTypes.Member: None}, include_others=True ), ) @@ -445,8 +428,7 @@ class StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_cache, group, state_filter=StateFilter( - types={EventTypes.Member: {e5.state_key}}, - include_others=True, + types={EventTypes.Member: {e5.state_key}}, include_others=True ), ) @@ -457,8 +439,7 @@ class StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_members_cache, group, state_filter=StateFilter( - types={EventTypes.Member: {e5.state_key}}, - include_others=True, + types={EventTypes.Member: {e5.state_key}}, include_others=True ), ) @@ -471,8 +452,7 @@ class StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_cache, group, state_filter=StateFilter( - types={EventTypes.Member: {e5.state_key}}, - include_others=False, + types={EventTypes.Member: {e5.state_key}}, include_others=False ), ) @@ -483,8 +463,7 @@ class StateStoreTestCase(tests.unittest.TestCase): self.store._state_group_members_cache, group, state_filter=StateFilter( - types={EventTypes.Member: {e5.state_key}}, - include_others=False, + types={EventTypes.Member: {e5.state_key}}, include_others=False ), ) diff --git a/tests/storage/test_user_directory.py b/tests/storage/test_user_directory.py index fd3361404f..d7d244ce97 100644 --- a/tests/storage/test_user_directory.py +++ b/tests/storage/test_user_directory.py @@ -36,9 +36,7 @@ class UserDirectoryStoreTestCase(unittest.TestCase): yield self.store.update_profile_in_user_dir(ALICE, "alice", None) yield self.store.update_profile_in_user_dir(BOB, "bob", None) yield 
self.store.update_profile_in_user_dir(BOBBY, "bobby", None) - yield self.store.add_users_in_public_rooms( - "!room:id", (ALICE, BOB) - ) + yield self.store.add_users_in_public_rooms("!room:id", (ALICE, BOB)) @defer.inlineCallbacks def test_search_user_dir(self): diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py index 4c8f87e958..8b2741d277 100644 --- a/tests/test_event_auth.py +++ b/tests/test_event_auth.py @@ -37,7 +37,9 @@ class EventAuthTestCase(unittest.TestCase): # creator should be able to send state event_auth.check( - RoomVersions.V1.identifier, _random_state_event(creator), auth_events, + RoomVersions.V1.identifier, + _random_state_event(creator), + auth_events, do_sig_check=False, ) @@ -82,7 +84,9 @@ class EventAuthTestCase(unittest.TestCase): # king should be able to send state event_auth.check( - RoomVersions.V1.identifier, _random_state_event(king), auth_events, + RoomVersions.V1.identifier, + _random_state_event(king), + auth_events, do_sig_check=False, ) diff --git a/tests/test_federation.py b/tests/test_federation.py index 1a5dc32c88..6a8339b561 100644 --- a/tests/test_federation.py +++ b/tests/test_federation.py @@ -1,4 +1,3 @@ - from mock import Mock from twisted.internet.defer import maybeDeferred, succeed diff --git a/tests/test_mau.py b/tests/test_mau.py index 00be1a8c21..1fbe0d51ff 100644 --- a/tests/test_mau.py +++ b/tests/test_mau.py @@ -33,9 +33,7 @@ class TestMauLimit(unittest.HomeserverTestCase): def make_homeserver(self, reactor, clock): self.hs = self.setup_test_homeserver( - "red", - http_client=None, - federation_client=Mock(), + "red", http_client=None, federation_client=Mock() ) self.store = self.hs.get_datastore() @@ -210,9 +208,7 @@ class TestMauLimit(unittest.HomeserverTestCase): return access_token def do_sync_for_user(self, token): - request, channel = self.make_request( - "GET", "/sync", access_token=token - ) + request, channel = self.make_request("GET", "/sync", access_token=token) self.render(request) if channel.code != 200: diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 0ff6d0e283..2edbae5c6d 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -44,9 +44,7 @@ def get_sample_labels_value(sample): class TestMauLimit(unittest.TestCase): def test_basic(self): gauge = InFlightGauge( - "test1", "", - labels=["test_label"], - sub_metrics=["foo", "bar"], + "test1", "", labels=["test_label"], sub_metrics=["foo", "bar"] ) def handle1(metrics): @@ -59,37 +57,49 @@ class TestMauLimit(unittest.TestCase): gauge.register(("key1",), handle1) - self.assert_dict({ - "test1_total": {("key1",): 1}, - "test1_foo": {("key1",): 2}, - "test1_bar": {("key1",): 5}, - }, self.get_metrics_from_gauge(gauge)) + self.assert_dict( + { + "test1_total": {("key1",): 1}, + "test1_foo": {("key1",): 2}, + "test1_bar": {("key1",): 5}, + }, + self.get_metrics_from_gauge(gauge), + ) gauge.unregister(("key1",), handle1) - self.assert_dict({ - "test1_total": {("key1",): 0}, - "test1_foo": {("key1",): 0}, - "test1_bar": {("key1",): 0}, - }, self.get_metrics_from_gauge(gauge)) + self.assert_dict( + { + "test1_total": {("key1",): 0}, + "test1_foo": {("key1",): 0}, + "test1_bar": {("key1",): 0}, + }, + self.get_metrics_from_gauge(gauge), + ) gauge.register(("key1",), handle1) gauge.register(("key2",), handle2) - self.assert_dict({ - "test1_total": {("key1",): 1, ("key2",): 1}, - "test1_foo": {("key1",): 2, ("key2",): 3}, - "test1_bar": {("key1",): 5, ("key2",): 7}, - }, self.get_metrics_from_gauge(gauge)) + self.assert_dict( + { + "test1_total": 
{("key1",): 1, ("key2",): 1}, + "test1_foo": {("key1",): 2, ("key2",): 3}, + "test1_bar": {("key1",): 5, ("key2",): 7}, + }, + self.get_metrics_from_gauge(gauge), + ) gauge.unregister(("key2",), handle2) gauge.register(("key1",), handle2) - self.assert_dict({ - "test1_total": {("key1",): 2, ("key2",): 0}, - "test1_foo": {("key1",): 5, ("key2",): 0}, - "test1_bar": {("key1",): 7, ("key2",): 0}, - }, self.get_metrics_from_gauge(gauge)) + self.assert_dict( + { + "test1_total": {("key1",): 2, ("key2",): 0}, + "test1_foo": {("key1",): 5, ("key2",): 0}, + "test1_bar": {("key1",): 7, ("key2",): 0}, + }, + self.get_metrics_from_gauge(gauge), + ) def get_metrics_from_gauge(self, gauge): results = {} diff --git a/tests/test_terms_auth.py b/tests/test_terms_auth.py index 0968e86a7b..f412985d2c 100644 --- a/tests/test_terms_auth.py +++ b/tests/test_terms_auth.py @@ -69,10 +69,10 @@ class TermsTestCase(unittest.HomeserverTestCase): "name": "My Cool Privacy Policy", "url": "https://example.org/_matrix/consent?v=1.0", }, - "version": "1.0" - }, - }, - }, + "version": "1.0", + } + } + } } self.assertIsInstance(channel.json_body["params"], dict) self.assertDictContainsSubset(channel.json_body["params"], expected_params) diff --git a/tests/test_types.py b/tests/test_types.py index d314a7ff58..d83c36559f 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -94,8 +94,7 @@ class MapUsernameTestCase(unittest.TestCase): def testSymbols(self): self.assertEqual( - map_username_to_mxid_localpart("test=$?_1234"), - "test=3d=24=3f_1234", + map_username_to_mxid_localpart("test=$?_1234"), "test=3d=24=3f_1234" ) def testLeadingUnderscore(self): @@ -105,6 +104,5 @@ class MapUsernameTestCase(unittest.TestCase): # this should work with either a unicode or a bytes self.assertEqual(map_username_to_mxid_localpart(u'têst'), "t=c3=aast") self.assertEqual( - map_username_to_mxid_localpart(u'têst'.encode('utf-8')), - "t=c3=aast", + map_username_to_mxid_localpart(u'têst'.encode('utf-8')), "t=c3=aast" ) diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py index d0bc8e2112..fde0baee8e 100644 --- a/tests/test_utils/logging_setup.py +++ b/tests/test_utils/logging_setup.py @@ -22,6 +22,7 @@ from synapse.util.logcontext import LoggingContextFilter class ToTwistedHandler(logging.Handler): """logging handler which sends the logs to the twisted log""" + tx_log = twisted.logger.Logger() def emit(self, record): @@ -41,7 +42,8 @@ def setup_logging(): root_logger = logging.getLogger() log_format = ( - "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s" + "%(asctime)s - %(name)s - %(lineno)d - " + "%(levelname)s - %(request)s - %(message)s" ) handler = ToTwistedHandler() diff --git a/tests/test_visibility.py b/tests/test_visibility.py index 3bdb500514..6a180ddc32 100644 --- a/tests/test_visibility.py +++ b/tests/test_visibility.py @@ -132,7 +132,7 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase): "state_key": "", "room_id": TEST_ROOM_ID, "content": content, - } + }, ) event, context = yield self.event_creation_handler.create_new_client_event( @@ -153,7 +153,7 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase): "state_key": user_id, "room_id": TEST_ROOM_ID, "content": content, - } + }, ) event, context = yield self.event_creation_handler.create_new_client_event( @@ -174,7 +174,7 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase): "sender": user_id, "room_id": TEST_ROOM_ID, "content": content, - } + }, ) event, context = yield 
self.event_creation_handler.create_new_client_event( diff --git a/tests/unittest.py b/tests/unittest.py index 029a88d770..94df8cf47e 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -84,9 +84,8 @@ class TestCase(unittest.TestCase): # all future bets are off. if LoggingContext.current_context() is not LoggingContext.sentinel: self.fail( - "Test starting with non-sentinel logging context %s" % ( - LoggingContext.current_context(), - ) + "Test starting with non-sentinel logging context %s" + % (LoggingContext.current_context(),) ) old_level = logging.getLogger().level @@ -300,7 +299,13 @@ class HomeserverTestCase(TestCase): content = json.dumps(content).encode('utf8') return make_request( - self.reactor, method, path, content, access_token, request, shorthand, + self.reactor, + method, + path, + content, + access_token, + request, + shorthand, federation_auth_origin, ) diff --git a/tests/util/test_async_utils.py b/tests/util/test_async_utils.py index 84dd71e47a..bf85d3b8ec 100644 --- a/tests/util/test_async_utils.py +++ b/tests/util/test_async_utils.py @@ -42,10 +42,10 @@ class TimeoutDeferredTest(TestCase): self.assertNoResult(timing_out_d) self.assertFalse(cancelled[0], "deferred was cancelled prematurely") - self.clock.pump((1.0, )) + self.clock.pump((1.0,)) self.assertTrue(cancelled[0], "deferred was not cancelled by timeout") - self.failureResultOf(timing_out_d, defer.TimeoutError, ) + self.failureResultOf(timing_out_d, defer.TimeoutError) def test_times_out_when_canceller_throws(self): """Test that we have successfully worked around @@ -59,9 +59,9 @@ class TimeoutDeferredTest(TestCase): self.assertNoResult(timing_out_d) - self.clock.pump((1.0, )) + self.clock.pump((1.0,)) - self.failureResultOf(timing_out_d, defer.TimeoutError, ) + self.failureResultOf(timing_out_d, defer.TimeoutError) def test_logcontext_is_preserved_on_cancellation(self): blocking_was_cancelled = [False] @@ -80,10 +80,10 @@ class TimeoutDeferredTest(TestCase): # the errbacks should be run in the test logcontext def errback(res, deferred_name): self.assertIs( - LoggingContext.current_context(), context_one, - "errback %s run in unexpected logcontext %s" % ( - deferred_name, LoggingContext.current_context(), - ) + LoggingContext.current_context(), + context_one, + "errback %s run in unexpected logcontext %s" + % (deferred_name, LoggingContext.current_context()), ) return res @@ -94,11 +94,10 @@ class TimeoutDeferredTest(TestCase): self.assertIs(LoggingContext.current_context(), LoggingContext.sentinel) timing_out_d.addErrback(errback, "timingout") - self.clock.pump((1.0, )) + self.clock.pump((1.0,)) self.assertTrue( - blocking_was_cancelled[0], - "non-completing deferred was not cancelled", + blocking_was_cancelled[0], "non-completing deferred was not cancelled" ) - self.failureResultOf(timing_out_d, defer.TimeoutError, ) + self.failureResultOf(timing_out_d, defer.TimeoutError) self.assertIs(LoggingContext.current_context(), context_one) diff --git a/tests/utils.py b/tests/utils.py index cb75514851..c2ef4b0bb5 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -68,7 +68,9 @@ def setupdb(): # connect to postgres to create the base database. 
db_conn = db_engine.module.connect( - user=POSTGRES_USER, host=POSTGRES_HOST, password=POSTGRES_PASSWORD, + user=POSTGRES_USER, + host=POSTGRES_HOST, + password=POSTGRES_PASSWORD, dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE, ) db_conn.autocommit = True @@ -94,7 +96,9 @@ def setupdb(): def _cleanup(): db_conn = db_engine.module.connect( - user=POSTGRES_USER, host=POSTGRES_HOST, password=POSTGRES_PASSWORD, + user=POSTGRES_USER, + host=POSTGRES_HOST, + password=POSTGRES_PASSWORD, dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE, ) db_conn.autocommit = True @@ -114,7 +118,6 @@ def default_config(name): "server_name": name, "media_store_path": "media", "uploads_path": "uploads", - # the test signing key is just an arbitrary ed25519 key to keep the config # parser happy "signing_key": "ed25519 a_lPym qvioDNmfExFBRPgdTU+wtFYKq4JfwFRv7sYVgWvmgJg", -- cgit 1.5.1 From ee90c06e38c16f9ca6d5e01c081ee99720fafafb Mon Sep 17 00:00:00 2001 From: Christoph Müller Date: Fri, 10 May 2019 10:09:25 +0200 Subject: Set syslog identifiers in systemd units (#5023) --- changelog.d/5023.feature | 3 +++ contrib/systemd-with-workers/system/matrix-synapse-worker@.service | 1 + contrib/systemd-with-workers/system/matrix-synapse.service | 1 + contrib/systemd/matrix-synapse.service | 2 +- debian/changelog | 7 +++++++ debian/matrix-synapse.service | 1 + 6 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 changelog.d/5023.feature diff --git a/changelog.d/5023.feature b/changelog.d/5023.feature new file mode 100644 index 0000000000..98551f79a3 --- /dev/null +++ b/changelog.d/5023.feature @@ -0,0 +1,3 @@ +Configure the example systemd units to have a log identifier of `matrix-synapse` +instead of the executable name, `python`. +Contributed by Christoph Müller. diff --git a/contrib/systemd-with-workers/system/matrix-synapse-worker@.service b/contrib/systemd-with-workers/system/matrix-synapse-worker@.service index 912984b9d2..9d980d5168 100644 --- a/contrib/systemd-with-workers/system/matrix-synapse-worker@.service +++ b/contrib/systemd-with-workers/system/matrix-synapse-worker@.service @@ -12,6 +12,7 @@ ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.%i --config-path=/ ExecReload=/bin/kill -HUP $MAINPID Restart=always RestartSec=3 +SyslogIdentifier=matrix-synapse-%i [Install] WantedBy=matrix-synapse.service diff --git a/contrib/systemd-with-workers/system/matrix-synapse.service b/contrib/systemd-with-workers/system/matrix-synapse.service index 8bb4e400dc..3aae19034c 100644 --- a/contrib/systemd-with-workers/system/matrix-synapse.service +++ b/contrib/systemd-with-workers/system/matrix-synapse.service @@ -11,6 +11,7 @@ ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --confi ExecReload=/bin/kill -HUP $MAINPID Restart=always RestartSec=3 +SyslogIdentifier=matrix-synapse [Install] WantedBy=matrix.target diff --git a/contrib/systemd/matrix-synapse.service b/contrib/systemd/matrix-synapse.service index efb157e941..595b69916c 100644 --- a/contrib/systemd/matrix-synapse.service +++ b/contrib/systemd/matrix-synapse.service @@ -22,10 +22,10 @@ Group=nogroup WorkingDirectory=/opt/synapse ExecStart=/opt/synapse/env/bin/python -m synapse.app.homeserver --config-path=/opt/synapse/homeserver.yaml +SyslogIdentifier=matrix-synapse # adjust the cache factor if necessary # Environment=SYNAPSE_CACHE_FACTOR=2.0 [Install] WantedBy=multi-user.target - diff --git a/debian/changelog b/debian/changelog index c25425bf26..454fa8eb16 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,10 @@ 
+matrix-synapse-py3 (0.99.3.2+nmu1) UNRELEASED; urgency=medium + + [ Christoph Müller ] + * Configure the systemd units to have a log identifier of `matrix-synapse` + + -- Christoph Müller Wed, 17 Apr 2019 16:17:32 +0200 + matrix-synapse-py3 (0.99.3.2) stable; urgency=medium * New synapse release 0.99.3.2. diff --git a/debian/matrix-synapse.service b/debian/matrix-synapse.service index 942e4b83fe..b0a8d72e6d 100644 --- a/debian/matrix-synapse.service +++ b/debian/matrix-synapse.service @@ -11,6 +11,7 @@ ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --confi ExecReload=/bin/kill -HUP $MAINPID Restart=always RestartSec=3 +SyslogIdentifier=matrix-synapse [Install] WantedBy=multi-user.target -- cgit 1.5.1
From cd3f30014ad7bd71b4c86a34487a7719d3ac8caf Mon Sep 17 00:00:00 2001 From: Gergely Polonkai Date: Fri, 10 May 2019 10:15:08 +0200 Subject: Make Prometheus snippet less confusing on the metrics collection doc (#4288) Signed-off-by: Gergely Polonkai --- docs/metrics-howto.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/metrics-howto.rst b/docs/metrics-howto.rst index 5bbb5a4f3a..32b064e2da 100644 --- a/docs/metrics-howto.rst +++ b/docs/metrics-howto.rst @@ -48,7 +48,10 @@ How to monitor Synapse metrics using Prometheus - job_name: "synapse" metrics_path: "/_synapse/metrics" static_configs: - - targets: ["my.server.here:9092"] + - targets: ["my.server.here:port"] + + where ``my.server.here`` is the IP address of Synapse, and ``port`` is the listener port + configured with the ``metrics`` resource. If your prometheus is older than 1.5.2, you will need to replace ``static_configs`` in the above with ``target_groups``. -- cgit 1.5.1
From a78996cc4a48660d1b11abacf764b6928a8ffee8 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 10 May 2019 09:46:28 +0100 Subject: fix sample config --- docs/sample_config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 6ed75ff764..45585f09e7 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -524,7 +524,7 @@ uploads_path: "DATADIR/uploads" # (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly # listed here, since they correspond to unroutable addresses.) # -# This must be specified if url_preview_enabled is set. It is recommended that +# This must be specified if url_preview_enabled is set. It is recommended that # you uncomment the following list as a starting point. # #url_preview_ip_range_blacklist: -- cgit 1.5.1
From 085ae346ace418e0fc043ac5f568f85ebf80038e Mon Sep 17 00:00:00 2001 From: David Baker Date: Fri, 10 May 2019 10:52:24 +0100 Subject: Add a DUMMY stage to captcha-only registration flow This allows the client to complete the email last, which is more natural for the user. Without this stage, the client could complete the recaptcha (and terms, if enabled) stages and the registration request would then complete, because you've now completed a flow, even if you were intending to complete the flow that's the same except with email auth at the end. Adding a dummy auth stage to the recaptcha-only flow means it's always unambiguous which flow the client was trying to complete. Longer term we should think about changing the protocol so the client explicitly says which flow it's trying to complete.
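To make the ambiguity concrete, here is a minimal, self-contained sketch of how a set of completed stages matches the advertised flows before and after the DUMMY stage is added. This is not Synapse's actual user-interactive auth checker; the satisfied_flow helper and the variable names are invented purely for illustration (only the m.login.* identifiers come from the Matrix spec).

    RECAPTCHA = "m.login.recaptcha"
    EMAIL_IDENTITY = "m.login.email.identity"
    DUMMY = "m.login.dummy"

    def satisfied_flow(flows, completed):
        """Return the first flow whose stages are all in `completed`, else None."""
        for flow in flows:
            if all(stage in completed for stage in flow):
                return flow
        return None

    # Without DUMMY: completing only the captcha already satisfies the first flow,
    # even if the client intended to go on and bind an email address afterwards.
    ambiguous = [[RECAPTCHA], [EMAIL_IDENTITY, RECAPTCHA]]
    assert satisfied_flow(ambiguous, {RECAPTCHA}) == [RECAPTCHA]

    # With DUMMY appended: the captcha-only flow is only complete once the client
    # explicitly submits the dummy stage, so its intent is unambiguous.
    unambiguous = [[RECAPTCHA, DUMMY], [EMAIL_IDENTITY, RECAPTCHA]]
    assert satisfied_flow(unambiguous, {RECAPTCHA}) is None
    assert satisfied_flow(unambiguous, {RECAPTCHA, DUMMY}) == [RECAPTCHA, DUMMY]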
https://github.com/vector-im/riot-web/issues/9586 --- synapse/rest/client/v2_alpha/register.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index dc3e265bcd..95578b76ae 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -345,7 +345,7 @@ class RegisterRestServlet(RestServlet): if self.hs.config.enable_registration_captcha: # only support 3PIDless registration if no 3PIDs are required if not require_email and not require_msisdn: - flows.extend([[LoginType.RECAPTCHA]]) + flows.extend([[LoginType.RECAPTCHA, LoginType.DUMMY]]) # only support the email-only flow if we don't require MSISDN 3PIDs if not require_msisdn: flows.extend([[LoginType.EMAIL_IDENTITY, LoginType.RECAPTCHA]]) -- cgit 1.5.1
From c2bb7476c93d1f5026a4c8b715b2f70316d3a506 Mon Sep 17 00:00:00 2001 From: David Baker Date: Fri, 10 May 2019 11:08:01 +0100 Subject: Revert 085ae346ace418e0fc043ac5f568f85ebf80038e Accidentally went straight to develop --- synapse/rest/client/v2_alpha/register.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 95578b76ae..dc3e265bcd 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -345,7 +345,7 @@ class RegisterRestServlet(RestServlet): if self.hs.config.enable_registration_captcha: # only support 3PIDless registration if no 3PIDs are required if not require_email and not require_msisdn: - flows.extend([[LoginType.RECAPTCHA, LoginType.DUMMY]]) + flows.extend([[LoginType.RECAPTCHA]]) # only support the email-only flow if we don't require MSISDN 3PIDs if not require_msisdn: flows.extend([[LoginType.EMAIL_IDENTITY, LoginType.RECAPTCHA]]) -- cgit 1.5.1
From 2f48c4e1ae40869a1bc5dcb36557ad0a0d8a5728 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 10 May 2019 10:32:44 -0700 Subject: URL preview blacklisting fixes (#5155) Prevents a SynapseError being raised inside an IResolutionReceiver and instead opts to just return 0 results. This means that we have to lump a failed lookup and a blacklisted lookup together with the same error message, but the substitute should be generic enough to cover both cases. --- changelog.d/5155.misc | 1 + synapse/http/client.py | 45 +++++++++++++++------------ synapse/rest/media/v1/preview_url_resource.py | 10 ++++++ tests/rest/media/v1/test_url_preview.py | 22 ++++++------- 4 files changed, 47 insertions(+), 31 deletions(-) create mode 100644 changelog.d/5155.misc diff --git a/changelog.d/5155.misc b/changelog.d/5155.misc new file mode 100644 index 0000000000..a81dbae67a --- /dev/null +++ b/changelog.d/5155.misc @@ -0,0 +1 @@ +Prevent an exception from being raised in a IResolutionReceiver and use a more generic error message for blacklisted URL previews.
\ No newline at end of file diff --git a/synapse/http/client.py b/synapse/http/client.py index ad454f4964..ddbfb72228 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -90,45 +90,50 @@ class IPBlacklistingResolver(object): def resolveHostName(self, recv, hostname, portNumber=0): r = recv() - d = defer.Deferred() addresses = [] - @provider(IResolutionReceiver) - class EndpointReceiver(object): - @staticmethod - def resolutionBegan(resolutionInProgress): - pass + def _callback(): + r.resolutionBegan(None) - @staticmethod - def addressResolved(address): - ip_address = IPAddress(address.host) + has_bad_ip = False + for i in addresses: + ip_address = IPAddress(i.host) if check_against_blacklist( ip_address, self._ip_whitelist, self._ip_blacklist ): logger.info( - "Dropped %s from DNS resolution to %s" % (ip_address, hostname) + "Dropped %s from DNS resolution to %s due to blacklist" % + (ip_address, hostname) ) - raise SynapseError(403, "IP address blocked by IP blacklist entry") + has_bad_ip = True + + # if we have a blacklisted IP, we'd like to raise an error to block the + # request, but all we can really do from here is claim that there were no + # valid results. + if not has_bad_ip: + for i in addresses: + r.addressResolved(i) + r.resolutionComplete() + @provider(IResolutionReceiver) + class EndpointReceiver(object): + @staticmethod + def resolutionBegan(resolutionInProgress): + pass + + @staticmethod + def addressResolved(address): addresses.append(address) @staticmethod def resolutionComplete(): - d.callback(addresses) + _callback() self._reactor.nameResolver.resolveHostName( EndpointReceiver, hostname, portNumber=portNumber ) - def _callback(addrs): - r.resolutionBegan(None) - for i in addrs: - r.addressResolved(i) - r.resolutionComplete() - - d.addCallback(_callback) - return r diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index ba3ab1d37d..acf87709f2 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -31,6 +31,7 @@ from six.moves import urllib_parse as urlparse from canonicaljson import json from twisted.internet import defer +from twisted.internet.error import DNSLookupError from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET @@ -328,9 +329,18 @@ class PreviewUrlResource(Resource): # handler will return a SynapseError to the client instead of # blank data or a 500. raise + except DNSLookupError: + # DNS lookup returned no results + # Note: This will also be the case if one of the resolved IP + # addresses is blacklisted + raise SynapseError( + 502, "DNS resolution failure during URL preview generation", + Codes.UNKNOWN + ) except Exception as e: # FIXME: pass through 404s and other error messages nicely logger.warn("Error downloading %s: %r", url, e) + raise SynapseError( 500, "Failed to download content: %s" % ( traceback.format_exception_only(sys.exc_info()[0], e), diff --git a/tests/rest/media/v1/test_url_preview.py b/tests/rest/media/v1/test_url_preview.py index 650ce95a6f..f696395f3c 100644 --- a/tests/rest/media/v1/test_url_preview.py +++ b/tests/rest/media/v1/test_url_preview.py @@ -297,12 +297,12 @@ class URLPreviewTests(unittest.HomeserverTestCase): # No requests made. 
self.assertEqual(len(self.reactor.tcpClients), 0) - self.assertEqual(channel.code, 403) + self.assertEqual(channel.code, 502) self.assertEqual( channel.json_body, { 'errcode': 'M_UNKNOWN', - 'error': 'IP address blocked by IP blacklist entry', + 'error': 'DNS resolution failure during URL preview generation', }, ) @@ -318,12 +318,12 @@ class URLPreviewTests(unittest.HomeserverTestCase): request.render(self.preview_url) self.pump() - self.assertEqual(channel.code, 403) + self.assertEqual(channel.code, 502) self.assertEqual( channel.json_body, { 'errcode': 'M_UNKNOWN', - 'error': 'IP address blocked by IP blacklist entry', + 'error': 'DNS resolution failure during URL preview generation', }, ) @@ -339,7 +339,6 @@ class URLPreviewTests(unittest.HomeserverTestCase): # No requests made. self.assertEqual(len(self.reactor.tcpClients), 0) - self.assertEqual(channel.code, 403) self.assertEqual( channel.json_body, { @@ -347,6 +346,7 @@ class URLPreviewTests(unittest.HomeserverTestCase): 'error': 'IP address blocked by IP blacklist entry', }, ) + self.assertEqual(channel.code, 403) def test_blacklisted_ip_range_direct(self): """ @@ -414,12 +414,12 @@ class URLPreviewTests(unittest.HomeserverTestCase): ) request.render(self.preview_url) self.pump() - self.assertEqual(channel.code, 403) + self.assertEqual(channel.code, 502) self.assertEqual( channel.json_body, { 'errcode': 'M_UNKNOWN', - 'error': 'IP address blocked by IP blacklist entry', + 'error': 'DNS resolution failure during URL preview generation', }, ) @@ -439,12 +439,12 @@ class URLPreviewTests(unittest.HomeserverTestCase): # No requests made. self.assertEqual(len(self.reactor.tcpClients), 0) - self.assertEqual(channel.code, 403) + self.assertEqual(channel.code, 502) self.assertEqual( channel.json_body, { 'errcode': 'M_UNKNOWN', - 'error': 'IP address blocked by IP blacklist entry', + 'error': 'DNS resolution failure during URL preview generation', }, ) @@ -460,11 +460,11 @@ class URLPreviewTests(unittest.HomeserverTestCase): request.render(self.preview_url) self.pump() - self.assertEqual(channel.code, 403) + self.assertEqual(channel.code, 502) self.assertEqual( channel.json_body, { 'errcode': 'M_UNKNOWN', - 'error': 'IP address blocked by IP blacklist entry', + 'error': 'DNS resolution failure during URL preview generation', }, ) -- cgit 1.5.1 From bb93757b32e1f13cc6e091dfdd485b1b2ae47373 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 13 May 2019 15:19:44 +0100 Subject: Fix CI after new release of isort --- synapse/server.pyi | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/server.pyi b/synapse/server.pyi index 3ba3a967c2..9583e82d52 100644 --- a/synapse/server.pyi +++ b/synapse/server.pyi @@ -18,7 +18,6 @@ import synapse.server_notices.server_notices_sender import synapse.state import synapse.storage - class HomeServer(object): @property def config(self) -> synapse.config.homeserver.HomeServerConfig: -- cgit 1.5.1 From 1a536699fde3c4ae80a5da9fab725eb90b0ed148 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 13 May 2019 15:21:23 +0100 Subject: Changelog --- changelog.d/5179.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5179.bugfix diff --git a/changelog.d/5179.bugfix b/changelog.d/5179.bugfix new file mode 100644 index 0000000000..92369cb2fc --- /dev/null +++ b/changelog.d/5179.bugfix @@ -0,0 +1 @@ +Fix CI after new release of isort. 
-- cgit 1.5.1 From 2725cd2290445db0931f45dc10e508f20bfc2d05 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 13 May 2019 15:32:07 +0100 Subject: Fix changelog --- changelog.d/5179.bugfix | 1 - changelog.d/5179.misc | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) delete mode 100644 changelog.d/5179.bugfix create mode 100644 changelog.d/5179.misc diff --git a/changelog.d/5179.bugfix b/changelog.d/5179.bugfix deleted file mode 100644 index 92369cb2fc..0000000000 --- a/changelog.d/5179.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix CI after new release of isort. diff --git a/changelog.d/5179.misc b/changelog.d/5179.misc new file mode 100644 index 0000000000..92369cb2fc --- /dev/null +++ b/changelog.d/5179.misc @@ -0,0 +1 @@ +Fix CI after new release of isort. -- cgit 1.5.1 From 2e1129b5f7aae4f74b23ad063b12ab34a5c82eb3 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 13 May 2019 16:11:21 +0100 Subject: 0.99.4rc1 --- CHANGES.md | 82 ++++++++++++++++++++++++++++++++++++++++++++++++ changelog.d/4339.feature | 1 - changelog.d/4474.misc | 1 - changelog.d/4555.bugfix | 1 - changelog.d/4867.feature | 1 - changelog.d/4942.bugfix | 1 - changelog.d/4947.feature | 1 - changelog.d/4949.misc | 1 - changelog.d/4953.misc | 2 -- changelog.d/4954.misc | 1 - changelog.d/4955.bugfix | 1 - changelog.d/4956.bugfix | 1 - changelog.d/4959.misc | 1 - changelog.d/4965.misc | 1 - changelog.d/4967.feature | 1 - changelog.d/4968.misc | 1 - changelog.d/4969.misc | 2 -- changelog.d/4972.misc | 1 - changelog.d/4974.misc | 1 - changelog.d/4981.bugfix | 1 - changelog.d/4982.misc | 1 - changelog.d/4985.misc | 1 - changelog.d/4987.misc | 1 - changelog.d/4989.feature | 1 - changelog.d/4990.bugfix | 1 - changelog.d/4991.feature | 1 - changelog.d/4992.misc | 1 - changelog.d/4996.misc | 1 - changelog.d/4998.misc | 1 - changelog.d/4999.bugfix | 1 - changelog.d/5001.misc | 1 - changelog.d/5002.feature | 1 - changelog.d/5003.bugfix | 1 - changelog.d/5005.misc | 1 - changelog.d/5007.misc | 1 - changelog.d/5009.bugfix | 1 - changelog.d/5010.feature | 1 - changelog.d/5020.feature | 1 - changelog.d/5023.feature | 3 -- changelog.d/5024.misc | 1 - changelog.d/5027.feature | 1 - changelog.d/5028.misc | 1 - changelog.d/5030.misc | 1 - changelog.d/5032.bugfix | 1 - changelog.d/5033.misc | 1 - changelog.d/5035.bugfix | 1 - changelog.d/5037.bugfix | 1 - changelog.d/5046.misc | 1 - changelog.d/5047.feature | 1 - changelog.d/5063.feature | 1 - changelog.d/5065.feature | 1 - changelog.d/5067.misc | 1 - changelog.d/5070.feature | 1 - changelog.d/5071.bugfix | 1 - changelog.d/5073.feature | 1 - changelog.d/5077.bugfix | 1 - changelog.d/5083.feature | 1 - changelog.d/5098.misc | 1 - changelog.d/5100.misc | 1 - changelog.d/5103.bugfix | 1 - changelog.d/5104.bugfix | 1 - changelog.d/5116.feature | 1 - changelog.d/5119.feature | 1 - changelog.d/5120.misc | 1 - changelog.d/5121.feature | 1 - changelog.d/5122.misc | 1 - changelog.d/5124.bugfix | 1 - changelog.d/5128.bugfix | 1 - changelog.d/5138.bugfix | 1 - changelog.d/5142.feature | 1 - changelog.d/5154.bugfix | 1 - changelog.d/5155.misc | 1 - changelog.d/5170.misc | 1 - changelog.d/5179.misc | 1 - synapse/__init__.py | 2 +- 75 files changed, 83 insertions(+), 78 deletions(-) delete mode 100644 changelog.d/4339.feature delete mode 100644 changelog.d/4474.misc delete mode 100644 changelog.d/4555.bugfix delete mode 100644 changelog.d/4867.feature delete mode 100644 changelog.d/4942.bugfix delete mode 100644 changelog.d/4947.feature delete mode 100644 changelog.d/4949.misc delete mode 
100644 changelog.d/4953.misc delete mode 100644 changelog.d/4954.misc delete mode 100644 changelog.d/4955.bugfix delete mode 100644 changelog.d/4956.bugfix delete mode 100644 changelog.d/4959.misc delete mode 100644 changelog.d/4965.misc delete mode 100644 changelog.d/4967.feature delete mode 100644 changelog.d/4968.misc delete mode 100644 changelog.d/4969.misc delete mode 100644 changelog.d/4972.misc delete mode 100644 changelog.d/4974.misc delete mode 100644 changelog.d/4981.bugfix delete mode 100644 changelog.d/4982.misc delete mode 100644 changelog.d/4985.misc delete mode 100644 changelog.d/4987.misc delete mode 100644 changelog.d/4989.feature delete mode 100644 changelog.d/4990.bugfix delete mode 100644 changelog.d/4991.feature delete mode 100644 changelog.d/4992.misc delete mode 100644 changelog.d/4996.misc delete mode 100644 changelog.d/4998.misc delete mode 100644 changelog.d/4999.bugfix delete mode 100644 changelog.d/5001.misc delete mode 100644 changelog.d/5002.feature delete mode 100644 changelog.d/5003.bugfix delete mode 100644 changelog.d/5005.misc delete mode 100644 changelog.d/5007.misc delete mode 100644 changelog.d/5009.bugfix delete mode 100644 changelog.d/5010.feature delete mode 100644 changelog.d/5020.feature delete mode 100644 changelog.d/5023.feature delete mode 100644 changelog.d/5024.misc delete mode 100644 changelog.d/5027.feature delete mode 100644 changelog.d/5028.misc delete mode 100644 changelog.d/5030.misc delete mode 100644 changelog.d/5032.bugfix delete mode 100644 changelog.d/5033.misc delete mode 100644 changelog.d/5035.bugfix delete mode 100644 changelog.d/5037.bugfix delete mode 100644 changelog.d/5046.misc delete mode 100644 changelog.d/5047.feature delete mode 100644 changelog.d/5063.feature delete mode 100644 changelog.d/5065.feature delete mode 100644 changelog.d/5067.misc delete mode 100644 changelog.d/5070.feature delete mode 100644 changelog.d/5071.bugfix delete mode 100644 changelog.d/5073.feature delete mode 100644 changelog.d/5077.bugfix delete mode 100644 changelog.d/5083.feature delete mode 100644 changelog.d/5098.misc delete mode 100644 changelog.d/5100.misc delete mode 100644 changelog.d/5103.bugfix delete mode 100644 changelog.d/5104.bugfix delete mode 100644 changelog.d/5116.feature delete mode 100644 changelog.d/5119.feature delete mode 100644 changelog.d/5120.misc delete mode 100644 changelog.d/5121.feature delete mode 100644 changelog.d/5122.misc delete mode 100644 changelog.d/5124.bugfix delete mode 100644 changelog.d/5128.bugfix delete mode 100644 changelog.d/5138.bugfix delete mode 100644 changelog.d/5142.feature delete mode 100644 changelog.d/5154.bugfix delete mode 100644 changelog.d/5155.misc delete mode 100644 changelog.d/5170.misc delete mode 100644 changelog.d/5179.misc diff --git a/CHANGES.md b/CHANGES.md index d8cfbbebef..3cacca5a6b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,85 @@ +Synapse 0.99.4rc1 (2019-05-13) +============================== + +Features +-------- + +- Add systemd-python to the optional dependencies to enable logging to the systemd journal. Install with `pip install matrix-synapse[systemd]`. ([\#4339](https://github.com/matrix-org/synapse/issues/4339)) +- Add a default .m.rule.tombstone push rule. ([\#4867](https://github.com/matrix-org/synapse/issues/4867)) +- Add ability for password provider modules to bind email addresses to users upon registration. 
([\#4947](https://github.com/matrix-org/synapse/issues/4947)) +- Implementation of [MSC1711](https://github.com/matrix-org/matrix-doc/pull/1711) including config options for requiring valid TLS certificates for federation traffic, the ability to disable TLS validation for specific domains, and the ability to specify your own list of CA certificates. ([\#4967](https://github.com/matrix-org/synapse/issues/4967)) +- Remove presence list support as per MSC 1819. ([\#4989](https://github.com/matrix-org/synapse/issues/4989)) +- Reduce CPU usage starting pushers during start up. ([\#4991](https://github.com/matrix-org/synapse/issues/4991)) +- Add a delete group admin API. ([\#5002](https://github.com/matrix-org/synapse/issues/5002)) +- Add config option to block users from looking up 3PIDs. ([\#5010](https://github.com/matrix-org/synapse/issues/5010)) +- Add context to phonehome stats. ([\#5020](https://github.com/matrix-org/synapse/issues/5020)) +- Configure the example systemd units to have a log identifier of `matrix-synapse` + instead of the executable name, `python`. + Contributed by Christoph Müller. ([\#5023](https://github.com/matrix-org/synapse/issues/5023)) +- Add time-based account expiration. ([\#5027](https://github.com/matrix-org/synapse/issues/5027), [\#5047](https://github.com/matrix-org/synapse/issues/5047), [\#5073](https://github.com/matrix-org/synapse/issues/5073), [\#5116](https://github.com/matrix-org/synapse/issues/5116)) +- Add support for handling /versions, /voip and /push_rules client endpoints to client_reader worker. ([\#5063](https://github.com/matrix-org/synapse/issues/5063), [\#5065](https://github.com/matrix-org/synapse/issues/5065), [\#5070](https://github.com/matrix-org/synapse/issues/5070)) +- Add a configuration option to require authentication on /publicRooms and /profile endpoints. ([\#5083](https://github.com/matrix-org/synapse/issues/5083)) +- Move admin APIs to `/_synapse/admin/v1`. (The old paths are retained for backwards-compatibility, for now). ([\#5119](https://github.com/matrix-org/synapse/issues/5119)) +- Implement an admin API for sending server notices. Many thanks to @krombel who provided a foundation for this work. ([\#5121](https://github.com/matrix-org/synapse/issues/5121), [\#5142](https://github.com/matrix-org/synapse/issues/5142)) + + +Bugfixes +-------- + +- Avoid redundant URL encoding of redirect URL for SSO login in the fallback login page. Fixes a regression introduced in [#4220](https://github.com/matrix-org/synapse/pull/4220). Contributed by Marcel Fabian Krüger ("[zaugin](https://github.com/zauguin)"). ([\#4555](https://github.com/matrix-org/synapse/issues/4555)) +- Fix bug where presence updates were sent to all servers in a room when a new server joined, rather than to just the new server. ([\#4942](https://github.com/matrix-org/synapse/issues/4942), [\#5103](https://github.com/matrix-org/synapse/issues/5103)) +- Fix sync bug which made accepting invites unreliable in worker-mode synapses. ([\#4955](https://github.com/matrix-org/synapse/issues/4955), [\#4956](https://github.com/matrix-org/synapse/issues/4956)) +- start.sh: Fix the --no-rate-limit option for messages and make it bypass rate limit on registration and login too. ([\#4981](https://github.com/matrix-org/synapse/issues/4981)) +- Transfer related groups on room upgrade. ([\#4990](https://github.com/matrix-org/synapse/issues/4990)) +- Prevent the ability to kick users from a room they aren't in.
([\#4999](https://github.com/matrix-org/synapse/issues/4999)) +- Fix issue #4596 so synapse_port_db script works with --curses option on Python 3. Contributed by Anders Jensen-Waud . ([\#5003](https://github.com/matrix-org/synapse/issues/5003)) +- Clients timing out/disappearing while downloading from the media repository will now no longer log a spurious "Producer was not unregistered" message. ([\#5009](https://github.com/matrix-org/synapse/issues/5009)) +- Fix "cannot import name execute_batch" error with postgres. ([\#5032](https://github.com/matrix-org/synapse/issues/5032)) +- Fix disappearing exceptions in manhole. ([\#5035](https://github.com/matrix-org/synapse/issues/5035)) +- Workaround bug in twisted where attempting too many concurrent DNS requests could cause it to hang due to running out of file descriptors. ([\#5037](https://github.com/matrix-org/synapse/issues/5037)) +- Make sure we're not registering the same 3pid twice on registration. ([\#5071](https://github.com/matrix-org/synapse/issues/5071)) +- Don't crash on lack of expiry templates. ([\#5077](https://github.com/matrix-org/synapse/issues/5077)) +- Fix the ratelimting on third party invites. ([\#5104](https://github.com/matrix-org/synapse/issues/5104)) +- Add some missing limitations to room alias creation. ([\#5124](https://github.com/matrix-org/synapse/issues/5124), [\#5128](https://github.com/matrix-org/synapse/issues/5128)) +- Limit the number of EDUs in transactions to 100 as expected by synapse. Thanks to @superboum for this work! ([\#5138](https://github.com/matrix-org/synapse/issues/5138)) +- Fix bogus imports in unit tests. ([\#5154](https://github.com/matrix-org/synapse/issues/5154)) + + +Internal Changes +---------------- + +- Add test to verify threepid auth check added in #4435. ([\#4474](https://github.com/matrix-org/synapse/issues/4474)) +- Fix/improve some docstrings in the replication code. ([\#4949](https://github.com/matrix-org/synapse/issues/4949)) +- Split synapse.replication.tcp.streams into smaller files. ([\#4953](https://github.com/matrix-org/synapse/issues/4953)) +- Refactor replication row generation/parsing. ([\#4954](https://github.com/matrix-org/synapse/issues/4954)) +- Run `black` to clean up formatting on `synapse/storage/roommember.py` and `synapse/storage/events.py`. ([\#4959](https://github.com/matrix-org/synapse/issues/4959)) +- Remove log line for password via the admin API. ([\#4965](https://github.com/matrix-org/synapse/issues/4965)) +- Fix typo in TLS filenames in docker/README.md. Also add the '-p' commandline option to the 'docker run' example. Contributed by Jurrie Overgoor. ([\#4968](https://github.com/matrix-org/synapse/issues/4968)) +- Refactor room version definitions. ([\#4969](https://github.com/matrix-org/synapse/issues/4969)) +- Reduce log level of .well-known/matrix/client responses. ([\#4972](https://github.com/matrix-org/synapse/issues/4972)) +- Add `config.signing_key_path` that can be read by `synapse.config` utility. ([\#4974](https://github.com/matrix-org/synapse/issues/4974)) +- Track which identity server is used when binding a threepid and use that for unbinding, as per MSC1915. ([\#4982](https://github.com/matrix-org/synapse/issues/4982)) +- Rewrite KeyringTestCase as a HomeserverTestCase. ([\#4985](https://github.com/matrix-org/synapse/issues/4985)) +- README updates: Corrected the default POSTGRES_USER. Added port forwarding hint in TLS section. 
([\#4987](https://github.com/matrix-org/synapse/issues/4987)) +- Remove a number of unused tables from the database schema. ([\#4992](https://github.com/matrix-org/synapse/issues/4992), [\#5028](https://github.com/matrix-org/synapse/issues/5028), [\#5033](https://github.com/matrix-org/synapse/issues/5033)) +- Run `black` on the remainder of `synapse/storage/`. ([\#4996](https://github.com/matrix-org/synapse/issues/4996)) +- Fix grammar in get_current_users_in_room and give it a docstring. ([\#4998](https://github.com/matrix-org/synapse/issues/4998)) +- Clean up some code in the server-key Keyring. ([\#5001](https://github.com/matrix-org/synapse/issues/5001)) +- Convert SYNAPSE_NO_TLS Docker variable to boolean for user friendliness. Contributed by Gabriel Eckerson. ([\#5005](https://github.com/matrix-org/synapse/issues/5005)) +- Refactor synapse.storage._base._simple_select_list_paginate. ([\#5007](https://github.com/matrix-org/synapse/issues/5007)) +- Store the notary server name correctly in server_keys_json. ([\#5024](https://github.com/matrix-org/synapse/issues/5024)) +- Rewrite Datastore.get_server_verify_keys to reduce the number of database transactions. ([\#5030](https://github.com/matrix-org/synapse/issues/5030)) +- Remove extraneous period from copyright headers. ([\#5046](https://github.com/matrix-org/synapse/issues/5046)) +- Update documentation for where to get Synapse packages. ([\#5067](https://github.com/matrix-org/synapse/issues/5067)) +- Add workarounds for pep-517 install errors. ([\#5098](https://github.com/matrix-org/synapse/issues/5098)) +- Improve logging when event-signature checks fail. ([\#5100](https://github.com/matrix-org/synapse/issues/5100)) +- Factor out an "assert_requester_is_admin" function. ([\#5120](https://github.com/matrix-org/synapse/issues/5120)) +- Remove the requirement to authenticate for /admin/server_version. ([\#5122](https://github.com/matrix-org/synapse/issues/5122)) +- Prevent an exception from being raised in a IResolutionReceiver and use a more generic error message for blacklisted URL previews. ([\#5155](https://github.com/matrix-org/synapse/issues/5155)) +- Run `black` on the tests directory. ([\#5170](https://github.com/matrix-org/synapse/issues/5170)) +- Fix CI after new release of isort. ([\#5179](https://github.com/matrix-org/synapse/issues/5179)) + + Synapse 0.99.3.2 (2019-05-03) ============================= diff --git a/changelog.d/4339.feature b/changelog.d/4339.feature deleted file mode 100644 index cecff97b80..0000000000 --- a/changelog.d/4339.feature +++ /dev/null @@ -1 +0,0 @@ -Add systemd-python to the optional dependencies to enable logging to the systemd journal. Install with `pip install matrix-synapse[systemd]`. diff --git a/changelog.d/4474.misc b/changelog.d/4474.misc deleted file mode 100644 index 4b882d60be..0000000000 --- a/changelog.d/4474.misc +++ /dev/null @@ -1 +0,0 @@ -Add test to verify threepid auth check added in #4435. diff --git a/changelog.d/4555.bugfix b/changelog.d/4555.bugfix deleted file mode 100644 index d596022c3f..0000000000 --- a/changelog.d/4555.bugfix +++ /dev/null @@ -1 +0,0 @@ -Avoid redundant URL encoding of redirect URL for SSO login in the fallback login page. Fixes a regression introduced in [#4220](https://github.com/matrix-org/synapse/pull/4220). Contributed by Marcel Fabian Krüger ("[zaugin](https://github.com/zauguin)"). 
diff --git a/changelog.d/4867.feature b/changelog.d/4867.feature deleted file mode 100644 index f5f9030e22..0000000000 --- a/changelog.d/4867.feature +++ /dev/null @@ -1 +0,0 @@ -Add a default .m.rule.tombstone push rule. diff --git a/changelog.d/4942.bugfix b/changelog.d/4942.bugfix deleted file mode 100644 index 590d80d58f..0000000000 --- a/changelog.d/4942.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug where presence updates were sent to all servers in a room when a new server joined, rather than to just the new server. diff --git a/changelog.d/4947.feature b/changelog.d/4947.feature deleted file mode 100644 index b9d27b90f1..0000000000 --- a/changelog.d/4947.feature +++ /dev/null @@ -1 +0,0 @@ -Add ability for password provider modules to bind email addresses to users upon registration. \ No newline at end of file diff --git a/changelog.d/4949.misc b/changelog.d/4949.misc deleted file mode 100644 index 25c4e05a64..0000000000 --- a/changelog.d/4949.misc +++ /dev/null @@ -1 +0,0 @@ -Fix/improve some docstrings in the replication code. diff --git a/changelog.d/4953.misc b/changelog.d/4953.misc deleted file mode 100644 index 06a084e6ef..0000000000 --- a/changelog.d/4953.misc +++ /dev/null @@ -1,2 +0,0 @@ -Split synapse.replication.tcp.streams into smaller files. - diff --git a/changelog.d/4954.misc b/changelog.d/4954.misc deleted file mode 100644 index 91f145950d..0000000000 --- a/changelog.d/4954.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor replication row generation/parsing. diff --git a/changelog.d/4955.bugfix b/changelog.d/4955.bugfix deleted file mode 100644 index e50e67383d..0000000000 --- a/changelog.d/4955.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix sync bug which made accepting invites unreliable in worker-mode synapses. diff --git a/changelog.d/4956.bugfix b/changelog.d/4956.bugfix deleted file mode 100644 index e50e67383d..0000000000 --- a/changelog.d/4956.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix sync bug which made accepting invites unreliable in worker-mode synapses. diff --git a/changelog.d/4959.misc b/changelog.d/4959.misc deleted file mode 100644 index dd4275501f..0000000000 --- a/changelog.d/4959.misc +++ /dev/null @@ -1 +0,0 @@ -Run `black` to clean up formatting on `synapse/storage/roommember.py` and `synapse/storage/events.py`. \ No newline at end of file diff --git a/changelog.d/4965.misc b/changelog.d/4965.misc deleted file mode 100644 index 284c58b75e..0000000000 --- a/changelog.d/4965.misc +++ /dev/null @@ -1 +0,0 @@ -Remove log line for password via the admin API. diff --git a/changelog.d/4967.feature b/changelog.d/4967.feature deleted file mode 100644 index 7f9f81f849..0000000000 --- a/changelog.d/4967.feature +++ /dev/null @@ -1 +0,0 @@ -Implementation of [MSC1711](https://github.com/matrix-org/matrix-doc/pull/1711) including config options for requiring valid TLS certificates for federation traffic, the ability to disable TLS validation for specific domains, and the ability to specify your own list of CA certificates. diff --git a/changelog.d/4968.misc b/changelog.d/4968.misc deleted file mode 100644 index 7a7b69771c..0000000000 --- a/changelog.d/4968.misc +++ /dev/null @@ -1 +0,0 @@ -Fix typo in TLS filenames in docker/README.md. Also add the '-p' commandline option to the 'docker run' example. Contributed by Jurrie Overgoor. diff --git a/changelog.d/4969.misc b/changelog.d/4969.misc deleted file mode 100644 index e3a3214e6b..0000000000 --- a/changelog.d/4969.misc +++ /dev/null @@ -1,2 +0,0 @@ -Refactor room version definitions. 
-
diff --git a/changelog.d/4972.misc b/changelog.d/4972.misc
deleted file mode 100644
index e104a9bb34..0000000000
--- a/changelog.d/4972.misc
+++ /dev/null
@@ -1 +0,0 @@
-Reduce log level of .well-known/matrix/client responses.
diff --git a/changelog.d/4974.misc b/changelog.d/4974.misc
deleted file mode 100644
index 672a18923a..0000000000
--- a/changelog.d/4974.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add `config.signing_key_path` that can be read by `synapse.config` utility.
diff --git a/changelog.d/4981.bugfix b/changelog.d/4981.bugfix
deleted file mode 100644
index e51b45eec0..0000000000
--- a/changelog.d/4981.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-start.sh: Fix the --no-rate-limit option for messages and make it bypass rate limit on registration and login too.
\ No newline at end of file
diff --git a/changelog.d/4982.misc b/changelog.d/4982.misc
deleted file mode 100644
index 067c177d39..0000000000
--- a/changelog.d/4982.misc
+++ /dev/null
@@ -1 +0,0 @@
-Track which identity server is used when binding a threepid and use that for unbinding, as per MSC1915.
diff --git a/changelog.d/4985.misc b/changelog.d/4985.misc
deleted file mode 100644
index 50c9ff9e0f..0000000000
--- a/changelog.d/4985.misc
+++ /dev/null
@@ -1 +0,0 @@
-Rewrite KeyringTestCase as a HomeserverTestCase.
diff --git a/changelog.d/4987.misc b/changelog.d/4987.misc
deleted file mode 100644
index 33490e146f..0000000000
--- a/changelog.d/4987.misc
+++ /dev/null
@@ -1 +0,0 @@
-README updates: Corrected the default POSTGRES_USER. Added port forwarding hint in TLS section.
diff --git a/changelog.d/4989.feature b/changelog.d/4989.feature
deleted file mode 100644
index a5138f5612..0000000000
--- a/changelog.d/4989.feature
+++ /dev/null
@@ -1 +0,0 @@
-Remove presence list support as per MSC 1819.
diff --git a/changelog.d/4990.bugfix b/changelog.d/4990.bugfix
deleted file mode 100644
index 1b69d058f6..0000000000
--- a/changelog.d/4990.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Transfer related groups on room upgrade.
\ No newline at end of file
diff --git a/changelog.d/4991.feature b/changelog.d/4991.feature
deleted file mode 100644
index 034bf3239c..0000000000
--- a/changelog.d/4991.feature
+++ /dev/null
@@ -1 +0,0 @@
-Reduce CPU usage starting pushers during start up.
diff --git a/changelog.d/4992.misc b/changelog.d/4992.misc
deleted file mode 100644
index 3ee4228c09..0000000000
--- a/changelog.d/4992.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove a number of unused tables from the database schema.
diff --git a/changelog.d/4996.misc b/changelog.d/4996.misc
deleted file mode 100644
index ecac24e2be..0000000000
--- a/changelog.d/4996.misc
+++ /dev/null
@@ -1 +0,0 @@
-Run `black` on the remainder of `synapse/storage/`.
\ No newline at end of file
diff --git a/changelog.d/4998.misc b/changelog.d/4998.misc
deleted file mode 100644
index 7caf959139..0000000000
--- a/changelog.d/4998.misc
+++ /dev/null
@@ -1 +0,0 @@
-Fix grammar in get_current_users_in_room and give it a docstring.
diff --git a/changelog.d/4999.bugfix b/changelog.d/4999.bugfix
deleted file mode 100644
index acbc191960..0000000000
--- a/changelog.d/4999.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Prevent the ability to kick users from a room they aren't in.
diff --git a/changelog.d/5001.misc b/changelog.d/5001.misc
deleted file mode 100644
index bf590a016d..0000000000
--- a/changelog.d/5001.misc
+++ /dev/null
@@ -1 +0,0 @@
-Clean up some code in the server-key Keyring.
\ No newline at end of file
diff --git a/changelog.d/5002.feature b/changelog.d/5002.feature
deleted file mode 100644
index d8f50e963f..0000000000
--- a/changelog.d/5002.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add a delete group admin API.
diff --git a/changelog.d/5003.bugfix b/changelog.d/5003.bugfix
deleted file mode 100644
index 9955dc871f..0000000000
--- a/changelog.d/5003.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix issue #4596 so synapse_port_db script works with --curses option on Python 3. Contributed by Anders Jensen-Waud .
diff --git a/changelog.d/5005.misc b/changelog.d/5005.misc
deleted file mode 100644
index f74147b352..0000000000
--- a/changelog.d/5005.misc
+++ /dev/null
@@ -1 +0,0 @@
-Convert SYNAPSE_NO_TLS Docker variable to boolean for user friendliness. Contributed by Gabriel Eckerson.
diff --git a/changelog.d/5007.misc b/changelog.d/5007.misc
deleted file mode 100644
index 05b6ce2c26..0000000000
--- a/changelog.d/5007.misc
+++ /dev/null
@@ -1 +0,0 @@
-Refactor synapse.storage._base._simple_select_list_paginate.
\ No newline at end of file
diff --git a/changelog.d/5009.bugfix b/changelog.d/5009.bugfix
deleted file mode 100644
index e66d3dd1aa..0000000000
--- a/changelog.d/5009.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Clients timing out/disappearing while downloading from the media repository will now no longer log a spurious "Producer was not unregistered" message.
\ No newline at end of file
diff --git a/changelog.d/5010.feature b/changelog.d/5010.feature
deleted file mode 100644
index 65ab198b71..0000000000
--- a/changelog.d/5010.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add config option to block users from looking up 3PIDs.
diff --git a/changelog.d/5020.feature b/changelog.d/5020.feature
deleted file mode 100644
index 71f7a8db2e..0000000000
--- a/changelog.d/5020.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add context to phonehome stats.
diff --git a/changelog.d/5023.feature b/changelog.d/5023.feature
deleted file mode 100644
index 98551f79a3..0000000000
--- a/changelog.d/5023.feature
+++ /dev/null
@@ -1,3 +0,0 @@
-Configure the example systemd units to have a log identifier of `matrix-synapse`
-instead of the executable name, `python`.
-Contributed by Christoph Müller.
diff --git a/changelog.d/5024.misc b/changelog.d/5024.misc
deleted file mode 100644
index 07c13f28d0..0000000000
--- a/changelog.d/5024.misc
+++ /dev/null
@@ -1 +0,0 @@
-Store the notary server name correctly in server_keys_json.
diff --git a/changelog.d/5027.feature b/changelog.d/5027.feature
deleted file mode 100644
index 12766a82a7..0000000000
--- a/changelog.d/5027.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add time-based account expiration.
diff --git a/changelog.d/5028.misc b/changelog.d/5028.misc
deleted file mode 100644
index 3ee4228c09..0000000000
--- a/changelog.d/5028.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove a number of unused tables from the database schema.
diff --git a/changelog.d/5030.misc b/changelog.d/5030.misc
deleted file mode 100644
index 3456eb5381..0000000000
--- a/changelog.d/5030.misc
+++ /dev/null
@@ -1 +0,0 @@
-Rewrite Datastore.get_server_verify_keys to reduce the number of database transactions.
diff --git a/changelog.d/5032.bugfix b/changelog.d/5032.bugfix
deleted file mode 100644
index cd71180ce9..0000000000
--- a/changelog.d/5032.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix "cannot import name execute_batch" error with postgres.
diff --git a/changelog.d/5033.misc b/changelog.d/5033.misc
deleted file mode 100644
index 3ee4228c09..0000000000
--- a/changelog.d/5033.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove a number of unused tables from the database schema.
diff --git a/changelog.d/5035.bugfix b/changelog.d/5035.bugfix
deleted file mode 100644
index 85e154027e..0000000000
--- a/changelog.d/5035.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix disappearing exceptions in manhole.
diff --git a/changelog.d/5037.bugfix b/changelog.d/5037.bugfix
deleted file mode 100644
index c3af418ddf..0000000000
--- a/changelog.d/5037.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Workaround bug in twisted where attempting too many concurrent DNS requests could cause it to hang due to running out of file descriptors.
diff --git a/changelog.d/5046.misc b/changelog.d/5046.misc
deleted file mode 100644
index eb966a5ae6..0000000000
--- a/changelog.d/5046.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove extraneous period from copyright headers.
diff --git a/changelog.d/5047.feature b/changelog.d/5047.feature
deleted file mode 100644
index 12766a82a7..0000000000
--- a/changelog.d/5047.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add time-based account expiration.
diff --git a/changelog.d/5063.feature b/changelog.d/5063.feature
deleted file mode 100644
index fd7b80018e..0000000000
--- a/changelog.d/5063.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add support for handling /verions, /voip and /push_rules client endpoints to client_reader worker.
diff --git a/changelog.d/5065.feature b/changelog.d/5065.feature
deleted file mode 100644
index fd7b80018e..0000000000
--- a/changelog.d/5065.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add support for handling /verions, /voip and /push_rules client endpoints to client_reader worker.
diff --git a/changelog.d/5067.misc b/changelog.d/5067.misc
deleted file mode 100644
index bbb4337dbf..0000000000
--- a/changelog.d/5067.misc
+++ /dev/null
@@ -1 +0,0 @@
-Update documentation for where to get Synapse packages.
diff --git a/changelog.d/5070.feature b/changelog.d/5070.feature
deleted file mode 100644
index fd7b80018e..0000000000
--- a/changelog.d/5070.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add support for handling /verions, /voip and /push_rules client endpoints to client_reader worker.
diff --git a/changelog.d/5071.bugfix b/changelog.d/5071.bugfix
deleted file mode 100644
index ddf7ab5fa8..0000000000
--- a/changelog.d/5071.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Make sure we're not registering the same 3pid twice on registration.
diff --git a/changelog.d/5073.feature b/changelog.d/5073.feature
deleted file mode 100644
index 12766a82a7..0000000000
--- a/changelog.d/5073.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add time-based account expiration.
diff --git a/changelog.d/5077.bugfix b/changelog.d/5077.bugfix
deleted file mode 100644
index f3345a6353..0000000000
--- a/changelog.d/5077.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Don't crash on lack of expiry templates.
diff --git a/changelog.d/5083.feature b/changelog.d/5083.feature
deleted file mode 100644
index 55d114b3fe..0000000000
--- a/changelog.d/5083.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add an configuration option to require authentication on /publicRooms and /profile endpoints.
diff --git a/changelog.d/5098.misc b/changelog.d/5098.misc
deleted file mode 100644
index 9cd83bf226..0000000000
--- a/changelog.d/5098.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add workarounds for pep-517 install errors.
diff --git a/changelog.d/5100.misc b/changelog.d/5100.misc
deleted file mode 100644
index db5eb1b156..0000000000
--- a/changelog.d/5100.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve logging when event-signature checks fail.
diff --git a/changelog.d/5103.bugfix b/changelog.d/5103.bugfix
deleted file mode 100644
index 590d80d58f..0000000000
--- a/changelog.d/5103.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix bug where presence updates were sent to all servers in a room when a new server joined, rather than to just the new server.
diff --git a/changelog.d/5104.bugfix b/changelog.d/5104.bugfix
deleted file mode 100644
index f88aca8a40..0000000000
--- a/changelog.d/5104.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix the ratelimting on third party invites.
diff --git a/changelog.d/5116.feature b/changelog.d/5116.feature
deleted file mode 100644
index dcbf7c1fb8..0000000000
--- a/changelog.d/5116.feature
+++ /dev/null
@@ -1 +0,0 @@
- Add time-based account expiration.
diff --git a/changelog.d/5119.feature b/changelog.d/5119.feature
deleted file mode 100644
index a3a73f09d3..0000000000
--- a/changelog.d/5119.feature
+++ /dev/null
@@ -1 +0,0 @@
-Move admin APIs to `/_synapse/admin/v1`. (The old paths are retained for backwards-compatibility, for now).
\ No newline at end of file
diff --git a/changelog.d/5120.misc b/changelog.d/5120.misc
deleted file mode 100644
index 6575f29322..0000000000
--- a/changelog.d/5120.misc
+++ /dev/null
@@ -1 +0,0 @@
-Factor out an "assert_requester_is_admin" function.
diff --git a/changelog.d/5121.feature b/changelog.d/5121.feature
deleted file mode 100644
index 54b228680d..0000000000
--- a/changelog.d/5121.feature
+++ /dev/null
@@ -1 +0,0 @@
-Implement an admin API for sending server notices. Many thanks to @krombel who provided a foundation for this work.
diff --git a/changelog.d/5122.misc b/changelog.d/5122.misc
deleted file mode 100644
index e1be8a6210..0000000000
--- a/changelog.d/5122.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove the requirement to authenticate for /admin/server_version.
diff --git a/changelog.d/5124.bugfix b/changelog.d/5124.bugfix
deleted file mode 100644
index 46df1e9fd5..0000000000
--- a/changelog.d/5124.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Add some missing limitations to room alias creation.
diff --git a/changelog.d/5128.bugfix b/changelog.d/5128.bugfix
deleted file mode 100644
index 46df1e9fd5..0000000000
--- a/changelog.d/5128.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Add some missing limitations to room alias creation.
diff --git a/changelog.d/5138.bugfix b/changelog.d/5138.bugfix
deleted file mode 100644
index c1945a8eb2..0000000000
--- a/changelog.d/5138.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Limit the number of EDUs in transactions to 100 as expected by synapse. Thanks to @superboum for this work!
diff --git a/changelog.d/5142.feature b/changelog.d/5142.feature
deleted file mode 100644
index 54b228680d..0000000000
--- a/changelog.d/5142.feature
+++ /dev/null
@@ -1 +0,0 @@
-Implement an admin API for sending server notices. Many thanks to @krombel who provided a foundation for this work.
diff --git a/changelog.d/5154.bugfix b/changelog.d/5154.bugfix
deleted file mode 100644
index c7bd5ee6cf..0000000000
--- a/changelog.d/5154.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix bogus imports in unit tests.
diff --git a/changelog.d/5155.misc b/changelog.d/5155.misc
deleted file mode 100644
index a81dbae67a..0000000000
--- a/changelog.d/5155.misc
+++ /dev/null
@@ -1 +0,0 @@
-Prevent an exception from being raised in a IResolutionReceiver and use a more generic error message for blacklisted URL previews.
\ No newline at end of file
diff --git a/changelog.d/5170.misc b/changelog.d/5170.misc
deleted file mode 100644
index 7919dac555..0000000000
--- a/changelog.d/5170.misc
+++ /dev/null
@@ -1 +0,0 @@
-Run `black` on the tests directory.
diff --git a/changelog.d/5179.misc b/changelog.d/5179.misc
deleted file mode 100644
index 92369cb2fc..0000000000
--- a/changelog.d/5179.misc
+++ /dev/null
@@ -1 +0,0 @@
-Fix CI after new release of isort.
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 315fa96551..cd9cfb2409 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -27,4 +27,4 @@ try:
 except ImportError:
     pass
 
-__version__ = "0.99.3.2"
+__version__ = "0.99.4rc1"
--
cgit 1.5.1