From 145f57897db3df082a06a791e547332278d32bfe Mon Sep 17 00:00:00 2001 From: Ike Johnson Date: Sun, 2 Jun 2019 23:10:27 +0800 Subject: Update HAProxy example rules These new rules allow a user to instead route only matrix traffic, allowing them to run matrix on the domain without affecting their existing websites --- docs/reverse_proxy.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/reverse_proxy.rst b/docs/reverse_proxy.rst index 7619b1097b..e4b870411c 100644 --- a/docs/reverse_proxy.rst +++ b/docs/reverse_proxy.rst @@ -89,8 +89,10 @@ Let's assume that we expect clients to connect to our server at bind :::443 v4v6 ssl crt /etc/ssl/haproxy/ strict-sni alpn h2,http/1.1 # Matrix client traffic - acl matrix hdr(host) -i matrix.example.com - use_backend matrix if matrix + acl matrix-host hdr(host) -i matrix.example.com + acl matrix-path path_beg /_matrix + + use_backend matrix if matrix-host matrix-path frontend matrix-federation bind :::8448 v4v6 ssl crt /etc/ssl/haproxy/synapse.pem alpn h2,http/1.1 -- cgit 1.4.1 From 0df5b41759772d8c29f7622680deab7f594c1a4c Mon Sep 17 00:00:00 2001 From: Ike Johnson Date: Sun, 2 Jun 2019 23:23:58 +0800 Subject: Create 5313.misc --- changelog.d/5313.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5313.misc diff --git a/changelog.d/5313.misc b/changelog.d/5313.misc new file mode 100644 index 0000000000..2ea01cb9d3 --- /dev/null +++ b/changelog.d/5313.misc @@ -0,0 +1 @@ +Update example haproxy config to a more compatible setup. -- cgit 1.4.1 From a52e1a3b6c5a255f4a51d48dec739751238fa1f9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 24 Jun 2019 18:37:20 +0100 Subject: Factor out "generate_config_from_template" ... and inline generate_secrets --- docker/start.py | 122 +++++++++++++++++++++++++++++++++++++------------------- 1 file changed, 81 insertions(+), 41 deletions(-) diff --git a/docker/start.py b/docker/start.py index a7a54dacf7..eab39f3c93 100755 --- a/docker/start.py +++ b/docker/start.py @@ -7,37 +7,109 @@ import subprocess import glob import codecs + # Utility functions -convert = lambda src, dst, environ: open(dst, "w").write( - jinja2.Template(open(src).read()).render(**environ) -) +def log(txt): + print(txt, file=sys.stderr) + + +def error(txt): + log(txt) + sys.exit(2) + + +def convert(src, dst, environ): + """Generate a file from a template + + Args: + src (str): path to input file + dst (str): path to file to write + environ (dict): environment dictionary, for replacement mappings. + """ + with open(src) as infile: + template = infile.read() + rendered = jinja2.Template(template).render(**environ) + with open(dst, "w") as outfile: + outfile.write(rendered) def check_arguments(environ, args): for argument in args: if argument not in environ: - print("Environment variable %s is mandatory, exiting." % argument) - sys.exit(2) + error("Environment variable %s is mandatory, exiting." % argument) + + +def generate_config_from_template(environ, ownership): + """Generate a homeserver.yaml from environment variables + + Args: + environ (dict): environment dictionary + ownership (str): ":" string which will be used to set + ownership of the generated configs + Returns: + path to generated config file + """ + for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"): + if v not in environ: + error( + "Environment variable '%s' is mandatory when generating a config " + "file on-the-fly." 
% (v,) + ) + + # populate some params from data files (if they exist, else create new ones) + environ = environ.copy() + secrets = { + "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET", + "macaroon": "SYNAPSE_MACAROON_SECRET_KEY", + } -def generate_secrets(environ, secrets): for name, secret in secrets.items(): if secret not in environ: filename = "/data/%s.%s.key" % (environ["SYNAPSE_SERVER_NAME"], name) + + # if the file already exists, load in the existing value; otherwise, + # generate a new secret and write it to a file + if os.path.exists(filename): with open(filename) as handle: value = handle.read() else: - print("Generating a random secret for {}".format(name)) + log("Generating a random secret for {}".format(name)) value = codecs.encode(os.urandom(32), "hex").decode() with open(filename, "w") as handle: handle.write(value) environ[secret] = value + environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml") + if not os.path.exists("/compiled"): + os.mkdir("/compiled") + + config_path = "/compiled/homeserver.yaml" + + # Convert SYNAPSE_NO_TLS to boolean if exists + if "SYNAPSE_NO_TLS" in environ: + tlsanswerstring = str.lower(environ["SYNAPSE_NO_TLS"]) + if tlsanswerstring in ("true", "on", "1", "yes"): + environ["SYNAPSE_NO_TLS"] = True + else: + if tlsanswerstring in ("false", "off", "0", "no"): + environ["SYNAPSE_NO_TLS"] = False + else: + error( + 'Environment variable "SYNAPSE_NO_TLS" found but value "' + + tlsanswerstring + + '" unrecognized; exiting.' + ) + + convert("/conf/homeserver.yaml", config_path, environ) + convert("/conf/log.config", "/compiled/log.config", environ) + subprocess.check_output(["chown", "-R", ownership, "/data"]) + return config_path + # Prepare the configuration mode = sys.argv[1] if len(sys.argv) > 1 else None -environ = os.environ.copy() ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991)) args = ["python", "-m", "synapse.app.homeserver"] @@ -62,39 +134,7 @@ else: if "SYNAPSE_CONFIG_PATH" in environ: config_path = environ["SYNAPSE_CONFIG_PATH"] else: - check_arguments(environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS")) - generate_secrets( - environ, - { - "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET", - "macaroon": "SYNAPSE_MACAROON_SECRET_KEY", - }, - ) - environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml") - if not os.path.exists("/compiled"): - os.mkdir("/compiled") - - config_path = "/compiled/homeserver.yaml" - - # Convert SYNAPSE_NO_TLS to boolean if exists - if "SYNAPSE_NO_TLS" in environ: - tlsanswerstring = str.lower(environ["SYNAPSE_NO_TLS"]) - if tlsanswerstring in ("true", "on", "1", "yes"): - environ["SYNAPSE_NO_TLS"] = True - else: - if tlsanswerstring in ("false", "off", "0", "no"): - environ["SYNAPSE_NO_TLS"] = False - else: - print( - 'Environment variable "SYNAPSE_NO_TLS" found but value "' - + tlsanswerstring - + '" unrecognized; exiting.' 
- ) - sys.exit(2) - - convert("/conf/homeserver.yaml", config_path, environ) - convert("/conf/log.config", "/compiled/log.config", environ) - subprocess.check_output(["chown", "-R", ownership, "/data"]) + config_path = generate_config_from_template(environ, ownership) args += [ "--config-path", -- cgit 1.4.1 From b1fddb7f699a595d39507e876d950f256ab0f8aa Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 24 Jun 2019 18:55:37 +0100 Subject: Factor out a run_generate_config function --- docker/start.py | 45 ++++++++++++++++++++++++++++----------------- 1 file changed, 28 insertions(+), 17 deletions(-) diff --git a/docker/start.py b/docker/start.py index eab39f3c93..70942bcbee 100755 --- a/docker/start.py +++ b/docker/start.py @@ -33,12 +33,6 @@ def convert(src, dst, environ): outfile.write(rendered) -def check_arguments(environ, args): - for argument in args: - if argument not in environ: - error("Environment variable %s is mandatory, exiting." % argument) - - def generate_config_from_template(environ, ownership): """Generate a homeserver.yaml from environment variables @@ -108,17 +102,22 @@ def generate_config_from_template(environ, ownership): return config_path -# Prepare the configuration -mode = sys.argv[1] if len(sys.argv) > 1 else None -ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991)) -args = ["python", "-m", "synapse.app.homeserver"] +def run_generate_config(environ): + """Run synapse with a --generate-config param to generate a template config file -# In generate mode, generate a configuration, missing keys, then exit -if mode == "generate": - check_arguments( - environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS", "SYNAPSE_CONFIG_PATH") - ) - args += [ + Args: + environ (dict): environment dictionary + + Never returns. + """ + for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS", "SYNAPSE_CONFIG_PATH"): + if v not in environ: + error("Environment variable '%s' is mandatory in `generate` mode." 
% (v,)) + + args = [ + "python", + "-m", + "synapse.app.homeserver", "--server-name", environ["SYNAPSE_SERVER_NAME"], "--report-stats", @@ -129,6 +128,15 @@ if mode == "generate": ] os.execv("/usr/local/bin/python", args) + +# Prepare the configuration +mode = sys.argv[1] if len(sys.argv) > 1 else None +ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991)) + +# In generate mode, generate a configuration, missing keys, then exit +if mode == "generate": + run_generate_config(environ) + # In normal mode, generate missing keys if any, then run synapse else: if "SYNAPSE_CONFIG_PATH" in environ: @@ -136,7 +144,10 @@ else: else: config_path = generate_config_from_template(environ, ownership) - args += [ + args = [ + "python", + "-m", + "synapse.app.homeserver", "--config-path", config_path, # tell synapse to put any generated keys in /data rather than /compiled -- cgit 1.4.1 From 3f24e4dce7d4dfb50f0f8d958f6f4daf2dbdc70c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 25 Jun 2019 14:23:24 +0100 Subject: Add a main() function --- docker/start.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/docker/start.py b/docker/start.py index 70942bcbee..df907fe15c 100755 --- a/docker/start.py +++ b/docker/start.py @@ -129,16 +129,15 @@ def run_generate_config(environ): os.execv("/usr/local/bin/python", args) -# Prepare the configuration -mode = sys.argv[1] if len(sys.argv) > 1 else None -ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991)) +def main(args, environ): + mode = args[1] if len(args) > 1 else None + ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991)) -# In generate mode, generate a configuration, missing keys, then exit -if mode == "generate": - run_generate_config(environ) + # In generate mode, generate a configuration, missing keys, then exit + if mode == "generate": + return run_generate_config(environ) -# In normal mode, generate missing keys if any, then run synapse -else: + # In normal mode, generate missing keys if any, then run synapse if "SYNAPSE_CONFIG_PATH" in environ: config_path = environ["SYNAPSE_CONFIG_PATH"] else: @@ -158,3 +157,7 @@ else: # Generate missing keys and start synapse subprocess.check_output(args + ["--generate-keys"]) os.execv("/sbin/su-exec", ["su-exec", ownership] + args) + + +if __name__ == "__main__": + main(sys.argv, os.environ) -- cgit 1.4.1 From 5375c3a9b8121d121cc68756654f4fc20c481995 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 25 Jun 2019 15:10:36 +0100 Subject: isort --- docker/start.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/docker/start.py b/docker/start.py index df907fe15c..bdb703aebd 100755 --- a/docker/start.py +++ b/docker/start.py @@ -1,11 +1,12 @@ #!/usr/local/bin/python -import jinja2 +import codecs +import glob import os -import sys import subprocess -import glob -import codecs +import sys + +import jinja2 # Utility functions -- cgit 1.4.1 From a2f6d31a63012531935566e380dfb5edd81dcbd0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Jun 2019 11:56:52 +0100 Subject: Refactor get_user_ids_changed to pull less from DB When a client asks for users whose devices have changed since a token we used to pull *all* users from the database since the token, which could easily be thousands of rows for old tokens. This PR changes this to only check for changes for users the client is actually interested in. 
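For illustration, the shape of the change is a move from an unbounded scan of
the stream to a filtered, batched lookup (a sketch of the two queries; the
exact code is in the diff below):

    # Before: pulls back every user whose devices changed since the token.
    sql = "SELECT DISTINCT user_id FROM device_lists_stream WHERE stream_id > ?"

    # After: restricts the check to users the client shares a room with,
    # queried in batches.
    sql = """
        SELECT DISTINCT user_id FROM device_lists_stream
        WHERE stream_id > ? AND user_id IN (%s)
    """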
Fixes #5553 --- synapse/handlers/device.py | 12 +++++------ synapse/handlers/sync.py | 22 +++++++++----------- synapse/storage/devices.py | 51 +++++++++++++++++++++++++++++++++++++--------- 3 files changed, 57 insertions(+), 28 deletions(-) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index f59d0479b5..2b6c2117f9 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -101,9 +101,13 @@ class DeviceWorkerHandler(BaseHandler): room_ids = yield self.store.get_rooms_for_user(user_id) - # First we check if any devices have changed + # First we check if any devices have changed for users that we share + # rooms with. + users_who_share_room = yield self.store.get_users_who_share_room_with_user( + user_id + ) changed = yield self.store.get_user_whose_devices_changed( - from_token.device_list_key + from_token.device_list_key, users_who_share_room ) # Then work out if any users have since joined @@ -188,10 +192,6 @@ class DeviceWorkerHandler(BaseHandler): break if possibly_changed or possibly_left: - users_who_share_room = yield self.store.get_users_who_share_room_with_user( - user_id - ) - # Take the intersection of the users whose devices may have changed # and those that actually still share a room with the user possibly_joined = possibly_changed & users_who_share_room diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index c5188a1f8e..8249e75ecd 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1062,10 +1062,6 @@ class SyncHandler(object): since_token = sync_result_builder.since_token if since_token and since_token.device_list_key: - changed = yield self.store.get_user_whose_devices_changed( - since_token.device_list_key - ) - # TODO: Be more clever than this, i.e. remove users who we already # share a room with? for room_id in newly_joined_rooms: @@ -1076,21 +1072,23 @@ class SyncHandler(object): left_users = yield self.state.get_current_users_in_room(room_id) newly_left_users.update(left_users) + users_who_share_room = yield self.store.get_users_who_share_room_with_user( + user_id + ) + # TODO: Check that these users are actually new, i.e. either they # weren't in the previous sync *or* they left and rejoined. - changed.update(newly_joined_or_invited_users) - - if not changed and not newly_left_users: - defer.returnValue(DeviceLists(changed=[], left=newly_left_users)) + changed = users_who_share_room & set(newly_joined_or_invited_users) - users_who_share_room = yield self.store.get_users_who_share_room_with_user( - user_id + changed_users = yield self.store.get_user_whose_devices_changed( + since_token.device_list_key, users_who_share_room ) + changed.update(changed_users) + defer.returnValue( DeviceLists( - changed=users_who_share_room & changed, - left=set(newly_left_users) - users_who_share_room, + changed=changed, left=set(newly_left_users) - users_who_share_room ) ) else: diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 3413a46675..3af0171f75 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -391,22 +391,53 @@ class DeviceWorkerStore(SQLBaseStore): return now_stream_id, [] - @defer.inlineCallbacks - def get_user_whose_devices_changed(self, from_key): - """Get set of users whose devices have changed since `from_key`. + def get_user_whose_devices_changed(self, from_key, user_ids): + """Get set of users whose devices have changed since `from_key` that + are in the given list of user_ids. 
+ + Args: + user_ids (Iterable[str]) + from_key: The device lists stream token + + Returns: + Deferred[set[str]]: The set of user_ids whose devices have changed + since `from_key` """ from_key = int(from_key) - changed = self._device_list_stream_cache.get_all_entities_changed(from_key) - if changed is not None: - defer.returnValue(set(changed)) + + # Get set of users who *may* have changed. Users not in the returned + # list have definitely not changed. + to_check = list( + self._device_list_stream_cache.get_entities_changed(user_ids, from_key) + ) + + if not to_check: + return defer.succeed(set()) + + # We now check the database for all users in `to_check`, in batches. + batch_size = 100 + chunks = [ + to_check[i : i + batch_size] for i in range(0, len(to_check), batch_size) + ] sql = """ - SELECT DISTINCT user_id FROM device_lists_stream WHERE stream_id > ? + SELECT DISTINCT user_id FROM device_lists_stream + WHERE stream_id > ? + AND user_id IN (%s) """ - rows = yield self._execute( - "get_user_whose_devices_changed", None, sql, from_key + + def _get_user_whose_devices_changed_txn(txn): + changes = set() + + for chunk in chunks: + txn.execute(sql % (",".join("?" for _ in chunk),), [from_key] + chunk) + changes.update(user_id for user_id, in txn) + + return changes + + return self.runInteraction( + "get_user_whose_devices_changed", _get_user_whose_devices_changed_txn ) - defer.returnValue(set(row[0] for row in rows)) def get_all_device_list_changes_for_remotes(self, from_key, to_key): """Return a list of `(stream_id, user_id, destination)` which is the -- cgit 1.4.1 From 508c3ce3d71b4711f6e50e7f1e74d71cb46f61d9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Jun 2019 12:03:49 +0100 Subject: Newsfile --- changelog.d/5559.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5559.misc diff --git a/changelog.d/5559.misc b/changelog.d/5559.misc new file mode 100644 index 0000000000..b77b383459 --- /dev/null +++ b/changelog.d/5559.misc @@ -0,0 +1 @@ +Optimise devices changed query to not pull unnecessary rows from the database, reducing database load. -- cgit 1.4.1 From 043ab6da13cbc49d221b6c220ad0b360b2095643 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 26 Jun 2019 15:28:28 +0100 Subject: changelog --- changelog.d/5561.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5561.feature diff --git a/changelog.d/5561.feature b/changelog.d/5561.feature new file mode 100644 index 0000000000..85380bc517 --- /dev/null +++ b/changelog.d/5561.feature @@ -0,0 +1 @@ +Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default. 
-- cgit 1.4.1 From a1732bbff995345e6b6bd79823bb5309038bbe56 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 25 Jun 2019 14:59:02 +0100 Subject: improve logging for generate_config_from_template --- docker/start.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/start.py b/docker/start.py index bdb703aebd..9ca139c3ea 100755 --- a/docker/start.py +++ b/docker/start.py @@ -67,10 +67,11 @@ def generate_config_from_template(environ, ownership): # generate a new secret and write it to a file if os.path.exists(filename): + log("Reading %s from %s" % (secret, filename)) with open(filename) as handle: value = handle.read() else: - log("Generating a random secret for {}".format(name)) + log("Generating a random secret for {}".format(secret)) value = codecs.encode(os.urandom(32), "hex").decode() with open(filename, "w") as handle: handle.write(value) -- cgit 1.4.1 From a5fba9c27c5b27fc3c0b1035e087e46fc11e4d79 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 25 Jun 2019 18:09:09 +0100 Subject: Docker: only run --generate-keys when generating config on-the-fly. We don't want to generate any missing configs when running from a precanned config. (There's a strong argument that we don't want to do this at all, since generating a new signing key on each invocation sounds disasterous, but I don't fancy unpicking that for now.) --- docker/start.py | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/docker/start.py b/docker/start.py index 9ca139c3ea..59527a5883 100755 --- a/docker/start.py +++ b/docker/start.py @@ -101,6 +101,24 @@ def generate_config_from_template(environ, ownership): convert("/conf/homeserver.yaml", config_path, environ) convert("/conf/log.config", "/compiled/log.config", environ) subprocess.check_output(["chown", "-R", ownership, "/data"]) + + # Hopefully we already have a signing key, but generate one if not. + subprocess.check_output( + [ + "su-exec", + ownership, + "python", + "-m", + "synapse.app.homeserver", + "--config-path", + config_path, + # tell synapse to put generated keys in /data rather than /compiled + "--keys-directory", + "/data", + "--generate-keys", + ] + ) + return config_path @@ -146,19 +164,15 @@ def main(args, environ): config_path = generate_config_from_template(environ, ownership) args = [ + "su-exec", + ownership, "python", "-m", "synapse.app.homeserver", "--config-path", config_path, - # tell synapse to put any generated keys in /data rather than /compiled - "--keys-directory", - "/data", ] - - # Generate missing keys and start synapse - subprocess.check_output(args + ["--generate-keys"]) - os.execv("/sbin/su-exec", ["su-exec", ownership] + args) + os.execv("/sbin/su-exec", args) if __name__ == "__main__": -- cgit 1.4.1 From 7c453472e48e1ca4dd6fcec587d7fdbf20b514f0 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 26 Jun 2019 15:32:55 +0100 Subject: changelog --- changelog.d/5562.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5562.feature diff --git a/changelog.d/5562.feature b/changelog.d/5562.feature new file mode 100644 index 0000000000..85380bc517 --- /dev/null +++ b/changelog.d/5562.feature @@ -0,0 +1 @@ +Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default. 
-- cgit 1.4.1 From c58a6e6108d2891d10ce7f29e4b0169a3e026787 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 25 Jun 2019 15:09:22 +0100 Subject: document supported env vars for docker 'generate' option --- docker/README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docker/README.md b/docker/README.md index 5a596eecb9..10af88844b 100644 --- a/docker/README.md +++ b/docker/README.md @@ -179,3 +179,10 @@ docker run -d --name synapse \ -e SYNAPSE_CONFIG_PATH=/data/homeserver.yaml \ matrixdotorg/synapse:latest ``` + +The following environment variables are supported in this mode: + +* `SYNAPSE_SERVER_NAME` (mandatory): the server public hostname. +* `SYNAPSE_REPORT_STATS` (mandatory, `yes` or `no`): whether to enable + anonymous statistics reporting. +* `SYNAPSE_CONFIG_PATH` (mandatory): path to the file to be generated. -- cgit 1.4.1 From 7e433beb65946a39442f025705429227bfa3e429 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 25 Jun 2019 15:18:30 +0100 Subject: Docker image: add support for SYNAPSE_DATA_DIR parameter Fixes #4830. --- docker/README.md | 4 ++++ docker/start.py | 17 +++++++++++++---- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/docker/README.md b/docker/README.md index 10af88844b..5c85c538a8 100644 --- a/docker/README.md +++ b/docker/README.md @@ -186,3 +186,7 @@ The following environment variables are supported in this mode: * `SYNAPSE_REPORT_STATS` (mandatory, `yes` or `no`): whether to enable anonymous statistics reporting. * `SYNAPSE_CONFIG_PATH` (mandatory): path to the file to be generated. +* `SYNAPSE_DATA_DIR`: where the generated config will put persistent data + such as the datatase and media store. Defaults to `/data`. +* `UID`, `GID`: the user id and group id to use for creating the data + directories. Defaults to `991`, `991`. diff --git a/docker/start.py b/docker/start.py index bdb703aebd..ad968b2a67 100755 --- a/docker/start.py +++ b/docker/start.py @@ -103,11 +103,12 @@ def generate_config_from_template(environ, ownership): return config_path -def run_generate_config(environ): +def run_generate_config(environ, ownership): """Run synapse with a --generate-config param to generate a template config file Args: - environ (dict): environment dictionary + environ (dict): env var dict + ownership (str): "userid:groupid" arg for chmod Never returns. """ @@ -115,6 +116,11 @@ def run_generate_config(environ): if v not in environ: error("Environment variable '%s' is mandatory in `generate` mode." % (v,)) + data_dir = environ.get("SYNAPSE_DATA_DIR", "/data") + + # make sure that synapse has perms to write to the data dir. 
+ subprocess.check_output(["chown", ownership, data_dir]) + args = [ "python", "-m", @@ -125,8 +131,11 @@ def run_generate_config(environ): environ["SYNAPSE_REPORT_STATS"], "--config-path", environ["SYNAPSE_CONFIG_PATH"], + "--data-directory", + data_dir, "--generate-config", ] + # log("running %s" % (args, )) os.execv("/usr/local/bin/python", args) @@ -134,9 +143,9 @@ def main(args, environ): mode = args[1] if len(args) > 1 else None ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991)) - # In generate mode, generate a configuration, missing keys, then exit + # In generate mode, generate a configuration and missing keys, then exit if mode == "generate": - return run_generate_config(environ) + return run_generate_config(environ, ownership) # In normal mode, generate missing keys if any, then run synapse if "SYNAPSE_CONFIG_PATH" in environ: -- cgit 1.4.1 From a0f2921ccfd5eac2b660274d4de3d05fdc0f390d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 26 Jun 2019 15:41:10 +0100 Subject: changelog --- changelog.d/5563.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5563.bugfix diff --git a/changelog.d/5563.bugfix b/changelog.d/5563.bugfix new file mode 100644 index 0000000000..09c4381a23 --- /dev/null +++ b/changelog.d/5563.bugfix @@ -0,0 +1 @@ +Docker: Use a sensible location for data files when generating a config file. \ No newline at end of file -- cgit 1.4.1 From 6347dc1beddf71411d0615f4e4101b814457a836 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 25 Jun 2019 18:21:32 +0100 Subject: Add support for SYNAPSE_CONFIG_DIR --- docker/README.md | 2 ++ docker/start.py | 3 +++ 2 files changed, 5 insertions(+) diff --git a/docker/README.md b/docker/README.md index 5c85c538a8..a42d1d24b6 100644 --- a/docker/README.md +++ b/docker/README.md @@ -186,6 +186,8 @@ The following environment variables are supported in this mode: * `SYNAPSE_REPORT_STATS` (mandatory, `yes` or `no`): whether to enable anonymous statistics reporting. * `SYNAPSE_CONFIG_PATH` (mandatory): path to the file to be generated. +* `SYNAPSE_CONFIG_DIR`: where additional config files (such as the log config + and event signing key) will be stored. Defaults to `/data`. * `SYNAPSE_DATA_DIR`: where the generated config will put persistent data such as the datatase and media store. Defaults to `/data`. * `UID`, `GID`: the user id and group id to use for creating the data diff --git a/docker/start.py b/docker/start.py index ad968b2a67..99107a9d54 100755 --- a/docker/start.py +++ b/docker/start.py @@ -116,6 +116,7 @@ def run_generate_config(environ, ownership): if v not in environ: error("Environment variable '%s' is mandatory in `generate` mode." % (v,)) + config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data") data_dir = environ.get("SYNAPSE_DATA_DIR", "/data") # make sure that synapse has perms to write to the data dir. @@ -131,6 +132,8 @@ def run_generate_config(environ, ownership): environ["SYNAPSE_REPORT_STATS"], "--config-path", environ["SYNAPSE_CONFIG_PATH"], + "--config-directory", + config_dir, "--data-directory", data_dir, "--generate-config", -- cgit 1.4.1 From 28e30c6581b051c13d53d5abc6eab8b6c7d0f96f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 25 Jun 2019 18:23:17 +0100 Subject: Docker: generate our own log config When running under docker, we want to use docker's own logging stuff rather than losing the logs somewhere on the container's filesystem, so let's use log configs that spit logs out to stdout instead. 
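As a rough sketch only (the real template is docker/conf/log.config, which is
not part of this diff; the handler names and format string here are
assumptions), a stdout-based log config boils down to something like:

    # Illustrative Python equivalent of a log config that writes to stdout,
    # so that `docker logs` picks everything up.
    import logging.config

    logging.config.dictConfig({
        "version": 1,
        "formatters": {
            "precise": {
                "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
            },
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "formatter": "precise",
                "stream": "ext://sys.stdout",
            },
        },
        "root": {"level": "INFO", "handlers": ["console"]},
    })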
--- docker/start.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/docker/start.py b/docker/start.py index 99107a9d54..06b38ec9db 100755 --- a/docker/start.py +++ b/docker/start.py @@ -116,9 +116,16 @@ def run_generate_config(environ, ownership): if v not in environ: error("Environment variable '%s' is mandatory in `generate` mode." % (v,)) + server_name = environ["SYNAPSE_SERVER_NAME"] config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data") data_dir = environ.get("SYNAPSE_DATA_DIR", "/data") + # create a suitable log config from our template + log_config_file = "%s/%s.log.config" % (config_dir, server_name) + if not os.path.exists(log_config_file): + log("Creating log config %s" % (log_config_file,)) + convert("/conf/log.config", log_config_file, environ) + # make sure that synapse has perms to write to the data dir. subprocess.check_output(["chown", ownership, data_dir]) @@ -127,7 +134,7 @@ def run_generate_config(environ, ownership): "-m", "synapse.app.homeserver", "--server-name", - environ["SYNAPSE_SERVER_NAME"], + server_name, "--report-stats", environ["SYNAPSE_REPORT_STATS"], "--config-path", -- cgit 1.4.1 From befa116b3104b3259254f10b1cccad1b18042a72 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 26 Jun 2019 15:52:00 +0100 Subject: changelog --- changelog.d/5565.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5565.feature diff --git a/changelog.d/5565.feature b/changelog.d/5565.feature new file mode 100644 index 0000000000..4b0665af03 --- /dev/null +++ b/changelog.d/5565.feature @@ -0,0 +1 @@ +Docker: Send synapse logs to the docker logging system, by default. -- cgit 1.4.1 From 806a06daf2b30691c2c69e32d1ff2e104436bbc4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Jun 2019 19:09:10 +0100 Subject: Rename get_users_whose_devices_changed --- synapse/handlers/device.py | 2 +- synapse/handlers/sync.py | 2 +- synapse/storage/devices.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 2b6c2117f9..99e8413092 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -106,7 +106,7 @@ class DeviceWorkerHandler(BaseHandler): users_who_share_room = yield self.store.get_users_who_share_room_with_user( user_id ) - changed = yield self.store.get_user_whose_devices_changed( + changed = yield self.store.get_users_whose_devices_changed( from_token.device_list_key, users_who_share_room ) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 8249e75ecd..f70ebfdee7 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1080,7 +1080,7 @@ class SyncHandler(object): # weren't in the previous sync *or* they left and rejoined. changed = users_who_share_room & set(newly_joined_or_invited_users) - changed_users = yield self.store.get_user_whose_devices_changed( + changed_users = yield self.store.get_users_whose_devices_changed( since_token.device_list_key, users_who_share_room ) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 3af0171f75..97f6cd2754 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -391,7 +391,7 @@ class DeviceWorkerStore(SQLBaseStore): return now_stream_id, [] - def get_user_whose_devices_changed(self, from_key, user_ids): + def get_users_whose_devices_changed(self, from_key, user_ids): """Get set of users whose devices have changed since `from_key` that are in the given list of user_ids. 
@@ -426,7 +426,7 @@ class DeviceWorkerStore(SQLBaseStore): AND user_id IN (%s) """ - def _get_user_whose_devices_changed_txn(txn): + def _get_users_whose_devices_changed_txn(txn): changes = set() for chunk in chunks: @@ -436,7 +436,7 @@ class DeviceWorkerStore(SQLBaseStore): return changes return self.runInteraction( - "get_user_whose_devices_changed", _get_user_whose_devices_changed_txn + "get_users_whose_devices_changed", _get_users_whose_devices_changed_txn ) def get_all_device_list_changes_for_remotes(self, from_key, to_key): -- cgit 1.4.1 From f335e77d5330d13cbaf61b7b903980bae60761d7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Jun 2019 19:10:38 +0100 Subject: Use batch_iter and correct docstring --- synapse/storage/devices.py | 27 ++++++++++++--------------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 97f6cd2754..44324bf400 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -24,6 +24,7 @@ from synapse.api.errors import StoreError from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage._base import Cache, SQLBaseStore, db_to_json from synapse.storage.background_updates import BackgroundUpdateStore +from synapse.util import batch_iter from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList logger = logging.getLogger(__name__) @@ -396,8 +397,8 @@ class DeviceWorkerStore(SQLBaseStore): are in the given list of user_ids. Args: + from_key (str): The device lists stream token user_ids (Iterable[str]) - from_key: The device lists stream token Returns: Deferred[set[str]]: The set of user_ids whose devices have changed @@ -414,23 +415,19 @@ class DeviceWorkerStore(SQLBaseStore): if not to_check: return defer.succeed(set()) - # We now check the database for all users in `to_check`, in batches. - batch_size = 100 - chunks = [ - to_check[i : i + batch_size] for i in range(0, len(to_check), batch_size) - ] - - sql = """ - SELECT DISTINCT user_id FROM device_lists_stream - WHERE stream_id > ? - AND user_id IN (%s) - """ - def _get_users_whose_devices_changed_txn(txn): changes = set() - for chunk in chunks: - txn.execute(sql % (",".join("?" for _ in chunk),), [from_key] + chunk) + sql = """ + SELECT DISTINCT user_id FROM device_lists_stream + WHERE stream_id > ? + AND user_id IN (%s) + """ + + for chunk in batch_iter(to_check, 100): + txn.execute( + sql % (",".join("?" for _ in chunk),), [from_key] + list(chunk) + ) changes.update(user_id for user_id, in txn) return changes -- cgit 1.4.1 From 8624db3194789cdc98e4d2a8e0da324609497fb6 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Jun 2019 19:30:35 +0100 Subject: Refactor and comment sync device list code --- synapse/handlers/sync.py | 70 ++++++++++++++++++++++++++++++++++++------------ 1 file changed, 53 insertions(+), 17 deletions(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index f70ebfdee7..4f737d0a12 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1058,38 +1058,74 @@ class SyncHandler(object): newly_left_rooms, newly_left_users, ): + """Generate the DeviceLists section of sync + + Args: + sync_result_builder (SyncResultBuilder) + newly_joined_rooms (set[str]): Set of rooms user has joined since + previous sync + newly_joined_or_invited_users (set[str]): Set of users that have + joined or been invited to a room since previous sync. 
+ newly_left_rooms (set[str]): Set of rooms user has left since + previous sync + newly_left_users (set[str]): Set of users that have left a room + we're in since previous sync + + Returns: + Deferred[DeviceLists] + """ + user_id = sync_result_builder.sync_config.user.to_string() since_token = sync_result_builder.since_token - if since_token and since_token.device_list_key: - # TODO: Be more clever than this, i.e. remove users who we already - # share a room with? - for room_id in newly_joined_rooms: - joined_users = yield self.state.get_current_users_in_room(room_id) - newly_joined_or_invited_users.update(joined_users) + # We're going to mutate these fields, so lets copy them rather than + # assume they won't get used later. + newly_joined_or_invited_users = set(newly_joined_or_invited_users) + newly_left_users = set(newly_left_users) - for room_id in newly_left_rooms: - left_users = yield self.state.get_current_users_in_room(room_id) - newly_left_users.update(left_users) + if since_token and since_token.device_list_key: + # We want to figure out what user IDs the client should refetch + # device keys for, and which users we aren't going to track changes + # for anymore. + # + # For the first step we check: + # 1. if any users we share a room with have updated their devices, + # and + # 2. we also check if we've joined any new rooms, or if a user has + # joined a room we're in. + # + # For the second step we just find any users we no longer share a + # room with by looking at all users that have left a room plus users + # that were in a room we've left. users_who_share_room = yield self.store.get_users_who_share_room_with_user( user_id ) + # Step 1, check for changes in devices of users we share a room with + users_that_have_changed = yield self.store.get_users_whose_devices_changed( + since_token.device_list_key, users_who_share_room + ) + + # Step 2, check for newly joined rooms + for room_id in newly_joined_rooms: + joined_users = yield self.state.get_current_users_in_room(room_id) + newly_joined_or_invited_users.update(joined_users) + # TODO: Check that these users are actually new, i.e. either they # weren't in the previous sync *or* they left and rejoined. - changed = users_who_share_room & set(newly_joined_or_invited_users) + users_that_have_changed.update(newly_joined_or_invited_users) - changed_users = yield self.store.get_users_whose_devices_changed( - since_token.device_list_key, users_who_share_room - ) + # Now find users that we no longer track + for room_id in newly_left_rooms: + left_users = yield self.state.get_current_users_in_room(room_id) + newly_left_users.update(left_users) - changed.update(changed_users) + # Remove any users that we still share a room with. 
+ newly_left_users -= users_who_share_room defer.returnValue( - DeviceLists( - changed=changed, left=set(newly_left_users) - users_who_share_room - ) + DeviceLists(changed=users_that_have_changed, left=newly_left_users) ) else: defer.returnValue(DeviceLists(changed=[], left=[])) -- cgit 1.4.1 From 82028d723b1533832c22b2acced1ff5d1a0fb51a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Jun 2019 19:33:11 +0100 Subject: Move changelog --- changelog.d/5559.feature | 1 + changelog.d/5559.misc | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 changelog.d/5559.feature delete mode 100644 changelog.d/5559.misc diff --git a/changelog.d/5559.feature b/changelog.d/5559.feature new file mode 100644 index 0000000000..b77b383459 --- /dev/null +++ b/changelog.d/5559.feature @@ -0,0 +1 @@ +Optimise devices changed query to not pull unnecessary rows from the database, reducing database load. diff --git a/changelog.d/5559.misc b/changelog.d/5559.misc deleted file mode 100644 index b77b383459..0000000000 --- a/changelog.d/5559.misc +++ /dev/null @@ -1 +0,0 @@ -Optimise devices changed query to not pull unnecessary rows from the database, reducing database load. -- cgit 1.4.1 From db56383b24e20617653b5735972507dc730536c5 Mon Sep 17 00:00:00 2001 From: jon r Date: Thu, 27 Jun 2019 00:08:18 +0200 Subject: Update purge_api README This points the reverse links at the intended location. --- contrib/purge_api/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/contrib/purge_api/README.md b/contrib/purge_api/README.md index 000bf35ca7..06b4cdb9f7 100644 --- a/contrib/purge_api/README.md +++ b/contrib/purge_api/README.md @@ -3,7 +3,7 @@ Purge history API examples # `purge_history.sh` -A bash file, that uses the [purge history API](/docs/admin_api/README.rst) to +A bash file, that uses the [purge history API](/docs/admin_api/purge_history_api.rst) to purge all messages in a list of rooms up to a certain event. You can select a timeframe or a number of messages that you want to keep in the room. @@ -12,5 +12,5 @@ the script. # `purge_remote_media.sh` -A bash file, that uses the [purge history API](/docs/admin_api/README.rst) to +A bash file, that uses the [purge history API](/docs/admin_api/purge_history_api.rst) to purge all old cached remote media. -- cgit 1.4.1 From 536820e572c53d9e3d860fb071c5bb5895844983 Mon Sep 17 00:00:00 2001 From: jon r Date: Thu, 27 Jun 2019 00:13:34 +0200 Subject: Create 5570.misc Signed-off-by: Jon Richter --- changelog.d/5570.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5570.misc diff --git a/changelog.d/5570.misc b/changelog.d/5570.misc new file mode 100644 index 0000000000..f329fd9d67 --- /dev/null +++ b/changelog.d/5570.misc @@ -0,0 +1 @@ +Point the reverse links in the Purge History API README at the intended location. -- cgit 1.4.1 From a11475f396e8526ce93d1754a6c3fea24e64b9cb Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 27 Jun 2019 00:57:59 +0100 Subject: fix changelog --- changelog.d/5570.misc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.d/5570.misc b/changelog.d/5570.misc index f329fd9d67..dfb1d7e58b 100644 --- a/changelog.d/5570.misc +++ b/changelog.d/5570.misc @@ -1 +1 @@ -Point the reverse links in the Purge History API README at the intended location. +Point the reverse links in the Purge History contrib scripts at the intended location. 
-- cgit 1.4.1 From b2d2617c0de61db4cf81336d21e17bccc832eb70 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 27 Jun 2019 11:18:51 +0100 Subject: Reduce the amount of stuff we send in the docker context (#5564) this makes docker builds a bit faster. --- .dockerignore | 22 +++++++++++++--------- changelog.d/5564.misc | 1 + 2 files changed, 14 insertions(+), 9 deletions(-) create mode 100644 changelog.d/5564.misc diff --git a/.dockerignore b/.dockerignore index 3c3996eb4c..f6c638b0a2 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,9 +1,13 @@ -Dockerfile -.travis.yml -.gitignore -demo/etc -tox.ini -.git/* -.tox/* -debian/matrix-synapse/ -debian/matrix-synapse-*/ +# ignore everything by default +* + +# things to include +!docker +!scripts +!synapse +!MANIFEST.in +!README.rst +!setup.py +!synctl + +**/__pycache__ diff --git a/changelog.d/5564.misc b/changelog.d/5564.misc new file mode 100644 index 0000000000..e209cdcc29 --- /dev/null +++ b/changelog.d/5564.misc @@ -0,0 +1 @@ +Reduce the amount of stuff we send in the docker context. -- cgit 1.4.1 From 856ea04eb32f1f40c37d0534955d900fea2c7069 Mon Sep 17 00:00:00 2001 From: PauRE Date: Thu, 27 Jun 2019 13:02:41 +0200 Subject: Fix JWT login (#5555) * Fix JWT login with register Signed-off-by: Pau Rodriguez-Estivill * Add pyjwt conditional dependency Signed-off-by: Pau Rodriguez-Estivill * Added changelog file Signed-off-by: Pau Rodriguez-Estivill * Improved changelog description Signed-off-by: Pau Rodriguez-Estivill --- changelog.d/5555.bugfix | 1 + synapse/python_dependencies.py | 1 + synapse/rest/client/v1/login.py | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog.d/5555.bugfix diff --git a/changelog.d/5555.bugfix b/changelog.d/5555.bugfix new file mode 100644 index 0000000000..c0b1ecf81a --- /dev/null +++ b/changelog.d/5555.bugfix @@ -0,0 +1 @@ +Fixed m.login.jwt using unregistred user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 13698d9638..6324c00ef1 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -95,6 +95,7 @@ CONDITIONAL_REQUIREMENTS = { "url_preview": ["lxml>=3.5.0"], "test": ["mock>=2.0", "parameterized"], "sentry": ["sentry-sdk>=0.7.2"], + "jwt": ["pyjwt>=1.6.4"], } ALL_OPTIONAL_REQUIREMENTS = set() diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 4efb679a04..ede6bc8b1e 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -336,7 +336,7 @@ class LoginRestServlet(RestServlet): } else: user_id, access_token = ( - yield self.handlers.registration_handler.register(localpart=user) + yield self.registration_handler.register(localpart=user) ) device_id = login_submission.get("device_id") -- cgit 1.4.1 From 2f7ebc2a559409abca9424bfb02cb287c4a21dc6 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 27 Jun 2019 13:49:48 +0100 Subject: Deprecate the env var way of running the docker image (#5566) This is mostly a documentation change, but also adds a default value for SYNAPSE_CONFIG_PATH, so that running from the generated config is the default, and will Just Work provided your config is in the right place. 
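Concretely, instead of requiring SYNAPSE_CONFIG_PATH to be set, the startup
script now falls back to a sensible default (these lines mirror the logic
added in the diff below):

    # Default to homeserver.yaml inside the config dir, so a previously
    # generated config Just Works without any environment variables.
    config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
    config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")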
--- changelog.d/5566.feature | 1 + contrib/docker/README.md | 4 + docker/README.md | 216 +++++++++++++++-------------------------------- docker/start.py | 32 +++++-- 4 files changed, 99 insertions(+), 154 deletions(-) create mode 100644 changelog.d/5566.feature diff --git a/changelog.d/5566.feature b/changelog.d/5566.feature new file mode 100644 index 0000000000..85380bc517 --- /dev/null +++ b/changelog.d/5566.feature @@ -0,0 +1 @@ +Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default. diff --git a/contrib/docker/README.md b/contrib/docker/README.md index 05254e5192..af102f7594 100644 --- a/contrib/docker/README.md +++ b/contrib/docker/README.md @@ -1,5 +1,9 @@ # Synapse Docker +FIXME: this is out-of-date as of +https://github.com/matrix-org/synapse/issues/5518. Contributions to bring it up +to date would be welcome. + ### Automated configuration It is recommended that you use Docker Compose to run your containers, including diff --git a/docker/README.md b/docker/README.md index a42d1d24b6..7f7d27ed33 100644 --- a/docker/README.md +++ b/docker/README.md @@ -6,39 +6,11 @@ postgres database. The image also does *not* provide a TURN server. -## Run - -### Using docker-compose (easier) - -This image is designed to run either with an automatically generated -configuration file or with a custom configuration that requires manual editing. - -An easy way to make use of this image is via docker-compose. See the -[contrib/docker](https://github.com/matrix-org/synapse/tree/master/contrib/docker) section of the synapse project for -examples. - -### Without Compose (harder) - -If you do not wish to use Compose, you may still run this image using plain -Docker commands. Note that the following is just a guideline and you may need -to add parameters to the docker run command to account for the network situation -with your postgres database. - -``` -docker run \ - -d \ - --name synapse \ - --mount type=volume,src=synapse-data,dst=/data \ - -e SYNAPSE_SERVER_NAME=my.matrix.host \ - -e SYNAPSE_REPORT_STATS=yes \ - -p 8448:8448 \ - matrixdotorg/synapse:latest -``` - ## Volumes -The image expects a single volume, located at ``/data``, that will hold: +By default, the image expects a single volume, located at ``/data``, that will hold: +* configuration files; * temporary files during uploads; * uploaded media and thumbnails; * the SQLite database if you do not configure postgres; @@ -53,142 +25,90 @@ In order to setup an application service, simply create an ``appservices`` directory in the data volume and write the application service Yaml configuration file there. Multiple application services are supported. -## TLS certificates - -Synapse requires a valid TLS certificate. You can do one of the following: - - * Provide your own certificate and key (as - `${DATA_PATH}/${SYNAPSE_SERVER_NAME}.tls.crt` and - `${DATA_PATH}/${SYNAPSE_SERVER_NAME}.tls.key`, or elsewhere by providing an - entire config as `${SYNAPSE_CONFIG_PATH}`). In this case, you should forward - traffic to port 8448 in the container, for example with `-p 443:8448`. - - * Use a reverse proxy to terminate incoming TLS, and forward the plain http - traffic to port 8008 in the container. In this case you should set `-e - SYNAPSE_NO_TLS=1`. - - * Use the ACME (Let's Encrypt) support built into Synapse. This requires - `${SYNAPSE_SERVER_NAME}` port 80 to be forwarded to port 8009 in the - container, for example with `-p 80:8009`. 
To enable it in the docker - container, set `-e SYNAPSE_ACME=1`. - -If you don't do any of these, Synapse will fail to start with an error similar to: - - synapse.config._base.ConfigError: Error accessing file '/data/.tls.crt' (config for tls_certificate): No such file or directory - -## Environment - -Unless you specify a custom path for the configuration file, a very generic -file will be generated, based on the following environment settings. -These are a good starting point for setting up your own deployment. - -Global settings: - -* ``UID``, the user id Synapse will run as [default 991] -* ``GID``, the group id Synapse will run as [default 991] -* ``SYNAPSE_CONFIG_PATH``, path to a custom config file - -If ``SYNAPSE_CONFIG_PATH`` is set, you should generate a configuration file -then customize it manually: see [Generating a config -file](#generating-a-config-file). - -Otherwise, a dynamic configuration file will be used. - -### Environment variables used to build a dynamic configuration file - -The following environment variables are used to build the configuration file -when ``SYNAPSE_CONFIG_PATH`` is not set. - -* ``SYNAPSE_SERVER_NAME`` (mandatory), the server public hostname. -* ``SYNAPSE_REPORT_STATS``, (mandatory, ``yes`` or ``no``), enable anonymous - statistics reporting back to the Matrix project which helps us to get funding. -* `SYNAPSE_NO_TLS`, (accepts `true`, `false`, `on`, `off`, `1`, `0`, `yes`, `no`]): disable - TLS in Synapse (use this if you run your own TLS-capable reverse proxy). Defaults - to `false` (ie, TLS is enabled by default). -* ``SYNAPSE_ENABLE_REGISTRATION``, set this variable to enable registration on - the Synapse instance. -* ``SYNAPSE_ALLOW_GUEST``, set this variable to allow guest joining this server. -* ``SYNAPSE_EVENT_CACHE_SIZE``, the event cache size [default `10K`]. -* ``SYNAPSE_RECAPTCHA_PUBLIC_KEY``, set this variable to the recaptcha public - key in order to enable recaptcha upon registration. -* ``SYNAPSE_RECAPTCHA_PRIVATE_KEY``, set this variable to the recaptcha private - key in order to enable recaptcha upon registration. -* ``SYNAPSE_TURN_URIS``, set this variable to the coma-separated list of TURN - uris to enable TURN for this homeserver. -* ``SYNAPSE_TURN_SECRET``, set this to the TURN shared secret if required. -* ``SYNAPSE_MAX_UPLOAD_SIZE``, set this variable to change the max upload size - [default `10M`]. -* ``SYNAPSE_ACME``: set this to enable the ACME certificate renewal support. - -Shared secrets, that will be initialized to random values if not set: - -* ``SYNAPSE_REGISTRATION_SHARED_SECRET``, secret for registrering users if - registration is disable. -* ``SYNAPSE_MACAROON_SECRET_KEY`` secret for signing access tokens - to the server. - -Database specific values (will use SQLite if not set): - -* `POSTGRES_DB` - The database name for the synapse postgres - database. [default: `synapse`] -* `POSTGRES_HOST` - The host of the postgres database if you wish to use - postgresql instead of sqlite3. [default: `db` which is useful when using a - container on the same docker network in a compose file where the postgres - service is called `db`] -* `POSTGRES_PASSWORD` - The password for the synapse postgres database. **If - this is set then postgres will be used instead of sqlite3.** [default: none] - **NOTE**: You are highly encouraged to use postgresql! Please use the compose - file to make it easier to deploy. -* `POSTGRES_USER` - The user for the synapse postgres database. 
[default: - `synapse`] - -Mail server specific values (will not send emails if not set): - -* ``SYNAPSE_SMTP_HOST``, hostname to the mail server. -* ``SYNAPSE_SMTP_PORT``, TCP port for accessing the mail server [default - ``25``]. -* ``SYNAPSE_SMTP_USER``, username for authenticating against the mail server if - any. -* ``SYNAPSE_SMTP_PASSWORD``, password for authenticating against the mail - server if any. - -### Generating a config file - -It is possible to generate a basic configuration file for use with -`SYNAPSE_CONFIG_PATH` using the `generate` commandline option. You will need to -specify values for `SYNAPSE_CONFIG_PATH`, `SYNAPSE_SERVER_NAME` and -`SYNAPSE_REPORT_STATS`, and mount a docker volume to store the data on. For -example: +## Generating a configuration file + +The first step is to genearte a valid config file. To do this, you can run the +image with the `generate` commandline option. + +You will need to specify values for the `SYNAPSE_SERVER_NAME` and +`SYNAPSE_REPORT_STATS` environment variable, and mount a docker volume to store +the configuration on. For example: ``` docker run -it --rm \ --mount type=volume,src=synapse-data,dst=/data \ - -e SYNAPSE_CONFIG_PATH=/data/homeserver.yaml \ -e SYNAPSE_SERVER_NAME=my.matrix.host \ -e SYNAPSE_REPORT_STATS=yes \ matrixdotorg/synapse:latest generate ``` -This will generate a `homeserver.yaml` in (typically) -`/var/lib/docker/volumes/synapse-data/_data`, which you can then customise and -use with: +For information on picking a suitable server name, see +https://github.com/matrix-org/synapse/blob/master/INSTALL.md. -``` -docker run -d --name synapse \ - --mount type=volume,src=synapse-data,dst=/data \ - -e SYNAPSE_CONFIG_PATH=/data/homeserver.yaml \ - matrixdotorg/synapse:latest -``` +The above command will generate a `homeserver.yaml` in (typically) +`/var/lib/docker/volumes/synapse-data/_data`. You should check this file, and +customise it to your needs. -The following environment variables are supported in this mode: +The following environment variables are supported in `generate` mode: * `SYNAPSE_SERVER_NAME` (mandatory): the server public hostname. * `SYNAPSE_REPORT_STATS` (mandatory, `yes` or `no`): whether to enable anonymous statistics reporting. -* `SYNAPSE_CONFIG_PATH` (mandatory): path to the file to be generated. * `SYNAPSE_CONFIG_DIR`: where additional config files (such as the log config and event signing key) will be stored. Defaults to `/data`. +* `SYNAPSE_CONFIG_PATH`: path to the file to be generated. Defaults to + `/homeserver.yaml`. * `SYNAPSE_DATA_DIR`: where the generated config will put persistent data such as the datatase and media store. Defaults to `/data`. * `UID`, `GID`: the user id and group id to use for creating the data directories. Defaults to `991`, `991`. + + +## Running synapse + +Once you have a valid configuration file, you can start synapse as follows: + +``` +docker run -d --name synapse \ + --mount type=volume,src=synapse-data,dst=/data \ + -p 8008:8008 \ + matrixdotorg/synapse:latest +``` + +You can then check that it has started correctly with: + +``` +docker logs synapse +``` + +If all is well, you should now be able to connect to http://localhost:8008 and +see a confirmation message. + +The following environment variables are supported in run mode: + +* `SYNAPSE_CONFIG_DIR`: where additional config files are stored. Defaults to + `/data`. +* `SYNAPSE_CONFIG_PATH`: path to the config file. Defaults to + `/homeserver.yaml`. +* `UID`, `GID`: the user and group id to run Synapse as. 
Defaults to `991`, `991`. + +## TLS support + +The default configuration exposes a single HTTP port: http://localhost:8008. It +is suitable for local testing, but for any practical use, you will either need +to use a reverse proxy, or configure Synapse to expose an HTTPS port. + +For documentation on using a reverse proxy, see +https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.rst. + +For more information on enabling TLS support in synapse itself, see +https://github.com/matrix-org/synapse/blob/master/INSTALL.md#tls-certificates. Of +course, you will need to expose the TLS port from the container with a `-p` +argument to `docker run`. + +## Legacy dynamic configuration file support + +For backwards-compatibility only, the docker image supports creating a dynamic +configuration file based on environment variables. This is now deprecated, but +is enabled when the `SYNAPSE_SERVER_NAME` variable is set (and `generate` is +not given). diff --git a/docker/start.py b/docker/start.py index fdc3d59d86..89ec9ebaf8 100755 --- a/docker/start.py +++ b/docker/start.py @@ -131,12 +131,13 @@ def run_generate_config(environ, ownership): Never returns. """ - for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS", "SYNAPSE_CONFIG_PATH"): + for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"): if v not in environ: error("Environment variable '%s' is mandatory in `generate` mode." % (v,)) server_name = environ["SYNAPSE_SERVER_NAME"] config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data") + config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml") data_dir = environ.get("SYNAPSE_DATA_DIR", "/data") # create a suitable log config from our template @@ -157,7 +158,7 @@ def run_generate_config(environ, ownership): "--report-stats", environ["SYNAPSE_REPORT_STATS"], "--config-path", - environ["SYNAPSE_CONFIG_PATH"], + config_path, "--config-directory", config_dir, "--data-directory", @@ -176,11 +177,30 @@ def main(args, environ): if mode == "generate": return run_generate_config(environ, ownership) - # In normal mode, generate missing keys if any, then run synapse - if "SYNAPSE_CONFIG_PATH" in environ: - config_path = environ["SYNAPSE_CONFIG_PATH"] - else: + if "SYNAPSE_SERVER_NAME" in environ: + # backwards-compatibility generate-a-config-on-the-fly mode + if "SYNAPSE_CONFIG_PATH" in environ: + error( + "SYNAPSE_SERVER_NAME and SYNAPSE_CONFIG_PATH are mutually exclusive " + "except in `generate` mode." + ) + config_path = generate_config_from_template(environ, ownership) + else: + config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data") + config_path = environ.get( + "SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml" + ) + if not os.path.exists(config_path): + error( + "Config file '%s' does not exist. You should either create a new " + "config file by running with the `generate` argument (and then edit " + "the resulting file before restarting) or specify the path to an " + "existing config file with the SYNAPSE_CONFIG_PATH variable." + % (config_path,) + ) + + log("Starting synapse with config file " + config_path) args = [ "su-exec", -- cgit 1.4.1 From 1ddc7b39c90d0d823fc6935857eda5153b542d83 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 27 Jun 2019 13:50:10 +0100 Subject: Docker image: open the non-TLS port by default. (#5568) There's not much point in binding to localhost when it's in a docker container. 
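The change amounts to one extra flag on the `--generate-config` invocation in
docker/start.py (a sketch; the full argument list is in the diff below):

    # Appended to the existing generate-config arguments: the generated
    # listener config now binds to all interfaces, not just localhost.
    args.append("--open-private-ports")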
--- changelog.d/5568.feature | 1 + docker/start.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog.d/5568.feature
diff --git a/changelog.d/5568.feature b/changelog.d/5568.feature new file mode 100644 index 0000000000..59b9e5f96d --- /dev/null +++ b/changelog.d/5568.feature @@ -0,0 +1 @@ +Docker image: open the non-TLS port by default.
diff --git a/docker/start.py b/docker/start.py index 89ec9ebaf8..2a13308dae 100755 --- a/docker/start.py +++ b/docker/start.py @@ -164,6 +164,7 @@ def run_generate_config(environ, ownership): "--data-directory", data_dir, "--generate-config", + "--open-private-ports", ] # log("running %s" % (args, )) os.execv("/usr/local/bin/python", args) -- cgit 1.4.1
From 555b6fa0d534ac897b139ee1e57a651e50e1aec5 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 27 Jun 2019 13:52:40 +0100 Subject: Docker image: Add a migrate_config mode (#5567)
... to help people escape env var hell --- changelog.d/5567.feature | 1 + docker/README.md | 16 +++++++++++++ docker/conf/homeserver.yaml | 2 +- docker/start.py | 56 ++++++++++++++++++++++++++++++++------------- 4 files changed, 58 insertions(+), 17 deletions(-) create mode 100644 changelog.d/5567.feature
diff --git a/changelog.d/5567.feature b/changelog.d/5567.feature new file mode 100644 index 0000000000..85380bc517 --- /dev/null +++ b/changelog.d/5567.feature @@ -0,0 +1 @@ +Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default.
diff --git a/docker/README.md b/docker/README.md index 7f7d27ed33..b62417c281 100644 --- a/docker/README.md +++ b/docker/README.md @@ -112,3 +112,19 @@ For backwards-compatibility only, the docker image supports creating a dynamic configuration file based on environment variables. This is now deprecated, but is enabled when the `SYNAPSE_SERVER_NAME` variable is set (and `generate` is not given). + +To migrate from a dynamic configuration file to a static one, run the docker +container once with the environment variables set, and the `migrate_config` +commandline option. For example: + +``` +docker run -it --rm \ + --mount type=volume,src=synapse-data,dst=/data \ + -e SYNAPSE_SERVER_NAME=my.matrix.host \ + -e SYNAPSE_REPORT_STATS=yes \ + matrixdotorg/synapse:latest migrate_config +``` + +This will generate the same configuration file as the legacy mode used, but +will store it in `/data/homeserver.yaml` instead of a temporary location. You +can then use it as shown above at [Running synapse](#running-synapse).
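With `migrate_config` in place, the entrypoint recognises three ways of running, selected by the first commandline argument. Condensing the `docker/start.py` hunks of this commit into one place (a sketch for orientation, not the verbatim code; `error`, `run_generate_config` and `generate_config_from_template` are the helpers defined in that file):

```
def main(args, environ):
    mode = args[1] if len(args) > 1 else None
    ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991))

    if mode == "generate":
        # write a template config plus keys, then exit
        return run_generate_config(environ, ownership)

    if mode == "migrate_config":
        # render the legacy env-var config to a persistent location
        config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
        config_path = environ.get(
            "SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
        )
        return generate_config_from_template(
            config_dir, config_path, environ, ownership
        )

    if mode is not None:
        error("Unknown execution mode '%s'" % (mode,))

    # no mode given: fall through to run mode (deprecated on-the-fly config
    # if SYNAPSE_SERVER_NAME is set, otherwise a static config file)
```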
diff --git a/docker/conf/homeserver.yaml b/docker/conf/homeserver.yaml index babd5bef9e..b0267b1c60 100644 --- a/docker/conf/homeserver.yaml +++ b/docker/conf/homeserver.yaml @@ -21,7 +21,7 @@ server_name: "{{ SYNAPSE_SERVER_NAME }}" pid_file: /homeserver.pid web_client: False soft_file_limit: 0 -log_config: "/compiled/log.config" +log_config: "{{ SYNAPSE_LOG_CONFIG }}" ## Ports ## diff --git a/docker/start.py b/docker/start.py index 2a13308dae..40a861f200 100755 --- a/docker/start.py +++ b/docker/start.py @@ -34,22 +34,21 @@ def convert(src, dst, environ): outfile.write(rendered) -def generate_config_from_template(environ, ownership): +def generate_config_from_template(config_dir, config_path, environ, ownership): """Generate a homeserver.yaml from environment variables Args: + config_dir (str): where to put generated config files + config_path (str): where to put the main config file environ (dict): environment dictionary ownership (str): ":" string which will be used to set ownership of the generated configs - - Returns: - path to generated config file """ for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"): if v not in environ: error( - "Environment variable '%s' is mandatory when generating a config " - "file on-the-fly." % (v,) + "Environment variable '%s' is mandatory when generating a config file." + % (v,) ) # populate some params from data files (if they exist, else create new ones) @@ -78,10 +77,8 @@ def generate_config_from_template(environ, ownership): environ[secret] = value environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml") - if not os.path.exists("/compiled"): - os.mkdir("/compiled") - - config_path = "/compiled/homeserver.yaml" + if not os.path.exists(config_dir): + os.mkdir(config_dir) # Convert SYNAPSE_NO_TLS to boolean if exists if "SYNAPSE_NO_TLS" in environ: @@ -98,8 +95,16 @@ def generate_config_from_template(environ, ownership): + '" unrecognized; exiting.' ) + if "SYNAPSE_LOG_CONFIG" not in environ: + environ["SYNAPSE_LOG_CONFIG"] = config_dir + "/log.config" + + log("Generating synapse config file " + config_path) convert("/conf/homeserver.yaml", config_path, environ) - convert("/conf/log.config", "/compiled/log.config", environ) + + log_config_file = environ["SYNAPSE_LOG_CONFIG"] + log("Generating log config file " + log_config_file) + convert("/conf/log.config", log_config_file, environ) + subprocess.check_output(["chown", "-R", ownership, "/data"]) # Hopefully we already have a signing key, but generate one if not. @@ -114,13 +119,11 @@ def generate_config_from_template(environ, ownership): config_path, # tell synapse to put generated keys in /data rather than /compiled "--keys-directory", - "/data", + config_dir, "--generate-keys", ] ) - return config_path - def run_generate_config(environ, ownership): """Run synapse with a --generate-config param to generate a template config file @@ -178,15 +181,36 @@ def main(args, environ): if mode == "generate": return run_generate_config(environ, ownership) + if mode == "migrate_config": + # generate a config based on environment vars. 
+ config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data") + config_path = environ.get( + "SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml" + ) + return generate_config_from_template( + config_dir, config_path, environ, ownership + ) + + if mode is not None: + error("Unknown execution mode '%s'" % (mode,)) + if "SYNAPSE_SERVER_NAME" in environ: # backwards-compatibility generate-a-config-on-the-fly mode if "SYNAPSE_CONFIG_PATH" in environ: error( "SYNAPSE_SERVER_NAME and SYNAPSE_CONFIG_PATH are mutually exclusive " - "except in `generate` mode." + "except in `generate` or `migrate_config` mode." ) - config_path = generate_config_from_template(environ, ownership) + config_path = "/compiled/homeserver.yaml" + log( + "Generating config file '%s' on-the-fly from environment variables.\n" + "Note that this mode is deprecated. You can migrate to a static config\n" + "file by running with 'migrate_config'. See the README for more details." + % (config_path,) + ) + + generate_config_from_template("/compiled", config_path, environ, ownership) else: config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data") config_path = environ.get( -- cgit 1.4.1 From 729f5a4fb6654e6c9beb68a3edbb8dbbae076e3f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 27 Jun 2019 16:06:23 +0100 Subject: Review comments --- synapse/handlers/sync.py | 8 ++++---- synapse/storage/devices.py | 4 +--- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 4f737d0a12..a3f550554f 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1089,9 +1089,9 @@ class SyncHandler(object): # for anymore. # # For the first step we check: - # 1. if any users we share a room with have updated their devices, + # a. if any users we share a room with have updated their devices, # and - # 2. we also check if we've joined any new rooms, or if a user has + # b. we also check if we've joined any new rooms, or if a user has # joined a room we're in. # # For the second step we just find any users we no longer share a @@ -1102,12 +1102,12 @@ class SyncHandler(object): user_id ) - # Step 1, check for changes in devices of users we share a room with + # Step 1a, check for changes in devices of users we share a room with users_that_have_changed = yield self.store.get_users_whose_devices_changed( since_token.device_list_key, users_who_share_room ) - # Step 2, check for newly joined rooms + # Step 1b, check for newly joined rooms for room_id in newly_joined_rooms: joined_users = yield self.state.get_current_users_in_room(room_id) newly_joined_or_invited_users.update(joined_users) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 44324bf400..d2b113a4e7 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -425,9 +425,7 @@ class DeviceWorkerStore(SQLBaseStore): """ for chunk in batch_iter(to_check, 100): - txn.execute( - sql % (",".join("?" for _ in chunk),), [from_key] + list(chunk) - ) + txn.execute(sql % (",".join("?" 
for _ in chunk),), (from_key,) + chunk) changes.update(user_id for user_id, in txn) return changes -- cgit 1.4.1 From c548dbc4b16a8bff6226fa4a6d7c86180c4f5704 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 27 Jun 2019 18:20:17 +0100 Subject: Make it clearer that the template dir is relative to synapse's root dir (#5543) Helps address #5444 --- changelog.d/5543.misc | 1 + docs/sample_config.yaml | 10 +++++++++- synapse/config/emailconfig.py | 10 +++++++++- 3 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 changelog.d/5543.misc diff --git a/changelog.d/5543.misc b/changelog.d/5543.misc new file mode 100644 index 0000000000..793620a731 --- /dev/null +++ b/changelog.d/5543.misc @@ -0,0 +1 @@ +Make the config clearer in that email.template_dir is relative to the Synapse's root directory, not the `synapse/` folder within it. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index da10788e96..18be376e1e 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -1070,11 +1070,13 @@ password_config: # app_name: Matrix # # # Enable email notifications by default +# # # notif_for_new_users: True # # # Defining a custom URL for Riot is only needed if email notifications # # should contain links to a self-hosted installation of Riot; when set # # the "app_name" setting is ignored +# # # riot_base_url: "http://localhost/riot" # # # Enable sending password reset emails via the configured, trusted @@ -1087,16 +1089,22 @@ password_config: # # # # If this option is set to false and SMTP options have not been # # configured, resetting user passwords via email will be disabled +# # # #trust_identity_server_for_password_resets: false # # # Configure the time that a validation email or text message code # # will expire after sending # # # # This is currently used for password resets +# # # #validation_token_lifetime: 1h # # # Template directory. All template files should be stored within this -# # directory +# # directory. If not set, default templates from within the Synapse +# # package will be used +# # +# # For the list of default templates, please see +# # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates # # # #template_dir: res/templates # diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index cf39936da7..fcd55d3e3d 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -233,11 +233,13 @@ class EmailConfig(Config): # app_name: Matrix # # # Enable email notifications by default + # # # notif_for_new_users: True # # # Defining a custom URL for Riot is only needed if email notifications # # should contain links to a self-hosted installation of Riot; when set # # the "app_name" setting is ignored + # # # riot_base_url: "http://localhost/riot" # # # Enable sending password reset emails via the configured, trusted @@ -250,16 +252,22 @@ class EmailConfig(Config): # # # # If this option is set to false and SMTP options have not been # # configured, resetting user passwords via email will be disabled + # # # #trust_identity_server_for_password_resets: false # # # Configure the time that a validation email or text message code # # will expire after sending # # # # This is currently used for password resets + # # # #validation_token_lifetime: 1h # # # Template directory. All template files should be stored within this - # # directory + # # directory. 
If not set, default templates from within the Synapse + # package will be used + # + # For the list of default templates, please see + # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates # # # #template_dir: res/templates # -- cgit 1.4.1
From 457b8e4c4d15f43f9f16b22fdfa79d5b2017e5cd Mon Sep 17 00:00:00 2001 From: Silke Hofstra Date: Thu, 27 Jun 2019 19:26:41 +0200 Subject: Include systemd-python in Debian package to allow logging to journal (#5261)
Signed-off-by: Silke Hofstra --- debian/build_virtualenv | 2 +- debian/changelog | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-)
diff --git a/debian/build_virtualenv b/debian/build_virtualenv index bead3ebc6e..2791896052 100755 --- a/debian/build_virtualenv +++ b/debian/build_virtualenv @@ -43,7 +43,7 @@ dh_virtualenv \ --preinstall="mock" \ --extra-pip-arg="--no-cache-dir" \ --extra-pip-arg="--compile" \ - --extras="all" + --extras="all,systemd"
PACKAGE_BUILD_DIR="debian/matrix-synapse-py3" VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse" diff --git a/debian/changelog b/debian/changelog index ef4edd7ac0..91653e7243 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,10 @@ +matrix-synapse-py3 (1.0.0+nmu1) UNRELEASED; urgency=medium + + [ Silke Hofstra ] + * Include systemd-python to allow logging to the systemd journal. + + -- Silke Hofstra Wed, 29 May 2019 09:45:29 +0200 + matrix-synapse-py3 (1.0.0) stable; urgency=medium * New synapse release 1.0.0. -- cgit 1.4.1
From 9646a593ac555e7b68c6133c29a9f5bac83d1c2f Mon Sep 17 00:00:00 2001 From: Daniel Hoffend Date: Thu, 27 Jun 2019 19:37:29 +0200 Subject: Added possibility to disable local password authentication (#5092)
Signed-off-by: Daniel Hoffend --- changelog.d/5092.feature | 1 + docs/sample_config.yaml | 6 ++++++ synapse/config/password.py | 7 +++++++ synapse/handlers/auth.py | 2 +- synapse/handlers/set_password.py | 3 +++ 5 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 changelog.d/5092.feature
diff --git a/changelog.d/5092.feature b/changelog.d/5092.feature new file mode 100644 index 0000000000..c22f586d08 --- /dev/null +++ b/changelog.d/5092.feature @@ -0,0 +1 @@ +Added possibility to disable local password authentication. Contributed by Daniel Hoffend.
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 18be376e1e..a01e1152f7 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -1046,6 +1046,12 @@ password_config: # #enabled: false
+ # Uncomment to disable authentication against the local password + # database. This is ignored if `enabled` is false, and is only useful + # if you have other password_providers. + # + #localdb_enabled: false + # Uncomment and change to a secret random string for extra security. # DO NOT CHANGE THIS AFTER INITIAL SETUP! # diff --git a/synapse/config/password.py b/synapse/config/password.py index 598f84fc0c..d5b5953f2f 100644 --- a/synapse/config/password.py +++ b/synapse/config/password.py @@ -26,6 +26,7 @@ class PasswordConfig(Config): password_config = {} self.password_enabled = password_config.get("enabled", True) + self.password_localdb_enabled = password_config.get("localdb_enabled", True) self.password_pepper = password_config.get("pepper", "") def generate_config_section(self, config_dir_path, server_name, **kwargs): @@ -35,6 +36,12 @@ # #enabled: false
+ # Uncomment to disable authentication against the local password + # database.
This is ignored if `enabled` is false, and is only useful + # if you have other password_providers. + # + #localdb_enabled: false + # Uncomment and change to a secret random string for extra security. # DO NOT CHANGE THIS AFTER INITIAL SETUP! # diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 97b21c4093..c8c1ed3246 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -743,7 +743,7 @@ class AuthHandler(BaseHandler): result = (result, None) defer.returnValue(result) - if login_type == LoginType.PASSWORD: + if login_type == LoginType.PASSWORD and self.hs.config.password_localdb_enabled: known_login_type = True canonical_user_id = yield self._check_local_password( diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py index 5a0995d4fe..d90c9e0108 100644 --- a/synapse/handlers/set_password.py +++ b/synapse/handlers/set_password.py @@ -33,6 +33,9 @@ class SetPasswordHandler(BaseHandler): @defer.inlineCallbacks def set_password(self, user_id, newpassword, requester=None): + if not self.hs.config.password_localdb_enabled: + raise SynapseError(403, "Password change disabled", errcode=Codes.FORBIDDEN) + password_hash = yield self._auth_handler.hash(newpassword) except_device_id = requester.device_id if requester else None -- cgit 1.4.1 From be3b901ccdf28d0f81d312d7cd8b7bedb22b4049 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 28 Jun 2019 18:19:09 +1000 Subject: Update the TLS cipher string and provide configurability for TLS on outgoing federation (#5550) --- changelog.d/5550.feature | 1 + changelog.d/5550.misc | 1 + docs/sample_config.yaml | 9 +++ scripts/generate_config | 2 +- synapse/config/tls.py | 32 ++++++++++- synapse/crypto/context_factory.py | 39 +++++++++++-- tests/config/test_tls.py | 115 +++++++++++++++++++++++++++++++++++++- 7 files changed, 190 insertions(+), 9 deletions(-) create mode 100644 changelog.d/5550.feature create mode 100644 changelog.d/5550.misc diff --git a/changelog.d/5550.feature b/changelog.d/5550.feature new file mode 100644 index 0000000000..79ecedf3b8 --- /dev/null +++ b/changelog.d/5550.feature @@ -0,0 +1 @@ +The minimum TLS version used for outgoing federation requests can now be set with `federation_client_minimum_tls_version`. diff --git a/changelog.d/5550.misc b/changelog.d/5550.misc new file mode 100644 index 0000000000..ad5693338e --- /dev/null +++ b/changelog.d/5550.misc @@ -0,0 +1 @@ +Synapse will now only allow TLS v1.2 connections when serving federation, if it terminates TLS. As Synapse's allowed ciphers were only able to be used in TLSv1.2 before, this does not change behaviour. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index a01e1152f7..bf9cd88b15 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -317,6 +317,15 @@ listeners: # #federation_verify_certificates: false +# The minimum TLS version that will be used for outbound federation requests. +# +# Defaults to `1`. Configurable to `1`, `1.1`, `1.2`, or `1.3`. Note +# that setting this value higher than `1.2` will prevent federation to most +# of the public Matrix network: only configure it to `1.3` if you have an +# entirely private federation setup and you can ensure TLS 1.3 support. +# +#federation_client_minimum_tls_version: 1.2 + # Skip federation certificate verification on the following whitelist # of domains. 
# diff --git a/scripts/generate_config b/scripts/generate_config index 93b6406992..771cbf8d95 100755 --- a/scripts/generate_config +++ b/scripts/generate_config @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 import argparse import shutil diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 8fcf801418..ca508a224f 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -23,7 +23,7 @@ import six from unpaddedbase64 import encode_base64 -from OpenSSL import crypto +from OpenSSL import SSL, crypto from twisted.internet._sslverify import Certificate, trustRootFromCertificates from synapse.config._base import Config, ConfigError @@ -81,6 +81,27 @@ class TlsConfig(Config): "federation_verify_certificates", True ) + # Minimum TLS version to use for outbound federation traffic + self.federation_client_minimum_tls_version = str( + config.get("federation_client_minimum_tls_version", 1) + ) + + if self.federation_client_minimum_tls_version not in ["1", "1.1", "1.2", "1.3"]: + raise ConfigError( + "federation_client_minimum_tls_version must be one of: 1, 1.1, 1.2, 1.3" + ) + + # Prevent people shooting themselves in the foot here by setting it to + # the biggest number blindly + if self.federation_client_minimum_tls_version == "1.3": + if getattr(SSL, "OP_NO_TLSv1_3", None) is None: + raise ConfigError( + ( + "federation_client_minimum_tls_version cannot be 1.3, " + "your OpenSSL does not support it" + ) + ) + # Whitelist of domains to not verify certificates for fed_whitelist_entries = config.get( "federation_certificate_verification_whitelist", [] @@ -261,6 +282,15 @@ class TlsConfig(Config): # #federation_verify_certificates: false + # The minimum TLS version that will be used for outbound federation requests. + # + # Defaults to `1`. Configurable to `1`, `1.1`, `1.2`, or `1.3`. Note + # that setting this value higher than `1.2` will prevent federation to most + # of the public Matrix network: only configure it to `1.3` if you have an + # entirely private federation setup and you can ensure TLS 1.3 support. + # + #federation_client_minimum_tls_version: 1.2 + # Skip federation certificate verification on the following whitelist # of domains. 
# diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 2bc5cc3807..4f48e8e88d 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -24,12 +24,25 @@ from OpenSSL import SSL, crypto from twisted.internet._sslverify import _defaultCurveName from twisted.internet.abstract import isIPAddress, isIPv6Address from twisted.internet.interfaces import IOpenSSLClientConnectionCreator -from twisted.internet.ssl import CertificateOptions, ContextFactory, platformTrust +from twisted.internet.ssl import ( + CertificateOptions, + ContextFactory, + TLSVersion, + platformTrust, +) from twisted.python.failure import Failure logger = logging.getLogger(__name__) +_TLS_VERSION_MAP = { + "1": TLSVersion.TLSv1_0, + "1.1": TLSVersion.TLSv1_1, + "1.2": TLSVersion.TLSv1_2, + "1.3": TLSVersion.TLSv1_3, +} + + class ServerContextFactory(ContextFactory): """Factory for PyOpenSSL SSL contexts that are used to handle incoming connections.""" @@ -43,16 +56,18 @@ class ServerContextFactory(ContextFactory): try: _ecCurve = crypto.get_elliptic_curve(_defaultCurveName) context.set_tmp_ecdh(_ecCurve) - except Exception: logger.exception("Failed to enable elliptic curve for TLS") - context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3) + + context.set_options( + SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3 | SSL.OP_NO_TLSv1 | SSL.OP_NO_TLSv1_1 + ) context.use_certificate_chain_file(config.tls_certificate_file) context.use_privatekey(config.tls_private_key) # https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ context.set_cipher_list( - "ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1" + "ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1:!AESCCM" ) def getContext(self): @@ -79,10 +94,22 @@ class ClientTLSOptionsFactory(object): # Use CA root certs provided by OpenSSL trust_root = platformTrust() - self._verify_ssl_context = CertificateOptions(trustRoot=trust_root).getContext() + # "insecurelyLowerMinimumTo" is the argument that will go lower than + # Twisted's default, which is why it is marked as "insecure" (since + # Twisted's defaults are reasonably secure). But, since Twisted is + # moving to TLS 1.2 by default, we want to respect the config option if + # it is set to 1.0 (which the alternate option, raiseMinimumTo, will not + # let us do). + minTLS = _TLS_VERSION_MAP[config.federation_client_minimum_tls_version] + + self._verify_ssl = CertificateOptions( + trustRoot=trust_root, insecurelyLowerMinimumTo=minTLS + ) + self._verify_ssl_context = self._verify_ssl.getContext() self._verify_ssl_context.set_info_callback(self._context_info_cb) - self._no_verify_ssl_context = CertificateOptions().getContext() + self._no_verify_ssl = CertificateOptions(insecurelyLowerMinimumTo=minTLS) + self._no_verify_ssl_context = self._no_verify_ssl.getContext() self._no_verify_ssl_context.set_info_callback(self._context_info_cb) def get_options(self, host): diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py index a5d88d644a..4f8a87a3df 100644 --- a/tests/config/test_tls.py +++ b/tests/config/test_tls.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd +# Copyright 2019 Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -15,7 +16,10 @@ import os -from synapse.config.tls import TlsConfig +from OpenSSL import SSL + +from synapse.config.tls import ConfigError, TlsConfig +from synapse.crypto.context_factory import ClientTLSOptionsFactory from tests.unittest import TestCase @@ -78,3 +82,112 @@ s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg= "or use Synapse's ACME support to provision one." ), ) + + def test_tls_client_minimum_default(self): + """ + The default client TLS version is 1.0. + """ + config = {} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + + self.assertEqual(t.federation_client_minimum_tls_version, "1") + + def test_tls_client_minimum_set(self): + """ + The default client TLS version can be set to 1.0, 1.1, and 1.2. + """ + config = {"federation_client_minimum_tls_version": 1} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual(t.federation_client_minimum_tls_version, "1") + + config = {"federation_client_minimum_tls_version": 1.1} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual(t.federation_client_minimum_tls_version, "1.1") + + config = {"federation_client_minimum_tls_version": 1.2} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual(t.federation_client_minimum_tls_version, "1.2") + + # Also test a string version + config = {"federation_client_minimum_tls_version": "1"} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual(t.federation_client_minimum_tls_version, "1") + + config = {"federation_client_minimum_tls_version": "1.2"} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual(t.federation_client_minimum_tls_version, "1.2") + + def test_tls_client_minimum_1_point_3_missing(self): + """ + If TLS 1.3 support is missing and it's configured, it will raise a + ConfigError. + """ + # thanks i hate it + if hasattr(SSL, "OP_NO_TLSv1_3"): + OP_NO_TLSv1_3 = SSL.OP_NO_TLSv1_3 + delattr(SSL, "OP_NO_TLSv1_3") + self.addCleanup(setattr, SSL, "SSL.OP_NO_TLSv1_3", OP_NO_TLSv1_3) + assert not hasattr(SSL, "OP_NO_TLSv1_3") + + config = {"federation_client_minimum_tls_version": 1.3} + t = TestConfig() + with self.assertRaises(ConfigError) as e: + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual( + e.exception.args[0], + ( + "federation_client_minimum_tls_version cannot be 1.3, " + "your OpenSSL does not support it" + ), + ) + + def test_tls_client_minimum_1_point_3_exists(self): + """ + If TLS 1.3 support exists and it's configured, it will be settable. + """ + # thanks i hate it, still + if not hasattr(SSL, "OP_NO_TLSv1_3"): + SSL.OP_NO_TLSv1_3 = 0x00 + self.addCleanup(lambda: delattr(SSL, "OP_NO_TLSv1_3")) + assert hasattr(SSL, "OP_NO_TLSv1_3") + + config = {"federation_client_minimum_tls_version": 1.3} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual(t.federation_client_minimum_tls_version, "1.3") + + def test_tls_client_minimum_set_passed_through_1_2(self): + """ + The configured TLS version is correctly configured by the ContextFactory. 
+ """ + config = {"federation_client_minimum_tls_version": 1.2} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + + cf = ClientTLSOptionsFactory(t) + + # The context has had NO_TLSv1_1 and NO_TLSv1_0 set, but not NO_TLSv1_2 + self.assertNotEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1, 0) + self.assertNotEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_1, 0) + self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_2, 0) + + def test_tls_client_minimum_set_passed_through_1_0(self): + """ + The configured TLS version is correctly configured by the ContextFactory. + """ + config = {"federation_client_minimum_tls_version": 1} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + + cf = ClientTLSOptionsFactory(t) + + # The context has not had any of the NO_TLS set. + self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1, 0) + self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_1, 0) + self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_2, 0) -- cgit 1.4.1 From 071150ce19305e7f7a54f9794a1c7f2a05bc0610 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 28 Jun 2019 21:45:33 +1000 Subject: Don't log GC 0s at INFO (#5557) --- changelog.d/5557.misc | 1 + synapse/metrics/__init__.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 changelog.d/5557.misc diff --git a/changelog.d/5557.misc b/changelog.d/5557.misc new file mode 100644 index 0000000000..0c90f49871 --- /dev/null +++ b/changelog.d/5557.misc @@ -0,0 +1 @@ +Logging when running GC collection on generation 0 is now at the DEBUG level, not INFO. diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 1f30179b51..eaf0aaa86e 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -437,7 +437,10 @@ def runUntilCurrentTimer(func): counts = gc.get_count() for i in (2, 1, 0): if threshold[i] < counts[i]: - logger.info("Collecting gc %d", i) + if i == 0: + logger.debug("Collecting gc %d", i) + else: + logger.info("Collecting gc %d", i) start = time.time() unreachable = gc.collect(i) -- cgit 1.4.1 From 01d0f8e701b4c2ddd04eee1a26edef952c0ac558 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 28 Jun 2019 15:17:15 +0100 Subject: Don't update the ratelimiter before sending a 3PID invite This would cause emails being sent, but Synapse responding with a 429 when creating the event. The client would then retry, and with bad timing the same scenario would happen again. Some testing I did ended up sending me 10 emails for one single invite because of this. --- synapse/handlers/room_member.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 4d6e883802..c860acf970 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -676,7 +676,7 @@ class RoomMemberHandler(object): # We need to rate limit *before* we send out any 3PID invites, so we # can't just rely on the standard ratelimiting of events. 
- yield self.base_handler.ratelimit(requester) + yield self.base_handler.ratelimit(requester, update=False) can_invite = yield self.third_party_event_rules.check_threepid_can_be_invited( medium, address, room_id -- cgit 1.4.1 From b339f6489f64b2e970b120245b9f7b87f1c17aa9 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 28 Jun 2019 15:24:59 +0100 Subject: Changelog --- changelog.d/5576.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5576.bugfix diff --git a/changelog.d/5576.bugfix b/changelog.d/5576.bugfix new file mode 100644 index 0000000000..c1ba5581f2 --- /dev/null +++ b/changelog.d/5576.bugfix @@ -0,0 +1 @@ +Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. -- cgit 1.4.1 From 15d9fc31bd549e2b9c04f96a0d3e8938c1bdc6a5 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 28 Jun 2019 16:04:05 +0100 Subject: Only ratelimit when sending the email If we do the opposite, an event can arrive after or while sending the email and the 3PID invite event will get ratelimited. --- synapse/handlers/room_member.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index c860acf970..66b05b4732 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -676,7 +676,7 @@ class RoomMemberHandler(object): # We need to rate limit *before* we send out any 3PID invites, so we # can't just rely on the standard ratelimiting of events. - yield self.base_handler.ratelimit(requester, update=False) + yield self.base_handler.ratelimit(requester) can_invite = yield self.third_party_event_rules.check_threepid_can_be_invited( medium, address, room_id @@ -823,6 +823,7 @@ class RoomMemberHandler(object): "sender": user.to_string(), "state_key": token, }, + ratelimit=False, txn_id=txn_id, ) -- cgit 1.4.1 From f40a7dc41fdd738a74546ff22b110a4c8ab850fe Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Sat, 29 Jun 2019 17:06:55 +1000 Subject: Make the http server handle coroutine-making REST servlets (#5475) --- changelog.d/5475.misc | 1 + synapse/http/server.py | 77 ++++++++++++++------------- synapse/rest/consent/consent_resource.py | 35 +++++------- synapse/rest/key/v2/remote_key_resource.py | 28 ++++------ synapse/rest/media/v1/config_resource.py | 21 ++++---- synapse/rest/media/v1/download_resource.py | 26 ++++----- synapse/rest/media/v1/preview_url_resource.py | 18 +++---- synapse/rest/media/v1/thumbnail_resource.py | 27 +++++----- synapse/rest/media/v1/upload_resource.py | 23 ++++---- synapse/rest/saml2/response_resource.py | 15 ++---- tests/rest/media/v1/test_media_storage.py | 25 +++++---- tests/unittest.py | 40 +++++++++++--- 12 files changed, 162 insertions(+), 174 deletions(-) create mode 100644 changelog.d/5475.misc diff --git a/changelog.d/5475.misc b/changelog.d/5475.misc new file mode 100644 index 0000000000..6be06d4d0b --- /dev/null +++ b/changelog.d/5475.misc @@ -0,0 +1 @@ +Synapse can now handle RestServlets that return coroutines. 
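The heart of this change is small: a servlet callback may now return a plain value, a Deferred, or a coroutine, and the HTTP layer awaits the latter two before sending the response. A minimal sketch of that compatibility check (the helper name `unwrap_callback_return` is invented here for illustration; in the diff below the equivalent logic is inlined into `JsonResource._async_render`):

```
import types

from twisted.internet import defer


async def unwrap_callback_return(callback_return):
    # Await Deferreds and coroutines; pass synchronous results through as-is.
    if isinstance(callback_return, (defer.Deferred, types.CoroutineType)):
        callback_return = await callback_return
    return callback_return
```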
diff --git a/synapse/http/server.py b/synapse/http/server.py index 6fd13e87d1..f067c163c1 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -16,10 +16,11 @@ import cgi import collections +import http.client import logging - -from six import PY3 -from six.moves import http_client, urllib +import types +import urllib +from io import BytesIO from canonicaljson import encode_canonical_json, encode_pretty_printed_json, json @@ -41,11 +42,6 @@ from synapse.api.errors import ( from synapse.util.caches import intern_dict from synapse.util.logcontext import preserve_fn -if PY3: - from io import BytesIO -else: - from cStringIO import StringIO as BytesIO - logger = logging.getLogger(__name__) HTML_ERROR_TEMPLATE = """ @@ -75,10 +71,9 @@ def wrap_json_request_handler(h): deferred fails with any other type of error we send a 500 reponse. """ - @defer.inlineCallbacks - def wrapped_request_handler(self, request): + async def wrapped_request_handler(self, request): try: - yield h(self, request) + await h(self, request) except SynapseError as e: code = e.code logger.info("%s SynapseError: %s - %s", request, code, e.msg) @@ -142,10 +137,12 @@ def wrap_html_request_handler(h): where "request" must be a SynapseRequest. """ - def wrapped_request_handler(self, request): - d = defer.maybeDeferred(h, self, request) - d.addErrback(_return_html_error, request) - return d + async def wrapped_request_handler(self, request): + try: + return await h(self, request) + except Exception: + f = failure.Failure() + return _return_html_error(f, request) return wrap_async_request_handler(wrapped_request_handler) @@ -171,7 +168,7 @@ def _return_html_error(f, request): exc_info=(f.type, f.value, f.getTracebackObject()), ) else: - code = http_client.INTERNAL_SERVER_ERROR + code = http.client.INTERNAL_SERVER_ERROR msg = "Internal server error" logger.error( @@ -201,10 +198,9 @@ def wrap_async_request_handler(h): logged until the deferred completes. """ - @defer.inlineCallbacks - def wrapped_async_request_handler(self, request): + async def wrapped_async_request_handler(self, request): with request.processing(): - yield h(self, request) + await h(self, request) # we need to preserve_fn here, because the synchronous render method won't yield for # us (obviously) @@ -270,12 +266,11 @@ class JsonResource(HttpServer, resource.Resource): def render(self, request): """ This gets called by twisted every time someone sends us a request. """ - self._async_render(request) + defer.ensureDeferred(self._async_render(request)) return NOT_DONE_YET @wrap_json_request_handler - @defer.inlineCallbacks - def _async_render(self, request): + async def _async_render(self, request): """ This gets called from render() every time someone sends us a request. This checks if anyone has registered a callback for that method and path. @@ -292,26 +287,19 @@ class JsonResource(HttpServer, resource.Resource): # Now trigger the callback. If it returns a response, we send it # here. If it throws an exception, that is handled by the wrapper # installed by @request_handler. - - def _unquote(s): - if PY3: - # On Python 3, unquote is unicode -> unicode - return urllib.parse.unquote(s) - else: - # On Python 2, unquote is bytes -> bytes We need to encode the - # URL again (as it was decoded by _get_handler_for request), as - # ASCII because it's a URL, and then decode it to get the UTF-8 - # characters that were quoted. 
- return urllib.parse.unquote(s.encode("ascii")).decode("utf8") - kwargs = intern_dict( { - name: _unquote(value) if value else value + name: urllib.parse.unquote(value) if value else value for name, value in group_dict.items() } ) - callback_return = yield callback(request, **kwargs) + callback_return = callback(request, **kwargs) + + # Is it synchronous? We'll allow this for now. + if isinstance(callback_return, (defer.Deferred, types.CoroutineType)): + callback_return = await callback_return + if callback_return is not None: code, response = callback_return self._send_response(request, code, response) @@ -360,6 +348,23 @@ class JsonResource(HttpServer, resource.Resource): ) +class DirectServeResource(resource.Resource): + def render(self, request): + """ + Render the request, using an asynchronous render handler if it exists. + """ + render_callback_name = "_async_render_" + request.method.decode("ascii") + + if hasattr(self, render_callback_name): + # Call the handler + callback = getattr(self, render_callback_name) + defer.ensureDeferred(callback(request)) + + return NOT_DONE_YET + else: + super().render(request) + + def _options_handler(request): """Request handler for OPTIONS requests diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index 9a32892d8b..624c42441e 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -23,13 +23,13 @@ from six.moves import http_client import jinja2 from jinja2 import TemplateNotFound -from twisted.internet import defer -from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET - from synapse.api.errors import NotFoundError, StoreError, SynapseError from synapse.config import ConfigError -from synapse.http.server import finish_request, wrap_html_request_handler +from synapse.http.server import ( + DirectServeResource, + finish_request, + wrap_html_request_handler, +) from synapse.http.servlet import parse_string from synapse.types import UserID @@ -47,7 +47,7 @@ else: return a == b -class ConsentResource(Resource): +class ConsentResource(DirectServeResource): """A twisted Resource to display a privacy policy and gather consent to it When accessed via GET, returns the privacy policy via a template. 
@@ -87,7 +87,7 @@ class ConsentResource(Resource): Args: hs (synapse.server.HomeServer): homeserver """ - Resource.__init__(self) + super().__init__() self.hs = hs self.store = hs.get_datastore() @@ -118,18 +118,12 @@ class ConsentResource(Resource): self._hmac_secret = hs.config.form_secret.encode("utf-8") - def render_GET(self, request): - self._async_render_GET(request) - return NOT_DONE_YET - @wrap_html_request_handler - @defer.inlineCallbacks - def _async_render_GET(self, request): + async def _async_render_GET(self, request): """ Args: request (twisted.web.http.Request): """ - version = parse_string(request, "v", default=self._default_consent_version) username = parse_string(request, "u", required=False, default="") userhmac = None @@ -145,7 +139,7 @@ class ConsentResource(Resource): else: qualified_user_id = UserID(username, self.hs.hostname).to_string() - u = yield self.store.get_user_by_id(qualified_user_id) + u = await self.store.get_user_by_id(qualified_user_id) if u is None: raise NotFoundError("Unknown user") @@ -165,13 +159,8 @@ class ConsentResource(Resource): except TemplateNotFound: raise NotFoundError("Unknown policy version") - def render_POST(self, request): - self._async_render_POST(request) - return NOT_DONE_YET - @wrap_html_request_handler - @defer.inlineCallbacks - def _async_render_POST(self, request): + async def _async_render_POST(self, request): """ Args: request (twisted.web.http.Request): @@ -188,12 +177,12 @@ class ConsentResource(Resource): qualified_user_id = UserID(username, self.hs.hostname).to_string() try: - yield self.store.user_set_consent_version(qualified_user_id, version) + await self.store.user_set_consent_version(qualified_user_id, version) except StoreError as e: if e.code != 404: raise raise NotFoundError("Unknown user") - yield self.registration_handler.post_consent_actions(qualified_user_id) + await self.registration_handler.post_consent_actions(qualified_user_id) try: self._render_template(request, "success.html") diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index ec8b9d7269..031a316693 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -16,18 +16,20 @@ import logging from io import BytesIO from twisted.internet import defer -from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET from synapse.api.errors import Codes, SynapseError from synapse.crypto.keyring import ServerKeyFetcher -from synapse.http.server import respond_with_json_bytes, wrap_json_request_handler +from synapse.http.server import ( + DirectServeResource, + respond_with_json_bytes, + wrap_json_request_handler, +) from synapse.http.servlet import parse_integer, parse_json_object_from_request logger = logging.getLogger(__name__) -class RemoteKey(Resource): +class RemoteKey(DirectServeResource): """HTTP resource for retreiving the TLS certificate and NACL signature verification keys for a collection of servers. Checks that the reported X.509 TLS certificate matches the one used in the HTTPS connection. 
Checks @@ -94,13 +96,8 @@ class RemoteKey(Resource): self.clock = hs.get_clock() self.federation_domain_whitelist = hs.config.federation_domain_whitelist - def render_GET(self, request): - self.async_render_GET(request) - return NOT_DONE_YET - @wrap_json_request_handler - @defer.inlineCallbacks - def async_render_GET(self, request): + async def _async_render_GET(self, request): if len(request.postpath) == 1: server, = request.postpath query = {server.decode("ascii"): {}} @@ -114,20 +111,15 @@ class RemoteKey(Resource): else: raise SynapseError(404, "Not found %r" % request.postpath, Codes.NOT_FOUND) - yield self.query_keys(request, query, query_remote_on_cache_miss=True) - - def render_POST(self, request): - self.async_render_POST(request) - return NOT_DONE_YET + await self.query_keys(request, query, query_remote_on_cache_miss=True) @wrap_json_request_handler - @defer.inlineCallbacks - def async_render_POST(self, request): + async def _async_render_POST(self, request): content = parse_json_object_from_request(request) query = content["server_keys"] - yield self.query_keys(request, query, query_remote_on_cache_miss=True) + await self.query_keys(request, query, query_remote_on_cache_miss=True) @defer.inlineCallbacks def query_keys(self, request, query, query_remote_on_cache_miss=False): diff --git a/synapse/rest/media/v1/config_resource.py b/synapse/rest/media/v1/config_resource.py index fa3d6680fc..9f747de263 100644 --- a/synapse/rest/media/v1/config_resource.py +++ b/synapse/rest/media/v1/config_resource.py @@ -14,31 +14,28 @@ # limitations under the License. # -from twisted.internet import defer -from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET -from synapse.http.server import respond_with_json, wrap_json_request_handler +from synapse.http.server import ( + DirectServeResource, + respond_with_json, + wrap_json_request_handler, +) -class MediaConfigResource(Resource): +class MediaConfigResource(DirectServeResource): isLeaf = True def __init__(self, hs): - Resource.__init__(self) + super().__init__() config = hs.get_config() self.clock = hs.get_clock() self.auth = hs.get_auth() self.limits_dict = {"m.upload.size": config.max_upload_size} - def render_GET(self, request): - self._async_render_GET(request) - return NOT_DONE_YET - @wrap_json_request_handler - @defer.inlineCallbacks - def _async_render_GET(self, request): - yield self.auth.get_user_by_req(request) + async def _async_render_GET(self, request): + await self.auth.get_user_by_req(request) respond_with_json(request, 200, self.limits_dict, send_cors=True) def render_OPTIONS(self, request): diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py index a21a35f843..66a01559e1 100644 --- a/synapse/rest/media/v1/download_resource.py +++ b/synapse/rest/media/v1/download_resource.py @@ -14,37 +14,31 @@ # limitations under the License. 
import logging -from twisted.internet import defer -from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET - import synapse.http.servlet -from synapse.http.server import set_cors_headers, wrap_json_request_handler +from synapse.http.server import ( + DirectServeResource, + set_cors_headers, + wrap_json_request_handler, +) from ._base import parse_media_id, respond_404 logger = logging.getLogger(__name__) -class DownloadResource(Resource): +class DownloadResource(DirectServeResource): isLeaf = True def __init__(self, hs, media_repo): - Resource.__init__(self) - + super().__init__() self.media_repo = media_repo self.server_name = hs.hostname # this is expected by @wrap_json_request_handler self.clock = hs.get_clock() - def render_GET(self, request): - self._async_render_GET(request) - return NOT_DONE_YET - @wrap_json_request_handler - @defer.inlineCallbacks - def _async_render_GET(self, request): + async def _async_render_GET(self, request): set_cors_headers(request) request.setHeader( b"Content-Security-Policy", @@ -58,7 +52,7 @@ class DownloadResource(Resource): ) server_name, media_id, name = parse_media_id(request) if server_name == self.server_name: - yield self.media_repo.get_local_media(request, media_id, name) + await self.media_repo.get_local_media(request, media_id, name) else: allow_remote = synapse.http.servlet.parse_boolean( request, "allow_remote", default=True @@ -72,4 +66,4 @@ class DownloadResource(Resource): respond_404(request) return - yield self.media_repo.get_remote_media(request, server_name, media_id, name) + await self.media_repo.get_remote_media(request, server_name, media_id, name) diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index de6f292ffb..0337b64dc2 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -32,12 +32,11 @@ from canonicaljson import json from twisted.internet import defer from twisted.internet.error import DNSLookupError -from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET from synapse.api.errors import Codes, SynapseError from synapse.http.client import SimpleHttpClient from synapse.http.server import ( + DirectServeResource, respond_with_json, respond_with_json_bytes, wrap_json_request_handler, @@ -58,11 +57,11 @@ _charset_match = re.compile(br"<\s*meta[^>]*charset\s*=\s*([a-z0-9-]+)", flags=r _content_type_match = re.compile(r'.*; *charset="?(.*?)"?(;|$)', flags=re.I) -class PreviewUrlResource(Resource): +class PreviewUrlResource(DirectServeResource): isLeaf = True def __init__(self, hs, media_repo, media_storage): - Resource.__init__(self) + super().__init__() self.auth = hs.get_auth() self.clock = hs.get_clock() @@ -98,16 +97,11 @@ class PreviewUrlResource(Resource): def render_OPTIONS(self, request): return respond_with_json(request, 200, {}, send_cors=True) - def render_GET(self, request): - self._async_render_GET(request) - return NOT_DONE_YET - @wrap_json_request_handler - @defer.inlineCallbacks - def _async_render_GET(self, request): + async def _async_render_GET(self, request): # XXX: if get_user_by_req fails, what should we do in an async render? 
- requester = yield self.auth.get_user_by_req(request) + requester = await self.auth.get_user_by_req(request) url = parse_string(request, "url") if b"ts" in request.args: ts = parse_integer(request, "ts") @@ -159,7 +153,7 @@ class PreviewUrlResource(Resource): else: logger.info("Returning cached response") - og = yield make_deferred_yieldable(observable.observe()) + og = await make_deferred_yieldable(defer.maybeDeferred(observable.observe)) respond_with_json_bytes(request, 200, og, send_cors=True) @defer.inlineCallbacks diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py index ca84c9f139..08329884ac 100644 --- a/synapse/rest/media/v1/thumbnail_resource.py +++ b/synapse/rest/media/v1/thumbnail_resource.py @@ -17,10 +17,12 @@ import logging from twisted.internet import defer -from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET -from synapse.http.server import set_cors_headers, wrap_json_request_handler +from synapse.http.server import ( + DirectServeResource, + set_cors_headers, + wrap_json_request_handler, +) from synapse.http.servlet import parse_integer, parse_string from ._base import ( @@ -34,11 +36,11 @@ from ._base import ( logger = logging.getLogger(__name__) -class ThumbnailResource(Resource): +class ThumbnailResource(DirectServeResource): isLeaf = True def __init__(self, hs, media_repo, media_storage): - Resource.__init__(self) + super().__init__() self.store = hs.get_datastore() self.media_repo = media_repo @@ -47,13 +49,8 @@ class ThumbnailResource(Resource): self.server_name = hs.hostname self.clock = hs.get_clock() - def render_GET(self, request): - self._async_render_GET(request) - return NOT_DONE_YET - @wrap_json_request_handler - @defer.inlineCallbacks - def _async_render_GET(self, request): + async def _async_render_GET(self, request): set_cors_headers(request) server_name, media_id, _ = parse_media_id(request) width = parse_integer(request, "width", required=True) @@ -63,21 +60,21 @@ class ThumbnailResource(Resource): if server_name == self.server_name: if self.dynamic_thumbnails: - yield self._select_or_generate_local_thumbnail( + await self._select_or_generate_local_thumbnail( request, media_id, width, height, method, m_type ) else: - yield self._respond_local_thumbnail( + await self._respond_local_thumbnail( request, media_id, width, height, method, m_type ) self.media_repo.mark_recently_accessed(None, media_id) else: if self.dynamic_thumbnails: - yield self._select_or_generate_remote_thumbnail( + await self._select_or_generate_remote_thumbnail( request, server_name, media_id, width, height, method, m_type ) else: - yield self._respond_remote_thumbnail( + await self._respond_remote_thumbnail( request, server_name, media_id, width, height, method, m_type ) self.media_repo.mark_recently_accessed(server_name, media_id) diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py index d1d7e959f0..5d76bbdf68 100644 --- a/synapse/rest/media/v1/upload_resource.py +++ b/synapse/rest/media/v1/upload_resource.py @@ -15,22 +15,24 @@ import logging -from twisted.internet import defer -from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET from synapse.api.errors import SynapseError -from synapse.http.server import respond_with_json, wrap_json_request_handler +from synapse.http.server import ( + DirectServeResource, + respond_with_json, + wrap_json_request_handler, +) from synapse.http.servlet import parse_string logger = 
logging.getLogger(__name__) -class UploadResource(Resource): +class UploadResource(DirectServeResource): isLeaf = True def __init__(self, hs, media_repo): - Resource.__init__(self) + super().__init__() self.media_repo = media_repo self.filepaths = media_repo.filepaths @@ -41,18 +43,13 @@ class UploadResource(Resource): self.max_upload_size = hs.config.max_upload_size self.clock = hs.get_clock() - def render_POST(self, request): - self._async_render_POST(request) - return NOT_DONE_YET - def render_OPTIONS(self, request): respond_with_json(request, 200, {}, send_cors=True) return NOT_DONE_YET @wrap_json_request_handler - @defer.inlineCallbacks - def _async_render_POST(self, request): - requester = yield self.auth.get_user_by_req(request) + async def _async_render_POST(self, request): + requester = await self.auth.get_user_by_req(request) # TODO: The checks here are a bit late. The content will have # already been uploaded to a tmp file at this point content_length = request.getHeader(b"Content-Length").decode("ascii") @@ -81,7 +78,7 @@ class UploadResource(Resource): # disposition = headers.getRawHeaders(b"Content-Disposition")[0] # TODO(markjh): parse content-dispostion - content_uri = yield self.media_repo.create_content( + content_uri = await self.media_repo.create_content( media_type, upload_name, request.content, content_length, requester.user ) diff --git a/synapse/rest/saml2/response_resource.py b/synapse/rest/saml2/response_resource.py index ab14b70675..939c87306c 100644 --- a/synapse/rest/saml2/response_resource.py +++ b/synapse/rest/saml2/response_resource.py @@ -18,34 +18,27 @@ import logging import saml2 from saml2.client import Saml2Client -from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET - from synapse.api.errors import CodeMessageException -from synapse.http.server import wrap_html_request_handler +from synapse.http.server import DirectServeResource, wrap_html_request_handler from synapse.http.servlet import parse_string from synapse.rest.client.v1.login import SSOAuthHandler logger = logging.getLogger(__name__) -class SAML2ResponseResource(Resource): +class SAML2ResponseResource(DirectServeResource): """A Twisted web resource which handles the SAML response""" isLeaf = 1 def __init__(self, hs): - Resource.__init__(self) + super().__init__() self._saml_client = Saml2Client(hs.config.saml2_sp_config) self._sso_auth_handler = SSOAuthHandler(hs) - def render_POST(self, request): - self._async_render_POST(request) - return NOT_DONE_YET - @wrap_html_request_handler - def _async_render_POST(self, request): + async def _async_render_POST(self, request): resp_bytes = parse_string(request, "SAMLResponse", required=True) relay_state = parse_string(request, "RelayState", required=True) diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py index e2d418b1df..39c9342423 100644 --- a/tests/rest/media/v1/test_media_storage.py +++ b/tests/rest/media/v1/test_media_storage.py @@ -22,7 +22,6 @@ from binascii import unhexlify from mock import Mock from six.moves.urllib import parse -from twisted.internet import defer, reactor from twisted.internet.defer import Deferred from synapse.rest.media.v1._base import FileInfo @@ -34,15 +33,17 @@ from synapse.util.logcontext import make_deferred_yieldable from tests import unittest -class MediaStorageTests(unittest.TestCase): - def setUp(self): +class MediaStorageTests(unittest.HomeserverTestCase): + + needs_threadpool = True + + def prepare(self, reactor, clock, hs): 
self.test_dir = tempfile.mkdtemp(prefix="synapse-tests-") + self.addCleanup(shutil.rmtree, self.test_dir) self.primary_base_path = os.path.join(self.test_dir, "primary") self.secondary_base_path = os.path.join(self.test_dir, "secondary") - hs = Mock() - hs.get_reactor = Mock(return_value=reactor) hs.config.media_store_path = self.primary_base_path storage_providers = [FileStorageProviderBackend(hs, self.secondary_base_path)] @@ -52,10 +53,6 @@ class MediaStorageTests(unittest.TestCase): hs, self.primary_base_path, self.filepaths, storage_providers ) - def tearDown(self): - shutil.rmtree(self.test_dir) - - @defer.inlineCallbacks def test_ensure_media_is_in_local_cache(self): media_id = "some_media_id" test_body = "Test\n" @@ -73,7 +70,15 @@ class MediaStorageTests(unittest.TestCase): # Now we run ensure_media_is_in_local_cache, which should copy the file # to the local cache. file_info = FileInfo(None, media_id) - local_path = yield self.media_storage.ensure_media_is_in_local_cache(file_info) + + # This uses a real blocking threadpool so we have to wait for it to be + # actually done :/ + x = self.media_storage.ensure_media_is_in_local_cache(file_info) + + # Hotloop until the threadpool does its job... + self.wait_on_thread(x) + + local_path = self.get_success(x) self.assertTrue(os.path.exists(local_path)) diff --git a/tests/unittest.py b/tests/unittest.py index 36df43c137..d26804b5b5 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -17,6 +17,7 @@ import gc import hashlib import hmac import logging +import time from mock import Mock @@ -24,7 +25,8 @@ from canonicaljson import json import twisted import twisted.logger -from twisted.internet.defer import Deferred +from twisted.internet.defer import Deferred, succeed +from twisted.python.threadpool import ThreadPool from twisted.trial import unittest from synapse.api.constants import EventTypes @@ -164,6 +166,7 @@ class HomeserverTestCase(TestCase): servlets = [] hijack_auth = True + needs_threadpool = False def setUp(self): """ @@ -192,15 +195,19 @@ class HomeserverTestCase(TestCase): if self.hijack_auth: def get_user_by_access_token(token=None, allow_guest=False): - return { - "user": UserID.from_string(self.helper.auth_user_id), - "token_id": 1, - "is_guest": False, - } + return succeed( + { + "user": UserID.from_string(self.helper.auth_user_id), + "token_id": 1, + "is_guest": False, + } + ) def get_user_by_req(request, allow_guest=False, rights="access"): - return create_requester( - UserID.from_string(self.helper.auth_user_id), 1, False, None + return succeed( + create_requester( + UserID.from_string(self.helper.auth_user_id), 1, False, None + ) ) self.hs.get_auth().get_user_by_req = get_user_by_req @@ -209,9 +216,26 @@ class HomeserverTestCase(TestCase): return_value="1234" ) + if self.needs_threadpool: + self.reactor.threadpool = ThreadPool() + self.addCleanup(self.reactor.threadpool.stop) + self.reactor.threadpool.start() + if hasattr(self, "prepare"): self.prepare(self.reactor, self.clock, self.hs) + def wait_on_thread(self, deferred, timeout=10): + """ + Wait until a Deferred is done, where it's waiting on a real thread. + """ + start_time = time.time() + + while not deferred.called: + if start_time + timeout < time.time(): + raise ValueError("Timed out waiting for threadpool") + self.reactor.advance(0.01) + time.sleep(0.01) + def make_homeserver(self, reactor, clock): """ Make and return a homeserver. -- cgit 1.4.1
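To round off, the new test hooks are used together like this: an illustrative fragment condensed from the media-storage test above (the class name and test body are invented for the example; `media_storage` and `file_info` would come from the test's own `prepare`):

```
class ExampleThreadpoolTest(unittest.HomeserverTestCase):
    needs_threadpool = True  # attach a real ThreadPool to the fake reactor

    def test_blocking_work(self):
        # Kick off work that runs on a real OS thread...
        d = self.media_storage.ensure_media_is_in_local_cache(file_info)

        # ...then advance the fake clock and sleep in turn until the
        # thread finishes and the Deferred fires (or the timeout hits).
        self.wait_on_thread(d)

        local_path = self.get_success(d)
        self.assertTrue(os.path.exists(local_path))
```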