diff --git a/changelog.d/5498.bugfix b/changelog.d/5498.bugfix
new file mode 100644
index 0000000000..6ef298d20c
--- /dev/null
+++ b/changelog.d/5498.bugfix
@@ -0,0 +1 @@
+Fix intermittent exceptions on Apple hardware. Also fix bug that caused database activity times to be under-reported in log lines.
diff --git a/changelog.d/5500.bugfix b/changelog.d/5500.bugfix
new file mode 100644
index 0000000000..624c678435
--- /dev/null
+++ b/changelog.d/5500.bugfix
@@ -0,0 +1 @@
+Fix logging error when a tampered event is detected.
diff --git a/changelog.d/5509.misc b/changelog.d/5509.misc
new file mode 100644
index 0000000000..cc27cf2940
--- /dev/null
+++ b/changelog.d/5509.misc
@@ -0,0 +1 @@
+Fix "Unexpected entry in 'full_schemas'" log warning.
diff --git a/changelog.d/5510.misc b/changelog.d/5510.misc
new file mode 100644
index 0000000000..4591a63d9d
--- /dev/null
+++ b/changelog.d/5510.misc
@@ -0,0 +1 @@
+Improve logging when generating config files.
diff --git a/changelog.d/5511.misc b/changelog.d/5511.misc
new file mode 100644
index 0000000000..c1f679b287
--- /dev/null
+++ b/changelog.d/5511.misc
@@ -0,0 +1 @@
+Refactor and clean up Config parser for maintainability.
diff --git a/changelog.d/5512.feature b/changelog.d/5512.feature
new file mode 100644
index 0000000000..712878901b
--- /dev/null
+++ b/changelog.d/5512.feature
@@ -0,0 +1 @@
+Improve help and cmdline option names for --generate-config options.
diff --git a/changelog.d/5514.bugfix b/changelog.d/5514.bugfix
new file mode 100644
index 0000000000..c3a76a854a
--- /dev/null
+++ b/changelog.d/5514.bugfix
@@ -0,0 +1 @@
+Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`.
diff --git a/changelog.d/5516.feature b/changelog.d/5516.feature
new file mode 100644
index 0000000000..fdf91c35e4
--- /dev/null
+++ b/changelog.d/5516.feature
@@ -0,0 +1 @@
+Allow configuration of the path used for ACME account keys.
diff --git a/changelog.d/5521.feature b/changelog.d/5521.feature
new file mode 100644
index 0000000000..fdf91c35e4
--- /dev/null
+++ b/changelog.d/5521.feature
@@ -0,0 +1 @@
+Allow configuration of the path used for ACME account keys.
diff --git a/changelog.d/5522.feature b/changelog.d/5522.feature
new file mode 100644
index 0000000000..fdf91c35e4
--- /dev/null
+++ b/changelog.d/5522.feature
@@ -0,0 +1 @@
+Allow configuration of the path used for ACME account keys.
diff --git a/changelog.d/5525.removal b/changelog.d/5525.removal
new file mode 100644
index 0000000000..af71560f36
--- /dev/null
+++ b/changelog.d/5525.removal
@@ -0,0 +1 @@
+Remove support for cpu_affinity setting.
diff --git a/changelog.d/5531.feature b/changelog.d/5531.feature
new file mode 100644
index 0000000000..5c6bab2c31
--- /dev/null
+++ b/changelog.d/5531.feature
@@ -0,0 +1 @@
+Add support for handling pagination APIs on client reader worker.
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index bd80d97a93..bb07b02f4e 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -23,29 +23,6 @@ server_name: "SERVERNAME"
#
pid_file: DATADIR/homeserver.pid
-# CPU affinity mask. Setting this restricts the CPUs on which the
-# process will be scheduled. It is represented as a bitmask, with the
-# lowest order bit corresponding to the first logical CPU and the
-# highest order bit corresponding to the last logical CPU. Not all CPUs
-# may exist on a given system but a mask may specify more CPUs than are
-# present.
-#
-# For example:
-# 0x00000001 is processor #0,
-# 0x00000003 is processors #0 and #1,
-# 0xFFFFFFFF is all processors (#0 through #31).
-#
-# Pinning a Python process to a single CPU is desirable, because Python
-# is inherently single-threaded due to the GIL, and can suffer a
-# 30-40% slowdown due to cache blow-out and thread context switching
-# if the scheduler happens to schedule the underlying threads across
-# different cores. See
-# https://www.mirantis.com/blog/improve-performance-python-programs-restricting-single-cpu/.
-#
-# This setting requires the affinity package to be installed!
-#
-#cpu_affinity: 0xFFFFFFFF
-
# The path to the web client which will be served at /_matrix/client/
# if 'webclient' is configured under the 'listeners' configuration.
#
@@ -425,6 +402,13 @@ acme:
#
#domain: matrix.example.com
+ # file to use for the account key. This will be generated if it doesn't
+ # exist.
+ #
+ # If unspecified, we will use CONFDIR/client.key.
+ #
+ account_key_file: DATADIR/acme_account.key
+
# List of allowed TLS fingerprints for this server to publish along
# with the signing keys for this server. Other matrix servers that
# make HTTPS requests to this server will check that the TLS
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index df4c2d4c97..d50a9840d4 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -19,7 +19,6 @@ import signal
import sys
import traceback
-import psutil
from daemonize import Daemonize
from twisted.internet import defer, error, reactor
@@ -68,21 +67,13 @@ def start_worker_reactor(appname, config):
gc_thresholds=config.gc_thresholds,
pid_file=config.worker_pid_file,
daemonize=config.worker_daemonize,
- cpu_affinity=config.worker_cpu_affinity,
print_pidfile=config.print_pidfile,
logger=logger,
)
def start_reactor(
- appname,
- soft_file_limit,
- gc_thresholds,
- pid_file,
- daemonize,
- cpu_affinity,
- print_pidfile,
- logger,
+ appname, soft_file_limit, gc_thresholds, pid_file, daemonize, print_pidfile, logger
):
""" Run the reactor in the main process
@@ -95,7 +86,6 @@ def start_reactor(
gc_thresholds:
pid_file (str): name of pid file to write to if daemonize is True
daemonize (bool): true to run the reactor in a background process
- cpu_affinity (int|None): cpu affinity mask
print_pidfile (bool): whether to print the pid file, if daemonize is True
logger (logging.Logger): logger instance to pass to Daemonize
"""
@@ -109,20 +99,6 @@ def start_reactor(
# between the sentinel and `run` logcontexts.
with PreserveLoggingContext():
logger.info("Running")
- if cpu_affinity is not None:
- # Turn the bitmask into bits, reverse it so we go from 0 up
- mask_to_bits = bin(cpu_affinity)[2:][::-1]
-
- cpus = []
- cpu_num = 0
-
- for i in mask_to_bits:
- if i == "1":
- cpus.append(cpu_num)
- cpu_num += 1
-
- p = psutil.Process()
- p.cpu_affinity(cpus)
change_resource_limit(soft_file_limit)
if gc_thresholds:
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index d19c7c7d71..49da105cf6 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -641,7 +641,6 @@ def run(hs):
gc_thresholds=hs.config.gc_thresholds,
pid_file=hs.config.pid_file,
daemonize=hs.config.daemonize,
- cpu_affinity=hs.config.cpu_affinity,
print_pidfile=hs.config.print_pidfile,
logger=logger,
)
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index 8284aa4c6d..21d110c82d 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2017-2018 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -196,6 +198,12 @@ class Config(object):
@classmethod
def load_config(cls, description, argv):
+ """Parse the commandline and config files
+
+ Doesn't support config-file-generation: used by the worker apps.
+
+ Returns: Config object.
+ """
config_parser = argparse.ArgumentParser(description=description)
config_parser.add_argument(
"-c",
@@ -210,7 +218,7 @@ class Config(object):
"--keys-directory",
metavar="DIRECTORY",
help="Where files such as certs and signing keys are stored when"
- " their location is given explicitly in the config."
+ " their location is not given explicitly in the config."
" Defaults to the directory containing the last config file",
)
@@ -222,8 +230,21 @@ class Config(object):
config_files = find_config_files(search_paths=config_args.config_path)
- obj.read_config_files(
- config_files, keys_directory=config_args.keys_directory, generate_keys=False
+ if not config_files:
+ config_parser.error("Must supply a config file.")
+
+ if config_args.keys_directory:
+ config_dir_path = config_args.keys_directory
+ else:
+ config_dir_path = os.path.dirname(config_files[-1])
+ config_dir_path = os.path.abspath(config_dir_path)
+ data_dir_path = os.getcwd()
+
+ config_dict = obj.read_config_files(
+ config_files, config_dir_path=config_dir_path, data_dir_path=data_dir_path
+ )
+ obj.parse_config_dict(
+ config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
)
obj.invoke_all("read_arguments", config_args)
@@ -232,6 +253,12 @@ class Config(object):
@classmethod
def load_or_generate_config(cls, description, argv):
+ """Parse the commandline and config files
+
+ Supports generation of config files, so is used for the main homeserver app.
+
+ Returns: Config object, or None if --generate-config or --generate-keys was set
+ """
config_parser = argparse.ArgumentParser(add_help=False)
config_parser.add_argument(
"-c",
@@ -241,37 +268,57 @@ class Config(object):
help="Specify config file. Can be given multiple times and"
" may specify directories containing *.yaml files.",
)
- config_parser.add_argument(
+
+ generate_group = config_parser.add_argument_group("Config generation")
+ generate_group.add_argument(
"--generate-config",
action="store_true",
- help="Generate a config file for the server name",
+ help="Generate a config file, then exit.",
)
- config_parser.add_argument(
+ generate_group.add_argument(
+ "--generate-missing-configs",
+ "--generate-keys",
+ action="store_true",
+ help="Generate any missing additional config files, then exit.",
+ )
+ generate_group.add_argument(
+ "-H", "--server-name", help="The server name to generate a config file for."
+ )
+ generate_group.add_argument(
"--report-stats",
action="store",
- help="Whether the generated config reports anonymized usage statistics",
+ help="Whether the generated config reports anonymized usage statistics.",
choices=["yes", "no"],
)
- config_parser.add_argument(
- "--generate-keys",
- action="store_true",
- help="Generate any missing key files then exit",
- )
- config_parser.add_argument(
+ generate_group.add_argument(
+ "--config-directory",
"--keys-directory",
metavar="DIRECTORY",
- help="Used with 'generate-*' options to specify where files such as"
- " signing keys should be stored, unless explicitly"
- " specified in the config.",
- )
- config_parser.add_argument(
- "-H", "--server-name", help="The server name to generate a config file for"
+ help=(
+ "Specify where additional config files such as signing keys and log"
+ " config should be stored. Defaults to the same directory as the last"
+ " config file."
+ ),
)
config_args, remaining_args = config_parser.parse_known_args(argv)
config_files = find_config_files(search_paths=config_args.config_path)
- generate_keys = config_args.generate_keys
+ if not config_files:
+ config_parser.error(
+ "Must supply a config file.\nA config file can be automatically"
+ ' generated using "--generate-config -H SERVER_NAME'
+ ' -c CONFIG-FILE"'
+ )
+
+ if config_args.config_directory:
+ config_dir_path = config_args.config_directory
+ else:
+ config_dir_path = os.path.dirname(config_files[-1])
+ config_dir_path = os.path.abspath(config_dir_path)
+ data_dir_path = os.getcwd()
+
+ generate_missing_configs = config_args.generate_missing_configs
obj = cls()
@@ -281,19 +328,10 @@ class Config(object):
"Please specify either --report-stats=yes or --report-stats=no\n\n"
+ MISSING_REPORT_STATS_SPIEL
)
- if not config_files:
- config_parser.error(
- "Must supply a config file.\nA config file can be automatically"
- ' generated using "--generate-config -H SERVER_NAME'
- ' -c CONFIG-FILE"'
- )
+
(config_path,) = config_files
if not cls.path_exists(config_path):
- if config_args.keys_directory:
- config_dir_path = config_args.keys_directory
- else:
- config_dir_path = os.path.dirname(config_path)
- config_dir_path = os.path.abspath(config_dir_path)
+ print("Generating config file %s" % (config_path,))
server_name = config_args.server_name
if not server_name:
@@ -304,7 +342,7 @@ class Config(object):
config_str = obj.generate_config(
config_dir_path=config_dir_path,
- data_dir_path=os.getcwd(),
+ data_dir_path=data_dir_path,
server_name=server_name,
report_stats=(config_args.report_stats == "yes"),
generate_secrets=True,
@@ -331,12 +369,12 @@ class Config(object):
else:
print(
(
- "Config file %r already exists. Generating any missing key"
+ "Config file %r already exists. Generating any missing config"
" files."
)
% (config_path,)
)
- generate_keys = True
+ generate_missing_configs = True
parser = argparse.ArgumentParser(
parents=[config_parser],
@@ -347,48 +385,57 @@ class Config(object):
obj.invoke_all("add_arguments", parser)
args = parser.parse_args(remaining_args)
- if not config_files:
- config_parser.error(
- "Must supply a config file.\nA config file can be automatically"
- ' generated using "--generate-config -H SERVER_NAME'
- ' -c CONFIG-FILE"'
- )
-
- obj.read_config_files(
- config_files,
- keys_directory=config_args.keys_directory,
- generate_keys=generate_keys,
+ config_dict = obj.read_config_files(
+ config_files, config_dir_path=config_dir_path, data_dir_path=data_dir_path
)
- if generate_keys:
+ if generate_missing_configs:
+ obj.generate_missing_files(config_dict)
return None
+ obj.parse_config_dict(
+ config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
+ )
obj.invoke_all("read_arguments", args)
return obj
- def read_config_files(self, config_files, keys_directory=None, generate_keys=False):
- if not keys_directory:
- keys_directory = os.path.dirname(config_files[-1])
+ def read_config_files(self, config_files, config_dir_path, data_dir_path):
+ """Read the config files into a dict
+
+ Args:
+ config_files (iterable[str]): A list of the config files to read
- self.config_dir_path = os.path.abspath(keys_directory)
+ config_dir_path (str): The path where the config files are kept. Used to
+ create filenames for things like the log config and the signing key.
+ data_dir_path (str): The path where the data files are kept. Used to create
+ filenames for things like the database and media store.
+
+ Returns: dict
+ """
+ # first we read the config files into a dict
specified_config = {}
for config_file in config_files:
yaml_config = self.read_config_file(config_file)
specified_config.update(yaml_config)
+ # not all of the options have sensible defaults in code, so we now need to
+ # generate a default config file suitable for the specified server name...
if "server_name" not in specified_config:
raise ConfigError(MISSING_SERVER_NAME)
-
server_name = specified_config["server_name"]
config_string = self.generate_config(
- config_dir_path=self.config_dir_path,
- data_dir_path=os.getcwd(),
+ config_dir_path=config_dir_path,
+ data_dir_path=data_dir_path,
server_name=server_name,
generate_secrets=False,
)
+
+ # ... and read it into a base config dict ...
config = yaml.safe_load(config_string)
+
+ # ... and finally, overlay it with the actual configuration.
config.pop("log_config")
config.update(specified_config)
@@ -398,15 +445,29 @@ class Config(object):
+ "\n"
+ MISSING_REPORT_STATS_SPIEL
)
+ return config
- if generate_keys:
- self.invoke_all("generate_files", config)
- return
+ def parse_config_dict(self, config_dict, config_dir_path, data_dir_path):
+ """Read the information from the config dict into this Config object.
+
+ Args:
+ config_dict (dict): Configuration data, as read from the yaml
- self.parse_config_dict(config)
+ config_dir_path (str): The path where the config files are kept. Used to
+ create filenames for things like the log config and the signing key.
+
+ data_dir_path (str): The path where the data files are kept. Used to create
+ filenames for things like the database and media store.
+ """
+ self.invoke_all(
+ "read_config",
+ config_dict,
+ config_dir_path=config_dir_path,
+ data_dir_path=data_dir_path,
+ )
- def parse_config_dict(self, config_dict):
- self.invoke_all("read_config", config_dict)
+ def generate_missing_files(self, config_dict):
+ self.invoke_all("generate_files", config_dict)
def find_config_files(search_paths):
diff --git a/synapse/config/api.py b/synapse/config/api.py
index 23b0ea6962..d9eff9ae1f 100644
--- a/synapse/config/api.py
+++ b/synapse/config/api.py
@@ -18,7 +18,7 @@ from ._base import Config
class ApiConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.room_invite_state_types = config.get(
"room_invite_state_types",
[
diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py
index 679ee62480..b74cebfca9 100644
--- a/synapse/config/appservice.py
+++ b/synapse/config/appservice.py
@@ -29,7 +29,7 @@ logger = logging.getLogger(__name__)
class AppServiceConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.app_service_config_files = config.get("app_service_config_files", [])
self.notify_appservices = config.get("notify_appservices", True)
self.track_appservice_user_ips = config.get("track_appservice_user_ips", False)
diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py
index e2eb473a92..a08b08570b 100644
--- a/synapse/config/captcha.py
+++ b/synapse/config/captcha.py
@@ -16,7 +16,7 @@ from ._base import Config
class CaptchaConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.recaptcha_private_key = config.get("recaptcha_private_key")
self.recaptcha_public_key = config.get("recaptcha_public_key")
self.enable_registration_captcha = config.get(
diff --git a/synapse/config/cas.py b/synapse/config/cas.py
index 609c0815c8..a5f0449955 100644
--- a/synapse/config/cas.py
+++ b/synapse/config/cas.py
@@ -22,7 +22,7 @@ class CasConfig(Config):
cas_server_url: URL of CAS server
"""
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
cas_config = config.get("cas_config", None)
if cas_config:
self.cas_enabled = cas_config.get("enabled", True)
diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py
index 5b0bf919c7..6fd4931681 100644
--- a/synapse/config/consent_config.py
+++ b/synapse/config/consent_config.py
@@ -84,7 +84,7 @@ class ConsentConfig(Config):
self.user_consent_at_registration = False
self.user_consent_policy_name = "Privacy Policy"
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
consent_config = config.get("user_consent")
if consent_config is None:
return
diff --git a/synapse/config/database.py b/synapse/config/database.py
index adc0a47ddf..c8963e276a 100644
--- a/synapse/config/database.py
+++ b/synapse/config/database.py
@@ -18,7 +18,7 @@ from ._base import Config
class DatabaseConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.event_cache_size = self.parse_size(config.get("event_cache_size", "10K"))
self.database_config = config.get("database")
diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py
index 3a6cb07206..07df7b7173 100644
--- a/synapse/config/emailconfig.py
+++ b/synapse/config/emailconfig.py
@@ -27,7 +27,7 @@ from ._base import Config, ConfigError
class EmailConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
# TODO: We should separate better the email configuration from the notification
# and account validity config.
diff --git a/synapse/config/groups.py b/synapse/config/groups.py
index e4be172a79..d11f4d3b96 100644
--- a/synapse/config/groups.py
+++ b/synapse/config/groups.py
@@ -17,7 +17,7 @@ from ._base import Config
class GroupsConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.enable_group_creation = config.get("enable_group_creation", False)
self.group_creation_prefix = config.get("group_creation_prefix", "")
diff --git a/synapse/config/jwt_config.py b/synapse/config/jwt_config.py
index b190dcbe38..a2c97dea95 100644
--- a/synapse/config/jwt_config.py
+++ b/synapse/config/jwt_config.py
@@ -23,7 +23,7 @@ MISSING_JWT = """Missing jwt library. This is required for jwt login.
class JWTConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
jwt_config = config.get("jwt_config", None)
if jwt_config:
self.jwt_enabled = jwt_config.get("enabled", False)
diff --git a/synapse/config/key.py b/synapse/config/key.py
index 94a0f47ea4..e58638f708 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -65,7 +65,7 @@ class TrustedKeyServer(object):
class KeyConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
# the signing key can be specified inline or in a separate file
if "signing_key" in config:
self.signing_key = read_signing_keys([config["signing_key"]])
@@ -241,6 +241,7 @@ class KeyConfig(Config):
signing_key_path = config["signing_key_path"]
if not self.path_exists(signing_key_path):
+ print("Generating signing key file %s" % (signing_key_path,))
with open(signing_key_path, "w") as signing_key_file:
key_id = "a_" + random_string(4)
write_signing_keys(signing_key_file, (generate_signing_key(key_id),))
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index a22655b125..153a137517 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -74,7 +74,7 @@ root:
class LoggingConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.verbosity = config.get("verbose", 0)
self.no_redirect_stdio = config.get("no_redirect_stdio", False)
self.log_config = self.abspath(config.get("log_config"))
@@ -137,6 +137,10 @@ class LoggingConfig(Config):
log_config = config.get("log_config")
if log_config and not os.path.exists(log_config):
log_file = self.abspath("homeserver.log")
+ print(
+ "Generating log config file %s which will log to %s"
+ % (log_config, log_file)
+ )
with open(log_config, "w") as log_config_file:
log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file))
diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py
index c85e234d22..6af82e1329 100644
--- a/synapse/config/metrics.py
+++ b/synapse/config/metrics.py
@@ -21,7 +21,7 @@ MISSING_SENTRY = """Missing sentry-sdk library. This is required to enable sentr
class MetricsConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.enable_metrics = config.get("enable_metrics", False)
self.report_stats = config.get("report_stats", None)
self.metrics_port = config.get("metrics_port")
diff --git a/synapse/config/password.py b/synapse/config/password.py
index eea59e772b..300b67f236 100644
--- a/synapse/config/password.py
+++ b/synapse/config/password.py
@@ -20,7 +20,7 @@ class PasswordConfig(Config):
"""Password login configuration
"""
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
password_config = config.get("password_config", {})
if password_config is None:
password_config = {}
diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py
index fcf279e8e1..8ffefd2639 100644
--- a/synapse/config/password_auth_providers.py
+++ b/synapse/config/password_auth_providers.py
@@ -21,7 +21,7 @@ LDAP_PROVIDER = "ldap_auth_provider.LdapAuthProvider"
class PasswordAuthProviderConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.password_providers = []
providers = []
diff --git a/synapse/config/push.py b/synapse/config/push.py
index 62c0060c9c..99d15e4461 100644
--- a/synapse/config/push.py
+++ b/synapse/config/push.py
@@ -18,7 +18,7 @@ from ._base import Config
class PushConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
push_config = config.get("push", {})
self.push_include_content = push_config.get("include_content", True)
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 5a9adac480..b03047f2b5 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -36,7 +36,7 @@ class FederationRateLimitConfig(object):
class RatelimitConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
# Load the new-style messages config if it exists. Otherwise fall back
# to the old method.
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index a1e27ba66c..6d8a2df29b 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -46,7 +46,7 @@ class AccountValidityConfig(Config):
class RegistrationConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.enable_registration = bool(
strtobool(str(config.get("enable_registration", False)))
)
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 9f9669ebb1..15a19e0911 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -86,7 +86,7 @@ def parse_thumbnail_requirements(thumbnail_sizes):
class ContentRepositoryConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.max_upload_size = self.parse_size(config.get("max_upload_size", "10M"))
self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M"))
self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M"))
diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py
index c1da0e20e0..24223db7a1 100644
--- a/synapse/config/room_directory.py
+++ b/synapse/config/room_directory.py
@@ -19,7 +19,7 @@ from ._base import Config, ConfigError
class RoomDirectoryConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.enable_room_list_search = config.get("enable_room_list_search", True)
alias_creation_rules = config.get("alias_creation_rules")
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index 2ec38e48e9..d86cf0e6ee 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -17,7 +17,7 @@ from ._base import Config, ConfigError
class SAML2Config(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.saml2_enabled = False
saml2_config = config.get("saml2_config")
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 6d3f1da96c..1e58b2e91b 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -40,7 +40,7 @@ DEFAULT_ROOM_VERSION = "4"
class ServerConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.server_name = config["server_name"]
self.server_context = config.get("server_context", None)
@@ -57,7 +57,6 @@ class ServerConfig(Config):
self.user_agent_suffix = config.get("user_agent_suffix")
self.use_frozen_dicts = config.get("use_frozen_dicts", False)
self.public_baseurl = config.get("public_baseurl")
- self.cpu_affinity = config.get("cpu_affinity")
# Whether to send federation traffic out in this process. This only
# applies to some federation traffic, and so shouldn't be used to
@@ -336,29 +335,6 @@ class ServerConfig(Config):
#
pid_file: %(pid_file)s
- # CPU affinity mask. Setting this restricts the CPUs on which the
- # process will be scheduled. It is represented as a bitmask, with the
- # lowest order bit corresponding to the first logical CPU and the
- # highest order bit corresponding to the last logical CPU. Not all CPUs
- # may exist on a given system but a mask may specify more CPUs than are
- # present.
- #
- # For example:
- # 0x00000001 is processor #0,
- # 0x00000003 is processors #0 and #1,
- # 0xFFFFFFFF is all processors (#0 through #31).
- #
- # Pinning a Python process to a single CPU is desirable, because Python
- # is inherently single-threaded due to the GIL, and can suffer a
- # 30-40%% slowdown due to cache blow-out and thread context switching
- # if the scheduler happens to schedule the underlying threads across
- # different cores. See
- # https://www.mirantis.com/blog/improve-performance-python-programs-restricting-single-cpu/.
- #
- # This setting requires the affinity package to be installed!
- #
- #cpu_affinity: 0xFFFFFFFF
-
# The path to the web client which will be served at /_matrix/client/
# if 'webclient' is configured under the 'listeners' configuration.
#
diff --git a/synapse/config/server_notices_config.py b/synapse/config/server_notices_config.py
index d930eb33b5..05110c17a6 100644
--- a/synapse/config/server_notices_config.py
+++ b/synapse/config/server_notices_config.py
@@ -66,7 +66,7 @@ class ServerNoticesConfig(Config):
self.server_notices_mxid_avatar_url = None
self.server_notices_room_name = None
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
c = config.get("server_notices")
if c is None:
return
diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py
index 1502e9faba..1968003cb3 100644
--- a/synapse/config/spam_checker.py
+++ b/synapse/config/spam_checker.py
@@ -19,7 +19,7 @@ from ._base import Config
class SpamCheckerConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.spam_checker = None
provider = config.get("spam_checker", None)
diff --git a/synapse/config/stats.py b/synapse/config/stats.py
index 80fc1b9dd0..73a87c73f2 100644
--- a/synapse/config/stats.py
+++ b/synapse/config/stats.py
@@ -25,7 +25,7 @@ class StatsConfig(Config):
Configuration for the behaviour of synapse's stats engine
"""
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.stats_enabled = True
self.stats_bucket_size = 86400
self.stats_retention = sys.maxsize
diff --git a/synapse/config/third_party_event_rules.py b/synapse/config/third_party_event_rules.py
index a89dd5f98a..1bedd607b6 100644
--- a/synapse/config/third_party_event_rules.py
+++ b/synapse/config/third_party_event_rules.py
@@ -19,7 +19,7 @@ from ._base import Config
class ThirdPartyRulesConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.third_party_event_rules = None
provider = config.get("third_party_event_rules", None)
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index 7951bf21fa..9a66e8cc4b 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -33,7 +33,7 @@ logger = logging.getLogger(__name__)
class TlsConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, config_dir_path, **kwargs):
acme_config = config.get("acme", None)
if acme_config is None:
@@ -50,6 +50,10 @@ class TlsConfig(Config):
self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30)
self.acme_domain = acme_config.get("domain", config.get("server_name"))
+ self.acme_account_key_file = self.abspath(
+ acme_config.get("account_key_file", config_dir_path + "/client.key")
+ )
+
self.tls_certificate_file = self.abspath(config.get("tls_certificate_path"))
self.tls_private_key_file = self.abspath(config.get("tls_private_key_path"))
@@ -213,11 +217,12 @@ class TlsConfig(Config):
if sha256_fingerprint not in sha256_fingerprints:
self.tls_fingerprints.append({"sha256": sha256_fingerprint})
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def default_config(self, config_dir_path, server_name, data_dir_path, **kwargs):
base_key_name = os.path.join(config_dir_path, server_name)
tls_certificate_path = base_key_name + ".tls.crt"
tls_private_key_path = base_key_name + ".tls.key"
+ default_acme_account_file = os.path.join(data_dir_path, "acme_account.key")
# this is to avoid the max line length. Sorrynotsorry
proxypassline = (
@@ -343,6 +348,13 @@ class TlsConfig(Config):
#
#domain: matrix.example.com
+ # file to use for the account key. This will be generated if it doesn't
+ # exist.
+ #
+ # If unspecified, we will use CONFDIR/client.key.
+ #
+ account_key_file: %(default_acme_account_file)s
+
# List of allowed TLS fingerprints for this server to publish along
# with the signing keys for this server. Other matrix servers that
# make HTTPS requests to this server will check that the TLS
diff --git a/synapse/config/user_directory.py b/synapse/config/user_directory.py
index e031b11599..0665dc3fcf 100644
--- a/synapse/config/user_directory.py
+++ b/synapse/config/user_directory.py
@@ -21,7 +21,7 @@ class UserDirectoryConfig(Config):
Configuration for the behaviour of the /user_directory API
"""
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.user_directory_search_enabled = True
self.user_directory_search_all_users = False
user_directory_config = config.get("user_directory", None)
diff --git a/synapse/config/voip.py b/synapse/config/voip.py
index 82cf8c53a8..01e0cb2e28 100644
--- a/synapse/config/voip.py
+++ b/synapse/config/voip.py
@@ -16,7 +16,7 @@ from ._base import Config
class VoipConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.turn_uris = config.get("turn_uris", [])
self.turn_shared_secret = config.get("turn_shared_secret")
self.turn_username = config.get("turn_username")
diff --git a/synapse/config/workers.py b/synapse/config/workers.py
index 75993abf35..3b75471d85 100644
--- a/synapse/config/workers.py
+++ b/synapse/config/workers.py
@@ -21,7 +21,7 @@ class WorkerConfig(Config):
They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process."""
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.worker_app = config.get("worker_app")
# Canonicalise worker_app so that master always has None
@@ -46,7 +46,6 @@ class WorkerConfig(Config):
self.worker_name = config.get("worker_name", self.worker_app)
self.worker_main_http_uri = config.get("worker_main_http_uri", None)
- self.worker_cpu_affinity = config.get("worker_cpu_affinity")
# This option is really only here to support `--manhole` command line
# argument.
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
index 58b929363f..1e925b19e7 100644
--- a/synapse/federation/federation_base.py
+++ b/synapse/federation/federation_base.py
@@ -163,7 +163,6 @@ class FederationBase(object):
logger.warning(
"Event %s content has been tampered, redacting",
pdu.event_id,
- pdu.get_pdu_json(),
)
return redacted_event
diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py
index 01e0ef408d..fbef2f3d38 100644
--- a/synapse/handlers/acme.py
+++ b/synapse/handlers/acme.py
@@ -15,14 +15,9 @@
import logging
-import attr
-from zope.interface import implementer
-
import twisted
import twisted.internet.error
from twisted.internet import defer
-from twisted.python.filepath import FilePath
-from twisted.python.url import URL
from twisted.web import server, static
from twisted.web.resource import Resource
@@ -30,27 +25,6 @@ from synapse.app import check_bind_error
logger = logging.getLogger(__name__)
-try:
- from txacme.interfaces import ICertificateStore
-
- @attr.s
- @implementer(ICertificateStore)
- class ErsatzStore(object):
- """
- A store that only stores in memory.
- """
-
- certs = attr.ib(default=attr.Factory(dict))
-
- def store(self, server_name, pem_objects):
- self.certs[server_name] = [o.as_bytes() for o in pem_objects]
- return defer.succeed(None)
-
-
-except ImportError:
- # txacme is missing
- pass
-
class AcmeHandler(object):
def __init__(self, hs):
@@ -60,6 +34,7 @@ class AcmeHandler(object):
@defer.inlineCallbacks
def start_listening(self):
+ from synapse.handlers import acme_issuing_service
# Configure logging for txacme, if you need to debug
# from eliot import add_destinations
@@ -67,37 +42,18 @@ class AcmeHandler(object):
#
# add_destinations(TwistedDestination())
- from txacme.challenges import HTTP01Responder
- from txacme.service import AcmeIssuingService
- from txacme.endpoint import load_or_create_client_key
- from txacme.client import Client
- from josepy.jwa import RS256
-
- self._store = ErsatzStore()
- responder = HTTP01Responder()
-
- self._issuer = AcmeIssuingService(
- cert_store=self._store,
- client_creator=(
- lambda: Client.from_url(
- reactor=self.reactor,
- url=URL.from_text(self.hs.config.acme_url),
- key=load_or_create_client_key(
- FilePath(self.hs.config.config_dir_path)
- ),
- alg=RS256,
- )
- ),
- clock=self.reactor,
- responders=[responder],
+ well_known = Resource()
+
+ self._issuer = acme_issuing_service.create_issuing_service(
+ self.reactor,
+ acme_url=self.hs.config.acme_url,
+ account_key_file=self.hs.config.acme_account_key_file,
+ well_known_resource=well_known,
)
- well_known = Resource()
- well_known.putChild(b"acme-challenge", responder.resource)
responder_resource = Resource()
responder_resource.putChild(b".well-known", well_known)
responder_resource.putChild(b"check", static.Data(b"OK", b"text/plain"))
-
srv = server.Site(responder_resource)
bind_addresses = self.hs.config.acme_bind_addresses
@@ -128,7 +84,7 @@ class AcmeHandler(object):
logger.exception("Fail!")
raise
logger.warning("Reprovisioned %s, saving.", self._acme_domain)
- cert_chain = self._store.certs[self._acme_domain]
+ cert_chain = self._issuer.cert_store.certs[self._acme_domain]
try:
with open(self.hs.config.tls_private_key_file, "wb") as private_key_file:
diff --git a/synapse/handlers/acme_issuing_service.py b/synapse/handlers/acme_issuing_service.py
new file mode 100644
index 0000000000..e1d4224e74
--- /dev/null
+++ b/synapse/handlers/acme_issuing_service.py
@@ -0,0 +1,117 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Utility function to create an ACME issuing service.
+
+This file contains the unconditional imports on the acme and cryptography bits that we
+only need (and may only have available) if we are doing ACME, so is designed to be
+imported conditionally.
+"""
+import logging
+
+import attr
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import serialization
+from josepy import JWKRSA
+from josepy.jwa import RS256
+from txacme.challenges import HTTP01Responder
+from txacme.client import Client
+from txacme.interfaces import ICertificateStore
+from txacme.service import AcmeIssuingService
+from txacme.util import generate_private_key
+from zope.interface import implementer
+
+from twisted.internet import defer
+from twisted.python.filepath import FilePath
+from twisted.python.url import URL
+
+logger = logging.getLogger(__name__)
+
+
+def create_issuing_service(reactor, acme_url, account_key_file, well_known_resource):
+ """Create an ACME issuing service, and attach it to a web Resource
+
+ Args:
+ reactor: twisted reactor
+ acme_url (str): URL to use to request certificates
+ account_key_file (str): where to store the account key
+ well_known_resource (twisted.web.IResource): web resource for .well-known.
+ We will attach a child resource for "acme-challenge".
+
+ Returns:
+ AcmeIssuingService
+ """
+ responder = HTTP01Responder()
+
+ well_known_resource.putChild(b"acme-challenge", responder.resource)
+
+ store = ErsatzStore()
+
+ return AcmeIssuingService(
+ cert_store=store,
+ client_creator=(
+ lambda: Client.from_url(
+ reactor=reactor,
+ url=URL.from_text(acme_url),
+ key=load_or_create_client_key(account_key_file),
+ alg=RS256,
+ )
+ ),
+ clock=reactor,
+ responders=[responder],
+ )
+
+
+@attr.s
+@implementer(ICertificateStore)
+class ErsatzStore(object):
+ """
+ A store that only stores in memory.
+ """
+
+ certs = attr.ib(default=attr.Factory(dict))
+
+ def store(self, server_name, pem_objects):
+ self.certs[server_name] = [o.as_bytes() for o in pem_objects]
+ return defer.succeed(None)
+
+
+def load_or_create_client_key(key_file):
+ """Load the ACME account key from a file, creating it if it does not exist.
+
+ Args:
+ key_file (str): name of the file to use as the account key
+ """
+ # this is based on txacme.endpoint.load_or_create_client_key, but doesn't
+ # hardcode the 'client.key' filename
+ acme_key_file = FilePath(key_file)
+ if acme_key_file.exists():
+ logger.info("Loading ACME account key from '%s'", acme_key_file)
+ key = serialization.load_pem_private_key(
+ acme_key_file.getContent(), password=None, backend=default_backend()
+ )
+ else:
+ logger.info("Saving new ACME account key to '%s'", acme_key_file)
+ key = generate_private_key("rsa")
+ acme_key_file.setContent(
+ key.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.TraditionalOpenSSL,
+ encryption_algorithm=serialization.NoEncryption(),
+ )
+ )
+ return JWKRSA(key=key)
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index 062e026e5f..76ee97ddd3 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -180,9 +180,7 @@ class PaginationHandler(object):
room_token = pagin_config.from_token.room_key
else:
pagin_config.from_token = (
- yield self.hs.get_event_sources().get_current_token_for_room(
- room_id=room_id
- )
+ yield self.hs.get_event_sources().get_current_token_for_pagination()
)
room_token = pagin_config.from_token.room_key
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 77e2d1a0e1..13698d9638 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -72,10 +72,11 @@ REQUIREMENTS = [
# Twisted 18.7.0 requires attrs>=17.4.0
"attrs>=17.4.0",
"netaddr>=0.7.18",
+ "Jinja2>=2.9",
+ "bleach>=1.4.3",
]
CONDITIONAL_REQUIREMENTS = {
- "email": ["Jinja2>=2.9", "bleach>=1.4.3"],
"matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"],
# we use execute_batch, which arrived in psycopg 2.7.
"postgres": ["psycopg2>=2.7"],
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index c74bcc8f0b..aae43d0f99 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -38,6 +38,14 @@ from synapse.util.caches.descriptors import Cache
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
from synapse.util.stringutils import exception_to_unicode
+# import a function which will return a monotonic time, in seconds
+try:
+ # on python 3, use time.monotonic, since time.clock can go backwards
+ from time import monotonic as monotonic_time
+except ImportError:
+ # ... but python 2 doesn't have it
+ from time import clock as monotonic_time
+
logger = logging.getLogger(__name__)
try:
@@ -350,14 +358,14 @@ class SQLBaseStore(object):
)
def start_profiling(self):
- self._previous_loop_ts = self._clock.time_msec()
+ self._previous_loop_ts = monotonic_time()
def loop():
curr = self._current_txn_total_time
prev = self._previous_txn_total_time
self._previous_txn_total_time = curr
- time_now = self._clock.time_msec()
+ time_now = monotonic_time()
time_then = self._previous_loop_ts
self._previous_loop_ts = time_now
@@ -383,7 +391,7 @@ class SQLBaseStore(object):
def _new_transaction(
self, conn, desc, after_callbacks, exception_callbacks, func, *args, **kwargs
):
- start = time.time()
+ start = monotonic_time()
txn_id = self._TXN_ID
# We don't really need these to be unique, so lets stop it from
@@ -449,7 +457,7 @@ class SQLBaseStore(object):
logger.debug("[TXN FAIL] {%s} %s", name, e)
raise
finally:
- end = time.time()
+ end = monotonic_time()
duration = end - start
LoggingContext.current_context().add_database_transaction(duration)
@@ -523,11 +531,11 @@ class SQLBaseStore(object):
)
parent_context = None
- start_time = time.time()
+ start_time = monotonic_time()
def inner_func(conn, *args, **kwargs):
with LoggingContext("runWithConnection", parent_context) as context:
- sched_duration_sec = time.time() - start_time
+ sched_duration_sec = monotonic_time() - start_time
sql_scheduling_timer.observe(sched_duration_sec)
context.add_database_scheduled(sched_duration_sec)
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index fc10b9534e..7c4e1dc7ec 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -133,7 +133,7 @@ def _setup_new_database(cur, database_engine):
if ver <= SCHEMA_VERSION:
valid_dirs.append((ver, abs_path))
else:
- logger.warn("Unexpected entry in 'full_schemas': %s", filename)
+ logger.debug("Ignoring entry '%s' in 'full_schemas'", filename)
if not valid_dirs:
raise PrepareDatabaseException(
diff --git a/synapse/streams/events.py b/synapse/streams/events.py
index 9b416f2f40..488c49747a 100644
--- a/synapse/streams/events.py
+++ b/synapse/streams/events.py
@@ -59,21 +59,25 @@ class EventSources(object):
defer.returnValue(token)
@defer.inlineCallbacks
- def get_current_token_for_room(self, room_id):
- push_rules_key, _ = self.store.get_push_rules_stream_token()
- to_device_key = self.store.get_to_device_stream_token()
- device_list_key = self.store.get_device_stream_token()
- groups_key = self.store.get_group_stream_token()
+ def get_current_token_for_pagination(self):
+ """Get the current token to be used for pagination of room
+ events.
+ The returned token does not have the current values for fields other
+ than `room`, since they are not used during pagination.
+
+ Returns:
+ Deferred[StreamToken]
+ """
token = StreamToken(
- room_key=(yield self.sources["room"].get_current_key_for_room(room_id)),
- presence_key=(yield self.sources["presence"].get_current_key()),
- typing_key=(yield self.sources["typing"].get_current_key()),
- receipt_key=(yield self.sources["receipt"].get_current_key()),
- account_data_key=(yield self.sources["account_data"].get_current_key()),
- push_rules_key=push_rules_key,
- to_device_key=to_device_key,
- device_list_key=device_list_key,
- groups_key=groups_key,
+ room_key=(yield self.sources["room"].get_current_key()),
+ presence_key=0,
+ typing_key=0,
+ receipt_key=0,
+ account_data_key=0,
+ push_rules_key=0,
+ to_device_key=0,
+ device_list_key=0,
+ groups_key=0,
)
defer.returnValue(token)
diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py
index 0cbbf4e885..a5d88d644a 100644
--- a/tests/config/test_tls.py
+++ b/tests/config/test_tls.py
@@ -65,7 +65,7 @@ s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg=
}
t = TestConfig()
- t.read_config(config)
+ t.read_config(config, config_dir_path="", data_dir_path="")
t.read_certificate_from_disk(require_cert_and_key=False)
warnings = self.flushWarnings()
diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py
index b1094c1448..417fda3ab2 100644
--- a/tests/http/federation/test_matrix_federation_agent.py
+++ b/tests/http/federation/test_matrix_federation_agent.py
@@ -78,7 +78,7 @@ class MatrixFederationAgentTests(TestCase):
# config_dict["trusted_key_servers"] = []
self._config = config = HomeServerConfig()
- config.parse_config_dict(config_dict)
+ config.parse_config_dict(config_dict, "", "")
self.agent = MatrixFederationAgent(
reactor=self.reactor,
diff --git a/tests/unittest.py b/tests/unittest.py
index d64702b0c2..36df43c137 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -342,7 +342,7 @@ class HomeserverTestCase(TestCase):
# Parse the config from a config dict into a HomeServerConfig
config_obj = HomeServerConfig()
- config_obj.parse_config_dict(config)
+ config_obj.parse_config_dict(config, "", "")
kwargs["config"] = config_obj
hs = setup_test_homeserver(self.addCleanup, *args, **kwargs)
diff --git a/tests/utils.py b/tests/utils.py
index bd2c7c954c..da43166f3a 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -182,7 +182,7 @@ def default_config(name, parse=False):
if parse:
config = HomeServerConfig()
- config.parse_config_dict(config_dict)
+ config.parse_config_dict(config_dict, "", "")
return config
return config_dict
|