Diffstat (limited to 'synapse')
39 files changed, 309 insertions, 207 deletions
diff --git a/synapse/app/_base.py b/synapse/app/_base.py index df4c2d4c97..d50a9840d4 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -19,7 +19,6 @@ import signal import sys import traceback -import psutil from daemonize import Daemonize from twisted.internet import defer, error, reactor @@ -68,21 +67,13 @@ def start_worker_reactor(appname, config): gc_thresholds=config.gc_thresholds, pid_file=config.worker_pid_file, daemonize=config.worker_daemonize, - cpu_affinity=config.worker_cpu_affinity, print_pidfile=config.print_pidfile, logger=logger, ) def start_reactor( - appname, - soft_file_limit, - gc_thresholds, - pid_file, - daemonize, - cpu_affinity, - print_pidfile, - logger, + appname, soft_file_limit, gc_thresholds, pid_file, daemonize, print_pidfile, logger ): """ Run the reactor in the main process @@ -95,7 +86,6 @@ def start_reactor( gc_thresholds: pid_file (str): name of pid file to write to if daemonize is True daemonize (bool): true to run the reactor in a background process - cpu_affinity (int|None): cpu affinity mask print_pidfile (bool): whether to print the pid file, if daemonize is True logger (logging.Logger): logger instance to pass to Daemonize """ @@ -109,20 +99,6 @@ def start_reactor( # between the sentinel and `run` logcontexts. with PreserveLoggingContext(): logger.info("Running") - if cpu_affinity is not None: - # Turn the bitmask into bits, reverse it so we go from 0 up - mask_to_bits = bin(cpu_affinity)[2:][::-1] - - cpus = [] - cpu_num = 0 - - for i in mask_to_bits: - if i == "1": - cpus.append(cpu_num) - cpu_num += 1 - - p = psutil.Process() - p.cpu_affinity(cpus) change_resource_limit(soft_file_limit) if gc_thresholds: diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index d19c7c7d71..49da105cf6 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -641,7 +641,6 @@ def run(hs): gc_thresholds=hs.config.gc_thresholds, pid_file=hs.config.pid_file, daemonize=hs.config.daemonize, - cpu_affinity=hs.config.cpu_affinity, print_pidfile=hs.config.print_pidfile, logger=logger, ) diff --git a/synapse/config/_base.py b/synapse/config/_base.py index 36e9c04cee..21d110c82d 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2017-2018 New Vector Ltd +# Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -216,7 +218,7 @@ class Config(object): "--keys-directory", metavar="DIRECTORY", help="Where files such as certs and signing keys are stored when" - " their location is given explicitly in the config." + " their location is not given explicitly in the config." 
" Defaults to the directory containing the last config file", ) @@ -228,10 +230,22 @@ class Config(object): config_files = find_config_files(search_paths=config_args.config_path) + if not config_files: + config_parser.error("Must supply a config file.") + + if config_args.keys_directory: + config_dir_path = config_args.keys_directory + else: + config_dir_path = os.path.dirname(config_files[-1]) + config_dir_path = os.path.abspath(config_dir_path) + data_dir_path = os.getcwd() + config_dict = obj.read_config_files( - config_files, keys_directory=config_args.keys_directory + config_files, config_dir_path=config_dir_path, data_dir_path=data_dir_path + ) + obj.parse_config_dict( + config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path ) - obj.parse_config_dict(config_dict) obj.invoke_all("read_arguments", config_args) @@ -282,7 +296,7 @@ class Config(object): metavar="DIRECTORY", help=( "Specify where additional config files such as signing keys and log" - " config should be stored. Defaults to the same directory as the main" + " config should be stored. Defaults to the same directory as the last" " config file." ), ) @@ -290,6 +304,20 @@ class Config(object): config_files = find_config_files(search_paths=config_args.config_path) + if not config_files: + config_parser.error( + "Must supply a config file.\nA config file can be automatically" + ' generated using "--generate-config -H SERVER_NAME' + ' -c CONFIG-FILE"' + ) + + if config_args.config_directory: + config_dir_path = config_args.config_directory + else: + config_dir_path = os.path.dirname(config_files[-1]) + config_dir_path = os.path.abspath(config_dir_path) + data_dir_path = os.getcwd() + generate_missing_configs = config_args.generate_missing_configs obj = cls() @@ -300,20 +328,10 @@ class Config(object): "Please specify either --report-stats=yes or --report-stats=no\n\n" + MISSING_REPORT_STATS_SPIEL ) - if not config_files: - config_parser.error( - "Must supply a config file.\nA config file can be automatically" - ' generated using "--generate-config -H SERVER_NAME' - ' -c CONFIG-FILE"' - ) + (config_path,) = config_files if not cls.path_exists(config_path): print("Generating config file %s" % (config_path,)) - if config_args.config_directory: - config_dir_path = config_args.config_directory - else: - config_dir_path = os.path.dirname(config_path) - config_dir_path = os.path.abspath(config_dir_path) server_name = config_args.server_name if not server_name: @@ -324,7 +342,7 @@ class Config(object): config_str = obj.generate_config( config_dir_path=config_dir_path, - data_dir_path=os.getcwd(), + data_dir_path=data_dir_path, server_name=server_name, report_stats=(config_args.report_stats == "yes"), generate_secrets=True, @@ -367,36 +385,35 @@ class Config(object): obj.invoke_all("add_arguments", parser) args = parser.parse_args(remaining_args) - if not config_files: - config_parser.error( - "Must supply a config file.\nA config file can be automatically" - ' generated using "--generate-config -H SERVER_NAME' - ' -c CONFIG-FILE"' - ) - config_dict = obj.read_config_files( - config_files, keys_directory=config_args.config_directory + config_files, config_dir_path=config_dir_path, data_dir_path=data_dir_path ) if generate_missing_configs: obj.generate_missing_files(config_dict) return None - obj.parse_config_dict(config_dict) + obj.parse_config_dict( + config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path + ) obj.invoke_all("read_arguments", args) return obj - def read_config_files(self, 
config_files, keys_directory=None): + def read_config_files(self, config_files, config_dir_path, data_dir_path): """Read the config files into a dict - Returns: dict - """ - if not keys_directory: - keys_directory = os.path.dirname(config_files[-1]) + Args: + config_files (iterable[str]): A list of the config files to read - self.config_dir_path = os.path.abspath(keys_directory) + config_dir_path (str): The path where the config files are kept. Used to + create filenames for things like the log config and the signing key. + data_dir_path (str): The path where the data files are kept. Used to create + filenames for things like the database and media store. + + Returns: dict + """ # first we read the config files into a dict specified_config = {} for config_file in config_files: @@ -409,8 +426,8 @@ class Config(object): raise ConfigError(MISSING_SERVER_NAME) server_name = specified_config["server_name"] config_string = self.generate_config( - config_dir_path=self.config_dir_path, - data_dir_path=os.getcwd(), + config_dir_path=config_dir_path, + data_dir_path=data_dir_path, server_name=server_name, generate_secrets=False, ) @@ -430,8 +447,24 @@ class Config(object): ) return config - def parse_config_dict(self, config_dict): - self.invoke_all("read_config", config_dict) + def parse_config_dict(self, config_dict, config_dir_path, data_dir_path): + """Read the information from the config dict into this Config object. + + Args: + config_dict (dict): Configuration data, as read from the yaml + + config_dir_path (str): The path where the config files are kept. Used to + create filenames for things like the log config and the signing key. + + data_dir_path (str): The path where the data files are kept. Used to create + filenames for things like the database and media store. 
+ """ + self.invoke_all( + "read_config", + config_dict, + config_dir_path=config_dir_path, + data_dir_path=data_dir_path, + ) def generate_missing_files(self, config_dict): self.invoke_all("generate_files", config_dict) diff --git a/synapse/config/api.py b/synapse/config/api.py index 23b0ea6962..d9eff9ae1f 100644 --- a/synapse/config/api.py +++ b/synapse/config/api.py @@ -18,7 +18,7 @@ from ._base import Config class ApiConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.room_invite_state_types = config.get( "room_invite_state_types", [ diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py index 679ee62480..b74cebfca9 100644 --- a/synapse/config/appservice.py +++ b/synapse/config/appservice.py @@ -29,7 +29,7 @@ logger = logging.getLogger(__name__) class AppServiceConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.app_service_config_files = config.get("app_service_config_files", []) self.notify_appservices = config.get("notify_appservices", True) self.track_appservice_user_ips = config.get("track_appservice_user_ips", False) diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py index e2eb473a92..a08b08570b 100644 --- a/synapse/config/captcha.py +++ b/synapse/config/captcha.py @@ -16,7 +16,7 @@ from ._base import Config class CaptchaConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.recaptcha_private_key = config.get("recaptcha_private_key") self.recaptcha_public_key = config.get("recaptcha_public_key") self.enable_registration_captcha = config.get( diff --git a/synapse/config/cas.py b/synapse/config/cas.py index 609c0815c8..a5f0449955 100644 --- a/synapse/config/cas.py +++ b/synapse/config/cas.py @@ -22,7 +22,7 @@ class CasConfig(Config): cas_server_url: URL of CAS server """ - def read_config(self, config): + def read_config(self, config, **kwargs): cas_config = config.get("cas_config", None) if cas_config: self.cas_enabled = cas_config.get("enabled", True) diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py index 5b0bf919c7..6fd4931681 100644 --- a/synapse/config/consent_config.py +++ b/synapse/config/consent_config.py @@ -84,7 +84,7 @@ class ConsentConfig(Config): self.user_consent_at_registration = False self.user_consent_policy_name = "Privacy Policy" - def read_config(self, config): + def read_config(self, config, **kwargs): consent_config = config.get("user_consent") if consent_config is None: return diff --git a/synapse/config/database.py b/synapse/config/database.py index adc0a47ddf..c8963e276a 100644 --- a/synapse/config/database.py +++ b/synapse/config/database.py @@ -18,7 +18,7 @@ from ._base import Config class DatabaseConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.event_cache_size = self.parse_size(config.get("event_cache_size", "10K")) self.database_config = config.get("database") diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index 3a6cb07206..07df7b7173 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -27,7 +27,7 @@ from ._base import Config, ConfigError class EmailConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): # TODO: We should separate better the email configuration from the notification # and account validity config. 
diff --git a/synapse/config/groups.py b/synapse/config/groups.py index e4be172a79..d11f4d3b96 100644 --- a/synapse/config/groups.py +++ b/synapse/config/groups.py @@ -17,7 +17,7 @@ from ._base import Config class GroupsConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.enable_group_creation = config.get("enable_group_creation", False) self.group_creation_prefix = config.get("group_creation_prefix", "") diff --git a/synapse/config/jwt_config.py b/synapse/config/jwt_config.py index b190dcbe38..a2c97dea95 100644 --- a/synapse/config/jwt_config.py +++ b/synapse/config/jwt_config.py @@ -23,7 +23,7 @@ MISSING_JWT = """Missing jwt library. This is required for jwt login. class JWTConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): jwt_config = config.get("jwt_config", None) if jwt_config: self.jwt_enabled = jwt_config.get("enabled", False) diff --git a/synapse/config/key.py b/synapse/config/key.py index 21c4f5c51c..e58638f708 100644 --- a/synapse/config/key.py +++ b/synapse/config/key.py @@ -65,7 +65,7 @@ class TrustedKeyServer(object): class KeyConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): # the signing key can be specified inline or in a separate file if "signing_key" in config: self.signing_key = read_signing_keys([config["signing_key"]]) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 9db2e087e4..153a137517 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -74,7 +74,7 @@ root: class LoggingConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.verbosity = config.get("verbose", 0) self.no_redirect_stdio = config.get("no_redirect_stdio", False) self.log_config = self.abspath(config.get("log_config")) diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py index c85e234d22..6af82e1329 100644 --- a/synapse/config/metrics.py +++ b/synapse/config/metrics.py @@ -21,7 +21,7 @@ MISSING_SENTRY = """Missing sentry-sdk library. 
This is required to enable sentr class MetricsConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.enable_metrics = config.get("enable_metrics", False) self.report_stats = config.get("report_stats", None) self.metrics_port = config.get("metrics_port") diff --git a/synapse/config/password.py b/synapse/config/password.py index eea59e772b..300b67f236 100644 --- a/synapse/config/password.py +++ b/synapse/config/password.py @@ -20,7 +20,7 @@ class PasswordConfig(Config): """Password login configuration """ - def read_config(self, config): + def read_config(self, config, **kwargs): password_config = config.get("password_config", {}) if password_config is None: password_config = {} diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py index fcf279e8e1..8ffefd2639 100644 --- a/synapse/config/password_auth_providers.py +++ b/synapse/config/password_auth_providers.py @@ -21,7 +21,7 @@ LDAP_PROVIDER = "ldap_auth_provider.LdapAuthProvider" class PasswordAuthProviderConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.password_providers = [] providers = [] diff --git a/synapse/config/push.py b/synapse/config/push.py index 62c0060c9c..99d15e4461 100644 --- a/synapse/config/push.py +++ b/synapse/config/push.py @@ -18,7 +18,7 @@ from ._base import Config class PushConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): push_config = config.get("push", {}) self.push_include_content = push_config.get("include_content", True) diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py index 5a9adac480..b03047f2b5 100644 --- a/synapse/config/ratelimiting.py +++ b/synapse/config/ratelimiting.py @@ -36,7 +36,7 @@ class FederationRateLimitConfig(object): class RatelimitConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): # Load the new-style messages config if it exists. Otherwise fall back # to the old method. 
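The long run of read_config signature changes above (and below) all follow the same convention: modules that do not need the new path arguments swallow them via **kwargs, while modules that do need them name them explicitly. A minimal sketch, with hypothetical module names:

# Minimal sketch of the new read_config convention; both classes are
# hypothetical stand-ins for the per-module Config subclasses in this diff.
class ExamplePushLikeConfig(object):
    def read_config(self, config, **kwargs):
        # most modules ignore the extra keyword arguments entirely
        self.include_content = config.get("push", {}).get("include_content", True)


class ExampleTlsLikeConfig(object):
    def read_config(self, config, config_dir_path, **kwargs):
        # a module that needs the config directory names it explicitly and
        # still absorbs any other keyword arguments
        self.account_key_file = config.get(
            "account_key_file", config_dir_path + "/client.key"
        )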
diff --git a/synapse/config/registration.py b/synapse/config/registration.py index a1e27ba66c..6d8a2df29b 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -46,7 +46,7 @@ class AccountValidityConfig(Config): class RegistrationConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.enable_registration = bool( strtobool(str(config.get("enable_registration", False))) ) diff --git a/synapse/config/repository.py b/synapse/config/repository.py index 9f9669ebb1..15a19e0911 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -86,7 +86,7 @@ def parse_thumbnail_requirements(thumbnail_sizes): class ContentRepositoryConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.max_upload_size = self.parse_size(config.get("max_upload_size", "10M")) self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M")) self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M")) diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py index c1da0e20e0..24223db7a1 100644 --- a/synapse/config/room_directory.py +++ b/synapse/config/room_directory.py @@ -19,7 +19,7 @@ from ._base import Config, ConfigError class RoomDirectoryConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.enable_room_list_search = config.get("enable_room_list_search", True) alias_creation_rules = config.get("alias_creation_rules") diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py index 2ec38e48e9..d86cf0e6ee 100644 --- a/synapse/config/saml2_config.py +++ b/synapse/config/saml2_config.py @@ -17,7 +17,7 @@ from ._base import Config, ConfigError class SAML2Config(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.saml2_enabled = False saml2_config = config.get("saml2_config") diff --git a/synapse/config/server.py b/synapse/config/server.py index 6d3f1da96c..7cbb699a66 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -40,7 +40,7 @@ DEFAULT_ROOM_VERSION = "4" class ServerConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.server_name = config["server_name"] self.server_context = config.get("server_context", None) @@ -57,7 +57,6 @@ class ServerConfig(Config): self.user_agent_suffix = config.get("user_agent_suffix") self.use_frozen_dicts = config.get("use_frozen_dicts", False) self.public_baseurl = config.get("public_baseurl") - self.cpu_affinity = config.get("cpu_affinity") # Whether to send federation traffic out in this process. This only # applies to some federation traffic, and so shouldn't be used to @@ -83,12 +82,32 @@ class ServerConfig(Config): "require_auth_for_profile_requests", False ) - # If set to 'True', requires authentication to access the server's - # public rooms directory through the client API, and forbids any other - # homeserver to fetch it via federation. - self.restrict_public_rooms_to_local_users = config.get( - "restrict_public_rooms_to_local_users", False - ) + if "restrict_public_rooms_to_local_users" in config and ( + "allow_public_rooms_without_auth" in config + or "allow_public_rooms_over_federation" in config + ): + raise ConfigError( + "Can't use 'restrict_public_rooms_to_local_users' if" + " 'allow_public_rooms_without_auth' and/or" + " 'allow_public_rooms_over_federation' is set." 
+ ) + + # Check if the legacy "restrict_public_rooms_to_local_users" flag is set. This + # flag is now obsolete but we need to check it for backward-compatibility. + if config.get("restrict_public_rooms_to_local_users", False): + self.allow_public_rooms_without_auth = False + self.allow_public_rooms_over_federation = False + else: + # If set to 'False', requires authentication to access the server's public + # rooms directory through the client API. Defaults to 'True'. + self.allow_public_rooms_without_auth = config.get( + "allow_public_rooms_without_auth", True + ) + # If set to 'False', forbids any other homeserver to fetch the server's public + # rooms directory via federation. Defaults to 'True'. + self.allow_public_rooms_over_federation = config.get( + "allow_public_rooms_over_federation", True + ) default_room_version = config.get("default_room_version", DEFAULT_ROOM_VERSION) @@ -336,29 +355,6 @@ class ServerConfig(Config): # pid_file: %(pid_file)s - # CPU affinity mask. Setting this restricts the CPUs on which the - # process will be scheduled. It is represented as a bitmask, with the - # lowest order bit corresponding to the first logical CPU and the - # highest order bit corresponding to the last logical CPU. Not all CPUs - # may exist on a given system but a mask may specify more CPUs than are - # present. - # - # For example: - # 0x00000001 is processor #0, - # 0x00000003 is processors #0 and #1, - # 0xFFFFFFFF is all processors (#0 through #31). - # - # Pinning a Python process to a single CPU is desirable, because Python - # is inherently single-threaded due to the GIL, and can suffer a - # 30-40%% slowdown due to cache blow-out and thread context switching - # if the scheduler happens to schedule the underlying threads across - # different cores. See - # https://www.mirantis.com/blog/improve-performance-python-programs-restricting-single-cpu/. - # - # This setting requires the affinity package to be installed! - # - #cpu_affinity: 0xFFFFFFFF - # The path to the web client which will be served at /_matrix/client/ # if 'webclient' is configured under the 'listeners' configuration. # @@ -390,11 +386,15 @@ class ServerConfig(Config): # #require_auth_for_profile_requests: true - # If set to 'true', requires authentication to access the server's - # public rooms directory through the client API, and forbids any other - # homeserver to fetch it via federation. Defaults to 'false'. + # If set to 'false', requires authentication to access the server's public rooms + # directory through the client API. Defaults to 'true'. + # + #allow_public_rooms_without_auth: false + + # If set to 'false', forbids any other homeserver to fetch the server's public + # rooms directory via federation. Defaults to 'true'. # - #restrict_public_rooms_to_local_users: true + #allow_public_rooms_over_federation: false # The default room version for newly created rooms. 
# diff --git a/synapse/config/server_notices_config.py b/synapse/config/server_notices_config.py index d930eb33b5..05110c17a6 100644 --- a/synapse/config/server_notices_config.py +++ b/synapse/config/server_notices_config.py @@ -66,7 +66,7 @@ class ServerNoticesConfig(Config): self.server_notices_mxid_avatar_url = None self.server_notices_room_name = None - def read_config(self, config): + def read_config(self, config, **kwargs): c = config.get("server_notices") if c is None: return diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py index 1502e9faba..1968003cb3 100644 --- a/synapse/config/spam_checker.py +++ b/synapse/config/spam_checker.py @@ -19,7 +19,7 @@ from ._base import Config class SpamCheckerConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.spam_checker = None provider = config.get("spam_checker", None) diff --git a/synapse/config/stats.py b/synapse/config/stats.py index 80fc1b9dd0..73a87c73f2 100644 --- a/synapse/config/stats.py +++ b/synapse/config/stats.py @@ -25,7 +25,7 @@ class StatsConfig(Config): Configuration for the behaviour of synapse's stats engine """ - def read_config(self, config): + def read_config(self, config, **kwargs): self.stats_enabled = True self.stats_bucket_size = 86400 self.stats_retention = sys.maxsize diff --git a/synapse/config/third_party_event_rules.py b/synapse/config/third_party_event_rules.py index a89dd5f98a..1bedd607b6 100644 --- a/synapse/config/third_party_event_rules.py +++ b/synapse/config/third_party_event_rules.py @@ -19,7 +19,7 @@ from ._base import Config class ThirdPartyRulesConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.third_party_event_rules = None provider = config.get("third_party_event_rules", None) diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 7951bf21fa..9a66e8cc4b 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -33,7 +33,7 @@ logger = logging.getLogger(__name__) class TlsConfig(Config): - def read_config(self, config): + def read_config(self, config, config_dir_path, **kwargs): acme_config = config.get("acme", None) if acme_config is None: @@ -50,6 +50,10 @@ class TlsConfig(Config): self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30) self.acme_domain = acme_config.get("domain", config.get("server_name")) + self.acme_account_key_file = self.abspath( + acme_config.get("account_key_file", config_dir_path + "/client.key") + ) + self.tls_certificate_file = self.abspath(config.get("tls_certificate_path")) self.tls_private_key_file = self.abspath(config.get("tls_private_key_path")) @@ -213,11 +217,12 @@ class TlsConfig(Config): if sha256_fingerprint not in sha256_fingerprints: self.tls_fingerprints.append({"sha256": sha256_fingerprint}) - def default_config(self, config_dir_path, server_name, **kwargs): + def default_config(self, config_dir_path, server_name, data_dir_path, **kwargs): base_key_name = os.path.join(config_dir_path, server_name) tls_certificate_path = base_key_name + ".tls.crt" tls_private_key_path = base_key_name + ".tls.key" + default_acme_account_file = os.path.join(data_dir_path, "acme_account.key") # this is to avoid the max line length. Sorrynotsorry proxypassline = ( @@ -343,6 +348,13 @@ class TlsConfig(Config): # #domain: matrix.example.com + # file to use for the account key. This will be generated if it doesn't + # exist. + # + # If unspecified, we will use CONFDIR/client.key. 
+ # + account_key_file: %(default_acme_account_file)s + # List of allowed TLS fingerprints for this server to publish along # with the signing keys for this server. Other matrix servers that # make HTTPS requests to this server will check that the TLS diff --git a/synapse/config/user_directory.py b/synapse/config/user_directory.py index e031b11599..0665dc3fcf 100644 --- a/synapse/config/user_directory.py +++ b/synapse/config/user_directory.py @@ -21,7 +21,7 @@ class UserDirectoryConfig(Config): Configuration for the behaviour of the /user_directory API """ - def read_config(self, config): + def read_config(self, config, **kwargs): self.user_directory_search_enabled = True self.user_directory_search_all_users = False user_directory_config = config.get("user_directory", None) diff --git a/synapse/config/voip.py b/synapse/config/voip.py index 82cf8c53a8..01e0cb2e28 100644 --- a/synapse/config/voip.py +++ b/synapse/config/voip.py @@ -16,7 +16,7 @@ from ._base import Config class VoipConfig(Config): - def read_config(self, config): + def read_config(self, config, **kwargs): self.turn_uris = config.get("turn_uris", []) self.turn_shared_secret = config.get("turn_shared_secret") self.turn_username = config.get("turn_username") diff --git a/synapse/config/workers.py b/synapse/config/workers.py index 75993abf35..3b75471d85 100644 --- a/synapse/config/workers.py +++ b/synapse/config/workers.py @@ -21,7 +21,7 @@ class WorkerConfig(Config): They have their own pid_file and listener configuration. They use the replication_url to talk to the main synapse process.""" - def read_config(self, config): + def read_config(self, config, **kwargs): self.worker_app = config.get("worker_app") # Canonicalise worker_app so that master always has None @@ -46,7 +46,6 @@ class WorkerConfig(Config): self.worker_name = config.get("worker_name", self.worker_app) self.worker_main_http_uri = config.get("worker_main_http_uri", None) - self.worker_cpu_affinity = config.get("worker_cpu_affinity") # This option is really only here to support `--manhole` command line # argument. 
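The synapse/config/server.py hunk above replaces the single restrict_public_rooms_to_local_users flag with two options while keeping the old flag working for existing configs. Purely as an illustration (resolve_public_rooms_options is a hypothetical helper, not code from this changeset), the resulting precedence can be summarised as:

# Illustrative summary of the backward-compatibility rule introduced in
# synapse/config/server.py above; not actual Synapse code.
def resolve_public_rooms_options(config):
    legacy = "restrict_public_rooms_to_local_users"
    new_keys = (
        "allow_public_rooms_without_auth",
        "allow_public_rooms_over_federation",
    )
    if legacy in config and any(key in config for key in new_keys):
        # mixing the obsolete flag with the new options is a config error
        raise ValueError("Can't use %s together with the new options" % legacy)
    if config.get(legacy, False):
        # the legacy flag disables both behaviours
        return False, False
    return (
        config.get("allow_public_rooms_without_auth", True),
        config.get("allow_public_rooms_over_federation", True),
    )


# an old config that set the legacy flag keeps its restrictive behaviour:
assert resolve_public_rooms_options(
    {"restrict_public_rooms_to_local_users": True}
) == (False, False)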
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index b4854e82f6..955f0f4308 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -721,15 +721,15 @@ class PublicRoomList(BaseFederationServlet): PATH = "/publicRooms" - def __init__(self, handler, authenticator, ratelimiter, server_name, deny_access): + def __init__(self, handler, authenticator, ratelimiter, server_name, allow_access): super(PublicRoomList, self).__init__( handler, authenticator, ratelimiter, server_name ) - self.deny_access = deny_access + self.allow_access = allow_access @defer.inlineCallbacks def on_GET(self, origin, content, query): - if self.deny_access: + if not self.allow_access: raise FederationDeniedError(origin) limit = parse_integer_from_args(query, "limit", 0) @@ -1436,7 +1436,7 @@ def register_servlets(hs, resource, authenticator, ratelimiter, servlet_groups=N authenticator=authenticator, ratelimiter=ratelimiter, server_name=hs.hostname, - deny_access=hs.config.restrict_public_rooms_to_local_users, + allow_access=hs.config.allow_public_rooms_over_federation, ).register(resource) if "group_server" in servlet_groups: diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index 01e0ef408d..fbef2f3d38 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -15,14 +15,9 @@ import logging -import attr -from zope.interface import implementer - import twisted import twisted.internet.error from twisted.internet import defer -from twisted.python.filepath import FilePath -from twisted.python.url import URL from twisted.web import server, static from twisted.web.resource import Resource @@ -30,27 +25,6 @@ from synapse.app import check_bind_error logger = logging.getLogger(__name__) -try: - from txacme.interfaces import ICertificateStore - - @attr.s - @implementer(ICertificateStore) - class ErsatzStore(object): - """ - A store that only stores in memory. 
- """ - - certs = attr.ib(default=attr.Factory(dict)) - - def store(self, server_name, pem_objects): - self.certs[server_name] = [o.as_bytes() for o in pem_objects] - return defer.succeed(None) - - -except ImportError: - # txacme is missing - pass - class AcmeHandler(object): def __init__(self, hs): @@ -60,6 +34,7 @@ class AcmeHandler(object): @defer.inlineCallbacks def start_listening(self): + from synapse.handlers import acme_issuing_service # Configure logging for txacme, if you need to debug # from eliot import add_destinations @@ -67,37 +42,18 @@ class AcmeHandler(object): # # add_destinations(TwistedDestination()) - from txacme.challenges import HTTP01Responder - from txacme.service import AcmeIssuingService - from txacme.endpoint import load_or_create_client_key - from txacme.client import Client - from josepy.jwa import RS256 - - self._store = ErsatzStore() - responder = HTTP01Responder() - - self._issuer = AcmeIssuingService( - cert_store=self._store, - client_creator=( - lambda: Client.from_url( - reactor=self.reactor, - url=URL.from_text(self.hs.config.acme_url), - key=load_or_create_client_key( - FilePath(self.hs.config.config_dir_path) - ), - alg=RS256, - ) - ), - clock=self.reactor, - responders=[responder], + well_known = Resource() + + self._issuer = acme_issuing_service.create_issuing_service( + self.reactor, + acme_url=self.hs.config.acme_url, + account_key_file=self.hs.config.acme_account_key_file, + well_known_resource=well_known, ) - well_known = Resource() - well_known.putChild(b"acme-challenge", responder.resource) responder_resource = Resource() responder_resource.putChild(b".well-known", well_known) responder_resource.putChild(b"check", static.Data(b"OK", b"text/plain")) - srv = server.Site(responder_resource) bind_addresses = self.hs.config.acme_bind_addresses @@ -128,7 +84,7 @@ class AcmeHandler(object): logger.exception("Fail!") raise logger.warning("Reprovisioned %s, saving.", self._acme_domain) - cert_chain = self._store.certs[self._acme_domain] + cert_chain = self._issuer.cert_store.certs[self._acme_domain] try: with open(self.hs.config.tls_private_key_file, "wb") as private_key_file: diff --git a/synapse/handlers/acme_issuing_service.py b/synapse/handlers/acme_issuing_service.py new file mode 100644 index 0000000000..e1d4224e74 --- /dev/null +++ b/synapse/handlers/acme_issuing_service.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Utility function to create an ACME issuing service. + +This file contains the unconditional imports on the acme and cryptography bits that we +only need (and may only have available) if we are doing ACME, so is designed to be +imported conditionally. 
+""" +import logging + +import attr +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization +from josepy import JWKRSA +from josepy.jwa import RS256 +from txacme.challenges import HTTP01Responder +from txacme.client import Client +from txacme.interfaces import ICertificateStore +from txacme.service import AcmeIssuingService +from txacme.util import generate_private_key +from zope.interface import implementer + +from twisted.internet import defer +from twisted.python.filepath import FilePath +from twisted.python.url import URL + +logger = logging.getLogger(__name__) + + +def create_issuing_service(reactor, acme_url, account_key_file, well_known_resource): + """Create an ACME issuing service, and attach it to a web Resource + + Args: + reactor: twisted reactor + acme_url (str): URL to use to request certificates + account_key_file (str): where to store the account key + well_known_resource (twisted.web.IResource): web resource for .well-known. + we will attach a child resource for "acme-challenge". + + Returns: + AcmeIssuingService + """ + responder = HTTP01Responder() + + well_known_resource.putChild(b"acme-challenge", responder.resource) + + store = ErsatzStore() + + return AcmeIssuingService( + cert_store=store, + client_creator=( + lambda: Client.from_url( + reactor=reactor, + url=URL.from_text(acme_url), + key=load_or_create_client_key(account_key_file), + alg=RS256, + ) + ), + clock=reactor, + responders=[responder], + ) + + +@attr.s +@implementer(ICertificateStore) +class ErsatzStore(object): + """ + A store that only stores in memory. + """ + + certs = attr.ib(default=attr.Factory(dict)) + + def store(self, server_name, pem_objects): + self.certs[server_name] = [o.as_bytes() for o in pem_objects] + return defer.succeed(None) + + +def load_or_create_client_key(key_file): + """Load the ACME account key from a file, creating it if it does not exist. 
+ + Args: + key_file (str): name of the file to use as the account key + """ + # this is based on txacme.endpoint.load_or_create_client_key, but doesn't + # hardcode the 'client.key' filename + acme_key_file = FilePath(key_file) + if acme_key_file.exists(): + logger.info("Loading ACME account key from '%s'", acme_key_file) + key = serialization.load_pem_private_key( + acme_key_file.getContent(), password=None, backend=default_backend() + ) + else: + logger.info("Saving new ACME account key to '%s'", acme_key_file) + key = generate_private_key("rsa") + acme_key_file.setContent( + key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + ) + return JWKRSA(key=key) diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 062e026e5f..76ee97ddd3 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -180,9 +180,7 @@ class PaginationHandler(object): room_token = pagin_config.from_token.room_key else: pagin_config.from_token = ( - yield self.hs.get_event_sources().get_current_token_for_room( - room_id=room_id - ) + yield self.hs.get_event_sources().get_current_token_for_pagination() ) room_token = pagin_config.from_token.room_key diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index a028337125..cca7e45ddb 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -311,7 +311,7 @@ class PublicRoomListRestServlet(TransactionRestServlet): # Option to allow servers to require auth when accessing # /publicRooms via CS API. This is especially helpful in private # federations. - if self.hs.config.restrict_public_rooms_to_local_users: + if not self.hs.config.allow_public_rooms_without_auth: raise # We allow people to not be authed if they're just looking at our diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index b862daab24..29589853c6 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -38,6 +38,14 @@ from synapse.util.caches.descriptors import Cache from synapse.util.logcontext import LoggingContext, PreserveLoggingContext from synapse.util.stringutils import exception_to_unicode +# import a function which will return a monotonic time, in seconds +try: + # on python 3, use time.monotonic, since time.clock can go backwards + from time import monotonic as monotonic_time +except ImportError: + # ... 
but python 2 doesn't have it + from time import clock as monotonic_time + logger = logging.getLogger(__name__) try: @@ -349,14 +357,14 @@ class SQLBaseStore(object): ) def start_profiling(self): - self._previous_loop_ts = self._clock.time_msec() + self._previous_loop_ts = monotonic_time() def loop(): curr = self._current_txn_total_time prev = self._previous_txn_total_time self._previous_txn_total_time = curr - time_now = self._clock.time_msec() + time_now = monotonic_time() time_then = self._previous_loop_ts self._previous_loop_ts = time_now @@ -374,7 +382,7 @@ class SQLBaseStore(object): def _new_transaction( self, conn, desc, after_callbacks, exception_callbacks, func, *args, **kwargs ): - start = time.time() + start = monotonic_time() txn_id = self._TXN_ID # We don't really need these to be unique, so lets stop it from @@ -440,7 +448,7 @@ class SQLBaseStore(object): logger.debug("[TXN FAIL] {%s} %s", name, e) raise finally: - end = time.time() + end = monotonic_time() duration = end - start LoggingContext.current_context().add_database_transaction(duration) @@ -514,11 +522,11 @@ class SQLBaseStore(object): ) parent_context = None - start_time = time.time() + start_time = monotonic_time() def inner_func(conn, *args, **kwargs): with LoggingContext("runWithConnection", parent_context) as context: - sched_duration_sec = time.time() - start_time + sched_duration_sec = monotonic_time() - start_time sql_scheduling_timer.observe(sched_duration_sec) context.add_database_scheduled(sched_duration_sec) diff --git a/synapse/streams/events.py b/synapse/streams/events.py index 9b416f2f40..488c49747a 100644 --- a/synapse/streams/events.py +++ b/synapse/streams/events.py @@ -59,21 +59,25 @@ class EventSources(object): defer.returnValue(token) @defer.inlineCallbacks - def get_current_token_for_room(self, room_id): - push_rules_key, _ = self.store.get_push_rules_stream_token() - to_device_key = self.store.get_to_device_stream_token() - device_list_key = self.store.get_device_stream_token() - groups_key = self.store.get_group_stream_token() + def get_current_token_for_pagination(self): + """Get the current token for a given room to be used to paginate + events. + The returned token does not have the current values for fields other + than `room`, since they are not used during pagination. + + Retuns: + Deferred[StreamToken] + """ token = StreamToken( - room_key=(yield self.sources["room"].get_current_key_for_room(room_id)), - presence_key=(yield self.sources["presence"].get_current_key()), - typing_key=(yield self.sources["typing"].get_current_key()), - receipt_key=(yield self.sources["receipt"].get_current_key()), - account_data_key=(yield self.sources["account_data"].get_current_key()), - push_rules_key=push_rules_key, - to_device_key=to_device_key, - device_list_key=device_list_key, - groups_key=groups_key, + room_key=(yield self.sources["room"].get_current_key()), + presence_key=0, + typing_key=0, + receipt_key=0, + account_data_key=0, + push_rules_key=0, + to_device_key=0, + device_list_key=0, + groups_key=0, ) defer.returnValue(token) |
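The new synapse/handlers/acme_issuing_service.py module keeps the txacme/josepy/cryptography imports out of the main code path so that homeservers without those optional dependencies still start; AcmeHandler only imports it from inside start_listening. A rough usage sketch under that assumption (start_acme_if_enabled is a hypothetical helper, not part of the changeset):

# Hypothetical helper showing the deferred-import pattern used above: the
# heavyweight ACME dependencies are only imported once ACME is actually
# enabled in the config.
def start_acme_if_enabled(config, reactor, well_known_resource):
    if not getattr(config, "acme_enabled", False):
        return None
    # deferred import: pulls in txacme, josepy and cryptography
    from synapse.handlers import acme_issuing_service
    return acme_issuing_service.create_issuing_service(
        reactor,
        acme_url=config.acme_url,
        account_key_file=config.acme_account_key_file,
        well_known_resource=well_known_resource,
    )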