diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index df4c2d4c97..d50a9840d4 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -19,7 +19,6 @@ import signal
import sys
import traceback
-import psutil
from daemonize import Daemonize
from twisted.internet import defer, error, reactor
@@ -68,21 +67,13 @@ def start_worker_reactor(appname, config):
gc_thresholds=config.gc_thresholds,
pid_file=config.worker_pid_file,
daemonize=config.worker_daemonize,
- cpu_affinity=config.worker_cpu_affinity,
print_pidfile=config.print_pidfile,
logger=logger,
)
def start_reactor(
- appname,
- soft_file_limit,
- gc_thresholds,
- pid_file,
- daemonize,
- cpu_affinity,
- print_pidfile,
- logger,
+ appname, soft_file_limit, gc_thresholds, pid_file, daemonize, print_pidfile, logger
):
""" Run the reactor in the main process
@@ -95,7 +86,6 @@ def start_reactor(
gc_thresholds:
pid_file (str): name of pid file to write to if daemonize is True
daemonize (bool): true to run the reactor in a background process
- cpu_affinity (int|None): cpu affinity mask
print_pidfile (bool): whether to print the pid file, if daemonize is True
logger (logging.Logger): logger instance to pass to Daemonize
"""
@@ -109,20 +99,6 @@ def start_reactor(
# between the sentinel and `run` logcontexts.
with PreserveLoggingContext():
logger.info("Running")
- if cpu_affinity is not None:
- # Turn the bitmask into bits, reverse it so we go from 0 up
- mask_to_bits = bin(cpu_affinity)[2:][::-1]
-
- cpus = []
- cpu_num = 0
-
- for i in mask_to_bits:
- if i == "1":
- cpus.append(cpu_num)
- cpu_num += 1
-
- p = psutil.Process()
- p.cpu_affinity(cpus)
change_resource_limit(soft_file_limit)
if gc_thresholds:
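The removed block above decoded a CPU-affinity bitmask into a list of CPU indices and applied it with psutil, which is also dropped from the imports. For anyone who relied on this, a minimal standalone sketch of the same decoding (assuming psutil is installed separately and a platform where `Process.cpu_affinity` is available, e.g. Linux):

```python
import psutil

def set_affinity_from_mask(cpu_affinity):
    # Turn the bitmask into bits, reversed so index 0 is CPU #0
    cpus = [n for n, bit in enumerate(bin(cpu_affinity)[2:][::-1]) if bit == "1"]
    psutil.Process().cpu_affinity(cpus)

set_affinity_from_mask(0x3)  # pin to processors #0 and #1
```

The same effect is available without any Synapse support at all, e.g. by launching the process under `taskset 0x3 ...` on Linux.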
diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py
index 310cdab2e4..90bc79cdda 100644
--- a/synapse/app/client_reader.py
+++ b/synapse/app/client_reader.py
@@ -37,6 +37,7 @@ from synapse.replication.slave.storage.deviceinbox import SlavedDeviceInboxStore
from synapse.replication.slave.storage.devices import SlavedDeviceStore
from synapse.replication.slave.storage.directory import DirectoryStore
from synapse.replication.slave.storage.events import SlavedEventStore
+from synapse.replication.slave.storage.groups import SlavedGroupServerStore
from synapse.replication.slave.storage.keys import SlavedKeyStore
from synapse.replication.slave.storage.profile import SlavedProfileStore
from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
@@ -52,6 +53,7 @@ from synapse.rest.client.v1.room import (
PublicRoomListRestServlet,
RoomEventContextServlet,
RoomMemberListRestServlet,
+ RoomMessageListRestServlet,
RoomStateRestServlet,
)
from synapse.rest.client.v1.voip import VoipRestServlet
@@ -74,6 +76,7 @@ class ClientReaderSlavedStore(
SlavedDeviceStore,
SlavedReceiptsStore,
SlavedPushRuleStore,
+ SlavedGroupServerStore,
SlavedAccountDataStore,
SlavedEventStore,
SlavedKeyStore,
@@ -109,6 +112,7 @@ class ClientReaderServer(HomeServer):
JoinedRoomMemberListRestServlet(self).register(resource)
RoomStateRestServlet(self).register(resource)
RoomEventContextServlet(self).register(resource)
+ RoomMessageListRestServlet(self).register(resource)
RegisterRestServlet(self).register(resource)
LoginRestServlet(self).register(resource)
ThreepidRestServlet(self).register(resource)
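With `RoomMessageListRestServlet` registered here, a client_reader worker can now serve `/messages` pagination itself. A hypothetical smoke test against a worker listening on port 8081 (host, port, room ID, and token are all placeholders):

```python
import requests

resp = requests.get(
    "http://localhost:8081/_matrix/client/r0/rooms/!room:example.com/messages",
    params={"from": "t1-token", "dir": "b", "limit": 10},
    headers={"Authorization": "Bearer <access_token>"},
)
print(resp.status_code, resp.json())
```

(The `SlavedGroupServerStore` mixin added above is separate plumbing: it gives the worker read access to replicated group data.)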
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index d19c7c7d71..49da105cf6 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -641,7 +641,6 @@ def run(hs):
gc_thresholds=hs.config.gc_thresholds,
pid_file=hs.config.pid_file,
daemonize=hs.config.daemonize,
- cpu_affinity=hs.config.cpu_affinity,
print_pidfile=hs.config.print_pidfile,
logger=logger,
)
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index 8284aa4c6d..965478d8d5 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2017-2018 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -134,11 +136,6 @@ class Config(object):
with open(file_path) as file_stream:
return file_stream.read()
- @staticmethod
- def read_config_file(file_path):
- with open(file_path) as file_stream:
- return yaml.safe_load(file_stream)
-
def invoke_all(self, name, *args, **kargs):
results = []
for cls in type(self).mro():
@@ -153,12 +150,12 @@ class Config(object):
server_name,
generate_secrets=False,
report_stats=None,
+ open_private_ports=False,
):
"""Build a default configuration file
- This is used both when the user explicitly asks us to generate a config file
- (eg with --generate_config), and before loading the config at runtime (to give
- a base which the config files override)
+ This is used when the user explicitly asks us to generate a config file
+ (eg with --generate_config).
Args:
config_dir_path (str): The path where the config files are kept. Used to
@@ -177,25 +174,33 @@ class Config(object):
report_stats (bool|None): Initial setting for the report_stats setting.
If None, report_stats will be left unset.
+ open_private_ports (bool): True to leave private ports (such as the non-TLS
+ HTTP listener) open to the internet.
+
Returns:
str: the yaml config file
"""
- default_config = "\n\n".join(
+ return "\n\n".join(
dedent(conf)
for conf in self.invoke_all(
- "default_config",
+ "generate_config_section",
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
server_name=server_name,
generate_secrets=generate_secrets,
report_stats=report_stats,
+ open_private_ports=open_private_ports,
)
)
- return default_config
-
@classmethod
def load_config(cls, description, argv):
+ """Parse the commandline and config files
+
+ Doesn't support config-file-generation: used by the worker apps.
+
+ Returns: Config object.
+ """
config_parser = argparse.ArgumentParser(description=description)
config_parser.add_argument(
"-c",
@@ -210,7 +215,7 @@ class Config(object):
"--keys-directory",
metavar="DIRECTORY",
help="Where files such as certs and signing keys are stored when"
- " their location is given explicitly in the config."
+ " their location is not given explicitly in the config."
" Defaults to the directory containing the last config file",
)
@@ -222,8 +227,19 @@ class Config(object):
config_files = find_config_files(search_paths=config_args.config_path)
- obj.read_config_files(
- config_files, keys_directory=config_args.keys_directory, generate_keys=False
+ if not config_files:
+ config_parser.error("Must supply a config file.")
+
+ if config_args.keys_directory:
+ config_dir_path = config_args.keys_directory
+ else:
+ config_dir_path = os.path.dirname(config_files[-1])
+ config_dir_path = os.path.abspath(config_dir_path)
+ data_dir_path = os.getcwd()
+
+ config_dict = read_config_files(config_files)
+ obj.parse_config_dict(
+ config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
)
obj.invoke_all("read_arguments", config_args)
@@ -232,6 +248,12 @@ class Config(object):
@classmethod
def load_or_generate_config(cls, description, argv):
+ """Parse the commandline and config files
+
+ Supports generation of config files, so is used for the main homeserver app.
+
+ Returns: Config object, or None if --generate-config or --generate-keys was set
+ """
config_parser = argparse.ArgumentParser(add_help=False)
config_parser.add_argument(
"-c",
@@ -241,37 +263,74 @@ class Config(object):
help="Specify config file. Can be given multiple times and"
" may specify directories containing *.yaml files.",
)
- config_parser.add_argument(
+
+ generate_group = config_parser.add_argument_group("Config generation")
+ generate_group.add_argument(
"--generate-config",
action="store_true",
- help="Generate a config file for the server name",
+ help="Generate a config file, then exit.",
)
- config_parser.add_argument(
+ generate_group.add_argument(
+ "--generate-missing-configs",
+ "--generate-keys",
+ action="store_true",
+ help="Generate any missing additional config files, then exit.",
+ )
+ generate_group.add_argument(
+ "-H", "--server-name", help="The server name to generate a config file for."
+ )
+ generate_group.add_argument(
"--report-stats",
action="store",
- help="Whether the generated config reports anonymized usage statistics",
+ help="Whether the generated config reports anonymized usage statistics.",
choices=["yes", "no"],
)
- config_parser.add_argument(
- "--generate-keys",
- action="store_true",
- help="Generate any missing key files then exit",
- )
- config_parser.add_argument(
+ generate_group.add_argument(
+ "--config-directory",
"--keys-directory",
metavar="DIRECTORY",
- help="Used with 'generate-*' options to specify where files such as"
- " signing keys should be stored, unless explicitly"
- " specified in the config.",
+ help=(
+ "Specify where additional config files such as signing keys and log"
+ " config should be stored. Defaults to the same directory as the last"
+ " config file."
+ ),
)
- config_parser.add_argument(
- "-H", "--server-name", help="The server name to generate a config file for"
+ generate_group.add_argument(
+ "--data-directory",
+ metavar="DIRECTORY",
+ help=(
+ "Specify where data such as the media store and database file should be"
+ " stored. Defaults to the current working directory."
+ ),
+ )
+ generate_group.add_argument(
+ "--open-private-ports",
+ action="store_true",
+ help=(
+ "Leave private ports (such as the non-TLS HTTP listener) open to the"
+ " internet. Do not use this unless you know what you are doing."
+ ),
)
+
config_args, remaining_args = config_parser.parse_known_args(argv)
config_files = find_config_files(search_paths=config_args.config_path)
- generate_keys = config_args.generate_keys
+ if not config_files:
+ config_parser.error(
+ "Must supply a config file.\nA config file can be automatically"
+ ' generated using "--generate-config -H SERVER_NAME'
+ ' -c CONFIG-FILE"'
+ )
+
+ if config_args.config_directory:
+ config_dir_path = config_args.config_directory
+ else:
+ config_dir_path = os.path.dirname(config_files[-1])
+ config_dir_path = os.path.abspath(config_dir_path)
+ data_dir_path = os.getcwd()
+
+ generate_missing_configs = config_args.generate_missing_configs
obj = cls()
@@ -281,19 +340,16 @@ class Config(object):
"Please specify either --report-stats=yes or --report-stats=no\n\n"
+ MISSING_REPORT_STATS_SPIEL
)
- if not config_files:
- config_parser.error(
- "Must supply a config file.\nA config file can be automatically"
- ' generated using "--generate-config -H SERVER_NAME'
- ' -c CONFIG-FILE"'
- )
+
(config_path,) = config_files
if not cls.path_exists(config_path):
- if config_args.keys_directory:
- config_dir_path = config_args.keys_directory
+ print("Generating config file %s" % (config_path,))
+
+ if config_args.data_directory:
+ data_dir_path = config_args.data_directory
else:
- config_dir_path = os.path.dirname(config_path)
- config_dir_path = os.path.abspath(config_dir_path)
+ data_dir_path = os.getcwd()
+ data_dir_path = os.path.abspath(data_dir_path)
server_name = config_args.server_name
if not server_name:
@@ -304,10 +360,11 @@ class Config(object):
config_str = obj.generate_config(
config_dir_path=config_dir_path,
- data_dir_path=os.getcwd(),
+ data_dir_path=data_dir_path,
server_name=server_name,
report_stats=(config_args.report_stats == "yes"),
generate_secrets=True,
+ open_private_ports=config_args.open_private_ports,
)
if not cls.path_exists(config_dir_path):
@@ -316,8 +373,8 @@ class Config(object):
config_file.write("# vim:ft=yaml\n\n")
config_file.write(config_str)
- config = yaml.safe_load(config_str)
- obj.invoke_all("generate_files", config)
+ config_dict = yaml.safe_load(config_str)
+ obj.generate_missing_files(config_dict, config_dir_path)
print(
(
@@ -331,12 +388,12 @@ class Config(object):
else:
print(
(
- "Config file %r already exists. Generating any missing key"
+ "Config file %r already exists. Generating any missing config"
" files."
)
% (config_path,)
)
- generate_keys = True
+ generate_missing_configs = True
parser = argparse.ArgumentParser(
parents=[config_parser],
@@ -347,66 +404,63 @@ class Config(object):
obj.invoke_all("add_arguments", parser)
args = parser.parse_args(remaining_args)
- if not config_files:
- config_parser.error(
- "Must supply a config file.\nA config file can be automatically"
- ' generated using "--generate-config -H SERVER_NAME'
- ' -c CONFIG-FILE"'
- )
-
- obj.read_config_files(
- config_files,
- keys_directory=config_args.keys_directory,
- generate_keys=generate_keys,
- )
-
- if generate_keys:
+ config_dict = read_config_files(config_files)
+ if generate_missing_configs:
+ obj.generate_missing_files(config_dict, config_dir_path)
return None
+ obj.parse_config_dict(
+ config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
+ )
obj.invoke_all("read_arguments", args)
return obj
- def read_config_files(self, config_files, keys_directory=None, generate_keys=False):
- if not keys_directory:
- keys_directory = os.path.dirname(config_files[-1])
-
- self.config_dir_path = os.path.abspath(keys_directory)
+ def parse_config_dict(self, config_dict, config_dir_path, data_dir_path):
+ """Read the information from the config dict into this Config object.
- specified_config = {}
- for config_file in config_files:
- yaml_config = self.read_config_file(config_file)
- specified_config.update(yaml_config)
+ Args:
+ config_dict (dict): Configuration data, as read from the yaml
- if "server_name" not in specified_config:
- raise ConfigError(MISSING_SERVER_NAME)
+ config_dir_path (str): The path where the config files are kept. Used to
+ create filenames for things like the log config and the signing key.
- server_name = specified_config["server_name"]
- config_string = self.generate_config(
- config_dir_path=self.config_dir_path,
- data_dir_path=os.getcwd(),
- server_name=server_name,
- generate_secrets=False,
+ data_dir_path (str): The path where the data files are kept. Used to create
+ filenames for things like the database and media store.
+ """
+ self.invoke_all(
+ "read_config",
+ config_dict,
+ config_dir_path=config_dir_path,
+ data_dir_path=data_dir_path,
)
- config = yaml.safe_load(config_string)
- config.pop("log_config")
- config.update(specified_config)
- if "report_stats" not in config:
- raise ConfigError(
- MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS
- + "\n"
- + MISSING_REPORT_STATS_SPIEL
- )
+ def generate_missing_files(self, config_dict, config_dir_path):
+ self.invoke_all("generate_files", config_dict, config_dir_path)
- if generate_keys:
- self.invoke_all("generate_files", config)
- return
- self.parse_config_dict(config)
+def read_config_files(config_files):
+ """Read the config files into a dict
- def parse_config_dict(self, config_dict):
- self.invoke_all("read_config", config_dict)
+ Args:
+ config_files (iterable[str]): A list of the config files to read
+
+ Returns: dict
+ """
+ specified_config = {}
+ for config_file in config_files:
+ with open(config_file) as file_stream:
+ yaml_config = yaml.safe_load(file_stream)
+ specified_config.update(yaml_config)
+
+ if "server_name" not in specified_config:
+ raise ConfigError(MISSING_SERVER_NAME)
+
+ if "report_stats" not in specified_config:
+ raise ConfigError(
+ MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS + "\n" + MISSING_REPORT_STATS_SPIEL
+ )
+ return specified_config
def find_config_files(search_paths):
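Taken together, the two entry points now look like this from calling code; a sketch using the concrete `HomeServerConfig` subclass (file paths are illustrative):

```python
from synapse.config.homeserver import HomeServerConfig

# Worker-style loading: no generation support, a config file is required.
config = HomeServerConfig.load_config(
    "Synapse worker", ["-c", "/etc/synapse/homeserver.yaml"]
)

# Homeserver-style loading: with --generate-config, the file is written
# (along with any missing secondary files) and None is returned.
maybe_config = HomeServerConfig.load_or_generate_config(
    "Synapse Homeserver",
    [
        "--generate-config",
        "-H", "example.com",
        "--report-stats", "no",
        "-c", "/etc/synapse/homeserver.yaml",
    ],
)
assert maybe_config is None  # per the docstring: None when generating
```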
diff --git a/synapse/config/api.py b/synapse/config/api.py
index 23b0ea6962..dddea79a8a 100644
--- a/synapse/config/api.py
+++ b/synapse/config/api.py
@@ -18,7 +18,7 @@ from ._base import Config
class ApiConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.room_invite_state_types = config.get(
"room_invite_state_types",
[
@@ -30,7 +30,7 @@ class ApiConfig(Config):
],
)
- def default_config(cls, **kwargs):
+ def generate_config_section(cls, **kwargs):
return """\
## API Configuration ##
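The same two renames repeat mechanically across every config module in the rest of this diff: `read_config` gains a `**kwargs` parameter (so that `invoke_all` can pass `config_dir_path` and `data_dir_path` to every module uniformly, whether or not a given module wants them), and `default_config` becomes `generate_config_section`. A minimal sketch of a module after the change (`ExampleConfig` is hypothetical, not part of this diff):

```python
from synapse.config._base import Config

class ExampleConfig(Config):
    def read_config(self, config, **kwargs):
        # **kwargs swallows config_dir_path/data_dir_path when unused
        self.example_option = config.get("example_option", True)

    def generate_config_section(self, **kwargs):
        # only called when generating a fresh config file
        return """\
        # Uncomment to disable the example behaviour
        #
        #example_option: false
        """
```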
diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py
index 679ee62480..8387ff6805 100644
--- a/synapse/config/appservice.py
+++ b/synapse/config/appservice.py
@@ -29,12 +29,12 @@ logger = logging.getLogger(__name__)
class AppServiceConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.app_service_config_files = config.get("app_service_config_files", [])
self.notify_appservices = config.get("notify_appservices", True)
self.track_appservice_user_ips = config.get("track_appservice_user_ips", False)
- def default_config(cls, **kwargs):
+ def generate_config_section(cls, **kwargs):
return """\
# A list of application service config files to use
#
diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py
index e2eb473a92..8dac8152cf 100644
--- a/synapse/config/captcha.py
+++ b/synapse/config/captcha.py
@@ -16,7 +16,7 @@ from ._base import Config
class CaptchaConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.recaptcha_private_key = config.get("recaptcha_private_key")
self.recaptcha_public_key = config.get("recaptcha_public_key")
self.enable_registration_captcha = config.get(
@@ -28,7 +28,7 @@ class CaptchaConfig(Config):
"https://www.recaptcha.net/recaptcha/api/siteverify",
)
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
## Captcha ##
# See docs/CAPTCHA_SETUP for full details of configuring this.
diff --git a/synapse/config/cas.py b/synapse/config/cas.py
index 609c0815c8..ebe34d933b 100644
--- a/synapse/config/cas.py
+++ b/synapse/config/cas.py
@@ -22,7 +22,7 @@ class CasConfig(Config):
cas_server_url: URL of CAS server
"""
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
cas_config = config.get("cas_config", None)
if cas_config:
self.cas_enabled = cas_config.get("enabled", True)
@@ -35,7 +35,7 @@ class CasConfig(Config):
self.cas_service_url = None
self.cas_required_attributes = {}
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """
# Enable CAS for registration and login.
#
diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py
index 5b0bf919c7..94916f3a49 100644
--- a/synapse/config/consent_config.py
+++ b/synapse/config/consent_config.py
@@ -84,7 +84,7 @@ class ConsentConfig(Config):
self.user_consent_at_registration = False
self.user_consent_policy_name = "Privacy Policy"
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
consent_config = config.get("user_consent")
if consent_config is None:
return
@@ -111,5 +111,5 @@ class ConsentConfig(Config):
"policy_name", "Privacy Policy"
)
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return DEFAULT_CONFIG
diff --git a/synapse/config/database.py b/synapse/config/database.py
index adc0a47ddf..bcb2089dd7 100644
--- a/synapse/config/database.py
+++ b/synapse/config/database.py
@@ -18,7 +18,7 @@ from ._base import Config
class DatabaseConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.event_cache_size = self.parse_size(config.get("event_cache_size", "10K"))
self.database_config = config.get("database")
@@ -38,7 +38,7 @@ class DatabaseConfig(Config):
self.set_databasepath(config.get("database_path"))
- def default_config(self, data_dir_path, **kwargs):
+ def generate_config_section(self, data_dir_path, **kwargs):
database_path = os.path.join(data_dir_path, "homeserver.db")
return (
"""\
diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py
index 3a6cb07206..fcd55d3e3d 100644
--- a/synapse/config/emailconfig.py
+++ b/synapse/config/emailconfig.py
@@ -27,7 +27,7 @@ from ._base import Config, ConfigError
class EmailConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
# TODO: We should separate better the email configuration from the notification
# and account validity config.
@@ -214,7 +214,7 @@ class EmailConfig(Config):
if not os.path.isfile(p):
raise ConfigError("Unable to find email template file %s" % (p,))
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """
# Enable sending emails for password resets, notification events or
# account expiry notices
@@ -233,11 +233,13 @@ class EmailConfig(Config):
# app_name: Matrix
#
# # Enable email notifications by default
+ # #
# notif_for_new_users: True
#
# # Defining a custom URL for Riot is only needed if email notifications
# # should contain links to a self-hosted installation of Riot; when set
# # the "app_name" setting is ignored
+ # #
# riot_base_url: "http://localhost/riot"
#
# # Enable sending password reset emails via the configured, trusted
@@ -250,16 +252,22 @@ class EmailConfig(Config):
# #
# # If this option is set to false and SMTP options have not been
# # configured, resetting user passwords via email will be disabled
+ # #
# #trust_identity_server_for_password_resets: false
#
# # Configure the time that a validation email or text message code
# # will expire after sending
# #
# # This is currently used for password resets
+ # #
# #validation_token_lifetime: 1h
#
# # Template directory. All template files should be stored within this
- # # directory
+ # # directory. If not set, default templates from within the Synapse
+ # # package will be used
+ # #
+ # # For the list of default templates, please see
+ # # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
# #
# #template_dir: res/templates
#
diff --git a/synapse/config/groups.py b/synapse/config/groups.py
index e4be172a79..2a522b5f44 100644
--- a/synapse/config/groups.py
+++ b/synapse/config/groups.py
@@ -17,11 +17,11 @@ from ._base import Config
class GroupsConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.enable_group_creation = config.get("enable_group_creation", False)
self.group_creation_prefix = config.get("group_creation_prefix", "")
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
# Uncomment to allow non-server-admin users to create groups on this server
#
diff --git a/synapse/config/jwt_config.py b/synapse/config/jwt_config.py
index b190dcbe38..36d87cef03 100644
--- a/synapse/config/jwt_config.py
+++ b/synapse/config/jwt_config.py
@@ -23,7 +23,7 @@ MISSING_JWT = """Missing jwt library. This is required for jwt login.
class JWTConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
jwt_config = config.get("jwt_config", None)
if jwt_config:
self.jwt_enabled = jwt_config.get("enabled", False)
@@ -41,7 +41,7 @@ class JWTConfig(Config):
self.jwt_secret = None
self.jwt_algorithm = None
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
# The JWT needs to contain a globally unique "sub" (subject) claim.
#
diff --git a/synapse/config/key.py b/synapse/config/key.py
index 94a0f47ea4..8fc74f9cdf 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -65,13 +65,18 @@ class TrustedKeyServer(object):
class KeyConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, config_dir_path, **kwargs):
# the signing key can be specified inline or in a separate file
if "signing_key" in config:
self.signing_key = read_signing_keys([config["signing_key"]])
else:
- self.signing_key_path = config["signing_key_path"]
- self.signing_key = self.read_signing_key(self.signing_key_path)
+ signing_key_path = config.get("signing_key_path")
+ if signing_key_path is None:
+ signing_key_path = os.path.join(
+ config_dir_path, config["server_name"] + ".signing.key"
+ )
+
+ self.signing_key = self.read_signing_key(signing_key_path)
self.old_signing_keys = self.read_old_signing_keys(
config.get("old_signing_keys", {})
@@ -117,7 +122,7 @@ class KeyConfig(Config):
# falsification of values
self.form_secret = config.get("form_secret", None)
- def default_config(
+ def generate_config_section(
self, config_dir_path, server_name, generate_secrets=False, **kwargs
):
base_key_name = os.path.join(config_dir_path, server_name)
@@ -237,10 +242,18 @@ class KeyConfig(Config):
)
return keys
- def generate_files(self, config):
- signing_key_path = config["signing_key_path"]
+ def generate_files(self, config, config_dir_path):
+ if "signing_key" in config:
+ return
+
+ signing_key_path = config.get("signing_key_path")
+ if signing_key_path is None:
+ signing_key_path = os.path.join(
+ config_dir_path, config["server_name"] + ".signing.key"
+ )
if not self.path_exists(signing_key_path):
+ print("Generating signing key file %s" % (signing_key_path,))
with open(signing_key_path, "w") as signing_key_file:
key_id = "a_" + random_string(4)
write_signing_keys(signing_key_file, (generate_signing_key(key_id),))
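For reference, the signing-key generation that `generate_files` performs can be reproduced with the signedjson helpers this module already imports; a standalone sketch (the path is illustrative):

```python
from signedjson.key import generate_signing_key, write_signing_keys

from synapse.util.stringutils import random_string

signing_key_path = "/etc/synapse/example.com.signing.key"
key_id = "a_" + random_string(4)  # same scheme as generate_files above
with open(signing_key_path, "w") as signing_key_file:
    write_signing_keys(signing_key_file, (generate_signing_key(key_id),))
```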
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index a22655b125..931aec41c0 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -74,13 +74,13 @@ root:
class LoggingConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.verbosity = config.get("verbose", 0)
self.no_redirect_stdio = config.get("no_redirect_stdio", False)
self.log_config = self.abspath(config.get("log_config"))
self.log_file = self.abspath(config.get("log_file"))
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
log_config = os.path.join(config_dir_path, server_name + ".log.config")
return (
"""\
@@ -133,10 +133,14 @@ class LoggingConfig(Config):
help="Do not redirect stdout/stderr to the log",
)
- def generate_files(self, config):
+ def generate_files(self, config, config_dir_path):
log_config = config.get("log_config")
if log_config and not os.path.exists(log_config):
log_file = self.abspath("homeserver.log")
+ print(
+ "Generating log config file %s which will log to %s"
+ % (log_config, log_file)
+ )
with open(log_config, "w") as log_config_file:
log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file))
diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py
index c85e234d22..3698441963 100644
--- a/synapse/config/metrics.py
+++ b/synapse/config/metrics.py
@@ -21,7 +21,7 @@ MISSING_SENTRY = """Missing sentry-sdk library. This is required to enable sentr
class MetricsConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.enable_metrics = config.get("enable_metrics", False)
self.report_stats = config.get("report_stats", None)
self.metrics_port = config.get("metrics_port")
@@ -40,7 +40,7 @@ class MetricsConfig(Config):
"sentry.dsn field is required when sentry integration is enabled"
)
- def default_config(self, report_stats=None, **kwargs):
+ def generate_config_section(self, report_stats=None, **kwargs):
res = """\
## Metrics ###
diff --git a/synapse/config/password.py b/synapse/config/password.py
index eea59e772b..d5b5953f2f 100644
--- a/synapse/config/password.py
+++ b/synapse/config/password.py
@@ -20,21 +20,28 @@ class PasswordConfig(Config):
"""Password login configuration
"""
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
password_config = config.get("password_config", {})
if password_config is None:
password_config = {}
self.password_enabled = password_config.get("enabled", True)
+ self.password_localdb_enabled = password_config.get("localdb_enabled", True)
self.password_pepper = password_config.get("pepper", "")
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """\
password_config:
# Uncomment to disable password login
#
#enabled: false
+ # Uncomment to disable authentication against the local password
+ # database. This is ignored if `enabled` is false, and is only useful
+ # if you have other password_providers.
+ #
+ #localdb_enabled: false
+
# Uncomment and change to a secret random string for extra security.
# DO NOT CHANGE THIS AFTER INITIAL SETUP!
#
diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py
index fcf279e8e1..788c39c9fb 100644
--- a/synapse/config/password_auth_providers.py
+++ b/synapse/config/password_auth_providers.py
@@ -21,7 +21,7 @@ LDAP_PROVIDER = "ldap_auth_provider.LdapAuthProvider"
class PasswordAuthProviderConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.password_providers = []
providers = []
@@ -46,7 +46,7 @@ class PasswordAuthProviderConfig(Config):
self.password_providers.append((provider_class, provider_config))
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
#password_providers:
# - module: "ldap_auth_provider.LdapAuthProvider"
diff --git a/synapse/config/push.py b/synapse/config/push.py
index 62c0060c9c..1b932722a5 100644
--- a/synapse/config/push.py
+++ b/synapse/config/push.py
@@ -18,7 +18,7 @@ from ._base import Config
class PushConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
push_config = config.get("push", {})
self.push_include_content = push_config.get("include_content", True)
@@ -42,7 +42,7 @@ class PushConfig(Config):
)
self.push_include_content = not redact_content
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """
# Clients requesting push notifications can either have the body of
# the message sent in the notification poke along with other details
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 5a9adac480..8c587f3fd2 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -36,7 +36,7 @@ class FederationRateLimitConfig(object):
class RatelimitConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
# Load the new-style messages config if it exists. Otherwise fall back
# to the old method.
@@ -80,7 +80,7 @@ class RatelimitConfig(Config):
"federation_rr_transactions_per_room_per_second", 50
)
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
## Ratelimiting ##
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index a1e27ba66c..4a59e6ec90 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -46,7 +46,7 @@ class AccountValidityConfig(Config):
class RegistrationConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.enable_registration = bool(
strtobool(str(config.get("enable_registration", False)))
)
@@ -85,7 +85,7 @@ class RegistrationConfig(Config):
"disable_msisdn_registration", False
)
- def default_config(self, generate_secrets=False, **kwargs):
+ def generate_config_section(self, generate_secrets=False, **kwargs):
if generate_secrets:
registration_shared_secret = 'registration_shared_secret: "%s"' % (
random_string_with_symbols(50),
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 9f9669ebb1..80a628d9b0 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -86,12 +86,14 @@ def parse_thumbnail_requirements(thumbnail_sizes):
class ContentRepositoryConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.max_upload_size = self.parse_size(config.get("max_upload_size", "10M"))
self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M"))
self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M"))
- self.media_store_path = self.ensure_directory(config["media_store_path"])
+ self.media_store_path = self.ensure_directory(
+ config.get("media_store_path", "media_store")
+ )
backup_media_store_path = config.get("backup_media_store_path")
@@ -148,7 +150,7 @@ class ContentRepositoryConfig(Config):
(provider_class, parsed_config, wrapper_config)
)
- self.uploads_path = self.ensure_directory(config["uploads_path"])
+ self.uploads_path = self.ensure_directory(config.get("uploads_path", "uploads"))
self.dynamic_thumbnails = config.get("dynamic_thumbnails", False)
self.thumbnail_requirements = parse_thumbnail_requirements(
config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES)
@@ -188,7 +190,7 @@ class ContentRepositoryConfig(Config):
self.url_preview_url_blacklist = config.get("url_preview_url_blacklist", ())
- def default_config(self, data_dir_path, **kwargs):
+ def generate_config_section(self, data_dir_path, **kwargs):
media_store = os.path.join(data_dir_path, "media_store")
uploads_path = os.path.join(data_dir_path, "uploads")
diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py
index c1da0e20e0..a92693017b 100644
--- a/synapse/config/room_directory.py
+++ b/synapse/config/room_directory.py
@@ -19,7 +19,7 @@ from ._base import Config, ConfigError
class RoomDirectoryConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.enable_room_list_search = config.get("enable_room_list_search", True)
alias_creation_rules = config.get("alias_creation_rules")
@@ -46,7 +46,7 @@ class RoomDirectoryConfig(Config):
_RoomDirectoryRule("room_list_publication_rules", {"action": "allow"})
]
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """
# Uncomment to disable searching the public room list. When disabled
# blocks searching local and remote room lists for local and remote
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index 2ec38e48e9..872a1ba934 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -17,7 +17,7 @@ from ._base import Config, ConfigError
class SAML2Config(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.saml2_enabled = False
saml2_config = config.get("saml2_config")
@@ -61,7 +61,7 @@ class SAML2Config(Config):
},
}
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """\
# Enable SAML2 for registration and login. Uses pysaml2.
#
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 6d3f1da96c..2a74dea2ea 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -40,7 +40,7 @@ DEFAULT_ROOM_VERSION = "4"
class ServerConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.server_name = config["server_name"]
self.server_context = config.get("server_context", None)
@@ -57,7 +57,6 @@ class ServerConfig(Config):
self.user_agent_suffix = config.get("user_agent_suffix")
self.use_frozen_dicts = config.get("use_frozen_dicts", False)
self.public_baseurl = config.get("public_baseurl")
- self.cpu_affinity = config.get("cpu_affinity")
# Whether to send federation traffic out in this process. This only
# applies to some federation traffic, and so shouldn't be used to
@@ -83,12 +82,32 @@ class ServerConfig(Config):
"require_auth_for_profile_requests", False
)
- # If set to 'True', requires authentication to access the server's
- # public rooms directory through the client API, and forbids any other
- # homeserver to fetch it via federation.
- self.restrict_public_rooms_to_local_users = config.get(
- "restrict_public_rooms_to_local_users", False
- )
+ if "restrict_public_rooms_to_local_users" in config and (
+ "allow_public_rooms_without_auth" in config
+ or "allow_public_rooms_over_federation" in config
+ ):
+ raise ConfigError(
+ "Can't use 'restrict_public_rooms_to_local_users' if"
+ " 'allow_public_rooms_without_auth' and/or"
+ " 'allow_public_rooms_over_federation' is set."
+ )
+
+ # Check if the legacy "restrict_public_rooms_to_local_users" flag is set. This
+ # flag is now obsolete but we need to check it for backward-compatibility.
+ if config.get("restrict_public_rooms_to_local_users", False):
+ self.allow_public_rooms_without_auth = False
+ self.allow_public_rooms_over_federation = False
+ else:
+ # If set to 'False', requires authentication to access the server's public
+ # rooms directory through the client API. Defaults to 'True'.
+ self.allow_public_rooms_without_auth = config.get(
+ "allow_public_rooms_without_auth", True
+ )
+ # If set to 'False', forbids any other homeserver to fetch the server's public
+ # rooms directory via federation. Defaults to 'True'.
+ self.allow_public_rooms_over_federation = config.get(
+ "allow_public_rooms_over_federation", True
+ )
default_room_version = config.get("default_room_version", DEFAULT_ROOM_VERSION)
@@ -308,7 +327,9 @@ class ServerConfig(Config):
def has_tls_listener(self):
return any(l["tls"] for l in self.listeners)
- def default_config(self, server_name, data_dir_path, **kwargs):
+ def generate_config_section(
+ self, server_name, data_dir_path, open_private_ports, **kwargs
+ ):
_, bind_port = parse_and_validate_server_name(server_name)
if bind_port is not None:
unsecure_port = bind_port - 400
@@ -321,6 +342,13 @@ class ServerConfig(Config):
# Bring DEFAULT_ROOM_VERSION into the local-scope for use in the
# default config string
default_room_version = DEFAULT_ROOM_VERSION
+
+        unsecure_http_binding = "port: %i\n            tls: false" % (unsecure_port,)
+        if not open_private_ports:
+            unsecure_http_binding += (
+                "\n            bind_addresses: ['::1', '127.0.0.1']"
+            )
+
return (
"""\
## Server ##
@@ -336,29 +364,6 @@ class ServerConfig(Config):
#
pid_file: %(pid_file)s
- # CPU affinity mask. Setting this restricts the CPUs on which the
- # process will be scheduled. It is represented as a bitmask, with the
- # lowest order bit corresponding to the first logical CPU and the
- # highest order bit corresponding to the last logical CPU. Not all CPUs
- # may exist on a given system but a mask may specify more CPUs than are
- # present.
- #
- # For example:
- # 0x00000001 is processor #0,
- # 0x00000003 is processors #0 and #1,
- # 0xFFFFFFFF is all processors (#0 through #31).
- #
- # Pinning a Python process to a single CPU is desirable, because Python
- # is inherently single-threaded due to the GIL, and can suffer a
- # 30-40%% slowdown due to cache blow-out and thread context switching
- # if the scheduler happens to schedule the underlying threads across
- # different cores. See
- # https://www.mirantis.com/blog/improve-performance-python-programs-restricting-single-cpu/.
- #
- # This setting requires the affinity package to be installed!
- #
- #cpu_affinity: 0xFFFFFFFF
-
# The path to the web client which will be served at /_matrix/client/
# if 'webclient' is configured under the 'listeners' configuration.
#
@@ -390,11 +395,15 @@ class ServerConfig(Config):
#
#require_auth_for_profile_requests: true
- # If set to 'true', requires authentication to access the server's
- # public rooms directory through the client API, and forbids any other
- # homeserver to fetch it via federation. Defaults to 'false'.
+ # If set to 'false', requires authentication to access the server's public rooms
+ # directory through the client API. Defaults to 'true'.
+ #
+ #allow_public_rooms_without_auth: false
+
+ # If set to 'false', forbids any other homeserver to fetch the server's public
+ # rooms directory via federation. Defaults to 'true'.
#
- #restrict_public_rooms_to_local_users: true
+ #allow_public_rooms_over_federation: false
# The default room version for newly created rooms.
#
@@ -535,9 +544,7 @@ class ServerConfig(Config):
# If you plan to use a reverse proxy, please see
# https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.rst.
#
- - port: %(unsecure_port)s
- tls: false
- bind_addresses: ['::1', '127.0.0.1']
+ - %(unsecure_http_binding)s
type: http
x_forwarded: true
@@ -545,7 +552,7 @@ class ServerConfig(Config):
- names: [client, federation]
compress: false
- # example additonal_resources:
+ # example additional_resources:
#
#additional_resources:
# "/_matrix/my/custom/endpoint":
diff --git a/synapse/config/server_notices_config.py b/synapse/config/server_notices_config.py
index d930eb33b5..eaac3d73bc 100644
--- a/synapse/config/server_notices_config.py
+++ b/synapse/config/server_notices_config.py
@@ -66,7 +66,7 @@ class ServerNoticesConfig(Config):
self.server_notices_mxid_avatar_url = None
self.server_notices_room_name = None
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
c = config.get("server_notices")
if c is None:
return
@@ -78,5 +78,5 @@ class ServerNoticesConfig(Config):
# todo: i18n
self.server_notices_room_name = c.get("room_name", "Server Notices")
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return DEFAULT_CONFIG
diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py
index 1502e9faba..e40797ab50 100644
--- a/synapse/config/spam_checker.py
+++ b/synapse/config/spam_checker.py
@@ -19,14 +19,14 @@ from ._base import Config
class SpamCheckerConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.spam_checker = None
provider = config.get("spam_checker", None)
if provider is not None:
self.spam_checker = load_module(provider)
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
#spam_checker:
# module: "my_custom_project.SuperSpamChecker"
diff --git a/synapse/config/stats.py b/synapse/config/stats.py
index 80fc1b9dd0..b518a3ed9c 100644
--- a/synapse/config/stats.py
+++ b/synapse/config/stats.py
@@ -25,7 +25,7 @@ class StatsConfig(Config):
Configuration for the behaviour of synapse's stats engine
"""
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.stats_enabled = True
self.stats_bucket_size = 86400
self.stats_retention = sys.maxsize
@@ -42,7 +42,7 @@ class StatsConfig(Config):
/ 1000
)
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """
# Local statistics collection. Used in populating the room directory.
#
diff --git a/synapse/config/third_party_event_rules.py b/synapse/config/third_party_event_rules.py
index a89dd5f98a..b3431441b9 100644
--- a/synapse/config/third_party_event_rules.py
+++ b/synapse/config/third_party_event_rules.py
@@ -19,14 +19,14 @@ from ._base import Config
class ThirdPartyRulesConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.third_party_event_rules = None
provider = config.get("third_party_event_rules", None)
if provider is not None:
self.third_party_event_rules = load_module(provider)
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
# Server admins can define a Python module that implements extra rules for
# allowing or denying incoming events. In order to work, this module needs to
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index 7951bf21fa..ca508a224f 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -23,7 +23,7 @@ import six
from unpaddedbase64 import encode_base64
-from OpenSSL import crypto
+from OpenSSL import SSL, crypto
from twisted.internet._sslverify import Certificate, trustRootFromCertificates
from synapse.config._base import Config, ConfigError
@@ -33,7 +33,7 @@ logger = logging.getLogger(__name__)
class TlsConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, config_dir_path, **kwargs):
acme_config = config.get("acme", None)
if acme_config is None:
@@ -50,6 +50,10 @@ class TlsConfig(Config):
self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30)
self.acme_domain = acme_config.get("domain", config.get("server_name"))
+ self.acme_account_key_file = self.abspath(
+ acme_config.get("account_key_file", config_dir_path + "/client.key")
+ )
+
self.tls_certificate_file = self.abspath(config.get("tls_certificate_path"))
self.tls_private_key_file = self.abspath(config.get("tls_private_key_path"))
@@ -77,6 +81,27 @@ class TlsConfig(Config):
"federation_verify_certificates", True
)
+ # Minimum TLS version to use for outbound federation traffic
+ self.federation_client_minimum_tls_version = str(
+ config.get("federation_client_minimum_tls_version", 1)
+ )
+
+ if self.federation_client_minimum_tls_version not in ["1", "1.1", "1.2", "1.3"]:
+ raise ConfigError(
+ "federation_client_minimum_tls_version must be one of: 1, 1.1, 1.2, 1.3"
+ )
+
+ # Prevent people shooting themselves in the foot here by setting it to
+ # the biggest number blindly
+ if self.federation_client_minimum_tls_version == "1.3":
+ if getattr(SSL, "OP_NO_TLSv1_3", None) is None:
+ raise ConfigError(
+ (
+ "federation_client_minimum_tls_version cannot be 1.3, "
+ "your OpenSSL does not support it"
+ )
+ )
+
# Whitelist of domains to not verify certificates for
fed_whitelist_entries = config.get(
"federation_certificate_verification_whitelist", []
@@ -213,11 +238,14 @@ class TlsConfig(Config):
if sha256_fingerprint not in sha256_fingerprints:
self.tls_fingerprints.append({"sha256": sha256_fingerprint})
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(
+ self, config_dir_path, server_name, data_dir_path, **kwargs
+ ):
base_key_name = os.path.join(config_dir_path, server_name)
tls_certificate_path = base_key_name + ".tls.crt"
tls_private_key_path = base_key_name + ".tls.key"
+ default_acme_account_file = os.path.join(data_dir_path, "acme_account.key")
# this is to avoid the max line length. Sorrynotsorry
proxypassline = (
@@ -254,6 +282,15 @@ class TlsConfig(Config):
#
#federation_verify_certificates: false
+ # The minimum TLS version that will be used for outbound federation requests.
+ #
+ # Defaults to `1`. Configurable to `1`, `1.1`, `1.2`, or `1.3`. Note
+ # that setting this value higher than `1.2` will prevent federation to most
+ # of the public Matrix network: only configure it to `1.3` if you have an
+ # entirely private federation setup and you can ensure TLS 1.3 support.
+ #
+ #federation_client_minimum_tls_version: 1.2
+
# Skip federation certificate verification on the following whitelist
# of domains.
#
@@ -343,6 +380,13 @@ class TlsConfig(Config):
#
#domain: matrix.example.com
+ # file to use for the account key. This will be generated if it doesn't
+ # exist.
+ #
+ # If unspecified, we will use CONFDIR/client.key.
+ #
+ account_key_file: %(default_acme_account_file)s
+
# List of allowed TLS fingerprints for this server to publish along
# with the signing keys for this server. Other matrix servers that
# make HTTPS requests to this server will check that the TLS
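The TLS 1.3 guard above relies on a pyOpenSSL detail: `SSL.OP_NO_TLSv1_3` is only defined when the underlying OpenSSL build supports TLS 1.3, so its absence doubles as a feature probe. A standalone sketch:

```python
from OpenSSL import SSL

def openssl_supports_tls13():
    # pyOpenSSL omits the flag entirely on OpenSSL builds without TLS 1.3
    return getattr(SSL, "OP_NO_TLSv1_3", None) is not None

if not openssl_supports_tls13():
    print("federation_client_minimum_tls_version cannot be set to 1.3 here")
```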
diff --git a/synapse/config/user_directory.py b/synapse/config/user_directory.py
index e031b11599..f6313e17d4 100644
--- a/synapse/config/user_directory.py
+++ b/synapse/config/user_directory.py
@@ -21,7 +21,7 @@ class UserDirectoryConfig(Config):
Configuration for the behaviour of the /user_directory API
"""
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.user_directory_search_enabled = True
self.user_directory_search_all_users = False
user_directory_config = config.get("user_directory", None)
@@ -33,7 +33,7 @@ class UserDirectoryConfig(Config):
"search_all_users", False
)
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """
# User Directory configuration
#
diff --git a/synapse/config/voip.py b/synapse/config/voip.py
index 82cf8c53a8..2ca0e1cf70 100644
--- a/synapse/config/voip.py
+++ b/synapse/config/voip.py
@@ -16,7 +16,7 @@ from ._base import Config
class VoipConfig(Config):
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.turn_uris = config.get("turn_uris", [])
self.turn_shared_secret = config.get("turn_shared_secret")
self.turn_username = config.get("turn_username")
@@ -26,7 +26,7 @@ class VoipConfig(Config):
)
self.turn_allow_guests = config.get("turn_allow_guests", True)
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
## TURN ##
diff --git a/synapse/config/workers.py b/synapse/config/workers.py
index 75993abf35..3b75471d85 100644
--- a/synapse/config/workers.py
+++ b/synapse/config/workers.py
@@ -21,7 +21,7 @@ class WorkerConfig(Config):
They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process."""
- def read_config(self, config):
+ def read_config(self, config, **kwargs):
self.worker_app = config.get("worker_app")
# Canonicalise worker_app so that master always has None
@@ -46,7 +46,6 @@ class WorkerConfig(Config):
self.worker_name = config.get("worker_name", self.worker_app)
self.worker_main_http_uri = config.get("worker_main_http_uri", None)
- self.worker_cpu_affinity = config.get("worker_cpu_affinity")
# This option is really only here to support `--manhole` command line
# argument.
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index 2bc5cc3807..4f48e8e88d 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -24,12 +24,25 @@ from OpenSSL import SSL, crypto
from twisted.internet._sslverify import _defaultCurveName
from twisted.internet.abstract import isIPAddress, isIPv6Address
from twisted.internet.interfaces import IOpenSSLClientConnectionCreator
-from twisted.internet.ssl import CertificateOptions, ContextFactory, platformTrust
+from twisted.internet.ssl import (
+ CertificateOptions,
+ ContextFactory,
+ TLSVersion,
+ platformTrust,
+)
from twisted.python.failure import Failure
logger = logging.getLogger(__name__)
+_TLS_VERSION_MAP = {
+ "1": TLSVersion.TLSv1_0,
+ "1.1": TLSVersion.TLSv1_1,
+ "1.2": TLSVersion.TLSv1_2,
+ "1.3": TLSVersion.TLSv1_3,
+}
+
+
class ServerContextFactory(ContextFactory):
"""Factory for PyOpenSSL SSL contexts that are used to handle incoming
connections."""
@@ -43,16 +56,18 @@ class ServerContextFactory(ContextFactory):
try:
_ecCurve = crypto.get_elliptic_curve(_defaultCurveName)
context.set_tmp_ecdh(_ecCurve)
-
except Exception:
logger.exception("Failed to enable elliptic curve for TLS")
- context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
+
+ context.set_options(
+ SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3 | SSL.OP_NO_TLSv1 | SSL.OP_NO_TLSv1_1
+ )
context.use_certificate_chain_file(config.tls_certificate_file)
context.use_privatekey(config.tls_private_key)
# https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
context.set_cipher_list(
- "ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1"
+ "ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1:!AESCCM"
)
def getContext(self):
@@ -79,10 +94,22 @@ class ClientTLSOptionsFactory(object):
# Use CA root certs provided by OpenSSL
trust_root = platformTrust()
- self._verify_ssl_context = CertificateOptions(trustRoot=trust_root).getContext()
+ # "insecurelyLowerMinimumTo" is the argument that will go lower than
+ # Twisted's default, which is why it is marked as "insecure" (since
+ # Twisted's defaults are reasonably secure). But, since Twisted is
+ # moving to TLS 1.2 by default, we want to respect the config option if
+ # it is set to 1.0 (which the alternate option, raiseMinimumTo, will not
+ # let us do).
+ minTLS = _TLS_VERSION_MAP[config.federation_client_minimum_tls_version]
+
+ self._verify_ssl = CertificateOptions(
+ trustRoot=trust_root, insecurelyLowerMinimumTo=minTLS
+ )
+ self._verify_ssl_context = self._verify_ssl.getContext()
self._verify_ssl_context.set_info_callback(self._context_info_cb)
- self._no_verify_ssl_context = CertificateOptions().getContext()
+ self._no_verify_ssl = CertificateOptions(insecurelyLowerMinimumTo=minTLS)
+ self._no_verify_ssl_context = self._no_verify_ssl.getContext()
self._no_verify_ssl_context.set_info_callback(self._context_info_cb)
def get_options(self, host):
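The client-side plumbing uses Twisted's public TLS API; a self-contained sketch of the same `CertificateOptions` construction (Twisted has shipped `TLSVersion` and the `insecurelyLowerMinimumTo` argument since 17.5):

```python
from twisted.internet.ssl import CertificateOptions, TLSVersion, platformTrust

_TLS_VERSION_MAP = {
    "1": TLSVersion.TLSv1_0,
    "1.1": TLSVersion.TLSv1_1,
    "1.2": TLSVersion.TLSv1_2,
    "1.3": TLSVersion.TLSv1_3,
}

def make_verifying_options(minimum_tls_version):
    # insecurelyLowerMinimumTo (rather than raiseMinimumTo) honours a
    # configured floor of 1.0 even once Twisted's own default moves to 1.2
    return CertificateOptions(
        trustRoot=platformTrust(),
        insecurelyLowerMinimumTo=_TLS_VERSION_MAP[minimum_tls_version],
    )

ctx = make_verifying_options("1.2").getContext()
```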
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
index 58b929363f..1e925b19e7 100644
--- a/synapse/federation/federation_base.py
+++ b/synapse/federation/federation_base.py
@@ -163,7 +163,6 @@ class FederationBase(object):
logger.warning(
"Event %s content has been tampered, redacting",
pdu.event_id,
- pdu.get_pdu_json(),
)
return redacted_event
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index b4854e82f6..955f0f4308 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -721,15 +721,15 @@ class PublicRoomList(BaseFederationServlet):
PATH = "/publicRooms"
- def __init__(self, handler, authenticator, ratelimiter, server_name, deny_access):
+ def __init__(self, handler, authenticator, ratelimiter, server_name, allow_access):
super(PublicRoomList, self).__init__(
handler, authenticator, ratelimiter, server_name
)
- self.deny_access = deny_access
+ self.allow_access = allow_access
@defer.inlineCallbacks
def on_GET(self, origin, content, query):
- if self.deny_access:
+ if not self.allow_access:
raise FederationDeniedError(origin)
limit = parse_integer_from_args(query, "limit", 0)
@@ -1436,7 +1436,7 @@ def register_servlets(hs, resource, authenticator, ratelimiter, servlet_groups=N
authenticator=authenticator,
ratelimiter=ratelimiter,
server_name=hs.hostname,
- deny_access=hs.config.restrict_public_rooms_to_local_users,
+ allow_access=hs.config.allow_public_rooms_over_federation,
).register(resource)
if "group_server" in servlet_groups:
diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py
index 01e0ef408d..fbef2f3d38 100644
--- a/synapse/handlers/acme.py
+++ b/synapse/handlers/acme.py
@@ -15,14 +15,9 @@
import logging
-import attr
-from zope.interface import implementer
-
import twisted
import twisted.internet.error
from twisted.internet import defer
-from twisted.python.filepath import FilePath
-from twisted.python.url import URL
from twisted.web import server, static
from twisted.web.resource import Resource
@@ -30,27 +25,6 @@ from synapse.app import check_bind_error
logger = logging.getLogger(__name__)
-try:
- from txacme.interfaces import ICertificateStore
-
- @attr.s
- @implementer(ICertificateStore)
- class ErsatzStore(object):
- """
- A store that only stores in memory.
- """
-
- certs = attr.ib(default=attr.Factory(dict))
-
- def store(self, server_name, pem_objects):
- self.certs[server_name] = [o.as_bytes() for o in pem_objects]
- return defer.succeed(None)
-
-
-except ImportError:
- # txacme is missing
- pass
-
class AcmeHandler(object):
def __init__(self, hs):
@@ -60,6 +34,7 @@ class AcmeHandler(object):
@defer.inlineCallbacks
def start_listening(self):
+ from synapse.handlers import acme_issuing_service
# Configure logging for txacme, if you need to debug
# from eliot import add_destinations
@@ -67,37 +42,18 @@ class AcmeHandler(object):
#
# add_destinations(TwistedDestination())
- from txacme.challenges import HTTP01Responder
- from txacme.service import AcmeIssuingService
- from txacme.endpoint import load_or_create_client_key
- from txacme.client import Client
- from josepy.jwa import RS256
-
- self._store = ErsatzStore()
- responder = HTTP01Responder()
-
- self._issuer = AcmeIssuingService(
- cert_store=self._store,
- client_creator=(
- lambda: Client.from_url(
- reactor=self.reactor,
- url=URL.from_text(self.hs.config.acme_url),
- key=load_or_create_client_key(
- FilePath(self.hs.config.config_dir_path)
- ),
- alg=RS256,
- )
- ),
- clock=self.reactor,
- responders=[responder],
+ well_known = Resource()
+
+ self._issuer = acme_issuing_service.create_issuing_service(
+ self.reactor,
+ acme_url=self.hs.config.acme_url,
+ account_key_file=self.hs.config.acme_account_key_file,
+ well_known_resource=well_known,
)
- well_known = Resource()
- well_known.putChild(b"acme-challenge", responder.resource)
responder_resource = Resource()
responder_resource.putChild(b".well-known", well_known)
responder_resource.putChild(b"check", static.Data(b"OK", b"text/plain"))
-
srv = server.Site(responder_resource)
bind_addresses = self.hs.config.acme_bind_addresses
@@ -128,7 +84,7 @@ class AcmeHandler(object):
logger.exception("Fail!")
raise
logger.warning("Reprovisioned %s, saving.", self._acme_domain)
- cert_chain = self._store.certs[self._acme_domain]
+ cert_chain = self._issuer.cert_store.certs[self._acme_domain]
try:
with open(self.hs.config.tls_private_key_file, "wb") as private_key_file:
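With the txacme wiring extracted into the new module below, the handler-side call reduces to a few lines; a usage sketch (the reactor, URL, and paths are illustrative):

```python
from twisted.internet import reactor
from twisted.web.resource import Resource

from synapse.handlers import acme_issuing_service

well_known = Resource()
issuer = acme_issuing_service.create_issuing_service(
    reactor,
    acme_url="https://acme-v01.api.letsencrypt.org/directory",
    account_key_file="/data/acme_account.key",
    well_known_resource=well_known,
)
# once issued, PEM blobs are exposed via the in-memory store, as used by
# AcmeHandler.provision_certificate above:
#     cert_chain = issuer.cert_store.certs[domain]
```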
diff --git a/synapse/handlers/acme_issuing_service.py b/synapse/handlers/acme_issuing_service.py
new file mode 100644
index 0000000000..e1d4224e74
--- /dev/null
+++ b/synapse/handlers/acme_issuing_service.py
@@ -0,0 +1,117 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Utility function to create an ACME issuing service.
+
+This file contains the unconditional imports of the acme and cryptography bits that we
+only need (and may only have available) if we are doing ACME, so it is designed to be
+imported conditionally.
+"""
+import logging
+
+import attr
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import serialization
+from josepy import JWKRSA
+from josepy.jwa import RS256
+from txacme.challenges import HTTP01Responder
+from txacme.client import Client
+from txacme.interfaces import ICertificateStore
+from txacme.service import AcmeIssuingService
+from txacme.util import generate_private_key
+from zope.interface import implementer
+
+from twisted.internet import defer
+from twisted.python.filepath import FilePath
+from twisted.python.url import URL
+
+logger = logging.getLogger(__name__)
+
+
+def create_issuing_service(reactor, acme_url, account_key_file, well_known_resource):
+ """Create an ACME issuing service, and attach it to a web Resource
+
+ Args:
+ reactor: twisted reactor
+ acme_url (str): URL to use to request certificates
+ account_key_file (str): where to store the account key
+ well_known_resource (twisted.web.IResource): web resource for .well-known.
+ We will attach a child resource for "acme-challenge" to it.
+
+ Returns:
+ AcmeIssuingService
+ """
+ responder = HTTP01Responder()
+
+ well_known_resource.putChild(b"acme-challenge", responder.resource)
+
+ store = ErsatzStore()
+
+ return AcmeIssuingService(
+ cert_store=store,
+ client_creator=(
+ lambda: Client.from_url(
+ reactor=reactor,
+ url=URL.from_text(acme_url),
+ key=load_or_create_client_key(account_key_file),
+ alg=RS256,
+ )
+ ),
+ clock=reactor,
+ responders=[responder],
+ )
+
+
+@attr.s
+@implementer(ICertificateStore)
+class ErsatzStore(object):
+ """
+ A store that only stores in memory.
+ """
+
+ certs = attr.ib(default=attr.Factory(dict))
+
+ def store(self, server_name, pem_objects):
+ self.certs[server_name] = [o.as_bytes() for o in pem_objects]
+ return defer.succeed(None)
+
+
+def load_or_create_client_key(key_file):
+ """Load the ACME account key from a file, creating it if it does not exist.
+
+ Args:
+ key_file (str): name of the file to use as the account key
+ """
+ # this is based on txacme.endpoint.load_or_create_client_key, but doesn't
+ # hardcode the 'client.key' filename
+ acme_key_file = FilePath(key_file)
+ if acme_key_file.exists():
+ logger.info("Loading ACME account key from '%s'", acme_key_file)
+ key = serialization.load_pem_private_key(
+ acme_key_file.getContent(), password=None, backend=default_backend()
+ )
+ else:
+ logger.info("Saving new ACME account key to '%s'", acme_key_file)
+ key = generate_private_key("rsa")
+ acme_key_file.setContent(
+ key.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.TraditionalOpenSSL,
+ encryption_algorithm=serialization.NoEncryption(),
+ )
+ )
+ return JWKRSA(key=key)
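For orientation, a minimal usage sketch of the new module (the URL and file path are placeholders; startService and issue_cert are the txacme AcmeIssuingService methods the handler drives):

    # Hedged sketch, not part of the patch. Assumes a running Twisted reactor.
    from twisted.internet import reactor
    from twisted.web.resource import Resource
    from synapse.handlers.acme_issuing_service import create_issuing_service

    well_known = Resource()
    issuer = create_issuing_service(
        reactor,
        acme_url="https://example.invalid/acme/directory",  # placeholder
        account_key_file="/tmp/acme_account.key",           # placeholder
        well_known_resource=well_known,  # gains an "acme-challenge" child
    )
    issuer.startService()                 # begin txacme's issuing loop
    d = issuer.issue_cert("example.com")  # Deferred firing on issuance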
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 97b21c4093..c8c1ed3246 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -743,7 +743,7 @@ class AuthHandler(BaseHandler):
result = (result, None)
defer.returnValue(result)
- if login_type == LoginType.PASSWORD:
+ if login_type == LoginType.PASSWORD and self.hs.config.password_localdb_enabled:
known_login_type = True
canonical_user_id = yield self._check_local_password(
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index f59d0479b5..99e8413092 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -101,9 +101,13 @@ class DeviceWorkerHandler(BaseHandler):
room_ids = yield self.store.get_rooms_for_user(user_id)
- # First we check if any devices have changed
- changed = yield self.store.get_user_whose_devices_changed(
- from_token.device_list_key
+ # First we check if any devices have changed for users that we share
+ # rooms with.
+ users_who_share_room = yield self.store.get_users_who_share_room_with_user(
+ user_id
+ )
+ changed = yield self.store.get_users_whose_devices_changed(
+ from_token.device_list_key, users_who_share_room
)
# Then work out if any users have since joined
@@ -188,10 +192,6 @@ class DeviceWorkerHandler(BaseHandler):
break
if possibly_changed or possibly_left:
- users_who_share_room = yield self.store.get_users_who_share_room_with_user(
- user_id
- )
-
# Take the intersection of the users whose devices may have changed
# and those that actually still share a room with the user
possibly_joined = possibly_changed & users_who_share_room
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index 062e026e5f..76ee97ddd3 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -180,9 +180,7 @@ class PaginationHandler(object):
room_token = pagin_config.from_token.room_key
else:
pagin_config.from_token = (
- yield self.hs.get_event_sources().get_current_token_for_room(
- room_id=room_id
- )
+ yield self.hs.get_event_sources().get_current_token_for_pagination()
)
room_token = pagin_config.from_token.room_key
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 89d89fc27c..db3f8cb76b 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -32,6 +32,7 @@ from synapse.storage.state import StateFilter
from synapse.types import RoomAlias, RoomID, RoomStreamToken, StreamToken, UserID
from synapse.util import stringutils
from synapse.util.async_helpers import Linearizer
+from synapse.util.caches.response_cache import ResponseCache
from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
@@ -40,6 +41,8 @@ logger = logging.getLogger(__name__)
id_server_scheme = "https://"
+FIVE_MINUTES_IN_MS = 5 * 60 * 1000
+
class RoomCreationHandler(BaseHandler):
@@ -75,6 +78,12 @@ class RoomCreationHandler(BaseHandler):
# linearizer to stop two upgrades happening at once
self._upgrade_linearizer = Linearizer("room_upgrade_linearizer")
+ # If a user tries to update the same room multiple times in quick
+ # succession, only process the first attempt and return its result to
+ # subsequent requests
+ self._upgrade_response_cache = ResponseCache(
+ hs, "room_upgrade", timeout_ms=FIVE_MINUTES_IN_MS
+ )
self._server_notices_mxid = hs.config.server_notices_mxid
self.third_party_event_rules = hs.get_third_party_event_rules()
@@ -95,67 +104,96 @@ class RoomCreationHandler(BaseHandler):
user_id = requester.user.to_string()
- with (yield self._upgrade_linearizer.queue(old_room_id)):
- # start by allocating a new room id
- r = yield self.store.get_room(old_room_id)
- if r is None:
- raise NotFoundError("Unknown room id %s" % (old_room_id,))
- new_room_id = yield self._generate_room_id(
- creator_id=user_id, is_public=r["is_public"]
- )
+ # Check if this room is already being upgraded by another person
+ for key in self._upgrade_response_cache.pending_result_cache:
+ if key[0] == old_room_id and key[1] != user_id:
+ # Two different people are trying to upgrade the same room.
+ # Send the second an error.
+ #
+ # Note that this of course only gets caught if both users are
+ # on the same homeserver.
+ raise SynapseError(
+ 400, "An upgrade for this room is currently in progress"
+ )
- logger.info("Creating new room %s to replace %s", new_room_id, old_room_id)
+ # Upgrade the room
+ #
+ # If this user has sent multiple upgrade requests for the same room
+ # and one of them is not complete yet, cache the response and
+ # return it to all subsequent requests
+ ret = yield self._upgrade_response_cache.wrap(
+ (old_room_id, user_id),
+ self._upgrade_room,
+ requester,
+ old_room_id,
+ new_version, # args for _upgrade_room
+ )
+ defer.returnValue(ret)
- # we create and auth the tombstone event before properly creating the new
- # room, to check our user has perms in the old room.
- tombstone_event, tombstone_context = (
- yield self.event_creation_handler.create_event(
- requester,
- {
- "type": EventTypes.Tombstone,
- "state_key": "",
- "room_id": old_room_id,
- "sender": user_id,
- "content": {
- "body": "This room has been replaced",
- "replacement_room": new_room_id,
- },
- },
- token_id=requester.access_token_id,
- )
- )
- old_room_version = yield self.store.get_room_version(old_room_id)
- yield self.auth.check_from_context(
- old_room_version, tombstone_event, tombstone_context
- )
+ @defer.inlineCallbacks
+ def _upgrade_room(self, requester, old_room_id, new_version):
+ user_id = requester.user.to_string()
+
+ # start by allocating a new room id
+ r = yield self.store.get_room(old_room_id)
+ if r is None:
+ raise NotFoundError("Unknown room id %s" % (old_room_id,))
+ new_room_id = yield self._generate_room_id(
+ creator_id=user_id, is_public=r["is_public"]
+ )
+
+ logger.info("Creating new room %s to replace %s", new_room_id, old_room_id)
- yield self.clone_existing_room(
+ # we create and auth the tombstone event before properly creating the new
+ # room, to check our user has perms in the old room.
+ tombstone_event, tombstone_context = (
+ yield self.event_creation_handler.create_event(
requester,
- old_room_id=old_room_id,
- new_room_id=new_room_id,
- new_room_version=new_version,
- tombstone_event_id=tombstone_event.event_id,
+ {
+ "type": EventTypes.Tombstone,
+ "state_key": "",
+ "room_id": old_room_id,
+ "sender": user_id,
+ "content": {
+ "body": "This room has been replaced",
+ "replacement_room": new_room_id,
+ },
+ },
+ token_id=requester.access_token_id,
)
+ )
+ old_room_version = yield self.store.get_room_version(old_room_id)
+ yield self.auth.check_from_context(
+ old_room_version, tombstone_event, tombstone_context
+ )
- # now send the tombstone
- yield self.event_creation_handler.send_nonmember_event(
- requester, tombstone_event, tombstone_context
- )
+ yield self.clone_existing_room(
+ requester,
+ old_room_id=old_room_id,
+ new_room_id=new_room_id,
+ new_room_version=new_version,
+ tombstone_event_id=tombstone_event.event_id,
+ )
- old_room_state = yield tombstone_context.get_current_state_ids(self.store)
+ # now send the tombstone
+ yield self.event_creation_handler.send_nonmember_event(
+ requester, tombstone_event, tombstone_context
+ )
- # update any aliases
- yield self._move_aliases_to_new_room(
- requester, old_room_id, new_room_id, old_room_state
- )
+ old_room_state = yield tombstone_context.get_current_state_ids(self.store)
- # and finally, shut down the PLs in the old room, and update them in the new
- # room.
- yield self._update_upgraded_room_pls(
- requester, old_room_id, new_room_id, old_room_state
- )
+ # update any aliases
+ yield self._move_aliases_to_new_room(
+ requester, old_room_id, new_room_id, old_room_state
+ )
+
+ # and finally, shut down the PLs in the old room, and update them in the new
+ # room.
+ yield self._update_upgraded_room_pls(
+ requester, old_room_id, new_room_id, old_room_state
+ )
- defer.returnValue(new_room_id)
+ defer.returnValue(new_room_id)
@defer.inlineCallbacks
def _update_upgraded_room_pls(
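The deduplication above leans on ResponseCache.wrap: the first caller with a given key runs the callback, and any caller arriving while it is in flight (or within timeout_ms of completion) receives the same result. A hedged sketch of that contract, with `_do_upgrade` standing in for RoomCreationHandler._upgrade_room:

    # Sketch only; `hs` and `_do_upgrade` are stand-ins for the real objects.
    from synapse.util.caches.response_cache import ResponseCache

    cache = ResponseCache(hs, "room_upgrade", timeout_ms=5 * 60 * 1000)

    def request_upgrade(requester, old_room_id, new_version):
        return cache.wrap(
            (old_room_id, requester.user.to_string()),  # dedup key: room + user
            _do_upgrade,                                # runs at most once per key
            requester,
            old_room_id,
            new_version,
        )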
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 4d6e883802..66b05b4732 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -823,6 +823,7 @@ class RoomMemberHandler(object):
"sender": user.to_string(),
"state_key": token,
},
+ ratelimit=False,
txn_id=txn_id,
)
diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py
index 5a0995d4fe..d90c9e0108 100644
--- a/synapse/handlers/set_password.py
+++ b/synapse/handlers/set_password.py
@@ -33,6 +33,9 @@ class SetPasswordHandler(BaseHandler):
@defer.inlineCallbacks
def set_password(self, user_id, newpassword, requester=None):
+ if not self.hs.config.password_localdb_enabled:
+ raise SynapseError(403, "Password change disabled", errcode=Codes.FORBIDDEN)
+
password_hash = yield self._auth_handler.hash(newpassword)
except_device_id = requester.device_id if requester else None
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index c5188a1f8e..a3f550554f 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -1058,40 +1058,74 @@ class SyncHandler(object):
newly_left_rooms,
newly_left_users,
):
+ """Generate the DeviceLists section of sync
+
+ Args:
+ sync_result_builder (SyncResultBuilder)
+ newly_joined_rooms (set[str]): Set of rooms user has joined since
+ previous sync
+ newly_joined_or_invited_users (set[str]): Set of users that have
+ joined or been invited to a room since previous sync.
+ newly_left_rooms (set[str]): Set of rooms user has left since
+ previous sync
+ newly_left_users (set[str]): Set of users that have left a room
+ we're in since previous sync
+
+ Returns:
+ Deferred[DeviceLists]
+ """
+
user_id = sync_result_builder.sync_config.user.to_string()
since_token = sync_result_builder.since_token
+ # We're going to mutate these fields, so let's copy them rather than
+ # assume they won't get used later.
+ newly_joined_or_invited_users = set(newly_joined_or_invited_users)
+ newly_left_users = set(newly_left_users)
+
if since_token and since_token.device_list_key:
- changed = yield self.store.get_user_whose_devices_changed(
- since_token.device_list_key
+ # We want to figure out what user IDs the client should refetch
+ # device keys for, and which users we aren't going to track changes
+ # for anymore.
+ #
+ # For the first step we check:
+ # a. if any users we share a room with have updated their devices,
+ # and
+ # b. we also check if we've joined any new rooms, or if a user has
+ # joined a room we're in.
+ #
+ # For the second step we just find any users we no longer share a
+ # room with by looking at all users that have left a room plus users
+ # that were in a room we've left.
+
+ users_who_share_room = yield self.store.get_users_who_share_room_with_user(
+ user_id
+ )
+
+ # Step 1a, check for changes in devices of users we share a room with
+ users_that_have_changed = yield self.store.get_users_whose_devices_changed(
+ since_token.device_list_key, users_who_share_room
)
- # TODO: Be more clever than this, i.e. remove users who we already
- # share a room with?
+ # Step 1b, check for newly joined rooms
for room_id in newly_joined_rooms:
joined_users = yield self.state.get_current_users_in_room(room_id)
newly_joined_or_invited_users.update(joined_users)
- for room_id in newly_left_rooms:
- left_users = yield self.state.get_current_users_in_room(room_id)
- newly_left_users.update(left_users)
-
# TODO: Check that these users are actually new, i.e. either they
# weren't in the previous sync *or* they left and rejoined.
- changed.update(newly_joined_or_invited_users)
+ users_that_have_changed.update(newly_joined_or_invited_users)
- if not changed and not newly_left_users:
- defer.returnValue(DeviceLists(changed=[], left=newly_left_users))
+ # Now find users that we no longer track
+ for room_id in newly_left_rooms:
+ left_users = yield self.state.get_current_users_in_room(room_id)
+ newly_left_users.update(left_users)
- users_who_share_room = yield self.store.get_users_who_share_room_with_user(
- user_id
- )
+ # Remove any users that we still share a room with.
+ newly_left_users -= users_who_share_room
defer.returnValue(
- DeviceLists(
- changed=users_who_share_room & changed,
- left=set(newly_left_users) - users_who_share_room,
- )
+ DeviceLists(changed=users_that_have_changed, left=newly_left_users)
)
else:
defer.returnValue(DeviceLists(changed=[], left=[]))
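Concretely, the two steps described in the comments reduce to set algebra. A toy walk-through with invented user IDs:

    # Step 1: users whose device lists the client must refetch.
    users_who_share_room = {"@a:hs", "@b:hs", "@c:hs"}
    changed_since_token = {"@b:hs"}       # 1a: from the device-list stream
    newly_joined_or_invited = {"@d:hs"}   # 1b: joins/invites since last sync
    changed = changed_since_token | newly_joined_or_invited

    # Step 2: users the client can stop tracking.
    newly_left = {"@c:hs", "@e:hs"}
    left = newly_left - users_who_share_room  # drop users we still see

    assert changed == {"@b:hs", "@d:hs"}
    assert left == {"@e:hs"}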
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 6fd13e87d1..f067c163c1 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -16,10 +16,11 @@
import cgi
import collections
+import http.client
import logging
-
-from six import PY3
-from six.moves import http_client, urllib
+import types
+import urllib
+from io import BytesIO
from canonicaljson import encode_canonical_json, encode_pretty_printed_json, json
@@ -41,11 +42,6 @@ from synapse.api.errors import (
from synapse.util.caches import intern_dict
from synapse.util.logcontext import preserve_fn
-if PY3:
- from io import BytesIO
-else:
- from cStringIO import StringIO as BytesIO
-
logger = logging.getLogger(__name__)
HTML_ERROR_TEMPLATE = """<!DOCTYPE html>
@@ -75,10 +71,9 @@ def wrap_json_request_handler(h):
deferred fails with any other type of error we send a 500 response.
"""
- @defer.inlineCallbacks
- def wrapped_request_handler(self, request):
+ async def wrapped_request_handler(self, request):
try:
- yield h(self, request)
+ await h(self, request)
except SynapseError as e:
code = e.code
logger.info("%s SynapseError: %s - %s", request, code, e.msg)
@@ -142,10 +137,12 @@ def wrap_html_request_handler(h):
where "request" must be a SynapseRequest.
"""
- def wrapped_request_handler(self, request):
- d = defer.maybeDeferred(h, self, request)
- d.addErrback(_return_html_error, request)
- return d
+ async def wrapped_request_handler(self, request):
+ try:
+ return await h(self, request)
+ except Exception:
+ f = failure.Failure()
+ return _return_html_error(f, request)
return wrap_async_request_handler(wrapped_request_handler)
@@ -171,7 +168,7 @@ def _return_html_error(f, request):
exc_info=(f.type, f.value, f.getTracebackObject()),
)
else:
- code = http_client.INTERNAL_SERVER_ERROR
+ code = http.client.INTERNAL_SERVER_ERROR
msg = "Internal server error"
logger.error(
@@ -201,10 +198,9 @@ def wrap_async_request_handler(h):
logged until the deferred completes.
"""
- @defer.inlineCallbacks
- def wrapped_async_request_handler(self, request):
+ async def wrapped_async_request_handler(self, request):
with request.processing():
- yield h(self, request)
+ await h(self, request)
# we need to preserve_fn here, because the synchronous render method won't yield for
# us (obviously)
@@ -270,12 +266,11 @@ class JsonResource(HttpServer, resource.Resource):
def render(self, request):
""" This gets called by twisted every time someone sends us a request.
"""
- self._async_render(request)
+ defer.ensureDeferred(self._async_render(request))
return NOT_DONE_YET
@wrap_json_request_handler
- @defer.inlineCallbacks
- def _async_render(self, request):
+ async def _async_render(self, request):
""" This gets called from render() every time someone sends us a request.
This checks if anyone has registered a callback for that method and
path.
@@ -292,26 +287,19 @@ class JsonResource(HttpServer, resource.Resource):
# Now trigger the callback. If it returns a response, we send it
# here. If it throws an exception, that is handled by the wrapper
# installed by @request_handler.
-
- def _unquote(s):
- if PY3:
- # On Python 3, unquote is unicode -> unicode
- return urllib.parse.unquote(s)
- else:
- # On Python 2, unquote is bytes -> bytes We need to encode the
- # URL again (as it was decoded by _get_handler_for request), as
- # ASCII because it's a URL, and then decode it to get the UTF-8
- # characters that were quoted.
- return urllib.parse.unquote(s.encode("ascii")).decode("utf8")
-
kwargs = intern_dict(
{
- name: _unquote(value) if value else value
+ name: urllib.parse.unquote(value) if value else value
for name, value in group_dict.items()
}
)
- callback_return = yield callback(request, **kwargs)
+ callback_return = callback(request, **kwargs)
+
+ # Is it synchronous? We'll allow this for now.
+ if isinstance(callback_return, (defer.Deferred, types.CoroutineType)):
+ callback_return = await callback_return
+
if callback_return is not None:
code, response = callback_return
self._send_response(request, code, response)
@@ -360,6 +348,23 @@ class JsonResource(HttpServer, resource.Resource):
)
+class DirectServeResource(resource.Resource):
+ def render(self, request):
+ """
+ Render the request, using an asynchronous render handler if it exists.
+ """
+ render_callback_name = "_async_render_" + request.method.decode("ascii")
+
+ if hasattr(self, render_callback_name):
+ # Call the handler
+ callback = getattr(self, render_callback_name)
+ defer.ensureDeferred(callback(request))
+
+ return NOT_DONE_YET
+ else:
+ return super().render(request)
+
+
def _options_handler(request):
"""Request handler for OPTIONS requests
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index 1f30179b51..eaf0aaa86e 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -437,7 +437,10 @@ def runUntilCurrentTimer(func):
counts = gc.get_count()
for i in (2, 1, 0):
if threshold[i] < counts[i]:
- logger.info("Collecting gc %d", i)
+ if i == 0:
+ logger.debug("Collecting gc %d", i)
+ else:
+ logger.info("Collecting gc %d", i)
start = time.time()
unreachable = gc.collect(i)
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 77e2d1a0e1..6324c00ef1 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -72,10 +72,11 @@ REQUIREMENTS = [
# Twisted 18.7.0 requires attrs>=17.4.0
"attrs>=17.4.0",
"netaddr>=0.7.18",
+ "Jinja2>=2.9",
+ "bleach>=1.4.3",
]
CONDITIONAL_REQUIREMENTS = {
- "email": ["Jinja2>=2.9", "bleach>=1.4.3"],
"matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"],
# we use execute_batch, which arrived in psycopg 2.7.
"postgres": ["psycopg2>=2.7"],
@@ -94,6 +95,7 @@ CONDITIONAL_REQUIREMENTS = {
"url_preview": ["lxml>=3.5.0"],
"test": ["mock>=2.0", "parameterized"],
"sentry": ["sentry-sdk>=0.7.2"],
+ "jwt": ["pyjwt>=1.6.4"],
}
ALL_OPTIONAL_REQUIREMENTS = set()
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index 4efb679a04..ede6bc8b1e 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -336,7 +336,7 @@ class LoginRestServlet(RestServlet):
}
else:
user_id, access_token = (
- yield self.handlers.registration_handler.register(localpart=user)
+ yield self.registration_handler.register(localpart=user)
)
device_id = login_submission.get("device_id")
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index a028337125..cca7e45ddb 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -311,7 +311,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
# Option to allow servers to require auth when accessing
# /publicRooms via CS API. This is especially helpful in private
# federations.
- if self.hs.config.restrict_public_rooms_to_local_users:
+ if not self.hs.config.allow_public_rooms_without_auth:
raise
# We allow people to not be authed if they're just looking at our
diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py
index 9a32892d8b..624c42441e 100644
--- a/synapse/rest/consent/consent_resource.py
+++ b/synapse/rest/consent/consent_resource.py
@@ -23,13 +23,13 @@ from six.moves import http_client
import jinja2
from jinja2 import TemplateNotFound
-from twisted.internet import defer
-from twisted.web.resource import Resource
-from twisted.web.server import NOT_DONE_YET
-
from synapse.api.errors import NotFoundError, StoreError, SynapseError
from synapse.config import ConfigError
-from synapse.http.server import finish_request, wrap_html_request_handler
+from synapse.http.server import (
+ DirectServeResource,
+ finish_request,
+ wrap_html_request_handler,
+)
from synapse.http.servlet import parse_string
from synapse.types import UserID
@@ -47,7 +47,7 @@ else:
return a == b
-class ConsentResource(Resource):
+class ConsentResource(DirectServeResource):
"""A twisted Resource to display a privacy policy and gather consent to it
When accessed via GET, returns the privacy policy via a template.
@@ -87,7 +87,7 @@ class ConsentResource(Resource):
Args:
hs (synapse.server.HomeServer): homeserver
"""
- Resource.__init__(self)
+ super().__init__()
self.hs = hs
self.store = hs.get_datastore()
@@ -118,18 +118,12 @@ class ConsentResource(Resource):
self._hmac_secret = hs.config.form_secret.encode("utf-8")
- def render_GET(self, request):
- self._async_render_GET(request)
- return NOT_DONE_YET
-
@wrap_html_request_handler
- @defer.inlineCallbacks
- def _async_render_GET(self, request):
+ async def _async_render_GET(self, request):
"""
Args:
request (twisted.web.http.Request):
"""
-
version = parse_string(request, "v", default=self._default_consent_version)
username = parse_string(request, "u", required=False, default="")
userhmac = None
@@ -145,7 +139,7 @@ class ConsentResource(Resource):
else:
qualified_user_id = UserID(username, self.hs.hostname).to_string()
- u = yield self.store.get_user_by_id(qualified_user_id)
+ u = await self.store.get_user_by_id(qualified_user_id)
if u is None:
raise NotFoundError("Unknown user")
@@ -165,13 +159,8 @@ class ConsentResource(Resource):
except TemplateNotFound:
raise NotFoundError("Unknown policy version")
- def render_POST(self, request):
- self._async_render_POST(request)
- return NOT_DONE_YET
-
@wrap_html_request_handler
- @defer.inlineCallbacks
- def _async_render_POST(self, request):
+ async def _async_render_POST(self, request):
"""
Args:
request (twisted.web.http.Request):
@@ -188,12 +177,12 @@ class ConsentResource(Resource):
qualified_user_id = UserID(username, self.hs.hostname).to_string()
try:
- yield self.store.user_set_consent_version(qualified_user_id, version)
+ await self.store.user_set_consent_version(qualified_user_id, version)
except StoreError as e:
if e.code != 404:
raise
raise NotFoundError("Unknown user")
- yield self.registration_handler.post_consent_actions(qualified_user_id)
+ await self.registration_handler.post_consent_actions(qualified_user_id)
try:
self._render_template(request, "success.html")
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index ec8b9d7269..031a316693 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -16,18 +16,20 @@ import logging
from io import BytesIO
from twisted.internet import defer
-from twisted.web.resource import Resource
-from twisted.web.server import NOT_DONE_YET
from synapse.api.errors import Codes, SynapseError
from synapse.crypto.keyring import ServerKeyFetcher
-from synapse.http.server import respond_with_json_bytes, wrap_json_request_handler
+from synapse.http.server import (
+ DirectServeResource,
+ respond_with_json_bytes,
+ wrap_json_request_handler,
+)
from synapse.http.servlet import parse_integer, parse_json_object_from_request
logger = logging.getLogger(__name__)
-class RemoteKey(Resource):
+class RemoteKey(DirectServeResource):
"""HTTP resource for retreiving the TLS certificate and NACL signature
verification keys for a collection of servers. Checks that the reported
X.509 TLS certificate matches the one used in the HTTPS connection. Checks
@@ -94,13 +96,8 @@ class RemoteKey(Resource):
self.clock = hs.get_clock()
self.federation_domain_whitelist = hs.config.federation_domain_whitelist
- def render_GET(self, request):
- self.async_render_GET(request)
- return NOT_DONE_YET
-
@wrap_json_request_handler
- @defer.inlineCallbacks
- def async_render_GET(self, request):
+ async def _async_render_GET(self, request):
if len(request.postpath) == 1:
server, = request.postpath
query = {server.decode("ascii"): {}}
@@ -114,20 +111,15 @@ class RemoteKey(Resource):
else:
raise SynapseError(404, "Not found %r" % request.postpath, Codes.NOT_FOUND)
- yield self.query_keys(request, query, query_remote_on_cache_miss=True)
-
- def render_POST(self, request):
- self.async_render_POST(request)
- return NOT_DONE_YET
+ await self.query_keys(request, query, query_remote_on_cache_miss=True)
@wrap_json_request_handler
- @defer.inlineCallbacks
- def async_render_POST(self, request):
+ async def _async_render_POST(self, request):
content = parse_json_object_from_request(request)
query = content["server_keys"]
- yield self.query_keys(request, query, query_remote_on_cache_miss=True)
+ await self.query_keys(request, query, query_remote_on_cache_miss=True)
@defer.inlineCallbacks
def query_keys(self, request, query, query_remote_on_cache_miss=False):
diff --git a/synapse/rest/media/v1/config_resource.py b/synapse/rest/media/v1/config_resource.py
index fa3d6680fc..9f747de263 100644
--- a/synapse/rest/media/v1/config_resource.py
+++ b/synapse/rest/media/v1/config_resource.py
@@ -14,31 +14,28 @@
# limitations under the License.
#
-from twisted.internet import defer
-from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
-from synapse.http.server import respond_with_json, wrap_json_request_handler
+from synapse.http.server import (
+ DirectServeResource,
+ respond_with_json,
+ wrap_json_request_handler,
+)
-class MediaConfigResource(Resource):
+class MediaConfigResource(DirectServeResource):
isLeaf = True
def __init__(self, hs):
- Resource.__init__(self)
+ super().__init__()
config = hs.get_config()
self.clock = hs.get_clock()
self.auth = hs.get_auth()
self.limits_dict = {"m.upload.size": config.max_upload_size}
- def render_GET(self, request):
- self._async_render_GET(request)
- return NOT_DONE_YET
-
@wrap_json_request_handler
- @defer.inlineCallbacks
- def _async_render_GET(self, request):
- yield self.auth.get_user_by_req(request)
+ async def _async_render_GET(self, request):
+ await self.auth.get_user_by_req(request)
respond_with_json(request, 200, self.limits_dict, send_cors=True)
def render_OPTIONS(self, request):
diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py
index a21a35f843..66a01559e1 100644
--- a/synapse/rest/media/v1/download_resource.py
+++ b/synapse/rest/media/v1/download_resource.py
@@ -14,37 +14,31 @@
# limitations under the License.
import logging
-from twisted.internet import defer
-from twisted.web.resource import Resource
-from twisted.web.server import NOT_DONE_YET
-
import synapse.http.servlet
-from synapse.http.server import set_cors_headers, wrap_json_request_handler
+from synapse.http.server import (
+ DirectServeResource,
+ set_cors_headers,
+ wrap_json_request_handler,
+)
from ._base import parse_media_id, respond_404
logger = logging.getLogger(__name__)
-class DownloadResource(Resource):
+class DownloadResource(DirectServeResource):
isLeaf = True
def __init__(self, hs, media_repo):
- Resource.__init__(self)
-
+ super().__init__()
self.media_repo = media_repo
self.server_name = hs.hostname
# this is expected by @wrap_json_request_handler
self.clock = hs.get_clock()
- def render_GET(self, request):
- self._async_render_GET(request)
- return NOT_DONE_YET
-
@wrap_json_request_handler
- @defer.inlineCallbacks
- def _async_render_GET(self, request):
+ async def _async_render_GET(self, request):
set_cors_headers(request)
request.setHeader(
b"Content-Security-Policy",
@@ -58,7 +52,7 @@ class DownloadResource(Resource):
)
server_name, media_id, name = parse_media_id(request)
if server_name == self.server_name:
- yield self.media_repo.get_local_media(request, media_id, name)
+ await self.media_repo.get_local_media(request, media_id, name)
else:
allow_remote = synapse.http.servlet.parse_boolean(
request, "allow_remote", default=True
@@ -72,4 +66,4 @@ class DownloadResource(Resource):
respond_404(request)
return
- yield self.media_repo.get_remote_media(request, server_name, media_id, name)
+ await self.media_repo.get_remote_media(request, server_name, media_id, name)
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index de6f292ffb..0337b64dc2 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -32,12 +32,11 @@ from canonicaljson import json
from twisted.internet import defer
from twisted.internet.error import DNSLookupError
-from twisted.web.resource import Resource
-from twisted.web.server import NOT_DONE_YET
from synapse.api.errors import Codes, SynapseError
from synapse.http.client import SimpleHttpClient
from synapse.http.server import (
+ DirectServeResource,
respond_with_json,
respond_with_json_bytes,
wrap_json_request_handler,
@@ -58,11 +57,11 @@ _charset_match = re.compile(br"<\s*meta[^>]*charset\s*=\s*([a-z0-9-]+)", flags=r
_content_type_match = re.compile(r'.*; *charset="?(.*?)"?(;|$)', flags=re.I)
-class PreviewUrlResource(Resource):
+class PreviewUrlResource(DirectServeResource):
isLeaf = True
def __init__(self, hs, media_repo, media_storage):
- Resource.__init__(self)
+ super().__init__()
self.auth = hs.get_auth()
self.clock = hs.get_clock()
@@ -98,16 +97,11 @@ class PreviewUrlResource(Resource):
def render_OPTIONS(self, request):
return respond_with_json(request, 200, {}, send_cors=True)
- def render_GET(self, request):
- self._async_render_GET(request)
- return NOT_DONE_YET
-
@wrap_json_request_handler
- @defer.inlineCallbacks
- def _async_render_GET(self, request):
+ async def _async_render_GET(self, request):
# XXX: if get_user_by_req fails, what should we do in an async render?
- requester = yield self.auth.get_user_by_req(request)
+ requester = await self.auth.get_user_by_req(request)
url = parse_string(request, "url")
if b"ts" in request.args:
ts = parse_integer(request, "ts")
@@ -159,7 +153,7 @@ class PreviewUrlResource(Resource):
else:
logger.info("Returning cached response")
- og = yield make_deferred_yieldable(observable.observe())
+ og = await make_deferred_yieldable(defer.maybeDeferred(observable.observe))
respond_with_json_bytes(request, 200, og, send_cors=True)
@defer.inlineCallbacks
diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py
index ca84c9f139..08329884ac 100644
--- a/synapse/rest/media/v1/thumbnail_resource.py
+++ b/synapse/rest/media/v1/thumbnail_resource.py
@@ -17,10 +17,12 @@
import logging
from twisted.internet import defer
-from twisted.web.resource import Resource
-from twisted.web.server import NOT_DONE_YET
-from synapse.http.server import set_cors_headers, wrap_json_request_handler
+from synapse.http.server import (
+ DirectServeResource,
+ set_cors_headers,
+ wrap_json_request_handler,
+)
from synapse.http.servlet import parse_integer, parse_string
from ._base import (
@@ -34,11 +36,11 @@ from ._base import (
logger = logging.getLogger(__name__)
-class ThumbnailResource(Resource):
+class ThumbnailResource(DirectServeResource):
isLeaf = True
def __init__(self, hs, media_repo, media_storage):
- Resource.__init__(self)
+ super().__init__()
self.store = hs.get_datastore()
self.media_repo = media_repo
@@ -47,13 +49,8 @@ class ThumbnailResource(Resource):
self.server_name = hs.hostname
self.clock = hs.get_clock()
- def render_GET(self, request):
- self._async_render_GET(request)
- return NOT_DONE_YET
-
@wrap_json_request_handler
- @defer.inlineCallbacks
- def _async_render_GET(self, request):
+ async def _async_render_GET(self, request):
set_cors_headers(request)
server_name, media_id, _ = parse_media_id(request)
width = parse_integer(request, "width", required=True)
@@ -63,21 +60,21 @@ class ThumbnailResource(Resource):
if server_name == self.server_name:
if self.dynamic_thumbnails:
- yield self._select_or_generate_local_thumbnail(
+ await self._select_or_generate_local_thumbnail(
request, media_id, width, height, method, m_type
)
else:
- yield self._respond_local_thumbnail(
+ await self._respond_local_thumbnail(
request, media_id, width, height, method, m_type
)
self.media_repo.mark_recently_accessed(None, media_id)
else:
if self.dynamic_thumbnails:
- yield self._select_or_generate_remote_thumbnail(
+ await self._select_or_generate_remote_thumbnail(
request, server_name, media_id, width, height, method, m_type
)
else:
- yield self._respond_remote_thumbnail(
+ await self._respond_remote_thumbnail(
request, server_name, media_id, width, height, method, m_type
)
self.media_repo.mark_recently_accessed(server_name, media_id)
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index d1d7e959f0..5d76bbdf68 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -15,22 +15,24 @@
import logging
-from twisted.internet import defer
-from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from synapse.api.errors import SynapseError
-from synapse.http.server import respond_with_json, wrap_json_request_handler
+from synapse.http.server import (
+ DirectServeResource,
+ respond_with_json,
+ wrap_json_request_handler,
+)
from synapse.http.servlet import parse_string
logger = logging.getLogger(__name__)
-class UploadResource(Resource):
+class UploadResource(DirectServeResource):
isLeaf = True
def __init__(self, hs, media_repo):
- Resource.__init__(self)
+ super().__init__()
self.media_repo = media_repo
self.filepaths = media_repo.filepaths
@@ -41,18 +43,13 @@ class UploadResource(Resource):
self.max_upload_size = hs.config.max_upload_size
self.clock = hs.get_clock()
- def render_POST(self, request):
- self._async_render_POST(request)
- return NOT_DONE_YET
-
def render_OPTIONS(self, request):
respond_with_json(request, 200, {}, send_cors=True)
return NOT_DONE_YET
@wrap_json_request_handler
- @defer.inlineCallbacks
- def _async_render_POST(self, request):
- requester = yield self.auth.get_user_by_req(request)
+ async def _async_render_POST(self, request):
+ requester = await self.auth.get_user_by_req(request)
# TODO: The checks here are a bit late. The content will have
# already been uploaded to a tmp file at this point
content_length = request.getHeader(b"Content-Length").decode("ascii")
@@ -81,7 +78,7 @@ class UploadResource(Resource):
# disposition = headers.getRawHeaders(b"Content-Disposition")[0]
# TODO(markjh): parse content-disposition
- content_uri = yield self.media_repo.create_content(
+ content_uri = await self.media_repo.create_content(
media_type, upload_name, request.content, content_length, requester.user
)
diff --git a/synapse/rest/saml2/response_resource.py b/synapse/rest/saml2/response_resource.py
index ab14b70675..939c87306c 100644
--- a/synapse/rest/saml2/response_resource.py
+++ b/synapse/rest/saml2/response_resource.py
@@ -18,34 +18,27 @@ import logging
import saml2
from saml2.client import Saml2Client
-from twisted.web.resource import Resource
-from twisted.web.server import NOT_DONE_YET
-
from synapse.api.errors import CodeMessageException
-from synapse.http.server import wrap_html_request_handler
+from synapse.http.server import DirectServeResource, wrap_html_request_handler
from synapse.http.servlet import parse_string
from synapse.rest.client.v1.login import SSOAuthHandler
logger = logging.getLogger(__name__)
-class SAML2ResponseResource(Resource):
+class SAML2ResponseResource(DirectServeResource):
"""A Twisted web resource which handles the SAML response"""
isLeaf = 1
def __init__(self, hs):
- Resource.__init__(self)
+ super().__init__()
self._saml_client = Saml2Client(hs.config.saml2_sp_config)
self._sso_auth_handler = SSOAuthHandler(hs)
- def render_POST(self, request):
- self._async_render_POST(request)
- return NOT_DONE_YET
-
@wrap_html_request_handler
- def _async_render_POST(self, request):
+ async def _async_render_POST(self, request):
resp_bytes = parse_string(request, "SAMLResponse", required=True)
relay_state = parse_string(request, "RelayState", required=True)
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index c74bcc8f0b..29589853c6 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -38,6 +38,14 @@ from synapse.util.caches.descriptors import Cache
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
from synapse.util.stringutils import exception_to_unicode
+# import a function which will return a monotonic time, in seconds
+try:
+ # on python 3, use time.monotonic, since time.clock can go backwards
+ from time import monotonic as monotonic_time
+except ImportError:
+ # ... but python 2 doesn't have it
+ from time import clock as monotonic_time
+
logger = logging.getLogger(__name__)
try:
@@ -167,22 +175,22 @@ class PerformanceCounters(object):
self.current_counters = {}
self.previous_counters = {}
- def update(self, key, start_time, end_time=None):
- if end_time is None:
- end_time = time.time()
- duration = end_time - start_time
+ def update(self, key, duration_secs):
count, cum_time = self.current_counters.get(key, (0, 0))
count += 1
- cum_time += duration
+ cum_time += duration_secs
self.current_counters[key] = (count, cum_time)
- return end_time
- def interval(self, interval_duration, limit=3):
+ def interval(self, interval_duration_secs, limit=3):
counters = []
for name, (count, cum_time) in iteritems(self.current_counters):
prev_count, prev_time = self.previous_counters.get(name, (0, 0))
counters.append(
- ((cum_time - prev_time) / interval_duration, count - prev_count, name)
+ (
+ (cum_time - prev_time) / interval_duration_secs,
+ count - prev_count,
+ name,
+ )
)
self.previous_counters = dict(self.current_counters)
@@ -213,7 +221,6 @@ class SQLBaseStore(object):
# is running in mainline, and we have some nice monitoring frontends
# to watch it
self._txn_perf_counters = PerformanceCounters()
- self._get_event_counters = PerformanceCounters()
self._get_event_cache = Cache(
"*getEvent*", keylen=3, max_entries=hs.config.event_cache_size
@@ -350,32 +357,24 @@ class SQLBaseStore(object):
)
def start_profiling(self):
- self._previous_loop_ts = self._clock.time_msec()
+ self._previous_loop_ts = monotonic_time()
def loop():
curr = self._current_txn_total_time
prev = self._previous_txn_total_time
self._previous_txn_total_time = curr
- time_now = self._clock.time_msec()
+ time_now = monotonic_time()
time_then = self._previous_loop_ts
self._previous_loop_ts = time_now
- ratio = (curr - prev) / (time_now - time_then)
+ duration = time_now - time_then
+ ratio = (curr - prev) / duration
- top_three_counters = self._txn_perf_counters.interval(
- time_now - time_then, limit=3
- )
-
- top_3_event_counters = self._get_event_counters.interval(
- time_now - time_then, limit=3
- )
+ top_three_counters = self._txn_perf_counters.interval(duration, limit=3)
perf_logger.info(
- "Total database time: %.3f%% {%s} {%s}",
- ratio * 100,
- top_three_counters,
- top_3_event_counters,
+ "Total database time: %.3f%% {%s}", ratio * 100, top_three_counters
)
self._clock.looping_call(loop, 10000)
@@ -383,7 +382,7 @@ class SQLBaseStore(object):
def _new_transaction(
self, conn, desc, after_callbacks, exception_callbacks, func, *args, **kwargs
):
- start = time.time()
+ start = monotonic_time()
txn_id = self._TXN_ID
# We don't really need these to be unique, so lets stop it from
@@ -449,7 +448,7 @@ class SQLBaseStore(object):
logger.debug("[TXN FAIL] {%s} %s", name, e)
raise
finally:
- end = time.time()
+ end = monotonic_time()
duration = end - start
LoggingContext.current_context().add_database_transaction(duration)
@@ -457,7 +456,7 @@ class SQLBaseStore(object):
transaction_logger.debug("[TXN END] {%s} %f sec", name, duration)
self._current_txn_total_time += duration
- self._txn_perf_counters.update(desc, start, end)
+ self._txn_perf_counters.update(desc, duration)
sql_txn_timer.labels(desc).observe(duration)
@defer.inlineCallbacks
@@ -523,11 +522,11 @@ class SQLBaseStore(object):
)
parent_context = None
- start_time = time.time()
+ start_time = monotonic_time()
def inner_func(conn, *args, **kwargs):
with LoggingContext("runWithConnection", parent_context) as context:
- sched_duration_sec = time.time() - start_time
+ sched_duration_sec = monotonic_time() - start_time
sql_scheduling_timer.observe(sched_duration_sec)
context.add_database_scheduled(sched_duration_sec)
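The switch to a monotonic clock matters because wall-clock time can be stepped backwards (NTP corrections, manual changes), which previously let measured durations go negative. A minimal illustration of the new pattern:

    # Durations taken from a monotonic clock can never be negative,
    # regardless of wall-clock adjustments.
    try:
        from time import monotonic as monotonic_time  # Python 3
    except ImportError:
        from time import clock as monotonic_time      # Python 2 fallback

    start = monotonic_time()
    sum(range(100000))                    # stand-in for a DB transaction
    duration = monotonic_time() - start   # guaranteed >= 0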
diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py
index 3413a46675..d2b113a4e7 100644
--- a/synapse/storage/devices.py
+++ b/synapse/storage/devices.py
@@ -24,6 +24,7 @@ from synapse.api.errors import StoreError
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage._base import Cache, SQLBaseStore, db_to_json
from synapse.storage.background_updates import BackgroundUpdateStore
+from synapse.util import batch_iter
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList
logger = logging.getLogger(__name__)
@@ -391,22 +392,47 @@ class DeviceWorkerStore(SQLBaseStore):
return now_stream_id, []
- @defer.inlineCallbacks
- def get_user_whose_devices_changed(self, from_key):
- """Get set of users whose devices have changed since `from_key`.
+ def get_users_whose_devices_changed(self, from_key, user_ids):
+ """Get set of users whose devices have changed since `from_key` that
+ are in the given list of user_ids.
+
+ Args:
+ from_key (str): The device lists stream token
+ user_ids (Iterable[str])
+
+ Returns:
+ Deferred[set[str]]: The set of user_ids whose devices have changed
+ since `from_key`
"""
from_key = int(from_key)
- changed = self._device_list_stream_cache.get_all_entities_changed(from_key)
- if changed is not None:
- defer.returnValue(set(changed))
- sql = """
- SELECT DISTINCT user_id FROM device_lists_stream WHERE stream_id > ?
- """
- rows = yield self._execute(
- "get_user_whose_devices_changed", None, sql, from_key
+ # Get set of users who *may* have changed. Users not in the returned
+ # list have definitely not changed.
+ to_check = list(
+ self._device_list_stream_cache.get_entities_changed(user_ids, from_key)
+ )
+
+ if not to_check:
+ return defer.succeed(set())
+
+ def _get_users_whose_devices_changed_txn(txn):
+ changes = set()
+
+ sql = """
+ SELECT DISTINCT user_id FROM device_lists_stream
+ WHERE stream_id > ?
+ AND user_id IN (%s)
+ """
+
+ for chunk in batch_iter(to_check, 100):
+ txn.execute(sql % (",".join("?" for _ in chunk),), (from_key,) + chunk)
+ changes.update(user_id for user_id, in txn)
+
+ return changes
+
+ return self.runInteraction(
+ "get_users_whose_devices_changed", _get_users_whose_devices_changed_txn
)
- defer.returnValue(set(row[0] for row in rows))
def get_all_device_list_changes_for_remotes(self, from_key, to_key):
"""Return a list of `(stream_id, user_id, destination)` which is the
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index fc10b9534e..7c4e1dc7ec 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -133,7 +133,7 @@ def _setup_new_database(cur, database_engine):
if ver <= SCHEMA_VERSION:
valid_dirs.append((ver, abs_path))
else:
- logger.warn("Unexpected entry in 'full_schemas': %s", filename)
+ logger.debug("Ignoring entry '%s' in 'full_schemas'", filename)
if not valid_dirs:
raise PrepareDatabaseException(
diff --git a/synapse/streams/events.py b/synapse/streams/events.py
index 9b416f2f40..488c49747a 100644
--- a/synapse/streams/events.py
+++ b/synapse/streams/events.py
@@ -59,21 +59,25 @@ class EventSources(object):
defer.returnValue(token)
@defer.inlineCallbacks
- def get_current_token_for_room(self, room_id):
- push_rules_key, _ = self.store.get_push_rules_stream_token()
- to_device_key = self.store.get_to_device_stream_token()
- device_list_key = self.store.get_device_stream_token()
- groups_key = self.store.get_group_stream_token()
+ def get_current_token_for_pagination(self):
+ """Get the current token for a given room to be used to paginate
+ events.
+ The returned token does not have the current values for fields other
+ than `room`, since they are not used during pagination.
+
+ Retuns:
+ Deferred[StreamToken]
+ """
token = StreamToken(
- room_key=(yield self.sources["room"].get_current_key_for_room(room_id)),
- presence_key=(yield self.sources["presence"].get_current_key()),
- typing_key=(yield self.sources["typing"].get_current_key()),
- receipt_key=(yield self.sources["receipt"].get_current_key()),
- account_data_key=(yield self.sources["account_data"].get_current_key()),
- push_rules_key=push_rules_key,
- to_device_key=to_device_key,
- device_list_key=device_list_key,
- groups_key=groups_key,
+ room_key=(yield self.sources["room"].get_current_key()),
+ presence_key=0,
+ typing_key=0,
+ receipt_key=0,
+ account_data_key=0,
+ push_rules_key=0,
+ to_device_key=0,
+ device_list_key=0,
+ groups_key=0,
)
defer.returnValue(token)
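The net effect is a token with a live position only for the room stream. A hedged illustration of the resulting shape (the room_key value is hypothetical):

    from synapse.types import StreamToken

    token = StreamToken(
        room_key="s123",  # the only field /messages pagination consults
        presence_key=0, typing_key=0, receipt_key=0, account_data_key=0,
        push_rules_key=0, to_device_key=0, device_list_key=0, groups_key=0,
    )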
diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py
index b1da81633c..cbe54d45dd 100644
--- a/synapse/util/caches/response_cache.py
+++ b/synapse/util/caches/response_cache.py
@@ -137,7 +137,7 @@ class ResponseCache(object):
*args: positional parameters to pass to the callback, if it is used
- **kwargs: named paramters to pass to the callback, if it is used
+ **kwargs: named parameters to pass to the callback, if it is used
Returns:
twisted.internet.defer.Deferred: yieldable result
diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py
index a9885cb507..6b0d2deea0 100644
--- a/synapse/util/logcontext.py
+++ b/synapse/util/logcontext.py
@@ -336,10 +336,9 @@ class LoggingContext(object):
logger.warning("Called stop on logcontext %s without calling start", self)
return
- usage_end = get_thread_resource_usage()
-
- self._resource_usage.ru_utime += usage_end.ru_utime - self.usage_start.ru_utime
- self._resource_usage.ru_stime += usage_end.ru_stime - self.usage_start.ru_stime
+ utime_delta, stime_delta = self._get_cputime()
+ self._resource_usage.ru_utime += utime_delta
+ self._resource_usage.ru_stime += stime_delta
self.usage_start = None
@@ -357,13 +356,44 @@ class LoggingContext(object):
# can include resource usage so far.
is_main_thread = threading.current_thread() is self.main_thread
if self.alive and self.usage_start and is_main_thread:
- current = get_thread_resource_usage()
- res.ru_utime += current.ru_utime - self.usage_start.ru_utime
- res.ru_stime += current.ru_stime - self.usage_start.ru_stime
+ utime_delta, stime_delta = self._get_cputime()
+ res.ru_utime += utime_delta
+ res.ru_stime += stime_delta
return res
+ def _get_cputime(self):
+ """Get the cpu usage time so far
+
+ Returns: Tuple[float, float]: seconds in user mode, seconds in system mode
+ """
+ current = get_thread_resource_usage()
+
+ utime_delta = current.ru_utime - self.usage_start.ru_utime
+ stime_delta = current.ru_stime - self.usage_start.ru_stime
+
+ # sanity check
+ if utime_delta < 0:
+ logger.error(
+ "utime went backwards! %f < %f",
+ current.ru_utime,
+ self.usage_start.ru_utime,
+ )
+ utime_delta = 0
+
+ if stime_delta < 0:
+ logger.error(
+ "stime went backwards! %f < %f",
+ current.ru_stime,
+ self.usage_start.ru_stime,
+ )
+ stime_delta = 0
+
+ return utime_delta, stime_delta
+
def add_database_transaction(self, duration_sec):
+ if duration_sec < 0:
+ raise ValueError("DB txn time can only be non-negative")
self._resource_usage.db_txn_count += 1
self._resource_usage.db_txn_duration_sec += duration_sec
@@ -374,6 +404,8 @@ class LoggingContext(object):
sched_sec (float): number of seconds it took us to get a
connection
"""
+ if sched_sec < 0:
+ raise ValueError("DB scheduling time can only be non-negative")
self._resource_usage.db_sched_duration_sec += sched_sec
def record_event_fetch(self, event_count):
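For completeness, the clamping added in _get_cputime amounts to the following (toy numbers, chosen to be exactly representable as binary floats):

    utime_start, utime_now = 1.00, 1.25
    utime_delta = utime_now - utime_start
    if utime_delta < 0:      # e.g. per-thread accounting glitch
        utime_delta = 0      # clamp rather than corrupt the running totals
    assert utime_delta == 0.25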
|