diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index 21d110c82d..965478d8d5 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -136,11 +136,6 @@ class Config(object):
with open(file_path) as file_stream:
return file_stream.read()
- @staticmethod
- def read_config_file(file_path):
- with open(file_path) as file_stream:
- return yaml.safe_load(file_stream)
-
def invoke_all(self, name, *args, **kargs):
results = []
for cls in type(self).mro():
@@ -155,12 +150,12 @@ class Config(object):
server_name,
generate_secrets=False,
report_stats=None,
+ open_private_ports=False,
):
"""Build a default configuration file
- This is used both when the user explicitly asks us to generate a config file
- (eg with --generate_config), and before loading the config at runtime (to give
- a base which the config files override)
+ This is used when the user explicitly asks us to generate a config file
+ (eg with --generate_config).
Args:
config_dir_path (str): The path where the config files are kept. Used to
@@ -179,23 +174,25 @@ class Config(object):
report_stats (bool|None): Initial setting for the report_stats setting.
If None, report_stats will be left unset.
+ open_private_ports (bool): True to leave private ports (such as the non-TLS
+ HTTP listener) open to the internet.
+
Returns:
str: the yaml config file
"""
- default_config = "\n\n".join(
+ return "\n\n".join(
dedent(conf)
for conf in self.invoke_all(
- "default_config",
+ "generate_config_section",
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
server_name=server_name,
generate_secrets=generate_secrets,
report_stats=report_stats,
+ open_private_ports=open_private_ports,
)
)
- return default_config
-
@classmethod
def load_config(cls, description, argv):
"""Parse the commandline and config files
@@ -240,9 +237,7 @@ class Config(object):
config_dir_path = os.path.abspath(config_dir_path)
data_dir_path = os.getcwd()
- config_dict = obj.read_config_files(
- config_files, config_dir_path=config_dir_path, data_dir_path=data_dir_path
- )
+ config_dict = read_config_files(config_files)
obj.parse_config_dict(
config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
)
@@ -300,6 +295,23 @@ class Config(object):
" config file."
),
)
+ generate_group.add_argument(
+ "--data-directory",
+ metavar="DIRECTORY",
+ help=(
+ "Specify where data such as the media store and database file should be"
+ " stored. Defaults to the current working directory."
+ ),
+ )
+ generate_group.add_argument(
+ "--open-private-ports",
+ action="store_true",
+ help=(
+ "Leave private ports (such as the non-TLS HTTP listener) open to the"
+ " internet. Do not use this unless you know what you are doing."
+ ),
+ )
+
config_args, remaining_args = config_parser.parse_known_args(argv)
config_files = find_config_files(search_paths=config_args.config_path)
@@ -333,6 +345,12 @@ class Config(object):
if not cls.path_exists(config_path):
print("Generating config file %s" % (config_path,))
+ if config_args.data_directory:
+ data_dir_path = config_args.data_directory
+ else:
+ data_dir_path = os.getcwd()
+ data_dir_path = os.path.abspath(data_dir_path)
+
server_name = config_args.server_name
if not server_name:
raise ConfigError(
@@ -346,6 +364,7 @@ class Config(object):
server_name=server_name,
report_stats=(config_args.report_stats == "yes"),
generate_secrets=True,
+ open_private_ports=config_args.open_private_ports,
)
if not cls.path_exists(config_dir_path):
@@ -354,8 +373,8 @@ class Config(object):
config_file.write("# vim:ft=yaml\n\n")
config_file.write(config_str)
- config = yaml.safe_load(config_str)
- obj.invoke_all("generate_files", config)
+ config_dict = yaml.safe_load(config_str)
+ obj.generate_missing_files(config_dict, config_dir_path)
print(
(
@@ -385,12 +404,9 @@ class Config(object):
obj.invoke_all("add_arguments", parser)
args = parser.parse_args(remaining_args)
- config_dict = obj.read_config_files(
- config_files, config_dir_path=config_dir_path, data_dir_path=data_dir_path
- )
-
+ config_dict = read_config_files(config_files)
if generate_missing_configs:
- obj.generate_missing_files(config_dict)
+ obj.generate_missing_files(config_dict, config_dir_path)
return None
obj.parse_config_dict(
@@ -400,53 +416,6 @@ class Config(object):
return obj
- def read_config_files(self, config_files, config_dir_path, data_dir_path):
- """Read the config files into a dict
-
- Args:
- config_files (iterable[str]): A list of the config files to read
-
- config_dir_path (str): The path where the config files are kept. Used to
- create filenames for things like the log config and the signing key.
-
- data_dir_path (str): The path where the data files are kept. Used to create
- filenames for things like the database and media store.
-
- Returns: dict
- """
- # first we read the config files into a dict
- specified_config = {}
- for config_file in config_files:
- yaml_config = self.read_config_file(config_file)
- specified_config.update(yaml_config)
-
- # not all of the options have sensible defaults in code, so we now need to
- # generate a default config file suitable for the specified server name...
- if "server_name" not in specified_config:
- raise ConfigError(MISSING_SERVER_NAME)
- server_name = specified_config["server_name"]
- config_string = self.generate_config(
- config_dir_path=config_dir_path,
- data_dir_path=data_dir_path,
- server_name=server_name,
- generate_secrets=False,
- )
-
- # ... and read it into a base config dict ...
- config = yaml.safe_load(config_string)
-
- # ... and finally, overlay it with the actual configuration.
- config.pop("log_config")
- config.update(specified_config)
-
- if "report_stats" not in config:
- raise ConfigError(
- MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS
- + "\n"
- + MISSING_REPORT_STATS_SPIEL
- )
- return config
-
def parse_config_dict(self, config_dict, config_dir_path, data_dir_path):
"""Read the information from the config dict into this Config object.
@@ -466,8 +435,32 @@ class Config(object):
data_dir_path=data_dir_path,
)
- def generate_missing_files(self, config_dict):
- self.invoke_all("generate_files", config_dict)
+ def generate_missing_files(self, config_dict, config_dir_path):
+ self.invoke_all("generate_files", config_dict, config_dir_path)
+
+
+def read_config_files(config_files):
+ """Read the config files into a dict
+
+ Args:
+ config_files (iterable[str]): A list of the config files to read
+
+ Returns: dict
+ """
+ specified_config = {}
+ for config_file in config_files:
+ with open(config_file) as file_stream:
+ yaml_config = yaml.safe_load(file_stream)
+ specified_config.update(yaml_config)
+
+ if "server_name" not in specified_config:
+ raise ConfigError(MISSING_SERVER_NAME)
+
+ if "report_stats" not in specified_config:
+ raise ConfigError(
+ MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS + "\n" + MISSING_REPORT_STATS_SPIEL
+ )
+ return specified_config
def find_config_files(search_paths):
diff --git a/synapse/config/api.py b/synapse/config/api.py
index d9eff9ae1f..dddea79a8a 100644
--- a/synapse/config/api.py
+++ b/synapse/config/api.py
@@ -30,7 +30,7 @@ class ApiConfig(Config):
],
)
- def default_config(cls, **kwargs):
+ def generate_config_section(cls, **kwargs):
return """\
## API Configuration ##
diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py
index b74cebfca9..8387ff6805 100644
--- a/synapse/config/appservice.py
+++ b/synapse/config/appservice.py
@@ -34,7 +34,7 @@ class AppServiceConfig(Config):
self.notify_appservices = config.get("notify_appservices", True)
self.track_appservice_user_ips = config.get("track_appservice_user_ips", False)
- def default_config(cls, **kwargs):
+ def generate_config_section(cls, **kwargs):
return """\
# A list of application service config files to use
#
diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py
index a08b08570b..8dac8152cf 100644
--- a/synapse/config/captcha.py
+++ b/synapse/config/captcha.py
@@ -28,7 +28,7 @@ class CaptchaConfig(Config):
"https://www.recaptcha.net/recaptcha/api/siteverify",
)
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
## Captcha ##
# See docs/CAPTCHA_SETUP for full details of configuring this.
diff --git a/synapse/config/cas.py b/synapse/config/cas.py
index a5f0449955..ebe34d933b 100644
--- a/synapse/config/cas.py
+++ b/synapse/config/cas.py
@@ -35,7 +35,7 @@ class CasConfig(Config):
self.cas_service_url = None
self.cas_required_attributes = {}
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """
# Enable CAS for registration and login.
#
diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py
index 6fd4931681..94916f3a49 100644
--- a/synapse/config/consent_config.py
+++ b/synapse/config/consent_config.py
@@ -111,5 +111,5 @@ class ConsentConfig(Config):
"policy_name", "Privacy Policy"
)
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return DEFAULT_CONFIG
diff --git a/synapse/config/database.py b/synapse/config/database.py
index c8963e276a..bcb2089dd7 100644
--- a/synapse/config/database.py
+++ b/synapse/config/database.py
@@ -38,7 +38,7 @@ class DatabaseConfig(Config):
self.set_databasepath(config.get("database_path"))
- def default_config(self, data_dir_path, **kwargs):
+ def generate_config_section(self, data_dir_path, **kwargs):
database_path = os.path.join(data_dir_path, "homeserver.db")
return (
"""\
diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py
index 07df7b7173..fcd55d3e3d 100644
--- a/synapse/config/emailconfig.py
+++ b/synapse/config/emailconfig.py
@@ -214,7 +214,7 @@ class EmailConfig(Config):
if not os.path.isfile(p):
raise ConfigError("Unable to find email template file %s" % (p,))
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """
# Enable sending emails for password resets, notification events or
# account expiry notices
@@ -233,11 +233,13 @@ class EmailConfig(Config):
# app_name: Matrix
#
# # Enable email notifications by default
+ # #
# notif_for_new_users: True
#
# # Defining a custom URL for Riot is only needed if email notifications
# # should contain links to a self-hosted installation of Riot; when set
# # the "app_name" setting is ignored
+ # #
# riot_base_url: "http://localhost/riot"
#
# # Enable sending password reset emails via the configured, trusted
@@ -250,16 +252,22 @@ class EmailConfig(Config):
# #
# # If this option is set to false and SMTP options have not been
# # configured, resetting user passwords via email will be disabled
+ # #
# #trust_identity_server_for_password_resets: false
#
# # Configure the time that a validation email or text message code
# # will expire after sending
# #
# # This is currently used for password resets
+ # #
# #validation_token_lifetime: 1h
#
# # Template directory. All template files should be stored within this
- # # directory
+ # # directory. If not set, default templates from within the Synapse
+ # # package will be used
+ # #
+ # # For the list of default templates, please see
+ # # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
# #
# #template_dir: res/templates
#
diff --git a/synapse/config/groups.py b/synapse/config/groups.py
index d11f4d3b96..2a522b5f44 100644
--- a/synapse/config/groups.py
+++ b/synapse/config/groups.py
@@ -21,7 +21,7 @@ class GroupsConfig(Config):
self.enable_group_creation = config.get("enable_group_creation", False)
self.group_creation_prefix = config.get("group_creation_prefix", "")
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
# Uncomment to allow non-server-admin users to create groups on this server
#
diff --git a/synapse/config/jwt_config.py b/synapse/config/jwt_config.py
index a2c97dea95..36d87cef03 100644
--- a/synapse/config/jwt_config.py
+++ b/synapse/config/jwt_config.py
@@ -41,7 +41,7 @@ class JWTConfig(Config):
self.jwt_secret = None
self.jwt_algorithm = None
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
# The JWT needs to contain a globally unique "sub" (subject) claim.
#
diff --git a/synapse/config/key.py b/synapse/config/key.py
index e58638f708..8fc74f9cdf 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -65,13 +65,18 @@ class TrustedKeyServer(object):
class KeyConfig(Config):
- def read_config(self, config, **kwargs):
+ def read_config(self, config, config_dir_path, **kwargs):
# the signing key can be specified inline or in a separate file
if "signing_key" in config:
self.signing_key = read_signing_keys([config["signing_key"]])
else:
- self.signing_key_path = config["signing_key_path"]
- self.signing_key = self.read_signing_key(self.signing_key_path)
+ signing_key_path = config.get("signing_key_path")
+ if signing_key_path is None:
+ signing_key_path = os.path.join(
+ config_dir_path, config["server_name"] + ".signing.key"
+ )
+
+ self.signing_key = self.read_signing_key(signing_key_path)
self.old_signing_keys = self.read_old_signing_keys(
config.get("old_signing_keys", {})
@@ -117,7 +122,7 @@ class KeyConfig(Config):
# falsification of values
self.form_secret = config.get("form_secret", None)
- def default_config(
+ def generate_config_section(
self, config_dir_path, server_name, generate_secrets=False, **kwargs
):
base_key_name = os.path.join(config_dir_path, server_name)
@@ -237,8 +242,15 @@ class KeyConfig(Config):
)
return keys
- def generate_files(self, config):
- signing_key_path = config["signing_key_path"]
+ def generate_files(self, config, config_dir_path):
+ if "signing_key" in config:
+ return
+
+ signing_key_path = config.get("signing_key_path")
+ if signing_key_path is None:
+ signing_key_path = os.path.join(
+ config_dir_path, config["server_name"] + ".signing.key"
+ )
if not self.path_exists(signing_key_path):
print("Generating signing key file %s" % (signing_key_path,))
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index 153a137517..931aec41c0 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -80,7 +80,7 @@ class LoggingConfig(Config):
self.log_config = self.abspath(config.get("log_config"))
self.log_file = self.abspath(config.get("log_file"))
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
log_config = os.path.join(config_dir_path, server_name + ".log.config")
return (
"""\
@@ -133,7 +133,7 @@ class LoggingConfig(Config):
help="Do not redirect stdout/stderr to the log",
)
- def generate_files(self, config):
+ def generate_files(self, config, config_dir_path):
log_config = config.get("log_config")
if log_config and not os.path.exists(log_config):
log_file = self.abspath("homeserver.log")
diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py
index 6af82e1329..3698441963 100644
--- a/synapse/config/metrics.py
+++ b/synapse/config/metrics.py
@@ -40,7 +40,7 @@ class MetricsConfig(Config):
"sentry.dsn field is required when sentry integration is enabled"
)
- def default_config(self, report_stats=None, **kwargs):
+ def generate_config_section(self, report_stats=None, **kwargs):
res = """\
## Metrics ###
diff --git a/synapse/config/password.py b/synapse/config/password.py
index 300b67f236..d5b5953f2f 100644
--- a/synapse/config/password.py
+++ b/synapse/config/password.py
@@ -26,15 +26,22 @@ class PasswordConfig(Config):
password_config = {}
self.password_enabled = password_config.get("enabled", True)
+ self.password_localdb_enabled = password_config.get("localdb_enabled", True)
self.password_pepper = password_config.get("pepper", "")
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """\
password_config:
# Uncomment to disable password login
#
#enabled: false
+ # Uncomment to disable authentication against the local password
+ # database. This is ignored if `enabled` is false, and is only useful
+ # if you have other password_providers.
+ #
+ #localdb_enabled: false
+
# Uncomment and change to a secret random string for extra security.
# DO NOT CHANGE THIS AFTER INITIAL SETUP!
#
diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py
index 8ffefd2639..788c39c9fb 100644
--- a/synapse/config/password_auth_providers.py
+++ b/synapse/config/password_auth_providers.py
@@ -46,7 +46,7 @@ class PasswordAuthProviderConfig(Config):
self.password_providers.append((provider_class, provider_config))
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
#password_providers:
# - module: "ldap_auth_provider.LdapAuthProvider"
diff --git a/synapse/config/push.py b/synapse/config/push.py
index 99d15e4461..1b932722a5 100644
--- a/synapse/config/push.py
+++ b/synapse/config/push.py
@@ -42,7 +42,7 @@ class PushConfig(Config):
)
self.push_include_content = not redact_content
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """
# Clients requesting push notifications can either have the body of
# the message sent in the notification poke along with other details
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index b03047f2b5..8c587f3fd2 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -80,7 +80,7 @@ class RatelimitConfig(Config):
"federation_rr_transactions_per_room_per_second", 50
)
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
## Ratelimiting ##
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index 6d8a2df29b..4a59e6ec90 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -85,7 +85,7 @@ class RegistrationConfig(Config):
"disable_msisdn_registration", False
)
- def default_config(self, generate_secrets=False, **kwargs):
+ def generate_config_section(self, generate_secrets=False, **kwargs):
if generate_secrets:
registration_shared_secret = 'registration_shared_secret: "%s"' % (
random_string_with_symbols(50),
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 15a19e0911..80a628d9b0 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -91,7 +91,9 @@ class ContentRepositoryConfig(Config):
self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M"))
self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M"))
- self.media_store_path = self.ensure_directory(config["media_store_path"])
+ self.media_store_path = self.ensure_directory(
+ config.get("media_store_path", "media_store")
+ )
backup_media_store_path = config.get("backup_media_store_path")
@@ -148,7 +150,7 @@ class ContentRepositoryConfig(Config):
(provider_class, parsed_config, wrapper_config)
)
- self.uploads_path = self.ensure_directory(config["uploads_path"])
+ self.uploads_path = self.ensure_directory(config.get("uploads_path", "uploads"))
self.dynamic_thumbnails = config.get("dynamic_thumbnails", False)
self.thumbnail_requirements = parse_thumbnail_requirements(
config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES)
@@ -188,7 +190,7 @@ class ContentRepositoryConfig(Config):
self.url_preview_url_blacklist = config.get("url_preview_url_blacklist", ())
- def default_config(self, data_dir_path, **kwargs):
+ def generate_config_section(self, data_dir_path, **kwargs):
media_store = os.path.join(data_dir_path, "media_store")
uploads_path = os.path.join(data_dir_path, "uploads")
diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py
index 24223db7a1..a92693017b 100644
--- a/synapse/config/room_directory.py
+++ b/synapse/config/room_directory.py
@@ -46,7 +46,7 @@ class RoomDirectoryConfig(Config):
_RoomDirectoryRule("room_list_publication_rules", {"action": "allow"})
]
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """
# Uncomment to disable searching the public room list. When disabled
# blocks searching local and remote room lists for local and remote
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index d86cf0e6ee..872a1ba934 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -61,7 +61,7 @@ class SAML2Config(Config):
},
}
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """\
# Enable SAML2 for registration and login. Uses pysaml2.
#
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 1e58b2e91b..2a74dea2ea 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -82,12 +82,32 @@ class ServerConfig(Config):
"require_auth_for_profile_requests", False
)
- # If set to 'True', requires authentication to access the server's
- # public rooms directory through the client API, and forbids any other
- # homeserver to fetch it via federation.
- self.restrict_public_rooms_to_local_users = config.get(
- "restrict_public_rooms_to_local_users", False
- )
+ if "restrict_public_rooms_to_local_users" in config and (
+ "allow_public_rooms_without_auth" in config
+ or "allow_public_rooms_over_federation" in config
+ ):
+ raise ConfigError(
+ "Can't use 'restrict_public_rooms_to_local_users' if"
+ " 'allow_public_rooms_without_auth' and/or"
+ " 'allow_public_rooms_over_federation' is set."
+ )
+
+ # Check if the legacy "restrict_public_rooms_to_local_users" flag is set. This
+ # flag is now obsolete but we need to check it for backward-compatibility.
+ if config.get("restrict_public_rooms_to_local_users", False):
+ self.allow_public_rooms_without_auth = False
+ self.allow_public_rooms_over_federation = False
+ else:
+ # If set to 'False', requires authentication to access the server's public
+ # rooms directory through the client API. Defaults to 'True'.
+ self.allow_public_rooms_without_auth = config.get(
+ "allow_public_rooms_without_auth", True
+ )
+ # If set to 'False', forbids any other homeserver to fetch the server's public
+ # rooms directory via federation. Defaults to 'True'.
+ self.allow_public_rooms_over_federation = config.get(
+ "allow_public_rooms_over_federation", True
+ )
default_room_version = config.get("default_room_version", DEFAULT_ROOM_VERSION)
@@ -307,7 +327,9 @@ class ServerConfig(Config):
def has_tls_listener(self):
return any(l["tls"] for l in self.listeners)
- def default_config(self, server_name, data_dir_path, **kwargs):
+ def generate_config_section(
+ self, server_name, data_dir_path, open_private_ports, **kwargs
+ ):
_, bind_port = parse_and_validate_server_name(server_name)
if bind_port is not None:
unsecure_port = bind_port - 400
@@ -320,6 +342,13 @@ class ServerConfig(Config):
# Bring DEFAULT_ROOM_VERSION into the local-scope for use in the
# default config string
default_room_version = DEFAULT_ROOM_VERSION
+
+ unsecure_http_binding = "port: %i\n tls: false" % (unsecure_port,)
+ if not open_private_ports:
+ unsecure_http_binding += (
+ "\n bind_addresses: ['::1', '127.0.0.1']"
+ )
+
return (
"""\
## Server ##
@@ -366,11 +395,15 @@ class ServerConfig(Config):
#
#require_auth_for_profile_requests: true
- # If set to 'true', requires authentication to access the server's
- # public rooms directory through the client API, and forbids any other
- # homeserver to fetch it via federation. Defaults to 'false'.
+ # If set to 'false', requires authentication to access the server's public rooms
+ # directory through the client API. Defaults to 'true'.
+ #
+ #allow_public_rooms_without_auth: false
+
+ # If set to 'false', forbids any other homeserver to fetch the server's public
+ # rooms directory via federation. Defaults to 'true'.
#
- #restrict_public_rooms_to_local_users: true
+ #allow_public_rooms_over_federation: false
# The default room version for newly created rooms.
#
@@ -511,9 +544,7 @@ class ServerConfig(Config):
# If you plan to use a reverse proxy, please see
# https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.rst.
#
- - port: %(unsecure_port)s
- tls: false
- bind_addresses: ['::1', '127.0.0.1']
+ - %(unsecure_http_binding)s
type: http
x_forwarded: true
@@ -521,7 +552,7 @@ class ServerConfig(Config):
- names: [client, federation]
compress: false
- # example additonal_resources:
+ # example additional_resources:
#
#additional_resources:
# "/_matrix/my/custom/endpoint":
diff --git a/synapse/config/server_notices_config.py b/synapse/config/server_notices_config.py
index 05110c17a6..eaac3d73bc 100644
--- a/synapse/config/server_notices_config.py
+++ b/synapse/config/server_notices_config.py
@@ -78,5 +78,5 @@ class ServerNoticesConfig(Config):
# todo: i18n
self.server_notices_room_name = c.get("room_name", "Server Notices")
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return DEFAULT_CONFIG
diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py
index 1968003cb3..e40797ab50 100644
--- a/synapse/config/spam_checker.py
+++ b/synapse/config/spam_checker.py
@@ -26,7 +26,7 @@ class SpamCheckerConfig(Config):
if provider is not None:
self.spam_checker = load_module(provider)
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
#spam_checker:
# module: "my_custom_project.SuperSpamChecker"
diff --git a/synapse/config/stats.py b/synapse/config/stats.py
index 73a87c73f2..b518a3ed9c 100644
--- a/synapse/config/stats.py
+++ b/synapse/config/stats.py
@@ -42,7 +42,7 @@ class StatsConfig(Config):
/ 1000
)
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """
# Local statistics collection. Used in populating the room directory.
#
diff --git a/synapse/config/third_party_event_rules.py b/synapse/config/third_party_event_rules.py
index 1bedd607b6..b3431441b9 100644
--- a/synapse/config/third_party_event_rules.py
+++ b/synapse/config/third_party_event_rules.py
@@ -26,7 +26,7 @@ class ThirdPartyRulesConfig(Config):
if provider is not None:
self.third_party_event_rules = load_module(provider)
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
# Server admins can define a Python module that implements extra rules for
# allowing or denying incoming events. In order to work, this module needs to
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index 9a66e8cc4b..ca508a224f 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -23,7 +23,7 @@ import six
from unpaddedbase64 import encode_base64
-from OpenSSL import crypto
+from OpenSSL import SSL, crypto
from twisted.internet._sslverify import Certificate, trustRootFromCertificates
from synapse.config._base import Config, ConfigError
@@ -81,6 +81,27 @@ class TlsConfig(Config):
"federation_verify_certificates", True
)
+ # Minimum TLS version to use for outbound federation traffic
+ self.federation_client_minimum_tls_version = str(
+ config.get("federation_client_minimum_tls_version", 1)
+ )
+
+ if self.federation_client_minimum_tls_version not in ["1", "1.1", "1.2", "1.3"]:
+ raise ConfigError(
+ "federation_client_minimum_tls_version must be one of: 1, 1.1, 1.2, 1.3"
+ )
+
+ # Prevent people shooting themselves in the foot here by setting it to
+ # the biggest number blindly
+ if self.federation_client_minimum_tls_version == "1.3":
+ if getattr(SSL, "OP_NO_TLSv1_3", None) is None:
+ raise ConfigError(
+ (
+ "federation_client_minimum_tls_version cannot be 1.3, "
+ "your OpenSSL does not support it"
+ )
+ )
+
# Whitelist of domains to not verify certificates for
fed_whitelist_entries = config.get(
"federation_certificate_verification_whitelist", []
@@ -217,7 +238,9 @@ class TlsConfig(Config):
if sha256_fingerprint not in sha256_fingerprints:
self.tls_fingerprints.append({"sha256": sha256_fingerprint})
- def default_config(self, config_dir_path, server_name, data_dir_path, **kwargs):
+ def generate_config_section(
+ self, config_dir_path, server_name, data_dir_path, **kwargs
+ ):
base_key_name = os.path.join(config_dir_path, server_name)
tls_certificate_path = base_key_name + ".tls.crt"
@@ -259,6 +282,15 @@ class TlsConfig(Config):
#
#federation_verify_certificates: false
+ # The minimum TLS version that will be used for outbound federation requests.
+ #
+ # Defaults to `1`. Configurable to `1`, `1.1`, `1.2`, or `1.3`. Note
+ # that setting this value higher than `1.2` will prevent federation to most
+ # of the public Matrix network: only configure it to `1.3` if you have an
+ # entirely private federation setup and you can ensure TLS 1.3 support.
+ #
+ #federation_client_minimum_tls_version: 1.2
+
# Skip federation certificate verification on the following whitelist
# of domains.
#
diff --git a/synapse/config/user_directory.py b/synapse/config/user_directory.py
index 0665dc3fcf..f6313e17d4 100644
--- a/synapse/config/user_directory.py
+++ b/synapse/config/user_directory.py
@@ -33,7 +33,7 @@ class UserDirectoryConfig(Config):
"search_all_users", False
)
- def default_config(self, config_dir_path, server_name, **kwargs):
+ def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """
# User Directory configuration
#
diff --git a/synapse/config/voip.py b/synapse/config/voip.py
index 01e0cb2e28..2ca0e1cf70 100644
--- a/synapse/config/voip.py
+++ b/synapse/config/voip.py
@@ -26,7 +26,7 @@ class VoipConfig(Config):
)
self.turn_allow_guests = config.get("turn_allow_guests", True)
- def default_config(self, **kwargs):
+ def generate_config_section(self, **kwargs):
return """\
## TURN ##
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index 2bc5cc3807..4f48e8e88d 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -24,12 +24,25 @@ from OpenSSL import SSL, crypto
from twisted.internet._sslverify import _defaultCurveName
from twisted.internet.abstract import isIPAddress, isIPv6Address
from twisted.internet.interfaces import IOpenSSLClientConnectionCreator
-from twisted.internet.ssl import CertificateOptions, ContextFactory, platformTrust
+from twisted.internet.ssl import (
+ CertificateOptions,
+ ContextFactory,
+ TLSVersion,
+ platformTrust,
+)
from twisted.python.failure import Failure
logger = logging.getLogger(__name__)
+_TLS_VERSION_MAP = {
+ "1": TLSVersion.TLSv1_0,
+ "1.1": TLSVersion.TLSv1_1,
+ "1.2": TLSVersion.TLSv1_2,
+ "1.3": TLSVersion.TLSv1_3,
+}
+
+
class ServerContextFactory(ContextFactory):
"""Factory for PyOpenSSL SSL contexts that are used to handle incoming
connections."""
@@ -43,16 +56,18 @@ class ServerContextFactory(ContextFactory):
try:
_ecCurve = crypto.get_elliptic_curve(_defaultCurveName)
context.set_tmp_ecdh(_ecCurve)
-
except Exception:
logger.exception("Failed to enable elliptic curve for TLS")
- context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
+
+ context.set_options(
+ SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3 | SSL.OP_NO_TLSv1 | SSL.OP_NO_TLSv1_1
+ )
context.use_certificate_chain_file(config.tls_certificate_file)
context.use_privatekey(config.tls_private_key)
# https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
context.set_cipher_list(
- "ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1"
+ "ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1:!AESCCM"
)
def getContext(self):
@@ -79,10 +94,22 @@ class ClientTLSOptionsFactory(object):
# Use CA root certs provided by OpenSSL
trust_root = platformTrust()
- self._verify_ssl_context = CertificateOptions(trustRoot=trust_root).getContext()
+ # "insecurelyLowerMinimumTo" is the argument that will go lower than
+ # Twisted's default, which is why it is marked as "insecure" (since
+ # Twisted's defaults are reasonably secure). But, since Twisted is
+ # moving to TLS 1.2 by default, we want to respect the config option if
+ # it is set to 1.0 (which the alternate option, raiseMinimumTo, will not
+ # let us do).
+ minTLS = _TLS_VERSION_MAP[config.federation_client_minimum_tls_version]
+
+ self._verify_ssl = CertificateOptions(
+ trustRoot=trust_root, insecurelyLowerMinimumTo=minTLS
+ )
+ self._verify_ssl_context = self._verify_ssl.getContext()
self._verify_ssl_context.set_info_callback(self._context_info_cb)
- self._no_verify_ssl_context = CertificateOptions().getContext()
+ self._no_verify_ssl = CertificateOptions(insecurelyLowerMinimumTo=minTLS)
+ self._no_verify_ssl_context = self._no_verify_ssl.getContext()
self._no_verify_ssl_context.set_info_callback(self._context_info_cb)
def get_options(self, host):
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index b4854e82f6..955f0f4308 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -721,15 +721,15 @@ class PublicRoomList(BaseFederationServlet):
PATH = "/publicRooms"
- def __init__(self, handler, authenticator, ratelimiter, server_name, deny_access):
+ def __init__(self, handler, authenticator, ratelimiter, server_name, allow_access):
super(PublicRoomList, self).__init__(
handler, authenticator, ratelimiter, server_name
)
- self.deny_access = deny_access
+ self.allow_access = allow_access
@defer.inlineCallbacks
def on_GET(self, origin, content, query):
- if self.deny_access:
+ if not self.allow_access:
raise FederationDeniedError(origin)
limit = parse_integer_from_args(query, "limit", 0)
@@ -1436,7 +1436,7 @@ def register_servlets(hs, resource, authenticator, ratelimiter, servlet_groups=N
authenticator=authenticator,
ratelimiter=ratelimiter,
server_name=hs.hostname,
- deny_access=hs.config.restrict_public_rooms_to_local_users,
+ allow_access=hs.config.allow_public_rooms_over_federation,
).register(resource)
if "group_server" in servlet_groups:
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 97b21c4093..c8c1ed3246 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -743,7 +743,7 @@ class AuthHandler(BaseHandler):
result = (result, None)
defer.returnValue(result)
- if login_type == LoginType.PASSWORD:
+ if login_type == LoginType.PASSWORD and self.hs.config.password_localdb_enabled:
known_login_type = True
canonical_user_id = yield self._check_local_password(
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index f59d0479b5..99e8413092 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -101,9 +101,13 @@ class DeviceWorkerHandler(BaseHandler):
room_ids = yield self.store.get_rooms_for_user(user_id)
- # First we check if any devices have changed
- changed = yield self.store.get_user_whose_devices_changed(
- from_token.device_list_key
+ # First we check if any devices have changed for users that we share
+ # rooms with.
+ users_who_share_room = yield self.store.get_users_who_share_room_with_user(
+ user_id
+ )
+ changed = yield self.store.get_users_whose_devices_changed(
+ from_token.device_list_key, users_who_share_room
)
# Then work out if any users have since joined
@@ -188,10 +192,6 @@ class DeviceWorkerHandler(BaseHandler):
break
if possibly_changed or possibly_left:
- users_who_share_room = yield self.store.get_users_who_share_room_with_user(
- user_id
- )
-
# Take the intersection of the users whose devices may have changed
# and those that actually still share a room with the user
possibly_joined = possibly_changed & users_who_share_room
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 89d89fc27c..db3f8cb76b 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -32,6 +32,7 @@ from synapse.storage.state import StateFilter
from synapse.types import RoomAlias, RoomID, RoomStreamToken, StreamToken, UserID
from synapse.util import stringutils
from synapse.util.async_helpers import Linearizer
+from synapse.util.caches.response_cache import ResponseCache
from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
@@ -40,6 +41,8 @@ logger = logging.getLogger(__name__)
id_server_scheme = "https://"
+FIVE_MINUTES_IN_MS = 5 * 60 * 1000
+
class RoomCreationHandler(BaseHandler):
@@ -75,6 +78,12 @@ class RoomCreationHandler(BaseHandler):
# linearizer to stop two upgrades happening at once
self._upgrade_linearizer = Linearizer("room_upgrade_linearizer")
+        # If a user tries to upgrade the same room multiple times in quick
+ # succession, only process the first attempt and return its result to
+ # subsequent requests
+ self._upgrade_response_cache = ResponseCache(
+ hs, "room_upgrade", timeout_ms=FIVE_MINUTES_IN_MS
+ )
self._server_notices_mxid = hs.config.server_notices_mxid
self.third_party_event_rules = hs.get_third_party_event_rules()
@@ -95,67 +104,96 @@ class RoomCreationHandler(BaseHandler):
user_id = requester.user.to_string()
- with (yield self._upgrade_linearizer.queue(old_room_id)):
- # start by allocating a new room id
- r = yield self.store.get_room(old_room_id)
- if r is None:
- raise NotFoundError("Unknown room id %s" % (old_room_id,))
- new_room_id = yield self._generate_room_id(
- creator_id=user_id, is_public=r["is_public"]
- )
+ # Check if this room is already being upgraded by another person
+ for key in self._upgrade_response_cache.pending_result_cache:
+ if key[0] == old_room_id and key[1] != user_id:
+ # Two different people are trying to upgrade the same room.
+ # Send the second an error.
+ #
+ # Note that this of course only gets caught if both users are
+ # on the same homeserver.
+ raise SynapseError(
+ 400, "An upgrade for this room is currently in progress"
+ )
- logger.info("Creating new room %s to replace %s", new_room_id, old_room_id)
+ # Upgrade the room
+ #
+ # If this user has sent multiple upgrade requests for the same room
+ # and one of them is not complete yet, cache the response and
+ # return it to all subsequent requests
+ ret = yield self._upgrade_response_cache.wrap(
+ (old_room_id, user_id),
+ self._upgrade_room,
+ requester,
+ old_room_id,
+ new_version, # args for _upgrade_room
+ )
+ defer.returnValue(ret)
- # we create and auth the tombstone event before properly creating the new
- # room, to check our user has perms in the old room.
- tombstone_event, tombstone_context = (
- yield self.event_creation_handler.create_event(
- requester,
- {
- "type": EventTypes.Tombstone,
- "state_key": "",
- "room_id": old_room_id,
- "sender": user_id,
- "content": {
- "body": "This room has been replaced",
- "replacement_room": new_room_id,
- },
- },
- token_id=requester.access_token_id,
- )
- )
- old_room_version = yield self.store.get_room_version(old_room_id)
- yield self.auth.check_from_context(
- old_room_version, tombstone_event, tombstone_context
- )
+ @defer.inlineCallbacks
+ def _upgrade_room(self, requester, old_room_id, new_version):
+ user_id = requester.user.to_string()
+
+ # start by allocating a new room id
+ r = yield self.store.get_room(old_room_id)
+ if r is None:
+ raise NotFoundError("Unknown room id %s" % (old_room_id,))
+ new_room_id = yield self._generate_room_id(
+ creator_id=user_id, is_public=r["is_public"]
+ )
+
+ logger.info("Creating new room %s to replace %s", new_room_id, old_room_id)
- yield self.clone_existing_room(
+ # we create and auth the tombstone event before properly creating the new
+ # room, to check our user has perms in the old room.
+ tombstone_event, tombstone_context = (
+ yield self.event_creation_handler.create_event(
requester,
- old_room_id=old_room_id,
- new_room_id=new_room_id,
- new_room_version=new_version,
- tombstone_event_id=tombstone_event.event_id,
+ {
+ "type": EventTypes.Tombstone,
+ "state_key": "",
+ "room_id": old_room_id,
+ "sender": user_id,
+ "content": {
+ "body": "This room has been replaced",
+ "replacement_room": new_room_id,
+ },
+ },
+ token_id=requester.access_token_id,
)
+ )
+ old_room_version = yield self.store.get_room_version(old_room_id)
+ yield self.auth.check_from_context(
+ old_room_version, tombstone_event, tombstone_context
+ )
- # now send the tombstone
- yield self.event_creation_handler.send_nonmember_event(
- requester, tombstone_event, tombstone_context
- )
+ yield self.clone_existing_room(
+ requester,
+ old_room_id=old_room_id,
+ new_room_id=new_room_id,
+ new_room_version=new_version,
+ tombstone_event_id=tombstone_event.event_id,
+ )
- old_room_state = yield tombstone_context.get_current_state_ids(self.store)
+ # now send the tombstone
+ yield self.event_creation_handler.send_nonmember_event(
+ requester, tombstone_event, tombstone_context
+ )
- # update any aliases
- yield self._move_aliases_to_new_room(
- requester, old_room_id, new_room_id, old_room_state
- )
+ old_room_state = yield tombstone_context.get_current_state_ids(self.store)
- # and finally, shut down the PLs in the old room, and update them in the new
- # room.
- yield self._update_upgraded_room_pls(
- requester, old_room_id, new_room_id, old_room_state
- )
+ # update any aliases
+ yield self._move_aliases_to_new_room(
+ requester, old_room_id, new_room_id, old_room_state
+ )
+
+ # and finally, shut down the PLs in the old room, and update them in the new
+ # room.
+ yield self._update_upgraded_room_pls(
+ requester, old_room_id, new_room_id, old_room_state
+ )
- defer.returnValue(new_room_id)
+ defer.returnValue(new_room_id)
@defer.inlineCallbacks
def _update_upgraded_room_pls(
diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py
index 5a0995d4fe..d90c9e0108 100644
--- a/synapse/handlers/set_password.py
+++ b/synapse/handlers/set_password.py
@@ -33,6 +33,9 @@ class SetPasswordHandler(BaseHandler):
@defer.inlineCallbacks
def set_password(self, user_id, newpassword, requester=None):
+ if not self.hs.config.password_localdb_enabled:
+ raise SynapseError(403, "Password change disabled", errcode=Codes.FORBIDDEN)
+
password_hash = yield self._auth_handler.hash(newpassword)
except_device_id = requester.device_id if requester else None
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 3032d7f663..a338065984 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -1061,40 +1061,74 @@ class SyncHandler(object):
newly_left_rooms,
newly_left_users,
):
+ """Generate the DeviceLists section of sync
+
+ Args:
+ sync_result_builder (SyncResultBuilder)
+ newly_joined_rooms (set[str]): Set of rooms user has joined since
+ previous sync
+ newly_joined_or_invited_users (set[str]): Set of users that have
+ joined or been invited to a room since previous sync.
+ newly_left_rooms (set[str]): Set of rooms user has left since
+ previous sync
+ newly_left_users (set[str]): Set of users that have left a room
+ we're in since previous sync
+
+ Returns:
+ Deferred[DeviceLists]
+ """
+
user_id = sync_result_builder.sync_config.user.to_string()
since_token = sync_result_builder.since_token
+        # We're going to mutate these fields, so let's copy them rather than
+        # assume they won't get used later.
+ newly_joined_or_invited_users = set(newly_joined_or_invited_users)
+ newly_left_users = set(newly_left_users)
+
if since_token and since_token.device_list_key:
- changed = yield self.store.get_user_whose_devices_changed(
- since_token.device_list_key
+ # We want to figure out what user IDs the client should refetch
+ # device keys for, and which users we aren't going to track changes
+ # for anymore.
+ #
+ # For the first step we check:
+ # a. if any users we share a room with have updated their devices,
+ # and
+ # b. we also check if we've joined any new rooms, or if a user has
+ # joined a room we're in.
+ #
+ # For the second step we just find any users we no longer share a
+ # room with by looking at all users that have left a room plus users
+ # that were in a room we've left.
+
+ users_who_share_room = yield self.store.get_users_who_share_room_with_user(
+ user_id
+ )
+
+ # Step 1a, check for changes in devices of users we share a room with
+ users_that_have_changed = yield self.store.get_users_whose_devices_changed(
+ since_token.device_list_key, users_who_share_room
)
- # TODO: Be more clever than this, i.e. remove users who we already
- # share a room with?
+ # Step 1b, check for newly joined rooms
for room_id in newly_joined_rooms:
joined_users = yield self.state.get_current_users_in_room(room_id)
newly_joined_or_invited_users.update(joined_users)
- for room_id in newly_left_rooms:
- left_users = yield self.state.get_current_users_in_room(room_id)
- newly_left_users.update(left_users)
-
# TODO: Check that these users are actually new, i.e. either they
# weren't in the previous sync *or* they left and rejoined.
- changed.update(newly_joined_or_invited_users)
+ users_that_have_changed.update(newly_joined_or_invited_users)
- if not changed and not newly_left_users:
- defer.returnValue(DeviceLists(changed=[], left=newly_left_users))
+ # Now find users that we no longer track
+ for room_id in newly_left_rooms:
+ left_users = yield self.state.get_current_users_in_room(room_id)
+ newly_left_users.update(left_users)
- users_who_share_room = yield self.store.get_users_who_share_room_with_user(
- user_id
- )
+ # Remove any users that we still share a room with.
+ newly_left_users -= users_who_share_room
defer.returnValue(
- DeviceLists(
- changed=users_who_share_room & changed,
- left=set(newly_left_users) - users_who_share_room,
- )
+ DeviceLists(changed=users_that_have_changed, left=newly_left_users)
)
else:
defer.returnValue(DeviceLists(changed=[], left=[]))
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 13698d9638..6324c00ef1 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -95,6 +95,7 @@ CONDITIONAL_REQUIREMENTS = {
"url_preview": ["lxml>=3.5.0"],
"test": ["mock>=2.0", "parameterized"],
"sentry": ["sentry-sdk>=0.7.2"],
+ "jwt": ["pyjwt>=1.6.4"],
}
ALL_OPTIONAL_REQUIREMENTS = set()
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index 4efb679a04..ede6bc8b1e 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -336,7 +336,7 @@ class LoginRestServlet(RestServlet):
}
else:
user_id, access_token = (
- yield self.handlers.registration_handler.register(localpart=user)
+ yield self.registration_handler.register(localpart=user)
)
device_id = login_submission.get("device_id")
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index a028337125..cca7e45ddb 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -311,7 +311,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
# Option to allow servers to require auth when accessing
# /publicRooms via CS API. This is especially helpful in private
# federations.
- if self.hs.config.restrict_public_rooms_to_local_users:
+ if not self.hs.config.allow_public_rooms_without_auth:
raise
# We allow people to not be authed if they're just looking at our
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index aae43d0f99..29589853c6 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -175,22 +175,22 @@ class PerformanceCounters(object):
self.current_counters = {}
self.previous_counters = {}
- def update(self, key, start_time, end_time=None):
- if end_time is None:
- end_time = time.time()
- duration = end_time - start_time
+ def update(self, key, duration_secs):
count, cum_time = self.current_counters.get(key, (0, 0))
count += 1
- cum_time += duration
+ cum_time += duration_secs
self.current_counters[key] = (count, cum_time)
- return end_time
- def interval(self, interval_duration, limit=3):
+ def interval(self, interval_duration_secs, limit=3):
counters = []
for name, (count, cum_time) in iteritems(self.current_counters):
prev_count, prev_time = self.previous_counters.get(name, (0, 0))
counters.append(
- ((cum_time - prev_time) / interval_duration, count - prev_count, name)
+ (
+ (cum_time - prev_time) / interval_duration_secs,
+ count - prev_count,
+ name,
+ )
)
self.previous_counters = dict(self.current_counters)
@@ -221,7 +221,6 @@ class SQLBaseStore(object):
# is running in mainline, and we have some nice monitoring frontends
# to watch it
self._txn_perf_counters = PerformanceCounters()
- self._get_event_counters = PerformanceCounters()
self._get_event_cache = Cache(
"*getEvent*", keylen=3, max_entries=hs.config.event_cache_size
@@ -369,21 +368,13 @@ class SQLBaseStore(object):
time_then = self._previous_loop_ts
self._previous_loop_ts = time_now
- ratio = (curr - prev) / (time_now - time_then)
-
- top_three_counters = self._txn_perf_counters.interval(
- time_now - time_then, limit=3
- )
+ duration = time_now - time_then
+ ratio = (curr - prev) / duration
- top_3_event_counters = self._get_event_counters.interval(
- time_now - time_then, limit=3
- )
+ top_three_counters = self._txn_perf_counters.interval(duration, limit=3)
perf_logger.info(
- "Total database time: %.3f%% {%s} {%s}",
- ratio * 100,
- top_three_counters,
- top_3_event_counters,
+ "Total database time: %.3f%% {%s}", ratio * 100, top_three_counters
)
self._clock.looping_call(loop, 10000)
@@ -465,7 +456,7 @@ class SQLBaseStore(object):
transaction_logger.debug("[TXN END] {%s} %f sec", name, duration)
self._current_txn_total_time += duration
- self._txn_perf_counters.update(desc, start, end)
+ self._txn_perf_counters.update(desc, duration)
sql_txn_timer.labels(desc).observe(duration)
@defer.inlineCallbacks
diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py
index 3413a46675..d2b113a4e7 100644
--- a/synapse/storage/devices.py
+++ b/synapse/storage/devices.py
@@ -24,6 +24,7 @@ from synapse.api.errors import StoreError
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage._base import Cache, SQLBaseStore, db_to_json
from synapse.storage.background_updates import BackgroundUpdateStore
+from synapse.util import batch_iter
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList
logger = logging.getLogger(__name__)
@@ -391,22 +392,47 @@ class DeviceWorkerStore(SQLBaseStore):
return now_stream_id, []
- @defer.inlineCallbacks
- def get_user_whose_devices_changed(self, from_key):
- """Get set of users whose devices have changed since `from_key`.
+ def get_users_whose_devices_changed(self, from_key, user_ids):
+ """Get set of users whose devices have changed since `from_key` that
+ are in the given list of user_ids.
+
+ Args:
+ from_key (str): The device lists stream token
+ user_ids (Iterable[str])
+
+ Returns:
+ Deferred[set[str]]: The set of user_ids whose devices have changed
+ since `from_key`
"""
from_key = int(from_key)
- changed = self._device_list_stream_cache.get_all_entities_changed(from_key)
- if changed is not None:
- defer.returnValue(set(changed))
- sql = """
- SELECT DISTINCT user_id FROM device_lists_stream WHERE stream_id > ?
- """
- rows = yield self._execute(
- "get_user_whose_devices_changed", None, sql, from_key
+ # Get set of users who *may* have changed. Users not in the returned
+ # list have definitely not changed.
+ to_check = list(
+ self._device_list_stream_cache.get_entities_changed(user_ids, from_key)
+ )
+
+ if not to_check:
+ return defer.succeed(set())
+
+ def _get_users_whose_devices_changed_txn(txn):
+ changes = set()
+
+ sql = """
+ SELECT DISTINCT user_id FROM device_lists_stream
+ WHERE stream_id > ?
+ AND user_id IN (%s)
+ """
+
+ for chunk in batch_iter(to_check, 100):
+ txn.execute(sql % (",".join("?" for _ in chunk),), (from_key,) + chunk)
+ changes.update(user_id for user_id, in txn)
+
+ return changes
+
+ return self.runInteraction(
+ "get_users_whose_devices_changed", _get_users_whose_devices_changed_txn
)
- defer.returnValue(set(row[0] for row in rows))
def get_all_device_list_changes_for_remotes(self, from_key, to_key):
"""Return a list of `(stream_id, user_id, destination)` which is the
diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py
index b1da81633c..cbe54d45dd 100644
--- a/synapse/util/caches/response_cache.py
+++ b/synapse/util/caches/response_cache.py
@@ -137,7 +137,7 @@ class ResponseCache(object):
*args: positional parameters to pass to the callback, if it is used
- **kwargs: named paramters to pass to the callback, if it is used
+ **kwargs: named parameters to pass to the callback, if it is used
Returns:
twisted.internet.defer.Deferred: yieldable result
diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py
index a9885cb507..6b0d2deea0 100644
--- a/synapse/util/logcontext.py
+++ b/synapse/util/logcontext.py
@@ -336,10 +336,9 @@ class LoggingContext(object):
logger.warning("Called stop on logcontext %s without calling start", self)
return
- usage_end = get_thread_resource_usage()
-
- self._resource_usage.ru_utime += usage_end.ru_utime - self.usage_start.ru_utime
- self._resource_usage.ru_stime += usage_end.ru_stime - self.usage_start.ru_stime
+ utime_delta, stime_delta = self._get_cputime()
+ self._resource_usage.ru_utime += utime_delta
+ self._resource_usage.ru_stime += stime_delta
self.usage_start = None
@@ -357,13 +356,44 @@ class LoggingContext(object):
# can include resource usage so far.
is_main_thread = threading.current_thread() is self.main_thread
if self.alive and self.usage_start and is_main_thread:
- current = get_thread_resource_usage()
- res.ru_utime += current.ru_utime - self.usage_start.ru_utime
- res.ru_stime += current.ru_stime - self.usage_start.ru_stime
+ utime_delta, stime_delta = self._get_cputime()
+ res.ru_utime += utime_delta
+ res.ru_stime += stime_delta
return res
+ def _get_cputime(self):
+ """Get the cpu usage time so far
+
+ Returns: Tuple[float, float]: seconds in user mode, seconds in system mode
+ """
+ current = get_thread_resource_usage()
+
+ utime_delta = current.ru_utime - self.usage_start.ru_utime
+ stime_delta = current.ru_stime - self.usage_start.ru_stime
+
+ # sanity check
+ if utime_delta < 0:
+ logger.error(
+ "utime went backwards! %f < %f",
+ current.ru_utime,
+ self.usage_start.ru_utime,
+ )
+ utime_delta = 0
+
+ if stime_delta < 0:
+ logger.error(
+ "stime went backwards! %f < %f",
+ current.ru_stime,
+ self.usage_start.ru_stime,
+ )
+ stime_delta = 0
+
+ return utime_delta, stime_delta
+
def add_database_transaction(self, duration_sec):
+ if duration_sec < 0:
+ raise ValueError("DB txn time can only be non-negative")
self._resource_usage.db_txn_count += 1
self._resource_usage.db_txn_duration_sec += duration_sec
@@ -374,6 +404,8 @@ class LoggingContext(object):
sched_sec (float): number of seconds it took us to get a
connection
"""
+ if sched_sec < 0:
+ raise ValueError("DB scheduling time can only be non-negative")
self._resource_usage.db_sched_duration_sec += sched_sec
def record_event_fetch(self, event_count):
|