Diffstat (limited to 'synapse/config')
-rw-r--r--  synapse/config/appservice.py      |   3
-rw-r--r--  synapse/config/cas.py             |  46
-rw-r--r--  synapse/config/jwt_config.py      |   2
-rw-r--r--  synapse/config/logger.py          |  98
-rw-r--r--  synapse/config/oidc_config.py     |  16
-rw-r--r--  synapse/config/push.py            |  48
-rw-r--r--  synapse/config/registration.py    |   7
-rw-r--r--  synapse/config/repository.py      |   4
-rw-r--r--  synapse/config/room_directory.py  |   2
-rw-r--r--  synapse/config/saml2_config.py    | 119
-rw-r--r--  synapse/config/server.py          |   2
-rw-r--r--  synapse/config/tls.py             |  18
-rw-r--r--  synapse/config/tracer.py          |   2
-rw-r--r--  synapse/config/workers.py         |  18
14 files changed, 249 insertions, 136 deletions
diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py
index 8ed3e24258..746fc3cc02 100644
--- a/synapse/config/appservice.py
+++ b/synapse/config/appservice.py
@@ -160,6 +160,8 @@ def _load_appservice(hostname, as_info, config_filename):
     if as_info.get("ip_range_whitelist"):
         ip_range_whitelist = IPSet(as_info.get("ip_range_whitelist"))
 
+    supports_ephemeral = as_info.get("de.sorunome.msc2409.push_ephemeral", False)
+
     return ApplicationService(
         token=as_info["as_token"],
         hostname=hostname,
@@ -168,6 +170,7 @@ def _load_appservice(hostname, as_info, config_filename):
         hs_token=as_info["hs_token"],
         sender=user_id,
         id=as_info["id"],
+        supports_ephemeral=supports_ephemeral,
         protocols=protocols,
         rate_limited=rate_limited,
         ip_range_whitelist=ip_range_whitelist,
diff --git a/synapse/config/cas.py b/synapse/config/cas.py
index 4526c1a67b..2f97e6d258 100644
--- a/synapse/config/cas.py
+++ b/synapse/config/cas.py
@@ -26,14 +26,14 @@ class CasConfig(Config):
 
     def read_config(self, config, **kwargs):
         cas_config = config.get("cas_config", None)
-        if cas_config:
-            self.cas_enabled = cas_config.get("enabled", True)
+        self.cas_enabled = cas_config and cas_config.get("enabled", True)
+
+        if self.cas_enabled:
             self.cas_server_url = cas_config["server_url"]
             self.cas_service_url = cas_config["service_url"]
             self.cas_displayname_attribute = cas_config.get("displayname_attribute")
-            self.cas_required_attributes = cas_config.get("required_attributes", {})
+            self.cas_required_attributes = cas_config.get("required_attributes") or {}
         else:
-            self.cas_enabled = False
             self.cas_server_url = None
             self.cas_service_url = None
             self.cas_displayname_attribute = None
@@ -41,13 +41,35 @@ class CasConfig(Config):
 
     def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """
-        # Enable CAS for registration and login.
+        # Enable Central Authentication Service (CAS) for registration and login.
         #
-        #cas_config:
-        #   enabled: true
-        #   server_url: "https://cas-server.com"
-        #   service_url: "https://homeserver.domain.com:8448"
-        #   #displayname_attribute: name
-        #   #required_attributes:
-        #   #    name: value
+        cas_config:
+          # Uncomment the following to enable authorization against a CAS server.
+          # Defaults to false.
+          #
+          #enabled: true
+
+          # The URL of the CAS authorization endpoint.
+          #
+          #server_url: "https://cas-server.com"
+
+          # The public URL of the homeserver.
+          #
+          #service_url: "https://homeserver.domain.com:8448"
+
+          # The attribute of the CAS response to use as the display name.
+          #
+          # If unset, no displayname will be set.
+          #
+          #displayname_attribute: name
+
+          # It is possible to configure Synapse to only allow logins if CAS attributes
+          # match particular values. All of the keys in the mapping below must exist
+          # and the values must match the given value. Alternately if the given value
+          # is None then any value is allowed (the attribute just must exist).
+          # All of the listed attributes must match for the login to be permitted.
+          #
+          #required_attributes:
+          #  userGroup: "staff"
+          #  department: None
         """
diff --git a/synapse/config/jwt_config.py b/synapse/config/jwt_config.py
index 3252ad9e7f..f30330abb6 100644
--- a/synapse/config/jwt_config.py
+++ b/synapse/config/jwt_config.py
@@ -63,7 +63,7 @@ class JWTConfig(Config):
         # and issued at ("iat") claims are validated if present.
         #
         # Note that this is a non-standard login type and client support is
-        # expected to be non-existant.
+        # expected to be non-existent.
         #
         # See https://github.com/matrix-org/synapse/blob/master/docs/jwt.md.
         #
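The new de.sorunome.msc2409.push_ephemeral flag above is read from the appservice's registration file (the as_info mapping), not from homeserver.yaml. A minimal sketch of such a registration file is shown below; only the de.sorunome.msc2409.push_ephemeral key comes from this changeset, the remaining field names are the usual appservice registration fields and all values are illustrative.

    # hypothetical appservice registration file, e.g. /etc/synapse/my-bridge.yaml
    id: my_bridge
    url: "http://localhost:9000"        # where Synapse pushes transactions
    as_token: "CHANGE_ME_AS_TOKEN"
    hs_token: "CHANGE_ME_HS_TOKEN"
    sender_localpart: my_bridge_bot
    rate_limited: false
    namespaces:
      users:
        - exclusive: true
          regex: "@bridge_.*:example.com"
    # Opt in to receiving ephemeral events (MSC2409), as read by _load_appservice above.
    de.sorunome.msc2409.push_ephemeral: true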
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index 13d6f6a3ea..d4e887a3e0 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -23,7 +23,6 @@ from string import Template
 
 import yaml
 from twisted.logger import (
-    ILogObserver,
     LogBeginner,
     STDLibLogObserver,
     eventAsText,
@@ -32,11 +31,9 @@ from twisted.logger import (
 
 import synapse
 from synapse.app import _base as appbase
-from synapse.logging._structured import (
-    reload_structured_logging,
-    setup_structured_logging,
-)
+from synapse.logging._structured import setup_structured_logging
 from synapse.logging.context import LoggingContextFilter
+from synapse.logging.filter import MetadataFilter
 from synapse.util.versionstring import get_version_string
 
 from ._base import Config, ConfigError
@@ -48,7 +45,11 @@ DEFAULT_LOG_CONFIG = Template(
 # This is a YAML file containing a standard Python logging configuration
 # dictionary. See [1] for details on the valid settings.
 #
+# Synapse also supports structured logging for machine readable logs which can
+# be ingested by ELK stacks. See [2] for details.
+#
 # [1]: https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
+# [2]: https://github.com/matrix-org/synapse/blob/master/docs/structured_logging.md
 
 version: 1
 
@@ -105,7 +106,7 @@ root:
     # then write them to a file.
     #
     # Replace "buffer" with "console" to log to stderr instead. (Note that you'll
-    # also need to update the configuation for the `twisted` logger above, in
+    # also need to update the configuration for the `twisted` logger above, in
     # this case.)
     #
     handlers: [buffer]
@@ -176,11 +177,11 @@ class LoggingConfig(Config):
                 log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file))
 
 
-def _setup_stdlib_logging(config, log_config, logBeginner: LogBeginner):
+def _setup_stdlib_logging(config, log_config_path, logBeginner: LogBeginner) -> None:
     """
-    Set up Python stdlib logging.
+    Set up Python standard library logging.
     """
-    if log_config is None:
+    if log_config_path is None:
         log_format = (
             "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"
             " - %(message)s"
@@ -196,7 +197,8 @@ def _setup_stdlib_logging(config, log_config, logBeginner: LogBeginner):
         handler.setFormatter(formatter)
         logger.addHandler(handler)
     else:
-        logging.config.dictConfig(log_config)
+        # Load the logging configuration.
+        _load_logging_config(log_config_path)
 
     # We add a log record factory that runs all messages through the
     # LoggingContextFilter so that we get the context *at the time we log*
     # filter options, but care must when using e.g. MemoryHandler to buffer
     # writes.
-    log_filter = LoggingContextFilter(request="")
+    log_context_filter = LoggingContextFilter(request="")
+    log_metadata_filter = MetadataFilter({"server_name": config.server_name})
     old_factory = logging.getLogRecordFactory()
 
     def factory(*args, **kwargs):
         record = old_factory(*args, **kwargs)
-        log_filter.filter(record)
+        log_context_filter.filter(record)
+        log_metadata_filter.filter(record)
         return record
 
     logging.setLogRecordFactory(factory)
@@ -255,21 +259,40 @@ def _setup_stdlib_logging(config, log_config, logBeginner: LogBeginner):
     if not config.no_redirect_stdio:
         print("Redirected stdout/stderr to logs")
 
-    return observer
-
-def _reload_stdlib_logging(*args, log_config=None):
-    logger = logging.getLogger("")
+def _load_logging_config(log_config_path: str) -> None:
+    """
+    Configure logging from a log config path.
+    """
+    with open(log_config_path, "rb") as f:
+        log_config = yaml.safe_load(f.read())
 
     if not log_config:
-        logger.warning("Reloaded a blank config?")
+        logging.warning("Loaded a blank logging config?")
+
+    # If the old structured logging configuration is being used, convert it to
+    # the new style configuration.
+    if "structured" in log_config and log_config.get("structured"):
+        log_config = setup_structured_logging(log_config)
 
     logging.config.dictConfig(log_config)
 
 
+def _reload_logging_config(log_config_path):
+    """
+    Reload the log configuration from the file and apply it.
+    """
+    # If no log config path was given, it cannot be reloaded.
+    if log_config_path is None:
+        return
+
+    _load_logging_config(log_config_path)
+    logging.info("Reloaded log config from %s due to SIGHUP", log_config_path)
+
+
 def setup_logging(
     hs, config, use_worker_options=False, logBeginner: LogBeginner = globalLogBeginner
-) -> ILogObserver:
+) -> None:
     """
     Set up the logging subsystem.
@@ -282,41 +305,18 @@ def setup_logging(
         logBeginner: The Twisted logBeginner to use.
 
-    Returns:
-        The "root" Twisted Logger observer, suitable for sending logs to from a
-        Logger instance.
     """
-    log_config = config.worker_log_config if use_worker_options else config.log_config
-
-    def read_config(*args, callback=None):
-        if log_config is None:
-            return None
-
-        with open(log_config, "rb") as f:
-            log_config_body = yaml.safe_load(f.read())
-
-        if callback:
-            callback(log_config=log_config_body)
-            logging.info("Reloaded log config from %s due to SIGHUP", log_config)
-
-        return log_config_body
+    log_config_path = (
+        config.worker_log_config if use_worker_options else config.log_config
+    )
 
-    log_config_body = read_config()
+    # Perform one-time logging configuration.
+    _setup_stdlib_logging(config, log_config_path, logBeginner=logBeginner)
+    # Add a SIGHUP handler to reload the logging configuration, if one is available.
+    appbase.register_sighup(_reload_logging_config, log_config_path)
 
-    if log_config_body and log_config_body.get("structured") is True:
-        logger = setup_structured_logging(
-            hs, config, log_config_body, logBeginner=logBeginner
-        )
-        appbase.register_sighup(read_config, callback=reload_structured_logging)
-    else:
-        logger = _setup_stdlib_logging(config, log_config_body, logBeginner=logBeginner)
-        appbase.register_sighup(read_config, callback=_reload_stdlib_logging)
-
-    # make sure that the first thing we log is a thing we can grep backwards
-    # for
+    # Log immediately so we can grep backwards.
     logging.warning("***** STARTING SERVER *****")
     logging.warning("Server %s version %s", sys.argv[0], get_version_string(synapse))
     logging.info("Server hostname: %s", config.server_name)
     logging.info("Instance name: %s", hs.get_instance_name())
-
-    return logger
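The log config file that _load_logging_config reads (and that the new SIGHUP handler reloads) is a standard Python logging.config dictionary serialised as YAML, as described in the DEFAULT_LOG_CONFIG comment above. A minimal sketch follows; the handler and formatter names are illustrative rather than copied from Synapse's default config, and the format string is the stdlib fallback used in _setup_stdlib_logging.

    version: 1

    formatters:
      precise:
        format: "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s"

    handlers:
      console:
        class: logging.StreamHandler
        formatter: precise

    root:
      level: INFO
      handlers: [console]

    disable_existing_loggers: false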
diff --git a/synapse/config/oidc_config.py b/synapse/config/oidc_config.py
index f924116819..69d188341c 100644
--- a/synapse/config/oidc_config.py
+++ b/synapse/config/oidc_config.py
@@ -56,6 +56,7 @@ class OIDCConfig(Config):
         self.oidc_userinfo_endpoint = oidc_config.get("userinfo_endpoint")
         self.oidc_jwks_uri = oidc_config.get("jwks_uri")
         self.oidc_skip_verification = oidc_config.get("skip_verification", False)
+        self.oidc_user_profile_method = oidc_config.get("user_profile_method", "auto")
         self.oidc_allow_existing_users = oidc_config.get("allow_existing_users", False)
 
         ump_config = oidc_config.get("user_mapping_provider", {})
@@ -86,11 +87,10 @@ class OIDCConfig(Config):
 
     def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """\
-        # OpenID Connect integration. The following settings can be used to make Synapse
-        # use an OpenID Connect Provider for authentication, instead of its internal
-        # password database.
+        # Enable OpenID Connect (OIDC) / OAuth 2.0 for registration and login.
         #
-        # See https://github.com/matrix-org/synapse/blob/master/docs/openid.md.
+        # See https://github.com/matrix-org/synapse/blob/master/docs/openid.md
+        # for some example configurations.
         #
         oidc_config:
           # Uncomment the following to enable authorization against an OpenID Connect
@@ -159,6 +159,14 @@ class OIDCConfig(Config):
           #
           #skip_verification: true
 
+          # Whether to fetch the user profile from the userinfo endpoint. Valid
+          # values are: "auto" or "userinfo_endpoint".
+          #
+          # Defaults to "auto", which fetches the userinfo endpoint if "openid" is included
+          # in `scopes`. Uncomment the following to always fetch the userinfo endpoint.
+          #
+          #user_profile_method: "userinfo_endpoint"
+
           # Uncomment to allow a user logging in via OIDC to match a pre-existing account instead
           # of failing. This could be used if switching from password logins to OIDC. Defaults to false.
           #
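The new user_profile_method option sits alongside the existing OIDC settings in homeserver.yaml. A sketch of how it might be set is below; the provider details (issuer, client_id, client_secret, scopes) are placeholders taken from the standard OIDC configuration rather than from this hunk, and only user_profile_method is new here.

    oidc_config:
      enabled: true
      issuer: "https://accounts.example.com/"
      client_id: "synapse"
      client_secret: "CHANGE_ME"
      scopes: ["openid", "profile"]
      # Always fetch the userinfo endpoint rather than relying on ID token claims.
      user_profile_method: "userinfo_endpoint"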
diff --git a/synapse/config/push.py b/synapse/config/push.py
index a1f3752c8a..3adbfb73e6 100644
--- a/synapse/config/push.py
+++ b/synapse/config/push.py
@@ -21,8 +21,11 @@ class PushConfig(Config):
     section = "push"
 
     def read_config(self, config, **kwargs):
-        push_config = config.get("push", {})
+        push_config = config.get("push") or {}
         self.push_include_content = push_config.get("include_content", True)
+        self.push_group_unread_count_by_room = push_config.get(
+            "group_unread_count_by_room", True
+        )
 
         pusher_instances = config.get("pusher_instances") or []
         self.pusher_shard_config = ShardedWorkerHandlingConfig(pusher_instances)
@@ -49,18 +52,33 @@ class PushConfig(Config):
 
     def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """
-        # Clients requesting push notifications can either have the body of
-        # the message sent in the notification poke along with other details
-        # like the sender, or just the event ID and room ID (`event_id_only`).
-        # If clients choose the former, this option controls whether the
-        # notification request includes the content of the event (other details
-        # like the sender are still included). For `event_id_only` push, it
-        # has no effect.
-        #
-        # For modern android devices the notification content will still appear
-        # because it is loaded by the app. iPhone, however will send a
-        # notification saying only that a message arrived and who it came from.
-        #
-        #push:
-        #   include_content: true
+        ## Push ##
+
+        push:
+          # Clients requesting push notifications can either have the body of
+          # the message sent in the notification poke along with other details
+          # like the sender, or just the event ID and room ID (`event_id_only`).
+          # If clients choose the former, this option controls whether the
+          # notification request includes the content of the event (other details
+          # like the sender are still included). For `event_id_only` push, it
+          # has no effect.
+          #
+          # For modern android devices the notification content will still appear
+          # because it is loaded by the app. iPhone, however will send a
+          # notification saying only that a message arrived and who it came from.
+          #
+          # The default value is "true" to include message details. Uncomment to only
+          # include the event ID and room ID in push notification payloads.
+          #
+          #include_content: false
+
+          # When a push notification is received, an unread count is also sent.
+          # This number can either be calculated as the number of unread messages
+          # for the user, or the number of *rooms* the user has unread messages in.
+          #
+          # The default value is "true", meaning push clients will see the number of
+          # rooms with unread messages in them. Uncomment to instead send the number
+          # of unread messages.
+          #
+          #group_unread_count_by_room: false
         """
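Both push options end up under the same push block in homeserver.yaml. For example, to keep message content in notifications but send a raw unread-message count instead of the per-room count:

    push:
      include_content: true
      # Send the number of unread messages rather than the number of rooms with unread messages.
      group_unread_count_by_room: false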
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index aeae5bcaea..82be5a35aa 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -146,7 +146,7 @@ class RegistrationConfig(Config):
             RoomCreationPreset.TRUSTED_PRIVATE_CHAT,
         }
 
-        # Pull the creater/inviter from the configuration, this gets used to
+        # Pull the creator/inviter from the configuration, this gets used to
         # send invites for invite-only rooms.
         mxid_localpart = config.get("auto_join_mxid_localpart")
         self.auto_join_user_id = None
@@ -355,8 +355,9 @@ class RegistrationConfig(Config):
         # email will be globally disabled.
         #
         # Additionally, if `msisdn` is not set, registration and password resets via msisdn
-        # will be disabled regardless. This is due to Synapse currently not supporting any
-        # method of sending SMS messages on its own.
+        # will be disabled regardless, and users will not be able to associate an msisdn
+        # identifier to their account. This is due to Synapse currently not supporting
+        # any method of sending SMS messages on its own.
         #
         # To enable using an identity server for operations regarding a particular third-party
         # identifier type, set the value to the URL of that identity server as shown in the
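The msisdn wording above belongs to the third-party identifier delegate settings. Assuming the surrounding option is the usual account_threepid_delegates block (the option name is not shown in this hunk), a configuration that delegates both identifier types might look like:

    account_threepid_delegates:
      email: https://example.com       # identity server used for email validation
      msisdn: http://localhost:8090    # identity server used for SMS validation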
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 01009f3924..ba1e9d2361 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -100,7 +100,7 @@ class ContentRepositoryConfig(Config):
             "media_instance_running_background_jobs",
         )
 
-        self.max_upload_size = self.parse_size(config.get("max_upload_size", "10M"))
+        self.max_upload_size = self.parse_size(config.get("max_upload_size", "50M"))
         self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M"))
         self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M"))
 
@@ -242,7 +242,7 @@ class ContentRepositoryConfig(Config):
 
         # The largest allowed upload size in bytes
         #
-        #max_upload_size: 10M
+        #max_upload_size: 50M
 
         # Maximum number of pixels that will be thumbnailed
         #
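The new 50M default only applies when max_upload_size is not set explicitly; a homeserver that wants to keep the previous cap can pin it in homeserver.yaml:

    # Keep the pre-change upload limit.
    max_upload_size: 10M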
diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py
index 6de1f9d103..92e1b67528 100644
--- a/synapse/config/room_directory.py
+++ b/synapse/config/room_directory.py
@@ -99,7 +99,7 @@ class RoomDirectoryConfig(Config):
         #
         # Options for the rules include:
         #
-        #  user_id: Matches agaisnt the creator of the alias
+        #  user_id: Matches against the creator of the alias
         #  room_id: Matches against the room ID being published
         #  alias: Matches against any current local or canonical aliases
         #     associated with the room
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index 99aa8b3bf1..c1b8e98ae0 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -90,6 +90,8 @@ class SAML2Config(Config):
             "grandfathered_mxid_source_attribute", "uid"
         )
 
+        self.saml2_idp_entityid = saml2_config.get("idp_entityid", None)
+
         # user_mapping_provider may be None if the key is present but has no value
         ump_dict = saml2_config.get("user_mapping_provider") or {}
 
@@ -216,10 +218,8 @@ class SAML2Config(Config):
         return """\
         ## Single sign-on integration ##
 
-        # Enable SAML2 for registration and login. Uses pysaml2.
-        #
-        # At least one of `sp_config` or `config_path` must be set in this section to
-        # enable SAML login.
+        # The following settings can be used to make Synapse use a single sign-on
+        # provider for authentication, instead of its internal password database.
         #
         # You will probably also want to set the following options to `false` to
         # disable the regular login/registration flows:
@@ -228,6 +228,11 @@ class SAML2Config(Config):
         #
         # You will also want to investigate the settings under the "sso" configuration
         # section below.
+
+        # Enable SAML2 for registration and login. Uses pysaml2.
+        #
+        # At least one of `sp_config` or `config_path` must be set in this section to
+        # enable SAML login.
         #
         # Once SAML support is enabled, a metadata file will be exposed at
         # https://<server>:<port>/_matrix/saml2/metadata.xml, which you may be able to
@@ -243,40 +248,70 @@ class SAML2Config(Config):
         # so it is not normally necessary to specify them unless you need to
         # override them.
         #
-        #sp_config:
-        #  # point this to the IdP's metadata. You can use either a local file or
-        #  # (preferably) a URL.
-        #  metadata:
-        #    #local: ["saml2/idp.xml"]
-        #    remote:
-        #      - url: https://our_idp/metadata.xml
-        #
-        #  # By default, the user has to go to our login page first. If you'd like
-        #  # to allow IdP-initiated login, set 'allow_unsolicited: true' in a
-        #  # 'service.sp' section:
-        #  #
-        #  #service:
-        #  #  sp:
-        #  #    allow_unsolicited: true
-        #
-        #  # The examples below are just used to generate our metadata xml, and you
-        #  # may well not need them, depending on your setup. Alternatively you
-        #  # may need a whole lot more detail - see the pysaml2 docs!
-        #
-        #  description: ["My awesome SP", "en"]
-        #  name: ["Test SP", "en"]
-        #
-        #  organization:
-        #    name: Example com
-        #    display_name:
-        #      - ["Example co", "en"]
-        #    url: "http://example.com"
-        #
-        #  contact_person:
-        #    - given_name: Bob
-        #      sur_name: "the Sysadmin"
-        #      email_address": ["admin@example.com"]
-        #      contact_type": technical
+        sp_config:
+          # Point this to the IdP's metadata. You must provide either a local
+          # file via the `local` attribute or (preferably) a URL via the
+          # `remote` attribute.
+          #
+          #metadata:
+          #  local: ["saml2/idp.xml"]
+          #  remote:
+          #    - url: https://our_idp/metadata.xml
+
+          # Allowed clock difference in seconds between the homeserver and IdP.
+          #
+          # Uncomment the below to increase the accepted time difference from 0 to 3 seconds.
+          #
+          #accepted_time_diff: 3
+
+          # By default, the user has to go to our login page first. If you'd like
+          # to allow IdP-initiated login, set 'allow_unsolicited: true' in a
+          # 'service.sp' section:
+          #
+          #service:
+          #  sp:
+          #    allow_unsolicited: true
+
+          # The examples below are just used to generate our metadata xml, and you
+          # may well not need them, depending on your setup. Alternatively you
+          # may need a whole lot more detail - see the pysaml2 docs!
+
+          #description: ["My awesome SP", "en"]
+          #name: ["Test SP", "en"]
+
+          #ui_info:
+          #  display_name:
+          #    - lang: en
+          #      text: "Display Name is the descriptive name of your service."
+          #  description:
+          #    - lang: en
+          #      text: "Description should be a short paragraph explaining the purpose of the service."
+          #  information_url:
+          #    - lang: en
+          #      text: "https://example.com/terms-of-service"
+          #  privacy_statement_url:
+          #    - lang: en
+          #      text: "https://example.com/privacy-policy"
+          #  keywords:
+          #    - lang: en
+          #      text: ["Matrix", "Element"]
+          #  logo:
+          #    - lang: en
+          #      text: "https://example.com/logo.svg"
+          #      width: "200"
+          #      height: "80"
+
+          #organization:
+          #  name: Example com
+          #  display_name:
+          #    - ["Example co", "en"]
+          #  url: "http://example.com"
+
+          #contact_person:
+          #  - given_name: Bob
+          #    sur_name: "the Sysadmin"
+          #    email_address": ["admin@example.com"]
+          #    contact_type": technical
 
         # Instead of putting the config inline as above, you can specify a
         # separate pysaml2 configuration file:
@@ -350,6 +385,14 @@ class SAML2Config(Config):
         #      value: "staff"
         #    - attribute: department
         #      value: "sales"
+
+        # If the metadata XML contains multiple IdP entities then the `idp_entityid`
+        # option must be set to the entity to redirect users to.
+        #
+        # Most deployments only have a single IdP entity and so should omit this
+        # option.
+        #
+        #idp_entityid: 'https://our_idp/entityid'
         """ % {
             "config_dir_path": config_dir_path
         }
diff --git a/synapse/config/server.py b/synapse/config/server.py
index ef6d70e3f8..85aa49c02d 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -39,7 +39,7 @@ logger = logging.Logger(__name__)
 # in the list.
 DEFAULT_BIND_ADDRESSES = ["::", "0.0.0.0"]
 
-DEFAULT_ROOM_VERSION = "5"
+DEFAULT_ROOM_VERSION = "6"
 
 ROOM_COMPLEXITY_TOO_GREAT = (
     "Your homeserver is unable to join rooms this large or complex. "
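DEFAULT_ROOM_VERSION only affects newly created rooms when the admin has not chosen a version. Assuming the standard default_room_version homeserver option (which is not part of the hunk shown), an admin who wants to stay on the previous default can pin it:

    # Keep creating rooms with the old default room version.
    default_room_version: "5"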
" diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 9ddb8b546b..ad37b93c02 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -18,7 +18,7 @@ import os import warnings from datetime import datetime from hashlib import sha256 -from typing import List +from typing import List, Optional from unpaddedbase64 import encode_base64 @@ -177,8 +177,8 @@ class TlsConfig(Config): "use_insecure_ssl_client_just_for_testing_do_not_use" ) - self.tls_certificate = None - self.tls_private_key = None + self.tls_certificate = None # type: Optional[crypto.X509] + self.tls_private_key = None # type: Optional[crypto.PKey] def is_disk_cert_valid(self, allow_self_signed=True): """ @@ -226,12 +226,12 @@ class TlsConfig(Config): days_remaining = (expires_on - now).days return days_remaining - def read_certificate_from_disk(self, require_cert_and_key): + def read_certificate_from_disk(self, require_cert_and_key: bool): """ Read the certificates and private key from disk. Args: - require_cert_and_key (bool): set to True to throw an error if the certificate + require_cert_and_key: set to True to throw an error if the certificate and key file are not given """ if require_cert_and_key: @@ -479,13 +479,13 @@ class TlsConfig(Config): } ) - def read_tls_certificate(self): + def read_tls_certificate(self) -> crypto.X509: """Reads the TLS certificate from the configured file, and returns it Also checks if it is self-signed, and warns if so Returns: - OpenSSL.crypto.X509: the certificate + The certificate """ cert_path = self.tls_certificate_file logger.info("Loading TLS certificate from %s", cert_path) @@ -504,11 +504,11 @@ class TlsConfig(Config): return cert - def read_tls_private_key(self): + def read_tls_private_key(self) -> crypto.PKey: """Reads the TLS private key from the configured file, and returns it Returns: - OpenSSL.crypto.PKey: the private key + The private key """ private_key_path = self.tls_private_key_file logger.info("Loading TLS key from %s", private_key_path) diff --git a/synapse/config/tracer.py b/synapse/config/tracer.py index 8be1346113..0c1a854f09 100644 --- a/synapse/config/tracer.py +++ b/synapse/config/tracer.py @@ -67,7 +67,7 @@ class TracerConfig(Config): # This is a list of regexes which are matched against the server_name of the # homeserver. # - # By defult, it is empty, so no servers are matched. + # By default, it is empty, so no servers are matched. # #homeserver_whitelist: # - ".*" diff --git a/synapse/config/workers.py b/synapse/config/workers.py index f23e42cdf9..57ab097eba 100644 --- a/synapse/config/workers.py +++ b/synapse/config/workers.py @@ -132,6 +132,19 @@ class WorkerConfig(Config): self.events_shard_config = ShardedWorkerHandlingConfig(self.writers.events) + # Whether this worker should run background tasks or not. + # + # As a note for developers, the background tasks guarded by this should + # be able to run on only a single instance (meaning that they don't + # depend on any in-memory state of a particular worker). + # + # No effort is made to ensure only a single instance of these tasks is + # running. 
diff --git a/synapse/config/workers.py b/synapse/config/workers.py
index f23e42cdf9..57ab097eba 100644
--- a/synapse/config/workers.py
+++ b/synapse/config/workers.py
@@ -132,6 +132,19 @@ class WorkerConfig(Config):
 
         self.events_shard_config = ShardedWorkerHandlingConfig(self.writers.events)
 
+        # Whether this worker should run background tasks or not.
+        #
+        # As a note for developers, the background tasks guarded by this should
+        # be able to run on only a single instance (meaning that they don't
+        # depend on any in-memory state of a particular worker).
+        #
+        # No effort is made to ensure only a single instance of these tasks is
+        # running.
+        background_tasks_instance = config.get("run_background_tasks_on") or "master"
+        self.run_background_tasks = (
+            self.worker_name is None and background_tasks_instance == "master"
+        ) or self.worker_name == background_tasks_instance
+
     def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """\
         ## Workers ##
@@ -167,6 +180,11 @@ class WorkerConfig(Config):
         #stream_writers:
         #  events: worker1
         #  typing: worker1
+
+        # The worker that is used to run background tasks (e.g. cleaning up expired
+        # data). If not provided this defaults to the main process.
+        #
+        #run_background_tasks_on: worker1
         """
 
     def read_arguments(self, args):
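run_background_tasks_on refers to a worker by its worker_name. A sketch of moving background tasks to a dedicated worker follows; the worker_app value and file names are illustrative assumptions rather than part of this diff, while run_background_tasks_on and worker_name come from the code above.

    # homeserver.yaml
    run_background_tasks_on: background_worker

    # background_worker.yaml (the worker's own config file)
    worker_app: synapse.app.generic_worker
    worker_name: background_worker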