author    | Patrick Cloke <clokep@users.noreply.github.com> | 2021-09-13 13:07:12 -0400
committer | GitHub <noreply@github.com> | 2021-09-13 13:07:12 -0400
commit    | 01c88a09cd6e90fa28c1282a56a08e481727ce20 (patch)
tree      | d5875f6291b512163d2e01da2150dd4d0956aa7d /synapse
parent    | Fix copy-paste error in the password section of the sample-config. (#10804) (diff)
download  | synapse-01c88a09cd6e90fa28c1282a56a08e481727ce20.tar.xz
Use direct references for some configuration variables (#10798)
Instead of proxying through the magic getter of the RootConfig object. This should be more performant (and is more explicit).
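For readers unfamiliar with the pattern being replaced: Synapse's `RootConfig` resolves unqualified attributes such as `config.server_name` through a magic `__getattr__` that searches its per-section config objects. The sketch below is a simplified stand-in for that idea (the class names and attributes here are illustrative, not Synapse's real implementation) and shows why `config.server.server_name` is both cheaper and more explicit than going through the proxy.

```python
# Minimal sketch (assumed names, not Synapse's real classes) of the proxied
# lookup this commit moves away from, versus a direct sub-config reference.

class ServerConfig:
    def __init__(self) -> None:
        self.server_name = "example.org"
        self.public_baseurl = "https://example.org/"


class WorkerConfig:
    def __init__(self) -> None:
        self.worker_app = None
        self.run_background_tasks = True


class RootConfig:
    def __init__(self) -> None:
        self.server = ServerConfig()
        self.worker = WorkerConfig()
        self._sections = (self.server, self.worker)

    def __getattr__(self, name: str):
        # Magic getter: only called when normal attribute lookup fails, so it
        # scans every section for the requested name. Convenient, but every
        # unqualified lookup pays for this search and hides which section
        # actually owns the setting.
        for section in self._sections:
            if hasattr(section, name):
                return getattr(section, name)
        raise AttributeError(name)


config = RootConfig()

# Old style: resolved through RootConfig.__getattr__.
assert config.server_name == "example.org"

# New style (what this commit switches to): a direct, explicit reference.
assert config.server.server_name == "example.org"
```

A direct attribute access on the sub-config object never triggers `__getattr__` (Python only calls it when normal lookup fails), which is where the modest performance gain and the improved readability come from.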
Diffstat (limited to 'synapse')
64 files changed, 139 insertions, 127 deletions
diff --git a/synapse/api/urls.py b/synapse/api/urls.py
index 4b1f213c75..d3270cd6d2 100644
--- a/synapse/api/urls.py
+++ b/synapse/api/urls.py
@@ -41,11 +41,11 @@ class ConsentURIBuilder:
         """
         if hs_config.form_secret is None:
             raise ConfigError("form_secret not set in config")
-        if hs_config.public_baseurl is None:
+        if hs_config.server.public_baseurl is None:
             raise ConfigError("public_baseurl not set in config")
 
         self._hmac_secret = hs_config.form_secret.encode("utf-8")
-        self._public_baseurl = hs_config.public_baseurl
+        self._public_baseurl = hs_config.server.public_baseurl
 
     def build_user_consent_uri(self, user_id):
         """Build a URI which we can give to the user to do their privacy
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 89bda00090..d1aa2e7fb5 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -82,7 +82,7 @@ def start_worker_reactor(appname, config, run_command=reactor.run):
         run_command (Callable[]): callable that actually runs the reactor
     """
 
-    logger = logging.getLogger(config.worker_app)
+    logger = logging.getLogger(config.worker.worker_app)
 
     start_reactor(
         appname,
@@ -398,7 +398,7 @@ async def start(hs: "HomeServer"):
 
     # If background tasks are running on the main process, start collecting the
    # phone home stats.
-    if hs.config.run_background_tasks:
+    if hs.config.worker.run_background_tasks:
        start_phone_stats_home(hs)
 
    # We now freeze all allocated objects in the hopes that (almost)
@@ -433,9 +433,13 @@ def setup_sentry(hs):
 
     # We set some default tags that give some context to this instance
     with sentry_sdk.configure_scope() as scope:
-        scope.set_tag("matrix_server_name", hs.config.server_name)
+        scope.set_tag("matrix_server_name", hs.config.server.server_name)
 
-        app = hs.config.worker_app if hs.config.worker_app else "synapse.app.homeserver"
+        app = (
+            hs.config.worker.worker_app
+            if hs.config.worker.worker_app
+            else "synapse.app.homeserver"
+        )
         name = hs.get_instance_name()
         scope.set_tag("worker_app", app)
         scope.set_tag("worker_name", name)
diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py
index 7396db93c6..5e956b1e27 100644
--- a/synapse/app/admin_cmd.py
+++ b/synapse/app/admin_cmd.py
@@ -178,12 +178,12 @@ def start(config_options):
         sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
 
-    if config.worker_app is not None:
-        assert config.worker_app == "synapse.app.admin_cmd"
+    if config.worker.worker_app is not None:
+        assert config.worker.worker_app == "synapse.app.admin_cmd"
 
     # Update the config with some basic overrides so that don't have to specify
    # a full worker config.
-    config.worker_app = "synapse.app.admin_cmd"
+    config.worker.worker_app = "synapse.app.admin_cmd"
 
     if (
         not config.worker_daemonize
@@ -196,7 +196,7 @@ def start(config_options):
 
     # Explicitly disable background processes
     config.update_user_directory = False
-    config.run_background_tasks = False
+    config.worker.run_background_tasks = False
     config.start_pushers = False
     config.pusher_shard_config.instances = []
     config.send_federation = False
@@ -205,7 +205,7 @@ def start(config_options):
     synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
 
     ss = AdminCmdServer(
-        config.server_name,
+        config.server.server_name,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
     )
diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py
index 7d2cd6a904..33afd59c72 100644
--- a/synapse/app/generic_worker.py
+++ b/synapse/app/generic_worker.py
@@ -416,7 +416,7 @@ def start(config_options):
         sys.exit(1)
 
     # For backwards compatibility let any of the old app names.
-    assert config.worker_app in (
+    assert config.worker.worker_app in (
         "synapse.app.appservice",
         "synapse.app.client_reader",
         "synapse.app.event_creator",
@@ -430,7 +430,7 @@ def start(config_options):
         "synapse.app.user_dir",
     )
 
-    if config.worker_app == "synapse.app.appservice":
+    if config.worker.worker_app == "synapse.app.appservice":
         if config.appservice.notify_appservices:
             sys.stderr.write(
                 "\nThe appservices must be disabled in the main synapse process"
@@ -446,7 +446,7 @@ def start(config_options):
         # For other worker types we force this to off.
         config.appservice.notify_appservices = False
 
-    if config.worker_app == "synapse.app.user_dir":
+    if config.worker.worker_app == "synapse.app.user_dir":
         if config.server.update_user_directory:
             sys.stderr.write(
                 "\nThe update_user_directory must be disabled in the main synapse process"
@@ -469,7 +469,7 @@ def start(config_options):
     synapse.metrics.MIN_TIME_BETWEEN_GCS = config.server.gc_seconds
 
     hs = GenericWorkerServer(
-        config.server_name,
+        config.server.server_name,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
     )
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 708db86f5d..b909f8db8d 100644
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -350,7 +350,7 @@ def setup(config_options):
     synapse.metrics.MIN_TIME_BETWEEN_GCS = config.server.gc_seconds
 
     hs = SynapseHomeServer(
-        config.server_name,
+        config.server.server_name,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
     )
diff --git a/synapse/app/phone_stats_home.py b/synapse/app/phone_stats_home.py
index 86ad7337a9..4a95da90f9 100644
--- a/synapse/app/phone_stats_home.py
+++ b/synapse/app/phone_stats_home.py
@@ -73,7 +73,7 @@ async def phone_stats_home(hs, stats, stats_process=_stats_process):
 
     store = hs.get_datastore()
 
-    stats["homeserver"] = hs.config.server_name
+    stats["homeserver"] = hs.config.server.server_name
     stats["server_context"] = hs.config.server_context
     stats["timestamp"] = now
     stats["uptime_seconds"] = uptime
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index 4a398a7932..aca9d467e6 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -223,7 +223,7 @@ def _setup_stdlib_logging(config, log_config_path, logBeginner: LogBeginner) ->
     # writes.
     log_context_filter = LoggingContextFilter()
-    log_metadata_filter = MetadataFilter({"server_name": config.server_name})
+    log_metadata_filter = MetadataFilter({"server_name": config.server.server_name})
     old_factory = logging.getLogRecordFactory()
 
     def factory(*args, **kwargs):
@@ -335,5 +335,5 @@ def setup_logging(
     # Log immediately so we can grep backwards.
     logging.warning("***** STARTING SERVER *****")
     logging.warning("Server %s version %s", sys.argv[0], get_version_string(synapse))
-    logging.info("Server hostname: %s", config.server_name)
+    logging.info("Server hostname: %s", config.server.server_name)
     logging.info("Instance name: %s", hs.get_instance_name())
diff --git a/synapse/events/validator.py b/synapse/events/validator.py
index 33954b4f62..6eb6544c4c 100644
--- a/synapse/events/validator.py
+++ b/synapse/events/validator.py
@@ -88,7 +88,7 @@ class EventValidator:
             self._validate_retention(event)
 
         if event.type == EventTypes.ServerACL:
-            if not server_matches_acl_event(config.server_name, event):
+            if not server_matches_acl_event(config.server.server_name, event):
                 raise SynapseError(
                     400, "Can't create an ACL event that denies the local server"
                 )
diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py
index 4c0a3eb5af..4671ac0242 100644
--- a/synapse/federation/sender/__init__.py
+++ b/synapse/federation/sender/__init__.py
@@ -281,7 +281,8 @@ class FederationSender(AbstractFederationSender):
         self._queues_awaiting_rr_flush_by_room: Dict[str, Set[PerDestinationQueue]] = {}
 
         self._rr_txn_interval_per_room_ms = (
-            1000.0 / hs.config.federation_rr_transactions_per_room_per_second
+            1000.0
+            / hs.config.ratelimiting.federation_rr_transactions_per_room_per_second
         )
 
         # wake up destinations that have outstanding PDUs to be caught up
diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py
index ff8372c4e9..53f99031b1 100644
--- a/synapse/groups/attestations.py
+++ b/synapse/groups/attestations.py
@@ -144,7 +144,7 @@ class GroupAttestionRenewer:
         self.is_mine_id = hs.is_mine_id
         self.attestations = hs.get_groups_attestation_signing()
 
-        if not hs.config.worker_app:
+        if not hs.config.worker.worker_app:
             self._renew_attestations_loop = self.clock.looping_call(
                 self._start_renew_attestations, 30 * 60 * 1000
             )
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index 955cfa2207..c23ccd6dd9 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -45,16 +45,16 @@ class BaseHandler:
         self.request_ratelimiter = Ratelimiter(
             store=self.store, clock=self.clock, rate_hz=0, burst_count=0
         )
-        self._rc_message = self.hs.config.rc_message
+        self._rc_message = self.hs.config.ratelimiting.rc_message
 
         # Check whether ratelimiting room admin message redaction is enabled
         # by the presence of rate limits in the config
-        if self.hs.config.rc_admin_redaction:
+        if self.hs.config.ratelimiting.rc_admin_redaction:
             self.admin_redaction_ratelimiter: Optional[Ratelimiter] = Ratelimiter(
                 store=self.store,
                 clock=self.clock,
-                rate_hz=self.hs.config.rc_admin_redaction.per_second,
-                burst_count=self.hs.config.rc_admin_redaction.burst_count,
+                rate_hz=self.hs.config.ratelimiting.rc_admin_redaction.per_second,
+                burst_count=self.hs.config.ratelimiting.rc_admin_redaction.burst_count,
             )
         else:
             self.admin_redaction_ratelimiter = None
diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py
index dd69755053..a9c2222f46 100644
--- a/synapse/handlers/account_validity.py
+++ b/synapse/handlers/account_validity.py
@@ -78,7 +78,7 @@ class AccountValidityHandler:
         )
 
         # Check the renewal emails to send and send them every 30min.
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             self.clock.looping_call(self._send_renewal_emails, 30 * 60 * 1000)
 
         self._is_user_expired_callbacks: List[IS_USER_EXPIRED_CALLBACK] = []
@@ -249,7 +249,7 @@ class AccountValidityHandler:
         renewal_token = await self._get_renewal_token(user_id)
         url = "%s_matrix/client/unstable/account_validity/renew?token=%s" % (
-            self.hs.config.public_baseurl,
+            self.hs.config.server.public_baseurl,
             renewal_token,
         )
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 34725324a6..fbbf6fd834 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -244,8 +244,8 @@ class AuthHandler(BaseHandler):
         self._failed_uia_attempts_ratelimiter = Ratelimiter(
             store=self.store,
             clock=self.clock,
-            rate_hz=self.hs.config.rc_login_failed_attempts.per_second,
-            burst_count=self.hs.config.rc_login_failed_attempts.burst_count,
+            rate_hz=self.hs.config.ratelimiting.rc_login_failed_attempts.per_second,
+            burst_count=self.hs.config.ratelimiting.rc_login_failed_attempts.burst_count,
         )
 
         # The number of seconds to keep a UI auth session active.
@@ -255,14 +255,14 @@ class AuthHandler(BaseHandler):
         self._failed_login_attempts_ratelimiter = Ratelimiter(
             store=self.store,
             clock=hs.get_clock(),
-            rate_hz=self.hs.config.rc_login_failed_attempts.per_second,
-            burst_count=self.hs.config.rc_login_failed_attempts.burst_count,
+            rate_hz=self.hs.config.ratelimiting.rc_login_failed_attempts.per_second,
+            burst_count=self.hs.config.ratelimiting.rc_login_failed_attempts.burst_count,
         )
 
         self._clock = self.hs.get_clock()
 
         # Expire old UI auth sessions after a period of time.
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             self._clock.looping_call(
                 run_as_background_process,
                 5 * 60 * 1000,
@@ -289,7 +289,7 @@ class AuthHandler(BaseHandler):
             hs.config.sso_account_deactivated_template
         )
 
-        self._server_name = hs.config.server_name
+        self._server_name = hs.config.server.server_name
 
         # cast to tuple for use with str.startswith
         self._whitelisted_sso_clients = tuple(hs.config.sso_client_whitelist)
@@ -749,7 +749,7 @@ class AuthHandler(BaseHandler):
                     "name": self.hs.config.user_consent_policy_name,
                     "url": "%s_matrix/consent?v=%s"
                     % (
-                        self.hs.config.public_baseurl,
+                        self.hs.config.server.public_baseurl,
                         self.hs.config.user_consent_version,
                     ),
                 },
@@ -1799,7 +1799,7 @@ class MacaroonGenerator:
     def _generate_base_macaroon(self, user_id: str) -> pymacaroons.Macaroon:
         macaroon = pymacaroons.Macaroon(
-            location=self.hs.config.server_name,
+            location=self.hs.config.server.server_name,
             identifier="key",
             key=self.hs.config.macaroon_secret_key,
         )
diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py
index ab22d76359..dcd320c555 100644
--- a/synapse/handlers/deactivate_account.py
+++ b/synapse/handlers/deactivate_account.py
@@ -46,7 +46,7 @@ class DeactivateAccountHandler(BaseHandler):
 
         # Start the user parter loop so it can resume parting users from rooms where
         # it left off (if it has work left to do).
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             hs.get_reactor().callWhenRunning(self._start_user_parting)
 
         self._account_validity_enabled = (
diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py
index 679b47f081..b6a2a34ab7 100644
--- a/synapse/handlers/devicemessage.py
+++ b/synapse/handlers/devicemessage.py
@@ -84,8 +84,8 @@ class DeviceMessageHandler:
         self._ratelimiter = Ratelimiter(
             store=self.store,
             clock=hs.get_clock(),
-            rate_hz=hs.config.rc_key_requests.per_second,
-            burst_count=hs.config.rc_key_requests.burst_count,
+            rate_hz=hs.config.ratelimiting.rc_key_requests.per_second,
+            burst_count=hs.config.ratelimiting.rc_key_requests.burst_count,
         )
 
     async def on_direct_to_device_edu(self, origin: str, content: JsonDict) -> None:
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index d92370859f..08a137561f 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -57,7 +57,7 @@ class E2eKeysHandler:
 
         federation_registry = hs.get_federation_registry()
 
-        self._is_master = hs.config.worker_app is None
+        self._is_master = hs.config.worker.worker_app is None
         if not self._is_master:
             self._user_device_resync_client = (
                 ReplicationUserDevicesResyncRestServlet.make_client(hs)
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 77df9185f6..6754c64c31 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -101,7 +101,7 @@ class FederationHandler(BaseHandler):
             hs
         )
 
-        if hs.config.worker_app:
+        if hs.config.worker.worker_app:
             self._maybe_store_room_on_outlier_membership = (
                 ReplicationStoreRoomOnOutlierMembershipRestServlet.make_client(hs)
             )
@@ -1614,7 +1614,7 @@ class FederationHandler(BaseHandler):
         Args:
             room_id
         """
-        if self.config.worker_app:
+        if self.config.worker.worker_app:
             await self._clean_room_for_join_client(room_id)
         else:
             await self.store.clean_room_for_join(room_id)
diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py
index 9ec90ac8c1..946343fa25 100644
--- a/synapse/handlers/federation_event.py
+++ b/synapse/handlers/federation_event.py
@@ -149,7 +149,7 @@ class FederationEventHandler:
         self._ephemeral_messages_enabled = hs.config.server.enable_ephemeral_messages
 
         self._send_events = ReplicationFederationSendEventsRestServlet.make_client(hs)
-        if hs.config.worker_app:
+        if hs.config.worker.worker_app:
             self._user_device_resync = (
                 ReplicationUserDevicesResyncRestServlet.make_client(hs)
             )
@@ -1009,7 +1009,7 @@ class FederationEventHandler:
             await self._store.mark_remote_user_device_cache_as_stale(sender)
 
             # Immediately attempt a resync in the background
-            if self._config.worker_app:
+            if self._config.worker.worker_app:
                 await self._user_device_resync(user_id=sender)
             else:
                 await self._device_list_updater.user_device_resync(sender)
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index 8ffeabacf9..8b8f1f41ca 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -540,13 +540,13 @@ class IdentityHandler(BaseHandler):
 
         # It is already checked that public_baseurl is configured since this code
         # should only be used if account_threepid_delegate_msisdn is true.
-        assert self.hs.config.public_baseurl
+        assert self.hs.config.server.public_baseurl
 
         # we need to tell the client to send the token back to us, since it doesn't
         # otherwise know where to send it, so add submit_url response parameter
         # (see also MSC2078)
         data["submit_url"] = (
-            self.hs.config.public_baseurl
+            self.hs.config.server.public_baseurl
             + "_matrix/client/unstable/add_threepid/msisdn/submit_token"
         )
         return data
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index bf0fef1510..10f1584a00 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -84,7 +84,7 @@ class MessageHandler:
         # scheduled.
         self._scheduled_expiry: Optional[IDelayedCall] = None
 
-        if not hs.config.worker_app:
+        if not hs.config.worker.worker_app:
             run_as_background_process(
                 "_schedule_next_expiry", self._schedule_next_expiry
             )
@@ -461,7 +461,7 @@ class EventCreationHandler:
         self._dummy_events_threshold = hs.config.dummy_events_threshold
 
         if (
-            self.config.run_background_tasks
+            self.config.worker.run_background_tasks
             and self.config.cleanup_extremities_with_dummy_events
         ):
             self.clock.looping_call(
diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py
index 648fcf76f8..dfc251b2a5 100644
--- a/synapse/handlers/oidc.py
+++ b/synapse/handlers/oidc.py
@@ -324,7 +324,7 @@ class OidcProvider:
         self._allow_existing_users = provider.allow_existing_users
 
         self._http_client = hs.get_proxied_http_client()
-        self._server_name: str = hs.config.server_name
+        self._server_name: str = hs.config.server.server_name
 
         # identifier for the external_ids table
         self.idp_id = provider.idp_id
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index 1dbafd253d..7dc0ee4bef 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -91,7 +91,7 @@ class PaginationHandler:
         self._retention_allowed_lifetime_min = hs.config.retention_allowed_lifetime_min
         self._retention_allowed_lifetime_max = hs.config.retention_allowed_lifetime_max
 
-        if hs.config.run_background_tasks and hs.config.retention_enabled:
+        if hs.config.worker.run_background_tasks and hs.config.retention_enabled:
             # Run the purge jobs described in the configuration file.
             for job in hs.config.retention_purge_jobs:
                 logger.info("Setting up purge job with config: %s", job)
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 20a033d0ba..51adf8762d 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -63,7 +63,7 @@ class ProfileHandler(BaseHandler):
 
         self.user_directory_handler = hs.get_user_directory_handler()
 
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             self.clock.looping_call(
                 self._update_remote_profile_cache, self.PROFILE_UPDATE_MS
             )
diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py
index c679a8303e..bd8160e7ed 100644
--- a/synapse/handlers/read_marker.py
+++ b/synapse/handlers/read_marker.py
@@ -28,7 +28,7 @@ logger = logging.getLogger(__name__)
 class ReadMarkerHandler(BaseHandler):
     def __init__(self, hs: "HomeServer"):
         super().__init__(hs)
-        self.server_name = hs.config.server_name
+        self.server_name = hs.config.server.server_name
         self.store = hs.get_datastore()
         self.account_data_handler = hs.get_account_data_handler()
         self.read_marker_linearizer = Linearizer(name="read_marker")
diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py
index fb495229a7..a49b8ee4b1 100644
--- a/synapse/handlers/receipts.py
+++ b/synapse/handlers/receipts.py
@@ -29,7 +29,7 @@ class ReceiptsHandler(BaseHandler):
     def __init__(self, hs: "HomeServer"):
         super().__init__(hs)
 
-        self.server_name = hs.config.server_name
+        self.server_name = hs.config.server.server_name
         self.store = hs.get_datastore()
         self.event_auth_handler = hs.get_event_auth_handler()
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index c374a1fbc2..38c4993da0 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -102,7 +102,7 @@ class RegistrationHandler(BaseHandler):
 
         self.spam_checker = hs.get_spam_checker()
 
-        if hs.config.worker_app:
+        if hs.config.worker.worker_app:
             self._register_client = ReplicationRegisterServlet.make_client(hs)
             self._register_device_client = RegisterDeviceReplicationServlet.make_client(
                 hs
@@ -696,7 +696,7 @@ class RegistrationHandler(BaseHandler):
             address: the IP address used to perform the registration.
             shadow_banned: Whether to shadow-ban the user
         """
-        if self.hs.config.worker_app:
+        if self.hs.config.worker.worker_app:
             await self._register_client(
                 user_id=user_id,
                 password_hash=password_hash,
@@ -786,7 +786,7 @@ class RegistrationHandler(BaseHandler):
         Does the bits that need doing on the main process. Not for use outside this
         class and RegisterDeviceReplicationServlet.
         """
-        assert not self.hs.config.worker_app
+        assert not self.hs.config.worker.worker_app
 
         valid_until_ms = None
         if self.session_lifetime is not None:
             if is_guest:
@@ -843,7 +843,7 @@ class RegistrationHandler(BaseHandler):
         """
         # TODO: 3pid registration can actually happen on the workers. Consider
         # refactoring it.
-        if self.hs.config.worker_app:
+        if self.hs.config.worker.worker_app:
             await self._post_registration_client(
                 user_id=user_id, auth_result=auth_result, access_token=access_token
             )
diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py
index 47f2e2a0c1..b64ce8cab8 100644
--- a/synapse/handlers/stats.py
+++ b/synapse/handlers/stats.py
@@ -54,7 +54,7 @@ class StatsHandler:
         # Guard to ensure we only process deltas one at a time
         self._is_processing = False
 
-        if self.stats_enabled and hs.config.run_background_tasks:
+        if self.stats_enabled and hs.config.worker.run_background_tasks:
             self.notifier.add_replication_callback(self.notify_new_event)
 
             # We kick this off so that we don't have to wait for a change before
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index b84bd5e49a..9cea011e62 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -53,7 +53,7 @@ class FollowerTypingHandler:
     def __init__(self, hs: "HomeServer"):
         self.store = hs.get_datastore()
-        self.server_name = hs.config.server_name
+        self.server_name = hs.config.server.server_name
         self.clock = hs.get_clock()
         self.is_mine_id = hs.is_mine_id
diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py
index d20f6e9106..c6c4d3bd29 100644
--- a/synapse/logging/opentracing.py
+++ b/synapse/logging/opentracing.py
@@ -383,7 +383,7 @@ def init_tracer(hs: "HomeServer"):
 
     config = JaegerConfig(
         config=hs.config.jaeger_config,
-        service_name=f"{hs.config.server_name} {hs.get_instance_name()}",
+        service_name=f"{hs.config.server.server_name} {hs.get_instance_name()}",
         scope_manager=LogContextScopeManager(hs.config),
         metrics_factory=PrometheusMetricsFactory(),
     )
diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py
index b11fa6393b..2d403532fa 100644
--- a/synapse/module_api/__init__.py
+++ b/synapse/module_api/__init__.py
@@ -178,7 +178,7 @@ class ModuleApi:
     @property
     def public_baseurl(self) -> str:
         """The configured public base URL for this homeserver."""
-        return self._hs.config.public_baseurl
+        return self._hs.config.server.public_baseurl
 
     @property
     def email_app_name(self) -> str:
@@ -640,7 +640,7 @@ class ModuleApi:
         if desc is None:
             desc = f.__name__
 
-        if self._hs.config.run_background_tasks or run_on_all_instances:
+        if self._hs.config.worker.run_background_tasks or run_on_all_instances:
             self._clock.looping_call(
                 run_as_background_process,
                 msec,
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index b0834720ad..b89c6e6f2b 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -130,7 +130,7 @@ class Mailer:
         """
         params = {"token": token, "client_secret": client_secret, "sid": sid}
         link = (
-            self.hs.config.public_baseurl
+            self.hs.config.server.public_baseurl
             + "_synapse/client/password_reset/email/submit_token?%s"
             % urllib.parse.urlencode(params)
         )
@@ -140,7 +140,7 @@ class Mailer:
         await self.send_email(
             email_address,
             self.email_subjects.password_reset
-            % {"server_name": self.hs.config.server_name},
+            % {"server_name": self.hs.config.server.server_name},
             template_vars,
         )
@@ -160,7 +160,7 @@ class Mailer:
         """
         params = {"token": token, "client_secret": client_secret, "sid": sid}
         link = (
-            self.hs.config.public_baseurl
+            self.hs.config.server.public_baseurl
             + "_matrix/client/unstable/registration/email/submit_token?%s"
             % urllib.parse.urlencode(params)
         )
@@ -170,7 +170,7 @@ class Mailer:
         await self.send_email(
             email_address,
             self.email_subjects.email_validation
-            % {"server_name": self.hs.config.server_name},
{"server_name": self.hs.config.server.server_name}, template_vars, ) @@ -191,7 +191,7 @@ class Mailer: """ params = {"token": token, "client_secret": client_secret, "sid": sid} link = ( - self.hs.config.public_baseurl + self.hs.config.server.public_baseurl + "_matrix/client/unstable/add_threepid/email/submit_token?%s" % urllib.parse.urlencode(params) ) @@ -201,7 +201,7 @@ class Mailer: await self.send_email( email_address, self.email_subjects.email_validation - % {"server_name": self.hs.config.server_name}, + % {"server_name": self.hs.config.server.server_name}, template_vars, ) @@ -852,7 +852,7 @@ class Mailer: # XXX: make r0 once API is stable return "%s_matrix/client/unstable/pushers/remove?%s" % ( - self.hs.config.public_baseurl, + self.hs.config.server.public_baseurl, urllib.parse.urlencode(params), ) diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py index 3fd2811713..37769ace48 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -73,7 +73,7 @@ class DirectTcpReplicationClientFactory(ReconnectingClientFactory): ): self.client_name = client_name self.command_handler = command_handler - self.server_name = hs.config.server_name + self.server_name = hs.config.server.server_name self.hs = hs self._clock = hs.get_clock() # As self.clock is defined in super class diff --git a/synapse/replication/tcp/handler.py b/synapse/replication/tcp/handler.py index eae4515363..509ed7fb13 100644 --- a/synapse/replication/tcp/handler.py +++ b/synapse/replication/tcp/handler.py @@ -168,7 +168,7 @@ class ReplicationCommandHandler: continue # Only add any other streams if we're on master. - if hs.config.worker_app is not None: + if hs.config.worker.worker_app is not None: continue if stream.NAME == FederationStream.NAME and hs.config.send_federation: @@ -222,7 +222,7 @@ class ReplicationCommandHandler: }, ) - self._is_master = hs.config.worker_app is None + self._is_master = hs.config.worker.worker_app is None self._federation_sender = None if self._is_master and not hs.config.send_federation: diff --git a/synapse/replication/tcp/resource.py b/synapse/replication/tcp/resource.py index bd47d84258..030852cb5b 100644 --- a/synapse/replication/tcp/resource.py +++ b/synapse/replication/tcp/resource.py @@ -40,7 +40,7 @@ class ReplicationStreamProtocolFactory(Factory): def __init__(self, hs): self.command_handler = hs.get_tcp_replication() self.clock = hs.get_clock() - self.server_name = hs.config.server_name + self.server_name = hs.config.server.server_name # If we've created a `ReplicationStreamProtocolFactory` then we're # almost certainly registering a replication listener, so let's ensure diff --git a/synapse/replication/tcp/streams/federation.py b/synapse/replication/tcp/streams/federation.py index c445af9bd9..0600cdbf36 100644 --- a/synapse/replication/tcp/streams/federation.py +++ b/synapse/replication/tcp/streams/federation.py @@ -42,7 +42,7 @@ class FederationStream(Stream): ROW_TYPE = FederationStreamRow def __init__(self, hs: "HomeServer"): - if hs.config.worker_app is None: + if hs.config.worker.worker_app is None: # master process: get updates from the FederationRemoteSendQueue. 
             # (if the master is configured to send federation itself, federation_sender
             # will be a real FederationSender, which has stubs for current_token and
diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py
index b2514d9d0d..a03774c98a 100644
--- a/synapse/rest/admin/__init__.py
+++ b/synapse/rest/admin/__init__.py
@@ -247,7 +247,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
     RegistrationTokenRestServlet(hs).register(http_server)
 
     # Some servlets only get registered for the main process.
-    if hs.config.worker_app is None:
+    if hs.config.worker.worker_app is None:
         SendServerNoticeServlet(hs).register(http_server)
diff --git a/synapse/rest/client/auth.py b/synapse/rest/client/auth.py
index df8cc4ac7a..7bb7801472 100644
--- a/synapse/rest/client/auth.py
+++ b/synapse/rest/client/auth.py
@@ -68,7 +68,10 @@ class AuthRestServlet(RestServlet):
             html = self.terms_template.render(
                 session=session,
                 terms_url="%s_matrix/consent?v=%s"
-                % (self.hs.config.public_baseurl, self.hs.config.user_consent_version),
+                % (
+                    self.hs.config.server.public_baseurl,
+                    self.hs.config.user_consent_version,
+                ),
                 myurl="%s/r0/auth/%s/fallback/web"
                 % (CLIENT_API_PREFIX, LoginType.TERMS),
             )
@@ -135,7 +138,7 @@ class AuthRestServlet(RestServlet):
                 session=session,
                 terms_url="%s_matrix/consent?v=%s"
                 % (
-                    self.hs.config.public_baseurl,
+                    self.hs.config.server.public_baseurl,
                     self.hs.config.user_consent_version,
                 ),
                 myurl="%s/r0/auth/%s/fallback/web"
diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py
index bcba106bdd..a6ede7e2f3 100644
--- a/synapse/rest/client/login.py
+++ b/synapse/rest/client/login.py
@@ -93,14 +93,14 @@ class LoginRestServlet(RestServlet):
         self._address_ratelimiter = Ratelimiter(
             store=hs.get_datastore(),
             clock=hs.get_clock(),
-            rate_hz=self.hs.config.rc_login_address.per_second,
-            burst_count=self.hs.config.rc_login_address.burst_count,
+            rate_hz=self.hs.config.ratelimiting.rc_login_address.per_second,
+            burst_count=self.hs.config.ratelimiting.rc_login_address.burst_count,
         )
         self._account_ratelimiter = Ratelimiter(
             store=hs.get_datastore(),
             clock=hs.get_clock(),
-            rate_hz=self.hs.config.rc_login_account.per_second,
-            burst_count=self.hs.config.rc_login_account.burst_count,
+            rate_hz=self.hs.config.ratelimiting.rc_login_account.per_second,
+            burst_count=self.hs.config.ratelimiting.rc_login_account.burst_count,
         )
 
         # ensure the CAS/SAML/OIDC handlers are loaded on this worker instance.
@@ -486,7 +486,7 @@ class SsoRedirectServlet(RestServlet):
         # register themselves with the main SSOHandler.
         _load_sso_handlers(hs)
         self._sso_handler = hs.get_sso_handler()
-        self._public_baseurl = hs.config.public_baseurl
+        self._public_baseurl = hs.config.server.public_baseurl
 
     async def on_GET(
         self, request: SynapseRequest, idp_id: Optional[str] = None
diff --git a/synapse/rest/client/openid.py b/synapse/rest/client/openid.py
index 4dda6dce4b..add56d6998 100644
--- a/synapse/rest/client/openid.py
+++ b/synapse/rest/client/openid.py
@@ -69,7 +69,7 @@ class IdTokenServlet(RestServlet):
         self.auth = hs.get_auth()
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
-        self.server_name = hs.config.server_name
+        self.server_name = hs.config.server.server_name
 
     async def on_POST(
         self, request: SynapseRequest, user_id: str
diff --git a/synapse/rest/client/push_rule.py b/synapse/rest/client/push_rule.py
index fb3211bf3a..ecebc46e8d 100644
--- a/synapse/rest/client/push_rule.py
+++ b/synapse/rest/client/push_rule.py
@@ -59,7 +59,7 @@ class PushRuleRestServlet(RestServlet):
         self.auth = hs.get_auth()
         self.store = hs.get_datastore()
         self.notifier = hs.get_notifier()
-        self._is_worker = hs.config.worker_app is not None
+        self._is_worker = hs.config.worker.worker_app is not None
 
         self._users_new_default_push_rules = hs.config.users_new_default_push_rules
diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py
index 9b0c546505..bf46dc60f2 100644
--- a/synapse/rest/client/room.py
+++ b/synapse/rest/client/room.py
@@ -388,7 +388,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
             limit = None
 
         handler = self.hs.get_room_list_handler()
-        if server and server != self.hs.config.server_name:
+        if server and server != self.hs.config.server.server_name:
             # Ensure the server is valid.
             try:
                 parse_and_validate_server_name(server)
@@ -438,7 +438,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
             limit = None
 
         handler = self.hs.get_room_list_handler()
-        if server and server != self.hs.config.server_name:
+        if server and server != self.hs.config.server.server_name:
             # Ensure the server is valid.
             try:
                 parse_and_validate_server_name(server)
diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py
index a5fcd15e3a..25f6eb842f 100644
--- a/synapse/rest/key/v2/local_key_resource.py
+++ b/synapse/rest/key/v2/local_key_resource.py
@@ -86,12 +86,12 @@ class LocalKey(Resource):
 
         json_object = {
             "valid_until_ts": self.valid_until_ts,
-            "server_name": self.config.server_name,
+            "server_name": self.config.server.server_name,
             "verify_keys": verify_keys,
             "old_verify_keys": old_verify_keys,
         }
         for key in self.config.signing_key:
-            json_object = sign_json(json_object, self.config.server_name, key)
+            json_object = sign_json(json_object, self.config.server.server_name, key)
         return json_object
 
     def render_GET(self, request):
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index 63a40b1852..744360e5fd 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -224,7 +224,9 @@ class RemoteKey(DirectServeJsonResource):
         for key_json in json_results:
             key_json = json_decoder.decode(key_json.decode("utf-8"))
             for signing_key in self.config.key_server_signing_keys:
-                key_json = sign_json(key_json, self.config.server_name, signing_key)
+                key_json = sign_json(
+                    key_json, self.config.server.server_name, signing_key
+                )
 
             signed_keys.append(key_json)
diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py
index 19ac3af337..6a66a88c53 100644
--- a/synapse/rest/well_known.py
+++ b/synapse/rest/well_known.py
@@ -34,10 +34,10 @@ class WellKnownBuilder:
     def get_well_known(self):
         # if we don't have a public_baseurl, we can't help much here.
-        if self._config.public_baseurl is None:
+        if self._config.server.public_baseurl is None:
             return None
 
-        result = {"m.homeserver": {"base_url": self._config.public_baseurl}}
+        result = {"m.homeserver": {"base_url": self._config.server.public_baseurl}}
 
         if self._config.default_identity_server:
             result["m.identity_server"] = {
diff --git a/synapse/server.py b/synapse/server.py
index 5adeeff61a..4777ef585d 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -313,7 +313,7 @@ class HomeServer(metaclass=abc.ABCMeta):
         # Register background tasks required by this server. This must be done
         # somewhat manually due to the background tasks not being registered
         # unless handlers are instantiated.
-        if self.config.run_background_tasks:
+        if self.config.worker.run_background_tasks:
             self.setup_background_tasks()
 
     def start_listening(self) -> None:
@@ -370,8 +370,8 @@ class HomeServer(metaclass=abc.ABCMeta):
         return Ratelimiter(
             store=self.get_datastore(),
             clock=self.get_clock(),
-            rate_hz=self.config.rc_registration.per_second,
-            burst_count=self.config.rc_registration.burst_count,
+            rate_hz=self.config.ratelimiting.rc_registration.per_second,
+            burst_count=self.config.ratelimiting.rc_registration.burst_count,
         )
 
     @cache_in_self
@@ -498,7 +498,7 @@ class HomeServer(metaclass=abc.ABCMeta):
 
     @cache_in_self
     def get_device_handler(self):
-        if self.config.worker_app:
+        if self.config.worker.worker_app:
            return DeviceWorkerHandler(self)
         else:
             return DeviceHandler(self)
@@ -621,7 +621,7 @@ class HomeServer(metaclass=abc.ABCMeta):
     def get_federation_sender(self) -> AbstractFederationSender:
         if self.should_send_federation():
             return FederationSender(self)
-        elif not self.config.worker_app:
+        elif not self.config.worker.worker_app:
             return FederationRemoteSendQueue(self)
         else:
             raise Exception("Workers cannot send federation traffic")
@@ -650,14 +650,14 @@ class HomeServer(metaclass=abc.ABCMeta):
     def get_groups_local_handler(
         self,
     ) -> Union[GroupsLocalWorkerHandler, GroupsLocalHandler]:
-        if self.config.worker_app:
+        if self.config.worker.worker_app:
             return GroupsLocalWorkerHandler(self)
         else:
             return GroupsLocalHandler(self)
 
     @cache_in_self
     def get_groups_server_handler(self):
-        if self.config.worker_app:
+        if self.config.worker.worker_app:
             return GroupsServerWorkerHandler(self)
         else:
             return GroupsServerHandler(self)
@@ -684,7 +684,7 @@ class HomeServer(metaclass=abc.ABCMeta):
 
     @cache_in_self
     def get_room_member_handler(self) -> RoomMemberHandler:
-        if self.config.worker_app:
+        if self.config.worker.worker_app:
             return RoomMemberWorkerHandler(self)
         return RoomMemberMasterHandler(self)
@@ -694,13 +694,13 @@ class HomeServer(metaclass=abc.ABCMeta):
 
     @cache_in_self
     def get_server_notices_manager(self) -> ServerNoticesManager:
-        if self.config.worker_app:
+        if self.config.worker.worker_app:
             raise Exception("Workers cannot send server notices")
         return ServerNoticesManager(self)
 
     @cache_in_self
     def get_server_notices_sender(self) -> WorkerServerNoticesSender:
-        if self.config.worker_app:
+        if self.config.worker.worker_app:
             return WorkerServerNoticesSender(self)
         return ServerNoticesSender(self)
@@ -766,7 +766,9 @@ class HomeServer(metaclass=abc.ABCMeta):
 
     @cache_in_self
     def get_federation_ratelimiter(self) -> FederationRateLimiter:
-        return FederationRateLimiter(self.get_clock(), config=self.config.rc_federation)
+        return FederationRateLimiter(
+            self.get_clock(), config=self.config.ratelimiting.rc_federation
+        )
 
     @cache_in_self
     def get_module_api(self) -> ModuleApi:
diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py
index 00a644e8f7..1dc347f0c9 100644
--- a/synapse/storage/databases/main/__init__.py
+++ b/synapse/storage/databases/main/__init__.py
@@ -271,7 +271,7 @@ class DataStore(
 
         def get_users_paginate_txn(txn):
             filters = []
-            args = [self.hs.config.server_name]
+            args = [self.hs.config.server.server_name]
 
             # Set ordering
             order_by_column = UserSortOrder(order_by).value
@@ -356,13 +356,13 @@ def check_database_before_upgrade(cur, database_engine, config: HomeServerConfig
         return
 
     user_domain = get_domain_from_id(rows[0][0])
-    if user_domain == config.server_name:
+    if user_domain == config.server.server_name:
         return
 
     raise Exception(
         "Found users in database not native to %s!\n"
         "You cannot change a synapse server_name after it's been configured"
-        % (config.server_name,)
+        % (config.server.server_name,)
     )
diff --git a/synapse/storage/databases/main/censor_events.py b/synapse/storage/databases/main/censor_events.py
index f22c1f241b..6305414e3d 100644
--- a/synapse/storage/databases/main/censor_events.py
+++ b/synapse/storage/databases/main/censor_events.py
@@ -35,7 +35,7 @@ class CensorEventsStore(EventsWorkerStore, CacheInvalidationWorkerStore, SQLBase
         super().__init__(database, db_conn, hs)
 
         if (
-            hs.config.run_background_tasks
+            hs.config.worker.run_background_tasks
             and self.hs.config.redaction_retention_period is not None
         ):
             hs.get_clock().looping_call(self._censor_redactions, 5 * 60 * 1000)
diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py
index 074b077bef..7a98275d92 100644
--- a/synapse/storage/databases/main/client_ips.py
+++ b/synapse/storage/databases/main/client_ips.py
@@ -355,7 +355,7 @@ class ClientIpWorkerStore(ClientIpBackgroundUpdateStore):
 
         self.user_ips_max_age = hs.config.user_ips_max_age
 
-        if hs.config.run_background_tasks and self.user_ips_max_age:
+        if hs.config.worker.run_background_tasks and self.user_ips_max_age:
             self._clock.looping_call(self._prune_old_user_ips, 5 * 1000)
 
     @wrap_as_background_process("prune_old_user_ips")
diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py
index 3816a0ca53..6464520386 100644
--- a/synapse/storage/databases/main/devices.py
+++ b/synapse/storage/databases/main/devices.py
@@ -51,7 +51,7 @@ class DeviceWorkerStore(SQLBaseStore):
     def __init__(self, database: DatabasePool, db_conn, hs):
         super().__init__(database, db_conn, hs)
 
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             self._clock.looping_call(
                 self._prune_old_outbound_device_pokes, 60 * 60 * 1000
             )
diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py
index bddf5ef192..047782eb06 100644
--- a/synapse/storage/databases/main/event_federation.py
+++ b/synapse/storage/databases/main/event_federation.py
@@ -62,7 +62,7 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
    def __init__(self, database: DatabasePool, db_conn, hs):
         super().__init__(database, db_conn, hs)
 
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             hs.get_clock().looping_call(
                 self._delete_old_forward_extrem_cache, 60 * 60 * 1000
             )
diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py
index 55caa6bbe7..97b3e92d3f 100644
--- a/synapse/storage/databases/main/event_push_actions.py
+++ b/synapse/storage/databases/main/event_push_actions.py
@@ -82,7 +82,7 @@ class EventPushActionsWorkerStore(SQLBaseStore):
         self._rotate_delay = 3
         self._rotate_count = 10000
         self._doing_notif_rotation = False
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             self._rotate_notif_loop = self._clock.looping_call(
                 self._rotate_notifs, 30 * 60 * 1000
             )
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index 9501f00f3b..d72e716b5c 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -158,7 +158,7 @@ class EventsWorkerStore(SQLBaseStore):
             db_conn, "events", "stream_ordering", step=-1
         )
 
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             # We periodically clean out old transaction ID mappings
             self._clock.looping_call(
                 self._cleanup_old_transaction_ids,
diff --git a/synapse/storage/databases/main/metrics.py b/synapse/storage/databases/main/metrics.py
index dc0bbc56ac..dac3d14da8 100644
--- a/synapse/storage/databases/main/metrics.py
+++ b/synapse/storage/databases/main/metrics.py
@@ -56,7 +56,7 @@ class ServerMetricsStore(EventPushActionsWorkerStore, SQLBaseStore):
         super().__init__(database, db_conn, hs)
 
         # Read the extrems every 60 minutes
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             self._clock.looping_call(self._read_forward_extremities, 60 * 60 * 1000)
 
         # Used in _generate_user_daily_visits to keep track of progress
diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py
index 5109ac174a..fafadb88fc 100644
--- a/synapse/storage/databases/main/registration.py
+++ b/synapse/storage/databases/main/registration.py
@@ -132,14 +132,14 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
             hs.config.account_validity.account_validity_startup_job_max_delta
         )
 
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             self._clock.call_later(
                 0.0,
                 self._set_expiration_date_when_missing,
             )
 
         # Create a background job for culling expired 3PID validity tokens
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             self._clock.looping_call(
                 self.cull_expired_threepid_validation_tokens, THIRTY_MINUTES_IN_MS
             )
diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py
index 6e7312266d..118b390e93 100644
--- a/synapse/storage/databases/main/room.py
+++ b/synapse/storage/databases/main/room.py
@@ -815,7 +815,7 @@ class RoomWorkerStore(SQLBaseStore):
                 If it is `None` media will be removed from quarantine
         """
         logger.info("Quarantining media: %s/%s", server_name, media_id)
-        is_local = server_name == self.config.server_name
+        is_local = server_name == self.config.server.server_name
 
         def _quarantine_media_by_id_txn(txn):
             local_mxcs = [media_id] if is_local else []
diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py
index 64c18c6f86..9beeb96aa9 100644
--- a/synapse/storage/databases/main/roommember.py
+++ b/synapse/storage/databases/main/roommember.py
@@ -81,7 +81,7 @@ class RoomMemberWorkerStore(EventsWorkerStore):
             txn.close()
 
         if (
-            self.hs.config.run_background_tasks
+            self.hs.config.worker.run_background_tasks
             and self.hs.config.metrics_flags.known_servers
         ):
             self._known_servers_count = 1
diff --git a/synapse/storage/databases/main/session.py b/synapse/storage/databases/main/session.py
index 172f27d109..5a97120437 100644
--- a/synapse/storage/databases/main/session.py
+++ b/synapse/storage/databases/main/session.py
@@ -48,7 +48,7 @@ class SessionStore(SQLBaseStore):
         super().__init__(database, db_conn, hs)
 
         # Create a background job for culling expired sessions.
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             self._clock.looping_call(self._delete_expired_sessions, 30 * 60 * 1000)
 
     async def create_session(
diff --git a/synapse/storage/databases/main/stats.py b/synapse/storage/databases/main/stats.py
index 4245fa1a3c..343d6efc92 100644
--- a/synapse/storage/databases/main/stats.py
+++ b/synapse/storage/databases/main/stats.py
@@ -672,7 +672,7 @@ class StatsStore(StateDeltasStore):
 
         def get_users_media_usage_paginate_txn(txn):
             filters = []
-            args = [self.hs.config.server_name]
+            args = [self.hs.config.server.server_name]
 
             if search_term:
                 filters.append("(lmr.user_id LIKE ? OR displayname LIKE ?)")
diff --git a/synapse/storage/databases/main/transactions.py b/synapse/storage/databases/main/transactions.py
index 7728d5f102..860146cd1b 100644
--- a/synapse/storage/databases/main/transactions.py
+++ b/synapse/storage/databases/main/transactions.py
@@ -60,7 +60,7 @@ class TransactionWorkerStore(CacheInvalidationWorkerStore):
     def __init__(self, database: DatabasePool, db_conn, hs):
         super().__init__(database, db_conn, hs)
 
-        if hs.config.run_background_tasks:
+        if hs.config.worker.run_background_tasks:
             self._clock.looping_call(self._cleanup_transactions, 30 * 60 * 1000)
 
     @wrap_as_background_process("cleanup_transactions")
diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py
index 16d9824ec1..8aebdc2817 100644
--- a/synapse/storage/databases/main/user_directory.py
+++ b/synapse/storage/databases/main/user_directory.py
@@ -510,7 +510,7 @@ class UserDirectoryStore(UserDirectoryBackgroundUpdateStore):
         self._prefer_local_users_in_search = (
             hs.config.user_directory_search_prefer_local_users
         )
-        self._server_name = hs.config.server_name
+        self._server_name = hs.config.server.server_name
 
     async def remove_from_user_dir(self, user_id: str) -> None:
         def _remove_from_user_dir_txn(txn):
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index 61392b9639..d4754c904c 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -134,7 +134,7 @@ def prepare_database(
         # if it's a worker app, refuse to upgrade the database, to avoid multiple
         # workers doing it at once.
         if (
-            config.worker_app is not None
+            config.worker.worker_app is not None
             and version_info.current_version != SCHEMA_VERSION
         ):
             raise UpgradeDatabaseException(
@@ -154,7 +154,7 @@ def prepare_database(
        # if it's a worker app, refuse to upgrade the database, to avoid multiple
        # workers doing it at once.
-        if config and config.worker_app is not None:
+        if config and config.worker.worker_app is not None:
             raise UpgradeDatabaseException(EMPTY_DATABASE_ON_WORKER_ERROR)
 
         _setup_new_database(cur, database_engine, databases=databases)
@@ -355,7 +355,7 @@ def _upgrade_existing_database(
     else:
         assert config
 
-    is_worker = config and config.worker_app is not None
+    is_worker = config and config.worker.worker_app is not None
 
     if (
         current_schema_state.compat_version is not None
diff --git a/synapse/storage/schema/main/delta/30/as_users.py b/synapse/storage/schema/main/delta/30/as_users.py
index 7f08fabe9f..8a1f340083 100644
--- a/synapse/storage/schema/main/delta/30/as_users.py
+++ b/synapse/storage/schema/main/delta/30/as_users.py
@@ -38,7 +38,7 @@ def run_upgrade(cur, database_engine, config, *args, **kwargs):
         logger.warning("Could not get app_service_config_files from config")
         pass
 
-    appservices = load_appservices(config.server_name, config_files)
+    appservices = load_appservices(config.server.server_name, config_files)
 
     owned = {}
diff --git a/synapse/storage/schema/main/delta/57/local_current_membership.py b/synapse/storage/schema/main/delta/57/local_current_membership.py
index 66989222e6..d25093c19f 100644
--- a/synapse/storage/schema/main/delta/57/local_current_membership.py
+++ b/synapse/storage/schema/main/delta/57/local_current_membership.py
@@ -67,7 +67,7 @@ def run_upgrade(cur, database_engine, config, *args, **kwargs):
             INNER JOIN room_memberships AS r USING (event_id)
             WHERE type = 'm.room.member' AND state_key LIKE ?
         """
-    cur.execute(sql, ("%:" + config.server_name,))
+    cur.execute(sql, ("%:" + config.server.server_name,))
 
    cur.execute(
         "CREATE UNIQUE INDEX local_current_membership_idx ON local_current_membership(user_id, room_id)"
diff --git a/synapse/util/templates.py b/synapse/util/templates.py
index eb3c8c9370..12941065ca 100644
--- a/synapse/util/templates.py
+++ b/synapse/util/templates.py
@@ -63,12 +63,12 @@ def build_jinja_env(
     env.filters.update(
         {
             "format_ts": _format_ts_filter,
-            "mxc_to_http": _create_mxc_to_http_filter(config.public_baseurl),
+            "mxc_to_http": _create_mxc_to_http_filter(config.server.public_baseurl),
         }
     )
 
     # common variables for all templates
-    env.globals.update({"server_name": config.server_name})
+    env.globals.update({"server_name": config.server.server_name})
 
     return env