author     Patrick Cloke <clokep@users.noreply.github.com>   2021-09-29 06:44:15 -0400
committer  GitHub <noreply@github.com>                       2021-09-29 06:44:15 -0400
commit     94b620a5edd6b5bc55c8aad6e00a11cc6bf210fa (patch)
tree       f05585bffb2b2b093ddaafd18f4959e21dcc8eee
parent     Implement MSC3069: Guest support on whoami (#9655) (diff)
Use direct references for configuration variables (part 6). (#10916)
Diffstat
54 files changed, 141 insertions, 132 deletions
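
The whole series boils down to one mechanical change: read each setting through its config section (for example `hs.config.server.max_mau_value`) instead of the legacy flat attribute (`hs.config.max_mau_value`), which `RootConfig` resolves by searching its sections. The sketch below is illustrative only and is not Synapse's actual code; the names `RootConfigSketch`, `ServerSection` and the default values are hypothetical, a minimal sketch of the pattern the diff applies.

    # Illustrative sketch only -- not the actual Synapse RootConfig implementation.
    from dataclasses import dataclass


    @dataclass
    class ServerSection:
        # Hypothetical defaults, for illustration only.
        max_mau_value: int = 100
        limit_usage_by_mau: bool = False


    class RootConfigSketch:
        def __init__(self) -> None:
            self.server = ServerSection()
            self._sections = {"server": self.server}

        def __getattr__(self, item: str):
            # Legacy fallback: resolve flat names by searching each section so
            # that old call sites like `config.max_mau_value` keep working while
            # the migration is in progress.
            for section in self._sections.values():
                if hasattr(section, item):
                    return getattr(section, item)
            raise AttributeError(item)


    config = RootConfigSketch()
    assert config.server.max_mau_value == 100   # direct reference (what this PR moves to)
    assert config.max_mau_value == 100          # legacy lookup (being phased out)

The diff that follows applies exactly this substitution, file by file.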
diff --git a/changelog.d/10916.misc b/changelog.d/10916.misc
new file mode 100644
index 0000000000..586a0b3a96
--- /dev/null
+++ b/changelog.d/10916.misc
@@ -0,0 +1 @@
+Use direct references to config flags.
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 548f6dcde9..749bc1deb9 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -86,11 +86,11 @@ def start_worker_reactor(appname, config, run_command=reactor.run):
     start_reactor(
         appname,
-        soft_file_limit=config.soft_file_limit,
-        gc_thresholds=config.gc_thresholds,
+        soft_file_limit=config.server.soft_file_limit,
+        gc_thresholds=config.server.gc_thresholds,
         pid_file=config.worker.worker_pid_file,
         daemonize=config.worker.worker_daemonize,
-        print_pidfile=config.print_pidfile,
+        print_pidfile=config.server.print_pidfile,
         logger=logger,
         run_command=run_command,
     )
@@ -298,7 +298,7 @@ def refresh_certificate(hs):
     Refresh the TLS certificates that Synapse is using by re-reading them from
     disk and updating the TLS context factories to use them.
     """
-    if not hs.config.has_tls_listener():
+    if not hs.config.server.has_tls_listener():
         return
     hs.config.read_certificate_from_disk()
diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py
index f2c5b75247..556bcc124e 100644
--- a/synapse/app/admin_cmd.py
+++ b/synapse/app/admin_cmd.py
@@ -195,14 +195,14 @@ def start(config_options):
     config.logging.no_redirect_stdio = True
     # Explicitly disable background processes
-    config.update_user_directory = False
+    config.server.update_user_directory = False
     config.worker.run_background_tasks = False
     config.start_pushers = False
     config.pusher_shard_config.instances = []
     config.send_federation = False
     config.federation_shard_config.instances = []
-    synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
+    synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
     ss = AdminCmdServer(
         config.server.server_name,
diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py
index 3036e1b4a0..7489f31d9a 100644
--- a/synapse/app/generic_worker.py
+++ b/synapse/app/generic_worker.py
@@ -462,7 +462,7 @@ def start(config_options):
         # For other worker types we force this to off.
         config.server.update_user_directory = False
-    synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
+    synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
     synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage
     if config.server.gc_seconds:
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 205831dcda..2b2d4bbf83 100644
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -248,7 +248,7 @@ class SynapseHomeServer(HomeServer):
             resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
         if name == "webclient":
-            webclient_loc = self.config.web_client_location
+            webclient_loc = self.config.server.web_client_location
             if webclient_loc is None:
                 logger.warning(
@@ -343,7 +343,7 @@ def setup(config_options):
         # generating config files and shouldn't try to continue.
            sys.exit(0)
-    events.USE_FROZEN_DICTS = config.use_frozen_dicts
+    events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
     synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage
     if config.server.gc_seconds:
@@ -439,11 +439,11 @@ def run(hs):
     _base.start_reactor(
         "synapse-homeserver",
-        soft_file_limit=hs.config.soft_file_limit,
-        gc_thresholds=hs.config.gc_thresholds,
-        pid_file=hs.config.pid_file,
-        daemonize=hs.config.daemonize,
-        print_pidfile=hs.config.print_pidfile,
+        soft_file_limit=hs.config.server.soft_file_limit,
+        gc_thresholds=hs.config.server.gc_thresholds,
+        pid_file=hs.config.server.pid_file,
+        daemonize=hs.config.server.daemonize,
+        print_pidfile=hs.config.server.print_pidfile,
         logger=logger,
     )
diff --git a/synapse/app/phone_stats_home.py b/synapse/app/phone_stats_home.py
index 49e7a45e5c..fcd01e833c 100644
--- a/synapse/app/phone_stats_home.py
+++ b/synapse/app/phone_stats_home.py
@@ -74,7 +74,7 @@ async def phone_stats_home(hs, stats, stats_process=_stats_process):
     store = hs.get_datastore()
     stats["homeserver"] = hs.config.server.server_name
-    stats["server_context"] = hs.config.server_context
+    stats["server_context"] = hs.config.server.server_context
     stats["timestamp"] = now
     stats["uptime_seconds"] = uptime
     version = sys.version_info
@@ -171,7 +171,7 @@ def start_phone_stats_home(hs):
         current_mau_count_by_service = {}
         reserved_users = ()
         store = hs.get_datastore()
-        if hs.config.limit_usage_by_mau or hs.config.mau_stats_only:
+        if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
             current_mau_count = await store.get_monthly_active_count()
             current_mau_count_by_service = (
                 await store.get_monthly_active_count_by_service()
@@ -183,9 +183,9 @@ def start_phone_stats_home(hs):
             current_mau_by_service_gauge.labels(app_service).set(float(count))
         registered_reserved_users_mau_gauge.set(float(len(reserved_users)))
-        max_mau_gauge.set(float(hs.config.max_mau_value))
+        max_mau_gauge.set(float(hs.config.server.max_mau_value))
-    if hs.config.limit_usage_by_mau or hs.config.mau_stats_only:
+    if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
         generate_monthly_active_users()
         clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000)
     # End of monthly active user settings
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index d974a1a2a8..26152b0924 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -327,7 +327,7 @@ class RootConfig:
         """
         Redirect lookups on this object either to config objects, or values on
         config objects, so that `config.tls.blah` works, as well as legacy uses
-        of things like `config.server_name`. It will first look up the config
+        of things like `config.server.server_name`. It will first look up the config
         section name, and then values on those config classes.
         """
         if item in self._configs.keys():
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 041412d7ad..818b806357 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -1,6 +1,4 @@
-# Copyright 2014-2016 OpenMarket Ltd
-# Copyright 2017-2018 New Vector Ltd
-# Copyright 2019 The Matrix.org Foundation C.I.C.
+# Copyright 2014-2021 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
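
The `RootConfig` docstring touched just above describes the legacy fallback that keeps old flat lookups working while call sites are migrated. A hypothetical extension of that fallback, sketched below, would emit a `DeprecationWarning` so remaining flat-attribute call sites show up in test logs; this is not part of this change or of Synapse, just an illustration of how such a migration could be policed.

    # Hypothetical helper, not part of this change: a legacy fallback that warns.
    import warnings
    from types import SimpleNamespace


    class WarningRootConfigSketch:
        def __init__(self, **sections) -> None:
            self._sections = dict(sections)
            for name, section in self._sections.items():
                setattr(self, name, section)

        def __getattr__(self, item: str):
            # Only reached when normal attribute lookup fails, i.e. for legacy
            # flat names such as `config.pid_file`.
            for name, section in self._sections.items():
                if hasattr(section, item):
                    warnings.warn(
                        f"legacy config lookup; use config.{name}.{item}",
                        DeprecationWarning,
                        stacklevel=2,
                    )
                    return getattr(section, item)
            raise AttributeError(item)


    cfg = WarningRootConfigSketch(server=SimpleNamespace(pid_file="/tmp/synapse.pid"))
    assert cfg.server.pid_file == cfg.pid_file  # the legacy path still works, but warns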
diff --git a/synapse/events/presence_router.py b/synapse/events/presence_router.py
index eb4556cdc1..68b8b19024 100644
--- a/synapse/events/presence_router.py
+++ b/synapse/events/presence_router.py
@@ -45,11 +45,11 @@ def load_legacy_presence_router(hs: "HomeServer"):
     configuration, and registers the hooks they implement.
     """
-    if hs.config.presence_router_module_class is None:
+    if hs.config.server.presence_router_module_class is None:
         return
-    module = hs.config.presence_router_module_class
-    config = hs.config.presence_router_config
+    module = hs.config.server.presence_router_module_class
+    config = hs.config.server.presence_router_config
     api = hs.get_module_api()
     presence_router = module(config=config, module_api=api)
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index f86113a448..a13fb0148f 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -372,7 +372,7 @@ class EventClientSerializer:
     def __init__(self, hs):
         self.store = hs.get_datastore()
         self.experimental_msc1849_support_enabled = (
-            hs.config.experimental_msc1849_support_enabled
+            hs.config.server.experimental_msc1849_support_enabled
         )
     async def serialize_event(
diff --git a/synapse/federation/transport/server/__init__.py b/synapse/federation/transport/server/__init__.py
index 95176ba6f9..c32539bf5a 100644
--- a/synapse/federation/transport/server/__init__.py
+++ b/synapse/federation/transport/server/__init__.py
@@ -117,7 +117,7 @@ class PublicRoomList(BaseFederationServlet):
     ):
         super().__init__(hs, authenticator, ratelimiter, server_name)
         self.handler = hs.get_room_list_handler()
-        self.allow_access = hs.config.allow_public_rooms_over_federation
+        self.allow_access = hs.config.server.allow_public_rooms_over_federation
     async def on_GET(
         self, origin: str, content: Literal[None], query: Dict[bytes, List[bytes]]
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index 5cfba3c817..9078781d5a 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -49,7 +49,7 @@ class DirectoryHandler(BaseHandler):
         self.store = hs.get_datastore()
         self.config = hs.config
         self.enable_room_list_search = hs.config.roomdirectory.enable_room_list_search
-        self.require_membership = hs.config.require_membership_for_aliases
+        self.require_membership = hs.config.server.require_membership_for_aliases
         self.third_party_event_rules = hs.get_third_party_event_rules()
         self.federation = hs.get_federation_client()
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 16c435ee86..3b0b895b07 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -762,7 +762,7 @@ class FederationHandler(BaseHandler):
         if is_blocked:
             raise SynapseError(403, "This room has been blocked on this server")
-        if self.hs.config.block_non_admin_invites:
+        if self.hs.config.server.block_non_admin_invites:
             raise SynapseError(403, "This server does not accept room invites")
         if not await self.spam_checker.user_may_invite(
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index fe8a995892..a0640fcac0 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -57,7 +57,7 @@ class IdentityHandler(BaseHandler):
         self.http_client = SimpleHttpClient(hs)
         # An HTTP client for contacting identity servers specified by clients.
         self.blacklisting_http_client = SimpleHttpClient(
-            hs, ip_blacklist=hs.config.federation_ip_range_blacklist
+            hs, ip_blacklist=hs.config.server.federation_ip_range_blacklist
         )
         self.federation_http_client = hs.get_federation_http_client()
         self.hs = hs
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 39c18ecf99..3b8cc50ec0 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -81,7 +81,7 @@ class MessageHandler:
         self.storage = hs.get_storage()
         self.state_store = self.storage.state
         self._event_serializer = hs.get_event_client_serializer()
-        self._ephemeral_events_enabled = hs.config.enable_ephemeral_messages
+        self._ephemeral_events_enabled = hs.config.server.enable_ephemeral_messages
         # The scheduled call to self._expire_event. None if no call is currently
         # scheduled.
@@ -415,7 +415,9 @@ class EventCreationHandler:
         self.server_name = hs.hostname
         self.notifier = hs.get_notifier()
         self.config = hs.config
-        self.require_membership_for_aliases = hs.config.require_membership_for_aliases
+        self.require_membership_for_aliases = (
+            hs.config.server.require_membership_for_aliases
+        )
         self._events_shard_config = self.config.worker.events_shard_config
         self._instance_name = hs.get_instance_name()
@@ -425,7 +427,7 @@ class EventCreationHandler:
             Membership.JOIN,
             Membership.KNOCK,
         }
-        if self.hs.config.include_profile_data_on_invite:
+        if self.hs.config.server.include_profile_data_on_invite:
             self.membership_types_to_include_profile_data_in.add(Membership.INVITE)
         self.send_event = ReplicationSendEventRestServlet.make_client(hs)
@@ -461,11 +463,11 @@ class EventCreationHandler:
         #
         self._rooms_to_exclude_from_dummy_event_insertion: Dict[str, int] = {}
         # The number of forward extremeities before a dummy event is sent.
-        self._dummy_events_threshold = hs.config.dummy_events_threshold
+        self._dummy_events_threshold = hs.config.server.dummy_events_threshold
         if (
             self.config.worker.run_background_tasks
-            and self.config.cleanup_extremities_with_dummy_events
+            and self.config.server.cleanup_extremities_with_dummy_events
         ):
             self.clock.looping_call(
                 lambda: run_as_background_process(
@@ -477,7 +479,7 @@ class EventCreationHandler:
         self._message_handler = hs.get_message_handler()
-        self._ephemeral_events_enabled = hs.config.enable_ephemeral_messages
+        self._ephemeral_events_enabled = hs.config.server.enable_ephemeral_messages
         self._external_cache = hs.get_external_cache()
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index a5301ece6f..176e4dfdd4 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -85,12 +85,18 @@ class PaginationHandler:
         self._purges_by_id: Dict[str, PurgeStatus] = {}
         self._event_serializer = hs.get_event_client_serializer()
-        self._retention_default_max_lifetime = hs.config.retention_default_max_lifetime
+        self._retention_default_max_lifetime = (
+            hs.config.server.retention_default_max_lifetime
+        )
-        self._retention_allowed_lifetime_min = hs.config.retention_allowed_lifetime_min
-        self._retention_allowed_lifetime_max = hs.config.retention_allowed_lifetime_max
+        self._retention_allowed_lifetime_min = (
+            hs.config.server.retention_allowed_lifetime_min
+        )
+        self._retention_allowed_lifetime_max = (
+            hs.config.server.retention_allowed_lifetime_max
+        )
-        if hs.config.worker.run_background_tasks and hs.config.retention_enabled:
+        if hs.config.worker.run_background_tasks and hs.config.server.retention_enabled:
             # Run the purge jobs described in the configuration file.
             for job in hs.config.server.retention_purge_jobs:
                 logger.info("Setting up purge job with config: %s", job)
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index b23a1541bc..425c0d4973 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -397,7 +397,7 @@ class ProfileHandler(BaseHandler):
         # when building a membership event. In this case, we must allow the
         # lookup.
         if (
-            not self.hs.config.limit_profile_requests_to_users_who_share_rooms
+            not self.hs.config.server.limit_profile_requests_to_users_who_share_rooms
             or not requester
         ):
             return
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 4f99f137a2..4a7ccb882e 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -854,7 +854,7 @@ class RegistrationHandler(BaseHandler):
         # Necessary due to auth checks prior to the threepid being
         # written to the db
         if is_threepid_reserved(
-            self.hs.config.mau_limits_reserved_threepids, threepid
+            self.hs.config.server.mau_limits_reserved_threepids, threepid
         ):
             await self.store.upsert_monthly_active_user(user_id)
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index dc4fab2223..bf8a85f563 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -666,7 +666,7 @@ class RoomCreationHandler(BaseHandler):
         await self.ratelimit(requester)
         room_version_id = config.get(
-            "room_version", self.config.default_room_version.identifier
+            "room_version", self.config.server.default_room_version.identifier
         )
         if not isinstance(room_version_id, str):
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 1a56c82fbd..02103f6c9a 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -90,7 +90,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         self.third_party_event_rules = hs.get_third_party_event_rules()
         self._server_notices_mxid = self.config.servernotices.server_notices_mxid
         self._enable_lookup = hs.config.enable_3pid_lookup
-        self.allow_per_room_profiles = self.config.allow_per_room_profiles
+        self.allow_per_room_profiles = self.config.server.allow_per_room_profiles
         self._join_rate_limiter_local = Ratelimiter(
             store=self.store,
@@ -617,7 +617,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         is_requester_admin = await self.auth.is_server_admin(requester.user)
         if not is_requester_admin:
-            if self.config.block_non_admin_invites:
+            if self.config.server.block_non_admin_invites:
                 logger.info(
                     "Blocking invite: user is not admin and non-admin "
                     "invites disabled"
@@ -1222,7 +1222,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         Raises:
             ShadowBanError if the requester has been shadow-banned.
         """
-        if self.config.block_non_admin_invites:
+        if self.config.server.block_non_admin_invites:
             is_requester_admin = await self.auth.is_server_admin(requester.user)
             if not is_requester_admin:
                 raise SynapseError(
@@ -1420,7 +1420,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
         Returns: bool of whether the complexity is too great, or None if unable
             to be fetched
         """
-        max_complexity = self.hs.config.limit_remote_rooms.complexity
+        max_complexity = self.hs.config.server.limit_remote_rooms.complexity
         complexity = await self.federation_handler.get_room_complexity(
             remote_room_hosts, room_id
         )
@@ -1436,7 +1436,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
         Args:
             room_id: The room ID to check for complexity.
""" - max_complexity = self.hs.config.limit_remote_rooms.complexity + max_complexity = self.hs.config.server.limit_remote_rooms.complexity complexity = await self.store.get_room_complexity(room_id) return complexity["v1"] > max_complexity @@ -1472,7 +1472,7 @@ class RoomMemberMasterHandler(RoomMemberHandler): if too_complex is True: raise SynapseError( code=400, - msg=self.hs.config.limit_remote_rooms.complexity_error, + msg=self.hs.config.server.limit_remote_rooms.complexity_error, errcode=Codes.RESOURCE_LIMIT_EXCEEDED, ) @@ -1507,7 +1507,7 @@ class RoomMemberMasterHandler(RoomMemberHandler): ) raise SynapseError( code=400, - msg=self.hs.config.limit_remote_rooms.complexity_error, + msg=self.hs.config.server.limit_remote_rooms.complexity_error, errcode=Codes.RESOURCE_LIMIT_EXCEEDED, ) diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 8226d6f5a1..6d3333ee00 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -105,7 +105,7 @@ class SearchHandler(BaseHandler): dict to be returned to the client with results of search """ - if not self.hs.config.enable_search: + if not self.hs.config.server.enable_search: raise SynapseError(400, "Search is disabled on this homeserver") batch_group = None diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index b91e7cb501..f4430ce3c9 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -60,7 +60,7 @@ class UserDirectoryHandler(StateDeltasHandler): self.clock = hs.get_clock() self.notifier = hs.get_notifier() self.is_mine_id = hs.is_mine_id - self.update_user_directory = hs.config.update_user_directory + self.update_user_directory = hs.config.server.update_user_directory self.search_all_users = hs.config.userdirectory.user_directory_search_all_users self.spam_checker = hs.get_spam_checker() # The current position in the current_state_delta stream diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index cdc36b8d25..4f59224686 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -327,23 +327,23 @@ class MatrixFederationHttpClient: self.reactor = hs.get_reactor() user_agent = hs.version_string - if hs.config.user_agent_suffix: - user_agent = "%s %s" % (user_agent, hs.config.user_agent_suffix) + if hs.config.server.user_agent_suffix: + user_agent = "%s %s" % (user_agent, hs.config.server.user_agent_suffix) user_agent = user_agent.encode("ascii") federation_agent = MatrixFederationAgent( self.reactor, tls_client_options_factory, user_agent, - hs.config.federation_ip_range_whitelist, - hs.config.federation_ip_range_blacklist, + hs.config.server.federation_ip_range_whitelist, + hs.config.server.federation_ip_range_blacklist, ) # Use a BlacklistingAgentWrapper to prevent circumventing the IP # blacklist via IP literals in server names self.agent = BlacklistingAgentWrapper( federation_agent, - ip_blacklist=hs.config.federation_ip_range_blacklist, + ip_blacklist=hs.config.server.federation_ip_range_blacklist, ) self.clock = hs.get_clock() diff --git a/synapse/replication/tcp/resource.py b/synapse/replication/tcp/resource.py index 030852cb5b..80f9b23bfd 100644 --- a/synapse/replication/tcp/resource.py +++ b/synapse/replication/tcp/resource.py @@ -71,7 +71,7 @@ class ReplicationStreamer: self.notifier = hs.get_notifier() self._instance_name = hs.get_instance_name() - self._replication_torture_level = hs.config.replication_torture_level + 
+        self._replication_torture_level = hs.config.server.replication_torture_level
         self.notifier.add_replication_callback(self.on_notifier_poke)
diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py
index bacb828330..fff133ef10 100644
--- a/synapse/rest/client/account.py
+++ b/synapse/rest/client/account.py
@@ -119,7 +119,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
         )
         if existing_user_id is None:
-            if self.config.request_token_inhibit_3pid_errors:
+            if self.config.server.request_token_inhibit_3pid_errors:
                 # Make the client think the operation succeeded. See the rationale in the
                 # comments for request_token_inhibit_3pid_errors.
                 # Also wait for some random amount of time between 100ms and 1s to make it
@@ -403,7 +403,7 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
         existing_user_id = await self.store.get_user_id_by_threepid("email", email)
         if existing_user_id is not None:
-            if self.config.request_token_inhibit_3pid_errors:
+            if self.config.server.request_token_inhibit_3pid_errors:
                 # Make the client think the operation succeeded. See the rationale in the
                 # comments for request_token_inhibit_3pid_errors.
                 # Also wait for some random amount of time between 100ms and 1s to make it
@@ -486,7 +486,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
         existing_user_id = await self.store.get_user_id_by_threepid("msisdn", msisdn)
         if existing_user_id is not None:
-            if self.hs.config.request_token_inhibit_3pid_errors:
+            if self.hs.config.server.request_token_inhibit_3pid_errors:
                 # Make the client think the operation succeeded. See the rationale in the
                 # comments for request_token_inhibit_3pid_errors.
                 # Also wait for some random amount of time between 100ms and 1s to make it
@@ -857,8 +857,8 @@ def assert_valid_next_link(hs: "HomeServer", next_link: str) -> None:
     # If the domain whitelist is set, the domain must be in it
     if (
         valid
-        and hs.config.next_link_domain_whitelist is not None
-        and next_link_parsed.hostname not in hs.config.next_link_domain_whitelist
+        and hs.config.server.next_link_domain_whitelist is not None
+        and next_link_parsed.hostname not in hs.config.server.next_link_domain_whitelist
     ):
         valid = False
diff --git a/synapse/rest/client/capabilities.py b/synapse/rest/client/capabilities.py
index 65b3b5ce2c..d6b6256413 100644
--- a/synapse/rest/client/capabilities.py
+++ b/synapse/rest/client/capabilities.py
@@ -44,10 +44,10 @@ class CapabilitiesRestServlet(RestServlet):
         await self.auth.get_user_by_req(request, allow_guest=True)
         change_password = self.auth_handler.can_change_password()
-        response = {
+        response: JsonDict = {
             "capabilities": {
                 "m.room_versions": {
-                    "default": self.config.default_room_version.identifier,
+                    "default": self.config.server.default_room_version.identifier,
                     "available": {
                         v.identifier: v.disposition
                         for v in KNOWN_ROOM_VERSIONS.values()
diff --git a/synapse/rest/client/filter.py b/synapse/rest/client/filter.py
index 6ed60c7418..cc1c2f9731 100644
--- a/synapse/rest/client/filter.py
+++ b/synapse/rest/client/filter.py
@@ -90,7 +90,7 @@ class CreateFilterRestServlet(RestServlet):
             raise AuthError(403, "Can only create filters for local users")
         content = parse_json_object_from_request(request)
-        set_timeline_upper_limit(content, self.hs.config.filter_timeline_limit)
+        set_timeline_upper_limit(content, self.hs.config.server.filter_timeline_limit)
         filter_id = await self.filtering.add_user_filter(
             user_localpart=target_user.localpart, user_filter=content
diff --git a/synapse/rest/client/profile.py b/synapse/rest/client/profile.py
index d0f20de569..c684636c0a 100644
--- a/synapse/rest/client/profile.py
+++ b/synapse/rest/client/profile.py
@@ -41,7 +41,7 @@ class ProfileDisplaynameRestServlet(RestServlet):
     ) -> Tuple[int, JsonDict]:
         requester_user = None
-        if self.hs.config.require_auth_for_profile_requests:
+        if self.hs.config.server.require_auth_for_profile_requests:
             requester = await self.auth.get_user_by_req(request)
             requester_user = requester.user
@@ -94,7 +94,7 @@ class ProfileAvatarURLRestServlet(RestServlet):
     ) -> Tuple[int, JsonDict]:
         requester_user = None
-        if self.hs.config.require_auth_for_profile_requests:
+        if self.hs.config.server.require_auth_for_profile_requests:
             requester = await self.auth.get_user_by_req(request)
             requester_user = requester.user
@@ -146,7 +146,7 @@ class ProfileRestServlet(RestServlet):
     ) -> Tuple[int, JsonDict]:
         requester_user = None
-        if self.hs.config.require_auth_for_profile_requests:
+        if self.hs.config.server.require_auth_for_profile_requests:
             requester = await self.auth.get_user_by_req(request)
             requester_user = requester.user
diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py
index 48b0062cf4..a6eb6f6410 100644
--- a/synapse/rest/client/register.py
+++ b/synapse/rest/client/register.py
@@ -129,7 +129,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
         )
         if existing_user_id is not None:
-            if self.hs.config.request_token_inhibit_3pid_errors:
+            if self.hs.config.server.request_token_inhibit_3pid_errors:
                 # Make the client think the operation succeeded. See the rationale in the
                 # comments for request_token_inhibit_3pid_errors.
                 # Also wait for some random amount of time between 100ms and 1s to make it
@@ -209,7 +209,7 @@ class MsisdnRegisterRequestTokenRestServlet(RestServlet):
         )
         if existing_user_id is not None:
-            if self.hs.config.request_token_inhibit_3pid_errors:
+            if self.hs.config.server.request_token_inhibit_3pid_errors:
                 # Make the client think the operation succeeded. See the rationale in the
                 # comments for request_token_inhibit_3pid_errors.
                 # Also wait for some random amount of time between 100ms and 1s to make it
@@ -682,7 +682,7 @@ class RegisterRestServlet(RestServlet):
             # written to the db
             if threepid:
                 if is_threepid_reserved(
-                    self.hs.config.mau_limits_reserved_threepids, threepid
+                    self.hs.config.server.mau_limits_reserved_threepids, threepid
                 ):
                     await self.store.upsert_monthly_active_user(registered_user_id)
diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py
index bf46dc60f2..ed95189b6d 100644
--- a/synapse/rest/client/room.py
+++ b/synapse/rest/client/room.py
@@ -369,7 +369,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
             # Option to allow servers to require auth when accessing
             # /publicRooms via CS API. This is especially helpful in private
            # federations.
-            if not self.hs.config.allow_public_rooms_without_auth:
+            if not self.hs.config.server.allow_public_rooms_without_auth:
                 raise
             # We allow people to not be authed if they're just looking at our
diff --git a/synapse/rest/client/shared_rooms.py b/synapse/rest/client/shared_rooms.py
index 1d90493eb0..09a46737de 100644
--- a/synapse/rest/client/shared_rooms.py
+++ b/synapse/rest/client/shared_rooms.py
@@ -42,7 +42,7 @@ class UserSharedRoomsServlet(RestServlet):
         super().__init__()
         self.auth = hs.get_auth()
         self.store = hs.get_datastore()
-        self.user_directory_active = hs.config.update_user_directory
+        self.user_directory_active = hs.config.server.update_user_directory
     async def on_GET(
         self, request: SynapseRequest, user_id: str
diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py
index 1259058b9b..913216a7c4 100644
--- a/synapse/rest/client/sync.py
+++ b/synapse/rest/client/sync.py
@@ -155,7 +155,7 @@ class SyncRestServlet(RestServlet):
             try:
                 filter_object = json_decoder.decode(filter_id)
                 set_timeline_upper_limit(
-                    filter_object, self.hs.config.filter_timeline_limit
+                    filter_object, self.hs.config.server.filter_timeline_limit
                 )
             except Exception:
                 raise SynapseError(400, "Invalid filter JSON")
diff --git a/synapse/server_notices/resource_limits_server_notices.py b/synapse/server_notices/resource_limits_server_notices.py
index 073b0d754f..8522930b50 100644
--- a/synapse/server_notices/resource_limits_server_notices.py
+++ b/synapse/server_notices/resource_limits_server_notices.py
@@ -47,9 +47,9 @@ class ResourceLimitsServerNotices:
         self._notifier = hs.get_notifier()
         self._enabled = (
-            hs.config.limit_usage_by_mau
+            hs.config.server.limit_usage_by_mau
             and self._server_notices_manager.is_enabled()
-            and not hs.config.hs_disabled
+            and not hs.config.server.hs_disabled
         )
     async def maybe_send_server_notice_to_user(self, user_id: str) -> None:
@@ -98,7 +98,7 @@ class ResourceLimitsServerNotices:
             try:
                 if (
                     limit_type == LimitBlockingTypes.MONTHLY_ACTIVE_USER
-                    and not self._config.mau_limit_alerting
+                    and not self._config.server.mau_limit_alerting
                 ):
                     # We have hit the MAU limit, but MAU alerting is disabled:
                     # reset room if necessary and return
@@ -149,7 +149,7 @@ class ResourceLimitsServerNotices:
                 "body": event_body,
                 "msgtype": ServerNoticeMsgType,
                 "server_notice_type": ServerNoticeLimitReached,
-                "admin_contact": self._config.admin_contact,
+                "admin_contact": self._config.server.admin_contact,
                 "limit_type": event_limit_type,
             }
             event = await self._server_notices_manager.send_notice(
diff --git a/synapse/storage/databases/main/censor_events.py b/synapse/storage/databases/main/censor_events.py
index 6305414e3d..eee07227ef 100644
--- a/synapse/storage/databases/main/censor_events.py
+++ b/synapse/storage/databases/main/censor_events.py
@@ -36,7 +36,7 @@ class CensorEventsStore(EventsWorkerStore, CacheInvalidationWorkerStore, SQLBase
         if (
             hs.config.worker.run_background_tasks
-            and self.hs.config.redaction_retention_period is not None
+            and self.hs.config.server.redaction_retention_period is not None
         ):
             hs.get_clock().looping_call(self._censor_redactions, 5 * 60 * 1000)
@@ -48,7 +48,7 @@ class CensorEventsStore(EventsWorkerStore, CacheInvalidationWorkerStore, SQLBase
         By censor we mean update the event_json table with the redacted event.
         """
-        if self.hs.config.redaction_retention_period is None:
+        if self.hs.config.server.redaction_retention_period is None:
             return
         if not (
@@ -60,7 +60,9 @@ class CensorEventsStore(EventsWorkerStore, CacheInvalidationWorkerStore, SQLBase
            # created.
            return
-        before_ts = self._clock.time_msec() - self.hs.config.redaction_retention_period
+        before_ts = (
+            self._clock.time_msec() - self.hs.config.server.redaction_retention_period
+        )
         # We fetch all redactions that:
         # 1. point to an event we have,
diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py
index 7e33ae578c..0e1d97aaeb 100644
--- a/synapse/storage/databases/main/client_ips.py
+++ b/synapse/storage/databases/main/client_ips.py
@@ -353,7 +353,7 @@ class ClientIpWorkerStore(ClientIpBackgroundUpdateStore):
     def __init__(self, database: DatabasePool, db_conn, hs):
         super().__init__(database, db_conn, hs)
-        self.user_ips_max_age = hs.config.user_ips_max_age
+        self.user_ips_max_age = hs.config.server.user_ips_max_age
         if hs.config.worker.run_background_tasks and self.user_ips_max_age:
             self._clock.looping_call(self._prune_old_user_ips, 5 * 1000)
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index cc4e31ec30..bc7d213fe2 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -104,7 +104,7 @@ class PersistEventsStore:
         self._clock = hs.get_clock()
         self._instance_name = hs.get_instance_name()
-        self._ephemeral_messages_enabled = hs.config.enable_ephemeral_messages
+        self._ephemeral_messages_enabled = hs.config.server.enable_ephemeral_messages
         self.is_mine_id = hs.is_mine_id
         # Ideally we'd move these ID gens here, unfortunately some other ID
diff --git a/synapse/storage/databases/main/monthly_active_users.py b/synapse/storage/databases/main/monthly_active_users.py
index b76ee51a9b..a14ac03d4b 100644
--- a/synapse/storage/databases/main/monthly_active_users.py
+++ b/synapse/storage/databases/main/monthly_active_users.py
@@ -32,8 +32,8 @@ class MonthlyActiveUsersWorkerStore(SQLBaseStore):
         self._clock = hs.get_clock()
         self.hs = hs
-        self._limit_usage_by_mau = hs.config.limit_usage_by_mau
-        self._max_mau_value = hs.config.max_mau_value
+        self._limit_usage_by_mau = hs.config.server.limit_usage_by_mau
+        self._max_mau_value = hs.config.server.max_mau_value
     @cached(num_args=0)
     async def get_monthly_active_count(self) -> int:
@@ -96,8 +96,8 @@ class MonthlyActiveUsersWorkerStore(SQLBaseStore):
         """
         users = []
-        for tp in self.hs.config.mau_limits_reserved_threepids[
-            : self.hs.config.max_mau_value
+        for tp in self.hs.config.server.mau_limits_reserved_threepids[
+            : self.hs.config.server.max_mau_value
         ]:
             user_id = await self.hs.get_datastore().get_user_id_by_threepid(
                 tp["medium"], tp["address"]
@@ -212,7 +212,7 @@ class MonthlyActiveUsersStore(MonthlyActiveUsersWorkerStore):
     def __init__(self, database: DatabasePool, db_conn, hs):
         super().__init__(database, db_conn, hs)
-        self._mau_stats_only = hs.config.mau_stats_only
+        self._mau_stats_only = hs.config.server.mau_stats_only
         # Do not add more reserved users than the total allowable number
         self.db_pool.new_transaction(
@@ -221,7 +221,7 @@ class MonthlyActiveUsersStore(MonthlyActiveUsersWorkerStore):
             [],
             [],
             self._initialise_reserved_users,
-            hs.config.mau_limits_reserved_threepids[: self._max_mau_value],
+            hs.config.server.mau_limits_reserved_threepids[: self._max_mau_value],
         )
     def _initialise_reserved_users(self, txn, threepids):
diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py
index c83089ee63..7279b0924e 100644
--- a/synapse/storage/databases/main/registration.py
+++ b/synapse/storage/databases/main/registration.py
@@ -207,7 +207,7 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
            return False
        now = self._clock.time_msec()
-        trial_duration_ms = self.config.mau_trial_days * 24 * 60 * 60 * 1000
+        trial_duration_ms = self.config.server.mau_trial_days * 24 * 60 * 60 * 1000
        is_trial = (now - info["creation_ts"] * 1000) < trial_duration_ms
        return is_trial
diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py
index 118b390e93..d69eaf80ce 100644
--- a/synapse/storage/databases/main/room.py
+++ b/synapse/storage/databases/main/room.py
@@ -679,8 +679,8 @@ class RoomWorkerStore(SQLBaseStore):
         # policy.
         if not ret:
             return {
-                "min_lifetime": self.config.retention_default_min_lifetime,
-                "max_lifetime": self.config.retention_default_max_lifetime,
+                "min_lifetime": self.config.server.retention_default_min_lifetime,
+                "max_lifetime": self.config.server.retention_default_max_lifetime,
             }
         row = ret[0]
@@ -690,10 +690,10 @@ class RoomWorkerStore(SQLBaseStore):
         # The default values will be None if no default policy has been defined, or if one
         # of the attributes is missing from the default policy.
         if row["min_lifetime"] is None:
-            row["min_lifetime"] = self.config.retention_default_min_lifetime
+            row["min_lifetime"] = self.config.server.retention_default_min_lifetime
         if row["max_lifetime"] is None:
-            row["max_lifetime"] = self.config.retention_default_max_lifetime
+            row["max_lifetime"] = self.config.server.retention_default_max_lifetime
         return row
diff --git a/synapse/storage/databases/main/search.py b/synapse/storage/databases/main/search.py
index 2a1e99e17a..c85383c975 100644
--- a/synapse/storage/databases/main/search.py
+++ b/synapse/storage/databases/main/search.py
@@ -51,7 +51,7 @@ class SearchWorkerStore(SQLBaseStore):
            txn:
            entries: entries to be added to the table
        """
-        if not self.hs.config.enable_search:
+        if not self.hs.config.server.enable_search:
             return
         if isinstance(self.database_engine, PostgresEngine):
             sql = (
@@ -105,7 +105,7 @@ class SearchBackgroundUpdateStore(SearchWorkerStore):
     def __init__(self, database: DatabasePool, db_conn, hs):
         super().__init__(database, db_conn, hs)
-        if not hs.config.enable_search:
+        if not hs.config.server.enable_search:
             return
         self.db_pool.updates.register_background_update_handler(
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index f31880b8ec..a63eaddfdc 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -366,7 +366,7 @@ def _upgrade_existing_database(
                 + "new for the server to understand"
             )
-    # some of the deltas assume that config.server_name is set correctly, so now
+    # some of the deltas assume that server_name is set correctly, so now
     # is a good time to run the sanity check.
if not is_empty and "main" in databases: from synapse.storage.databases.main import check_database_before_upgrade diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index cccff7af26..3aa9ba3c43 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -217,7 +217,7 @@ class AuthTestCase(unittest.HomeserverTestCase): user_id = "@baldrick:matrix.org" macaroon = pymacaroons.Macaroon( - location=self.hs.config.server_name, + location=self.hs.config.server.server_name, identifier="key", key=self.hs.config.key.macaroon_secret_key, ) @@ -239,7 +239,7 @@ class AuthTestCase(unittest.HomeserverTestCase): user_id = "@baldrick:matrix.org" macaroon = pymacaroons.Macaroon( - location=self.hs.config.server_name, + location=self.hs.config.server.server_name, identifier="key", key=self.hs.config.key.macaroon_secret_key, ) @@ -268,7 +268,7 @@ class AuthTestCase(unittest.HomeserverTestCase): self.store.get_monthly_active_count = simple_async_mock(lots_of_users) e = self.get_failure(self.auth.check_auth_blocking(), ResourceLimitError) - self.assertEquals(e.value.admin_contact, self.hs.config.admin_contact) + self.assertEquals(e.value.admin_contact, self.hs.config.server.admin_contact) self.assertEquals(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED) self.assertEquals(e.value.code, 403) @@ -303,7 +303,7 @@ class AuthTestCase(unittest.HomeserverTestCase): appservice = ApplicationService( "abcd", - self.hs.config.server_name, + self.hs.config.server.server_name, id="1234", namespaces={ "users": [{"regex": "@_appservice.*:sender", "exclusive": True}] @@ -332,7 +332,7 @@ class AuthTestCase(unittest.HomeserverTestCase): appservice = ApplicationService( "abcd", - self.hs.config.server_name, + self.hs.config.server.server_name, id="1234", namespaces={ "users": [{"regex": "@_appservice.*:sender", "exclusive": True}] @@ -372,7 +372,7 @@ class AuthTestCase(unittest.HomeserverTestCase): self.auth_blocking._hs_disabled = True self.auth_blocking._hs_disabled_message = "Reason for being disabled" e = self.get_failure(self.auth.check_auth_blocking(), ResourceLimitError) - self.assertEquals(e.value.admin_contact, self.hs.config.admin_contact) + self.assertEquals(e.value.admin_contact, self.hs.config.server.admin_contact) self.assertEquals(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED) self.assertEquals(e.value.code, 403) @@ -387,7 +387,7 @@ class AuthTestCase(unittest.HomeserverTestCase): self.auth_blocking._hs_disabled = True self.auth_blocking._hs_disabled_message = "Reason for being disabled" e = self.get_failure(self.auth.check_auth_blocking(), ResourceLimitError) - self.assertEquals(e.value.admin_contact, self.hs.config.admin_contact) + self.assertEquals(e.value.admin_contact, self.hs.config.server.admin_contact) self.assertEquals(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED) self.assertEquals(e.value.code, 403) diff --git a/tests/federation/test_federation_server.py b/tests/federation/test_federation_server.py index 0b60cc4261..03e1e11f49 100644 --- a/tests/federation/test_federation_server.py +++ b/tests/federation/test_federation_server.py @@ -120,7 +120,7 @@ class StateQueryTests(unittest.FederatingHomeserverTestCase): self.assertEqual( channel.json_body["room_version"], - self.hs.config.default_room_version.identifier, + self.hs.config.server.default_room_version.identifier, ) members = set( diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index d3efb67e3e..bd05a2c2d1 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -175,20 
         self.assertTrue(result_token is not None)
     def test_mau_limits_when_disabled(self):
-        self.hs.config.limit_usage_by_mau = False
+        self.hs.config.server.limit_usage_by_mau = False
         # Ensure does not throw exception
         self.get_success(self.get_or_create_user(self.requester, "a", "display_name"))
     def test_get_or_create_user_mau_not_blocked(self):
-        self.hs.config.limit_usage_by_mau = True
+        self.hs.config.server.limit_usage_by_mau = True
         self.store.count_monthly_users = Mock(
-            return_value=make_awaitable(self.hs.config.max_mau_value - 1)
+            return_value=make_awaitable(self.hs.config.server.max_mau_value - 1)
         )
         # Ensure does not throw exception
         self.get_success(self.get_or_create_user(self.requester, "c", "User"))
     def test_get_or_create_user_mau_blocked(self):
-        self.hs.config.limit_usage_by_mau = True
+        self.hs.config.server.limit_usage_by_mau = True
         self.store.get_monthly_active_count = Mock(
             return_value=make_awaitable(self.lots_of_users)
         )
@@ -198,7 +198,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
         )
         self.store.get_monthly_active_count = Mock(
-            return_value=make_awaitable(self.hs.config.max_mau_value)
+            return_value=make_awaitable(self.hs.config.server.max_mau_value)
         )
         self.get_failure(
             self.get_or_create_user(self.requester, "b", "display_name"),
@@ -206,7 +206,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
         )
     def test_register_mau_blocked(self):
-        self.hs.config.limit_usage_by_mau = True
+        self.hs.config.server.limit_usage_by_mau = True
         self.store.get_monthly_active_count = Mock(
             return_value=make_awaitable(self.lots_of_users)
         )
@@ -215,7 +215,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
         )
         self.store.get_monthly_active_count = Mock(
-            return_value=make_awaitable(self.hs.config.max_mau_value)
+            return_value=make_awaitable(self.hs.config.server.max_mau_value)
         )
         self.get_failure(
             self.handler.register_user(localpart="local_part"), ResourceLimitError
diff --git a/tests/http/test_fedclient.py b/tests/http/test_fedclient.py
index d9a8b077d3..638babae69 100644
--- a/tests/http/test_fedclient.py
+++ b/tests/http/test_fedclient.py
@@ -226,7 +226,7 @@ class FederationClientTests(HomeserverTestCase):
         """Ensure that Synapse does not try to connect to blacklisted IPs"""
         # Set up the ip_range blacklist
-        self.hs.config.federation_ip_range_blacklist = IPSet(
+        self.hs.config.server.federation_ip_range_blacklist = IPSet(
             ["127.0.0.0/8", "fe80::/64"]
         )
         self.reactor.lookups["internal"] = "127.0.0.1"
diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py
index ee3ae9cce4..a285d5a7fe 100644
--- a/tests/rest/admin/test_user.py
+++ b/tests/rest/admin/test_user.py
@@ -422,7 +422,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
         # Set monthly active users to the limit
         store.get_monthly_active_count = Mock(
-            return_value=make_awaitable(self.hs.config.max_mau_value)
+            return_value=make_awaitable(self.hs.config.server.max_mau_value)
         )
         # Check that the blocking of monthly active users is working as expected
         # The registration of a new user fails due to the limit
@@ -1485,7 +1485,7 @@ class UserRestTestCase(unittest.HomeserverTestCase):
         # Set monthly active users to the limit
         self.store.get_monthly_active_count = Mock(
-            return_value=make_awaitable(self.hs.config.max_mau_value)
+            return_value=make_awaitable(self.hs.config.server.max_mau_value)
         )
         # Check that the blocking of monthly active users is working as expected
         # The registration of a new user fails due to the limit
@@ -1522,7 +1522,7 @@ class UserRestTestCase(unittest.HomeserverTestCase):
         # Set monthly active users to the limit
         self.store.get_monthly_active_count = Mock(
-            return_value=make_awaitable(self.hs.config.max_mau_value)
+            return_value=make_awaitable(self.hs.config.server.max_mau_value)
         )
         # Check that the blocking of monthly active users is working as expected
         # The registration of a new user fails due to the limit
diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py
index 64b0b8458b..2f44547bfb 100644
--- a/tests/rest/client/test_account.py
+++ b/tests/rest/client/test_account.py
@@ -516,7 +516,7 @@ class WhoamiTestCase(unittest.HomeserverTestCase):
         appservice = ApplicationService(
             as_token,
-            self.hs.config.server_name,
+            self.hs.config.server.server_name,
             id="1234",
             namespaces={"users": [{"regex": user_id, "exclusive": True}]},
             sender=user_id,
diff --git a/tests/rest/client/test_capabilities.py b/tests/rest/client/test_capabilities.py
index 422361b62a..b9e3602552 100644
--- a/tests/rest/client/test_capabilities.py
+++ b/tests/rest/client/test_capabilities.py
@@ -55,7 +55,7 @@ class CapabilitiesTestCase(unittest.HomeserverTestCase):
         self.assertTrue(room_version in KNOWN_ROOM_VERSIONS, "" + room_version)
         self.assertEqual(
-            self.config.default_room_version.identifier,
+            self.config.server.default_room_version.identifier,
             capabilities["m.room_versions"]["default"],
         )
diff --git a/tests/rest/client/test_presence.py b/tests/rest/client/test_presence.py
index 1d152352d1..56fe1a3d01 100644
--- a/tests/rest/client/test_presence.py
+++ b/tests/rest/client/test_presence.py
@@ -50,7 +50,7 @@ class PresenceTestCase(unittest.HomeserverTestCase):
        PUT to the status endpoint with use_presence enabled will call
        set_state on the presence handler.
        """
-        self.hs.config.use_presence = True
+        self.hs.config.server.use_presence = True
         body = {"presence": "here", "status_msg": "beep boop"}
         channel = self.make_request(
diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py
index 72a5a11b46..af135d57e1 100644
--- a/tests/rest/client/test_register.py
+++ b/tests/rest/client/test_register.py
@@ -50,7 +50,7 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase):
         appservice = ApplicationService(
             as_token,
-            self.hs.config.server_name,
+            self.hs.config.server.server_name,
             id="1234",
             namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]},
             sender="@as:test",
@@ -74,7 +74,7 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase):
         appservice = ApplicationService(
             as_token,
-            self.hs.config.server_name,
+            self.hs.config.server.server_name,
             id="1234",
             namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]},
             sender="@as:test",
diff --git a/tests/server_notices/test_resource_limits_server_notices.py b/tests/server_notices/test_resource_limits_server_notices.py
index 7f25200a5d..36c495954f 100644
--- a/tests/server_notices/test_resource_limits_server_notices.py
+++ b/tests/server_notices/test_resource_limits_server_notices.py
@@ -346,7 +346,7 @@ class TestResourceLimitsServerNoticesWithRealRooms(unittest.HomeserverTestCase):
         invites = []
         # Register as many users as the MAU limit allows.
-        for i in range(self.hs.config.max_mau_value):
+        for i in range(self.hs.config.server.max_mau_value):
             localpart = "user%d" % i
             user_id = self.register_user(localpart, "password")
             tok = self.login(localpart, "password")
diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py
index 944dbc34a2..d6b4cdd788 100644
--- a/tests/storage/test_monthly_active_users.py
+++ b/tests/storage/test_monthly_active_users.py
@@ -51,7 +51,7 @@ class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase):
     @override_config({"max_mau_value": 3, "mau_limit_reserved_threepids": gen_3pids(3)})
     def test_initialise_reserved_users(self):
-        threepids = self.hs.config.mau_limits_reserved_threepids
+        threepids = self.hs.config.server.mau_limits_reserved_threepids
         # register three users, of which two have reserved 3pids, and a third
         # which is a support user.
@@ -101,9 +101,9 @@ class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase):
         # XXX some of this is redundant. poking things into the config shouldn't
         # work, and in any case it's not obvious what we expect to happen when
         # we advance the reactor.
-        self.hs.config.max_mau_value = 0
+        self.hs.config.server.max_mau_value = 0
         self.reactor.advance(FORTY_DAYS)
-        self.hs.config.max_mau_value = 5
+        self.hs.config.server.max_mau_value = 5
         self.get_success(self.store.reap_monthly_active_users())
@@ -183,7 +183,7 @@ class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase):
         self.get_success(d)
         count = self.get_success(self.store.get_monthly_active_count())
-        self.assertEqual(count, self.hs.config.max_mau_value)
+        self.assertEqual(count, self.hs.config.server.max_mau_value)
         self.reactor.advance(FORTY_DAYS)
@@ -199,7 +199,7 @@ class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase):
     def test_reap_monthly_active_users_reserved_users(self):
        """Tests that reaping correctly handles reaping where reserved users are
        present"""
-        threepids = self.hs.config.mau_limits_reserved_threepids
+        threepids = self.hs.config.server.mau_limits_reserved_threepids
         initial_users = len(threepids)
         reserved_user_number = initial_users - 1
         for i in range(initial_users):
@@ -234,7 +234,7 @@ class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase):
         self.get_success(d)
         count = self.get_success(self.store.get_monthly_active_count())
-        self.assertEqual(count, self.hs.config.max_mau_value)
+        self.assertEqual(count, self.hs.config.server.max_mau_value)
     def test_populate_monthly_users_is_guest(self):
         # Test that guest users are not added to mau list
@@ -294,7 +294,7 @@ class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase):
             {"medium": "email", "address": user2_email},
         ]
-        self.hs.config.mau_limits_reserved_threepids = threepids
+        self.hs.config.server.mau_limits_reserved_threepids = threepids
         d = self.store.db_pool.runInteraction(
             "initialise", self.store._initialise_reserved_users, threepids
         )
diff --git a/tests/test_mau.py b/tests/test_mau.py
index 66111eb367..80ab40e255 100644
--- a/tests/test_mau.py
+++ b/tests/test_mau.py
@@ -165,7 +165,7 @@ class TestMauLimit(unittest.HomeserverTestCase):
     @override_config({"mau_trial_days": 1})
     def test_trial_users_cant_come_back(self):
-        self.hs.config.mau_trial_days = 1
+        self.hs.config.server.mau_trial_days = 1
         # We should be able to register more than the limit initially
         token1 = self.create_user("kermit1")
diff --git a/tests/unittest.py b/tests/unittest.py
index 7a6f5954d0..6d5d87cb78 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -232,7 +232,7 @@ class HomeserverTestCase(TestCase):
         # Honour the `use_frozen_dicts` config option. We have to do this
         # manually because this is taken care of in the app `start` code, which
         # we don't run. Plus we want to reset it on tearDown.
-        events.USE_FROZEN_DICTS = self.hs.config.use_frozen_dicts
+        events.USE_FROZEN_DICTS = self.hs.config.server.use_frozen_dicts
         if self.hs is None:
             raise Exception("No homeserver returned from make_homeserver.")
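
Finally, a hedged sketch of how a migrated setting might be exercised in a test, mirroring the `override_config` / `HomeserverTestCase` pattern already visible in the test files above. The import path and the test name are assumptions, and the snippet only makes sense inside Synapse's own tests/ tree.

    # Sketch only: assumes Synapse's test harness (tests/unittest.py, as touched
    # above) is importable, i.e. this lives inside the Synapse repository.
    from tests.unittest import HomeserverTestCase, override_config


    class MauConfigSketchTestCase(HomeserverTestCase):
        @override_config({"max_mau_value": 3})
        def test_limit_is_read_from_the_server_section(self):
            # Prefer the section-scoped attribute over the legacy flat lookup.
            self.assertEqual(self.hs.config.server.max_mau_value, 3)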