diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 548f6dcde9..749bc1deb9 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -86,11 +86,11 @@ def start_worker_reactor(appname, config, run_command=reactor.run):
start_reactor(
appname,
- soft_file_limit=config.soft_file_limit,
- gc_thresholds=config.gc_thresholds,
+ soft_file_limit=config.server.soft_file_limit,
+ gc_thresholds=config.server.gc_thresholds,
pid_file=config.worker.worker_pid_file,
daemonize=config.worker.worker_daemonize,
- print_pidfile=config.print_pidfile,
+ print_pidfile=config.server.print_pidfile,
logger=logger,
run_command=run_command,
)
@@ -298,7 +298,7 @@ def refresh_certificate(hs):
Refresh the TLS certificates that Synapse is using by re-reading them from
disk and updating the TLS context factories to use them.
"""
- if not hs.config.has_tls_listener():
+ if not hs.config.server.has_tls_listener():
return
hs.config.read_certificate_from_disk()
diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py
index f2c5b75247..556bcc124e 100644
--- a/synapse/app/admin_cmd.py
+++ b/synapse/app/admin_cmd.py
@@ -195,14 +195,14 @@ def start(config_options):
config.logging.no_redirect_stdio = True
# Explicitly disable background processes
- config.update_user_directory = False
+ config.server.update_user_directory = False
config.worker.run_background_tasks = False
config.start_pushers = False
config.pusher_shard_config.instances = []
config.send_federation = False
config.federation_shard_config.instances = []
- synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
+ synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
ss = AdminCmdServer(
config.server.server_name,
diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py
index 3036e1b4a0..7489f31d9a 100644
--- a/synapse/app/generic_worker.py
+++ b/synapse/app/generic_worker.py
@@ -462,7 +462,7 @@ def start(config_options):
# For other worker types we force this to off.
config.server.update_user_directory = False
- synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
+ synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage
if config.server.gc_seconds:
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 205831dcda..2b2d4bbf83 100644
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -248,7 +248,7 @@ class SynapseHomeServer(HomeServer):
resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
if name == "webclient":
- webclient_loc = self.config.web_client_location
+ webclient_loc = self.config.server.web_client_location
if webclient_loc is None:
logger.warning(
@@ -343,7 +343,7 @@ def setup(config_options):
# generating config files and shouldn't try to continue.
sys.exit(0)
- events.USE_FROZEN_DICTS = config.use_frozen_dicts
+ events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage
if config.server.gc_seconds:
@@ -439,11 +439,11 @@ def run(hs):
_base.start_reactor(
"synapse-homeserver",
- soft_file_limit=hs.config.soft_file_limit,
- gc_thresholds=hs.config.gc_thresholds,
- pid_file=hs.config.pid_file,
- daemonize=hs.config.daemonize,
- print_pidfile=hs.config.print_pidfile,
+ soft_file_limit=hs.config.server.soft_file_limit,
+ gc_thresholds=hs.config.server.gc_thresholds,
+ pid_file=hs.config.server.pid_file,
+ daemonize=hs.config.server.daemonize,
+ print_pidfile=hs.config.server.print_pidfile,
logger=logger,
)
diff --git a/synapse/app/phone_stats_home.py b/synapse/app/phone_stats_home.py
index 49e7a45e5c..fcd01e833c 100644
--- a/synapse/app/phone_stats_home.py
+++ b/synapse/app/phone_stats_home.py
@@ -74,7 +74,7 @@ async def phone_stats_home(hs, stats, stats_process=_stats_process):
store = hs.get_datastore()
stats["homeserver"] = hs.config.server.server_name
- stats["server_context"] = hs.config.server_context
+ stats["server_context"] = hs.config.server.server_context
stats["timestamp"] = now
stats["uptime_seconds"] = uptime
version = sys.version_info
@@ -171,7 +171,7 @@ def start_phone_stats_home(hs):
current_mau_count_by_service = {}
reserved_users = ()
store = hs.get_datastore()
- if hs.config.limit_usage_by_mau or hs.config.mau_stats_only:
+ if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
current_mau_count = await store.get_monthly_active_count()
current_mau_count_by_service = (
await store.get_monthly_active_count_by_service()
@@ -183,9 +183,9 @@ def start_phone_stats_home(hs):
current_mau_by_service_gauge.labels(app_service).set(float(count))
registered_reserved_users_mau_gauge.set(float(len(reserved_users)))
- max_mau_gauge.set(float(hs.config.max_mau_value))
+ max_mau_gauge.set(float(hs.config.server.max_mau_value))
- if hs.config.limit_usage_by_mau or hs.config.mau_stats_only:
+ if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
generate_monthly_active_users()
clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000)
# End of monthly active user settings
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index d974a1a2a8..26152b0924 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -327,7 +327,7 @@ class RootConfig:
"""
Redirect lookups on this object either to config objects, or values on
config objects, so that `config.tls.blah` works, as well as legacy uses
- of things like `config.server_name`. It will first look up the config
+ of things like `config.server_name` (as opposed to `config.server.server_name`). It will first look up the config
section name, and then values on those config classes.
"""
if item in self._configs.keys():
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 041412d7ad..818b806357 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -1,6 +1,4 @@
-# Copyright 2014-2016 OpenMarket Ltd
-# Copyright 2017-2018 New Vector Ltd
-# Copyright 2019 The Matrix.org Foundation C.I.C.
+# Copyright 2014-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/synapse/events/presence_router.py b/synapse/events/presence_router.py
index eb4556cdc1..68b8b19024 100644
--- a/synapse/events/presence_router.py
+++ b/synapse/events/presence_router.py
@@ -45,11 +45,11 @@ def load_legacy_presence_router(hs: "HomeServer"):
configuration, and registers the hooks they implement.
"""
- if hs.config.presence_router_module_class is None:
+ if hs.config.server.presence_router_module_class is None:
return
- module = hs.config.presence_router_module_class
- config = hs.config.presence_router_config
+ module = hs.config.server.presence_router_module_class
+ config = hs.config.server.presence_router_config
api = hs.get_module_api()
presence_router = module(config=config, module_api=api)
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index f86113a448..a13fb0148f 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -372,7 +372,7 @@ class EventClientSerializer:
def __init__(self, hs):
self.store = hs.get_datastore()
self.experimental_msc1849_support_enabled = (
- hs.config.experimental_msc1849_support_enabled
+ hs.config.server.experimental_msc1849_support_enabled
)
async def serialize_event(
diff --git a/synapse/federation/transport/server/__init__.py b/synapse/federation/transport/server/__init__.py
index 95176ba6f9..c32539bf5a 100644
--- a/synapse/federation/transport/server/__init__.py
+++ b/synapse/federation/transport/server/__init__.py
@@ -117,7 +117,7 @@ class PublicRoomList(BaseFederationServlet):
):
super().__init__(hs, authenticator, ratelimiter, server_name)
self.handler = hs.get_room_list_handler()
- self.allow_access = hs.config.allow_public_rooms_over_federation
+ self.allow_access = hs.config.server.allow_public_rooms_over_federation
async def on_GET(
self, origin: str, content: Literal[None], query: Dict[bytes, List[bytes]]
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index 5cfba3c817..9078781d5a 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -49,7 +49,7 @@ class DirectoryHandler(BaseHandler):
self.store = hs.get_datastore()
self.config = hs.config
self.enable_room_list_search = hs.config.roomdirectory.enable_room_list_search
- self.require_membership = hs.config.require_membership_for_aliases
+ self.require_membership = hs.config.server.require_membership_for_aliases
self.third_party_event_rules = hs.get_third_party_event_rules()
self.federation = hs.get_federation_client()
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 16c435ee86..3b0b895b07 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -762,7 +762,7 @@ class FederationHandler(BaseHandler):
if is_blocked:
raise SynapseError(403, "This room has been blocked on this server")
- if self.hs.config.block_non_admin_invites:
+ if self.hs.config.server.block_non_admin_invites:
raise SynapseError(403, "This server does not accept room invites")
if not await self.spam_checker.user_may_invite(
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index fe8a995892..a0640fcac0 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -57,7 +57,7 @@ class IdentityHandler(BaseHandler):
self.http_client = SimpleHttpClient(hs)
# An HTTP client for contacting identity servers specified by clients.
self.blacklisting_http_client = SimpleHttpClient(
- hs, ip_blacklist=hs.config.federation_ip_range_blacklist
+ hs, ip_blacklist=hs.config.server.federation_ip_range_blacklist
)
self.federation_http_client = hs.get_federation_http_client()
self.hs = hs
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 39c18ecf99..3b8cc50ec0 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -81,7 +81,7 @@ class MessageHandler:
self.storage = hs.get_storage()
self.state_store = self.storage.state
self._event_serializer = hs.get_event_client_serializer()
- self._ephemeral_events_enabled = hs.config.enable_ephemeral_messages
+ self._ephemeral_events_enabled = hs.config.server.enable_ephemeral_messages
# The scheduled call to self._expire_event. None if no call is currently
# scheduled.
@@ -415,7 +415,9 @@ class EventCreationHandler:
self.server_name = hs.hostname
self.notifier = hs.get_notifier()
self.config = hs.config
- self.require_membership_for_aliases = hs.config.require_membership_for_aliases
+ self.require_membership_for_aliases = (
+ hs.config.server.require_membership_for_aliases
+ )
self._events_shard_config = self.config.worker.events_shard_config
self._instance_name = hs.get_instance_name()
@@ -425,7 +427,7 @@ class EventCreationHandler:
Membership.JOIN,
Membership.KNOCK,
}
- if self.hs.config.include_profile_data_on_invite:
+ if self.hs.config.server.include_profile_data_on_invite:
self.membership_types_to_include_profile_data_in.add(Membership.INVITE)
self.send_event = ReplicationSendEventRestServlet.make_client(hs)
@@ -461,11 +463,11 @@ class EventCreationHandler:
#
self._rooms_to_exclude_from_dummy_event_insertion: Dict[str, int] = {}
# The number of forward extremities before a dummy event is sent.
- self._dummy_events_threshold = hs.config.dummy_events_threshold
+ self._dummy_events_threshold = hs.config.server.dummy_events_threshold
if (
self.config.worker.run_background_tasks
- and self.config.cleanup_extremities_with_dummy_events
+ and self.config.server.cleanup_extremities_with_dummy_events
):
self.clock.looping_call(
lambda: run_as_background_process(
@@ -477,7 +479,7 @@ class EventCreationHandler:
self._message_handler = hs.get_message_handler()
- self._ephemeral_events_enabled = hs.config.enable_ephemeral_messages
+ self._ephemeral_events_enabled = hs.config.server.enable_ephemeral_messages
self._external_cache = hs.get_external_cache()
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index a5301ece6f..176e4dfdd4 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -85,12 +85,18 @@ class PaginationHandler:
self._purges_by_id: Dict[str, PurgeStatus] = {}
self._event_serializer = hs.get_event_client_serializer()
- self._retention_default_max_lifetime = hs.config.retention_default_max_lifetime
+ self._retention_default_max_lifetime = (
+ hs.config.server.retention_default_max_lifetime
+ )
- self._retention_allowed_lifetime_min = hs.config.retention_allowed_lifetime_min
- self._retention_allowed_lifetime_max = hs.config.retention_allowed_lifetime_max
+ self._retention_allowed_lifetime_min = (
+ hs.config.server.retention_allowed_lifetime_min
+ )
+ self._retention_allowed_lifetime_max = (
+ hs.config.server.retention_allowed_lifetime_max
+ )
- if hs.config.worker.run_background_tasks and hs.config.retention_enabled:
+ if hs.config.worker.run_background_tasks and hs.config.server.retention_enabled:
# Run the purge jobs described in the configuration file.
for job in hs.config.server.retention_purge_jobs:
logger.info("Setting up purge job with config: %s", job)
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index b23a1541bc..425c0d4973 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -397,7 +397,7 @@ class ProfileHandler(BaseHandler):
# when building a membership event. In this case, we must allow the
# lookup.
if (
- not self.hs.config.limit_profile_requests_to_users_who_share_rooms
+ not self.hs.config.server.limit_profile_requests_to_users_who_share_rooms
or not requester
):
return
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 4f99f137a2..4a7ccb882e 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -854,7 +854,7 @@ class RegistrationHandler(BaseHandler):
# Necessary due to auth checks prior to the threepid being
# written to the db
if is_threepid_reserved(
- self.hs.config.mau_limits_reserved_threepids, threepid
+ self.hs.config.server.mau_limits_reserved_threepids, threepid
):
await self.store.upsert_monthly_active_user(user_id)
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index dc4fab2223..bf8a85f563 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -666,7 +666,7 @@ class RoomCreationHandler(BaseHandler):
await self.ratelimit(requester)
room_version_id = config.get(
- "room_version", self.config.default_room_version.identifier
+ "room_version", self.config.server.default_room_version.identifier
)
if not isinstance(room_version_id, str):
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 1a56c82fbd..02103f6c9a 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -90,7 +90,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
self.third_party_event_rules = hs.get_third_party_event_rules()
self._server_notices_mxid = self.config.servernotices.server_notices_mxid
self._enable_lookup = hs.config.enable_3pid_lookup
- self.allow_per_room_profiles = self.config.allow_per_room_profiles
+ self.allow_per_room_profiles = self.config.server.allow_per_room_profiles
self._join_rate_limiter_local = Ratelimiter(
store=self.store,
@@ -617,7 +617,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
is_requester_admin = await self.auth.is_server_admin(requester.user)
if not is_requester_admin:
- if self.config.block_non_admin_invites:
+ if self.config.server.block_non_admin_invites:
logger.info(
"Blocking invite: user is not admin and non-admin "
"invites disabled"
@@ -1222,7 +1222,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
Raises:
ShadowBanError if the requester has been shadow-banned.
"""
- if self.config.block_non_admin_invites:
+ if self.config.server.block_non_admin_invites:
is_requester_admin = await self.auth.is_server_admin(requester.user)
if not is_requester_admin:
raise SynapseError(
@@ -1420,7 +1420,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
Returns: bool of whether the complexity is too great, or None
if unable to be fetched
"""
- max_complexity = self.hs.config.limit_remote_rooms.complexity
+ max_complexity = self.hs.config.server.limit_remote_rooms.complexity
complexity = await self.federation_handler.get_room_complexity(
remote_room_hosts, room_id
)
@@ -1436,7 +1436,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
Args:
room_id: The room ID to check for complexity.
"""
- max_complexity = self.hs.config.limit_remote_rooms.complexity
+ max_complexity = self.hs.config.server.limit_remote_rooms.complexity
complexity = await self.store.get_room_complexity(room_id)
return complexity["v1"] > max_complexity
@@ -1472,7 +1472,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
if too_complex is True:
raise SynapseError(
code=400,
- msg=self.hs.config.limit_remote_rooms.complexity_error,
+ msg=self.hs.config.server.limit_remote_rooms.complexity_error,
errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
)
@@ -1507,7 +1507,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
)
raise SynapseError(
code=400,
- msg=self.hs.config.limit_remote_rooms.complexity_error,
+ msg=self.hs.config.server.limit_remote_rooms.complexity_error,
errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
)
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index 8226d6f5a1..6d3333ee00 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -105,7 +105,7 @@ class SearchHandler(BaseHandler):
dict to be returned to the client with results of search
"""
- if not self.hs.config.enable_search:
+ if not self.hs.config.server.enable_search:
raise SynapseError(400, "Search is disabled on this homeserver")
batch_group = None
diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py
index b91e7cb501..f4430ce3c9 100644
--- a/synapse/handlers/user_directory.py
+++ b/synapse/handlers/user_directory.py
@@ -60,7 +60,7 @@ class UserDirectoryHandler(StateDeltasHandler):
self.clock = hs.get_clock()
self.notifier = hs.get_notifier()
self.is_mine_id = hs.is_mine_id
- self.update_user_directory = hs.config.update_user_directory
+ self.update_user_directory = hs.config.server.update_user_directory
self.search_all_users = hs.config.userdirectory.user_directory_search_all_users
self.spam_checker = hs.get_spam_checker()
# The current position in the current_state_delta stream
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index cdc36b8d25..4f59224686 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -327,23 +327,23 @@ class MatrixFederationHttpClient:
self.reactor = hs.get_reactor()
user_agent = hs.version_string
- if hs.config.user_agent_suffix:
- user_agent = "%s %s" % (user_agent, hs.config.user_agent_suffix)
+ if hs.config.server.user_agent_suffix:
+ user_agent = "%s %s" % (user_agent, hs.config.server.user_agent_suffix)
user_agent = user_agent.encode("ascii")
federation_agent = MatrixFederationAgent(
self.reactor,
tls_client_options_factory,
user_agent,
- hs.config.federation_ip_range_whitelist,
- hs.config.federation_ip_range_blacklist,
+ hs.config.server.federation_ip_range_whitelist,
+ hs.config.server.federation_ip_range_blacklist,
)
# Use a BlacklistingAgentWrapper to prevent circumventing the IP
# blacklist via IP literals in server names
self.agent = BlacklistingAgentWrapper(
federation_agent,
- ip_blacklist=hs.config.federation_ip_range_blacklist,
+ ip_blacklist=hs.config.server.federation_ip_range_blacklist,
)
self.clock = hs.get_clock()
diff --git a/synapse/replication/tcp/resource.py b/synapse/replication/tcp/resource.py
index 030852cb5b..80f9b23bfd 100644
--- a/synapse/replication/tcp/resource.py
+++ b/synapse/replication/tcp/resource.py
@@ -71,7 +71,7 @@ class ReplicationStreamer:
self.notifier = hs.get_notifier()
self._instance_name = hs.get_instance_name()
- self._replication_torture_level = hs.config.replication_torture_level
+ self._replication_torture_level = hs.config.server.replication_torture_level
self.notifier.add_replication_callback(self.on_notifier_poke)
diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py
index bacb828330..fff133ef10 100644
--- a/synapse/rest/client/account.py
+++ b/synapse/rest/client/account.py
@@ -119,7 +119,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
)
if existing_user_id is None:
- if self.config.request_token_inhibit_3pid_errors:
+ if self.config.server.request_token_inhibit_3pid_errors:
# Make the client think the operation succeeded. See the rationale in the
# comments for request_token_inhibit_3pid_errors.
# Also wait for some random amount of time between 100ms and 1s to make it
@@ -403,7 +403,7 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
existing_user_id = await self.store.get_user_id_by_threepid("email", email)
if existing_user_id is not None:
- if self.config.request_token_inhibit_3pid_errors:
+ if self.config.server.request_token_inhibit_3pid_errors:
# Make the client think the operation succeeded. See the rationale in the
# comments for request_token_inhibit_3pid_errors.
# Also wait for some random amount of time between 100ms and 1s to make it
@@ -486,7 +486,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
existing_user_id = await self.store.get_user_id_by_threepid("msisdn", msisdn)
if existing_user_id is not None:
- if self.hs.config.request_token_inhibit_3pid_errors:
+ if self.hs.config.server.request_token_inhibit_3pid_errors:
# Make the client think the operation succeeded. See the rationale in the
# comments for request_token_inhibit_3pid_errors.
# Also wait for some random amount of time between 100ms and 1s to make it
@@ -857,8 +857,8 @@ def assert_valid_next_link(hs: "HomeServer", next_link: str) -> None:
# If the domain whitelist is set, the domain must be in it
if (
valid
- and hs.config.next_link_domain_whitelist is not None
- and next_link_parsed.hostname not in hs.config.next_link_domain_whitelist
+ and hs.config.server.next_link_domain_whitelist is not None
+ and next_link_parsed.hostname not in hs.config.server.next_link_domain_whitelist
):
valid = False
diff --git a/synapse/rest/client/capabilities.py b/synapse/rest/client/capabilities.py
index 65b3b5ce2c..d6b6256413 100644
--- a/synapse/rest/client/capabilities.py
+++ b/synapse/rest/client/capabilities.py
@@ -44,10 +44,10 @@ class CapabilitiesRestServlet(RestServlet):
await self.auth.get_user_by_req(request, allow_guest=True)
change_password = self.auth_handler.can_change_password()
- response = {
+ response: JsonDict = {
"capabilities": {
"m.room_versions": {
- "default": self.config.default_room_version.identifier,
+ "default": self.config.server.default_room_version.identifier,
"available": {
v.identifier: v.disposition
for v in KNOWN_ROOM_VERSIONS.values()
diff --git a/synapse/rest/client/filter.py b/synapse/rest/client/filter.py
index 6ed60c7418..cc1c2f9731 100644
--- a/synapse/rest/client/filter.py
+++ b/synapse/rest/client/filter.py
@@ -90,7 +90,7 @@ class CreateFilterRestServlet(RestServlet):
raise AuthError(403, "Can only create filters for local users")
content = parse_json_object_from_request(request)
- set_timeline_upper_limit(content, self.hs.config.filter_timeline_limit)
+ set_timeline_upper_limit(content, self.hs.config.server.filter_timeline_limit)
filter_id = await self.filtering.add_user_filter(
user_localpart=target_user.localpart, user_filter=content
diff --git a/synapse/rest/client/profile.py b/synapse/rest/client/profile.py
index d0f20de569..c684636c0a 100644
--- a/synapse/rest/client/profile.py
+++ b/synapse/rest/client/profile.py
@@ -41,7 +41,7 @@ class ProfileDisplaynameRestServlet(RestServlet):
) -> Tuple[int, JsonDict]:
requester_user = None
- if self.hs.config.require_auth_for_profile_requests:
+ if self.hs.config.server.require_auth_for_profile_requests:
requester = await self.auth.get_user_by_req(request)
requester_user = requester.user
@@ -94,7 +94,7 @@ class ProfileAvatarURLRestServlet(RestServlet):
) -> Tuple[int, JsonDict]:
requester_user = None
- if self.hs.config.require_auth_for_profile_requests:
+ if self.hs.config.server.require_auth_for_profile_requests:
requester = await self.auth.get_user_by_req(request)
requester_user = requester.user
@@ -146,7 +146,7 @@ class ProfileRestServlet(RestServlet):
) -> Tuple[int, JsonDict]:
requester_user = None
- if self.hs.config.require_auth_for_profile_requests:
+ if self.hs.config.server.require_auth_for_profile_requests:
requester = await self.auth.get_user_by_req(request)
requester_user = requester.user
diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py
index 48b0062cf4..a6eb6f6410 100644
--- a/synapse/rest/client/register.py
+++ b/synapse/rest/client/register.py
@@ -129,7 +129,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
)
if existing_user_id is not None:
- if self.hs.config.request_token_inhibit_3pid_errors:
+ if self.hs.config.server.request_token_inhibit_3pid_errors:
# Make the client think the operation succeeded. See the rationale in the
# comments for request_token_inhibit_3pid_errors.
# Also wait for some random amount of time between 100ms and 1s to make it
@@ -209,7 +209,7 @@ class MsisdnRegisterRequestTokenRestServlet(RestServlet):
)
if existing_user_id is not None:
- if self.hs.config.request_token_inhibit_3pid_errors:
+ if self.hs.config.server.request_token_inhibit_3pid_errors:
# Make the client think the operation succeeded. See the rationale in the
# comments for request_token_inhibit_3pid_errors.
# Also wait for some random amount of time between 100ms and 1s to make it
@@ -682,7 +682,7 @@ class RegisterRestServlet(RestServlet):
# written to the db
if threepid:
if is_threepid_reserved(
- self.hs.config.mau_limits_reserved_threepids, threepid
+ self.hs.config.server.mau_limits_reserved_threepids, threepid
):
await self.store.upsert_monthly_active_user(registered_user_id)
diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py
index bf46dc60f2..ed95189b6d 100644
--- a/synapse/rest/client/room.py
+++ b/synapse/rest/client/room.py
@@ -369,7 +369,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
# Option to allow servers to require auth when accessing
# /publicRooms via CS API. This is especially helpful in private
# federations.
- if not self.hs.config.allow_public_rooms_without_auth:
+ if not self.hs.config.server.allow_public_rooms_without_auth:
raise
# We allow people to not be authed if they're just looking at our
diff --git a/synapse/rest/client/shared_rooms.py b/synapse/rest/client/shared_rooms.py
index 1d90493eb0..09a46737de 100644
--- a/synapse/rest/client/shared_rooms.py
+++ b/synapse/rest/client/shared_rooms.py
@@ -42,7 +42,7 @@ class UserSharedRoomsServlet(RestServlet):
super().__init__()
self.auth = hs.get_auth()
self.store = hs.get_datastore()
- self.user_directory_active = hs.config.update_user_directory
+ self.user_directory_active = hs.config.server.update_user_directory
async def on_GET(
self, request: SynapseRequest, user_id: str
diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py
index 1259058b9b..913216a7c4 100644
--- a/synapse/rest/client/sync.py
+++ b/synapse/rest/client/sync.py
@@ -155,7 +155,7 @@ class SyncRestServlet(RestServlet):
try:
filter_object = json_decoder.decode(filter_id)
set_timeline_upper_limit(
- filter_object, self.hs.config.filter_timeline_limit
+ filter_object, self.hs.config.server.filter_timeline_limit
)
except Exception:
raise SynapseError(400, "Invalid filter JSON")
diff --git a/synapse/server_notices/resource_limits_server_notices.py b/synapse/server_notices/resource_limits_server_notices.py
index 073b0d754f..8522930b50 100644
--- a/synapse/server_notices/resource_limits_server_notices.py
+++ b/synapse/server_notices/resource_limits_server_notices.py
@@ -47,9 +47,9 @@ class ResourceLimitsServerNotices:
self._notifier = hs.get_notifier()
self._enabled = (
- hs.config.limit_usage_by_mau
+ hs.config.server.limit_usage_by_mau
and self._server_notices_manager.is_enabled()
- and not hs.config.hs_disabled
+ and not hs.config.server.hs_disabled
)
async def maybe_send_server_notice_to_user(self, user_id: str) -> None:
@@ -98,7 +98,7 @@ class ResourceLimitsServerNotices:
try:
if (
limit_type == LimitBlockingTypes.MONTHLY_ACTIVE_USER
- and not self._config.mau_limit_alerting
+ and not self._config.server.mau_limit_alerting
):
# We have hit the MAU limit, but MAU alerting is disabled:
# reset room if necessary and return
@@ -149,7 +149,7 @@ class ResourceLimitsServerNotices:
"body": event_body,
"msgtype": ServerNoticeMsgType,
"server_notice_type": ServerNoticeLimitReached,
- "admin_contact": self._config.admin_contact,
+ "admin_contact": self._config.server.admin_contact,
"limit_type": event_limit_type,
}
event = await self._server_notices_manager.send_notice(
diff --git a/synapse/storage/databases/main/censor_events.py b/synapse/storage/databases/main/censor_events.py
index 6305414e3d..eee07227ef 100644
--- a/synapse/storage/databases/main/censor_events.py
+++ b/synapse/storage/databases/main/censor_events.py
@@ -36,7 +36,7 @@ class CensorEventsStore(EventsWorkerStore, CacheInvalidationWorkerStore, SQLBase
if (
hs.config.worker.run_background_tasks
- and self.hs.config.redaction_retention_period is not None
+ and self.hs.config.server.redaction_retention_period is not None
):
hs.get_clock().looping_call(self._censor_redactions, 5 * 60 * 1000)
@@ -48,7 +48,7 @@ class CensorEventsStore(EventsWorkerStore, CacheInvalidationWorkerStore, SQLBase
By censor we mean update the event_json table with the redacted event.
"""
- if self.hs.config.redaction_retention_period is None:
+ if self.hs.config.server.redaction_retention_period is None:
return
if not (
@@ -60,7 +60,9 @@ class CensorEventsStore(EventsWorkerStore, CacheInvalidationWorkerStore, SQLBase
# created.
return
- before_ts = self._clock.time_msec() - self.hs.config.redaction_retention_period
+ before_ts = (
+ self._clock.time_msec() - self.hs.config.server.redaction_retention_period
+ )
# We fetch all redactions that:
# 1. point to an event we have,
diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py
index 7e33ae578c..0e1d97aaeb 100644
--- a/synapse/storage/databases/main/client_ips.py
+++ b/synapse/storage/databases/main/client_ips.py
@@ -353,7 +353,7 @@ class ClientIpWorkerStore(ClientIpBackgroundUpdateStore):
def __init__(self, database: DatabasePool, db_conn, hs):
super().__init__(database, db_conn, hs)
- self.user_ips_max_age = hs.config.user_ips_max_age
+ self.user_ips_max_age = hs.config.server.user_ips_max_age
if hs.config.worker.run_background_tasks and self.user_ips_max_age:
self._clock.looping_call(self._prune_old_user_ips, 5 * 1000)
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index cc4e31ec30..bc7d213fe2 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -104,7 +104,7 @@ class PersistEventsStore:
self._clock = hs.get_clock()
self._instance_name = hs.get_instance_name()
- self._ephemeral_messages_enabled = hs.config.enable_ephemeral_messages
+ self._ephemeral_messages_enabled = hs.config.server.enable_ephemeral_messages
self.is_mine_id = hs.is_mine_id
# Ideally we'd move these ID gens here, unfortunately some other ID
diff --git a/synapse/storage/databases/main/monthly_active_users.py b/synapse/storage/databases/main/monthly_active_users.py
index b76ee51a9b..a14ac03d4b 100644
--- a/synapse/storage/databases/main/monthly_active_users.py
+++ b/synapse/storage/databases/main/monthly_active_users.py
@@ -32,8 +32,8 @@ class MonthlyActiveUsersWorkerStore(SQLBaseStore):
self._clock = hs.get_clock()
self.hs = hs
- self._limit_usage_by_mau = hs.config.limit_usage_by_mau
- self._max_mau_value = hs.config.max_mau_value
+ self._limit_usage_by_mau = hs.config.server.limit_usage_by_mau
+ self._max_mau_value = hs.config.server.max_mau_value
@cached(num_args=0)
async def get_monthly_active_count(self) -> int:
@@ -96,8 +96,8 @@ class MonthlyActiveUsersWorkerStore(SQLBaseStore):
"""
users = []
- for tp in self.hs.config.mau_limits_reserved_threepids[
- : self.hs.config.max_mau_value
+ for tp in self.hs.config.server.mau_limits_reserved_threepids[
+ : self.hs.config.server.max_mau_value
]:
user_id = await self.hs.get_datastore().get_user_id_by_threepid(
tp["medium"], tp["address"]
@@ -212,7 +212,7 @@ class MonthlyActiveUsersStore(MonthlyActiveUsersWorkerStore):
def __init__(self, database: DatabasePool, db_conn, hs):
super().__init__(database, db_conn, hs)
- self._mau_stats_only = hs.config.mau_stats_only
+ self._mau_stats_only = hs.config.server.mau_stats_only
# Do not add more reserved users than the total allowable number
self.db_pool.new_transaction(
@@ -221,7 +221,7 @@ class MonthlyActiveUsersStore(MonthlyActiveUsersWorkerStore):
[],
[],
self._initialise_reserved_users,
- hs.config.mau_limits_reserved_threepids[: self._max_mau_value],
+ hs.config.server.mau_limits_reserved_threepids[: self._max_mau_value],
)
def _initialise_reserved_users(self, txn, threepids):
diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py
index c83089ee63..7279b0924e 100644
--- a/synapse/storage/databases/main/registration.py
+++ b/synapse/storage/databases/main/registration.py
@@ -207,7 +207,7 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
return False
now = self._clock.time_msec()
- trial_duration_ms = self.config.mau_trial_days * 24 * 60 * 60 * 1000
+ trial_duration_ms = self.config.server.mau_trial_days * 24 * 60 * 60 * 1000
is_trial = (now - info["creation_ts"] * 1000) < trial_duration_ms
return is_trial
diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py
index 118b390e93..d69eaf80ce 100644
--- a/synapse/storage/databases/main/room.py
+++ b/synapse/storage/databases/main/room.py
@@ -679,8 +679,8 @@ class RoomWorkerStore(SQLBaseStore):
# policy.
if not ret:
return {
- "min_lifetime": self.config.retention_default_min_lifetime,
- "max_lifetime": self.config.retention_default_max_lifetime,
+ "min_lifetime": self.config.server.retention_default_min_lifetime,
+ "max_lifetime": self.config.server.retention_default_max_lifetime,
}
row = ret[0]
@@ -690,10 +690,10 @@ class RoomWorkerStore(SQLBaseStore):
# The default values will be None if no default policy has been defined, or if one
# of the attributes is missing from the default policy.
if row["min_lifetime"] is None:
- row["min_lifetime"] = self.config.retention_default_min_lifetime
+ row["min_lifetime"] = self.config.server.retention_default_min_lifetime
if row["max_lifetime"] is None:
- row["max_lifetime"] = self.config.retention_default_max_lifetime
+ row["max_lifetime"] = self.config.server.retention_default_max_lifetime
return row
diff --git a/synapse/storage/databases/main/search.py b/synapse/storage/databases/main/search.py
index 2a1e99e17a..c85383c975 100644
--- a/synapse/storage/databases/main/search.py
+++ b/synapse/storage/databases/main/search.py
@@ -51,7 +51,7 @@ class SearchWorkerStore(SQLBaseStore):
txn:
entries: entries to be added to the table
"""
- if not self.hs.config.enable_search:
+ if not self.hs.config.server.enable_search:
return
if isinstance(self.database_engine, PostgresEngine):
sql = (
@@ -105,7 +105,7 @@ class SearchBackgroundUpdateStore(SearchWorkerStore):
def __init__(self, database: DatabasePool, db_conn, hs):
super().__init__(database, db_conn, hs)
- if not hs.config.enable_search:
+ if not hs.config.server.enable_search:
return
self.db_pool.updates.register_background_update_handler(
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index f31880b8ec..a63eaddfdc 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -366,7 +366,7 @@ def _upgrade_existing_database(
+ "new for the server to understand"
)
- # some of the deltas assume that config.server_name is set correctly, so now
+ # some of the deltas assume that server_name is set correctly, so now
# is a good time to run the sanity check.
if not is_empty and "main" in databases:
from synapse.storage.databases.main import check_database_before_upgrade
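The change above is mechanical: call sites that used to read server-related options directly off the root config (`hs.config.foo`) now read them from the server section (`hs.config.server.foo`), while the `RootConfig.__getattr__` fallback described in `synapse/config/_base.py` keeps the legacy spelling working during the migration. A minimal, self-contained sketch of that delegation pattern (simplified stand-in classes with made-up option values, not Synapse's actual implementation):

    class ServerConfig:
        """Stand-in for a per-section config object."""

        def __init__(self) -> None:
            # Hypothetical values, for illustration only.
            self.server_name = "example.org"
            self.enable_search = True


    class RootConfig:
        """Stand-in for the root config that delegates attribute lookups."""

        def __init__(self) -> None:
            self._configs = {"server": ServerConfig()}

        def __getattr__(self, item: str):
            # Only called when normal attribute lookup fails: try a section
            # name first (`config.server`), then fall back to searching each
            # section for a legacy top-level attribute (`config.server_name`).
            if item in self._configs:
                return self._configs[item]
            for section in self._configs.values():
                if hasattr(section, item):
                    return getattr(section, item)
            raise AttributeError(item)


    config = RootConfig()
    assert config.server.enable_search is True    # new, section-scoped access
    assert config.server_name == "example.org"    # legacy access still resolves

Because the fallback only fires when normal attribute lookup fails, existing `hs.config.foo` call sites keep working while they are migrated to `hs.config.server.foo` one batch at a time.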