diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index a8c717efd5..2d0f3d566c 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -198,7 +198,7 @@ class AuthHandler(BaseHandler):
if inst.is_enabled():
self.checkers[inst.AUTH_TYPE] = inst # type: ignore
- self.bcrypt_rounds = hs.config.bcrypt_rounds
+ self.bcrypt_rounds = hs.config.registration.bcrypt_rounds
# we can't use hs.get_module_api() here, because to do so will create an
# import loop.
diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py
index 9ae5b7750e..12bdca7445 100644
--- a/synapse/handlers/deactivate_account.py
+++ b/synapse/handlers/deactivate_account.py
@@ -133,6 +133,10 @@ class DeactivateAccountHandler(BaseHandler):
# delete from user directory
await self.user_directory_handler.handle_local_user_deactivated(user_id)
+ # If the user is present in the monthly active users table,
+ # remove them.
+ await self.store.remove_deactivated_user_from_mau_table(user_id)
+
# Mark the user as erased, if they asked for that
if erase_data:
user = UserID.from_string(user_id)
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index 5cfba3c817..9078781d5a 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -49,7 +49,7 @@ class DirectoryHandler(BaseHandler):
self.store = hs.get_datastore()
self.config = hs.config
self.enable_room_list_search = hs.config.roomdirectory.enable_room_list_search
- self.require_membership = hs.config.require_membership_for_aliases
+ self.require_membership = hs.config.server.require_membership_for_aliases
self.third_party_event_rules = hs.get_third_party_event_rules()
self.federation = hs.get_federation_client()
diff --git a/synapse/handlers/event_auth.py b/synapse/handlers/event_auth.py
index cb81fa0986..d089c56286 100644
--- a/synapse/handlers/event_auth.py
+++ b/synapse/handlers/event_auth.py
@@ -22,7 +22,8 @@ from synapse.api.constants import (
RestrictedJoinRuleTypes,
)
from synapse.api.errors import AuthError, Codes, SynapseError
-from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
+from synapse.api.room_versions import RoomVersion
+from synapse.event_auth import check_auth_rules_for_event
from synapse.events import EventBase
from synapse.events.builder import EventBuilder
from synapse.events.snapshot import EventContext
@@ -45,21 +46,17 @@ class EventAuthHandler:
self._store = hs.get_datastore()
self._server_name = hs.hostname
- async def check_from_context(
+ async def check_auth_rules_from_context(
self,
- room_version: str,
+ room_version_obj: RoomVersion,
event: EventBase,
context: EventContext,
- do_sig_check: bool = True,
) -> None:
+ """Check an event passes the auth rules at its own auth events"""
auth_event_ids = event.auth_event_ids()
auth_events_by_id = await self._store.get_events(auth_event_ids)
auth_events = {(e.type, e.state_key): e for e in auth_events_by_id.values()}
-
- room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
- event_auth.check(
- room_version_obj, event, auth_events=auth_events, do_sig_check=do_sig_check
- )
+ check_auth_rules_for_event(room_version_obj, event, auth_events)
def compute_auth_events(
self,
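
The rename above also changes the contract: callers now pass the `RoomVersion` object itself, and signature checking is no longer folded into this method. A minimal sketch of how a call site migrates; `handler`, `store`, `event` and `context` are stand-ins for objects the real handlers already hold, not new APIs:

```python
# Illustrative sketch only: old call shape vs. new call shape.
async def migrate_call_site(handler, store, event, context):
    # Before: a room-version identifier string, with an optional signature check.
    #   await handler.check_from_context(room_version_id, event, context, do_sig_check=False)

    # After: look up the RoomVersion object and pass it directly; only the
    # auth rules are applied here.
    room_version_obj = await store.get_room_version(event.room_id)
    await handler.check_auth_rules_from_context(room_version_obj, event, context)
```
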
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index adbd150e46..043ca4a224 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -45,6 +45,10 @@ from synapse.api.errors import (
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion, RoomVersions
from synapse.crypto.event_signing import compute_event_signature
+from synapse.event_auth import (
+ check_auth_rules_for_event,
+ validate_event_for_room_version,
+)
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.events.validator import EventValidator
@@ -723,8 +727,8 @@ class FederationHandler(BaseHandler):
state_ids,
)
- builder = self.event_builder_factory.new(
- room_version.identifier,
+ builder = self.event_builder_factory.for_room_version(
+ room_version,
{
"type": EventTypes.Member,
"content": event_content,
@@ -747,10 +751,9 @@ class FederationHandler(BaseHandler):
# The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_join_request`
- await self._event_auth_handler.check_from_context(
- room_version.identifier, event, context, do_sig_check=False
+ await self._event_auth_handler.check_auth_rules_from_context(
+ room_version, event, context
)
-
return event
async def on_invite_request(
@@ -767,7 +770,7 @@ class FederationHandler(BaseHandler):
if is_blocked:
raise SynapseError(403, "This room has been blocked on this server")
- if self.hs.config.block_non_admin_invites:
+ if self.hs.config.server.block_non_admin_invites:
raise SynapseError(403, "This server does not accept room invites")
if not await self.spam_checker.user_may_invite(
@@ -902,9 +905,9 @@ class FederationHandler(BaseHandler):
)
raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
- room_version = await self.store.get_room_version_id(room_id)
- builder = self.event_builder_factory.new(
- room_version,
+ room_version_obj = await self.store.get_room_version(room_id)
+ builder = self.event_builder_factory.for_room_version(
+ room_version_obj,
{
"type": EventTypes.Member,
"content": {"membership": Membership.LEAVE},
@@ -921,8 +924,8 @@ class FederationHandler(BaseHandler):
try:
# The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_leave_request`
- await self._event_auth_handler.check_from_context(
- room_version, event, context, do_sig_check=False
+ await self._event_auth_handler.check_auth_rules_from_context(
+ room_version_obj, event, context
)
except AuthError as e:
logger.warning("Failed to create new leave %r because %s", event, e)
@@ -954,10 +957,10 @@ class FederationHandler(BaseHandler):
)
raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
- room_version = await self.store.get_room_version_id(room_id)
+ room_version_obj = await self.store.get_room_version(room_id)
- builder = self.event_builder_factory.new(
- room_version,
+ builder = self.event_builder_factory.for_room_version(
+ room_version_obj,
{
"type": EventTypes.Member,
"content": {"membership": Membership.KNOCK},
@@ -983,8 +986,8 @@ class FederationHandler(BaseHandler):
try:
# The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_knock_request`
- await self._event_auth_handler.check_from_context(
- room_version, event, context, do_sig_check=False
+ await self._event_auth_handler.check_auth_rules_from_context(
+ room_version_obj, event, context
)
except AuthError as e:
logger.warning("Failed to create new knock %r because %s", event, e)
@@ -1173,7 +1176,8 @@ class FederationHandler(BaseHandler):
auth_for_e[(EventTypes.Create, "")] = create_event
try:
- event_auth.check(room_version, e, auth_events=auth_for_e)
+ validate_event_for_room_version(room_version, e)
+ check_auth_rules_for_event(room_version, e, auth_for_e)
except SynapseError as err:
# we may get SynapseErrors here as well as AuthErrors. For
# instance, there are a couple of (ancient) events in some
@@ -1250,8 +1254,10 @@ class FederationHandler(BaseHandler):
}
if await self._event_auth_handler.check_host_in_room(room_id, self.hs.hostname):
- room_version = await self.store.get_room_version_id(room_id)
- builder = self.event_builder_factory.new(room_version, event_dict)
+ room_version_obj = await self.store.get_room_version(room_id)
+ builder = self.event_builder_factory.for_room_version(
+ room_version_obj, event_dict
+ )
EventValidator().validate_builder(builder)
event, context = await self.event_creation_handler.create_new_client_event(
@@ -1259,7 +1265,7 @@ class FederationHandler(BaseHandler):
)
event, context = await self.add_display_name_to_third_party_invite(
- room_version, event_dict, event, context
+ room_version_obj, event_dict, event, context
)
EventValidator().validate_new(event, self.config)
@@ -1269,8 +1275,9 @@ class FederationHandler(BaseHandler):
event.internal_metadata.send_on_behalf_of = self.hs.hostname
try:
- await self._event_auth_handler.check_from_context(
- room_version, event, context
+ validate_event_for_room_version(room_version_obj, event)
+ await self._event_auth_handler.check_auth_rules_from_context(
+ room_version_obj, event, context
)
except AuthError as e:
logger.warning("Denying new third party invite %r because %s", event, e)
@@ -1304,22 +1311,25 @@ class FederationHandler(BaseHandler):
"""
assert_params_in_dict(event_dict, ["room_id"])
- room_version = await self.store.get_room_version_id(event_dict["room_id"])
+ room_version_obj = await self.store.get_room_version(event_dict["room_id"])
# NB: event_dict has a particular specced format we might need to fudge
# if we change event formats too much.
- builder = self.event_builder_factory.new(room_version, event_dict)
+ builder = self.event_builder_factory.for_room_version(
+ room_version_obj, event_dict
+ )
event, context = await self.event_creation_handler.create_new_client_event(
builder=builder
)
event, context = await self.add_display_name_to_third_party_invite(
- room_version, event_dict, event, context
+ room_version_obj, event_dict, event, context
)
try:
- await self._event_auth_handler.check_from_context(
- room_version, event, context
+ validate_event_for_room_version(room_version_obj, event)
+ await self._event_auth_handler.check_auth_rules_from_context(
+ room_version_obj, event, context
)
except AuthError as e:
logger.warning("Denying third party invite %r because %s", event, e)
@@ -1336,7 +1346,7 @@ class FederationHandler(BaseHandler):
async def add_display_name_to_third_party_invite(
self,
- room_version: str,
+ room_version_obj: RoomVersion,
event_dict: JsonDict,
event: EventBase,
context: EventContext,
@@ -1368,7 +1378,9 @@ class FederationHandler(BaseHandler):
# auth checks. If we need the invite and don't have it then the
# auth check code will explode appropriately.
- builder = self.event_builder_factory.new(room_version, event_dict)
+ builder = self.event_builder_factory.for_room_version(
+ room_version_obj, event_dict
+ )
EventValidator().validate_builder(builder)
event, context = await self.event_creation_handler.create_new_client_event(
builder=builder
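
Taken together, the hunks in this file converge on one pattern for locally built events: resolve the `RoomVersion` object once, build the event from it, then validate the event format and check the auth rules as two explicit steps. A hedged sketch of that flow, assuming the usual `HomeServer` accessors; `hs` and `event_dict` are stand-in inputs:

```python
from synapse.event_auth import validate_event_for_room_version

async def build_and_auth_event(hs, event_dict):
    store = hs.get_datastore()
    room_version_obj = await store.get_room_version(event_dict["room_id"])

    # Builders are keyed on the RoomVersion object now, not its identifier string.
    builder = hs.get_event_builder_factory().for_room_version(room_version_obj, event_dict)
    event, context = await hs.get_event_creation_handler().create_new_client_event(
        builder=builder
    )

    # Format validation and auth-rule checking are separate, explicit steps.
    validate_event_for_room_version(room_version_obj, event)
    await hs.get_event_auth_handler().check_auth_rules_from_context(
        room_version_obj, event, context
    )
    return event, context
```
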
diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py
index 01fd841122..9269cb444d 100644
--- a/synapse/handlers/federation_event.py
+++ b/synapse/handlers/federation_event.py
@@ -29,7 +29,6 @@ from typing import (
from prometheus_client import Counter
-from synapse import event_auth
from synapse.api.constants import (
EventContentFields,
EventTypes,
@@ -47,7 +46,11 @@ from synapse.api.errors import (
SynapseError,
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
-from synapse.event_auth import auth_types_for_event
+from synapse.event_auth import (
+ auth_types_for_event,
+ check_auth_rules_for_event,
+ validate_event_for_room_version,
+)
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.federation.federation_client import InvalidResponseError
@@ -68,11 +71,7 @@ from synapse.types import (
UserID,
get_domain_from_id,
)
-from synapse.util.async_helpers import (
- Linearizer,
- concurrently_execute,
- yieldable_gather_results,
-)
+from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.iterutils import batch_iter
from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import shortstr
@@ -357,6 +356,11 @@ class FederationEventHandler:
)
# all looks good, we can persist the event.
+
+ # First, precalculate the joined hosts so that the federation sender doesn't
+ # need to.
+ await self._event_creation_handler.cache_joined_hosts_for_event(event, context)
+
await self._run_push_actions_and_persist_event(event, context)
return event, context
@@ -1189,7 +1193,10 @@ class FederationEventHandler:
allow_rejected=True,
)
- async def prep(event: EventBase) -> Optional[Tuple[EventBase, EventContext]]:
+ room_version = await self._store.get_room_version_id(room_id)
+ room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
+
+ def prep(event: EventBase) -> Optional[Tuple[EventBase, EventContext]]:
with nested_logging_context(suffix=event.event_id):
auth = {}
for auth_event_id in event.auth_event_ids():
@@ -1207,17 +1214,16 @@ class FederationEventHandler:
auth[(ae.type, ae.state_key)] = ae
context = EventContext.for_outlier()
- context = await self._check_event_auth(
- origin,
- event,
- context,
- claimed_auth_event_map=auth,
- )
+ try:
+ validate_event_for_room_version(room_version_obj, event)
+ check_auth_rules_for_event(room_version_obj, event, auth)
+ except AuthError as e:
+ logger.warning("Rejecting %r because %s", event, e)
+ context.rejected = RejectedReason.AUTH_ERROR
+
return event, context
- events_to_persist = (
- x for x in await yieldable_gather_results(prep, fetched_events) if x
- )
+ events_to_persist = (x for x in (prep(event) for event in fetched_events) if x)
await self.persist_events_and_notify(room_id, tuple(events_to_persist))
async def _check_event_auth(
@@ -1226,7 +1232,6 @@ class FederationEventHandler:
event: EventBase,
context: EventContext,
state: Optional[Iterable[EventBase]] = None,
- claimed_auth_event_map: Optional[StateMap[EventBase]] = None,
backfilled: bool = False,
) -> EventContext:
"""
@@ -1242,42 +1247,45 @@ class FederationEventHandler:
The state events used to check the event for soft-fail. If this is
not provided the current state events will be used.
- claimed_auth_event_map:
- A map of (type, state_key) => event for the event's claimed auth_events.
- Possibly including events that were rejected, or are in the wrong room.
-
- Only populated when populating outliers.
-
backfilled: True if the event was backfilled.
Returns:
The updated context object.
"""
- # claimed_auth_event_map should be given iff the event is an outlier
- assert bool(claimed_auth_event_map) == event.internal_metadata.outlier
+ # This method should only be used for non-outliers
+ assert not event.internal_metadata.outlier
+ # first of all, check that the event itself is valid.
room_version = await self._store.get_room_version_id(event.room_id)
room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
- if claimed_auth_event_map:
- # if we have a copy of the auth events from the event, use that as the
- # basis for auth.
- auth_events = claimed_auth_event_map
- else:
- # otherwise, we calculate what the auth events *should* be, and use that
- prev_state_ids = await context.get_prev_state_ids()
- auth_events_ids = self._event_auth_handler.compute_auth_events(
- event, prev_state_ids, for_verification=True
- )
- auth_events_x = await self._store.get_events(auth_events_ids)
- auth_events = {(e.type, e.state_key): e for e in auth_events_x.values()}
+ try:
+ validate_event_for_room_version(room_version_obj, event)
+ except AuthError as e:
+ logger.warning("While validating received event %r: %s", event, e)
+ # TODO: use a different rejected reason here?
+ context.rejected = RejectedReason.AUTH_ERROR
+ return context
+
+ # calculate what the auth events *should* be, to use as a basis for auth.
+ prev_state_ids = await context.get_prev_state_ids()
+ auth_events_ids = self._event_auth_handler.compute_auth_events(
+ event, prev_state_ids, for_verification=True
+ )
+ auth_events_x = await self._store.get_events(auth_events_ids)
+ calculated_auth_event_map = {
+ (e.type, e.state_key): e for e in auth_events_x.values()
+ }
try:
(
context,
auth_events_for_auth,
) = await self._update_auth_events_and_context_for_auth(
- origin, event, context, auth_events
+ origin,
+ event,
+ context,
+ calculated_auth_event_map=calculated_auth_event_map,
)
except Exception:
# We don't really mind if the above fails, so lets not fail
@@ -1289,24 +1297,17 @@ class FederationEventHandler:
"Ignoring failure and continuing processing of event.",
event.event_id,
)
- auth_events_for_auth = auth_events
+ auth_events_for_auth = calculated_auth_event_map
try:
- event_auth.check(room_version_obj, event, auth_events=auth_events_for_auth)
+ check_auth_rules_for_event(room_version_obj, event, auth_events_for_auth)
except AuthError as e:
logger.warning("Failed auth resolution for %r because %s", event, e)
context.rejected = RejectedReason.AUTH_ERROR
+ return context
- if not context.rejected:
- await self._check_for_soft_fail(event, state, backfilled, origin=origin)
- await self._maybe_kick_guest_users(event)
-
- # If we are going to send this event over federation we precaclculate
- # the joined hosts.
- if event.internal_metadata.get_send_on_behalf_of():
- await self._event_creation_handler.cache_joined_hosts_for_event(
- event, context
- )
+ await self._check_for_soft_fail(event, state, backfilled, origin=origin)
+ await self._maybe_kick_guest_users(event)
return context
@@ -1404,7 +1405,7 @@ class FederationEventHandler:
}
try:
- event_auth.check(room_version_obj, event, auth_events=current_auth_events)
+ check_auth_rules_for_event(room_version_obj, event, current_auth_events)
except AuthError as e:
logger.warning(
"Soft-failing %r (from %s) because %s",
@@ -1425,7 +1426,7 @@ class FederationEventHandler:
origin: str,
event: EventBase,
context: EventContext,
- input_auth_events: StateMap[EventBase],
+ calculated_auth_event_map: StateMap[EventBase],
) -> Tuple[EventContext, StateMap[EventBase]]:
"""Helper for _check_event_auth. See there for docs.
@@ -1443,19 +1444,17 @@ class FederationEventHandler:
event:
context:
- input_auth_events:
- Map from (event_type, state_key) to event
-
- Normally, our calculated auth_events based on the state of the room
- at the event's position in the DAG, though occasionally (eg if the
- event is an outlier), may be the auth events claimed by the remote
- server.
+ calculated_auth_event_map:
+ Our calculated auth_events based on the state of the room
+ at the event's position in the DAG.
Returns:
updated context, updated auth event map
"""
- # take a copy of input_auth_events before we modify it.
- auth_events: MutableStateMap[EventBase] = dict(input_auth_events)
+ assert not event.internal_metadata.outlier
+
+ # take a copy of calculated_auth_event_map before we modify it.
+ auth_events: MutableStateMap[EventBase] = dict(calculated_auth_event_map)
event_auth_events = set(event.auth_event_ids())
@@ -1475,6 +1474,11 @@ class FederationEventHandler:
logger.debug("Events %s are in the store", have_events)
missing_auth.difference_update(have_events)
+ # missing_auth is now the set of event_ids which:
+ # a. are listed in event.auth_events, *and*
+ # b. are *not* part of our calculated auth events based on room state, *and*
+ # c. are *not* yet in our database.
+
if missing_auth:
# If we don't have all the auth events, we need to get them.
logger.info("auth_events contains unknown events: %s", missing_auth)
@@ -1496,19 +1500,31 @@ class FederationEventHandler:
}
)
- if event.internal_metadata.is_outlier():
- # XXX: given that, for an outlier, we'll be working with the
- # event's *claimed* auth events rather than those we calculated:
- # (a) is there any point in this test, since different_auth below will
- # obviously be empty
- # (b) alternatively, why don't we do it earlier?
- logger.info("Skipping auth_event fetch for outlier")
- return context, auth_events
+ # auth_events now contains
+ # 1. our *calculated* auth events based on the room state, plus:
+ # 2. any events which:
+ # a. are listed in `event.auth_events`, *and*
+ # b. are not part of our calculated auth events, *and*
+ # c. were not in our database before the call to /event_auth
+ # d. have since been added to our database (most likely by /event_auth).
different_auth = event_auth_events.difference(
e.event_id for e in auth_events.values()
)
+ # different_auth is the set of events which *are* in `event.auth_events`, but
+ # which are *not* in `auth_events`. Comparing with (2.) above, this means
+ # exclusively the set of `event.auth_events` which we already had in our
+ # database before any call to /event_auth.
+ #
+ # I'm reasonably sure that the fact that events returned by /event_auth are
+ # blindly added to auth_events (and hence excluded from different_auth) is a bug
+ # - though it's a very long-standing one (see
+ # https://github.com/matrix-org/synapse/commit/78015948a7febb18e000651f72f8f58830a55b93#diff-0bc92da3d703202f5b9be2d3f845e375f5b1a6bc6ba61705a8af9be1121f5e42R786
+ # from Jan 2015 which seems to add it, though it actually just moves it from
+ # elsewhere (before that, it gets lost in a mess of huge "various bug fixes"
+ # PRs)).
+
if not different_auth:
return context, auth_events
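
The net effect on outliers is easiest to see in isolation: `prep` now validates and auth-checks them synchronously, and marks failures as rejected instead of raising. A minimal sketch reusing the names from the hunk above; `room_version_obj`, `event` and `auth` are assumed inputs:

```python
import logging

from synapse.api.constants import RejectedReason
from synapse.api.errors import AuthError
from synapse.event_auth import check_auth_rules_for_event, validate_event_for_room_version
from synapse.events.snapshot import EventContext

logger = logging.getLogger(__name__)

def auth_outlier(room_version_obj, event, auth):
    context = EventContext.for_outlier()
    try:
        validate_event_for_room_version(room_version_obj, event)
        check_auth_rules_for_event(room_version_obj, event, auth)
    except AuthError as e:
        # The outlier is still persisted, but flagged so it never contributes
        # to room state.
        logger.warning("Rejecting %r because %s", event, e)
        context.rejected = RejectedReason.AUTH_ERROR
    return event, context
```
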
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index fe8a995892..c881475c25 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -57,7 +57,7 @@ class IdentityHandler(BaseHandler):
self.http_client = SimpleHttpClient(hs)
# An HTTP client for contacting identity servers specified by clients.
self.blacklisting_http_client = SimpleHttpClient(
- hs, ip_blacklist=hs.config.federation_ip_range_blacklist
+ hs, ip_blacklist=hs.config.server.federation_ip_range_blacklist
)
self.federation_http_client = hs.get_federation_http_client()
self.hs = hs
@@ -573,9 +573,15 @@ class IdentityHandler(BaseHandler):
# Try to validate as email
if self.hs.config.email.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
+ # Remote emails will only be used if a valid identity server is provided.
+ assert (
+ self.hs.config.registration.account_threepid_delegate_email is not None
+ )
+
# Ask our delegated email identity server
validation_session = await self.threepid_from_creds(
- self.hs.config.account_threepid_delegate_email, threepid_creds
+ self.hs.config.registration.account_threepid_delegate_email,
+ threepid_creds,
)
elif self.hs.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
# Get a validated session matching these details
@@ -587,10 +593,11 @@ class IdentityHandler(BaseHandler):
return validation_session
# Try to validate as msisdn
- if self.hs.config.account_threepid_delegate_msisdn:
+ if self.hs.config.registration.account_threepid_delegate_msisdn:
# Ask our delegated msisdn identity server
validation_session = await self.threepid_from_creds(
- self.hs.config.account_threepid_delegate_msisdn, threepid_creds
+ self.hs.config.registration.account_threepid_delegate_msisdn,
+ threepid_creds,
)
return validation_session
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index fd861e94f8..ccd7827207 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -16,6 +16,7 @@
# limitations under the License.
import logging
import random
+from http import HTTPStatus
from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple
from canonicaljson import encode_canonical_json
@@ -39,9 +40,11 @@ from synapse.api.errors import (
NotFoundError,
ShadowBanError,
SynapseError,
+ UnsupportedRoomVersionError,
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions
from synapse.api.urls import ConsentURIBuilder
+from synapse.event_auth import validate_event_for_room_version
from synapse.events import EventBase
from synapse.events.builder import EventBuilder
from synapse.events.snapshot import EventContext
@@ -79,7 +82,7 @@ class MessageHandler:
self.storage = hs.get_storage()
self.state_store = self.storage.state
self._event_serializer = hs.get_event_client_serializer()
- self._ephemeral_events_enabled = hs.config.enable_ephemeral_messages
+ self._ephemeral_events_enabled = hs.config.server.enable_ephemeral_messages
# The scheduled call to self._expire_event. None if no call is currently
# scheduled.
@@ -413,7 +416,9 @@ class EventCreationHandler:
self.server_name = hs.hostname
self.notifier = hs.get_notifier()
self.config = hs.config
- self.require_membership_for_aliases = hs.config.require_membership_for_aliases
+ self.require_membership_for_aliases = (
+ hs.config.server.require_membership_for_aliases
+ )
self._events_shard_config = self.config.worker.events_shard_config
self._instance_name = hs.get_instance_name()
@@ -423,7 +428,7 @@ class EventCreationHandler:
Membership.JOIN,
Membership.KNOCK,
}
- if self.hs.config.include_profile_data_on_invite:
+ if self.hs.config.server.include_profile_data_on_invite:
self.membership_types_to_include_profile_data_in.add(Membership.INVITE)
self.send_event = ReplicationSendEventRestServlet.make_client(hs)
@@ -459,11 +464,11 @@ class EventCreationHandler:
#
self._rooms_to_exclude_from_dummy_event_insertion: Dict[str, int] = {}
# The number of forward extremeities before a dummy event is sent.
- self._dummy_events_threshold = hs.config.dummy_events_threshold
+ self._dummy_events_threshold = hs.config.server.dummy_events_threshold
if (
self.config.worker.run_background_tasks
- and self.config.cleanup_extremities_with_dummy_events
+ and self.config.server.cleanup_extremities_with_dummy_events
):
self.clock.looping_call(
lambda: run_as_background_process(
@@ -475,7 +480,7 @@ class EventCreationHandler:
self._message_handler = hs.get_message_handler()
- self._ephemeral_events_enabled = hs.config.enable_ephemeral_messages
+ self._ephemeral_events_enabled = hs.config.server.enable_ephemeral_messages
self._external_cache = hs.get_external_cache()
@@ -549,16 +554,22 @@ class EventCreationHandler:
await self.auth.check_auth_blocking(requester=requester)
if event_dict["type"] == EventTypes.Create and event_dict["state_key"] == "":
- room_version = event_dict["content"]["room_version"]
+ room_version_id = event_dict["content"]["room_version"]
+ room_version_obj = KNOWN_ROOM_VERSIONS.get(room_version_id)
+ if not room_version_obj:
+ # this can happen if support is withdrawn for a room version
+ raise UnsupportedRoomVersionError(room_version_id)
else:
try:
- room_version = await self.store.get_room_version_id(
+ room_version_obj = await self.store.get_room_version(
event_dict["room_id"]
)
except NotFoundError:
raise AuthError(403, "Unknown room")
- builder = self.event_builder_factory.new(room_version, event_dict)
+ builder = self.event_builder_factory.for_room_version(
+ room_version_obj, event_dict
+ )
self.validator.validate_builder(builder)
@@ -1064,9 +1075,17 @@ class EventCreationHandler:
EventTypes.Create,
"",
):
- room_version = event.content.get("room_version", RoomVersions.V1.identifier)
+ room_version_id = event.content.get(
+ "room_version", RoomVersions.V1.identifier
+ )
+ room_version_obj = KNOWN_ROOM_VERSIONS.get(room_version_id)
+ if not room_version_obj:
+ raise UnsupportedRoomVersionError(
+ "Attempt to create a room with unsupported room version %s"
+ % (room_version_id,)
+ )
else:
- room_version = await self.store.get_room_version_id(event.room_id)
+ room_version_obj = await self.store.get_room_version(event.room_id)
if event.internal_metadata.is_out_of_band_membership():
# the only sort of out-of-band-membership events we expect to see here are
@@ -1075,8 +1094,9 @@ class EventCreationHandler:
assert event.content["membership"] == Membership.LEAVE
else:
try:
- await self._event_auth_handler.check_from_context(
- room_version, event, context
+ validate_event_for_room_version(room_version_obj, event)
+ await self._event_auth_handler.check_auth_rules_from_context(
+ room_version_obj, event, context
)
except AuthError as err:
logger.warning("Denying new event %r because %s", event, err)
@@ -1456,6 +1476,39 @@ class EventCreationHandler:
if prev_state_ids:
raise AuthError(403, "Changing the room create event is forbidden")
+ if event.type == EventTypes.MSC2716_INSERTION:
+ room_version = await self.store.get_room_version_id(event.room_id)
+ room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
+
+ create_event = await self.store.get_create_event_for_room(event.room_id)
+ room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR)
+
+ # Only check an insertion event if the room version
+ # supports it or the event is from the room creator.
+ if room_version_obj.msc2716_historical or (
+ self.config.experimental.msc2716_enabled
+ and event.sender == room_creator
+ ):
+ next_batch_id = event.content.get(
+ EventContentFields.MSC2716_NEXT_BATCH_ID
+ )
+ conflicting_insertion_event_id = (
+ await self.store.get_insertion_event_by_batch_id(
+ event.room_id, next_batch_id
+ )
+ )
+ if conflicting_insertion_event_id is not None:
+ # The current insertion event that we're processing is invalid
+ # because an insertion event already exists in the room with the
+ # same next_batch_id. We can't allow multiple because the batch
+ # pointing will get weird, e.g. we can't determine which insertion
+ # event the batch event is pointing to.
+ raise SynapseError(
+ HTTPStatus.BAD_REQUEST,
+ "Another insertion event already exists with the same next_batch_id",
+ errcode=Codes.INVALID_PARAM,
+ )
+
# Mark any `m.historical` messages as backfilled so they don't appear
# in `/sync` and have the proper decrementing `stream_ordering` as we import
backfilled = False
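
The create-event branch above swaps a raw `KNOWN_ROOM_VERSIONS[...]` lookup for an explicit guard against unsupported (or withdrawn) room versions. A hedged sketch of that resolution pattern on its own; `store` and `event_dict` are assumed inputs, and `EventTypes.Create` is the `m.room.create` constant:

```python
from synapse.api.constants import EventTypes
from synapse.api.errors import UnsupportedRoomVersionError
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS

async def resolve_room_version(store, event_dict):
    if event_dict["type"] == EventTypes.Create and event_dict.get("state_key") == "":
        # Create events carry their room version in content; reject identifiers
        # we don't support rather than hitting a KeyError later on.
        room_version_id = event_dict["content"]["room_version"]
        room_version_obj = KNOWN_ROOM_VERSIONS.get(room_version_id)
        if not room_version_obj:
            raise UnsupportedRoomVersionError(room_version_id)
        return room_version_obj
    # Any other event belongs to an existing room, so the store knows its version.
    return await store.get_room_version(event_dict["room_id"])
```
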
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index 08b93b3ec1..176e4dfdd4 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -85,23 +85,29 @@ class PaginationHandler:
self._purges_by_id: Dict[str, PurgeStatus] = {}
self._event_serializer = hs.get_event_client_serializer()
- self._retention_default_max_lifetime = hs.config.retention_default_max_lifetime
+ self._retention_default_max_lifetime = (
+ hs.config.server.retention_default_max_lifetime
+ )
- self._retention_allowed_lifetime_min = hs.config.retention_allowed_lifetime_min
- self._retention_allowed_lifetime_max = hs.config.retention_allowed_lifetime_max
+ self._retention_allowed_lifetime_min = (
+ hs.config.server.retention_allowed_lifetime_min
+ )
+ self._retention_allowed_lifetime_max = (
+ hs.config.server.retention_allowed_lifetime_max
+ )
- if hs.config.worker.run_background_tasks and hs.config.retention_enabled:
+ if hs.config.worker.run_background_tasks and hs.config.server.retention_enabled:
# Run the purge jobs described in the configuration file.
- for job in hs.config.retention_purge_jobs:
+ for job in hs.config.server.retention_purge_jobs:
logger.info("Setting up purge job with config: %s", job)
self.clock.looping_call(
run_as_background_process,
- job["interval"],
+ job.interval,
"purge_history_for_rooms_in_range",
self.purge_history_for_rooms_in_range,
- job["shortest_max_lifetime"],
- job["longest_max_lifetime"],
+ job.shortest_max_lifetime,
+ job.longest_max_lifetime,
)
async def purge_history_for_rooms_in_range(
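
The switch from `job["interval"]` to `job.interval` implies the parsed retention config now yields structured job objects rather than plain dicts. A purely hypothetical illustration of such a container, using only the field names visible in the diff; the real class in the server config may be named and typed differently:

```python
from typing import Optional

import attr

@attr.s(slots=True, frozen=True, auto_attribs=True)
class RetentionPurgeJob:
    """Hypothetical attrs-style container matching the attribute access above."""

    interval: int  # how often this purge job runs, in milliseconds
    shortest_max_lifetime: Optional[int]  # lower bound of the max_lifetime range it covers
    longest_max_lifetime: Optional[int]  # upper bound of the max_lifetime range it covers
```
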
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index b23a1541bc..2e19706c69 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -178,7 +178,7 @@ class ProfileHandler(BaseHandler):
if not by_admin and target_user != requester.user:
raise AuthError(400, "Cannot set another user's displayname")
- if not by_admin and not self.hs.config.enable_set_displayname:
+ if not by_admin and not self.hs.config.registration.enable_set_displayname:
profile = await self.store.get_profileinfo(target_user.localpart)
if profile.display_name:
raise SynapseError(
@@ -268,7 +268,7 @@ class ProfileHandler(BaseHandler):
if not by_admin and target_user != requester.user:
raise AuthError(400, "Cannot set another user's avatar_url")
- if not by_admin and not self.hs.config.enable_set_avatar_url:
+ if not by_admin and not self.hs.config.registration.enable_set_avatar_url:
profile = await self.store.get_profileinfo(target_user.localpart)
if profile.avatar_url:
raise SynapseError(
@@ -397,7 +397,7 @@ class ProfileHandler(BaseHandler):
# when building a membership event. In this case, we must allow the
# lookup.
if (
- not self.hs.config.limit_profile_requests_to_users_who_share_rooms
+ not self.hs.config.server.limit_profile_requests_to_users_who_share_rooms
or not requester
):
return
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 4f99f137a2..441af7a848 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -116,8 +116,8 @@ class RegistrationHandler(BaseHandler):
self._register_device_client = self.register_device_inner
self.pusher_pool = hs.get_pusherpool()
- self.session_lifetime = hs.config.session_lifetime
- self.access_token_lifetime = hs.config.access_token_lifetime
+ self.session_lifetime = hs.config.registration.session_lifetime
+ self.access_token_lifetime = hs.config.registration.access_token_lifetime
init_counters_for_auth_provider("")
@@ -340,8 +340,13 @@ class RegistrationHandler(BaseHandler):
auth_provider=(auth_provider_id or ""),
).inc()
+ # If the user does not need to consent at registration, auto-join any
+ # configured rooms.
if not self.hs.config.consent.user_consent_at_registration:
- if not self.hs.config.auto_join_rooms_for_guests and make_guest:
+ if (
+ not self.hs.config.registration.auto_join_rooms_for_guests
+ and make_guest
+ ):
logger.info(
"Skipping auto-join for %s because auto-join for guests is disabled",
user_id,
@@ -387,7 +392,7 @@ class RegistrationHandler(BaseHandler):
"preset": self.hs.config.registration.autocreate_auto_join_room_preset,
}
- # If the configuration providers a user ID to create rooms with, use
+ # If the configuration provides a user ID to create rooms with, use
# that instead of the first user registered.
requires_join = False
if self.hs.config.registration.auto_join_user_id:
@@ -854,7 +859,7 @@ class RegistrationHandler(BaseHandler):
# Necessary due to auth checks prior to the threepid being
# written to the db
if is_threepid_reserved(
- self.hs.config.mau_limits_reserved_threepids, threepid
+ self.hs.config.server.mau_limits_reserved_threepids, threepid
):
await self.store.upsert_monthly_active_user(user_id)
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 8fede5e935..873e08258e 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -52,6 +52,7 @@ from synapse.api.errors import (
)
from synapse.api.filtering import Filter
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
+from synapse.event_auth import validate_event_for_room_version
from synapse.events import EventBase
from synapse.events.utils import copy_power_levels_contents
from synapse.rest.admin._base import assert_user_is_admin
@@ -237,8 +238,9 @@ class RoomCreationHandler(BaseHandler):
},
},
)
- old_room_version = await self.store.get_room_version_id(old_room_id)
- await self._event_auth_handler.check_from_context(
+ old_room_version = await self.store.get_room_version(old_room_id)
+ validate_event_for_room_version(old_room_version, tombstone_event)
+ await self._event_auth_handler.check_auth_rules_from_context(
old_room_version, tombstone_event, tombstone_context
)
@@ -666,7 +668,7 @@ class RoomCreationHandler(BaseHandler):
await self.ratelimit(requester)
room_version_id = config.get(
- "room_version", self.config.default_room_version.identifier
+ "room_version", self.config.server.default_room_version.identifier
)
if not isinstance(room_version_id, str):
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index afa7e4727d..c8fb24a20c 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -89,8 +89,8 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
self.spam_checker = hs.get_spam_checker()
self.third_party_event_rules = hs.get_third_party_event_rules()
self._server_notices_mxid = self.config.servernotices.server_notices_mxid
- self._enable_lookup = hs.config.enable_3pid_lookup
- self.allow_per_room_profiles = self.config.allow_per_room_profiles
+ self._enable_lookup = hs.config.registration.enable_3pid_lookup
+ self.allow_per_room_profiles = self.config.server.allow_per_room_profiles
self._join_rate_limiter_local = Ratelimiter(
store=self.store,
@@ -625,7 +625,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
is_requester_admin = await self.auth.is_server_admin(requester.user)
if not is_requester_admin:
- if self.config.block_non_admin_invites:
+ if self.config.server.block_non_admin_invites:
logger.info(
"Blocking invite: user is not admin and non-admin "
"invites disabled"
@@ -1230,7 +1230,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
Raises:
ShadowBanError if the requester has been shadow-banned.
"""
- if self.config.block_non_admin_invites:
+ if self.config.server.block_non_admin_invites:
is_requester_admin = await self.auth.is_server_admin(requester.user)
if not is_requester_admin:
raise SynapseError(
@@ -1428,7 +1428,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
Returns: bool of whether the complexity is too great, or None
if unable to be fetched
"""
- max_complexity = self.hs.config.limit_remote_rooms.complexity
+ max_complexity = self.hs.config.server.limit_remote_rooms.complexity
complexity = await self.federation_handler.get_room_complexity(
remote_room_hosts, room_id
)
@@ -1444,7 +1444,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
Args:
room_id: The room ID to check for complexity.
"""
- max_complexity = self.hs.config.limit_remote_rooms.complexity
+ max_complexity = self.hs.config.server.limit_remote_rooms.complexity
complexity = await self.store.get_room_complexity(room_id)
return complexity["v1"] > max_complexity
@@ -1480,7 +1480,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
if too_complex is True:
raise SynapseError(
code=400,
- msg=self.hs.config.limit_remote_rooms.complexity_error,
+ msg=self.hs.config.server.limit_remote_rooms.complexity_error,
errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
)
@@ -1515,7 +1515,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
)
raise SynapseError(
code=400,
- msg=self.hs.config.limit_remote_rooms.complexity_error,
+ msg=self.hs.config.server.limit_remote_rooms.complexity_error,
errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
)
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index 8226d6f5a1..6d3333ee00 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -105,7 +105,7 @@ class SearchHandler(BaseHandler):
dict to be returned to the client with results of search
"""
- if not self.hs.config.enable_search:
+ if not self.hs.config.server.enable_search:
raise SynapseError(400, "Search is disabled on this homeserver")
batch_group = None
diff --git a/synapse/handlers/send_email.py b/synapse/handlers/send_email.py
index 25e6b012b7..1a062a784c 100644
--- a/synapse/handlers/send_email.py
+++ b/synapse/handlers/send_email.py
@@ -105,8 +105,13 @@ async def _sendmail(
# set to enable TLS.
factory = build_sender_factory(hostname=smtphost if enable_tls else None)
- # the IReactorTCP interface claims host has to be a bytes, which seems to be wrong
- reactor.connectTCP(smtphost, smtpport, factory, timeout=30, bindAddress=None) # type: ignore[arg-type]
+ reactor.connectTCP(
+ smtphost, # type: ignore[arg-type]
+ smtpport,
+ factory,
+ timeout=30,
+ bindAddress=None,
+ )
await make_deferred_yieldable(d)
diff --git a/synapse/handlers/ui_auth/checkers.py b/synapse/handlers/ui_auth/checkers.py
index 8f5d465fa1..184730ebe8 100644
--- a/synapse/handlers/ui_auth/checkers.py
+++ b/synapse/handlers/ui_auth/checkers.py
@@ -153,21 +153,23 @@ class _BaseThreepidAuthChecker:
# msisdns are currently always ThreepidBehaviour.REMOTE
if medium == "msisdn":
- if not self.hs.config.account_threepid_delegate_msisdn:
+ if not self.hs.config.registration.account_threepid_delegate_msisdn:
raise SynapseError(
400, "Phone number verification is not enabled on this homeserver"
)
threepid = await identity_handler.threepid_from_creds(
- self.hs.config.account_threepid_delegate_msisdn, threepid_creds
+ self.hs.config.registration.account_threepid_delegate_msisdn,
+ threepid_creds,
)
elif medium == "email":
if (
self.hs.config.email.threepid_behaviour_email
== ThreepidBehaviour.REMOTE
):
- assert self.hs.config.account_threepid_delegate_email
+ assert self.hs.config.registration.account_threepid_delegate_email
threepid = await identity_handler.threepid_from_creds(
- self.hs.config.account_threepid_delegate_email, threepid_creds
+ self.hs.config.registration.account_threepid_delegate_email,
+ threepid_creds,
)
elif (
self.hs.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL
@@ -240,7 +242,7 @@ class MsisdnAuthChecker(UserInteractiveAuthChecker, _BaseThreepidAuthChecker):
_BaseThreepidAuthChecker.__init__(self, hs)
def is_enabled(self) -> bool:
- return bool(self.hs.config.account_threepid_delegate_msisdn)
+ return bool(self.hs.config.registration.account_threepid_delegate_msisdn)
async def check_auth(self, authdict: dict, clientip: str) -> Any:
return await self._check_threepid("msisdn", authdict)
@@ -252,7 +254,7 @@ class RegistrationTokenAuthChecker(UserInteractiveAuthChecker):
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.hs = hs
- self._enabled = bool(hs.config.registration_requires_token)
+ self._enabled = bool(hs.config.registration.registration_requires_token)
self.store = hs.get_datastore()
def is_enabled(self) -> bool:
diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py
index b91e7cb501..18d8c8744e 100644
--- a/synapse/handlers/user_directory.py
+++ b/synapse/handlers/user_directory.py
@@ -60,7 +60,7 @@ class UserDirectoryHandler(StateDeltasHandler):
self.clock = hs.get_clock()
self.notifier = hs.get_notifier()
self.is_mine_id = hs.is_mine_id
- self.update_user_directory = hs.config.update_user_directory
+ self.update_user_directory = hs.config.server.update_user_directory
self.search_all_users = hs.config.userdirectory.user_directory_search_all_users
self.spam_checker = hs.get_spam_checker()
# The current position in the current_state_delta stream
@@ -132,12 +132,7 @@ class UserDirectoryHandler(StateDeltasHandler):
# FIXME(#3714): We should probably do this in the same worker as all
# the other changes.
- # Support users are for diagnostics and should not appear in the user directory.
- is_support = await self.store.is_support_user(user_id)
- # When change profile information of deactivated user it should not appear in the user directory.
- is_deactivated = await self.store.get_user_deactivated_status(user_id)
-
- if not (is_support or is_deactivated):
+ if await self.store.should_include_local_user_in_dir(user_id):
await self.store.update_profile_in_user_dir(
user_id, profile.display_name, profile.avatar_url
)
@@ -229,8 +224,10 @@ class UserDirectoryHandler(StateDeltasHandler):
else:
logger.debug("Server is still in room: %r", room_id)
- is_support = await self.store.is_support_user(state_key)
- if not is_support:
+ include_in_dir = not self.is_mine_id(
+ state_key
+ ) or await self.store.should_include_local_user_in_dir(state_key)
+ if include_in_dir:
if change is MatchChange.no_change:
# Handle any profile changes
await self._handle_profile_change(
@@ -356,13 +353,7 @@ class UserDirectoryHandler(StateDeltasHandler):
# First, if they're our user then we need to update for every user
if self.is_mine_id(user_id):
-
- is_appservice = self.store.get_if_app_services_interested_in_user(
- user_id
- )
-
- # We don't care about appservice users.
- if not is_appservice:
+ if await self.store.should_include_local_user_in_dir(user_id):
for other_user_id in other_users_in_room:
if user_id == other_user_id:
continue
@@ -374,10 +365,10 @@ class UserDirectoryHandler(StateDeltasHandler):
if user_id == other_user_id:
continue
- is_appservice = self.store.get_if_app_services_interested_in_user(
+ include_other_user = self.is_mine_id(
other_user_id
- )
- if self.is_mine_id(other_user_id) and not is_appservice:
+ ) and await self.store.should_include_local_user_in_dir(other_user_id)
+ if include_other_user:
to_insert.add((other_user_id, user_id))
if to_insert:
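
The handler-side checks this file used to repeat (support users, deactivated users, users only an application service cares about) are folded into a single store helper. A hedged sketch of what that helper covers, inferred only from the checks removed above; the real store method may differ in detail:

```python
async def should_include_local_user_in_dir(store, user_id: str) -> bool:
    # Support users exist for diagnostics and should not appear in the directory.
    if await store.is_support_user(user_id):
        return False
    # Deactivated users should not appear either.
    if await store.get_user_deactivated_status(user_id):
        return False
    # Nor should users that only an application service is interested in.
    if store.get_if_app_services_interested_in_user(user_id):
        return False
    return True
```
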