diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index 955cfa2207..c23ccd6dd9 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -45,16 +45,16 @@ class BaseHandler:
self.request_ratelimiter = Ratelimiter(
store=self.store, clock=self.clock, rate_hz=0, burst_count=0
)
- self._rc_message = self.hs.config.rc_message
+ self._rc_message = self.hs.config.ratelimiting.rc_message
# Check whether ratelimiting room admin message redaction is enabled
# by the presence of rate limits in the config
- if self.hs.config.rc_admin_redaction:
+ if self.hs.config.ratelimiting.rc_admin_redaction:
self.admin_redaction_ratelimiter: Optional[Ratelimiter] = Ratelimiter(
store=self.store,
clock=self.clock,
- rate_hz=self.hs.config.rc_admin_redaction.per_second,
- burst_count=self.hs.config.rc_admin_redaction.burst_count,
+ rate_hz=self.hs.config.ratelimiting.rc_admin_redaction.per_second,
+ burst_count=self.hs.config.ratelimiting.rc_admin_redaction.burst_count,
)
else:
self.admin_redaction_ratelimiter = None
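
Throughout this diff, flat attributes like `hs.config.rc_message` become section-scoped ones like `hs.config.ratelimiting.rc_message`. A minimal, runnable sketch of that access pattern, using hypothetical stand-in classes (`RootConfig`, `RatelimitingSection`, `RatelimitSettings` are illustrative here, not Synapse's real config classes):

    # Hypothetical stand-ins for illustration; the real config objects live in
    # synapse.config.*. The point is the shape: per-section objects hang off
    # the root config instead of every setting being flattened onto it.
    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class RatelimitSettings:
        per_second: float
        burst_count: int

    @dataclass
    class RatelimitingSection:
        rc_message: RatelimitSettings
        rc_admin_redaction: Optional[RatelimitSettings]  # None => limiter disabled

    @dataclass
    class RootConfig:
        ratelimiting: RatelimitingSection

    config = RootConfig(
        RatelimitingSection(
            rc_message=RatelimitSettings(per_second=0.2, burst_count=10),
            rc_admin_redaction=None,
        )
    )

    # Old style: config.rc_admin_redaction -- new style:
    if config.ratelimiting.rc_admin_redaction:
        print(config.ratelimiting.rc_admin_redaction.per_second)
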
diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py
index 078accd634..a9c2222f46 100644
--- a/synapse/handlers/account_validity.py
+++ b/synapse/handlers/account_validity.py
@@ -78,7 +78,7 @@ class AccountValidityHandler:
)
# Check the renewal emails to send and send them every 30min.
- if hs.config.run_background_tasks:
+ if hs.config.worker.run_background_tasks:
self.clock.looping_call(self._send_renewal_emails, 30 * 60 * 1000)
self._is_user_expired_callbacks: List[IS_USER_EXPIRED_CALLBACK] = []
@@ -249,7 +249,7 @@ class AccountValidityHandler:
renewal_token = await self._get_renewal_token(user_id)
url = "%s_matrix/client/unstable/account_validity/renew?token=%s" % (
- self.hs.config.public_baseurl,
+ self.hs.config.server.public_baseurl,
renewal_token,
)
@@ -398,6 +398,7 @@ class AccountValidityHandler:
"""
now = self.clock.time_msec()
if expiration_ts is None:
+ assert self._account_validity_period is not None
expiration_ts = now + self._account_validity_period
await self.store.set_account_validity_for_user(
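
The new `assert self._account_validity_period is not None` here (and the `assert ts is not None` lines in appservice.py below) are type-narrowing guards rather than behaviour changes: mypy treats `assert x is not None` as narrowing `Optional[T]` to `T` for the code that follows. A small sketch with a hypothetical helper:

    from typing import Optional

    def next_expiry(now_ms: int, validity_period_ms: Optional[int]) -> int:
        # Hypothetical helper for illustration. Without the assert, mypy
        # rejects `now_ms + validity_period_ms` because the right-hand side
        # might be None; at runtime the assert also fails fast if this path
        # is reached with the feature unconfigured.
        assert validity_period_ms is not None
        return now_ms + validity_period_ms

    print(next_expiry(1_000, 86_400_000))  # 86401000
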
diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py
index 4ab4046650..a7b5a4e9c9 100644
--- a/synapse/handlers/appservice.py
+++ b/synapse/handlers/appservice.py
@@ -131,6 +131,8 @@ class ApplicationServicesHandler:
now = self.clock.time_msec()
ts = await self.store.get_received_ts(event.event_id)
+ assert ts is not None
+
synapse.metrics.event_processing_lag_by_event.labels(
"appservice_sender"
).observe((now - ts) / 1000)
@@ -166,6 +168,7 @@ class ApplicationServicesHandler:
if events:
now = self.clock.time_msec()
ts = await self.store.get_received_ts(events[-1].event_id)
+ assert ts is not None
synapse.metrics.event_processing_lag.labels(
"appservice_sender"
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 34725324a6..fbbf6fd834 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -244,8 +244,8 @@ class AuthHandler(BaseHandler):
self._failed_uia_attempts_ratelimiter = Ratelimiter(
store=self.store,
clock=self.clock,
- rate_hz=self.hs.config.rc_login_failed_attempts.per_second,
- burst_count=self.hs.config.rc_login_failed_attempts.burst_count,
+ rate_hz=self.hs.config.ratelimiting.rc_login_failed_attempts.per_second,
+ burst_count=self.hs.config.ratelimiting.rc_login_failed_attempts.burst_count,
)
# The number of seconds to keep a UI auth session active.
@@ -255,14 +255,14 @@ class AuthHandler(BaseHandler):
self._failed_login_attempts_ratelimiter = Ratelimiter(
store=self.store,
clock=hs.get_clock(),
- rate_hz=self.hs.config.rc_login_failed_attempts.per_second,
- burst_count=self.hs.config.rc_login_failed_attempts.burst_count,
+ rate_hz=self.hs.config.ratelimiting.rc_login_failed_attempts.per_second,
+ burst_count=self.hs.config.ratelimiting.rc_login_failed_attempts.burst_count,
)
self._clock = self.hs.get_clock()
# Expire old UI auth sessions after a period of time.
- if hs.config.run_background_tasks:
+ if hs.config.worker.run_background_tasks:
self._clock.looping_call(
run_as_background_process,
5 * 60 * 1000,
@@ -289,7 +289,7 @@ class AuthHandler(BaseHandler):
hs.config.sso_account_deactivated_template
)
- self._server_name = hs.config.server_name
+ self._server_name = hs.config.server.server_name
# cast to tuple for use with str.startswith
self._whitelisted_sso_clients = tuple(hs.config.sso_client_whitelist)
@@ -749,7 +749,7 @@ class AuthHandler(BaseHandler):
"name": self.hs.config.user_consent_policy_name,
"url": "%s_matrix/consent?v=%s"
% (
- self.hs.config.public_baseurl,
+ self.hs.config.server.public_baseurl,
self.hs.config.user_consent_version,
),
},
@@ -1799,7 +1799,7 @@ class MacaroonGenerator:
def _generate_base_macaroon(self, user_id: str) -> pymacaroons.Macaroon:
macaroon = pymacaroons.Macaroon(
- location=self.hs.config.server_name,
+ location=self.hs.config.server.server_name,
identifier="key",
key=self.hs.config.macaroon_secret_key,
)
diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py
index 45d2404dde..dcd320c555 100644
--- a/synapse/handlers/deactivate_account.py
+++ b/synapse/handlers/deactivate_account.py
@@ -46,7 +46,7 @@ class DeactivateAccountHandler(BaseHandler):
# Start the user parter loop so it can resume parting users from rooms where
# it left off (if it has work left to do).
- if hs.config.run_background_tasks:
+ if hs.config.worker.run_background_tasks:
hs.get_reactor().callWhenRunning(self._start_user_parting)
self._account_validity_enabled = (
@@ -131,7 +131,7 @@ class DeactivateAccountHandler(BaseHandler):
await self.store.add_user_pending_deactivation(user_id)
# delete from user directory
- await self.user_directory_handler.handle_user_deactivated(user_id)
+ await self.user_directory_handler.handle_local_user_deactivated(user_id)
# Mark the user as erased, if they asked for that
if erase_data:
diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py
index 679b47f081..b6a2a34ab7 100644
--- a/synapse/handlers/devicemessage.py
+++ b/synapse/handlers/devicemessage.py
@@ -84,8 +84,8 @@ class DeviceMessageHandler:
self._ratelimiter = Ratelimiter(
store=self.store,
clock=hs.get_clock(),
- rate_hz=hs.config.rc_key_requests.per_second,
- burst_count=hs.config.rc_key_requests.burst_count,
+ rate_hz=hs.config.ratelimiting.rc_key_requests.per_second,
+ burst_count=hs.config.ratelimiting.rc_key_requests.burst_count,
)
async def on_direct_to_device_edu(self, origin: str, content: JsonDict) -> None:
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index d92370859f..08a137561f 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -57,7 +57,7 @@ class E2eKeysHandler:
federation_registry = hs.get_federation_registry()
- self._is_master = hs.config.worker_app is None
+ self._is_master = hs.config.worker.worker_app is None
if not self._is_master:
self._user_device_resync_client = (
ReplicationUserDevicesResyncRestServlet.make_client(hs)
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 77df9185f6..6754c64c31 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -101,7 +101,7 @@ class FederationHandler(BaseHandler):
hs
)
- if hs.config.worker_app:
+ if hs.config.worker.worker_app:
self._maybe_store_room_on_outlier_membership = (
ReplicationStoreRoomOnOutlierMembershipRestServlet.make_client(hs)
)
@@ -1614,7 +1614,7 @@ class FederationHandler(BaseHandler):
Args:
room_id
"""
- if self.config.worker_app:
+ if self.config.worker.worker_app:
await self._clean_room_for_join_client(room_id)
else:
await self.store.clean_room_for_join(room_id)
diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py
index 9ec90ac8c1..946343fa25 100644
--- a/synapse/handlers/federation_event.py
+++ b/synapse/handlers/federation_event.py
@@ -149,7 +149,7 @@ class FederationEventHandler:
self._ephemeral_messages_enabled = hs.config.server.enable_ephemeral_messages
self._send_events = ReplicationFederationSendEventsRestServlet.make_client(hs)
- if hs.config.worker_app:
+ if hs.config.worker.worker_app:
self._user_device_resync = (
ReplicationUserDevicesResyncRestServlet.make_client(hs)
)
@@ -1009,7 +1009,7 @@ class FederationEventHandler:
await self._store.mark_remote_user_device_cache_as_stale(sender)
# Immediately attempt a resync in the background
- if self._config.worker_app:
+ if self._config.worker.worker_app:
await self._user_device_resync(user_id=sender)
else:
await self._device_list_updater.user_device_resync(sender)
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index 8ffeabacf9..8b8f1f41ca 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -540,13 +540,13 @@ class IdentityHandler(BaseHandler):
# It is already checked that public_baseurl is configured since this code
# should only be used if account_threepid_delegate_msisdn is true.
- assert self.hs.config.public_baseurl
+ assert self.hs.config.server.public_baseurl
# we need to tell the client to send the token back to us, since it doesn't
# otherwise know where to send it, so add submit_url response parameter
# (see also MSC2078)
data["submit_url"] = (
- self.hs.config.public_baseurl
+ self.hs.config.server.public_baseurl
+ "_matrix/client/unstable/add_threepid/msisdn/submit_token"
)
return data
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 75d4e27723..60673cd4b8 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -84,7 +84,7 @@ class MessageHandler:
# scheduled.
self._scheduled_expiry: Optional[IDelayedCall] = None
- if not hs.config.worker_app:
+ if not hs.config.worker.worker_app:
run_as_background_process(
"_schedule_next_expiry", self._schedule_next_expiry
)
@@ -461,7 +461,7 @@ class EventCreationHandler:
self._dummy_events_threshold = hs.config.dummy_events_threshold
if (
- self.config.run_background_tasks
+ self.config.worker.run_background_tasks
and self.config.cleanup_extremities_with_dummy_events
):
self.clock.looping_call(
diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py
index 648fcf76f8..dfc251b2a5 100644
--- a/synapse/handlers/oidc.py
+++ b/synapse/handlers/oidc.py
@@ -324,7 +324,7 @@ class OidcProvider:
self._allow_existing_users = provider.allow_existing_users
self._http_client = hs.get_proxied_http_client()
- self._server_name: str = hs.config.server_name
+ self._server_name: str = hs.config.server.server_name
# identifier for the external_ids table
self.idp_id = provider.idp_id
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index 1dbafd253d..7dc0ee4bef 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -91,7 +91,7 @@ class PaginationHandler:
self._retention_allowed_lifetime_min = hs.config.retention_allowed_lifetime_min
self._retention_allowed_lifetime_max = hs.config.retention_allowed_lifetime_max
- if hs.config.run_background_tasks and hs.config.retention_enabled:
+ if hs.config.worker.run_background_tasks and hs.config.retention_enabled:
# Run the purge jobs described in the configuration file.
for job in hs.config.retention_purge_jobs:
logger.info("Setting up purge job with config: %s", job)
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 4418d63df7..39b39cd3e2 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -28,6 +28,7 @@ from bisect import bisect
from contextlib import contextmanager
from typing import (
TYPE_CHECKING,
+ Any,
Callable,
Collection,
Dict,
@@ -615,7 +616,7 @@ class PresenceHandler(BasePresenceHandler):
super().__init__(hs)
self.hs = hs
self.server_name = hs.hostname
- self.wheel_timer = WheelTimer()
+ self.wheel_timer: WheelTimer[str] = WheelTimer()
self.notifier = hs.get_notifier()
self._presence_enabled = hs.config.use_presence
@@ -924,7 +925,7 @@ class PresenceHandler(BasePresenceHandler):
prev_state = await self.current_state_for_user(user_id)
- new_fields = {"last_active_ts": self.clock.time_msec()}
+ new_fields: Dict[str, Any] = {"last_active_ts": self.clock.time_msec()}
if prev_state.state == PresenceState.UNAVAILABLE:
new_fields["state"] = PresenceState.ONLINE
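
The two annotations added in this file pin down types that inference gets wrong: a bare `WheelTimer()` is inferred as `WheelTimer[Any]`, and the dict literal `{"last_active_ts": ...}` is inferred as `Dict[str, int]`, which would reject the later `str` assignment to `new_fields["state"]`. A toy sketch of both points (this `Timer` is an illustrative stand-in, not Synapse's `WheelTimer`):

    from typing import Any, Dict, Generic, List, TypeVar

    T = TypeVar("T")

    class Timer(Generic[T]):  # illustrative stand-in, not Synapse's WheelTimer
        def __init__(self) -> None:
            self._items: List[T] = []

        def insert(self, obj: T) -> None:
            self._items.append(obj)

    timer: Timer[str] = Timer()  # as in `self.wheel_timer: WheelTimer[str]`
    timer.insert("@alice:example.org")
    # timer.insert(42)  # mypy would now reject this

    # An unannotated literal would be Dict[str, int], so the str value below
    # would fail to type-check without the explicit Dict[str, Any]:
    new_fields: Dict[str, Any] = {"last_active_ts": 1_234}
    new_fields["state"] = "online"
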
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 20a033d0ba..51adf8762d 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -63,7 +63,7 @@ class ProfileHandler(BaseHandler):
self.user_directory_handler = hs.get_user_directory_handler()
- if hs.config.run_background_tasks:
+ if hs.config.worker.run_background_tasks:
self.clock.looping_call(
self._update_remote_profile_cache, self.PROFILE_UPDATE_MS
)
diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py
index c679a8303e..bd8160e7ed 100644
--- a/synapse/handlers/read_marker.py
+++ b/synapse/handlers/read_marker.py
@@ -28,7 +28,7 @@ logger = logging.getLogger(__name__)
class ReadMarkerHandler(BaseHandler):
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
- self.server_name = hs.config.server_name
+ self.server_name = hs.config.server.server_name
self.store = hs.get_datastore()
self.account_data_handler = hs.get_account_data_handler()
self.read_marker_linearizer = Linearizer(name="read_marker")
diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py
index fb495229a7..a49b8ee4b1 100644
--- a/synapse/handlers/receipts.py
+++ b/synapse/handlers/receipts.py
@@ -29,7 +29,7 @@ class ReceiptsHandler(BaseHandler):
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
- self.server_name = hs.config.server_name
+ self.server_name = hs.config.server.server_name
self.store = hs.get_datastore()
self.event_auth_handler = hs.get_event_auth_handler()
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index c374a1fbc2..38c4993da0 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -102,7 +102,7 @@ class RegistrationHandler(BaseHandler):
self.spam_checker = hs.get_spam_checker()
- if hs.config.worker_app:
+ if hs.config.worker.worker_app:
self._register_client = ReplicationRegisterServlet.make_client(hs)
self._register_device_client = RegisterDeviceReplicationServlet.make_client(
hs
@@ -696,7 +696,7 @@ class RegistrationHandler(BaseHandler):
address: the IP address used to perform the registration.
shadow_banned: Whether to shadow-ban the user
"""
- if self.hs.config.worker_app:
+ if self.hs.config.worker.worker_app:
await self._register_client(
user_id=user_id,
password_hash=password_hash,
@@ -786,7 +786,7 @@ class RegistrationHandler(BaseHandler):
Does the bits that need doing on the main process. Not for use outside this
class and RegisterDeviceReplicationServlet.
"""
- assert not self.hs.config.worker_app
+ assert not self.hs.config.worker.worker_app
valid_until_ms = None
if self.session_lifetime is not None:
if is_guest:
@@ -843,7 +843,7 @@ class RegistrationHandler(BaseHandler):
"""
# TODO: 3pid registration can actually happen on the workers. Consider
# refactoring it.
- if self.hs.config.worker_app:
+ if self.hs.config.worker.worker_app:
await self._post_registration_client(
user_id=user_id, auth_result=auth_result, access_token=access_token
)
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 2932ed8a94..9345ae02e0 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -33,6 +33,7 @@ from synapse.api.constants import (
Membership,
RoomCreationPreset,
RoomEncryptionAlgorithms,
+ RoomTypes,
)
from synapse.api.errors import (
AuthError,
@@ -397,7 +398,7 @@ class RoomCreationHandler(BaseHandler):
initial_state = {}
# Replicate relevant room events
- types_to_copy = (
+ types_to_copy: List[Tuple[str, Optional[str]]] = [
(EventTypes.JoinRules, ""),
(EventTypes.Name, ""),
(EventTypes.Topic, ""),
@@ -408,7 +409,16 @@ class RoomCreationHandler(BaseHandler):
(EventTypes.ServerACL, ""),
(EventTypes.RelatedGroups, ""),
(EventTypes.PowerLevels, ""),
- )
+ ]
+
+ # If the old room was a space, copy over the room type and the rooms in
+ # the space.
+ if (
+ old_room_create_event.content.get(EventContentFields.ROOM_TYPE)
+ == RoomTypes.SPACE
+ ):
+ creation_content[EventContentFields.ROOM_TYPE] = RoomTypes.SPACE
+ types_to_copy.append((EventTypes.SpaceChild, None))
old_room_state_ids = await self.store.get_filtered_current_state_ids(
old_room_id, StateFilter.from_types(types_to_copy)
@@ -419,6 +429,11 @@ class RoomCreationHandler(BaseHandler):
for k, old_event_id in old_room_state_ids.items():
old_event = old_room_state_events.get(old_event_id)
if old_event:
+ # If the event is a space child event with empty content, it was
+ # removed from the space and should be ignored.
+ if k[0] == EventTypes.SpaceChild and not old_event.content:
+ continue
+
initial_state[k] = old_event.content
# deep-copy the power-levels event before we start modifying it
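
The switch from a tuple to a `List[Tuple[str, Optional[str]]]` is what allows the conditional `types_to_copy.append(...)`, and the `None` state key is doing real work: as I understand `StateFilter.from_types`, a `None` state_key acts as a wildcard over all state keys of that event type. That matters for spaces, which hold one `m.space.child` event per child room, each keyed by the child room ID. A sketch of the filter input (event-type strings assumed from the Matrix spec):

    from typing import List, Optional, Tuple

    # A tuple literal would be immutable, hence the change to a list so the
    # space-specific entry can be appended conditionally.
    types_to_copy: List[Tuple[str, Optional[str]]] = [
        ("m.room.join_rules", ""),  # singleton state: exactly one state_key, ""
        ("m.room.name", ""),
    ]

    # None state_key = match every state_key of this type, i.e. every child room.
    types_to_copy.append(("m.space.child", None))
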
diff --git a/synapse/handlers/state_deltas.py b/synapse/handlers/state_deltas.py
index 077c7c0649..d30ba2b724 100644
--- a/synapse/handlers/state_deltas.py
+++ b/synapse/handlers/state_deltas.py
@@ -13,6 +13,7 @@
# limitations under the License.
import logging
+from enum import Enum, auto
from typing import TYPE_CHECKING, Optional
if TYPE_CHECKING:
@@ -21,6 +22,12 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
+class MatchChange(Enum):
+ no_change = auto()
+ now_true = auto()
+ now_false = auto()
+
+
class StateDeltasHandler:
def __init__(self, hs: "HomeServer"):
self.store = hs.get_datastore()
@@ -31,18 +38,12 @@ class StateDeltasHandler:
event_id: Optional[str],
key_name: str,
public_value: str,
- ) -> Optional[bool]:
+ ) -> MatchChange:
"""Given two events check if the `key_name` field in content changed
from not matching `public_value` to doing so.
For example, check if `history_visibility` (`key_name`) changed from
`shared` to `world_readable` (`public_value`).
-
- Returns:
- None if the field in the events either both match `public_value`
- or if neither do, i.e. there has been no change.
- True if it didn't match `public_value` but now does
- False if it did match `public_value` but now doesn't
"""
prev_event = None
event = None
@@ -54,7 +55,7 @@ class StateDeltasHandler:
if not event and not prev_event:
logger.debug("Neither event exists: %r %r", prev_event_id, event_id)
- return None
+ return MatchChange.no_change
prev_value = None
value = None
@@ -68,8 +69,8 @@ class StateDeltasHandler:
logger.debug("prev_value: %r -> value: %r", prev_value, value)
if value == public_value and prev_value != public_value:
- return True
+ return MatchChange.now_true
elif value != public_value and prev_value == public_value:
- return False
+ return MatchChange.now_false
else:
- return None
+ return MatchChange.no_change
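
The old `Optional[bool]` return collapsed badly under truthiness: `None` ("no change") and `False` ("stopped matching") are both falsy, so a call site written `if change:` silently lumped them together. The three-valued enum forces every caller into explicit `is` comparisons, as the user_directory.py hunks below show. A self-contained sketch of the safer dispatch:

    from enum import Enum, auto

    class MatchChange(Enum):
        no_change = auto()
        now_true = auto()
        now_false = auto()

    def describe(change: MatchChange) -> str:
        if change is MatchChange.now_true:
            return "started matching public_value"
        elif change is MatchChange.now_false:
            return "stopped matching public_value"
        return "no change"

    # With Optional[bool], a bare `if change:` treated this case like "no change":
    print(describe(MatchChange.now_false))
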
diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py
index 47f2e2a0c1..b64ce8cab8 100644
--- a/synapse/handlers/stats.py
+++ b/synapse/handlers/stats.py
@@ -54,7 +54,7 @@ class StatsHandler:
# Guard to ensure we only process deltas one at a time
self._is_processing = False
- if self.stats_enabled and hs.config.run_background_tasks:
+ if self.stats_enabled and hs.config.worker.run_background_tasks:
self.notifier.add_replication_callback(self.notify_new_event)
# We kick this off so that we don't have to wait for a change before
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index a97c448595..9cea011e62 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -53,7 +53,7 @@ class FollowerTypingHandler:
def __init__(self, hs: "HomeServer"):
self.store = hs.get_datastore()
- self.server_name = hs.config.server_name
+ self.server_name = hs.config.server.server_name
self.clock = hs.get_clock()
self.is_mine_id = hs.is_mine_id
@@ -73,7 +73,7 @@ class FollowerTypingHandler:
self._room_typing: Dict[str, Set[str]] = {}
self._member_last_federation_poke: Dict[RoomMember, int] = {}
- self.wheel_timer = WheelTimer(bucket_size=5000)
+ self.wheel_timer: WheelTimer[RoomMember] = WheelTimer(bucket_size=5000)
self._latest_room_serial = 0
self.clock.looping_call(self._handle_timeouts, 5000)
diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py
index 6edb1da50a..6faa1d84be 100644
--- a/synapse/handlers/user_directory.py
+++ b/synapse/handlers/user_directory.py
@@ -17,7 +17,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional
import synapse.metrics
from synapse.api.constants import EventTypes, HistoryVisibility, JoinRules, Membership
-from synapse.handlers.state_deltas import StateDeltasHandler
+from synapse.handlers.state_deltas import MatchChange, StateDeltasHandler
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.roommember import ProfileInfo
from synapse.types import JsonDict
@@ -30,14 +30,26 @@ logger = logging.getLogger(__name__)
class UserDirectoryHandler(StateDeltasHandler):
- """Handles querying of and keeping updated the user_directory.
+ """Handles queries and updates for the user_directory.
N.B.: ASSUMES IT IS THE ONLY THING THAT MODIFIES THE USER DIRECTORY
- The user directory is filled with users who this server can see are joined to a
- world_readable or publicly joinable room. We keep a database table up to date
- by streaming changes of the current state and recalculating whether users should
- be in the directory or not when necessary.
+ When a local user searches the user_directory, we report two kinds of users:
+
+ - users this server can see are joined to a world_readable or publicly
+ joinable room, and
+ - users belonging to a private room shared by that local user.
+
+ The two cases are tracked separately in the `users_in_public_rooms` and
+ `users_who_share_private_rooms` tables. Both kinds of users have their
+ username and avatar tracked in a `user_directory` table.
+
+ This handler has three responsibilities:
+ 1. Forwarding requests to `/user_directory/search` to the UserDirectoryStore.
+ 2. Providing hooks for the application to call when local users are added,
+ removed, or have their profile changed.
+ 3. Listening for room state changes that indicate remote users have
+ joined or left a room, or that their profile has changed.
"""
def __init__(self, hs: "HomeServer"):
@@ -130,7 +142,7 @@ class UserDirectoryHandler(StateDeltasHandler):
user_id, profile.display_name, profile.avatar_url
)
- async def handle_user_deactivated(self, user_id: str) -> None:
+ async def handle_local_user_deactivated(self, user_id: str) -> None:
"""Called when a user ID is deactivated"""
# FIXME(#3714): We should probably do this in the same worker as all
# the other changes.
@@ -196,7 +208,7 @@ class UserDirectoryHandler(StateDeltasHandler):
public_value=Membership.JOIN,
)
- if change is False:
+ if change is MatchChange.now_false:
# Need to check if the server left the room entirely, if so
# we might need to remove all the users in that room
is_in_room = await self.store.is_host_joined(
@@ -219,14 +231,14 @@ class UserDirectoryHandler(StateDeltasHandler):
is_support = await self.store.is_support_user(state_key)
if not is_support:
- if change is None:
+ if change is MatchChange.no_change:
# Handle any profile changes
await self._handle_profile_change(
state_key, room_id, prev_event_id, event_id
)
continue
- if change: # The user joined
+ if change is MatchChange.now_true: # The user joined
event = await self.store.get_event(event_id, allow_none=True)
# It isn't expected for this event to not exist, but we
# don't want the entire background process to break.
@@ -263,14 +275,14 @@ class UserDirectoryHandler(StateDeltasHandler):
logger.debug("Handling change for %s: %s", typ, room_id)
if typ == EventTypes.RoomHistoryVisibility:
- change = await self._get_key_change(
+ publicness = await self._get_key_change(
prev_event_id,
event_id,
key_name="history_visibility",
public_value=HistoryVisibility.WORLD_READABLE,
)
elif typ == EventTypes.JoinRules:
- change = await self._get_key_change(
+ publicness = await self._get_key_change(
prev_event_id,
event_id,
key_name="join_rule",
@@ -278,9 +290,7 @@ class UserDirectoryHandler(StateDeltasHandler):
)
else:
raise Exception("Invalid event type")
- # If change is None, no change. True => become world_readable/public,
- # False => was world_readable/public
- if change is None:
+ if publicness is MatchChange.no_change:
logger.debug("No change")
return
@@ -290,13 +300,13 @@ class UserDirectoryHandler(StateDeltasHandler):
room_id
)
- logger.debug("Change: %r, is_public: %r", change, is_public)
+ logger.debug("Change: %r, publicness: %r", publicness, is_public)
- if change and not is_public:
+ if publicness is MatchChange.now_true and not is_public:
# If we became world readable but room isn't currently public then
# we ignore the change
return
- elif not change and is_public:
+ elif publicness is MatchChange.now_false and is_public:
# If we stopped being world readable but are still public,
# ignore the change
return
|