diff --git a/synapse/__init__.py b/synapse/__init__.py
index 1613941759..b1369aca8f 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -20,8 +20,6 @@ import json
import os
import sys
-from matrix_common.versionstring import get_distribution_version_string
-
# Check that we're not running on an unsupported Python version.
if sys.version_info < (3, 7):
print("Synapse requires Python 3.7 or above.")
@@ -70,7 +68,9 @@ try:
except ImportError:
pass
-__version__ = get_distribution_version_string("matrix-synapse")
+import synapse.util
+
+__version__ = synapse.util.SYNAPSE_VERSION
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
# We import here so that we don't have to install a bunch of deps when
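
The version string is now read from a single `SYNAPSE_VERSION` constant in `synapse.util` instead of calling `matrix_common.versionstring.get_distribution_version_string` at every call site. The constant's definition is not part of this diff; a minimal sketch of what it plausibly looks like, assuming it still delegates to `matrix_common`:

    # Hypothetical sketch of the new constant in synapse/util/__init__.py;
    # the real definition is outside this diff.
    from matrix_common.versionstring import get_distribution_version_string

    SYNAPSE_VERSION = get_distribution_version_string("matrix-synapse")

A side effect visible above is that the import now happens after the Python-version check, rather than at the top of the module.
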
diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py
index 361b51d2fa..9586086c03 100755
--- a/synapse/_scripts/synapse_port_db.py
+++ b/synapse/_scripts/synapse_port_db.py
@@ -40,7 +40,6 @@ from typing import (
)
import yaml
-from matrix_common.versionstring import get_distribution_version_string
from typing_extensions import TypedDict
from twisted.internet import defer, reactor as reactor_
@@ -62,7 +61,6 @@ from synapse.storage.databases.main.end_to_end_keys import EndToEndKeyBackground
from synapse.storage.databases.main.events_bg_updates import (
EventsBackgroundUpdatesStore,
)
-from synapse.storage.databases.main.group_server import GroupServerStore
from synapse.storage.databases.main.media_repository import (
MediaRepositoryBackgroundUpdateStore,
)
@@ -84,7 +82,7 @@ from synapse.storage.databases.state.bg_updates import StateBackgroundUpdateStor
from synapse.storage.engines import create_engine
from synapse.storage.prepare_database import prepare_database
from synapse.types import ISynapseReactor
-from synapse.util import Clock
+from synapse.util import SYNAPSE_VERSION, Clock
# Cast safety: Twisted does some naughty magic which replaces the
# twisted.internet.reactor module with a Reactor instance at runtime.
@@ -219,7 +217,6 @@ class Store(
PushRuleStore,
PusherWorkerStore,
PresenceBackgroundUpdateStore,
- GroupServerStore,
):
def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)
@@ -258,9 +255,7 @@ class MockHomeserver:
self.clock = Clock(reactor)
self.config = config
self.hostname = config.server.server_name
- self.version_string = "Synapse/" + get_distribution_version_string(
- "matrix-synapse"
- )
+ self.version_string = SYNAPSE_VERSION
def get_clock(self) -> Clock:
return self.clock
diff --git a/synapse/_scripts/update_synapse_database.py b/synapse/_scripts/update_synapse_database.py
index c443522c05..b4aeae6dd5 100755
--- a/synapse/_scripts/update_synapse_database.py
+++ b/synapse/_scripts/update_synapse_database.py
@@ -19,7 +19,6 @@ import sys
from typing import cast
import yaml
-from matrix_common.versionstring import get_distribution_version_string
from twisted.internet import defer, reactor as reactor_
@@ -28,6 +27,7 @@ from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.types import ISynapseReactor
+from synapse.util import SYNAPSE_VERSION
# Cast safety: Twisted does some naughty magic which replaces the
# twisted.internet.reactor module with a Reactor instance at runtime.
@@ -43,8 +43,7 @@ class MockHomeserver(HomeServer):
hostname=config.server.server_name,
config=config,
reactor=reactor,
- version_string="Synapse/"
- + get_distribution_version_string("matrix-synapse"),
+ version_string=f"Synapse/{SYNAPSE_VERSION}",
)
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 5a410f805a..c037ccb984 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -20,7 +20,6 @@ from netaddr import IPAddress
from twisted.web.server import Request
from synapse import event_auth
-from synapse.api.auth_blocking import AuthBlocking
from synapse.api.constants import EventTypes, HistoryVisibility, Membership
from synapse.api.errors import (
AuthError,
@@ -67,8 +66,6 @@ class Auth:
10000, "token_cache"
)
- self._auth_blocking = AuthBlocking(self.hs)
-
self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips
self._track_puppeted_user_ips = hs.config.api.track_puppeted_user_ips
self._macaroon_secret_key = hs.config.key.macaroon_secret_key
@@ -711,14 +708,3 @@ class Auth:
"User %s not in room %s, and room previews are disabled"
% (user_id, room_id),
)
-
- async def check_auth_blocking(
- self,
- user_id: Optional[str] = None,
- threepid: Optional[dict] = None,
- user_type: Optional[str] = None,
- requester: Optional[Requester] = None,
- ) -> None:
- await self._auth_blocking.check_auth_blocking(
- user_id=user_id, threepid=threepid, user_type=user_type, requester=requester
- )
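
`Auth` no longer wraps `AuthBlocking`: the `check_auth_blocking` facade is deleted and callers talk to the blocking checker directly, as the handler changes further down show (`hs.get_auth_blocking()` in handlers/auth.py, message.py, register.py, room.py and sync.py). The new calling convention, sketched:

    # Before (via the Auth facade removed above):
    #     await hs.get_auth().check_auth_blocking(user_id)
    # After: fetch the dedicated AuthBlocking object from the homeserver.
    auth_blocking = hs.get_auth_blocking()
    await auth_blocking.check_auth_blocking(user_id)
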
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index a3446ac6e8..84e389a6cd 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -37,7 +37,6 @@ from typing import (
)
from cryptography.utils import CryptographyDeprecationWarning
-from matrix_common.versionstring import get_distribution_version_string
from typing_extensions import ParamSpec
import twisted
@@ -68,6 +67,7 @@ from synapse.metrics import install_gc_manager, register_threadpool
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.metrics.jemalloc import setup_jemalloc_stats
from synapse.types import ISynapseReactor
+from synapse.util import SYNAPSE_VERSION
from synapse.util.caches.lrucache import setup_expire_lru_cache_entries
from synapse.util.daemonize import daemonize_process
from synapse.util.gai_resolver import GAIResolver
@@ -540,7 +540,7 @@ def setup_sentry(hs: "HomeServer") -> None:
sentry_sdk.init(
dsn=hs.config.metrics.sentry_dsn,
- release=get_distribution_version_string("matrix-synapse"),
+ release=SYNAPSE_VERSION,
)
# We set some default tags that give some context to this instance
diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py
index 6fedf681f8..561621a285 100644
--- a/synapse/app/admin_cmd.py
+++ b/synapse/app/admin_cmd.py
@@ -19,8 +19,6 @@ import sys
import tempfile
from typing import List, Optional
-from matrix_common.versionstring import get_distribution_version_string
-
from twisted.internet import defer, task
import synapse
@@ -43,6 +41,7 @@ from synapse.replication.slave.storage.registration import SlavedRegistrationSto
from synapse.server import HomeServer
from synapse.storage.databases.main.room import RoomWorkerStore
from synapse.types import StateMap
+from synapse.util import SYNAPSE_VERSION
from synapse.util.logcontext import LoggingContext
logger = logging.getLogger("synapse.app.admin_cmd")
@@ -220,7 +219,7 @@ def start(config_options: List[str]) -> None:
ss = AdminCmdServer(
config.server.server_name,
config=config,
- version_string="Synapse/" + get_distribution_version_string("matrix-synapse"),
+ version_string=f"Synapse/{SYNAPSE_VERSION}",
)
setup_logging(ss, config, use_worker_options=True)
diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py
index 89f8998f0e..4a987fb759 100644
--- a/synapse/app/generic_worker.py
+++ b/synapse/app/generic_worker.py
@@ -16,8 +16,6 @@ import logging
import sys
from typing import Dict, List, Optional, Tuple
-from matrix_common.versionstring import get_distribution_version_string
-
from twisted.internet import address
from twisted.web.resource import Resource
@@ -121,6 +119,7 @@ from synapse.storage.databases.main.transactions import TransactionWorkerStore
from synapse.storage.databases.main.ui_auth import UIAuthWorkerStore
from synapse.storage.databases.main.user_directory import UserDirectoryStore
from synapse.types import JsonDict
+from synapse.util import SYNAPSE_VERSION
from synapse.util.httpresourcetree import create_resource_tree
logger = logging.getLogger("synapse.app.generic_worker")
@@ -447,7 +446,7 @@ def start(config_options: List[str]) -> None:
hs = GenericWorkerServer(
config.server.server_name,
config=config,
- version_string="Synapse/" + get_distribution_version_string("matrix-synapse"),
+ version_string=f"Synapse/{SYNAPSE_VERSION}",
)
setup_logging(hs, config, use_worker_options=True)
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 4c6c0658ab..745e704141 100644
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -18,8 +18,6 @@ import os
import sys
from typing import Dict, Iterable, List
-from matrix_common.versionstring import get_distribution_version_string
-
from twisted.internet.tcp import Port
from twisted.web.resource import EncodingResourceWrapper, Resource
from twisted.web.server import GzipEncoderFactory
@@ -69,7 +67,7 @@ from synapse.rest.synapse.client import build_synapse_client_resource_tree
from synapse.rest.well_known import well_known_resource
from synapse.server import HomeServer
from synapse.storage import DataStore
-from synapse.util.check_dependencies import check_requirements
+from synapse.util.check_dependencies import VERSION, check_requirements
from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.module_loader import load_module
@@ -371,7 +369,7 @@ def setup(config_options: List[str]) -> SynapseHomeServer:
hs = SynapseHomeServer(
config.server.server_name,
config=config,
- version_string="Synapse/" + get_distribution_version_string("matrix-synapse"),
+ version_string=f"Synapse/{VERSION}",
)
synapse.config.logger.setup_logging(hs, config, use_worker_options=False)
diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py
index f2dfd49b07..0a285dba31 100644
--- a/synapse/config/experimental.py
+++ b/synapse/config/experimental.py
@@ -84,3 +84,6 @@ class ExperimentalConfig(Config):
# MSC3772: A push rule for mutual relations.
self.msc3772_enabled: bool = experimental.get("msc3772_enabled", False)
+
+ # MSC3715: dir param on /relations.
+ self.msc3715_enabled: bool = experimental.get("msc3715_enabled", False)
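
The new flag is read from the `experimental_features` section of the config, like its MSC siblings. A hedged sketch of how a /relations servlet might gate the client-supplied `dir` parameter on it (the servlet plumbing here is illustrative, not part of this diff):

    # Illustrative: honour ?dir=f only when MSC3715 support is switched on.
    if hs.config.experimental.msc3715_enabled:
        direction = parse_string(
            request, "dir", default="b", allowed_values=["f", "b"]
        )
    else:
        direction = "b"
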
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index 470b8b4492..82a5b5fa12 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -22,7 +22,6 @@ from string import Template
from typing import TYPE_CHECKING, Any, Dict, Optional
import yaml
-from matrix_common.versionstring import get_distribution_version_string
from zope.interface import implementer
from twisted.logger import (
@@ -37,6 +36,7 @@ from synapse.logging.context import LoggingContextFilter
from synapse.logging.filter import MetadataFilter
from synapse.types import JsonDict
+from ..util import SYNAPSE_VERSION
from ._base import Config, ConfigError
if TYPE_CHECKING:
@@ -349,7 +349,7 @@ def setup_logging(
logging.warning(
"Server %s version %s",
sys.argv[0],
- get_distribution_version_string("matrix-synapse"),
+ SYNAPSE_VERSION,
)
logging.info("Server hostname: %s", config.server.server_name)
logging.info("Instance name: %s", hs.get_instance_name())
diff --git a/synapse/event_auth.py b/synapse/event_auth.py
index 4c0b587a76..e23503c1e0 100644
--- a/synapse/event_auth.py
+++ b/synapse/event_auth.py
@@ -45,9 +45,7 @@ if typing.TYPE_CHECKING:
logger = logging.getLogger(__name__)
-def validate_event_for_room_version(
- room_version_obj: RoomVersion, event: "EventBase"
-) -> None:
+def validate_event_for_room_version(event: "EventBase") -> None:
"""Ensure that the event complies with the limits, and has the right signatures
NB: does not *validate* the signatures - it assumes that any signatures present
@@ -60,12 +58,10 @@ def validate_event_for_room_version(
NB: This is used to check events that have been received over federation. As such,
it can only enforce the checks specified in the relevant room version, to avoid
a split-brain situation where some servers accept such events, and others reject
- them.
-
- TODO: consider moving this into EventValidator
+    them. See also EventValidator, which contains extra checks that are applied only to
+    locally-generated events.
Args:
- room_version_obj: the version of the room which contains this event
event: the event to be checked
Raises:
@@ -103,7 +99,7 @@ def validate_event_for_room_version(
raise AuthError(403, "Event not signed by sending server")
is_invite_via_allow_rule = (
- room_version_obj.msc3083_join_rules
+ event.room_version.msc3083_join_rules
and event.type == EventTypes.Member
and event.membership == Membership.JOIN
and EventContentFields.AUTHORISING_USER in event.content
@@ -117,7 +113,6 @@ def validate_event_for_room_version(
def check_auth_rules_for_event(
- room_version_obj: RoomVersion,
event: "EventBase",
auth_events: Iterable["EventBase"],
) -> None:
@@ -136,7 +131,6 @@ def check_auth_rules_for_event(
a bunch of other tests.
Args:
- room_version_obj: the version of the room
event: the event being checked.
auth_events: the room state to check the events against.
@@ -205,7 +199,10 @@ def check_auth_rules_for_event(
raise AuthError(403, "This room has been marked as unfederatable.")
# 4. If type is m.room.aliases
- if event.type == EventTypes.Aliases and room_version_obj.special_case_aliases_auth:
+ if (
+ event.type == EventTypes.Aliases
+ and event.room_version.special_case_aliases_auth
+ ):
# 4a. If event has no state_key, reject
if not event.is_state():
raise AuthError(403, "Alias event must be a state event")
@@ -225,7 +222,7 @@ def check_auth_rules_for_event(
# 5. If type is m.room.membership
if event.type == EventTypes.Member:
- _is_membership_change_allowed(room_version_obj, event, auth_dict)
+ _is_membership_change_allowed(event.room_version, event, auth_dict)
logger.debug("Allowing! %s", event)
return
@@ -247,17 +244,17 @@ def check_auth_rules_for_event(
_can_send_event(event, auth_dict)
if event.type == EventTypes.PowerLevels:
- _check_power_levels(room_version_obj, event, auth_dict)
+ _check_power_levels(event.room_version, event, auth_dict)
if event.type == EventTypes.Redaction:
- check_redaction(room_version_obj, event, auth_dict)
+ check_redaction(event.room_version, event, auth_dict)
if (
event.type == EventTypes.MSC2716_INSERTION
or event.type == EventTypes.MSC2716_BATCH
or event.type == EventTypes.MSC2716_MARKER
):
- check_historical(room_version_obj, event, auth_dict)
+ check_historical(event.room_version, event, auth_dict)
logger.debug("Allowing! %s", event)
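
`validate_event_for_room_version` and `check_auth_rules_for_event` now read the room version from `event.room_version` instead of taking a `room_version_obj` argument, which removes a parameter from every call site in the handlers below. The before/after call shape:

    # Before: the caller looked the version up and threaded it through.
    #     version_id = await store.get_room_version_id(event.room_id)
    #     room_version_obj = KNOWN_ROOM_VERSIONS[version_id]
    #     check_auth_rules_for_event(room_version_obj, event, auth_events)
    # After: the event carries its own room version.
    validate_event_for_room_version(event)
    check_auth_rules_for_event(event, auth_events)
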
diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py
index d2e06c754e..32712d2042 100644
--- a/synapse/events/spamcheck.py
+++ b/synapse/events/spamcheck.py
@@ -28,7 +28,10 @@ from typing import (
Union,
)
-from synapse.api.errors import Codes
+# `Literal` was added to `typing` in Python 3.8; use the backport while Python 3.7 is supported.
+from typing_extensions import Literal
+
+import synapse
from synapse.rest.media.v1._base import FileInfo
from synapse.rest.media.v1.media_storage import ReadableFileWrapper
from synapse.spam_checker_api import RegistrationBehaviour
@@ -47,12 +50,12 @@ CHECK_EVENT_FOR_SPAM_CALLBACK = Callable[
Awaitable[
Union[
str,
- Codes,
+ "synapse.api.errors.Codes",
# Highly experimental, not officially part of the spamchecker API, may
# disappear without warning depending on the results of ongoing
# experiments.
# Use this to return additional information as part of an error.
- Tuple[Codes, Dict],
+ Tuple["synapse.api.errors.Codes", Dict],
# Deprecated
bool,
]
@@ -62,12 +65,72 @@ SHOULD_DROP_FEDERATED_EVENT_CALLBACK = Callable[
["synapse.events.EventBase"],
Awaitable[Union[bool, str]],
]
-USER_MAY_JOIN_ROOM_CALLBACK = Callable[[str, str, bool], Awaitable[bool]]
-USER_MAY_INVITE_CALLBACK = Callable[[str, str, str], Awaitable[bool]]
-USER_MAY_SEND_3PID_INVITE_CALLBACK = Callable[[str, str, str, str], Awaitable[bool]]
-USER_MAY_CREATE_ROOM_CALLBACK = Callable[[str], Awaitable[bool]]
-USER_MAY_CREATE_ROOM_ALIAS_CALLBACK = Callable[[str, RoomAlias], Awaitable[bool]]
-USER_MAY_PUBLISH_ROOM_CALLBACK = Callable[[str, str], Awaitable[bool]]
+USER_MAY_JOIN_ROOM_CALLBACK = Callable[
+ [str, str, bool],
+ Awaitable[
+ Union[
+ Literal["NOT_SPAM"],
+ "synapse.api.errors.Codes",
+ # Deprecated
+ bool,
+ ]
+ ],
+]
+USER_MAY_INVITE_CALLBACK = Callable[
+ [str, str, str],
+ Awaitable[
+ Union[
+ Literal["NOT_SPAM"],
+ "synapse.api.errors.Codes",
+ # Deprecated
+ bool,
+ ]
+ ],
+]
+USER_MAY_SEND_3PID_INVITE_CALLBACK = Callable[
+ [str, str, str, str],
+ Awaitable[
+ Union[
+ Literal["NOT_SPAM"],
+ "synapse.api.errors.Codes",
+ # Deprecated
+ bool,
+ ]
+ ],
+]
+USER_MAY_CREATE_ROOM_CALLBACK = Callable[
+ [str],
+ Awaitable[
+ Union[
+ Literal["NOT_SPAM"],
+ "synapse.api.errors.Codes",
+ # Deprecated
+ bool,
+ ]
+ ],
+]
+USER_MAY_CREATE_ROOM_ALIAS_CALLBACK = Callable[
+ [str, RoomAlias],
+ Awaitable[
+ Union[
+ Literal["NOT_SPAM"],
+ "synapse.api.errors.Codes",
+ # Deprecated
+ bool,
+ ]
+ ],
+]
+USER_MAY_PUBLISH_ROOM_CALLBACK = Callable[
+ [str, str],
+ Awaitable[
+ Union[
+ Literal["NOT_SPAM"],
+ "synapse.api.errors.Codes",
+ # Deprecated
+ bool,
+ ]
+ ],
+]
CHECK_USERNAME_FOR_SPAM_CALLBACK = Callable[[UserProfile], Awaitable[bool]]
LEGACY_CHECK_REGISTRATION_FOR_SPAM_CALLBACK = Callable[
[
@@ -88,7 +151,14 @@ CHECK_REGISTRATION_FOR_SPAM_CALLBACK = Callable[
]
CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK = Callable[
[ReadableFileWrapper, FileInfo],
- Awaitable[bool],
+ Awaitable[
+ Union[
+ Literal["NOT_SPAM"],
+ "synapse.api.errors.Codes",
+ # Deprecated
+ bool,
+ ]
+ ],
]
@@ -181,7 +251,7 @@ def load_legacy_spam_checkers(hs: "synapse.server.HomeServer") -> None:
class SpamChecker:
- NOT_SPAM = "NOT_SPAM"
+ NOT_SPAM: Literal["NOT_SPAM"] = "NOT_SPAM"
def __init__(self, hs: "synapse.server.HomeServer") -> None:
self.hs = hs
@@ -275,7 +345,7 @@ class SpamChecker:
async def check_event_for_spam(
self, event: "synapse.events.EventBase"
- ) -> Union[Tuple[Codes, Dict], str]:
+ ) -> Union[Tuple["synapse.api.errors.Codes", Dict], str]:
"""Checks if a given event is considered "spammy" by this server.
If the server considers an event spammy, then it will be rejected if
@@ -306,7 +376,7 @@ class SpamChecker:
elif res is True:
# This spam-checker rejects the event with deprecated
# return value `True`
- return Codes.FORBIDDEN
+ return (synapse.api.errors.Codes.FORBIDDEN, {})
elif not isinstance(res, str):
# mypy complains that we can't reach this code because of the
# return type in CHECK_EVENT_FOR_SPAM_CALLBACK, but we don't know
@@ -352,7 +422,7 @@ class SpamChecker:
async def user_may_join_room(
self, user_id: str, room_id: str, is_invited: bool
- ) -> bool:
+ ) -> Union["synapse.api.errors.Codes", Literal["NOT_SPAM"]]:
"""Checks if a given users is allowed to join a room.
Not called when a user creates a room.
@@ -362,54 +432,70 @@ class SpamChecker:
is_invited: Whether the user is invited into the room
Returns:
- Whether the user may join the room
+ NOT_SPAM if the operation is permitted, Codes otherwise.
"""
for callback in self._user_may_join_room_callbacks:
with Measure(
self.clock, "{}.{}".format(callback.__module__, callback.__qualname__)
):
- may_join_room = await delay_cancellation(
- callback(user_id, room_id, is_invited)
- )
- if may_join_room is False:
- return False
+ res = await delay_cancellation(callback(user_id, room_id, is_invited))
+ # Normalize return values to `Codes` or `"NOT_SPAM"`.
+ if res is True or res is self.NOT_SPAM:
+ continue
+ elif res is False:
+ return synapse.api.errors.Codes.FORBIDDEN
+ elif isinstance(res, synapse.api.errors.Codes):
+ return res
+ else:
+ logger.warning(
+ "Module returned invalid value, rejecting join as spam"
+ )
+ return synapse.api.errors.Codes.FORBIDDEN
- return True
+ # No spam-checker has rejected the request, let it pass.
+ return self.NOT_SPAM
async def user_may_invite(
self, inviter_userid: str, invitee_userid: str, room_id: str
- ) -> bool:
+ ) -> Union["synapse.api.errors.Codes", Literal["NOT_SPAM"]]:
"""Checks if a given user may send an invite
- If this method returns false, the invite will be rejected.
-
Args:
inviter_userid: The user ID of the sender of the invitation
invitee_userid: The user ID targeted in the invitation
room_id: The room ID
Returns:
- True if the user may send an invite, otherwise False
+ NOT_SPAM if the operation is permitted, Codes otherwise.
"""
for callback in self._user_may_invite_callbacks:
with Measure(
self.clock, "{}.{}".format(callback.__module__, callback.__qualname__)
):
- may_invite = await delay_cancellation(
+ res = await delay_cancellation(
callback(inviter_userid, invitee_userid, room_id)
)
- if may_invite is False:
- return False
+ # Normalize return values to `Codes` or `"NOT_SPAM"`.
+ if res is True or res is self.NOT_SPAM:
+ continue
+ elif res is False:
+ return synapse.api.errors.Codes.FORBIDDEN
+ elif isinstance(res, synapse.api.errors.Codes):
+ return res
+ else:
+ logger.warning(
+ "Module returned invalid value, rejecting invite as spam"
+ )
+ return synapse.api.errors.Codes.FORBIDDEN
- return True
+ # No spam-checker has rejected the request, let it pass.
+ return self.NOT_SPAM
async def user_may_send_3pid_invite(
self, inviter_userid: str, medium: str, address: str, room_id: str
- ) -> bool:
+ ) -> Union["synapse.api.errors.Codes", Literal["NOT_SPAM"]]:
"""Checks if a given user may invite a given threepid into the room
- If this method returns false, the threepid invite will be rejected.
-
Note that if the threepid is already associated with a Matrix user ID, Synapse
will call user_may_invite with said user ID instead.
@@ -420,88 +506,113 @@ class SpamChecker:
room_id: The room ID
Returns:
- True if the user may send the invite, otherwise False
+ NOT_SPAM if the operation is permitted, Codes otherwise.
"""
for callback in self._user_may_send_3pid_invite_callbacks:
with Measure(
self.clock, "{}.{}".format(callback.__module__, callback.__qualname__)
):
- may_send_3pid_invite = await delay_cancellation(
+ res = await delay_cancellation(
callback(inviter_userid, medium, address, room_id)
)
- if may_send_3pid_invite is False:
- return False
+ # Normalize return values to `Codes` or `"NOT_SPAM"`.
+ if res is True or res is self.NOT_SPAM:
+ continue
+ elif res is False:
+ return synapse.api.errors.Codes.FORBIDDEN
+ elif isinstance(res, synapse.api.errors.Codes):
+ return res
+ else:
+ logger.warning(
+ "Module returned invalid value, rejecting 3pid invite as spam"
+ )
+ return synapse.api.errors.Codes.FORBIDDEN
- return True
+ return self.NOT_SPAM
- async def user_may_create_room(self, userid: str) -> bool:
+ async def user_may_create_room(
+ self, userid: str
+ ) -> Union["synapse.api.errors.Codes", Literal["NOT_SPAM"]]:
"""Checks if a given user may create a room
- If this method returns false, the creation request will be rejected.
-
Args:
userid: The ID of the user attempting to create a room
-
- Returns:
- True if the user may create a room, otherwise False
"""
for callback in self._user_may_create_room_callbacks:
with Measure(
self.clock, "{}.{}".format(callback.__module__, callback.__qualname__)
):
- may_create_room = await delay_cancellation(callback(userid))
- if may_create_room is False:
- return False
+ res = await delay_cancellation(callback(userid))
+ if res is True or res is self.NOT_SPAM:
+ continue
+ elif res is False:
+ return synapse.api.errors.Codes.FORBIDDEN
+ elif isinstance(res, synapse.api.errors.Codes):
+ return res
+ else:
+ logger.warning(
+ "Module returned invalid value, rejecting room creation as spam"
+ )
+ return synapse.api.errors.Codes.FORBIDDEN
- return True
+ return self.NOT_SPAM
async def user_may_create_room_alias(
self, userid: str, room_alias: RoomAlias
- ) -> bool:
+ ) -> Union["synapse.api.errors.Codes", Literal["NOT_SPAM"]]:
"""Checks if a given user may create a room alias
- If this method returns false, the association request will be rejected.
-
Args:
userid: The ID of the user attempting to create a room alias
room_alias: The alias to be created
- Returns:
- True if the user may create a room alias, otherwise False
"""
for callback in self._user_may_create_room_alias_callbacks:
with Measure(
self.clock, "{}.{}".format(callback.__module__, callback.__qualname__)
):
- may_create_room_alias = await delay_cancellation(
- callback(userid, room_alias)
- )
- if may_create_room_alias is False:
- return False
+ res = await delay_cancellation(callback(userid, room_alias))
+ if res is True or res is self.NOT_SPAM:
+ continue
+ elif res is False:
+ return synapse.api.errors.Codes.FORBIDDEN
+ elif isinstance(res, synapse.api.errors.Codes):
+ return res
+ else:
+ logger.warning(
+                    "Module returned invalid value, rejecting alias creation as spam"
+ )
+ return synapse.api.errors.Codes.FORBIDDEN
- return True
+ return self.NOT_SPAM
- async def user_may_publish_room(self, userid: str, room_id: str) -> bool:
+ async def user_may_publish_room(
+ self, userid: str, room_id: str
+ ) -> Union["synapse.api.errors.Codes", Literal["NOT_SPAM"]]:
"""Checks if a given user may publish a room to the directory
- If this method returns false, the publish request will be rejected.
-
Args:
userid: The user ID attempting to publish the room
room_id: The ID of the room that would be published
-
- Returns:
- True if the user may publish the room, otherwise False
"""
for callback in self._user_may_publish_room_callbacks:
with Measure(
self.clock, "{}.{}".format(callback.__module__, callback.__qualname__)
):
- may_publish_room = await delay_cancellation(callback(userid, room_id))
- if may_publish_room is False:
- return False
+ res = await delay_cancellation(callback(userid, room_id))
+ if res is True or res is self.NOT_SPAM:
+ continue
+ elif res is False:
+ return synapse.api.errors.Codes.FORBIDDEN
+ elif isinstance(res, synapse.api.errors.Codes):
+ return res
+ else:
+ logger.warning(
+ "Module returned invalid value, rejecting room publication as spam"
+ )
+ return synapse.api.errors.Codes.FORBIDDEN
- return True
+ return self.NOT_SPAM
async def check_username_for_spam(self, user_profile: UserProfile) -> bool:
"""Checks if a user ID or display name are considered "spammy" by this server.
@@ -567,7 +678,7 @@ class SpamChecker:
async def check_media_file_for_spam(
self, file_wrapper: ReadableFileWrapper, file_info: FileInfo
- ) -> bool:
+ ) -> Union["synapse.api.errors.Codes", Literal["NOT_SPAM"]]:
"""Checks if a piece of newly uploaded media should be blocked.
This will be called for local uploads, downloads of remote media, each
@@ -580,31 +691,37 @@ class SpamChecker:
async def check_media_file_for_spam(
self, file: ReadableFileWrapper, file_info: FileInfo
- ) -> bool:
+ ) -> Union[Codes, Literal["NOT_SPAM"]]:
buffer = BytesIO()
await file.write_chunks_to(buffer.write)
if buffer.getvalue() == b"Hello World":
- return True
+ return synapse.module_api.NOT_SPAM
- return False
+ return Codes.FORBIDDEN
Args:
file: An object that allows reading the contents of the media.
file_info: Metadata about the file.
-
- Returns:
- True if the media should be blocked or False if it should be
- allowed.
"""
for callback in self._check_media_file_for_spam_callbacks:
with Measure(
self.clock, "{}.{}".format(callback.__module__, callback.__qualname__)
):
- spam = await delay_cancellation(callback(file_wrapper, file_info))
- if spam:
- return True
+ res = await delay_cancellation(callback(file_wrapper, file_info))
+ # Normalize return values to `Codes` or `"NOT_SPAM"`.
+ if res is False or res is self.NOT_SPAM:
+ continue
+ elif res is True:
+ return synapse.api.errors.Codes.FORBIDDEN
+ elif isinstance(res, synapse.api.errors.Codes):
+ return res
+ else:
+ logger.warning(
+ "Module returned invalid value, rejecting media file as spam"
+ )
+ return synapse.api.errors.Codes.FORBIDDEN
- return False
+ return self.NOT_SPAM
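
With the normalised return values above, a spam-checker module returns `synapse.module_api.NOT_SPAM` to allow an action and a `Codes` value to reject it; plain booleans still work but are deprecated. A minimal module callback against the new `user_may_join_room` signature (the blocked-room rule is purely illustrative):

    from typing import Union

    from typing_extensions import Literal

    import synapse.api.errors
    import synapse.module_api


    async def user_may_join_room(
        user_id: str, room_id: str, is_invited: bool
    ) -> Union[Literal["NOT_SPAM"], "synapse.api.errors.Codes"]:
        # Illustrative rule: keep uninvited users out of one specific room.
        if room_id == "!quarantined:example.org" and not is_invited:
            return synapse.api.errors.Codes.FORBIDDEN
        return synapse.module_api.NOT_SPAM
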
diff --git a/synapse/events/validator.py b/synapse/events/validator.py
index 29fa9b3880..27c8beba25 100644
--- a/synapse/events/validator.py
+++ b/synapse/events/validator.py
@@ -35,6 +35,10 @@ class EventValidator:
def validate_new(self, event: EventBase, config: HomeServerConfig) -> None:
"""Validates the event has roughly the right format
+        Suitable for checking a locally-created event. It has stricter checks than
+        are appropriate for an event received over federation (for which, see
+        event_auth.validate_event_for_room_version).
+
Args:
event: The event to validate.
config: The homeserver's configuration.
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index ad475a913b..66e6305562 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -1642,10 +1642,6 @@ def _validate_hierarchy_event(d: JsonDict) -> None:
if not isinstance(event_type, str):
raise ValueError("Invalid event: 'event_type' must be a str")
- room_id = d.get("room_id")
- if not isinstance(room_id, str):
- raise ValueError("Invalid event: 'room_id' must be a str")
-
state_key = d.get("state_key")
if not isinstance(state_key, str):
raise ValueError("Invalid event: 'state_key' must be a str")
diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py
index 333ca9a97f..41d8b937af 100644
--- a/synapse/federation/sender/per_destination_queue.py
+++ b/synapse/federation/sender/per_destination_queue.py
@@ -37,6 +37,7 @@ from synapse.metrics import sent_transactions_counter
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import ReadReceipt
from synapse.util.retryutils import NotRetryingDestination, get_retry_limiter
+from synapse.visibility import filter_events_for_server
if TYPE_CHECKING:
import synapse.server
@@ -77,6 +78,7 @@ class PerDestinationQueue:
):
self._server_name = hs.hostname
self._clock = hs.get_clock()
+ self._storage_controllers = hs.get_storage_controllers()
self._store = hs.get_datastores().main
self._transaction_manager = transaction_manager
self._instance_name = hs.get_instance_name()
@@ -442,6 +444,12 @@ class PerDestinationQueue:
"This should not happen." % event_ids
)
+ logger.info(
+ "Catching up destination %s with %d PDUs",
+ self._destination,
+ len(catchup_pdus),
+ )
+
# We send transactions with events from one room only, as it's likely
# that the remote will have to do additional processing, which may
# take some time. It's better to give it small amounts of work
@@ -487,19 +495,20 @@ class PerDestinationQueue:
):
continue
- # Filter out events where the server is not in the room,
- # e.g. it may have left/been kicked. *Ideally* we'd pull
- # out the kick and send that, but it's a rare edge case
- # so we don't bother for now (the server that sent the
- # kick should send it out if its online).
- hosts = await self._state.get_hosts_in_room_at_events(
- p.room_id, [p.event_id]
- )
- if self._destination not in hosts:
- continue
-
new_pdus.append(p)
+ # Filter out events where the server is not in the room,
+ # e.g. it may have left/been kicked. *Ideally* we'd pull
+ # out the kick and send that, but it's a rare edge case
+ # so we don't bother for now (the server that sent the
+            # kick should send it out if it's online).
+ new_pdus = await filter_events_for_server(
+ self._storage_controllers,
+ self._destination,
+ new_pdus,
+ redact=False,
+ )
+
# If we've filtered out all the extremities, fall back to
# sending the original event. This should ensure that the
# server gets at least some of the missed events (especially if
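
Catch-up now reuses `filter_events_for_server` from `synapse.visibility` rather than a hand-rolled membership check, so history-visibility rules apply to catch-up PDUs as well; with `redact=False`, events the destination may not see are dropped instead of redacted. Annotated shape of the call:

    # Keep only the PDUs the remote server is entitled to see.
    new_pdus = await filter_events_for_server(
        self._storage_controllers,  # state access, hence the new attribute above
        self._destination,          # the server being caught up
        new_pdus,
        redact=False,               # drop, rather than redact, hidden events
    )
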
diff --git a/synapse/federation/transport/server/federation.py b/synapse/federation/transport/server/federation.py
index 7dfb890661..f7884bfbe0 100644
--- a/synapse/federation/transport/server/federation.py
+++ b/synapse/federation/transport/server/federation.py
@@ -24,7 +24,6 @@ from typing import (
Union,
)
-from matrix_common.versionstring import get_distribution_version_string
from typing_extensions import Literal
from synapse.api.constants import EduTypes
@@ -42,6 +41,7 @@ from synapse.http.servlet import (
parse_strings_from_args,
)
from synapse.types import JsonDict
+from synapse.util import SYNAPSE_VERSION
from synapse.util.ratelimitutils import FederationRateLimiter
if TYPE_CHECKING:
@@ -622,7 +622,7 @@ class FederationVersionServlet(BaseFederationServlet):
{
"server": {
"name": "Synapse",
- "version": get_distribution_version_string("matrix-synapse"),
+ "version": SYNAPSE_VERSION,
}
},
)
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index fbafbbee6b..60d13040a2 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -81,6 +81,8 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
+INVALID_USERNAME_OR_PASSWORD = "Invalid username or password"
+
def convert_client_dict_legacy_fields_to_identifier(
submission: JsonDict,
@@ -197,6 +199,7 @@ class AuthHandler:
def __init__(self, hs: "HomeServer"):
self.store = hs.get_datastores().main
self.auth = hs.get_auth()
+ self.auth_blocking = hs.get_auth_blocking()
self.clock = hs.get_clock()
self.checkers: Dict[str, UserInteractiveAuthChecker] = {}
for auth_checker_class in INTERACTIVE_AUTH_CHECKERS:
@@ -983,7 +986,7 @@ class AuthHandler:
not is_appservice_ghost
or self.hs.config.appservice.track_appservice_user_ips
):
- await self.auth.check_auth_blocking(user_id)
+ await self.auth_blocking.check_auth_blocking(user_id)
access_token = self.generate_access_token(target_user_id_obj)
await self.store.add_access_token_to_user(
@@ -1215,7 +1218,9 @@ class AuthHandler:
await self._failed_login_attempts_ratelimiter.can_do_action(
None, (medium, address)
)
- raise LoginError(403, "", errcode=Codes.FORBIDDEN)
+ raise LoginError(
+ 403, msg=INVALID_USERNAME_OR_PASSWORD, errcode=Codes.FORBIDDEN
+ )
identifier_dict = {"type": "m.id.user", "user": user_id}
@@ -1341,7 +1346,7 @@ class AuthHandler:
# We raise a 403 here, but note that if we're doing user-interactive
# login, it turns all LoginErrors into a 401 anyway.
- raise LoginError(403, "Invalid password", errcode=Codes.FORBIDDEN)
+ raise LoginError(403, msg=INVALID_USERNAME_OR_PASSWORD, errcode=Codes.FORBIDDEN)
async def check_password_provider_3pid(
self, medium: str, address: str, password: str
@@ -1435,7 +1440,7 @@ class AuthHandler:
except Exception:
raise AuthError(403, "Invalid login token", errcode=Codes.FORBIDDEN)
- await self.auth.check_auth_blocking(res.user_id)
+ await self.auth_blocking.check_auth_blocking(res.user_id)
return res
async def delete_access_token(self, access_token: str) -> None:
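
Using one shared INVALID_USERNAME_OR_PASSWORD message for both the unknown-account path and the wrong-password path means a client can no longer tell the two apart, which closes a small account-enumeration channel. Either failure now surfaces the same way:

    # Both failure modes produce the same client-visible error, roughly:
    #     {"errcode": "M_FORBIDDEN", "error": "Invalid username or password"}
    raise LoginError(403, msg=INVALID_USERNAME_OR_PASSWORD, errcode=Codes.FORBIDDEN)
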
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index a0cbeedc30..b79c551703 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -398,35 +398,6 @@ class DeviceHandler(DeviceWorkerHandler):
await self.delete_devices(user_id, user_devices)
@trace
- async def delete_device(self, user_id: str, device_id: str) -> None:
- """Delete the given device
-
- Args:
- user_id: The user to delete the device from.
- device_id: The device to delete.
- """
-
- try:
- await self.store.delete_device(user_id, device_id)
- except errors.StoreError as e:
- if e.code == 404:
- # no match
- set_tag("error", True)
- log_kv(
- {"reason": "User doesn't have device id.", "device_id": device_id}
- )
- else:
- raise
-
- await self._auth_handler.delete_access_tokens_for_user(
- user_id, device_id=device_id
- )
-
- await self.store.delete_e2e_keys_by_device(user_id=user_id, device_id=device_id)
-
- await self.notify_device_update(user_id, [device_id])
-
- @trace
async def delete_all_devices_for_user(
self, user_id: str, except_device_id: Optional[str] = None
) -> None:
@@ -591,7 +562,7 @@ class DeviceHandler(DeviceWorkerHandler):
user_id, device_id, device_data
)
if old_device_id is not None:
- await self.delete_device(user_id, old_device_id)
+ await self.delete_devices(user_id, [old_device_id])
return device_id
async def get_dehydrated_device(
@@ -638,7 +609,7 @@ class DeviceHandler(DeviceWorkerHandler):
await self.store.update_device(user_id, device_id, old_device["display_name"])
# can't call self.delete_device because that will clobber the
# access token so call the storage layer directly
- await self.store.delete_device(user_id, old_device_id)
+ await self.store.delete_devices(user_id, [old_device_id])
await self.store.delete_e2e_keys_by_device(
user_id=user_id, device_id=old_device_id
)
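
`DeviceHandler.delete_device` is folded into the plural `delete_devices`; single-device callers, including the module API change below, now pass a one-element list:

    # Before:
    #     await device_handler.delete_device(user_id, device_id)
    # After: one device is just a one-element list.
    await device_handler.delete_devices(user_id, [device_id])
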
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index 1459a046de..8b0f16f965 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -28,6 +28,7 @@ from synapse.api.errors import (
SynapseError,
)
from synapse.appservice import ApplicationService
+from synapse.module_api import NOT_SPAM
from synapse.storage.databases.main.directory import RoomAliasMapping
from synapse.types import JsonDict, Requester, RoomAlias, UserID, get_domain_from_id
@@ -141,10 +142,15 @@ class DirectoryHandler:
403, "You must be in the room to create an alias for it"
)
- if not await self.spam_checker.user_may_create_room_alias(
+ spam_check = await self.spam_checker.user_may_create_room_alias(
user_id, room_alias
- ):
- raise AuthError(403, "This user is not permitted to create this alias")
+ )
+ if spam_check != self.spam_checker.NOT_SPAM:
+ raise AuthError(
+ 403,
+ "This user is not permitted to create this alias",
+ spam_check,
+ )
if not self.config.roomdirectory.is_alias_creation_allowed(
user_id, room_id, room_alias_str
@@ -430,9 +436,12 @@ class DirectoryHandler:
"""
user_id = requester.user.to_string()
- if not await self.spam_checker.user_may_publish_room(user_id, room_id):
+ spam_check = await self.spam_checker.user_may_publish_room(user_id, room_id)
+ if spam_check != NOT_SPAM:
raise AuthError(
- 403, "This user is not permitted to publish rooms to the room list"
+ 403,
+ "This user is not permitted to publish rooms to the room list",
+ spam_check,
)
if requester.is_guest:
diff --git a/synapse/handlers/event_auth.py b/synapse/handlers/event_auth.py
index 6bed464351..ed4149bd58 100644
--- a/synapse/handlers/event_auth.py
+++ b/synapse/handlers/event_auth.py
@@ -48,14 +48,13 @@ class EventAuthHandler:
async def check_auth_rules_from_context(
self,
- room_version_obj: RoomVersion,
event: EventBase,
context: EventContext,
) -> None:
"""Check an event passes the auth rules at its own auth events"""
auth_event_ids = event.auth_event_ids()
auth_events_by_id = await self._store.get_events(auth_event_ids)
- check_auth_rules_for_event(room_version_obj, event, auth_events_by_id.values())
+ check_auth_rules_for_event(event, auth_events_by_id.values())
def compute_auth_events(
self,
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 6a143440d3..34cc5ecd11 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -59,6 +59,7 @@ from synapse.federation.federation_client import InvalidResponseError
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import nested_logging_context
from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.module_api import NOT_SPAM
from synapse.replication.http.federation import (
ReplicationCleanRoomRestServlet,
ReplicationStoreRoomOnOutlierMembershipRestServlet,
@@ -545,6 +546,7 @@ class FederationHandler:
if ret.partial_state:
# TODO(faster_joins): roll this back if we don't manage to start the
# background resync (eg process_remote_join fails)
+ # https://github.com/matrix-org/synapse/issues/12998
await self.store.store_partial_state_room(room_id, ret.servers_in_room)
max_stream_id = await self._federation_event_handler.process_remote_join(
@@ -799,9 +801,7 @@ class FederationHandler:
# The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_join_request`
- await self._event_auth_handler.check_auth_rules_from_context(
- room_version, event, context
- )
+ await self._event_auth_handler.check_auth_rules_from_context(event, context)
return event
async def on_invite_request(
@@ -821,11 +821,14 @@ class FederationHandler:
if self.hs.config.server.block_non_admin_invites:
raise SynapseError(403, "This server does not accept room invites")
- if not await self.spam_checker.user_may_invite(
+ spam_check = await self.spam_checker.user_may_invite(
event.sender, event.state_key, event.room_id
- ):
+ )
+ if spam_check != NOT_SPAM:
raise SynapseError(
- 403, "This user is not permitted to send invites to this server/user"
+ 403,
+ "This user is not permitted to send invites to this server/user",
+ spam_check,
)
membership = event.content.get("membership")
@@ -972,9 +975,7 @@ class FederationHandler:
try:
# The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_leave_request`
- await self._event_auth_handler.check_auth_rules_from_context(
- room_version_obj, event, context
- )
+ await self._event_auth_handler.check_auth_rules_from_context(event, context)
except AuthError as e:
logger.warning("Failed to create new leave %r because %s", event, e)
raise e
@@ -1033,9 +1034,7 @@ class FederationHandler:
try:
# The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_knock_request`
- await self._event_auth_handler.check_auth_rules_from_context(
- room_version_obj, event, context
- )
+ await self._event_auth_handler.check_auth_rules_from_context(event, context)
except AuthError as e:
logger.warning("Failed to create new knock %r because %s", event, e)
raise e
@@ -1206,9 +1205,9 @@ class FederationHandler:
event.internal_metadata.send_on_behalf_of = self.hs.hostname
try:
- validate_event_for_room_version(room_version_obj, event)
+ validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context(
- room_version_obj, event, context
+ event, context
)
except AuthError as e:
logger.warning("Denying new third party invite %r because %s", event, e)
@@ -1258,10 +1257,8 @@ class FederationHandler:
)
try:
- validate_event_for_room_version(room_version_obj, event)
- await self._event_auth_handler.check_auth_rules_from_context(
- room_version_obj, event, context
- )
+ validate_event_for_room_version(event)
+ await self._event_auth_handler.check_auth_rules_from_context(event, context)
except AuthError as e:
logger.warning("Denying third party invite %r because %s", event, e)
raise e
@@ -1506,14 +1503,17 @@ class FederationHandler:
# TODO(faster_joins): do we need to lock to avoid races? What happens if other
# worker processes kick off a resync in parallel? Perhaps we should just elect
# a single worker to do the resync.
+ # https://github.com/matrix-org/synapse/issues/12994
#
# TODO(faster_joins): what happens if we leave the room during a resync? if we
# really leave, that might mean we have difficulty getting the room state over
# federation.
+ # https://github.com/matrix-org/synapse/issues/12802
#
# TODO(faster_joins): we need some way of prioritising which homeservers in
# `other_destinations` to try first, otherwise we'll spend ages trying dead
# homeservers for large rooms.
+ # https://github.com/matrix-org/synapse/issues/12999
if initial_destination is None and len(other_destinations) == 0:
raise ValueError(
@@ -1543,9 +1543,11 @@ class FederationHandler:
# all the events are updated, so we can update current state and
# clear the lazy-loading flag.
logger.info("Updating current state for %s", room_id)
+ # TODO(faster_joins): support workers
+ # https://github.com/matrix-org/synapse/issues/12994
assert (
self._storage_controllers.persistence is not None
- ), "TODO(faster_joins): support for workers"
+ ), "worker-mode deployments not currently supported here"
await self._storage_controllers.persistence.update_current_state(
room_id
)
@@ -1559,6 +1561,8 @@ class FederationHandler:
)
# TODO(faster_joins) update room stats and user directory?
+ # https://github.com/matrix-org/synapse/issues/12814
+ # https://github.com/matrix-org/synapse/issues/12815
return
# we raced against more events arriving with partial state. Go round
@@ -1566,6 +1570,8 @@ class FederationHandler:
# TODO(faster_joins): there is still a race here, whereby incoming events which raced
# with us will fail to be persisted after the call to `clear_partial_state_room` due to
# having partial state.
+ # https://github.com/matrix-org/synapse/issues/12988
+ #
continue
events = await self.store.get_events_as_list(
@@ -1588,6 +1594,7 @@ class FederationHandler:
# indefinitely is also not the right thing to do if we can
# reach all homeservers and they all claim they don't have
# the state we want.
+ # https://github.com/matrix-org/synapse/issues/13000
logger.error(
"Failed to get state for %s at %s from %s because %s, "
"giving up!",
diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py
index 87a0608359..6c9e6a00b5 100644
--- a/synapse/handlers/federation_event.py
+++ b/synapse/handlers/federation_event.py
@@ -532,6 +532,7 @@ class FederationEventHandler:
#
# TODO(faster_joins): we probably need to be more intelligent, and
# exclude partial-state prev_events from consideration
+ # https://github.com/matrix-org/synapse/issues/13001
logger.warning(
"%s still has partial state: can't de-partial-state it yet",
event.event_id,
@@ -777,6 +778,7 @@ class FederationEventHandler:
state_ids = await self._resolve_state_at_missing_prevs(origin, event)
# TODO(faster_joins): make sure that _resolve_state_at_missing_prevs does
# not return partial state
+ # https://github.com/matrix-org/synapse/issues/13002
await self._process_received_pdu(
origin, event, state_ids=state_ids, backfilled=backfilled
@@ -1428,9 +1430,6 @@ class FederationEventHandler:
allow_rejected=True,
)
- room_version = await self._store.get_room_version_id(room_id)
- room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
-
def prep(event: EventBase) -> Optional[Tuple[EventBase, EventContext]]:
with nested_logging_context(suffix=event.event_id):
auth = []
@@ -1453,8 +1452,8 @@ class FederationEventHandler:
context = EventContext.for_outlier(self._storage_controllers)
try:
- validate_event_for_room_version(room_version_obj, event)
- check_auth_rules_for_event(room_version_obj, event, auth)
+ validate_event_for_room_version(event)
+ check_auth_rules_for_event(event, auth)
except AuthError as e:
logger.warning("Rejecting %r because %s", event, e)
context.rejected = RejectedReason.AUTH_ERROR
@@ -1497,11 +1496,8 @@ class FederationEventHandler:
assert not event.internal_metadata.outlier
# first of all, check that the event itself is valid.
- room_version = await self._store.get_room_version_id(event.room_id)
- room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
-
try:
- validate_event_for_room_version(room_version_obj, event)
+ validate_event_for_room_version(event)
except AuthError as e:
logger.warning("While validating received event %r: %s", event, e)
# TODO: use a different rejected reason here?
@@ -1519,7 +1515,7 @@ class FederationEventHandler:
# ... and check that the event passes auth at those auth events.
try:
- check_auth_rules_for_event(room_version_obj, event, claimed_auth_events)
+ check_auth_rules_for_event(event, claimed_auth_events)
except AuthError as e:
logger.warning(
"While checking auth of %r against auth_events: %s", event, e
@@ -1567,9 +1563,7 @@ class FederationEventHandler:
auth_events_for_auth = calculated_auth_event_map
try:
- check_auth_rules_for_event(
- room_version_obj, event, auth_events_for_auth.values()
- )
+ check_auth_rules_for_event(event, auth_events_for_auth.values())
except AuthError as e:
logger.warning("Failed auth resolution for %r because %s", event, e)
context.rejected = RejectedReason.AUTH_ERROR
@@ -1669,7 +1663,7 @@ class FederationEventHandler:
)
try:
- check_auth_rules_for_event(room_version_obj, event, current_auth_events)
+ check_auth_rules_for_event(event, current_auth_events)
except AuthError as e:
logger.warning(
"Soft-failing %r (from %s) because %s",
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index f455158a2c..189f52fe5a 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -42,7 +42,7 @@ from synapse.api.errors import (
SynapseError,
UnsupportedRoomVersionError,
)
-from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions
+from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.api.urls import ConsentURIBuilder
from synapse.event_auth import validate_event_for_room_version
from synapse.events import EventBase, relation_from_event
@@ -444,7 +444,7 @@ _DUMMY_EVENT_ROOM_EXCLUSION_EXPIRY = 7 * 24 * 60 * 60 * 1000
class EventCreationHandler:
def __init__(self, hs: "HomeServer"):
self.hs = hs
- self.auth = hs.get_auth()
+ self.auth_blocking = hs.get_auth_blocking()
self._event_auth_handler = hs.get_event_auth_handler()
self.store = hs.get_datastores().main
self._storage_controllers = hs.get_storage_controllers()
@@ -605,7 +605,7 @@ class EventCreationHandler:
Returns:
Tuple of created event, Context
"""
- await self.auth.check_auth_blocking(requester=requester)
+ await self.auth_blocking.check_auth_blocking(requester=requester)
if event_dict["type"] == EventTypes.Create and event_dict["state_key"] == "":
room_version_id = event_dict["content"]["room_version"]
@@ -954,14 +954,12 @@ class EventCreationHandler:
"Spam-check module returned invalid error value. Expecting [code, dict], got %s",
spam_check_result,
)
- spam_check_result = Codes.FORBIDDEN
- if isinstance(spam_check_result, Codes):
- raise SynapseError(
- 403,
- "This message has been rejected as probable spam",
- spam_check_result,
- )
+ raise SynapseError(
+ 403,
+ "This message has been rejected as probable spam",
+ Codes.FORBIDDEN,
+ )
# Backwards compatibility: if the return value is not an error code, it
# means the module returned an error message to be included in the
@@ -1102,6 +1100,7 @@ class EventCreationHandler:
#
# TODO(faster_joins): figure out how this works, and make sure that the
# old state is complete.
+ # https://github.com/matrix-org/synapse/issues/13003
metadata = await self.store.get_metadata_for_events(state_event_ids)
state_map_for_event: MutableStateMap[str] = {}
@@ -1273,23 +1272,6 @@ class EventCreationHandler:
)
return prev_event
- if event.is_state() and (event.type, event.state_key) == (
- EventTypes.Create,
- "",
- ):
- room_version_id = event.content.get(
- "room_version", RoomVersions.V1.identifier
- )
- maybe_room_version_obj = KNOWN_ROOM_VERSIONS.get(room_version_id)
- if not maybe_room_version_obj:
- raise UnsupportedRoomVersionError(
- "Attempt to create a room with unsupported room version %s"
- % (room_version_id,)
- )
- room_version_obj = maybe_room_version_obj
- else:
- room_version_obj = await self.store.get_room_version(event.room_id)
-
if event.internal_metadata.is_out_of_band_membership():
# the only sort of out-of-band-membership events we expect to see here are
# invite rejections and rescinded knocks that we have generated ourselves.
@@ -1297,9 +1279,9 @@ class EventCreationHandler:
assert event.content["membership"] == Membership.LEAVE
else:
try:
- validate_event_for_room_version(room_version_obj, event)
+ validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context(
- room_version_obj, event, context
+ event, context
)
except AuthError as err:
logger.warning("Denying new event %r because %s", event, err)
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 338204287f..c77d181722 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -91,6 +91,7 @@ class RegistrationHandler:
self.clock = hs.get_clock()
self.hs = hs
self.auth = hs.get_auth()
+ self.auth_blocking = hs.get_auth_blocking()
self._auth_handler = hs.get_auth_handler()
self.profile_handler = hs.get_profile_handler()
self.user_directory_handler = hs.get_user_directory_handler()
@@ -276,7 +277,7 @@ class RegistrationHandler:
# do not check_auth_blocking if the call is coming through the Admin API
if not by_admin:
- await self.auth.check_auth_blocking(threepid=threepid)
+ await self.auth_blocking.check_auth_blocking(threepid=threepid)
if localpart is not None:
await self.check_username(localpart, guest_access_token=guest_access_token)
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 520663f172..75c0be8c36 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -62,6 +62,7 @@ from synapse.events.utils import copy_and_fixup_power_levels_contents
from synapse.federation.federation_client import InvalidResponseError
from synapse.handlers.federation import get_domains_from_state
from synapse.handlers.relations import BundledAggregations
+from synapse.module_api import NOT_SPAM
from synapse.rest.admin._base import assert_user_is_admin
from synapse.storage.state import StateFilter
from synapse.streams import EventSource
@@ -109,6 +110,7 @@ class RoomCreationHandler:
self.store = hs.get_datastores().main
self._storage_controllers = hs.get_storage_controllers()
self.auth = hs.get_auth()
+ self.auth_blocking = hs.get_auth_blocking()
self.clock = hs.get_clock()
self.hs = hs
self.spam_checker = hs.get_spam_checker()
@@ -226,10 +228,9 @@ class RoomCreationHandler:
},
},
)
- old_room_version = await self.store.get_room_version(old_room_id)
- validate_event_for_room_version(old_room_version, tombstone_event)
+ validate_event_for_room_version(tombstone_event)
await self._event_auth_handler.check_auth_rules_from_context(
- old_room_version, tombstone_event, tombstone_context
+ tombstone_event, tombstone_context
)
# Upgrade the room
@@ -437,10 +438,9 @@ class RoomCreationHandler:
"""
user_id = requester.user.to_string()
- if not await self.spam_checker.user_may_create_room(user_id):
- raise SynapseError(
- 403, "You are not permitted to create rooms", Codes.FORBIDDEN
- )
+ spam_check = await self.spam_checker.user_may_create_room(user_id)
+ if spam_check != NOT_SPAM:
+ raise SynapseError(403, "You are not permitted to create rooms", spam_check)
creation_content: JsonDict = {
"room_version": new_room_version.identifier,
@@ -707,7 +707,7 @@ class RoomCreationHandler:
"""
user_id = requester.user.to_string()
- await self.auth.check_auth_blocking(requester=requester)
+ await self.auth_blocking.check_auth_blocking(requester=requester)
if (
self._server_notices_mxid is not None
@@ -727,12 +727,12 @@ class RoomCreationHandler:
invite_3pid_list = config.get("invite_3pid", [])
invite_list = config.get("invite", [])
- if not is_requester_admin and not (
- await self.spam_checker.user_may_create_room(user_id)
- ):
- raise SynapseError(
- 403, "You are not permitted to create rooms", Codes.FORBIDDEN
- )
+ if not is_requester_admin:
+ spam_check = await self.spam_checker.user_may_create_room(user_id)
+ if spam_check != NOT_SPAM:
+ raise SynapseError(
+ 403, "You are not permitted to create rooms", spam_check
+ )
if ratelimit:
await self.request_ratelimiter.ratelimit(requester)
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index d1199a0644..e89b7441ad 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -38,6 +38,7 @@ from synapse.event_auth import get_named_level, get_power_level_event
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.handlers.profile import MAX_AVATAR_URL_LEN, MAX_DISPLAYNAME_LEN
+from synapse.module_api import NOT_SPAM
from synapse.storage.state import StateFilter
from synapse.types import (
JsonDict,
@@ -683,7 +684,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
if target_id == self._server_notices_mxid:
raise SynapseError(HTTPStatus.FORBIDDEN, "Cannot invite this user")
- block_invite = False
+ block_invite_code = None
if (
self._server_notices_mxid is not None
@@ -701,16 +702,19 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
"Blocking invite: user is not admin and non-admin "
"invites disabled"
)
- block_invite = True
+ block_invite_code = Codes.FORBIDDEN
- if not await self.spam_checker.user_may_invite(
+ spam_check = await self.spam_checker.user_may_invite(
requester.user.to_string(), target_id, room_id
- ):
+ )
+ if spam_check != NOT_SPAM:
logger.info("Blocking invite due to spam checker")
- block_invite = True
+ block_invite_code = spam_check
- if block_invite:
- raise SynapseError(403, "Invites have been disabled on this server")
+ if block_invite_code is not None:
+ raise SynapseError(
+ 403, "Invites have been disabled on this server", block_invite_code
+ )
# An empty prev_events list is allowed as long as the auth_event_ids are present
if prev_event_ids is not None:
@@ -818,11 +822,12 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
# We assume that if the spam checker allowed the user to create
# a room then they're allowed to join it.
and not new_room
- and not await self.spam_checker.user_may_join_room(
+ ):
+ spam_check = await self.spam_checker.user_may_join_room(
target.to_string(), room_id, is_invited=inviter is not None
)
- ):
- raise SynapseError(403, "Not allowed to join this room")
+ if spam_check != NOT_SPAM:
+ raise SynapseError(403, "Not allowed to join this room", spam_check)
# Check if a remote join should be performed.
remote_join, remote_room_hosts = await self._should_perform_remote_join(
@@ -1369,13 +1374,14 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
)
else:
# Check if the spamchecker(s) allow this invite to go through.
- if not await self.spam_checker.user_may_send_3pid_invite(
+ spam_check = await self.spam_checker.user_may_send_3pid_invite(
inviter_userid=requester.user.to_string(),
medium=medium,
address=address,
room_id=room_id,
- ):
- raise SynapseError(403, "Cannot send threepid invite")
+ )
+ if spam_check != NOT_SPAM:
+ raise SynapseError(403, "Cannot send threepid invite", spam_check)
stream_id = await self._make_and_store_3pid_invite(
requester,
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index b4ead79f97..af19c513be 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -237,7 +237,7 @@ class SyncHandler:
self.event_sources = hs.get_event_sources()
self.clock = hs.get_clock()
self.state = hs.get_state_handler()
- self.auth = hs.get_auth()
+ self.auth_blocking = hs.get_auth_blocking()
self._storage_controllers = hs.get_storage_controllers()
self._state_storage_controller = self._storage_controllers.state
@@ -280,7 +280,7 @@ class SyncHandler:
# not been exceeded (if not part of the group by this point, almost certain
# auth_blocking will occur)
user_id = sync_config.user.to_string()
- await self.auth.check_auth_blocking(requester=requester)
+ await self.auth_blocking.check_auth_blocking(requester=requester)
res = await self.response_cache.wrap(
sync_config.request_key,
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index fffd83546c..496fce2ecc 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -35,7 +35,6 @@ from typing import (
)
import attr
-from matrix_common.versionstring import get_distribution_version_string
from prometheus_client import CollectorRegistry, Counter, Gauge, Histogram, Metric
from prometheus_client.core import (
REGISTRY,
@@ -54,6 +53,7 @@ from synapse.metrics._exposition import (
)
from synapse.metrics._gc import MIN_TIME_BETWEEN_GCS, install_gc_manager
from synapse.metrics._types import Collector
+from synapse.util import SYNAPSE_VERSION
logger = logging.getLogger(__name__)
@@ -419,7 +419,7 @@ build_info = Gauge(
)
build_info.labels(
" ".join([platform.python_implementation(), platform.python_version()]),
- get_distribution_version_string("matrix-synapse"),
+ SYNAPSE_VERSION,
" ".join([platform.system(), platform.release()]),
).set(1)
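For reference, the build_info pattern above is plain prometheus_client usage and can be reproduced standalone; the metric name and version value below are placeholders:

    import platform

    from prometheus_client import Gauge

    example_build_info = Gauge(
        "example_build_info",
        "Build information",
        ["python_version", "version", "os"],
    )
    example_build_info.labels(
        platform.python_version(),  # e.g. "3.10.4"
        "1.0.0",                    # placeholder where Synapse passes SYNAPSE_VERSION
        platform.system(),          # e.g. "Linux"
    ).set(1)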
diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py
index a8ad575fcd..6191c2dc96 100644
--- a/synapse/module_api/__init__.py
+++ b/synapse/module_api/__init__.py
@@ -115,6 +115,7 @@ from synapse.types import (
JsonDict,
JsonMapping,
Requester,
+ RoomAlias,
StateMap,
UserID,
UserInfo,
@@ -163,6 +164,7 @@ __all__ = [
"EventBase",
"StateMap",
"ProfileInfo",
+ "RoomAlias",
"UserProfile",
]
@@ -799,7 +801,7 @@ class ModuleApi:
if device_id:
# delete the device, which will also delete its access tokens
yield defer.ensureDeferred(
- self._hs.get_device_handler().delete_device(user_id, device_id)
+ self._hs.get_device_handler().delete_devices(user_id, [device_id])
)
else:
# no associated device. Just delete the access token.
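This and the call sites below all apply the same mechanical rewrite: the removed single-device delete_device helper becomes a one-element call to the list-based delete_devices. A minimal sketch of that shape (the shim class is hypothetical, not the real device handler):

    import asyncio
    from typing import List


    class DeviceHandlerShim:
        # Hypothetical stand-in for the device handler's list-based API.
        async def delete_devices(self, user_id: str, device_ids: List[str]) -> None:
            for device_id in device_ids:
                print(f"deleting {device_id} for {user_id}")


    async def main() -> None:
        handler = DeviceHandlerShim()
        # What used to be delete_device(user_id, device_id) becomes:
        await handler.delete_devices("@alice:example.com", ["DEVICEID"])


    asyncio.run(main())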
diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py
index 1aa08f8d95..fa3266720b 100644
--- a/synapse/rest/admin/__init__.py
+++ b/synapse/rest/admin/__init__.py
@@ -20,8 +20,6 @@ import platform
from http import HTTPStatus
from typing import TYPE_CHECKING, Optional, Tuple
-from matrix_common.versionstring import get_distribution_version_string
-
from synapse.api.errors import Codes, NotFoundError, SynapseError
from synapse.http.server import HttpServer, JsonResource
from synapse.http.servlet import RestServlet, parse_json_object_from_request
@@ -88,6 +86,7 @@ from synapse.rest.admin.users import (
WhoisRestServlet,
)
from synapse.types import JsonDict, RoomStreamToken
+from synapse.util import SYNAPSE_VERSION
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -100,7 +99,7 @@ class VersionServlet(RestServlet):
def __init__(self, hs: "HomeServer"):
self.res = {
- "server_version": get_distribution_version_string("matrix-synapse"),
+ "server_version": SYNAPSE_VERSION,
"python_version": platform.python_version(),
}
diff --git a/synapse/rest/admin/devices.py b/synapse/rest/admin/devices.py
index cef46ba0dd..d934880102 100644
--- a/synapse/rest/admin/devices.py
+++ b/synapse/rest/admin/devices.py
@@ -80,7 +80,7 @@ class DeviceRestServlet(RestServlet):
if u is None:
raise NotFoundError("Unknown user")
- await self.device_handler.delete_device(target_user.to_string(), device_id)
+ await self.device_handler.delete_devices(target_user.to_string(), [device_id])
return HTTPStatus.OK, {}
async def on_PUT(
diff --git a/synapse/rest/client/devices.py b/synapse/rest/client/devices.py
index ad6fd6492b..6fab102437 100644
--- a/synapse/rest/client/devices.py
+++ b/synapse/rest/client/devices.py
@@ -147,7 +147,9 @@ class DeviceRestServlet(RestServlet):
can_skip_ui_auth=True,
)
- await self.device_handler.delete_device(requester.user.to_string(), device_id)
+ await self.device_handler.delete_devices(
+ requester.user.to_string(), [device_id]
+ )
return 200, {}
async def on_PUT(
diff --git a/synapse/rest/client/logout.py b/synapse/rest/client/logout.py
index 193a6951b9..23dfa4518f 100644
--- a/synapse/rest/client/logout.py
+++ b/synapse/rest/client/logout.py
@@ -45,8 +45,8 @@ class LogoutRestServlet(RestServlet):
access_token = self.auth.get_access_token_from_request(request)
await self._auth_handler.delete_access_token(access_token)
else:
- await self._device_handler.delete_device(
- requester.user.to_string(), requester.device_id
+ await self._device_handler.delete_devices(
+ requester.user.to_string(), [requester.device_id]
)
return 200, {}
diff --git a/synapse/rest/client/relations.py b/synapse/rest/client/relations.py
index 3cae6d2b55..ce97080013 100644
--- a/synapse/rest/client/relations.py
+++ b/synapse/rest/client/relations.py
@@ -43,6 +43,7 @@ class RelationPaginationServlet(RestServlet):
self.auth = hs.get_auth()
self.store = hs.get_datastores().main
self._relations_handler = hs.get_relations_handler()
+ self._msc3715_enabled = hs.config.experimental.msc3715_enabled
async def on_GET(
self,
@@ -55,9 +56,15 @@ class RelationPaginationServlet(RestServlet):
requester = await self.auth.get_user_by_req(request, allow_guest=True)
limit = parse_integer(request, "limit", default=5)
- direction = parse_string(
- request, "org.matrix.msc3715.dir", default="b", allowed_values=["f", "b"]
- )
+ if self._msc3715_enabled:
+ direction = parse_string(
+ request,
+ "org.matrix.msc3715.dir",
+ default="b",
+ allowed_values=["f", "b"],
+ )
+ else:
+ direction = "b"
from_token_str = parse_string(request, "from")
to_token_str = parse_string(request, "to")
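The servlet now honours the experimental org.matrix.msc3715.dir parameter only when the feature flag is enabled. Extracted as a pure function (a hypothetical helper, not in the changeset), the gating logic is:

    def choose_direction(msc3715_enabled: bool, requested: str = "b") -> str:
        # Only honour the experimental parameter when MSC3715 support is
        # enabled; otherwise always paginate backwards.
        if msc3715_enabled and requested in ("f", "b"):
            return requested
        return "b"


    assert choose_direction(False, "f") == "b"
    assert choose_direction(True, "f") == "f"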
diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py
index 604f18bf52..9137417342 100644
--- a/synapse/rest/media/v1/media_storage.py
+++ b/synapse/rest/media/v1/media_storage.py
@@ -36,6 +36,7 @@ from twisted.internet.defer import Deferred
from twisted.internet.interfaces import IConsumer
from twisted.protocols.basic import FileSender
+import synapse
from synapse.api.errors import NotFoundError
from synapse.logging.context import defer_to_thread, make_deferred_yieldable
from synapse.util import Clock
@@ -145,15 +146,15 @@ class MediaStorage:
f.flush()
f.close()
- spam = await self.spam_checker.check_media_file_for_spam(
+ spam_check = await self.spam_checker.check_media_file_for_spam(
ReadableFileWrapper(self.clock, fname), file_info
)
- if spam:
+ if spam_check != synapse.module_api.NOT_SPAM:
logger.info("Blocking media due to spam checker")
# Note that we'll delete the stored media, due to the
# try/except below. The media also won't be stored in
# the DB.
- raise SpamMediaException()
+ raise SpamMediaException(errcode=spam_check)
for provider in self.storage_providers:
await provider.store_file(path, file_info)
diff --git a/synapse/server.py b/synapse/server.py
index a66ec228db..a6a415aeab 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -29,6 +29,7 @@ from twisted.web.iweb import IPolicyForHTTPS
from twisted.web.resource import Resource
from synapse.api.auth import Auth
+from synapse.api.auth_blocking import AuthBlocking
from synapse.api.filtering import Filtering
from synapse.api.ratelimiting import Ratelimiter, RequestRatelimiter
from synapse.appservice.api import ApplicationServiceApi
@@ -380,6 +381,10 @@ class HomeServer(metaclass=abc.ABCMeta):
return Auth(self)
@cache_in_self
+ def get_auth_blocking(self) -> AuthBlocking:
+ return AuthBlocking(self)
+
+ @cache_in_self
def get_http_client_context_factory(self) -> IPolicyForHTTPS:
if self.config.tls.use_insecure_ssl_client_just_for_testing_do_not_use:
return InsecureInterceptableContextFactory()
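get_auth_blocking follows the homeserver's usual memoized-accessor convention: the first call constructs AuthBlocking, later calls return the cached instance. A simplified stand-in for the decorator (not Synapse's implementation) behaves like this:

    import functools
    from typing import Any, Callable


    def cache_in_self(builder: Callable[[Any], Any]) -> Callable[[Any], Any]:
        # Build the dependency on first access, then reuse it forever after.
        attr = "_cached_" + builder.__name__

        @functools.wraps(builder)
        def wrapper(self: Any) -> Any:
            if not hasattr(self, attr):
                setattr(self, attr, builder(self))
            return getattr(self, attr)

        return wrapper


    class FakeHomeServer:
        @cache_in_self
        def get_auth_blocking(self) -> object:
            return object()  # stands in for AuthBlocking(self)


    hs = FakeHomeServer()
    assert hs.get_auth_blocking() is hs.get_auth_blocking()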
diff --git a/synapse/server_notices/resource_limits_server_notices.py b/synapse/server_notices/resource_limits_server_notices.py
index 6863020778..3134cd2d3d 100644
--- a/synapse/server_notices/resource_limits_server_notices.py
+++ b/synapse/server_notices/resource_limits_server_notices.py
@@ -37,7 +37,7 @@ class ResourceLimitsServerNotices:
self._server_notices_manager = hs.get_server_notices_manager()
self._store = hs.get_datastores().main
self._storage_controllers = hs.get_storage_controllers()
- self._auth = hs.get_auth()
+ self._auth_blocking = hs.get_auth_blocking()
self._config = hs.config
self._resouce_limited = False
self._account_data_handler = hs.get_account_data_handler()
@@ -91,7 +91,7 @@ class ResourceLimitsServerNotices:
# Normally should always pass in user_id to check_auth_blocking
# if you have it, but in this case are checking what would happen
# to other users if they were to arrive.
- await self._auth.check_auth_blocking()
+ await self._auth_blocking.check_auth_blocking()
except ResourceLimitError as e:
limit_msg = e.msg
limit_type = e.limit_type
diff --git a/synapse/state/v1.py b/synapse/state/v1.py
index 499a328201..8bbb4ce41c 100644
--- a/synapse/state/v1.py
+++ b/synapse/state/v1.py
@@ -30,7 +30,7 @@ from typing import (
from synapse import event_auth
from synapse.api.constants import EventTypes
from synapse.api.errors import AuthError
-from synapse.api.room_versions import RoomVersion, RoomVersions
+from synapse.api.room_versions import RoomVersion
from synapse.events import EventBase
from synapse.types import MutableStateMap, StateMap
@@ -331,7 +331,6 @@ def _resolve_auth_events(
try:
# The signatures have already been checked at this point
event_auth.check_auth_rules_for_event(
- RoomVersions.V1,
event,
auth_events.values(),
)
@@ -349,7 +348,6 @@ def _resolve_normal_events(
try:
# The signatures have already been checked at this point
event_auth.check_auth_rules_for_event(
- RoomVersions.V1,
event,
auth_events.values(),
)
diff --git a/synapse/state/v2.py b/synapse/state/v2.py
index c618df2fde..6a16f38a15 100644
--- a/synapse/state/v2.py
+++ b/synapse/state/v2.py
@@ -17,12 +17,14 @@ import itertools
import logging
from typing import (
Any,
+ Awaitable,
Callable,
Collection,
Dict,
Generator,
Iterable,
List,
+ Mapping,
Optional,
Sequence,
Set,
@@ -30,33 +32,58 @@ from typing import (
overload,
)
-from typing_extensions import Literal
+from typing_extensions import Literal, Protocol
-import synapse.state
from synapse import event_auth
from synapse.api.constants import EventTypes
from synapse.api.errors import AuthError
from synapse.api.room_versions import RoomVersion
from synapse.events import EventBase
from synapse.types import MutableStateMap, StateMap
-from synapse.util import Clock
logger = logging.getLogger(__name__)
+class Clock(Protocol):
+ # This is usually synapse.util.Clock, but it's replaced with a FakeClock in tests.
+ # We only ever sleep(0) though, so that other async functions can make forward
+ # progress without waiting for stateres to complete.
+ def sleep(self, duration_ms: float) -> Awaitable[None]:
+ ...
+
+
+class StateResolutionStore(Protocol):
+ # This is usually synapse.state.StateResolutionStore, but it's replaced with a
+ # TestStateResolutionStore in tests.
+ def get_events(
+ self, event_ids: Collection[str], allow_rejected: bool = False
+ ) -> Awaitable[Dict[str, EventBase]]:
+ ...
+
+ def get_auth_chain_difference(
+ self, room_id: str, state_sets: List[Set[str]]
+ ) -> Awaitable[Set[str]]:
+ ...
+
+
# We want to await to the reactor occasionally during state res when dealing
# with large data sets, so that we don't exhaust the reactor. This is done by
# awaiting to reactor during loops every N iterations.
_AWAIT_AFTER_ITERATIONS = 100
+__all__ = [
+ "resolve_events_with_store",
+]
+
+
async def resolve_events_with_store(
clock: Clock,
room_id: str,
room_version: RoomVersion,
state_sets: Sequence[StateMap[str]],
event_map: Optional[Dict[str, EventBase]],
- state_res_store: "synapse.state.StateResolutionStore",
+ state_res_store: StateResolutionStore,
) -> StateMap[str]:
"""Resolves the state using the v2 state resolution algorithm
@@ -194,7 +221,7 @@ async def _get_power_level_for_sender(
room_id: str,
event_id: str,
event_map: Dict[str, EventBase],
- state_res_store: "synapse.state.StateResolutionStore",
+ state_res_store: StateResolutionStore,
) -> int:
"""Return the power level of the sender of the given event according to
their auth events.
@@ -243,9 +270,9 @@ async def _get_power_level_for_sender(
async def _get_auth_chain_difference(
room_id: str,
- state_sets: Sequence[StateMap[str]],
+ state_sets: Sequence[Mapping[Any, str]],
event_map: Dict[str, EventBase],
- state_res_store: "synapse.state.StateResolutionStore",
+ state_res_store: StateResolutionStore,
) -> Set[str]:
"""Compare the auth chains of each state set and return the set of events
that only appear in some but not all of the auth chains.
@@ -406,7 +433,7 @@ async def _add_event_and_auth_chain_to_graph(
room_id: str,
event_id: str,
event_map: Dict[str, EventBase],
- state_res_store: "synapse.state.StateResolutionStore",
+ state_res_store: StateResolutionStore,
auth_diff: Set[str],
) -> None:
"""Helper function for _reverse_topological_power_sort that add the event
@@ -440,7 +467,7 @@ async def _reverse_topological_power_sort(
room_id: str,
event_ids: Iterable[str],
event_map: Dict[str, EventBase],
- state_res_store: "synapse.state.StateResolutionStore",
+ state_res_store: StateResolutionStore,
auth_diff: Set[str],
) -> List[str]:
"""Returns a list of the event_ids sorted by reverse topological ordering,
@@ -501,7 +528,7 @@ async def _iterative_auth_checks(
event_ids: List[str],
base_state: StateMap[str],
event_map: Dict[str, EventBase],
- state_res_store: "synapse.state.StateResolutionStore",
+ state_res_store: StateResolutionStore,
) -> MutableStateMap[str]:
"""Sequentially apply auth checks to each event in given list, updating the
state as it goes along.
@@ -547,7 +574,6 @@ async def _iterative_auth_checks(
try:
event_auth.check_auth_rules_for_event(
- room_version,
event,
auth_events.values(),
)
@@ -570,7 +596,7 @@ async def _mainline_sort(
event_ids: List[str],
resolved_power_event_id: Optional[str],
event_map: Dict[str, EventBase],
- state_res_store: "synapse.state.StateResolutionStore",
+ state_res_store: StateResolutionStore,
) -> List[str]:
"""Returns a sorted list of event_ids sorted by mainline ordering based on
the given event resolved_power_event_id
@@ -639,7 +665,7 @@ async def _get_mainline_depth_for_event(
event: EventBase,
mainline_map: Dict[str, int],
event_map: Dict[str, EventBase],
- state_res_store: "synapse.state.StateResolutionStore",
+ state_res_store: StateResolutionStore,
) -> int:
"""Get the mainline depths for the given event based on the mainline map
@@ -683,7 +709,7 @@ async def _get_event(
room_id: str,
event_id: str,
event_map: Dict[str, EventBase],
- state_res_store: "synapse.state.StateResolutionStore",
+ state_res_store: StateResolutionStore,
allow_none: Literal[False] = False,
) -> EventBase:
...
@@ -694,7 +720,7 @@ async def _get_event(
room_id: str,
event_id: str,
event_map: Dict[str, EventBase],
- state_res_store: "synapse.state.StateResolutionStore",
+ state_res_store: StateResolutionStore,
allow_none: Literal[True],
) -> Optional[EventBase]:
...
@@ -704,7 +730,7 @@ async def _get_event(
room_id: str,
event_id: str,
event_map: Dict[str, EventBase],
- state_res_store: "synapse.state.StateResolutionStore",
+ state_res_store: StateResolutionStore,
allow_none: bool = False,
) -> Optional[EventBase]:
"""Helper function to look up event in event_map, falling back to looking
diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py
index b1e5208c76..555b4e77d2 100644
--- a/synapse/storage/background_updates.py
+++ b/synapse/storage/background_updates.py
@@ -507,25 +507,6 @@ class BackgroundUpdater:
update_handler
)
- def register_noop_background_update(self, update_name: str) -> None:
- """Register a noop handler for a background update.
-
- This is useful when we previously did a background update, but no
- longer wish to do the update. In this case the background update should
- be removed from the schema delta files, but there may still be some
- users who have the background update queued, so this method should
- also be called to clear the update.
-
- Args:
- update_name: Name of update
- """
-
- async def noop_update(progress: JsonDict, batch_size: int) -> int:
- await self._end_background_update(update_name)
- return 1
-
- self.register_background_update_handler(update_name, noop_update)
-
def register_background_index_update(
self,
update_name: str,
diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py
index 4caaa81808..4bcb99d06e 100644
--- a/synapse/storage/controllers/persist_events.py
+++ b/synapse/storage/controllers/persist_events.py
@@ -388,10 +388,13 @@ class EventsPersistenceStorageController:
# TODO(faster_joins): get a real stream ordering, to make this work correctly
# across workers.
+ # https://github.com/matrix-org/synapse/issues/12994
#
# TODO(faster_joins): this can race against event persistence, in which case we
# will end up with incorrect state. Perhaps we should make this a job we
- # farm out to the event persister, somehow.
+ # farm out to the event persister thread, somehow.
+ # https://github.com/matrix-org/synapse/issues/13007
+ #
stream_id = self.main_store.get_room_max_stream_ordering()
await self.persist_events_store.update_current_state(room_id, delta, stream_id)
diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py
index 3b4cdb67eb..d3a44bc876 100644
--- a/synapse/storage/controllers/state.py
+++ b/synapse/storage/controllers/state.py
@@ -452,6 +452,9 @@ class StateStorageController:
up to date.
"""
# FIXME(faster_joins): what do we do here?
+ # https://github.com/matrix-org/synapse/issues/12814
+ # https://github.com/matrix-org/synapse/issues/12815
+ # https://github.com/matrix-org/synapse/issues/13008
return await self.stores.main.get_partial_current_state_deltas(
prev_stream_id, max_stream_id
diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py
index 11d9d16c19..9121badb3a 100644
--- a/synapse/storage/databases/main/__init__.py
+++ b/synapse/storage/databases/main/__init__.py
@@ -45,7 +45,6 @@ from .event_push_actions import EventPushActionsStore
from .events_bg_updates import EventsBackgroundUpdatesStore
from .events_forward_extremities import EventForwardExtremitiesStore
from .filtering import FilteringStore
-from .group_server import GroupServerStore
from .keys import KeyStore
from .lock import LockStore
from .media_repository import MediaRepositoryStore
@@ -117,7 +116,6 @@ class DataStore(
DeviceStore,
DeviceInboxStore,
UserDirectoryStore,
- GroupServerStore,
UserErasureStore,
MonthlyActiveUsersWorkerStore,
StatsStore,
diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py
index 599b418383..422e0e65ca 100644
--- a/synapse/storage/databases/main/deviceinbox.py
+++ b/synapse/storage/databases/main/deviceinbox.py
@@ -834,8 +834,6 @@ class DeviceInboxWorkerStore(SQLBaseStore):
class DeviceInboxBackgroundUpdateStore(SQLBaseStore):
DEVICE_INBOX_STREAM_ID = "device_inbox_stream_drop"
- REMOVE_DELETED_DEVICES = "remove_deleted_devices_from_device_inbox"
- REMOVE_HIDDEN_DEVICES = "remove_hidden_devices_from_device_inbox"
REMOVE_DEAD_DEVICES_FROM_INBOX = "remove_dead_devices_from_device_inbox"
def __init__(
@@ -857,15 +855,6 @@ class DeviceInboxBackgroundUpdateStore(SQLBaseStore):
self.DEVICE_INBOX_STREAM_ID, self._background_drop_index_device_inbox
)
- # Used to be a background update that deletes all device_inboxes for deleted
- # devices.
- self.db_pool.updates.register_noop_background_update(
- self.REMOVE_DELETED_DEVICES
- )
- # Used to be a background update that deletes all device_inboxes for hidden
- # devices.
- self.db_pool.updates.register_noop_background_update(self.REMOVE_HIDDEN_DEVICES)
-
self.db_pool.updates.register_background_update_handler(
self.REMOVE_DEAD_DEVICES_FROM_INBOX,
self._remove_dead_devices_from_device_inbox,
diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py
index d900064c07..2414a7dc38 100644
--- a/synapse/storage/databases/main/devices.py
+++ b/synapse/storage/databases/main/devices.py
@@ -1240,15 +1240,6 @@ class DeviceBackgroundUpdateStore(SQLBaseStore):
self._remove_duplicate_outbound_pokes,
)
- # a pair of background updates that were added during the 1.14 release cycle,
- # but replaced with 58/06dlols_unique_idx.py
- self.db_pool.updates.register_noop_background_update(
- "device_lists_outbound_last_success_unique_idx",
- )
- self.db_pool.updates.register_noop_background_update(
- "drop_device_lists_outbound_last_success_non_unique_idx",
- )
-
async def _drop_device_list_streams_non_unique_indexes(
self, progress: JsonDict, batch_size: int
) -> int:
@@ -1433,16 +1424,6 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
)
raise StoreError(500, "Problem storing device.")
- async def delete_device(self, user_id: str, device_id: str) -> None:
- """Delete a device and its device_inbox.
-
- Args:
- user_id: The ID of the user which owns the device
- device_id: The ID of the device to delete
- """
-
- await self.delete_devices(user_id, [device_id])
-
async def delete_devices(self, user_id: str, device_ids: List[str]) -> None:
"""Deletes several devices.
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index 17e35cf63e..a8773374be 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -46,7 +46,7 @@ from synapse.storage.database import (
)
from synapse.storage.databases.main.events_worker import EventCacheEntry
from synapse.storage.databases.main.search import SearchEntry
-from synapse.storage.engines.postgres import PostgresEngine
+from synapse.storage.engines import PostgresEngine
from synapse.storage.util.id_generators import AbstractStreamIdGenerator
from synapse.storage.util.sequence import SequenceGenerator
from synapse.types import JsonDict, StateMap, get_domain_from_id
diff --git a/synapse/storage/databases/main/events_bg_updates.py b/synapse/storage/databases/main/events_bg_updates.py
index d5f0059665..bea34a4c4a 100644
--- a/synapse/storage/databases/main/events_bg_updates.py
+++ b/synapse/storage/databases/main/events_bg_updates.py
@@ -177,11 +177,6 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
self._purged_chain_cover_index,
)
- # The event_thread_relation background update was replaced with the
- # event_arbitrary_relations one, which handles any relation to avoid
- # needed to potentially crawl the entire events table in the future.
- self.db_pool.updates.register_noop_background_update("event_thread_relation")
-
self.db_pool.updates.register_background_update_handler(
"event_arbitrary_relations",
self._event_arbitrary_relations,
diff --git a/synapse/storage/databases/main/group_server.py b/synapse/storage/databases/main/group_server.py
deleted file mode 100644
index c15a7136b6..0000000000
--- a/synapse/storage/databases/main/group_server.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2017 Vector Creations Ltd
-# Copyright 2018 New Vector Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from typing import TYPE_CHECKING
-
-from synapse.storage._base import SQLBaseStore
-from synapse.storage.database import DatabasePool, LoggingDatabaseConnection
-
-if TYPE_CHECKING:
- from synapse.server import HomeServer
-
-
-class GroupServerStore(SQLBaseStore):
- def __init__(
- self,
- database: DatabasePool,
- db_conn: LoggingDatabaseConnection,
- hs: "HomeServer",
- ):
- # Register a legacy groups background update as a no-op.
- database.updates.register_noop_background_update("local_group_updates_index")
- super().__init__(database, db_conn, hs)
diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py
index d028be16de..9b172a64d8 100644
--- a/synapse/storage/databases/main/media_repository.py
+++ b/synapse/storage/databases/main/media_repository.py
@@ -37,9 +37,6 @@ from synapse.types import JsonDict, UserID
if TYPE_CHECKING:
from synapse.server import HomeServer
-BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD = (
- "media_repository_drop_index_wo_method"
-)
BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD_2 = (
"media_repository_drop_index_wo_method_2"
)
@@ -111,13 +108,6 @@ class MediaRepositoryBackgroundUpdateStore(SQLBaseStore):
unique=True,
)
- # the original impl of _drop_media_index_without_method was broken (see
- # https://github.com/matrix-org/synapse/issues/8649), so we replace the original
- # impl with a no-op and run the fixed migration as
- # media_repository_drop_index_wo_method_2.
- self.db_pool.updates.register_noop_background_update(
- BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD
- )
self.db_pool.updates.register_background_update_handler(
BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD_2,
self._drop_media_index_without_method,
diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py
index 21e954ccc1..b6106affa6 100644
--- a/synapse/storage/databases/main/receipts.py
+++ b/synapse/storage/databases/main/receipts.py
@@ -36,6 +36,7 @@ from synapse.storage.database import (
LoggingTransaction,
)
from synapse.storage.engines import PostgresEngine
+from synapse.storage.engines._base import IsolationLevel
from synapse.storage.util.id_generators import (
AbstractStreamIdTracker,
MultiWriterIdGenerator,
@@ -764,6 +765,10 @@ class ReceiptsWorkerStore(SQLBaseStore):
linearized_event_id,
data,
stream_id=stream_id,
+            # Read committed is actually beneficial here because we check for a receipt
+            # with a greater stream ordering, and checking the very latest data at SELECT
+            # time is better than checking the data as of transaction start time.
+ isolation_level=IsolationLevel.READ_COMMITTED,
)
# If the receipt was older than the currently persisted one, nothing to do.
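The isolation-level comment above is the crux: under READ COMMITTED each statement sees rows committed by other transactions after this transaction began, which is what the greater-stream-ordering check wants. With raw psycopg2 (outside Synapse's pool; the DSN is a placeholder and a running Postgres is assumed), the equivalent session setting is:

    import psycopg2
    import psycopg2.extensions

    conn = psycopg2.connect("dbname=example")  # placeholder DSN
    conn.set_session(
        isolation_level=psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED
    )
    with conn, conn.cursor() as cur:
        # Each statement in this transaction sees the latest committed data,
        # not a snapshot taken at transaction start.
        cur.execute("SELECT 1")
        print(cur.fetchone())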
diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py
index 4991360b70..cb63cd9b7d 100644
--- a/synapse/storage/databases/main/registration.py
+++ b/synapse/storage/databases/main/registration.py
@@ -1805,21 +1805,10 @@ class RegistrationBackgroundUpdateStore(RegistrationWorkerStore):
columns=["creation_ts"],
)
- # we no longer use refresh tokens, but it's possible that some people
- # might have a background update queued to build this index. Just
- # clear the background update.
- self.db_pool.updates.register_noop_background_update(
- "refresh_tokens_device_index"
- )
-
self.db_pool.updates.register_background_update_handler(
"users_set_deactivated_flag", self._background_update_set_deactivated_flag
)
- self.db_pool.updates.register_noop_background_update(
- "user_threepids_grandfather"
- )
-
self.db_pool.updates.register_background_index_update(
"user_external_ids_user_id_idx",
index_name="user_external_ids_user_id_idx",
diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py
index 68d4fc2e64..5760d3428e 100644
--- a/synapse/storage/databases/main/room.py
+++ b/synapse/storage/databases/main/room.py
@@ -1112,6 +1112,7 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
# this can race with incoming events, so we watch out for FK errors.
# TODO(faster_joins): this still doesn't completely fix the race, since the persist process
# is not atomic. I fear we need an application-level lock.
+ # https://github.com/matrix-org/synapse/issues/12988
try:
await self.db_pool.runInteraction(
"clear_partial_state_room", self._clear_partial_state_room_txn, room_id
@@ -1119,6 +1120,7 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
return True
except self.db_pool.engine.module.DatabaseError as e:
# TODO(faster_joins): how do we distinguish between FK errors and other errors?
+ # https://github.com/matrix-org/synapse/issues/12988
logger.warning(
"Exception while clearing lazy partial-state-room %s, retrying: %s",
room_id,
diff --git a/synapse/storage/databases/main/search.py b/synapse/storage/databases/main/search.py
index 78e0773b2a..f6e24b68d2 100644
--- a/synapse/storage/databases/main/search.py
+++ b/synapse/storage/databases/main/search.py
@@ -113,7 +113,6 @@ class SearchBackgroundUpdateStore(SearchWorkerStore):
EVENT_SEARCH_UPDATE_NAME = "event_search"
EVENT_SEARCH_ORDER_UPDATE_NAME = "event_search_order"
- EVENT_SEARCH_USE_GIST_POSTGRES_NAME = "event_search_postgres_gist"
EVENT_SEARCH_USE_GIN_POSTGRES_NAME = "event_search_postgres_gin"
EVENT_SEARCH_DELETE_NON_STRINGS = "event_search_sqlite_delete_non_strings"
@@ -132,15 +131,6 @@ class SearchBackgroundUpdateStore(SearchWorkerStore):
self.EVENT_SEARCH_ORDER_UPDATE_NAME, self._background_reindex_search_order
)
- # we used to have a background update to turn the GIN index into a
- # GIST one; we no longer do that (obviously) because we actually want
- # a GIN index. However, it's possible that some people might still have
- # the background update queued, so we register a handler to clear the
- # background update.
- self.db_pool.updates.register_noop_background_update(
- self.EVENT_SEARCH_USE_GIST_POSTGRES_NAME
- )
-
self.db_pool.updates.register_background_update_handler(
self.EVENT_SEARCH_USE_GIN_POSTGRES_NAME, self._background_reindex_gin_search
)
diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py
index bdd00273cd..9674c4a757 100644
--- a/synapse/storage/databases/main/state.py
+++ b/synapse/storage/databases/main/state.py
@@ -127,13 +127,8 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
NotFoundError: if the room is unknown
"""
- # First we try looking up room version from the database, but for old
- # rooms we might not have added the room version to it yet so we fall
- # back to previous behaviour and look in current state events.
- #
# We really should have an entry in the rooms table for every room we
- # care about, but let's be a bit paranoid (at least while the background
- # update is happening) to avoid breaking existing rooms.
+ # care about, but let's be a bit paranoid.
room_version = self.db_pool.simple_select_one_onecol_txn(
txn,
table="rooms",
@@ -440,6 +435,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
)
# TODO(faster_joins): need to do something about workers here
+ # https://github.com/matrix-org/synapse/issues/12994
txn.call_after(self.is_partial_state_event.invalidate, (event.event_id,))
txn.call_after(
self._get_state_group_for_event.prefill,
diff --git a/synapse/storage/databases/main/stats.py b/synapse/storage/databases/main/stats.py
index b95dbef678..538451b05f 100644
--- a/synapse/storage/databases/main/stats.py
+++ b/synapse/storage/databases/main/stats.py
@@ -120,11 +120,6 @@ class StatsStore(StateDeltasStore):
self.db_pool.updates.register_background_update_handler(
"populate_stats_process_users", self._populate_stats_process_users
)
- # we no longer need to perform clean-up, but we will give ourselves
- # the potential to reintroduce it in the future – so documentation
- # will still encourage the use of this no-op handler.
- self.db_pool.updates.register_noop_background_update("populate_stats_cleanup")
- self.db_pool.updates.register_noop_background_update("populate_stats_prepare")
async def _populate_stats_process_users(
self, progress: JsonDict, batch_size: int
diff --git a/synapse/storage/engines/__init__.py b/synapse/storage/engines/__init__.py
index f51b3d228e..a182e8a098 100644
--- a/synapse/storage/engines/__init__.py
+++ b/synapse/storage/engines/__init__.py
@@ -11,11 +11,35 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Any, Mapping
+from typing import Any, Mapping, NoReturn
from ._base import BaseDatabaseEngine, IncorrectDatabaseSetup
-from .postgres import PostgresEngine
-from .sqlite import Sqlite3Engine
+
+# The classes `PostgresEngine` and `Sqlite3Engine` must always be importable, because
+# we use `isinstance(engine, PostgresEngine)` to write different queries for postgres
+# and sqlite. But the database driver modules are both optional: they may not be
+# installed. To account for this, create dummy classes on import failure so we can
+# still run `isinstance()` checks.
+try:
+ from .postgres import PostgresEngine
+except ImportError:
+
+ class PostgresEngine(BaseDatabaseEngine): # type: ignore[no-redef]
+ def __new__(cls, *args: object, **kwargs: object) -> NoReturn: # type: ignore[misc]
+ raise RuntimeError(
+ f"Cannot create {cls.__name__} -- psycopg2 module is not installed"
+ )
+
+
+try:
+ from .sqlite import Sqlite3Engine
+except ImportError:
+
+ class Sqlite3Engine(BaseDatabaseEngine): # type: ignore[no-redef]
+ def __new__(cls, *args: object, **kwargs: object) -> NoReturn: # type: ignore[misc]
+ raise RuntimeError(
+ f"Cannot create {cls.__name__} -- sqlite3 module is not installed"
+ )
def create_engine(database_config: Mapping[str, Any]) -> BaseDatabaseEngine:
@@ -30,4 +54,10 @@ def create_engine(database_config: Mapping[str, Any]) -> BaseDatabaseEngine:
raise RuntimeError("Unsupported database engine '%s'" % (name,))
-__all__ = ["create_engine", "BaseDatabaseEngine", "IncorrectDatabaseSetup"]
+__all__ = [
+ "create_engine",
+ "BaseDatabaseEngine",
+ "PostgresEngine",
+ "Sqlite3Engine",
+ "IncorrectDatabaseSetup",
+]
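The try/except shape above generalises to any optional dependency whose class must remain importable for isinstance() checks. A generic sketch with a hypothetical driver module:

    from typing import NoReturn

    try:
        import fancydb  # hypothetical optional dependency

        class FancyEngine:
            def __init__(self) -> None:
                self.conn = fancydb.connect()

    except ImportError:

        class FancyEngine:  # type: ignore[no-redef]
            def __new__(cls, *args: object, **kwargs: object) -> NoReturn:
                raise RuntimeError(
                    "Cannot create FancyEngine -- fancydb is not installed"
                )


    def is_fancy(engine: object) -> bool:
        # isinstance() works whether or not fancydb is installed.
        return isinstance(engine, FancyEngine)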
diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py
index 391f8ed24a..517f9d5f98 100644
--- a/synapse/storage/engines/postgres.py
+++ b/synapse/storage/engines/postgres.py
@@ -15,6 +15,8 @@
import logging
from typing import TYPE_CHECKING, Any, Mapping, NoReturn, Optional, Tuple, cast
+import psycopg2.extensions
+
from synapse.storage.engines._base import (
BaseDatabaseEngine,
IncorrectDatabaseSetup,
@@ -23,18 +25,14 @@ from synapse.storage.engines._base import (
from synapse.storage.types import Cursor
if TYPE_CHECKING:
- import psycopg2 # noqa: F401
-
from synapse.storage.database import LoggingDatabaseConnection
logger = logging.getLogger(__name__)
-class PostgresEngine(BaseDatabaseEngine["psycopg2.connection"]):
+class PostgresEngine(BaseDatabaseEngine[psycopg2.extensions.connection]):
def __init__(self, database_config: Mapping[str, Any]):
- import psycopg2.extensions
-
super().__init__(psycopg2, database_config)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
@@ -69,7 +67,9 @@ class PostgresEngine(BaseDatabaseEngine["psycopg2.connection"]):
return collation, ctype
def check_database(
- self, db_conn: "psycopg2.connection", allow_outdated_version: bool = False
+ self,
+ db_conn: psycopg2.extensions.connection,
+ allow_outdated_version: bool = False,
) -> None:
# Get the version of PostgreSQL that we're using. As per the psycopg2
# docs: The number is formed by converting the major, minor, and
@@ -176,8 +176,6 @@ class PostgresEngine(BaseDatabaseEngine["psycopg2.connection"]):
return True
def is_deadlock(self, error: Exception) -> bool:
- import psycopg2.extensions
-
if isinstance(error, psycopg2.DatabaseError):
# https://www.postgresql.org/docs/current/static/errcodes-appendix.html
# "40001" serialization_failure
@@ -185,7 +183,7 @@ class PostgresEngine(BaseDatabaseEngine["psycopg2.connection"]):
return error.pgcode in ["40001", "40P01"]
return False
- def is_connection_closed(self, conn: "psycopg2.connection") -> bool:
+ def is_connection_closed(self, conn: psycopg2.extensions.connection) -> bool:
return bool(conn.closed)
def lock_table(self, txn: Cursor, table: str) -> None:
@@ -205,18 +203,16 @@ class PostgresEngine(BaseDatabaseEngine["psycopg2.connection"]):
else:
return "%i.%i.%i" % (numver / 10000, (numver % 10000) / 100, numver % 100)
- def in_transaction(self, conn: "psycopg2.connection") -> bool:
- import psycopg2.extensions
-
+ def in_transaction(self, conn: psycopg2.extensions.connection) -> bool:
return conn.status != psycopg2.extensions.STATUS_READY
def attempt_to_set_autocommit(
- self, conn: "psycopg2.connection", autocommit: bool
+ self, conn: psycopg2.extensions.connection, autocommit: bool
) -> None:
return conn.set_session(autocommit=autocommit)
def attempt_to_set_isolation_level(
- self, conn: "psycopg2.connection", isolation_level: Optional[int]
+ self, conn: psycopg2.extensions.connection, isolation_level: Optional[int]
) -> None:
if isolation_level is None:
isolation_level = self.default_isolation_level
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index c33df42084..09a2b58f4c 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -23,8 +23,7 @@ from typing_extensions import Counter as CounterType
from synapse.config.homeserver import HomeServerConfig
from synapse.storage.database import LoggingDatabaseConnection
-from synapse.storage.engines import BaseDatabaseEngine
-from synapse.storage.engines.postgres import PostgresEngine
+from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine
from synapse.storage.schema import SCHEMA_COMPAT_VERSION, SCHEMA_VERSION
from synapse.storage.types import Cursor
diff --git a/synapse/storage/schema/main/delta/70/02remove_noop_background_updates.sql b/synapse/storage/schema/main/delta/70/02remove_noop_background_updates.sql
new file mode 100644
index 0000000000..fa96ac50c2
--- /dev/null
+++ b/synapse/storage/schema/main/delta/70/02remove_noop_background_updates.sql
@@ -0,0 +1,61 @@
+/* Copyright 2022 The Matrix.org Foundation C.I.C
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- Clean up background updates which should no longer be run. Previously these
+-- used the (now removed) register_noop_background_update method.
+
+-- Used to be a background update that deletes all device_inboxes for deleted
+-- devices.
+DELETE FROM background_updates WHERE update_name = 'remove_deleted_devices_from_device_inbox';
+-- Used to be a background update that deletes all device_inboxes for hidden
+-- devices.
+DELETE FROM background_updates WHERE update_name = 'remove_hidden_devices_from_device_inbox';
+
+-- A pair of background updates that were added during the 1.14 release cycle,
+-- but replaced with 58/06dlols_unique_idx.py
+DELETE FROM background_updates WHERE update_name = 'device_lists_outbound_last_success_unique_idx';
+DELETE FROM background_updates WHERE update_name = 'drop_device_lists_outbound_last_success_non_unique_idx';
+
+-- The event_thread_relation background update was replaced with the
+-- event_arbitrary_relations one, which handles any relation, avoiding the
+-- need to potentially crawl the entire events table in the future.
+DELETE FROM background_updates WHERE update_name = 'event_thread_relation';
+
+-- A legacy groups background update.
+DELETE FROM background_updates WHERE update_name = 'local_group_updates_index';
+
+-- The original impl of _drop_media_index_without_method was broken (see
+-- https://github.com/matrix-org/synapse/issues/8649), so we replace the original
+-- impl with a no-op and run the fixed migration as
+-- media_repository_drop_index_wo_method_2.
+DELETE FROM background_updates WHERE update_name = 'media_repository_drop_index_wo_method';
+
+-- We no longer use refresh tokens, but it's possible that some people
+-- might have a background update queued to build this index. Just
+-- clear the background update.
+DELETE FROM background_updates WHERE update_name = 'refresh_tokens_device_index';
+
+DELETE FROM background_updates WHERE update_name = 'user_threepids_grandfather';
+
+-- We used to have a background update to turn the GIN index into a
+-- GIST one; we no longer do that (obviously) because we actually want
+-- a GIN index. However, it's possible that some people might still have
+-- the background update queued, so we clear it here.
+DELETE FROM background_updates WHERE update_name = 'event_search_postgres_gist';
+
+-- We no longer need to perform clean-up.
+DELETE FROM background_updates WHERE update_name = 'populate_stats_cleanup';
+DELETE FROM background_updates WHERE update_name = 'populate_stats_prepare';
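The delta simply removes queued rows from the background_updates table. A self-contained illustration of the effect, using an in-memory SQLite database as a stand-in for Synapse's schema:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE background_updates (update_name TEXT PRIMARY KEY)")
    conn.execute(
        "INSERT INTO background_updates VALUES ('event_thread_relation'), ('keep_me')"
    )
    conn.execute(
        "DELETE FROM background_updates WHERE update_name = 'event_thread_relation'"
    )
    print(conn.execute("SELECT update_name FROM background_updates").fetchall())
    # -> [('keep_me',)]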
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index 96aaffb53c..af3bab2c15 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -546,6 +546,7 @@ class StateFilter:
# the sender of a piece of state wasn't actually in the room, then clearly that
# state shouldn't have been returned.
# We should at least add some tests around this to see what happens.
+ # https://github.com/matrix-org/synapse/issues/13006
# if we haven't requested membership events, then it depends on the value of
# 'include_others'
diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py
index d8046b7553..6323d452e7 100644
--- a/synapse/util/__init__.py
+++ b/synapse/util/__init__.py
@@ -19,6 +19,7 @@ from typing import Any, Callable, Dict, Generator, Optional
import attr
from frozendict import frozendict
+from matrix_common.versionstring import get_distribution_version_string
from twisted.internet import defer, task
from twisted.internet.defer import Deferred
@@ -183,3 +184,8 @@ def log_failure(
if not consumeErrors:
return failure
return None
+
+
+# Version string with git info. Computed here once so that we don't invoke git multiple
+# times.
+SYNAPSE_VERSION = get_distribution_version_string("matrix-synapse", __file__)
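Call sites throughout the codebase now import the precomputed constant rather than re-running the version lookup themselves; minimal usage (assuming matrix-synapse is installed):

    from synapse.util import SYNAPSE_VERSION

    print("Synapse version:", SYNAPSE_VERSION)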
|