diff --git a/changelog.d/8437.feature b/changelog.d/8437.feature
new file mode 100644
index 0000000000..4abcccb326
--- /dev/null
+++ b/changelog.d/8437.feature
@@ -0,0 +1 @@
+Implement [MSC2409](https://github.com/matrix-org/matrix-doc/pull/2409) to send typing, read receipts, and presence events to appservices.
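For context, a sketch of the transaction body an MSC2409-capable appservice receives under this change. The `de.sorunome.msc2409.ephemeral` key comes from `push_bulk` later in this diff; the IDs and event contents are invented:

```python
# Illustrative MSC2409 transaction body. Persisted events keep the usual
# "events" key; ephemeral events ride alongside under the unstable prefix.
transaction_body = {
    "events": [
        {
            "type": "m.room.message",
            "room_id": "!room:example.org",
            "sender": "@alice:example.org",
            "content": {"msgtype": "m.text", "body": "hello"},
        }
    ],
    "de.sorunome.msc2409.ephemeral": [
        {
            "type": "m.typing",
            "room_id": "!room:example.org",
            "content": {"user_ids": ["@alice:example.org"]},
        }
    ],
}
```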
diff --git a/changelog.d/8515.misc b/changelog.d/8515.misc
new file mode 100644
index 0000000000..1f8aa292d8
--- /dev/null
+++ b/changelog.d/8515.misc
@@ -0,0 +1 @@
+Apply some internal fixes to the `HomeServer` class to make its code more idiomatic and statically-verifiable.
diff --git a/changelog.d/8535.feature b/changelog.d/8535.feature
new file mode 100644
index 0000000000..45342e66ad
--- /dev/null
+++ b/changelog.d/8535.feature
@@ -0,0 +1 @@
+Support modifying event content in `ThirdPartyRules` modules.
diff --git a/changelog.d/8561.misc b/changelog.d/8561.misc
new file mode 100644
index 0000000000..a40dedfa8e
--- /dev/null
+++ b/changelog.d/8561.misc
@@ -0,0 +1 @@
+Move metric registration code down into `LruCache`.
diff --git a/changelog.d/8564.feature b/changelog.d/8564.feature
new file mode 100644
index 0000000000..45342e66ad
--- /dev/null
+++ b/changelog.d/8564.feature
@@ -0,0 +1 @@
+Support modifying event content in `ThirdPartyRules` modules.
diff --git a/mypy.ini b/mypy.ini
index 9748f6258c..b5db54ee3b 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -15,6 +15,7 @@ files =
synapse/events/builder.py,
synapse/events/spamcheck.py,
synapse/federation,
   synapse/handlers/account_data.py,
+  synapse/handlers/appservice.py,
synapse/handlers/auth.py,
synapse/handlers/cas_handler.py,
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 37668bc42a..668e0e4314 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -34,7 +34,6 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events import EventBase
from synapse.logging import opentracing as opentracing
from synapse.types import StateMap, UserID
-from synapse.util.caches import register_cache
from synapse.util.caches.lrucache import LruCache
from synapse.util.metrics import Measure
@@ -70,8 +69,7 @@ class Auth:
self.store = hs.get_datastore()
self.state = hs.get_state_handler()
- self.token_cache = LruCache(10000)
- register_cache("cache", "token_cache", self.token_cache)
+ self.token_cache = LruCache(10000, "token_cache")
self._auth_blocking = AuthBlocking(self.hs)
diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py
index 13ec1f71a6..3862d9c08f 100644
--- a/synapse/appservice/__init__.py
+++ b/synapse/appservice/__init__.py
@@ -14,14 +14,15 @@
# limitations under the License.
import logging
import re
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Iterable, List, Optional
from synapse.api.constants import EventTypes
-from synapse.appservice.api import ApplicationServiceApi
-from synapse.types import GroupID, get_domain_from_id
+from synapse.events import EventBase
+from synapse.types import GroupID, JsonDict, UserID, get_domain_from_id
from synapse.util.caches.descriptors import cached
if TYPE_CHECKING:
+ from synapse.appservice.api import ApplicationServiceApi
from synapse.storage.databases.main import DataStore
logger = logging.getLogger(__name__)
@@ -32,38 +33,6 @@ class ApplicationServiceState:
UP = "up"
-class AppServiceTransaction:
- """Represents an application service transaction."""
-
- def __init__(self, service, id, events):
- self.service = service
- self.id = id
- self.events = events
-
- async def send(self, as_api: ApplicationServiceApi) -> bool:
- """Sends this transaction using the provided AS API interface.
-
- Args:
- as_api: The API to use to send.
- Returns:
- True if the transaction was sent.
- """
- return await as_api.push_bulk(
- service=self.service, events=self.events, txn_id=self.id
- )
-
- async def complete(self, store: "DataStore") -> None:
- """Completes this transaction as successful.
-
- Marks this transaction ID on the application service and removes the
- transaction contents from the database.
-
- Args:
- store: The database store to operate on.
- """
- await store.complete_appservice_txn(service=self.service, txn_id=self.id)
-
-
class ApplicationService:
"""Defines an application service. This definition is mostly what is
provided to the /register AS API.
@@ -91,6 +60,7 @@ class ApplicationService:
protocols=None,
rate_limited=True,
ip_range_whitelist=None,
+ supports_ephemeral=False,
):
self.token = token
self.url = (
@@ -102,6 +72,7 @@ class ApplicationService:
self.namespaces = self._check_namespaces(namespaces)
self.id = id
self.ip_range_whitelist = ip_range_whitelist
+ self.supports_ephemeral = supports_ephemeral
if "|" in self.id:
raise Exception("application service ID cannot contain '|' character")
@@ -161,19 +132,21 @@ class ApplicationService:
raise ValueError("Expected string for 'regex' in ns '%s'" % ns)
return namespaces
- def _matches_regex(self, test_string, namespace_key):
+    def _matches_regex(self, test_string: str, namespace_key: str) -> Optional[JsonDict]:
for regex_obj in self.namespaces[namespace_key]:
if regex_obj["regex"].match(test_string):
return regex_obj
return None
- def _is_exclusive(self, ns_key, test_string):
+ def _is_exclusive(self, ns_key: str, test_string: str) -> bool:
regex_obj = self._matches_regex(test_string, ns_key)
if regex_obj:
return regex_obj["exclusive"]
return False
- async def _matches_user(self, event, store):
+ async def _matches_user(
+ self, event: Optional[EventBase], store: Optional["DataStore"] = None
+ ) -> bool:
if not event:
return False
@@ -188,14 +161,23 @@ class ApplicationService:
if not store:
return False
- does_match = await self._matches_user_in_member_list(event.room_id, store)
+ does_match = await self.matches_user_in_member_list(event.room_id, store)
return does_match
- @cached(num_args=1, cache_context=True)
- async def _matches_user_in_member_list(self, room_id, store, cache_context):
- member_list = await store.get_users_in_room(
- room_id, on_invalidate=cache_context.invalidate
- )
+ @cached(num_args=1)
+ async def matches_user_in_member_list(
+ self, room_id: str, store: "DataStore"
+ ) -> bool:
+    """Check if this service is interested in a room based upon its membership
+
+ Args:
+ room_id: The room to check.
+ store: The datastore to query.
+
+ Returns:
+ True if this service would like to know about this room.
+ """
+ member_list = await store.get_users_in_room(room_id)
# check joined member events
for user_id in member_list:
@@ -203,12 +185,14 @@ class ApplicationService:
return True
return False
- def _matches_room_id(self, event):
+ def _matches_room_id(self, event: EventBase) -> bool:
if hasattr(event, "room_id"):
return self.is_interested_in_room(event.room_id)
return False
- async def _matches_aliases(self, event, store):
+ async def _matches_aliases(
+ self, event: EventBase, store: Optional["DataStore"] = None
+ ) -> bool:
if not store or not event:
return False
@@ -218,12 +202,15 @@ class ApplicationService:
return True
return False
- async def is_interested(self, event, store=None) -> bool:
+ async def is_interested(
+ self, event: EventBase, store: Optional["DataStore"] = None
+ ) -> bool:
"""Check if this service is interested in this event.
Args:
- event(Event): The event to check.
- store(DataStore)
+ event: The event to check.
+ store: The datastore to query.
+
Returns:
True if this service would like to know about this event.
"""
@@ -231,39 +218,66 @@ class ApplicationService:
if self._matches_room_id(event):
return True
+ # This will check the namespaces first before
+ # checking the store, so should be run before _matches_aliases
+ if await self._matches_user(event, store):
+ return True
+
+ # This will check the store, so should be run last
if await self._matches_aliases(event, store):
return True
- if await self._matches_user(event, store):
+ return False
+
+ @cached(num_args=1)
+ async def is_interested_in_presence(
+ self, user_id: UserID, store: "DataStore"
+ ) -> bool:
+    """Check if this service is interested in a user's presence
+
+ Args:
+ user_id: The user to check.
+ store: The datastore to query.
+
+ Returns:
+ True if this service would like to know about presence for this user.
+ """
+        if self.is_interested_in_user(user_id.to_string()):
             return True
+        # Otherwise, find all the rooms the user is in
+        room_ids = await store.get_rooms_for_user(user_id.to_string())
+ # Then find out if the appservice is interested in any of those rooms
+ for room_id in room_ids:
+ if await self.matches_user_in_member_list(room_id, store):
+ return True
return False
- def is_interested_in_user(self, user_id):
+ def is_interested_in_user(self, user_id: str) -> bool:
return (
- self._matches_regex(user_id, ApplicationService.NS_USERS)
+ bool(self._matches_regex(user_id, ApplicationService.NS_USERS))
or user_id == self.sender
)
- def is_interested_in_alias(self, alias):
+ def is_interested_in_alias(self, alias: str) -> bool:
return bool(self._matches_regex(alias, ApplicationService.NS_ALIASES))
- def is_interested_in_room(self, room_id):
+ def is_interested_in_room(self, room_id: str) -> bool:
return bool(self._matches_regex(room_id, ApplicationService.NS_ROOMS))
- def is_exclusive_user(self, user_id):
+ def is_exclusive_user(self, user_id: str) -> bool:
return (
self._is_exclusive(ApplicationService.NS_USERS, user_id)
or user_id == self.sender
)
- def is_interested_in_protocol(self, protocol):
+ def is_interested_in_protocol(self, protocol: str) -> bool:
return protocol in self.protocols
- def is_exclusive_alias(self, alias):
+ def is_exclusive_alias(self, alias: str) -> bool:
return self._is_exclusive(ApplicationService.NS_ALIASES, alias)
- def is_exclusive_room(self, room_id):
+ def is_exclusive_room(self, room_id: str) -> bool:
return self._is_exclusive(ApplicationService.NS_ROOMS, room_id)
def get_exclusive_user_regexes(self):
@@ -276,14 +290,14 @@ class ApplicationService:
if regex_obj["exclusive"]
]
- def get_groups_for_user(self, user_id):
+ def get_groups_for_user(self, user_id: str) -> Iterable[str]:
"""Get the groups that this user is associated with by this AS
Args:
- user_id (str): The ID of the user.
+ user_id: The ID of the user.
Returns:
- iterable[str]: an iterable that yields group_id strings.
+ An iterable that yields group_id strings.
"""
return (
regex_obj["group_id"]
@@ -291,7 +305,7 @@ class ApplicationService:
if "group_id" in regex_obj and regex_obj["regex"].match(user_id)
)
- def is_rate_limited(self):
+ def is_rate_limited(self) -> bool:
return self.rate_limited
def __str__(self):
@@ -300,3 +314,45 @@ class ApplicationService:
dict_copy["token"] = "<redacted>"
dict_copy["hs_token"] = "<redacted>"
return "ApplicationService: %s" % (dict_copy,)
+
+
+class AppServiceTransaction:
+ """Represents an application service transaction."""
+
+ def __init__(
+ self,
+ service: ApplicationService,
+ id: int,
+ events: List[EventBase],
+ ephemeral: List[JsonDict],
+ ):
+ self.service = service
+ self.id = id
+ self.events = events
+ self.ephemeral = ephemeral
+
+ async def send(self, as_api: "ApplicationServiceApi") -> bool:
+ """Sends this transaction using the provided AS API interface.
+
+ Args:
+ as_api: The API to use to send.
+ Returns:
+ True if the transaction was sent.
+ """
+ return await as_api.push_bulk(
+ service=self.service,
+ events=self.events,
+ ephemeral=self.ephemeral,
+ txn_id=self.id,
+ )
+
+ async def complete(self, store: "DataStore") -> None:
+ """Completes this transaction as successful.
+
+ Marks this transaction ID on the application service and removes the
+ transaction contents from the database.
+
+ Args:
+ store: The database store to operate on.
+ """
+ await store.complete_appservice_txn(service=self.service, txn_id=self.id)
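A hedged construction sketch of the two classes above; the registration values are placeholders rather than a working appservice:

```python
from synapse.appservice import ApplicationService, AppServiceTransaction

# Placeholder registration values; the namespaces are left empty for brevity.
service = ApplicationService(
    token="as_token",
    hostname="example.org",
    url="http://localhost:9000",
    namespaces={"users": [], "aliases": [], "rooms": []},
    hs_token="hs_token",
    sender="@bot:example.org",
    id="example_as",
    supports_ephemeral=True,
)

# Transactions now carry ephemeral events alongside persisted ones; both
# lists may be empty.
txn = AppServiceTransaction(service=service, id=1, events=[], ephemeral=[])
```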
diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py
index e8f0793795..e366a982b8 100644
--- a/synapse/appservice/api.py
+++ b/synapse/appservice/api.py
@@ -14,12 +14,13 @@
# limitations under the License.
import logging
import urllib
-from typing import TYPE_CHECKING, Optional, Tuple
+from typing import TYPE_CHECKING, List, Optional, Tuple
from prometheus_client import Counter
from synapse.api.constants import EventTypes, ThirdPartyEntityKind
from synapse.api.errors import CodeMessageException
+from synapse.events import EventBase
from synapse.events.utils import serialize_event
from synapse.http.client import SimpleHttpClient
from synapse.types import JsonDict, ThirdPartyInstanceID
@@ -201,7 +202,13 @@ class ApplicationServiceApi(SimpleHttpClient):
key = (service.id, protocol)
return await self.protocol_meta_cache.wrap(key, _get)
- async def push_bulk(self, service, events, txn_id=None):
+ async def push_bulk(
+ self,
+ service: "ApplicationService",
+ events: List[EventBase],
+ ephemeral: List[JsonDict],
+ txn_id: Optional[int] = None,
+ ):
if service.url is None:
return True
@@ -211,15 +218,19 @@ class ApplicationServiceApi(SimpleHttpClient):
logger.warning(
"push_bulk: Missing txn ID sending events to %s", service.url
)
- txn_id = str(0)
- txn_id = str(txn_id)
+ txn_id = 0
+
+ uri = service.url + ("/transactions/%s" % urllib.parse.quote(str(txn_id)))
+
+ # Never send ephemeral events to appservices that do not support it
+ if service.supports_ephemeral:
+ body = {"events": events, "de.sorunome.msc2409.ephemeral": ephemeral}
+ else:
+ body = {"events": events}
- uri = service.url + ("/transactions/%s" % urllib.parse.quote(txn_id))
try:
await self.put_json(
- uri=uri,
- json_body={"events": events},
- args={"access_token": service.hs_token},
+ uri=uri, json_body=body, args={"access_token": service.hs_token},
)
sent_transactions_counter.labels(service.id).inc()
sent_events_counter.labels(service.id).inc(len(events))
diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py
index 8eb8c6f51c..ad3c408519 100644
--- a/synapse/appservice/scheduler.py
+++ b/synapse/appservice/scheduler.py
@@ -49,10 +49,13 @@ This is all tied together by the AppServiceScheduler which DIs the required
components.
"""
import logging
+from typing import List, Optional
-from synapse.appservice import ApplicationServiceState
+from synapse.appservice import ApplicationService, ApplicationServiceState
+from synapse.events import EventBase
from synapse.logging.context import run_in_background
from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.types import JsonDict
logger = logging.getLogger(__name__)
@@ -82,8 +85,13 @@ class ApplicationServiceScheduler:
for service in services:
self.txn_ctrl.start_recoverer(service)
- def submit_event_for_as(self, service, event):
- self.queuer.enqueue(service, event)
+ def submit_event_for_as(self, service: ApplicationService, event: EventBase):
+ self.queuer.enqueue_event(service, event)
+
+ def submit_ephemeral_events_for_as(
+ self, service: ApplicationService, events: List[JsonDict]
+ ):
+ self.queuer.enqueue_ephemeral(service, events)
class _ServiceQueuer:
@@ -96,17 +104,15 @@ class _ServiceQueuer:
def __init__(self, txn_ctrl, clock):
self.queued_events = {} # dict of {service_id: [events]}
+ self.queued_ephemeral = {} # dict of {service_id: [events]}
# the appservices which currently have a transaction in flight
self.requests_in_flight = set()
self.txn_ctrl = txn_ctrl
self.clock = clock
- def enqueue(self, service, event):
- self.queued_events.setdefault(service.id, []).append(event)
-
+ def _start_background_request(self, service):
# start a sender for this appservice if we don't already have one
-
if service.id in self.requests_in_flight:
return
@@ -114,7 +120,15 @@ class _ServiceQueuer:
"as-sender-%s" % (service.id,), self._send_request, service
)
- async def _send_request(self, service):
+ def enqueue_event(self, service: ApplicationService, event: EventBase):
+ self.queued_events.setdefault(service.id, []).append(event)
+ self._start_background_request(service)
+
+ def enqueue_ephemeral(self, service: ApplicationService, events: List[JsonDict]):
+ self.queued_ephemeral.setdefault(service.id, []).extend(events)
+ self._start_background_request(service)
+
+ async def _send_request(self, service: ApplicationService):
# sanity-check: we shouldn't get here if this service already has a sender
# running.
assert service.id not in self.requests_in_flight
@@ -123,10 +137,11 @@ class _ServiceQueuer:
try:
while True:
events = self.queued_events.pop(service.id, [])
- if not events:
+ ephemeral = self.queued_ephemeral.pop(service.id, [])
+ if not events and not ephemeral:
return
try:
- await self.txn_ctrl.send(service, events)
+ await self.txn_ctrl.send(service, events, ephemeral)
except Exception:
logger.exception("AS request failed")
finally:
@@ -158,9 +173,16 @@ class _TransactionController:
# for UTs
self.RECOVERER_CLASS = _Recoverer
- async def send(self, service, events):
+ async def send(
+ self,
+ service: ApplicationService,
+ events: List[EventBase],
+        ephemeral: Optional[List[JsonDict]] = None,
+ ):
try:
- txn = await self.store.create_appservice_txn(service=service, events=events)
+ txn = await self.store.create_appservice_txn(
+                service=service, events=events, ephemeral=ephemeral or []
+ )
service_is_up = await self._is_service_up(service)
if service_is_up:
sent = await txn.send(self.as_api)
@@ -204,7 +226,7 @@ class _TransactionController:
recoverer.recover()
logger.info("Now %i active recoverers", len(self.recoverers))
- async def _is_service_up(self, service):
+ async def _is_service_up(self, service: ApplicationService) -> bool:
state = await self.store.get_appservice_state(service)
return state == ApplicationServiceState.UP or state is None
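A usage sketch of the new scheduler entry point. `scheduler` (a started ApplicationServiceScheduler) and `service` are assumed here; the typing event mirrors the shape built in the typing handler later in this diff:

```python
# Ephemeral events are queued per service and batched into the next
# transaction alongside any pending persisted events.
typing_event = {
    "type": "m.typing",
    "room_id": "!room:example.org",
    "content": {"user_ids": ["@alice:example.org"]},
}
scheduler.submit_ephemeral_events_for_as(service, [typing_event])
```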
diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py
index 8ed3e24258..746fc3cc02 100644
--- a/synapse/config/appservice.py
+++ b/synapse/config/appservice.py
@@ -160,6 +160,8 @@ def _load_appservice(hostname, as_info, config_filename):
if as_info.get("ip_range_whitelist"):
ip_range_whitelist = IPSet(as_info.get("ip_range_whitelist"))
+ supports_ephemeral = as_info.get("de.sorunome.msc2409.push_ephemeral", False)
+
return ApplicationService(
token=as_info["as_token"],
hostname=hostname,
@@ -168,6 +170,7 @@ def _load_appservice(hostname, as_info, config_filename):
hs_token=as_info["hs_token"],
sender=user_id,
id=as_info["id"],
+ supports_ephemeral=supports_ephemeral,
protocols=protocols,
rate_limited=rate_limited,
ip_range_whitelist=ip_range_whitelist,
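A minimal sketch of the flag parsing added to `_load_appservice`; the registration dict is illustrative:

```python
# Registrations that don't know about MSC2409 simply omit the key and
# default to supports_ephemeral=False.
as_info = {
    "as_token": "as_token",
    "hs_token": "hs_token",
    "id": "example_as",
    "url": "http://localhost:9000",
    "sender_localpart": "bot",
    "de.sorunome.msc2409.push_ephemeral": True,
}
supports_ephemeral = as_info.get("de.sorunome.msc2409.push_ephemeral", False)
assert supports_ephemeral is True
```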
diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py
index 7a51d0a22f..65df62107f 100644
--- a/synapse/events/__init__.py
+++ b/synapse/events/__init__.py
@@ -312,6 +312,12 @@ class EventBase(metaclass=abc.ABCMeta):
"""
return [e for e, _ in self.auth_events]
+ def freeze(self):
+ """'Freeze' the event dict, so it cannot be modified by accident"""
+
+ # this will be a no-op if the event dict is already frozen.
+ self._dict = freeze(self._dict)
+
class FrozenEvent(EventBase):
format_version = EventFormatVersions.V1 # All events of this type are V1
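What `freeze()` relies on, in miniature: `EventBase.freeze()` delegates to `synapse.util.frozenutils.freeze`, which converts the event dict into a frozendict. A minimal sketch with invented content:

```python
from frozendict import frozendict

content = frozendict({"msgtype": "m.text", "body": "hello"})
try:
    content["body"] = "tampered"  # frozendict forbids item assignment
except TypeError:
    print("frozen event content cannot be modified")
```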
diff --git a/synapse/events/builder.py b/synapse/events/builder.py
index df4f950fec..07df258e6e 100644
--- a/synapse/events/builder.py
+++ b/synapse/events/builder.py
@@ -98,7 +98,7 @@ class EventBuilder:
return self._state_key is not None
async def build(
- self, prev_event_ids: List[str], auth_event_ids: Optional[List[str]]
+ self, prev_event_ids: List[str], auth_event_ids: Optional[List[str]],
) -> EventBase:
"""Transform into a fully signed and hashed event
diff --git a/synapse/events/third_party_rules.py b/synapse/events/third_party_rules.py
index 1535cc5339..77fbd3f68a 100644
--- a/synapse/events/third_party_rules.py
+++ b/synapse/events/third_party_rules.py
@@ -12,7 +12,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Callable
+
+from typing import Callable, Union
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
@@ -44,15 +45,20 @@ class ThirdPartyEventRules:
async def check_event_allowed(
self, event: EventBase, context: EventContext
- ) -> bool:
+ ) -> Union[bool, dict]:
"""Check if a provided event should be allowed in the given context.
+ The module can return:
+ * True: the event is allowed.
+ * False: the event is not allowed, and should be rejected with M_FORBIDDEN.
+ * a dict: replacement event data.
+
Args:
event: The event to be checked.
context: The context of the event.
Returns:
- True if the event should be allowed, False if not.
+            The result from the ThirdPartyRules module, as above.
"""
if self.third_party_rules is None:
return True
@@ -63,9 +69,10 @@ class ThirdPartyEventRules:
events = await self.store.get_events(prev_state_ids.values())
state_events = {(ev.type, ev.state_key): ev for ev in events.values()}
- # The module can modify the event slightly if it wants, but caution should be
- # exercised, and it's likely to go very wrong if applied to events received over
- # federation.
+ # Ensure that the event is frozen, to make sure that the module is not tempted
+ # to try to modify it. Any attempt to modify it at this point will invalidate
+ # the hashes and signatures.
+ event.freeze()
return await self.third_party_rules.check_event_allowed(event, state_events)
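A sketch of a module consuming the new contract; the module and its filtering logic are invented, and only the True / False / replacement-dict return semantics come from this diff:

```python
class ExampleRulesModule:
    """Replaces the content of matching messages rather than rejecting them."""

    def __init__(self, config, http_client):
        self.config = config

    async def check_event_allowed(self, event, state_events):
        if event.type != "m.room.message":
            return True  # allow, unchanged
        if "forbidden" in event.content.get("body", ""):
            # Returning a dict asks the homeserver to rebuild the event with
            # this data (see _rebuild_event_after_third_party_rules below).
            new_event = event.get_dict()
            new_event["content"] = {"msgtype": "m.text", "body": "[redacted]"}
            return new_event
        return True
```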
diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py
index c8d5e58035..07240d3a14 100644
--- a/synapse/handlers/appservice.py
+++ b/synapse/handlers/appservice.py
@@ -14,6 +14,7 @@
# limitations under the License.
import logging
+from typing import Dict, List, Optional
from prometheus_client import Counter
@@ -21,13 +22,16 @@ from twisted.internet import defer
import synapse
from synapse.api.constants import EventTypes
+from synapse.appservice import ApplicationService
+from synapse.events import EventBase
+from synapse.handlers.presence import format_user_presence_state
from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.metrics import (
event_processing_loop_counter,
event_processing_loop_room_count,
)
from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.types import RoomStreamToken
+from synapse.types import Collection, JsonDict, RoomStreamToken, UserID
from synapse.util.metrics import Measure
logger = logging.getLogger(__name__)
@@ -44,6 +48,7 @@ class ApplicationServicesHandler:
self.started_scheduler = False
self.clock = hs.get_clock()
self.notify_appservices = hs.config.notify_appservices
+ self.event_sources = hs.get_event_sources()
self.current_max = 0
self.is_processing = False
@@ -82,7 +87,7 @@ class ApplicationServicesHandler:
if not events:
break
- events_by_room = {}
+ events_by_room = {} # type: Dict[str, List[EventBase]]
for event in events:
events_by_room.setdefault(event.room_id, []).append(event)
@@ -161,6 +166,106 @@
finally:
self.is_processing = False
+ async def notify_interested_services_ephemeral(
+ self, stream_key: str, new_token: Optional[int], users: Collection[UserID] = [],
+ ):
+        """This is called by the notifier in the background when an ephemeral
+        event is handled by the homeserver.
+
+        This will determine which appservices are interested in the event,
+        and submit the event to them.
+
+        Events will only be pushed to appservices that have opted into
+        receiving ephemeral events.
+
+ Args:
+ stream_key: The stream the event came from.
+            new_token: The latest stream token.
+ users: The user(s) involved with the event.
+ """
+ services = [
+ service
+ for service in self.store.get_app_services()
+ if service.supports_ephemeral
+ ]
+ if not services or not self.notify_appservices:
+ return
+        logger.info("Checking interested services for %s", stream_key)
+ with Measure(self.clock, "notify_interested_services_ephemeral"):
+ for service in services:
+ # Only handle typing if we have the latest token
+ if stream_key == "typing_key" and new_token is not None:
+ events = await self._handle_typing(service, new_token)
+ if events:
+ self.scheduler.submit_ephemeral_events_for_as(service, events)
+ # We don't persist the token for typing_key for performance reasons
+ elif stream_key == "receipt_key":
+ events = await self._handle_receipts(service)
+ if events:
+ self.scheduler.submit_ephemeral_events_for_as(service, events)
+ await self.store.set_type_stream_id_for_appservice(
+ service, "read_receipt", new_token
+ )
+ elif stream_key == "presence_key":
+ events = await self._handle_presence(service, users)
+ if events:
+ self.scheduler.submit_ephemeral_events_for_as(service, events)
+ await self.store.set_type_stream_id_for_appservice(
+ service, "presence", new_token
+ )
+
+ async def _handle_typing(self, service: ApplicationService, new_token: int):
+ typing_source = self.event_sources.sources["typing"]
+ # Get the typing events from just before current
+ typing, _ = await typing_source.get_new_events_as(
+ service=service,
+ # For performance reasons, we don't persist the previous
+ # token in the DB and instead fetch the latest typing information
+ # for appservices.
+ from_key=new_token - 1,
+ )
+ return typing
+
+ async def _handle_receipts(self, service: ApplicationService):
+ from_key = await self.store.get_type_stream_id_for_appservice(
+ service, "read_receipt"
+ )
+ receipts_source = self.event_sources.sources["receipt"]
+ receipts, _ = await receipts_source.get_new_events_as(
+ service=service, from_key=from_key
+ )
+ return receipts
+
+ async def _handle_presence(
+ self, service: ApplicationService, users: Collection[UserID]
+    ) -> List[JsonDict]:
+ events = [] # type: List[JsonDict]
+ presence_source = self.event_sources.sources["presence"]
+ from_key = await self.store.get_type_stream_id_for_appservice(
+ service, "presence"
+ )
+ for user in users:
+ interested = await service.is_interested_in_presence(user, self.store)
+ if not interested:
+ continue
+ presence_events, _ = await presence_source.get_new_events(
+ user=user, service=service, from_key=from_key,
+ )
+ time_now = self.clock.time_msec()
+ presence_events = [
+ {
+ "type": "m.presence",
+ "sender": event.user_id,
+ "content": format_user_presence_state(
+ event, time_now, include_user_id=False
+ ),
+ }
+ for event in presence_events
+ ]
+            events = events + presence_events
+
+        return events
+
async def query_user_exists(self, user_id):
"""Check if any application service knows this user_id exists.
@@ -223,7 +326,7 @@ class ApplicationServicesHandler:
async def get_3pe_protocols(self, only_protocol=None):
services = self.store.get_app_services()
- protocols = {}
+ protocols = {} # type: Dict[str, List[JsonDict]]
# Collect up all the individual protocol responses out of the ASes
for s in services:
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 80f1d8fb03..d54cc50c4e 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1510,18 +1510,9 @@ class FederationHandler(BaseHandler):
event, context = await self.event_creation_handler.create_new_client_event(
builder=builder
)
- except AuthError as e:
+ except SynapseError as e:
logger.warning("Failed to create join to %s because %s", room_id, e)
- raise e
-
- event_allowed = await self.third_party_event_rules.check_event_allowed(
- event, context
- )
- if not event_allowed:
- logger.info("Creation of join %s forbidden by third-party rules", event)
- raise SynapseError(
- 403, "This event is not allowed in this context", Codes.FORBIDDEN
- )
+ raise
# The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_join_request`
@@ -1570,15 +1561,6 @@ class FederationHandler(BaseHandler):
context = await self._handle_new_event(origin, event)
- event_allowed = await self.third_party_event_rules.check_event_allowed(
- event, context
- )
- if not event_allowed:
- logger.info("Sending of join %s forbidden by third-party rules", event)
- raise SynapseError(
- 403, "This event is not allowed in this context", Codes.FORBIDDEN
- )
-
logger.debug(
"on_send_join_request: After _handle_new_event: %s, sigs: %s",
event.event_id,
@@ -1758,15 +1740,6 @@ class FederationHandler(BaseHandler):
builder=builder
)
- event_allowed = await self.third_party_event_rules.check_event_allowed(
- event, context
- )
- if not event_allowed:
- logger.warning("Creation of leave %s forbidden by third-party rules", event)
- raise SynapseError(
- 403, "This event is not allowed in this context", Codes.FORBIDDEN
- )
-
try:
# The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_leave_request`
@@ -1799,16 +1772,7 @@ class FederationHandler(BaseHandler):
event.internal_metadata.outlier = False
- context = await self._handle_new_event(origin, event)
-
- event_allowed = await self.third_party_event_rules.check_event_allowed(
- event, context
- )
- if not event_allowed:
- logger.info("Sending of leave %s forbidden by third-party rules", event)
- raise SynapseError(
- 403, "This event is not allowed in this context", Codes.FORBIDDEN
- )
+ await self._handle_new_event(origin, event)
logger.debug(
"on_send_leave_request: After _handle_new_event: %s, sigs: %s",
@@ -2704,18 +2668,6 @@ class FederationHandler(BaseHandler):
builder=builder
)
- event_allowed = await self.third_party_event_rules.check_event_allowed(
- event, context
- )
- if not event_allowed:
- logger.info(
- "Creation of threepid invite %s forbidden by third-party rules",
- event,
- )
- raise SynapseError(
- 403, "This event is not allowed in this context", Codes.FORBIDDEN
- )
-
event, context = await self.add_display_name_to_third_party_invite(
room_version, event_dict, event, context
)
@@ -2766,18 +2718,6 @@ class FederationHandler(BaseHandler):
event, context = await self.event_creation_handler.create_new_client_event(
builder=builder
)
-
- event_allowed = await self.third_party_event_rules.check_event_allowed(
- event, context
- )
- if not event_allowed:
- logger.warning(
- "Exchange of threepid invite %s forbidden by third-party rules", event
- )
- raise SynapseError(
- 403, "This event is not allowed in this context", Codes.FORBIDDEN
- )
-
event, context = await self.add_display_name_to_third_party_invite(
room_version, event_dict, event, context
)
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index f18f882596..d6855c60ea 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -811,6 +811,23 @@ class EventCreationHandler:
if requester:
context.app_service = requester.app_service
+ third_party_result = await self.third_party_event_rules.check_event_allowed(
+ event, context
+ )
+ if not third_party_result:
+ logger.info(
+ "Event %s forbidden by third-party rules", event,
+ )
+ raise SynapseError(
+ 403, "This event is not allowed in this context", Codes.FORBIDDEN
+ )
+ elif isinstance(third_party_result, dict):
+ # the third-party rules want to replace the event. We'll need to build a new
+ # event.
+ event, context = await self._rebuild_event_after_third_party_rules(
+ third_party_result, event
+ )
+
self.validator.validate_new(event, self.config)
# If this event is an annotation then we check that that the sender
@@ -897,14 +914,6 @@ class EventCreationHandler:
else:
room_version = await self.store.get_room_version_id(event.room_id)
- event_allowed = await self.third_party_event_rules.check_event_allowed(
- event, context
- )
- if not event_allowed:
- raise SynapseError(
- 403, "This event is not allowed in this context", Codes.FORBIDDEN
- )
-
if event.internal_metadata.is_out_of_band_membership():
# the only sort of out-of-band-membership events we expect to see here
# are invite rejections we have generated ourselves.
@@ -1307,3 +1316,62 @@ class EventCreationHandler:
room_id,
)
del self._rooms_to_exclude_from_dummy_event_insertion[room_id]
+
+ async def _rebuild_event_after_third_party_rules(
+ self, third_party_result: dict, original_event: EventBase
+ ) -> Tuple[EventBase, EventContext]:
+ # the third_party_event_rules want to replace the event.
+ # we do some basic checks, and then return the replacement event and context.
+
+ # Construct a new EventBuilder and validate it, which helps with the
+ # rest of these checks.
+ try:
+ builder = self.event_builder_factory.for_room_version(
+ original_event.room_version, third_party_result
+ )
+ self.validator.validate_builder(builder)
+ except SynapseError as e:
+ raise Exception(
+ "Third party rules module created an invalid event: " + e.msg,
+ )
+
+ immutable_fields = [
+ # changing the room is going to break things: we've already checked that the
+ # room exists, and are holding a concurrency limiter token for that room.
+ # Also, we might need to use a different room version.
+ "room_id",
+ # changing the type or state key might work, but we'd need to check that the
+ # calling functions aren't making assumptions about them.
+ "type",
+ "state_key",
+ ]
+
+ for k in immutable_fields:
+ if getattr(builder, k, None) != original_event.get(k):
+ raise Exception(
+ "Third party rules module created an invalid event: "
+ "cannot change field " + k
+ )
+
+ # check that the new sender belongs to this HS
+ if not self.hs.is_mine_id(builder.sender):
+ raise Exception(
+ "Third party rules module created an invalid event: "
+ "invalid sender " + builder.sender
+ )
+
+ # copy over the original internal metadata
+ for k, v in original_event.internal_metadata.get_dict().items():
+ setattr(builder.internal_metadata, k, v)
+
+ # the event type hasn't changed, so there's no point in re-calculating the
+ # auth events.
+ event = await builder.build(
+ prev_event_ids=original_event.prev_event_ids(),
+ auth_event_ids=original_event.auth_event_ids(),
+ )
+
+ # we rebuild the event context, to be on the safe side. If nothing else,
+ # delta_ids might need an update.
+ context = await self.state.compute_event_context(event)
+ return event, context
diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py
index 7225923757..c242c409cf 100644
--- a/synapse/handlers/receipts.py
+++ b/synapse/handlers/receipts.py
@@ -13,9 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
+from typing import List, Tuple
+from synapse.appservice import ApplicationService
from synapse.handlers._base import BaseHandler
-from synapse.types import ReadReceipt, get_domain_from_id
+from synapse.types import JsonDict, ReadReceipt, get_domain_from_id
from synapse.util.async_helpers import maybe_awaitable
logger = logging.getLogger(__name__)
@@ -140,5 +142,36 @@ class ReceiptEventSource:
return (events, to_key)
+ async def get_new_events_as(
+ self, from_key: int, service: ApplicationService
+ ) -> Tuple[List[JsonDict], int]:
+ """Returns a set of new receipt events that an appservice
+ may be interested in.
+
+ Args:
+            from_key: the stream position from which events should be fetched
+ service: The appservice which may be interested
+ """
+ from_key = int(from_key)
+ to_key = self.get_current_key()
+
+ if from_key == to_key:
+ return [], to_key
+
+ # We first need to fetch all new receipts
+ rooms_to_events = await self.store.get_linearized_receipts_for_all_rooms(
+ from_key=from_key, to_key=to_key
+ )
+
+ # Then filter down to rooms that the AS can read
+ events = []
+ for room_id, event in rooms_to_events.items():
+ if not await service.matches_user_in_member_list(room_id, self.store):
+ continue
+
+ events.append(event)
+
+ return (events, to_key)
+
def get_current_key(self, direction="f"):
return self.store.get_max_receipt_stream_id()
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index a306631094..b527724bc4 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
import itertools
import logging
from typing import TYPE_CHECKING, Any, Dict, FrozenSet, List, Optional, Set, Tuple
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index 3cbfc2d780..d3692842e3 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -12,16 +12,16 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
import logging
import random
from collections import namedtuple
from typing import TYPE_CHECKING, List, Set, Tuple
from synapse.api.errors import AuthError, ShadowBanError, SynapseError
+from synapse.appservice import ApplicationService
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.replication.tcp.streams import TypingStream
-from synapse.types import UserID, get_domain_from_id
+from synapse.types import JsonDict, UserID, get_domain_from_id
from synapse.util.caches.stream_change_cache import StreamChangeCache
from synapse.util.metrics import Measure
from synapse.util.wheel_timer import WheelTimer
@@ -430,6 +430,33 @@ class TypingNotificationEventSource:
"content": {"user_ids": list(typing)},
}
+ async def get_new_events_as(
+ self, from_key: int, service: ApplicationService
+ ) -> Tuple[List[JsonDict], int]:
+ """Returns a set of new typing events that an appservice
+ may be interested in.
+
+ Args:
+            from_key: the stream position from which events should be fetched
+ service: The appservice which may be interested
+ """
+ with Measure(self.clock, "typing.get_new_events_as"):
+ from_key = int(from_key)
+ handler = self.get_typing_handler()
+
+ events = []
+ for room_id in handler._room_serials.keys():
+ if handler._room_serials[room_id] <= from_key:
+ continue
+ if not await service.matches_user_in_member_list(
+ room_id, handler.store
+ ):
+ continue
+
+ events.append(self._make_event_for(room_id))
+
+ return (events, handler._latest_room_serial)
+
async def get_new_events(self, from_key, room_ids, **kwargs):
with Measure(self.clock, "typing.get_new_events"):
from_key = int(from_key)
diff --git a/synapse/notifier.py b/synapse/notifier.py
index 51c830c91e..2e993411b9 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -329,6 +329,22 @@ class Notifier:
except Exception:
logger.exception("Error notifying application services of event")
+ async def _notify_app_services_ephemeral(
+ self,
+ stream_key: str,
+ new_token: Union[int, RoomStreamToken],
+ users: Collection[UserID] = [],
+ ):
+ try:
+ stream_token = None
+ if isinstance(new_token, int):
+ stream_token = new_token
+ await self.appservice_handler.notify_interested_services_ephemeral(
+ stream_key, stream_token, users
+ )
+ except Exception:
+ logger.exception("Error notifying application services of event")
+
async def _notify_pusher_pool(self, max_room_stream_token: RoomStreamToken):
try:
await self._pusher_pool.on_new_notifications(max_room_stream_token)
@@ -367,6 +383,15 @@ class Notifier:
self.notify_replication()
+ # Notify appservices
+ run_as_background_process(
+ "_notify_app_services_ephemeral",
+ self._notify_app_services_ephemeral,
+ stream_key,
+ new_token,
+ users,
+ )
+
def on_new_replication_data(self) -> None:
"""Used to inform replication listeners that something has happend
without waking up any of the normal user event streams"""
diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py
index 3a68ce636f..4c95b149c5 100644
--- a/synapse/push/push_rule_evaluator.py
+++ b/synapse/push/push_rule_evaluator.py
@@ -20,7 +20,6 @@ from typing import Any, Dict, List, Optional, Pattern, Union
from synapse.events import EventBase
from synapse.types import UserID
-from synapse.util.caches import register_cache
from synapse.util.caches.lrucache import LruCache
logger = logging.getLogger(__name__)
@@ -186,8 +185,7 @@ class PushRuleEvaluatorForEvent:
# Caches (string, is_glob, word_boundary) -> regex for push. See _glob_matches
-regex_cache = LruCache(50000)
-register_cache("cache", "regex_push_cache", regex_cache)
+regex_cache = LruCache(50000, "regex_push_cache")
def _glob_matches(glob: str, value: str, word_boundary: bool = False) -> bool:
diff --git a/synapse/server.py b/synapse/server.py
index f921ee4b53..21a232bbd9 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -205,7 +205,13 @@ class HomeServer(metaclass=abc.ABCMeta):
# instantiated during setup() for future return by get_datastore()
DATASTORE_CLASS = abc.abstractproperty()
- def __init__(self, hostname: str, config: HomeServerConfig, reactor=None, **kwargs):
+ def __init__(
+ self,
+ hostname: str,
+ config: HomeServerConfig,
+ reactor=None,
+ version_string="Synapse",
+ ):
"""
Args:
hostname : The hostname for the server.
@@ -236,11 +242,9 @@ class HomeServer(metaclass=abc.ABCMeta):
burst_count=config.rc_registration.burst_count,
)
- self.datastores = None # type: Optional[Databases]
+ self.version_string = version_string
- # Other kwargs are explicit dependencies
- for depname in kwargs:
- setattr(self, depname, kwargs[depname])
+ self.datastores = None # type: Optional[Databases]
def get_instance_id(self) -> str:
"""A unique ID for this synapse process instance.
diff --git a/synapse/storage/databases/main/appservice.py b/synapse/storage/databases/main/appservice.py
index 85f6b1e3fd..43bf0f649a 100644
--- a/synapse/storage/databases/main/appservice.py
+++ b/synapse/storage/databases/main/appservice.py
@@ -15,12 +15,15 @@
# limitations under the License.
import logging
import re
+from typing import List
-from synapse.appservice import AppServiceTransaction
+from synapse.appservice import ApplicationService, AppServiceTransaction
from synapse.config.appservice import load_appservices
+from synapse.events import EventBase
from synapse.storage._base import SQLBaseStore, db_to_json
from synapse.storage.database import DatabasePool
from synapse.storage.databases.main.events_worker import EventsWorkerStore
+from synapse.types import JsonDict
from synapse.util import json_encoder
logger = logging.getLogger(__name__)
@@ -172,15 +175,23 @@ class ApplicationServiceTransactionWorkerStore(
"application_services_state", {"as_id": service.id}, {"state": state}
)
- async def create_appservice_txn(self, service, events):
+ async def create_appservice_txn(
+ self,
+ service: ApplicationService,
+ events: List[EventBase],
+ ephemeral: List[JsonDict],
+ ) -> AppServiceTransaction:
"""Atomically creates a new transaction for this application service
- with the given list of events.
+ with the given list of events. Ephemeral events are NOT persisted to the
+ database and are not resent if a transaction is retried.
Args:
- service(ApplicationService): The service who the transaction is for.
- events(list<Event>): A list of events to put in the transaction.
+ service: The service who the transaction is for.
+ events: A list of persistent events to put in the transaction.
+ ephemeral: A list of ephemeral events to put in the transaction.
+
Returns:
- AppServiceTransaction: A new transaction.
+ A new transaction.
"""
def _create_appservice_txn(txn):
@@ -207,7 +218,9 @@ class ApplicationServiceTransactionWorkerStore(
"VALUES(?,?,?)",
(service.id, new_txn_id, event_ids),
)
- return AppServiceTransaction(service=service, id=new_txn_id, events=events)
+ return AppServiceTransaction(
+ service=service, id=new_txn_id, events=events, ephemeral=ephemeral
+ )
return await self.db_pool.runInteraction(
"create_appservice_txn", _create_appservice_txn
@@ -296,7 +309,9 @@ class ApplicationServiceTransactionWorkerStore(
events = await self.get_events_as_list(event_ids)
- return AppServiceTransaction(service=service, id=entry["txn_id"], events=events)
+ return AppServiceTransaction(
+ service=service, id=entry["txn_id"], events=events, ephemeral=[]
+ )
def _get_last_txn(self, txn, service_id):
txn.execute(
@@ -320,7 +335,7 @@ class ApplicationServiceTransactionWorkerStore(
)
async def get_new_events_for_appservice(self, current_id, limit):
- """Get all new evnets"""
+ """Get all new events for an appservice"""
def get_new_events_for_appservice_txn(txn):
sql = (
@@ -351,6 +366,46 @@
return upper_bound, events
+ async def get_type_stream_id_for_appservice(
+ self, service: ApplicationService, type: str
+ ) -> int:
+ def get_type_stream_id_for_appservice_txn(txn):
+ stream_id_type = "%s_stream_id" % type
+ txn.execute(
+            # `type` is one of a fixed set of stream types, so interpolating
+            # the derived column name is safe here (column names cannot be
+            # bound as SQL parameters).
+            "SELECT %s FROM application_services_state WHERE as_id=?"
+            % stream_id_type,
+            (service.id,),
+ )
+        last_stream_id = txn.fetchone()
+        if last_stream_id is None or last_stream_id[0] is None:  # no row exists
+            return 0
+        else:
+            return int(last_stream_id[0])
+
+ return await self.db_pool.runInteraction(
+ "get_type_stream_id_for_appservice", get_type_stream_id_for_appservice_txn
+ )
+
+ async def set_type_stream_id_for_appservice(
+ self, service: ApplicationService, type: str, pos: int
+ ) -> None:
+ def set_type_stream_id_for_appservice_txn(txn):
+ stream_id_type = "%s_stream_id" % type
+ txn.execute(
+            # As above, the column name comes from a fixed set of stream
+            # types, so interpolating it is safe.
+            "UPDATE application_services_state SET %s = ? WHERE as_id=?"
+            % stream_id_type,
+            (pos, service.id),
+ )
+
+ await self.db_pool.runInteraction(
+ "set_type_stream_id_for_appservice", set_type_stream_id_for_appservice_txn
+ )
+
class ApplicationServiceTransactionStore(ApplicationServiceTransactionWorkerStore):
# This is currently empty due to there not being any AS storage functions
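A hedged usage sketch of the new stream-position helpers. "read_receipt" and "presence" are the two types the handler passes; the `store` and `service` arguments are assumed:

```python
async def advance_read_receipt_stream(store, service, new_token: int) -> None:
    # get_type_stream_id_for_appservice returns 0 when no position has been
    # recorded for this appservice yet.
    last = await store.get_type_stream_id_for_appservice(service, "read_receipt")
    if new_token > last:
        await store.set_type_stream_id_for_appservice(
            service, "read_receipt", new_token
        )
```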
diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py
index c79ddff680..5cdf16521c 100644
--- a/synapse/storage/databases/main/receipts.py
+++ b/synapse/storage/databases/main/receipts.py
@@ -23,6 +23,7 @@ from twisted.internet import defer
from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause
from synapse.storage.database import DatabasePool
from synapse.storage.util.id_generators import StreamIdGenerator
+from synapse.types import JsonDict
from synapse.util import json_encoder
from synapse.util.async_helpers import ObservableDeferred
from synapse.util.caches.descriptors import cached, cachedList
@@ -274,6 +275,60 @@ class ReceiptsWorkerStore(SQLBaseStore, metaclass=abc.ABCMeta):
}
return results
+    @cached(num_args=2)
+ async def get_linearized_receipts_for_all_rooms(
+ self, to_key: int, from_key: Optional[int] = None
+ ) -> Dict[str, JsonDict]:
+ """Get receipts for all rooms between two stream_ids.
+
+ Args:
+            to_key: Max stream id to fetch receipts up to.
+ from_key: Min stream id to fetch receipts from. None fetches
+ from the start.
+
+ Returns:
+            A dictionary mapping each room ID to its batched receipt event.
+ """
+
+ def f(txn):
+ if from_key:
+ sql = """
+ SELECT * FROM receipts_linearized WHERE
+ stream_id > ? AND stream_id <= ?
+ """
+ txn.execute(sql, [from_key, to_key])
+ else:
+ sql = """
+ SELECT * FROM receipts_linearized WHERE
+ stream_id <= ?
+ """
+
+ txn.execute(sql, [to_key])
+
+ return self.db_pool.cursor_to_dict(txn)
+
+ txn_results = await self.db_pool.runInteraction(
+ "get_linearized_receipts_for_all_rooms", f
+ )
+
+ results = {}
+ for row in txn_results:
+ # We want a single event per room, since we want to batch the
+ # receipts by room, event and type.
+ room_event = results.setdefault(
+ row["room_id"],
+ {"type": "m.receipt", "room_id": row["room_id"], "content": {}},
+ )
+
+ # The content is of the form:
+ # {"$foo:bar": { "read": { "@user:host": <receipt> }, .. }, .. }
+ event_entry = room_event["content"].setdefault(row["event_id"], {})
+ receipt_type = event_entry.setdefault(row["receipt_type"], {})
+
+ receipt_type[row["user_id"]] = db_to_json(row["data"])
+
+ return results
+
async def get_users_sent_receipts_between(
self, last_id: int, current_id: int
) -> List[str]:
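Written out concretely, the per-room batched receipt event assembled above looks like this (IDs and timestamp are invented; receipt types are stored as e.g. "m.read"):

```python
batched_receipt = {
    "type": "m.receipt",
    "room_id": "!room:example.org",
    "content": {
        # {event_id: {receipt_type: {user_id: receipt_data}}}
        "$event0:example.org": {
            "m.read": {
                "@alice:example.org": {"ts": 1603200000000},
            },
        },
    },
}
```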
diff --git a/synapse/storage/databases/main/schema/delta/59/19as_device_stream.sql b/synapse/storage/databases/main/schema/delta/59/19as_device_stream.sql
new file mode 100644
index 0000000000..20f5a95a24
--- /dev/null
+++ b/synapse/storage/databases/main/schema/delta/59/19as_device_stream.sql
@@ -0,0 +1,18 @@
+/* Copyright 2020 The Matrix.org Foundation C.I.C
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- SQLite does not support adding multiple columns in a single ALTER TABLE
+ALTER TABLE application_services_state ADD COLUMN read_receipt_stream_id INT;
+ALTER TABLE application_services_state ADD COLUMN presence_stream_id INT;
\ No newline at end of file
diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py
index 8fc05be278..89f0b38535 100644
--- a/synapse/util/caches/__init__.py
+++ b/synapse/util/caches/__init__.py
@@ -16,7 +16,7 @@
import logging
from sys import intern
-from typing import Callable, Dict, Optional
+from typing import Callable, Dict, Optional, Sized
import attr
from prometheus_client.core import Gauge
@@ -92,7 +92,7 @@ class CacheMetric:
def register_cache(
cache_type: str,
cache_name: str,
- cache,
+ cache: Sized,
collect_callback: Optional[Callable] = None,
resizable: bool = True,
resize_callback: Optional[Callable] = None,
@@ -100,12 +100,15 @@ def register_cache(
"""Register a cache object for metric collection and resizing.
Args:
- cache_type
+ cache_type: a string indicating the "type" of the cache. This is used
+ only for deduplication so isn't too important provided it's constant.
cache_name: name of the cache
- cache: cache itself
+ cache: cache itself, which must implement __len__(), and may optionally implement
+ a max_size property
collect_callback: If given, a function which is called during metric
collection to update additional metrics.
- resizable: Whether this cache supports being resized.
+ resizable: Whether this cache supports being resized, in which case either
+ resize_callback must be provided, or the cache must support set_max_size().
resize_callback: A function which can be called to resize the cache.
Returns:
diff --git a/synapse/util/caches/deferred_cache.py b/synapse/util/caches/deferred_cache.py
index f728cd2cf2..91fdc8142d 100644
--- a/synapse/util/caches/deferred_cache.py
+++ b/synapse/util/caches/deferred_cache.py
@@ -24,7 +24,6 @@ from prometheus_client import Gauge
from twisted.internet import defer
from synapse.util.async_helpers import ObservableDeferred
-from synapse.util.caches import register_cache
from synapse.util.caches.lrucache import LruCache
from synapse.util.caches.treecache import TreeCache, iterate_tree_cache_entry
@@ -54,10 +53,7 @@ class DeferredCache(Generic[KT, VT]):
__slots__ = (
"cache",
- "name",
- "keylen",
"thread",
- "metrics",
"_pending_deferred_cache",
)
@@ -89,37 +85,27 @@ class DeferredCache(Generic[KT, VT]):
cache_type()
) # type: MutableMapping[KT, CacheEntry]
+ def metrics_cb():
+ cache_pending_metric.labels(name).set(len(self._pending_deferred_cache))
+
# cache is used for completed results and maps to the result itself, rather than
# a Deferred.
self.cache = LruCache(
max_size=max_entries,
keylen=keylen,
+ cache_name=name,
cache_type=cache_type,
size_callback=(lambda d: len(d)) if iterable else None,
- evicted_callback=self._on_evicted,
+ metrics_collection_callback=metrics_cb,
apply_cache_factor_from_config=apply_cache_factor_from_config,
)
- self.name = name
- self.keylen = keylen
self.thread = None # type: Optional[threading.Thread]
- self.metrics = register_cache(
- "cache",
- name,
- self.cache,
- collect_callback=self._metrics_collection_callback,
- )
@property
def max_entries(self):
return self.cache.max_size
- def _on_evicted(self, evicted_count):
- self.metrics.inc_evictions(evicted_count)
-
- def _metrics_collection_callback(self):
- cache_pending_metric.labels(self.name).set(len(self._pending_deferred_cache))
-
def check_thread(self):
expected_thread = self.thread
if expected_thread is None:
@@ -154,21 +140,18 @@ class DeferredCache(Generic[KT, VT]):
if val is not _Sentinel.sentinel:
val.callbacks.update(callbacks)
if update_metrics:
- self.metrics.inc_hits()
+ m = self.cache.metrics
+ assert m # we always have a name, so should always have metrics
+ m.inc_hits()
return val.deferred
- val = self.cache.get(key, _Sentinel.sentinel, callbacks=callbacks)
- if val is not _Sentinel.sentinel:
- self.metrics.inc_hits()
- return val
-
- if update_metrics:
- self.metrics.inc_misses()
-
- if default is _Sentinel.sentinel:
+ val = self.cache.get(
+ key, default, callbacks=callbacks, update_metrics=update_metrics
+ )
+ if val is _Sentinel.sentinel:
raise KeyError()
else:
- return default
+ return val
def set(
self,
diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py
index 8592b93689..8b426c005b 100644
--- a/synapse/util/caches/dictionary_cache.py
+++ b/synapse/util/caches/dictionary_cache.py
@@ -19,8 +19,6 @@ from collections import namedtuple
from synapse.util.caches.lrucache import LruCache
-from . import register_cache
-
logger = logging.getLogger(__name__)
@@ -46,18 +44,16 @@ class DictionaryCache:
"""
def __init__(self, name, max_entries=1000):
- self.cache = LruCache(max_size=max_entries, size_callback=len)
+ self.cache = LruCache(max_size=max_entries, cache_name=name, size_callback=len)
self.name = name
self.sequence = 0
self.thread = None
- # caches_by_name[name] = self.cache
class Sentinel:
__slots__ = []
self.sentinel = Sentinel()
- self.metrics = register_cache("dictionary", name, self.cache)
def check_thread(self):
expected_thread = self.thread
@@ -82,8 +78,6 @@ class DictionaryCache:
"""
entry = self.cache.get(key, self.sentinel)
if entry is not self.sentinel:
- self.metrics.inc_hits()
-
if dict_keys is None:
return DictionaryEntry(
entry.full, entry.known_absent, dict(entry.value)
@@ -95,7 +89,6 @@ class DictionaryCache:
{k: entry.value[k] for k in dict_keys if k in entry.value},
)
- self.metrics.inc_misses()
return DictionaryEntry(False, set(), {})
def invalidate(self, key):
diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index 33eae2b7c4..e4804f79e0 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -18,6 +18,7 @@ from functools import wraps
from typing import Callable, Optional, Type, Union
from synapse.config import cache as cache_config
+from synapse.util.caches import CacheMetric, register_cache
from synapse.util.caches.treecache import TreeCache
@@ -43,27 +44,29 @@ class _Node:
class LruCache:
"""
- Least-recently-used cache.
+ Least-recently-used cache, supporting prometheus metrics and invalidation callbacks.
+
Supports del_multi only if cache_type=TreeCache
If cache_type=TreeCache, all keys must be tuples.
-
- Can also set callbacks on objects when getting/setting which are fired
- when that key gets invalidated/evicted.
"""
def __init__(
self,
max_size: int,
+ cache_name: Optional[str] = None,
keylen: int = 1,
cache_type: Type[Union[dict, TreeCache]] = dict,
size_callback: Optional[Callable] = None,
- evicted_callback: Optional[Callable] = None,
+ metrics_collection_callback: Optional[Callable[[], None]] = None,
apply_cache_factor_from_config: bool = True,
):
"""
Args:
max_size: The maximum amount of entries the cache can hold
+ cache_name: The name of this cache, for the prometheus metrics. If unset,
+ no metrics will be reported on this cache.
+
keylen: The length of the tuple used as the cache key. Ignored unless
cache_type is `TreeCache`.
@@ -73,9 +76,13 @@ class LruCache:
size_callback (func(V) -> int | None):
- evicted_callback (func(int)|None):
- if not None, called on eviction with the size of the evicted
- entry
+ metrics_collection_callback:
+ metrics collection callback. This is called early in the metrics
+ collection process, before any of the metrics registered with the
+ prometheus Registry are collected, so can be used to update any dynamic
+ metrics.
+
+ Ignored if cache_name is None.
apply_cache_factor_from_config (bool): If true, `max_size` will be
multiplied by a cache factor derived from the homeserver config
@@ -94,6 +101,19 @@ class LruCache:
else:
self.max_size = int(max_size)
+ if cache_name is not None:
+ metrics = register_cache(
+ "lru_cache",
+ cache_name,
+ self,
+ collect_callback=metrics_collection_callback,
+ ) # type: Optional[CacheMetric]
+ else:
+ metrics = None
+
+ # this is exposed for access from outside this class
+ self.metrics = metrics
+
list_root = _Node(None, None, None, None)
list_root.next_node = list_root
list_root.prev_node = list_root
@@ -105,8 +125,8 @@ class LruCache:
todelete = list_root.prev_node
evicted_len = delete_node(todelete)
cache.pop(todelete.key, None)
- if evicted_callback:
- evicted_callback(evicted_len)
+ if metrics:
+ metrics.inc_evictions(evicted_len)
def synchronized(f):
@wraps(f)
@@ -169,13 +189,17 @@ class LruCache:
return deleted_len
@synchronized
- def cache_get(key, default=None, callbacks=[]):
+ def cache_get(key, default=None, callbacks=[], update_metrics=True):
node = cache.get(key, None)
if node is not None:
move_node_to_front(node)
node.callbacks.update(callbacks)
+ if update_metrics and metrics:
+ metrics.inc_hits()
return node.value
else:
+ if update_metrics and metrics:
+ metrics.inc_misses()
return default
@synchronized
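Taken together, the constructor change and the new `update_metrics` flag on `cache_get` replace the old `evicted_callback` plumbing. A sketch of the new surface, with the callback body left as a hypothetical placeholder:

```python
from synapse.util.caches.lrucache import LruCache

def refresh_dynamic_metrics() -> None:
    # hypothetical hook: runs before prometheus collects the registered
    # metrics, so any derived values can be brought up to date first
    pass

cache = LruCache(
    max_size=128,
    cache_name="example_cache",  # omit to opt out of metrics entirely
    metrics_collection_callback=refresh_dynamic_metrics,
)

cache.set("key", "value")
cache.get("key")                           # increments the hit counter
cache.get("absent", update_metrics=False)  # peek without skewing the stats
```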
diff --git a/tests/app/test_frontend_proxy.py b/tests/app/test_frontend_proxy.py
index 641093d349..4a301b84e1 100644
--- a/tests/app/test_frontend_proxy.py
+++ b/tests/app/test_frontend_proxy.py
@@ -22,7 +22,7 @@ class FrontendProxyTests(HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
- http_client=None, homeserverToUse=GenericWorkerServer
+ http_client=None, homeserver_to_use=GenericWorkerServer
)
return hs
diff --git a/tests/app/test_openid_listener.py b/tests/app/test_openid_listener.py
index 0f016c32eb..c2b10d2c70 100644
--- a/tests/app/test_openid_listener.py
+++ b/tests/app/test_openid_listener.py
@@ -26,7 +26,7 @@ from tests.unittest import HomeserverTestCase
class FederationReaderOpenIDListenerTests(HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
- http_client=None, homeserverToUse=GenericWorkerServer
+ http_client=None, homeserver_to_use=GenericWorkerServer
)
return hs
@@ -84,7 +84,7 @@ class FederationReaderOpenIDListenerTests(HomeserverTestCase):
class SynapseHomeserverOpenIDListenerTests(HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
- http_client=None, homeserverToUse=SynapseHomeServer
+ http_client=None, homeserver_to_use=SynapseHomeServer
)
return hs
diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py
index 68a4caabbf..2acb8b7603 100644
--- a/tests/appservice/test_scheduler.py
+++ b/tests/appservice/test_scheduler.py
@@ -60,7 +60,7 @@ class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase):
self.successResultOf(defer.ensureDeferred(self.txnctrl.send(service, events)))
self.store.create_appservice_txn.assert_called_once_with(
- service=service, events=events # txn made and saved
+ service=service, events=events, ephemeral=[] # txn made and saved
)
self.assertEquals(0, len(self.txnctrl.recoverers)) # no recoverer made
txn.complete.assert_called_once_with(self.store) # txn completed
@@ -81,7 +81,7 @@ class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase):
self.successResultOf(defer.ensureDeferred(self.txnctrl.send(service, events)))
self.store.create_appservice_txn.assert_called_once_with(
- service=service, events=events # txn made and saved
+ service=service, events=events, ephemeral=[] # txn made and saved
)
self.assertEquals(0, txn.send.call_count) # txn not sent though
self.assertEquals(0, txn.complete.call_count) # or completed
@@ -106,7 +106,7 @@ class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase):
self.successResultOf(defer.ensureDeferred(self.txnctrl.send(service, events)))
self.store.create_appservice_txn.assert_called_once_with(
- service=service, events=events
+ service=service, events=events, ephemeral=[]
)
self.assertEquals(1, self.recoverer_fn.call_count) # recoverer made
self.assertEquals(1, self.recoverer.recover.call_count) # and invoked
@@ -202,26 +202,28 @@ class ApplicationServiceSchedulerQueuerTestCase(unittest.TestCase):
# Expect the event to be sent immediately.
service = Mock(id=4)
event = Mock()
- self.queuer.enqueue(service, event)
- self.txn_ctrl.send.assert_called_once_with(service, [event])
+ self.queuer.enqueue_event(service, event)
+ self.txn_ctrl.send.assert_called_once_with(service, [event], [])
def test_send_single_event_with_queue(self):
d = defer.Deferred()
- self.txn_ctrl.send = Mock(side_effect=lambda x, y: make_deferred_yieldable(d))
+ self.txn_ctrl.send = Mock(
+ side_effect=lambda x, y, z: make_deferred_yieldable(d)
+ )
service = Mock(id=4)
event = Mock(event_id="first")
event2 = Mock(event_id="second")
event3 = Mock(event_id="third")
# Send an event and don't resolve it just yet.
- self.queuer.enqueue(service, event)
+ self.queuer.enqueue_event(service, event)
# Send more events: expect send() to NOT be called multiple times.
- self.queuer.enqueue(service, event2)
- self.queuer.enqueue(service, event3)
- self.txn_ctrl.send.assert_called_with(service, [event])
+ self.queuer.enqueue_event(service, event2)
+ self.queuer.enqueue_event(service, event3)
+ self.txn_ctrl.send.assert_called_with(service, [event], [])
self.assertEquals(1, self.txn_ctrl.send.call_count)
# Resolve the send event: expect the queued events to be sent
d.callback(service)
- self.txn_ctrl.send.assert_called_with(service, [event2, event3])
+ self.txn_ctrl.send.assert_called_with(service, [event2, event3], [])
self.assertEquals(2, self.txn_ctrl.send.call_count)
def test_multiple_service_queues(self):
@@ -239,21 +241,58 @@ class ApplicationServiceSchedulerQueuerTestCase(unittest.TestCase):
send_return_list = [srv_1_defer, srv_2_defer]
- def do_send(x, y):
+ def do_send(x, y, z):
return make_deferred_yieldable(send_return_list.pop(0))
self.txn_ctrl.send = Mock(side_effect=do_send)
# send events for different ASes and make sure they are sent
- self.queuer.enqueue(srv1, srv_1_event)
- self.queuer.enqueue(srv1, srv_1_event2)
- self.txn_ctrl.send.assert_called_with(srv1, [srv_1_event])
- self.queuer.enqueue(srv2, srv_2_event)
- self.queuer.enqueue(srv2, srv_2_event2)
- self.txn_ctrl.send.assert_called_with(srv2, [srv_2_event])
+ self.queuer.enqueue_event(srv1, srv_1_event)
+ self.queuer.enqueue_event(srv1, srv_1_event2)
+ self.txn_ctrl.send.assert_called_with(srv1, [srv_1_event], [])
+ self.queuer.enqueue_event(srv2, srv_2_event)
+ self.queuer.enqueue_event(srv2, srv_2_event2)
+ self.txn_ctrl.send.assert_called_with(srv2, [srv_2_event], [])
# make sure callbacks for a service only send queued events for THAT
# service
srv_2_defer.callback(srv2)
- self.txn_ctrl.send.assert_called_with(srv2, [srv_2_event2])
+ self.txn_ctrl.send.assert_called_with(srv2, [srv_2_event2], [])
self.assertEquals(3, self.txn_ctrl.send.call_count)
+
+ def test_send_single_ephemeral_no_queue(self):
+ # Expect the event to be sent immediately.
+ service = Mock(id=4, name="service")
+ event_list = [Mock(name="event")]
+ self.queuer.enqueue_ephemeral(service, event_list)
+ self.txn_ctrl.send.assert_called_once_with(service, [], event_list)
+
+ def test_send_multiple_ephemeral_no_queue(self):
+ # Expect the events to be sent immediately.
+ service = Mock(id=4, name="service")
+ event_list = [Mock(name="event1"), Mock(name="event2"), Mock(name="event3")]
+ self.queuer.enqueue_ephemeral(service, event_list)
+ self.txn_ctrl.send.assert_called_once_with(service, [], event_list)
+
+ def test_send_single_ephemeral_with_queue(self):
+ d = defer.Deferred()
+ self.txn_ctrl.send = Mock(
+ side_effect=lambda x, y, z: make_deferred_yieldable(d)
+ )
+ service = Mock(id=4)
+ event_list_1 = [Mock(event_id="event1"), Mock(event_id="event2")]
+ event_list_2 = [Mock(event_id="event3"), Mock(event_id="event4")]
+ event_list_3 = [Mock(event_id="event5"), Mock(event_id="event6")]
+
+ # Send an event and don't resolve it just yet.
+ self.queuer.enqueue_ephemeral(service, event_list_1)
+ # Send more events: expect send() to NOT be called multiple times.
+ self.queuer.enqueue_ephemeral(service, event_list_2)
+ self.queuer.enqueue_ephemeral(service, event_list_3)
+ self.txn_ctrl.send.assert_called_with(service, [], event_list_1)
+ self.assertEquals(1, self.txn_ctrl.send.call_count)
+ # Resolve txn_ctrl.send
+ d.callback(service)
+ # Expect the queued events to be sent
+ self.txn_ctrl.send.assert_called_with(service, [], event_list_2 + event_list_3)
+ self.assertEquals(2, self.txn_ctrl.send.call_count)
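The new ephemeral tests mirror the event tests above: while a transaction for a service is in flight, further ephemeral lists are buffered and then flushed as one concatenated list. A condensed restatement using the same Mock harness (assuming, as the fixture above does, that `_ServiceQueuer` takes the transaction controller and a clock):

```python
from mock import Mock
from twisted.internet import defer

from synapse.appservice.scheduler import _ServiceQueuer
from synapse.logging.context import make_deferred_yieldable

txn_ctrl = Mock()
d = defer.Deferred()
txn_ctrl.send = Mock(side_effect=lambda x, y, z: make_deferred_yieldable(d))

queuer = _ServiceQueuer(txn_ctrl, Mock())
service = Mock(id=4)

first, second, third = [Mock(name="e1")], [Mock(name="e2")], [Mock(name="e3")]
queuer.enqueue_ephemeral(service, first)   # sent at once: send(service, [], first)
queuer.enqueue_ephemeral(service, second)  # buffered: a txn is still in flight
queuer.enqueue_ephemeral(service, third)

d.callback(service)  # resolve the first send; the buffer is flushed as one list
txn_ctrl.send.assert_called_with(service, [], second + third)
```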
diff --git a/tests/replication/_base.py b/tests/replication/_base.py
index 81ea985b9f..093e2faac7 100644
--- a/tests/replication/_base.py
+++ b/tests/replication/_base.py
@@ -59,7 +59,7 @@ class BaseStreamTestCase(unittest.HomeserverTestCase):
self.reactor.lookups["testserv"] = "1.2.3.4"
self.worker_hs = self.setup_test_homeserver(
http_client=None,
- homeserverToUse=GenericWorkerServer,
+ homeserver_to_use=GenericWorkerServer,
config=self._get_worker_hs_config(),
reactor=self.reactor,
)
@@ -266,7 +266,7 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
config.update(extra_config)
worker_hs = self.setup_test_homeserver(
- homeserverToUse=GenericWorkerServer,
+ homeserver_to_use=GenericWorkerServer,
config=config,
reactor=self.reactor,
**kwargs
diff --git a/tests/replication/test_federation_ack.py b/tests/replication/test_federation_ack.py
index 23be1167a3..1853667558 100644
--- a/tests/replication/test_federation_ack.py
+++ b/tests/replication/test_federation_ack.py
@@ -31,7 +31,7 @@ class FederationAckTestCase(HomeserverTestCase):
return config
def make_homeserver(self, reactor, clock):
- hs = self.setup_test_homeserver(homeserverToUse=GenericWorkerServer)
+ hs = self.setup_test_homeserver(homeserver_to_use=GenericWorkerServer)
return hs
diff --git a/tests/rest/client/test_third_party_rules.py b/tests/rest/client/test_third_party_rules.py
index b737625e33..0048bea54a 100644
--- a/tests/rest/client/test_third_party_rules.py
+++ b/tests/rest/client/test_third_party_rules.py
@@ -114,10 +114,10 @@ class ThirdPartyRulesTestCase(unittest.HomeserverTestCase):
self.render(request)
self.assertEquals(channel.result["code"], b"403", channel.result)
- def test_modify_event(self):
- """Tests that the module can successfully tweak an event before it is persisted.
- """
- # first patch the event checker so that it will modify the event
+ def test_cannot_modify_event(self):
+ """cannot accidentally modify an event before it is persisted"""
+
+ # first patch the event checker so that it will try to modify the event
async def check(ev: EventBase, state):
ev.content = {"x": "y"}
return True
@@ -132,6 +132,26 @@ class ThirdPartyRulesTestCase(unittest.HomeserverTestCase):
access_token=self.tok,
)
self.render(request)
+ self.assertEqual(channel.result["code"], b"500", channel.result)
+
+ def test_modify_event(self):
+ """The module can return a modified version of the event"""
+ # first patch the event checker so that it will modify the event
+ async def check(ev: EventBase, state):
+ d = ev.get_dict()
+ d["content"] = {"x": "y"}
+ return d
+
+ current_rules_module().check_event_allowed = check
+
+ # now send the event
+ request, channel = self.make_request(
+ "PUT",
+ "/_matrix/client/r0/rooms/%s/send/modifyme/1" % self.room_id,
+ {"x": "x"},
+ access_token=self.tok,
+ )
+ self.render(request)
self.assertEqual(channel.result["code"], b"200", channel.result)
event_id = channel.json_body["event_id"]
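The split into `test_cannot_modify_event` and `test_modify_event` pins down the new module contract: events are frozen, so `check_event_allowed` must return a replacement dict rather than mutate the `EventBase` in place. A minimal sketch of a conforming module (the two-argument constructor and `parse_config` hook follow the usual module-loading convention and are assumptions here):

```python
from synapse.events import EventBase


class ExampleRulesModule:
    def __init__(self, config, http_client):
        self._config = config

    @staticmethod
    def parse_config(config):
        return config

    async def check_event_allowed(self, event: EventBase, state):
        if event.type == "modifyme":
            # Mutating event.content directly now fails the request with a
            # 500; return a modified copy of the event dict instead.
            d = event.get_dict()
            d["content"] = {"x": "y"}
            return d
        return True  # allow the event through unchanged
```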
diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py
index c905a38930..c5c7987349 100644
--- a/tests/storage/test_appservice.py
+++ b/tests/storage/test_appservice.py
@@ -244,7 +244,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
service = Mock(id=self.as_list[0]["id"])
events = [Mock(event_id="e1"), Mock(event_id="e2")]
txn = yield defer.ensureDeferred(
- self.store.create_appservice_txn(service, events)
+ self.store.create_appservice_txn(service, events, [])
)
self.assertEquals(txn.id, 1)
self.assertEquals(txn.events, events)
@@ -258,7 +258,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
yield self._insert_txn(service.id, 9644, events)
yield self._insert_txn(service.id, 9645, events)
txn = yield defer.ensureDeferred(
- self.store.create_appservice_txn(service, events)
+ self.store.create_appservice_txn(service, events, [])
)
self.assertEquals(txn.id, 9646)
self.assertEquals(txn.events, events)
@@ -270,7 +270,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
events = [Mock(event_id="e1"), Mock(event_id="e2")]
yield self._set_last_txn(service.id, 9643)
txn = yield defer.ensureDeferred(
- self.store.create_appservice_txn(service, events)
+ self.store.create_appservice_txn(service, events, [])
)
self.assertEquals(txn.id, 9644)
self.assertEquals(txn.events, events)
@@ -293,7 +293,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
yield self._insert_txn(self.as_list[3]["id"], 9643, events)
txn = yield defer.ensureDeferred(
- self.store.create_appservice_txn(service, events)
+ self.store.create_appservice_txn(service, events, [])
)
self.assertEquals(txn.id, 9644)
self.assertEquals(txn.events, events)
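All four storage-test call sites gain the same third argument: `create_appservice_txn` now records ephemeral events alongside the persistent ones, and callers with none pass an empty list. A sketch of a call under the new signature (names are illustrative):

```python
from typing import List


async def create_txn(store, service, events: List, ephemeral: List) -> int:
    # `events` remains the list of persistent PDUs; `ephemeral` carries the
    # MSC2409 events (typing, receipts, presence) and may be empty, as in
    # every call site updated above.
    txn = await store.create_appservice_txn(service, events, ephemeral)
    return txn.id
```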
diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py
index 0adb2174af..f12834edab 100644
--- a/tests/util/test_lrucache.py
+++ b/tests/util/test_lrucache.py
@@ -59,7 +59,7 @@ class LruCacheTestCase(unittest.HomeserverTestCase):
self.assertEquals(cache.pop("key"), None)
def test_del_multi(self):
- cache = LruCache(4, 2, cache_type=TreeCache)
+ cache = LruCache(4, keylen=2, cache_type=TreeCache)
cache[("animal", "cat")] = "mew"
cache[("animal", "dog")] = "woof"
cache[("vehicles", "car")] = "vroom"
@@ -160,7 +160,7 @@ class LruCacheCallbacksTestCase(unittest.HomeserverTestCase):
m2 = Mock()
m3 = Mock()
m4 = Mock()
- cache = LruCache(4, 2, cache_type=TreeCache)
+ cache = LruCache(4, keylen=2, cache_type=TreeCache)
cache.set(("a", "1"), "value", callbacks=[m1])
cache.set(("a", "2"), "value", callbacks=[m2])
diff --git a/tests/utils.py b/tests/utils.py
index e3bbc0db84..6e7a6fd3cf 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -21,6 +21,7 @@ import time
import uuid
import warnings
from inspect import getcallargs
+from typing import Type
from urllib import parse as urlparse
from mock import Mock, patch
@@ -196,8 +197,8 @@ def setup_test_homeserver(
name="test",
config=None,
reactor=None,
- homeserverToUse=TestHomeServer,
- **kargs
+ homeserver_to_use: Type[HomeServer] = TestHomeServer,
+ **kwargs
):
"""
Setup a homeserver suitable for running tests against. Keyword arguments
@@ -220,8 +221,8 @@ def setup_test_homeserver(
config.ldap_enabled = False
- if "clock" not in kargs:
- kargs["clock"] = MockClock()
+ if "clock" not in kwargs:
+ kwargs["clock"] = MockClock()
if USE_POSTGRES_FOR_TESTS:
test_db = "synapse_test_%s" % uuid.uuid4().hex
@@ -266,18 +267,20 @@ def setup_test_homeserver(
cur.close()
db_conn.close()
- hs = homeserverToUse(
- name,
- config=config,
- version_string="Synapse/tests",
- tls_server_context_factory=Mock(),
- tls_client_options_factory=Mock(),
- reactor=reactor,
- **kargs
+ hs = homeserver_to_use(
+ name, config=config, version_string="Synapse/tests", reactor=reactor,
)
+ # Install @cache_in_self attributes
+ for key, val in kwargs.items():
+ setattr(hs, key, val)
+
+ # Mock TLS
+ hs.tls_server_context_factory = Mock()
+ hs.tls_client_options_factory = Mock()
+
hs.setup()
- if homeserverToUse.__name__ == "TestHomeServer":
+ if homeserver_to_use == TestHomeServer:
hs.setup_background_tasks()
if isinstance(db_engine, PostgresEngine):
@@ -341,7 +344,7 @@ def setup_test_homeserver(
hs.get_auth_handler().validate_hash = validate_hash
- fed = kargs.get("resource_for_federation", None)
+ fed = kwargs.get("resource_for_federation", None)
if fed:
register_federation_servlets(hs, fed)
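With these changes, extra keyword arguments to `setup_test_homeserver` are installed on the constructed `HomeServer` via `setattr` (matching the `@cache_in_self` accessors) instead of being forwarded to its constructor, and the TLS factories are mocked after construction. The worker test call sites above reduce to:

```python
from synapse.app.generic_worker import GenericWorkerServer

from tests.unittest import HomeserverTestCase


class ExampleWorkerTests(HomeserverTestCase):
    def make_homeserver(self, reactor, clock):
        # http_client lands on the HomeServer as an attribute; the class is
        # compared with `==` rather than by name when deciding whether to
        # start background tasks.
        return self.setup_test_homeserver(
            http_client=None, homeserver_to_use=GenericWorkerServer
        )
```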