diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py
index 8aca9a3ab9..91118a8d84 100644
--- a/synapse/events/__init__.py
+++ b/synapse/events/__init__.py
@@ -39,7 +39,7 @@ from unpaddedbase64 import encode_base64
from synapse.api.constants import RelationTypes
from synapse.api.room_versions import EventFormatVersions, RoomVersion, RoomVersions
-from synapse.types import JsonDict, RoomStreamToken
+from synapse.types import JsonDict, RoomStreamToken, StrCollection
from synapse.util.caches import intern_dict
from synapse.util.frozenutils import freeze
from synapse.util.stringutils import strtobool
@@ -413,7 +413,7 @@ class EventBase(metaclass=abc.ABCMeta):
"""
return [e for e, _ in self._dict["prev_events"]]
- def auth_event_ids(self) -> Sequence[str]:
+ def auth_event_ids(self) -> StrCollection:
"""Returns the list of auth event IDs. The order matches the order
specified in the event, though there is no meaning to it.
@@ -558,7 +558,7 @@ class FrozenEventV2(EventBase):
"""
return self._dict["prev_events"]
- def auth_event_ids(self) -> Sequence[str]:
+ def auth_event_ids(self) -> StrCollection:
"""Returns the list of auth event IDs. The order matches the order
specified in the event, though there is no meaning to it.
diff --git a/synapse/events/builder.py b/synapse/events/builder.py
index 94dd1298e1..c82745275f 100644
--- a/synapse/events/builder.py
+++ b/synapse/events/builder.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Collection, Dict, List, Optional, Tuple, Union
import attr
from signedjson.types import SigningKey
@@ -103,7 +103,7 @@ class EventBuilder:
async def build(
self,
- prev_event_ids: List[str],
+ prev_event_ids: Collection[str],
auth_event_ids: Optional[List[str]],
depth: Optional[int] = None,
) -> EventBase:
@@ -136,7 +136,7 @@ class EventBuilder:
format_version = self.room_version.event_format
# The types of auth/prev events changes between event versions.
- prev_events: Union[List[str], List[Tuple[str, Dict[str, str]]]]
+ prev_events: Union[Collection[str], List[Tuple[str, Dict[str, str]]]]
auth_events: Union[List[str], List[Tuple[str, Dict[str, str]]]]
if format_version == EventFormatVersions.ROOM_V1_V2:
auth_events = await self._store.add_event_hashes(auth_event_ids)
diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py
index 6eaef8b57a..c04ad08cbb 100644
--- a/synapse/events/snapshot.py
+++ b/synapse/events/snapshot.py
@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, List, Optional, Tuple
import attr
@@ -22,12 +23,56 @@ from synapse.types import JsonDict, StateMap
if TYPE_CHECKING:
from synapse.storage.controllers import StorageControllers
+ from synapse.storage.databases import StateGroupDataStore
from synapse.storage.databases.main import DataStore
from synapse.types.state import StateFilter
+class UnpersistedEventContextBase(ABC):
+ """
+ This is a base class for EventContext and UnpersistedEventContext, objects which
+ hold information relevant to storing an associated event. Note that an
+    UnpersistedEventContext must be converted into an EventContext before it is
+ suitable to send to the db with its associated event.
+
+ Attributes:
+ _storage: storage controllers for interfacing with the database
+ app_service: If the associated event is being sent by a (local) application service, that
+ app service.
+ """
+
+ def __init__(self, storage_controller: "StorageControllers"):
+ self._storage: "StorageControllers" = storage_controller
+ self.app_service: Optional[ApplicationService] = None
+
+ @abstractmethod
+ async def persist(
+ self,
+ event: EventBase,
+ ) -> "EventContext":
+ """
+ A method to convert an UnpersistedEventContext to an EventContext, suitable for
+ sending to the database with the associated event.
+ """
+ pass
+
+ @abstractmethod
+ async def get_prev_state_ids(
+ self, state_filter: Optional["StateFilter"] = None
+ ) -> StateMap[str]:
+ """
+ Gets the room state at the event (ie not including the event if the event is a
+ state event).
+
+ Args:
+ state_filter: specifies the type of state event to fetch from DB, example:
+ EventTypes.JoinRules
+ """
+ pass
+
+
@attr.s(slots=True, auto_attribs=True)
-class EventContext:
+class EventContext(UnpersistedEventContextBase):
"""
Holds information relevant to persisting an event
@@ -77,9 +122,6 @@ class EventContext:
delta_ids: If ``prev_group`` is not None, the state delta between ``prev_group``
and ``state_group``.
- app_service: If this event is being sent by a (local) application service, that
- app service.
-
partial_state: if True, we may be storing this event with a temporary,
incomplete state.
"""
@@ -122,6 +164,9 @@ class EventContext:
"""Return an EventContext instance suitable for persisting an outlier event"""
return EventContext(storage=storage)
+ async def persist(self, event: EventBase) -> "EventContext":
+ return self
+
async def serialize(self, event: EventBase, store: "DataStore") -> JsonDict:
"""Converts self to a type that can be serialized as JSON, and then
deserialized by `deserialize`
@@ -248,11 +293,182 @@ class EventContext:
Maps a (type, state_key) to the event ID of the state event matching
this tuple.
"""
+
+ assert self.state_group_before_event is not None
+ return await self._storage.state.get_state_ids_for_group(
+ self.state_group_before_event, state_filter
+ )
+
+
+@attr.s(slots=True, auto_attribs=True)
+class UnpersistedEventContext(UnpersistedEventContextBase):
+ """
+ The event context holds information about the state groups for an event. It is important
+ to remember that an event technically has two state groups: the state group before the
+ event, and the state group after the event. If the event is not a state event, the state
+ group will not change (ie the state group before the event will be the same as the state
+ group after the event), but if it is a state event the state group before the event
+ will differ from the state group after the event.
+ This is a version of an EventContext before the new state group (if any) has been
+ computed and stored. It contains information about the state before the event (which
+ also may be the information after the event, if the event is not a state event). The
+ UnpersistedEventContext must be converted into an EventContext by calling the method
+ 'persist' on it before it is suitable to be sent to the DB for processing.
+
+ state_group_after_event:
+ The state group after the event. This will always be None until it is persisted.
+ If the event is not a state event, this will be the same as
+ state_group_before_event.
+
+ state_group_before_event:
+ The ID of the state group representing the state of the room before this event.
+
+ state_delta_due_to_event:
+ If the event is a state event, then this is the delta of the state between
+        `state_group_after_event` and `state_group_before_event`
+
+ prev_group_for_state_group_before_event:
+ If it is known, ``state_group_before_event``'s previous state group.
+
+ delta_ids_to_state_group_before_event:
+ If ``prev_group_for_state_group_before_event`` is not None, the state delta
+ between ``prev_group_for_state_group_before_event`` and ``state_group_before_event``.
+
+ partial_state:
+ Whether the event has partial state.
+
+ state_map_before_event:
+ A map of the state before the event, i.e. the state at `state_group_before_event`
+ """
+
+ _storage: "StorageControllers"
+ state_group_before_event: Optional[int]
+ state_group_after_event: Optional[int]
+ state_delta_due_to_event: Optional[dict]
+ prev_group_for_state_group_before_event: Optional[int]
+ delta_ids_to_state_group_before_event: Optional[StateMap[str]]
+ partial_state: bool
+ state_map_before_event: Optional[StateMap[str]] = None
+
+ @classmethod
+ async def batch_persist_unpersisted_contexts(
+ cls,
+ events_and_context: List[Tuple[EventBase, "UnpersistedEventContextBase"]],
+ room_id: str,
+ last_known_state_group: int,
+ datastore: "StateGroupDataStore",
+ ) -> List[Tuple[EventBase, EventContext]]:
+ """
+ Takes a list of events and their associated unpersisted contexts and persists
+ the unpersisted contexts, returning a list of events and persisted contexts.
+ Note that all the events must be in a linear chain (ie a <- b <- c).
+
+ Args:
+ events_and_context: A list of events and their unpersisted contexts
+ room_id: the room_id for the events
+ last_known_state_group: the last persisted state group
+ datastore: a state datastore
+ """
+ amended_events_and_context = await datastore.store_state_deltas_for_batched(
+ events_and_context, room_id, last_known_state_group
+ )
+
+ events_and_persisted_context = []
+ for event, unpersisted_context in amended_events_and_context:
+ if event.is_state():
+ context = EventContext(
+ storage=unpersisted_context._storage,
+ state_group=unpersisted_context.state_group_after_event,
+ state_group_before_event=unpersisted_context.state_group_before_event,
+ state_delta_due_to_event=unpersisted_context.state_delta_due_to_event,
+ partial_state=unpersisted_context.partial_state,
+ prev_group=unpersisted_context.state_group_before_event,
+ delta_ids=unpersisted_context.state_delta_due_to_event,
+ )
+ else:
+ context = EventContext(
+ storage=unpersisted_context._storage,
+ state_group=unpersisted_context.state_group_after_event,
+ state_group_before_event=unpersisted_context.state_group_before_event,
+ state_delta_due_to_event=unpersisted_context.state_delta_due_to_event,
+ partial_state=unpersisted_context.partial_state,
+ prev_group=unpersisted_context.prev_group_for_state_group_before_event,
+ delta_ids=unpersisted_context.delta_ids_to_state_group_before_event,
+ )
+ events_and_persisted_context.append((event, context))
+ return events_and_persisted_context
+
+ async def get_prev_state_ids(
+ self, state_filter: Optional["StateFilter"] = None
+ ) -> StateMap[str]:
+ """
+ Gets the room state map, excluding this event.
+
+ Args:
+ state_filter: specifies the type of state event to fetch from DB
+
+ Returns:
+ Maps a (type, state_key) to the event ID of the state event matching
+ this tuple.
+ """
+ if self.state_map_before_event:
+ return self.state_map_before_event
+
assert self.state_group_before_event is not None
return await self._storage.state.get_state_ids_for_group(
self.state_group_before_event, state_filter
)
+ async def persist(self, event: EventBase) -> EventContext:
+ """
+ Creates a full `EventContext` for the event, persisting any referenced state that
+ has not yet been persisted.
+
+ Args:
+ event: event that the EventContext is associated with.
+
+ Returns: An EventContext suitable for sending to the database with the event
+ for persisting
+ """
+ assert self.partial_state is not None
+
+ # If we have a full set of state for before the event but don't have a state
+ # group for that state, we need to get one
+ if self.state_group_before_event is None:
+ assert self.state_map_before_event
+ state_group_before_event = await self._storage.state.store_state_group(
+ event.event_id,
+ event.room_id,
+ prev_group=self.prev_group_for_state_group_before_event,
+ delta_ids=self.delta_ids_to_state_group_before_event,
+ current_state_ids=self.state_map_before_event,
+ )
+ self.state_group_before_event = state_group_before_event
+
+ # if the event isn't a state event the state group doesn't change
+ if not self.state_delta_due_to_event:
+ state_group_after_event = self.state_group_before_event
+
+ # otherwise if it is a state event we need to get a state group for it
+ else:
+ state_group_after_event = await self._storage.state.store_state_group(
+ event.event_id,
+ event.room_id,
+ prev_group=self.state_group_before_event,
+ delta_ids=self.state_delta_due_to_event,
+ current_state_ids=None,
+ )
+
+ return EventContext.with_state(
+ storage=self._storage,
+ state_group=state_group_after_event,
+ state_group_before_event=self.state_group_before_event,
+ state_delta_due_to_event=self.state_delta_due_to_event,
+ partial_state=self.partial_state,
+ prev_group=self.state_group_before_event,
+ delta_ids=self.state_delta_due_to_event,
+ )
+
def _encode_state_dict(
state_dict: Optional[StateMap[str]],
diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py
index 623a2c71ea..765c15bb51 100644
--- a/synapse/events/spamcheck.py
+++ b/synapse/events/spamcheck.py
@@ -33,8 +33,8 @@ from typing_extensions import Literal
import synapse
from synapse.api.errors import Codes
from synapse.logging.opentracing import trace
-from synapse.rest.media.v1._base import FileInfo
-from synapse.rest.media.v1.media_storage import ReadableFileWrapper
+from synapse.media._base import FileInfo
+from synapse.media.media_storage import ReadableFileWrapper
from synapse.spam_checker_api import RegistrationBehaviour
from synapse.types import JsonDict, RoomAlias, UserProfile
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable
diff --git a/synapse/events/third_party_rules.py b/synapse/events/third_party_rules.py
index 72ab696898..3e4d52c8d8 100644
--- a/synapse/events/third_party_rules.py
+++ b/synapse/events/third_party_rules.py
@@ -18,7 +18,7 @@ from twisted.internet.defer import CancelledError
from synapse.api.errors import ModuleFailedException, SynapseError
from synapse.events import EventBase
-from synapse.events.snapshot import EventContext
+from synapse.events.snapshot import UnpersistedEventContextBase
from synapse.storage.roommember import ProfileInfo
from synapse.types import Requester, StateMap
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable
@@ -45,6 +45,8 @@ CHECK_CAN_DEACTIVATE_USER_CALLBACK = Callable[[str, bool], Awaitable[bool]]
ON_PROFILE_UPDATE_CALLBACK = Callable[[str, ProfileInfo, bool, bool], Awaitable]
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK = Callable[[str, bool, bool], Awaitable]
ON_THREEPID_BIND_CALLBACK = Callable[[str, str, str], Awaitable]
+ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable]
+ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable]
def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
@@ -78,7 +80,6 @@ def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
# correctly, we need to await its result. Therefore it doesn't make a lot of
# sense to make it go through the run() wrapper.
if f.__name__ == "check_event_allowed":
-
# We need to wrap check_event_allowed because its old form would return either
# a boolean or a dict, but now we want to return the dict separately from the
# boolean.
@@ -100,7 +101,6 @@ def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
return wrap_check_event_allowed
if f.__name__ == "on_create_room":
-
# We need to wrap on_create_room because its old form would return a boolean
# if the room creation is denied, but now we just want it to raise an
# exception.
@@ -174,6 +174,12 @@ class ThirdPartyEventRules:
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
] = []
self._on_threepid_bind_callbacks: List[ON_THREEPID_BIND_CALLBACK] = []
+ self._on_add_user_third_party_identifier_callbacks: List[
+ ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
+ ] = []
+ self._on_remove_user_third_party_identifier_callbacks: List[
+ ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
+ ] = []
def register_third_party_rules_callbacks(
self,
@@ -193,6 +199,12 @@ class ThirdPartyEventRules:
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
] = None,
on_threepid_bind: Optional[ON_THREEPID_BIND_CALLBACK] = None,
+ on_add_user_third_party_identifier: Optional[
+ ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
+ ] = None,
+ on_remove_user_third_party_identifier: Optional[
+ ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
+ ] = None,
) -> None:
"""Register callbacks from modules for each hook."""
if check_event_allowed is not None:
@@ -230,8 +242,20 @@ class ThirdPartyEventRules:
         if on_threepid_bind is not None:
             self._on_threepid_bind_callbacks.append(on_threepid_bind)
+        if on_add_user_third_party_identifier is not None:
+            self._on_add_user_third_party_identifier_callbacks.append(
+                on_add_user_third_party_identifier
+            )
+
+        # Without this registration, modules passing on_remove_user_third_party_identifier
+        # would have their callback silently ignored even though the dispatcher
+        # on_remove_user_third_party_identifier() iterates this list.
+        if on_remove_user_third_party_identifier is not None:
+            self._on_remove_user_third_party_identifier_callbacks.append(
+                on_remove_user_third_party_identifier
+            )
+
async def check_event_allowed(
- self, event: EventBase, context: EventContext
+ self,
+ event: EventBase,
+ context: UnpersistedEventContextBase,
) -> Tuple[bool, Optional[dict]]:
"""Check if a provided event should be allowed in the given context.
@@ -511,6 +530,9 @@ class ThirdPartyEventRules:
local homeserver, not when it's created on an identity server (and then kept track
of so that it can be unbound on the same IS later on).
+ THIS MODULE CALLBACK METHOD HAS BEEN DEPRECATED. Please use the
+ `on_add_user_third_party_identifier` callback method instead.
+
Args:
user_id: the user being associated with the threepid.
medium: the threepid's medium.
@@ -523,3 +545,44 @@ class ThirdPartyEventRules:
logger.exception(
"Failed to run module API callback %s: %s", callback, e
)
+
+ async def on_add_user_third_party_identifier(
+ self, user_id: str, medium: str, address: str
+ ) -> None:
+ """Called when an association between a user's Matrix ID and a third-party ID
+ (email, phone number) has successfully been registered on the homeserver.
+
+ Args:
+ user_id: The User ID included in the association.
+ medium: The medium of the third-party ID (email, msisdn).
+ address: The address of the third-party ID (i.e. an email address).
+ """
+ for callback in self._on_add_user_third_party_identifier_callbacks:
+ try:
+ await callback(user_id, medium, address)
+ except Exception as e:
+ logger.exception(
+ "Failed to run module API callback %s: %s", callback, e
+ )
+
+ async def on_remove_user_third_party_identifier(
+ self, user_id: str, medium: str, address: str
+ ) -> None:
+ """Called when an association between a user's Matrix ID and a third-party ID
+ (email, phone number) has been successfully removed on the homeserver.
+
+ This is called *after* any known bindings on identity servers for this
+ association have been removed.
+
+ Args:
+ user_id: The User ID included in the removed association.
+ medium: The medium of the third-party ID (email, msisdn).
+ address: The address of the third-party ID (i.e. an email address).
+ """
+ for callback in self._on_remove_user_third_party_identifier_callbacks:
+ try:
+ await callback(user_id, medium, address)
+ except Exception as e:
+ logger.exception(
+ "Failed to run module API callback %s: %s", callback, e
+ )
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index ebf8c7ed83..b9c15ffcdb 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -38,8 +38,7 @@ from synapse.api.constants import (
)
from synapse.api.errors import Codes, SynapseError
from synapse.api.room_versions import RoomVersion
-from synapse.types import JsonDict
-from synapse.util.frozenutils import unfreeze
+from synapse.types import JsonDict, Requester
from . import EventBase
@@ -317,8 +316,9 @@ class SerializeEventConfig:
as_client_event: bool = True
# Function to convert from federation format to client format
event_format: Callable[[JsonDict], JsonDict] = format_event_for_client_v1
- # ID of the user's auth token - used for namespacing of transaction IDs
- token_id: Optional[int] = None
+ # The entity that requested the event. This is used to determine whether to include
+ # the transaction_id in the unsigned section of the event.
+ requester: Optional[Requester] = None
# List of event fields to include. If empty, all fields will be returned.
only_event_fields: Optional[List[str]] = None
# Some events can have stripped room state stored in the `unsigned` field.
@@ -368,11 +368,24 @@ def serialize_event(
e.unsigned["redacted_because"], time_now_ms, config=config
)
- if config.token_id is not None:
- if config.token_id == getattr(e.internal_metadata, "token_id", None):
- txn_id = getattr(e.internal_metadata, "txn_id", None)
- if txn_id is not None:
- d["unsigned"]["transaction_id"] = txn_id
+ # If we have a txn_id saved in the internal_metadata, we should include it in the
+ # unsigned section of the event if it was sent by the same session as the one
+ # requesting the event.
+ # There is a special case for guests, because they only have one access token
+ # without associated access_token_id, so we always include the txn_id for events
+ # they sent.
+ txn_id = getattr(e.internal_metadata, "txn_id", None)
+ if txn_id is not None and config.requester is not None:
+ event_token_id = getattr(e.internal_metadata, "token_id", None)
+ if config.requester.user.to_string() == e.sender and (
+ (
+ event_token_id is not None
+ and config.requester.access_token_id is not None
+ and event_token_id == config.requester.access_token_id
+ )
+ or config.requester.is_guest
+ ):
+ d["unsigned"]["transaction_id"] = txn_id
# invite_room_state and knock_room_state are a list of stripped room state events
# that are meant to provide metadata about a room to an invitee/knocker. They are
@@ -403,14 +416,6 @@ class EventClientSerializer:
clients.
"""
- def __init__(self, inhibit_replacement_via_edits: bool = False):
- """
- Args:
- inhibit_replacement_via_edits: If this is set to True, then events are
- never replaced by their edits.
- """
- self._inhibit_replacement_via_edits = inhibit_replacement_via_edits
-
def serialize_event(
self,
event: Union[JsonDict, EventBase],
@@ -418,7 +423,6 @@ class EventClientSerializer:
*,
config: SerializeEventConfig = _DEFAULT_SERIALIZE_EVENT_CONFIG,
bundle_aggregations: Optional[Dict[str, "BundledAggregations"]] = None,
- apply_edits: bool = True,
) -> JsonDict:
"""Serializes a single event.
@@ -428,10 +432,7 @@ class EventClientSerializer:
config: Event serialization config
bundle_aggregations: A map from event_id to the aggregations to be bundled
into the event.
- apply_edits: Whether the content of the event should be modified to reflect
- any replacement in `bundle_aggregations[<event_id>].replace`.
- See also the `inhibit_replacement_via_edits` constructor arg: if that is
- set to True, then this argument is ignored.
+
Returns:
The serialized event
"""
@@ -450,38 +451,10 @@ class EventClientSerializer:
config,
bundle_aggregations,
serialized_event,
- apply_edits=apply_edits,
)
return serialized_event
- def _apply_edit(
- self, orig_event: EventBase, serialized_event: JsonDict, edit: EventBase
- ) -> None:
- """Replace the content, preserving existing relations of the serialized event.
-
- Args:
- orig_event: The original event.
- serialized_event: The original event, serialized. This is modified.
- edit: The event which edits the above.
- """
-
- # Ensure we take copies of the edit content, otherwise we risk modifying
- # the original event.
- edit_content = edit.content.copy()
-
- # Unfreeze the event content if necessary, so that we may modify it below
- edit_content = unfreeze(edit_content)
- serialized_event["content"] = edit_content.get("m.new_content", {})
-
- # Check for existing relations
- relates_to = orig_event.content.get("m.relates_to")
- if relates_to:
- # Keep the relations, ensuring we use a dict copy of the original
- serialized_event["content"]["m.relates_to"] = relates_to.copy()
- else:
- serialized_event["content"].pop("m.relates_to", None)
-
def _inject_bundled_aggregations(
self,
event: EventBase,
@@ -489,7 +462,6 @@ class EventClientSerializer:
config: SerializeEventConfig,
bundled_aggregations: Dict[str, "BundledAggregations"],
serialized_event: JsonDict,
- apply_edits: bool,
) -> None:
"""Potentially injects bundled aggregations into the unsigned portion of the serialized event.
@@ -504,9 +476,6 @@ class EventClientSerializer:
While serializing the bundled aggregations this map may be searched
again for additional events in a recursive manner.
serialized_event: The serialized event which may be modified.
- apply_edits: Whether the content of the event should be modified to reflect
- any replacement in `aggregations.replace` (subject to the
- `inhibit_replacement_via_edits` constructor arg).
"""
# We have already checked that aggregations exist for this event.
@@ -516,22 +485,12 @@ class EventClientSerializer:
# being serialized.
serialized_aggregations = {}
- if event_aggregations.annotations:
- serialized_aggregations[
- RelationTypes.ANNOTATION
- ] = event_aggregations.annotations
-
if event_aggregations.references:
serialized_aggregations[
RelationTypes.REFERENCE
] = event_aggregations.references
if event_aggregations.replace:
- # If there is an edit, optionally apply it to the event.
- edit = event_aggregations.replace
- if apply_edits and not self._inhibit_replacement_via_edits:
- self._apply_edit(event, serialized_event, edit)
-
# Include information about it in the relations dict.
#
# Matrix spec v1.5 (https://spec.matrix.org/v1.5/client-server-api/#server-side-aggregation-of-mreplace-relationships)
@@ -539,10 +498,7 @@ class EventClientSerializer:
# `sender` of the edit; however MSC3925 proposes extending it to the whole
# of the edit, which is what we do here.
serialized_aggregations[RelationTypes.REPLACE] = self.serialize_event(
- edit,
- time_now,
- config=config,
- apply_edits=False,
+ event_aggregations.replace, time_now, config=config
)
# Include any threaded replies to this event.
|