author | Erik Johnston <erik@matrix.org> | 2020-10-13 12:07:56 +0100
---|---|---
committer | GitHub <noreply@github.com> | 2020-10-13 12:07:56 +0100
commit | b2486f6656bec2307e62de19d2830994a42b879d (patch) |
tree | 630ab6e5b341a53b5f6c1f7e966ab79673eb73b9 /tests |
parent | Merge branch 'master' into develop (diff) |
download | synapse-b2486f6656bec2307e62de19d2830994a42b879d.tar.xz |
Fix message duplication if something goes wrong after persisting the event (#8476)
Should fix #3365.
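The behaviour these tests exercise is deduplication of client sends by transaction ID: if the same `txn_id` is submitted twice by the same user and access token, the second attempt should get back the event that was already persisted (same event ID and stream position) rather than creating a duplicate message. The sketch below only illustrates that contract; it is not Synapse's implementation, and `EventStore`, `persist_event`, and the in-memory mapping are made up for the example.

```python
import uuid
from typing import Dict, Optional, Tuple


class EventStore:
    """Toy store: remembers which event a (user, token, txn_id) triple produced."""

    def __init__(self) -> None:
        # (user_id, token_id, txn_id) -> (event_id, stream_ordering)
        self._txn_map: Dict[Tuple[str, int, str], Tuple[str, int]] = {}
        self._stream_ordering = 0

    def persist_event(
        self, user_id: str, token_id: int, txn_id: Optional[str]
    ) -> Tuple[str, int]:
        # If this transaction ID has already produced an event, hand back the
        # original (event_id, stream_ordering) instead of persisting again.
        if txn_id is not None:
            existing = self._txn_map.get((user_id, token_id, txn_id))
            if existing is not None:
                return existing

        self._stream_ordering += 1
        event_id = "$" + uuid.uuid4().hex
        result = (event_id, self._stream_ordering)

        if txn_id is not None:
            self._txn_map[(user_id, token_id, txn_id)] = result
        return result


store = EventStore()
first = store.persist_event("@tester:example.com", 1, "txn-abc")
retry = store.persist_event("@tester:example.com", 1, "txn-abc")
assert first == retry  # the retried send is deduplicated to the original event
```

The actual fix performs this check in the event persistence layer, which is why the new tests call `persist_event`/`persist_events` directly as well as going through `handle_new_client_event`.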
Diffstat
-rw-r--r-- | tests/handlers/test_message.py | 157
-rw-r--r-- | tests/rest/client/test_third_party_rules.py | 2
-rw-r--r-- | tests/unittest.py | 11
3 files changed, 167 insertions, 3 deletions
```diff
diff --git a/tests/handlers/test_message.py b/tests/handlers/test_message.py
new file mode 100644
index 0000000000..64e28bc639
--- /dev/null
+++ b/tests/handlers/test_message.py
@@ -0,0 +1,157 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+from typing import Tuple
+
+from synapse.api.constants import EventTypes
+from synapse.events import EventBase
+from synapse.events.snapshot import EventContext
+from synapse.rest import admin
+from synapse.rest.client.v1 import login, room
+from synapse.types import create_requester
+from synapse.util.stringutils import random_string
+
+from tests import unittest
+
+logger = logging.getLogger(__name__)
+
+
+class EventCreationTestCase(unittest.HomeserverTestCase):
+    servlets = [
+        admin.register_servlets,
+        login.register_servlets,
+        room.register_servlets,
+    ]
+
+    def prepare(self, reactor, clock, hs):
+        self.handler = self.hs.get_event_creation_handler()
+        self.persist_event_storage = self.hs.get_storage().persistence
+
+        self.user_id = self.register_user("tester", "foobar")
+        self.access_token = self.login("tester", "foobar")
+        self.room_id = self.helper.create_room_as(self.user_id, tok=self.access_token)
+
+        self.info = self.get_success(
+            self.hs.get_datastore().get_user_by_access_token(self.access_token,)
+        )
+        self.token_id = self.info["token_id"]
+
+        self.requester = create_requester(self.user_id, access_token_id=self.token_id)
+
+    def _create_duplicate_event(self, txn_id: str) -> Tuple[EventBase, EventContext]:
+        """Create a new event with the given transaction ID. All events produced
+        by this method will be considered duplicates.
+        """
+
+        # We create a new event with a random body, as otherwise we'll produce
+        # *exactly* the same event with the same hash, and so same event ID.
+        return self.get_success(
+            self.handler.create_event(
+                self.requester,
+                {
+                    "type": EventTypes.Message,
+                    "room_id": self.room_id,
+                    "sender": self.requester.user.to_string(),
+                    "content": {"msgtype": "m.text", "body": random_string(5)},
+                },
+                token_id=self.token_id,
+                txn_id=txn_id,
+            )
+        )
+
+    def test_duplicated_txn_id(self):
+        """Test that attempting to handle/persist an event with a transaction ID
+        that has already been persisted correctly returns the old event and does
+        *not* produce duplicate messages.
+        """
+
+        txn_id = "something_suitably_random"
+
+        event1, context = self._create_duplicate_event(txn_id)
+
+        ret_event1 = self.get_success(
+            self.handler.handle_new_client_event(self.requester, event1, context)
+        )
+        stream_id1 = ret_event1.internal_metadata.stream_ordering
+
+        self.assertEqual(event1.event_id, ret_event1.event_id)
+
+        event2, context = self._create_duplicate_event(txn_id)
+
+        # We want to test that the deduplication at the persist event end works,
+        # so we want to make sure we test with different events.
+        self.assertNotEqual(event1.event_id, event2.event_id)
+
+        ret_event2 = self.get_success(
+            self.handler.handle_new_client_event(self.requester, event2, context)
+        )
+        stream_id2 = ret_event2.internal_metadata.stream_ordering
+
+        # Assert that the returned values match those from the initial event
+        # rather than the new one.
+        self.assertEqual(ret_event1.event_id, ret_event2.event_id)
+        self.assertEqual(stream_id1, stream_id2)
+
+        # Let's test that calling `persist_event` directly also does the right
+        # thing.
+        event3, context = self._create_duplicate_event(txn_id)
+        self.assertNotEqual(event1.event_id, event3.event_id)
+
+        ret_event3, event_pos3, _ = self.get_success(
+            self.persist_event_storage.persist_event(event3, context)
+        )
+
+        # Assert that the returned values match those from the initial event
+        # rather than the new one.
+        self.assertEqual(ret_event1.event_id, ret_event3.event_id)
+        self.assertEqual(stream_id1, event_pos3.stream)
+
+        # Let's test that calling `persist_events` directly also does the right
+        # thing.
+        event4, context = self._create_duplicate_event(txn_id)
+        self.assertNotEqual(event1.event_id, event4.event_id)
+
+        events, _ = self.get_success(
+            self.persist_event_storage.persist_events([(event4, context)])
+        )
+        ret_event4 = events[0]
+
+        # Assert that the returned values match those from the initial event
+        # rather than the new one.
+        self.assertEqual(ret_event1.event_id, ret_event4.event_id)
+
+    def test_duplicated_txn_id_one_call(self):
+        """Test that we correctly handle duplicates that we try to persist at
+        the same time.
+        """
+
+        txn_id = "something_else_suitably_random"
+
+        # Create two duplicate events to persist at the same time
+        event1, context1 = self._create_duplicate_event(txn_id)
+        event2, context2 = self._create_duplicate_event(txn_id)
+
+        # Ensure their event IDs are different to start with
+        self.assertNotEqual(event1.event_id, event2.event_id)
+
+        events, _ = self.get_success(
+            self.persist_event_storage.persist_events(
+                [(event1, context1), (event2, context2)]
+            )
+        )
+
+        # Check that we've deduplicated the events.
+        self.assertEqual(len(events), 2)
+        self.assertEqual(events[0].event_id, events[1].event_id)
diff --git a/tests/rest/client/test_third_party_rules.py b/tests/rest/client/test_third_party_rules.py
index d03e121664..b737625e33 100644
--- a/tests/rest/client/test_third_party_rules.py
+++ b/tests/rest/client/test_third_party_rules.py
@@ -107,7 +107,7 @@ class ThirdPartyRulesTestCase(unittest.HomeserverTestCase):
 
         request, channel = self.make_request(
             "PUT",
-            "/_matrix/client/r0/rooms/%s/send/foo.bar.forbidden/1" % self.room_id,
+            "/_matrix/client/r0/rooms/%s/send/foo.bar.forbidden/2" % self.room_id,
             {},
             access_token=self.tok,
         )
diff --git a/tests/unittest.py b/tests/unittest.py
index 5c87f6097e..6c1661c92c 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -254,17 +254,24 @@ class HomeserverTestCase(TestCase):
 
         if hasattr(self, "user_id"):
             if self.hijack_auth:
+                # We need a valid token ID to satisfy foreign key constraints.
+                token_id = self.get_success(
+                    self.hs.get_datastore().add_access_token_to_user(
+                        self.helper.auth_user_id, "some_fake_token", None, None,
+                    )
+                )
+
                 async def get_user_by_access_token(token=None, allow_guest=False):
                     return {
                         "user": UserID.from_string(self.helper.auth_user_id),
-                        "token_id": 1,
+                        "token_id": token_id,
                         "is_guest": False,
                     }
 
                 async def get_user_by_req(request, allow_guest=False, rights="access"):
                     return create_requester(
                         UserID.from_string(self.helper.auth_user_id),
-                        1,
+                        token_id,
                         False,
                         False,
                         None,
```
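The `tests/unittest.py` hunk replaces the hard-coded `"token_id": 1` stub with the ID of a freshly created access token because, per the new comment, a valid token ID is needed to satisfy foreign key constraints once transaction IDs are tracked against the token that sent them. The snippet below is a standalone illustration of that failure mode using SQLite; the `event_txn_id` table and its columns are hypothetical stand-ins, not Synapse's actual schema.

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("PRAGMA foreign_keys = ON")
conn.execute("CREATE TABLE access_tokens (id INTEGER PRIMARY KEY, token TEXT)")
conn.execute(
    "CREATE TABLE event_txn_id ("
    " event_id TEXT,"
    " token_id INTEGER REFERENCES access_tokens(id),"
    " txn_id TEXT)"
)

# A made-up token ID (like the old hard-coded 1) violates the foreign key...
try:
    conn.execute("INSERT INTO event_txn_id VALUES ('$ev1', 1, 'txn-abc')")
except sqlite3.IntegrityError as exc:
    print("rejected:", exc)

# ...so the test harness now inserts a real token first and uses its row ID.
token_id = conn.execute(
    "INSERT INTO access_tokens (token) VALUES ('some_fake_token')"
).lastrowid
conn.execute("INSERT INTO event_txn_id VALUES ('$ev1', ?, 'txn-abc')", (token_id,))
print("ok, token_id =", token_id)
```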