Diffstat (limited to 'tests/replication')
-rw-r--r--  tests/replication/_base.py                          5
-rw-r--r--  tests/replication/test_client_reader_shard.py       4
-rw-r--r--  tests/replication/test_sharded_event_persister.py   8
3 files changed, 1 insertion(+), 16 deletions(-)
diff --git a/tests/replication/_base.py b/tests/replication/_base.py
index bc56b13dcd..516db4c30a 100644
--- a/tests/replication/_base.py
+++ b/tests/replication/_base.py
@@ -36,7 +36,7 @@ from synapse.server import HomeServer
 from synapse.util import Clock
 
 from tests import unittest
-from tests.server import FakeTransport, render
+from tests.server import FakeTransport
 
 try:
     import hiredis
@@ -347,9 +347,6 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
         config["worker_replication_http_port"] = "8765"
         return config
 
-    def render_on_worker(self, worker_hs: HomeServer, request: SynapseRequest):
-        render(request, self._hs_to_site[worker_hs].resource, self.reactor)
-
     def replicate(self):
         """Tell the master side of replication that something has happened, and then
         wait for the replication to occur.
diff --git a/tests/replication/test_client_reader_shard.py b/tests/replication/test_client_reader_shard.py
index 90172bd377..96801db473 100644
--- a/tests/replication/test_client_reader_shard.py
+++ b/tests/replication/test_client_reader_shard.py
@@ -55,7 +55,6 @@ class ClientReaderTestCase(BaseMultiWorkerStreamTestCase):
             "register",
             {"username": "user", "type": "m.login.password", "password": "bar"},
         )  # type: SynapseRequest, FakeChannel
-        self.render_on_worker(worker_hs, request_1)
         self.assertEqual(request_1.code, 401)
 
         # Grab the session
@@ -69,7 +68,6 @@ class ClientReaderTestCase(BaseMultiWorkerStreamTestCase):
             "register",
             {"auth": {"session": session, "type": "m.login.dummy"}},
         )  # type: SynapseRequest, FakeChannel
-        self.render_on_worker(worker_hs, request_2)
         self.assertEqual(request_2.code, 200)
 
         # We're given a registered user.
@@ -89,7 +87,6 @@ class ClientReaderTestCase(BaseMultiWorkerStreamTestCase):
             "register",
             {"username": "user", "type": "m.login.password", "password": "bar"},
         )  # type: SynapseRequest, FakeChannel
-        self.render_on_worker(worker_hs_1, request_1)
         self.assertEqual(request_1.code, 401)
 
         # Grab the session
@@ -104,7 +101,6 @@ class ClientReaderTestCase(BaseMultiWorkerStreamTestCase):
             "register",
             {"auth": {"session": session, "type": "m.login.dummy"}},
         )  # type: SynapseRequest, FakeChannel
-        self.render_on_worker(worker_hs_2, request_2)
         self.assertEqual(request_2.code, 200)
 
         # We're given a registered user.
diff --git a/tests/replication/test_sharded_event_persister.py b/tests/replication/test_sharded_event_persister.py
index 2820dd622f..77fc3856d5 100644
--- a/tests/replication/test_sharded_event_persister.py
+++ b/tests/replication/test_sharded_event_persister.py
@@ -183,7 +183,6 @@ class EventPersisterShardTestCase(BaseMultiWorkerStreamTestCase):
         request, channel = make_request(
             self.reactor, sync_hs_site, "GET", "/sync", access_token=access_token
         )
-        self.render_on_worker(sync_hs, request)
         next_batch = channel.json_body["next_batch"]
 
         # We now gut wrench into the events stream MultiWriterIdGenerator on
@@ -214,7 +213,6 @@ class EventPersisterShardTestCase(BaseMultiWorkerStreamTestCase):
             "/sync?since={}".format(next_batch),
             access_token=access_token,
         )
-        self.render_on_worker(sync_hs, request)
 
         # We should only see the new event and nothing else
         self.assertIn(room_id1, channel.json_body["rooms"]["join"])
@@ -245,7 +243,6 @@ class EventPersisterShardTestCase(BaseMultiWorkerStreamTestCase):
             "/sync?since={}".format(vector_clock_token),
             access_token=access_token,
         )
-        self.render_on_worker(sync_hs, request)
 
         self.assertNotIn(room_id1, channel.json_body["rooms"]["join"])
         self.assertIn(room_id2, channel.json_body["rooms"]["join"])
@@ -271,7 +268,6 @@ class EventPersisterShardTestCase(BaseMultiWorkerStreamTestCase):
             "/sync?since={}".format(next_batch),
             access_token=access_token,
         )
-        self.render_on_worker(sync_hs, request)
 
         prev_batch1 = channel.json_body["rooms"]["join"][room_id1]["timeline"][
             "prev_batch"
@@ -292,7 +288,6 @@ class EventPersisterShardTestCase(BaseMultiWorkerStreamTestCase):
             ),
             access_token=access_token,
         )
-        self.render_on_worker(sync_hs, request)
         self.assertListEqual([], channel.json_body["chunk"])
 
         # Paginating back on the second room should produce the first event
@@ -306,7 +301,6 @@ class EventPersisterShardTestCase(BaseMultiWorkerStreamTestCase):
             ),
             access_token=access_token,
         )
-        self.render_on_worker(sync_hs, request)
         self.assertEqual(len(channel.json_body["chunk"]), 1)
         self.assertEqual(
             channel.json_body["chunk"][0]["event_id"], first_event_in_room2
@@ -322,7 +316,6 @@ class EventPersisterShardTestCase(BaseMultiWorkerStreamTestCase):
             ),
             access_token=access_token,
         )
-        self.render_on_worker(sync_hs, request)
         self.assertListEqual([], channel.json_body["chunk"])
 
         request, channel = make_request(
@@ -334,7 +327,6 @@ class EventPersisterShardTestCase(BaseMultiWorkerStreamTestCase):
             ),
             access_token=access_token,
         )
-        self.render_on_worker(sync_hs, request)
         self.assertEqual(len(channel.json_body["chunk"]), 1)
         self.assertEqual(
             channel.json_body["chunk"][0]["event_id"], first_event_in_room2