diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py
index 63628aa6b0..f7c309cad0 100644
--- a/tests/crypto/test_keyring.py
+++ b/tests/crypto/test_keyring.py
@@ -433,7 +433,7 @@ class ServerKeyFetcherTestCase(unittest.HomeserverTestCase):
async def get_json(destination, path, **kwargs):
self.assertEqual(destination, SERVER_NAME)
- self.assertEqual(path, "/_matrix/key/v2/server/key1")
+ self.assertEqual(path, "/_matrix/key/v2/server")
return response
self.http_client.get_json.side_effect = get_json
@@ -469,18 +469,6 @@ class ServerKeyFetcherTestCase(unittest.HomeserverTestCase):
keys = self.get_success(fetcher.get_keys(SERVER_NAME, ["key1"], 0))
self.assertEqual(keys, {})
- def test_keyid_containing_forward_slash(self) -> None:
- """We should url-encode any url unsafe chars in key ids.
-
- Detects https://github.com/matrix-org/synapse/issues/14488.
- """
- fetcher = ServerKeyFetcher(self.hs)
- self.get_success(fetcher.get_keys("example.com", ["key/potato"], 0))
-
- self.http_client.get_json.assert_called_once()
- args, kwargs = self.http_client.get_json.call_args
- self.assertEqual(kwargs["path"], "/_matrix/key/v2/server/key%2Fpotato")
-
class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
diff --git a/tests/events/test_presence_router.py b/tests/events/test_presence_router.py
index 685a9a6d52..b703e4472e 100644
--- a/tests/events/test_presence_router.py
+++ b/tests/events/test_presence_router.py
@@ -126,6 +126,13 @@ class PresenceRouterTestModule:
class PresenceRouterTestCase(FederatingHomeserverTestCase):
+ """
+ Test cases using a custom PresenceRouter
+
+ By default in test cases, federation sending is disabled. This class re-enables it
+ for the main process by setting `federation_sender_instances` to None.
+ """
+
servlets = [
admin.register_servlets,
login.register_servlets,
@@ -150,6 +157,11 @@ class PresenceRouterTestCase(FederatingHomeserverTestCase):
self.sync_handler = self.hs.get_sync_handler()
self.module_api = homeserver.get_module_api()
+ def default_config(self) -> JsonDict:
+ config = super().default_config()
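+        # Enable federation sending on the main process.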
+ config["federation_sender_instances"] = None
+ return config
+
@override_config(
{
"presence": {
@@ -162,7 +174,6 @@ class PresenceRouterTestCase(FederatingHomeserverTestCase):
},
}
},
- "send_federation": True,
}
)
def test_receiving_all_presence_legacy(self):
@@ -180,7 +191,6 @@ class PresenceRouterTestCase(FederatingHomeserverTestCase):
},
},
],
- "send_federation": True,
}
)
def test_receiving_all_presence(self):
@@ -290,7 +300,6 @@ class PresenceRouterTestCase(FederatingHomeserverTestCase):
},
}
},
- "send_federation": True,
}
)
def test_send_local_online_presence_to_with_module_legacy(self):
@@ -310,7 +319,6 @@ class PresenceRouterTestCase(FederatingHomeserverTestCase):
},
},
],
- "send_federation": True,
}
)
def test_send_local_online_presence_to_with_module(self):
diff --git a/tests/federation/test_federation_catch_up.py b/tests/federation/test_federation_catch_up.py
index 2873b4d430..b8fee72898 100644
--- a/tests/federation/test_federation_catch_up.py
+++ b/tests/federation/test_federation_catch_up.py
@@ -7,13 +7,21 @@ from synapse.federation.sender import PerDestinationQueue, TransactionManager
from synapse.federation.units import Edu
from synapse.rest import admin
from synapse.rest.client import login, room
+from synapse.types import JsonDict
from synapse.util.retryutils import NotRetryingDestination
from tests.test_utils import event_injection, make_awaitable
-from tests.unittest import FederatingHomeserverTestCase, override_config
+from tests.unittest import FederatingHomeserverTestCase
class FederationCatchUpTestCases(FederatingHomeserverTestCase):
+ """
+    Test cases for catching up over federation.
+
+    By default, federation sending is disabled in test cases. This test class
+    re-enables it for the main process.
+ """
+
servlets = [
admin.register_servlets,
room.register_servlets,
@@ -42,6 +50,11 @@ class FederationCatchUpTestCases(FederatingHomeserverTestCase):
self.record_transaction
)
+ def default_config(self) -> JsonDict:
+ config = super().default_config()
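+        # Enable federation sending on the main process.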
+ config["federation_sender_instances"] = None
+ return config
+
async def record_transaction(self, txn, json_cb):
if self.is_online:
data = json_cb()
@@ -79,7 +92,6 @@ class FederationCatchUpTestCases(FederatingHomeserverTestCase):
)[0]
return {"event_id": event_id, "stream_ordering": stream_ordering}
- @override_config({"send_federation": True})
def test_catch_up_destination_rooms_tracking(self):
"""
Tests that we populate the `destination_rooms` table as needed.
@@ -105,7 +117,6 @@ class FederationCatchUpTestCases(FederatingHomeserverTestCase):
self.assertEqual(row_2["event_id"], event_id_2)
self.assertEqual(row_1["stream_ordering"], row_2["stream_ordering"] - 1)
- @override_config({"send_federation": True})
def test_catch_up_last_successful_stream_ordering_tracking(self):
"""
Tests that we populate the `destination_rooms` table as needed.
@@ -163,7 +174,6 @@ class FederationCatchUpTestCases(FederatingHomeserverTestCase):
"Send succeeded but not marked as last_successful_stream_ordering",
)
- @override_config({"send_federation": True}) # critical to federate
def test_catch_up_from_blank_state(self):
"""
Runs an overall test of federation catch-up from scratch.
@@ -260,7 +270,6 @@ class FederationCatchUpTestCases(FederatingHomeserverTestCase):
return per_dest_queue, results_list
- @override_config({"send_federation": True})
def test_catch_up_loop(self):
"""
Tests the behaviour of _catch_up_transmission_loop.
@@ -325,7 +334,6 @@ class FederationCatchUpTestCases(FederatingHomeserverTestCase):
event_5.internal_metadata.stream_ordering,
)
- @override_config({"send_federation": True})
def test_catch_up_on_synapse_startup(self):
"""
Tests the behaviour of get_catch_up_outstanding_destinations and
@@ -424,7 +432,6 @@ class FederationCatchUpTestCases(FederatingHomeserverTestCase):
# - all destinations are woken exactly once; they appear once in woken.
self.assertCountEqual(woken, server_names[:-1])
- @override_config({"send_federation": True})
def test_not_latest_event(self):
"""Test that we send the latest event in the room even if its not ours."""
diff --git a/tests/federation/test_federation_sender.py b/tests/federation/test_federation_sender.py
index 01f147418b..8692d8190f 100644
--- a/tests/federation/test_federation_sender.py
+++ b/tests/federation/test_federation_sender.py
@@ -25,10 +25,17 @@ from synapse.rest.client import login
from synapse.types import JsonDict, ReadReceipt
from tests.test_utils import make_awaitable
-from tests.unittest import HomeserverTestCase, override_config
+from tests.unittest import HomeserverTestCase
class FederationSenderReceiptsTestCases(HomeserverTestCase):
+ """
+ Test federation sending to update receipts.
+
+    By default, federation sending is disabled in test cases. This test class
+    re-enables it for the main process.
+ """
+
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
federation_transport_client=Mock(spec=["send_transaction"]),
@@ -38,9 +45,17 @@ class FederationSenderReceiptsTestCases(HomeserverTestCase):
return_value=make_awaitable({"test", "host2"})
)
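+        # Point the partial-state-approximation variant of the host lookup at the
+        # same `get_current_hosts_in_room` used above.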
+ hs.get_storage_controllers().state.get_current_hosts_in_room_or_partial_state_approximation = (
+ hs.get_storage_controllers().state.get_current_hosts_in_room
+ )
+
return hs
- @override_config({"send_federation": True})
+ def default_config(self) -> JsonDict:
+ config = super().default_config()
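+        # Enable federation sending on the main process.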
+ config["federation_sender_instances"] = None
+ return config
+
def test_send_receipts(self):
mock_send_transaction = (
self.hs.get_federation_transport_client().send_transaction
@@ -83,7 +98,6 @@ class FederationSenderReceiptsTestCases(HomeserverTestCase):
],
)
- @override_config({"send_federation": True})
def test_send_receipts_thread(self):
mock_send_transaction = (
self.hs.get_federation_transport_client().send_transaction
@@ -160,7 +174,6 @@ class FederationSenderReceiptsTestCases(HomeserverTestCase):
],
)
- @override_config({"send_federation": True})
def test_send_receipts_with_backoff(self):
"""Send two receipts in quick succession; the second should be flushed, but
only after 20ms"""
@@ -247,6 +260,13 @@ class FederationSenderReceiptsTestCases(HomeserverTestCase):
class FederationSenderDevicesTestCases(HomeserverTestCase):
+ """
+ Test federation sending to update devices.
+
+    By default, federation sending is disabled in test cases. This test class
+    re-enables it for the main process.
+ """
+
servlets = [
admin.register_servlets,
login.register_servlets,
@@ -261,7 +281,8 @@ class FederationSenderDevicesTestCases(HomeserverTestCase):
def default_config(self):
c = super().default_config()
- c["send_federation"] = True
+ # Enable federation sending on the main process.
+ c["federation_sender_instances"] = None
return c
def prepare(self, reactor, clock, hs):
diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py
index 9ed26d87a7..57bfbd7734 100644
--- a/tests/handlers/test_appservice.py
+++ b/tests/handlers/test_appservice.py
@@ -765,7 +765,12 @@ class ApplicationServicesHandlerSendEventsTestCase(unittest.HomeserverTestCase):
fake_device_ids = [f"device_{num}" for num in range(number_of_messages - 1)]
messages = {
self.exclusive_as_user: {
- device_id: to_device_message_content for device_id in fake_device_ids
+ device_id: {
+ "type": "test_to_device_message",
+ "sender": "@some:sender",
+ "content": to_device_message_content,
+ }
+ for device_id in fake_device_ids
}
}
diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index c5981ff965..584e7b8971 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -992,7 +992,8 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase):
def default_config(self):
config = super().default_config()
- config["send_federation"] = True
+ # Enable federation sending on the main process.
+ config["federation_sender_instances"] = None
return config
def prepare(self, reactor, clock, hs):
diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py
index 9c821b3042..efbb5a8dbb 100644
--- a/tests/handlers/test_typing.py
+++ b/tests/handlers/test_typing.py
@@ -200,7 +200,8 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase):
],
)
- @override_config({"send_federation": True})
+ # Enable federation sending on the main process.
+ @override_config({"federation_sender_instances": None})
def test_started_typing_remote_send(self) -> None:
self.room_members = [U_APPLE, U_ONION]
@@ -305,7 +306,8 @@ class TypingNotificationsTestCase(unittest.HomeserverTestCase):
self.assertEqual(events[0], [])
self.assertEqual(events[1], 0)
- @override_config({"send_federation": True})
+ # Enable federation sending on the main process.
+ @override_config({"federation_sender_instances": None})
def test_stopped_typing(self) -> None:
self.room_members = [U_APPLE, U_BANANA, U_ONION]
diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py
index 9e39cd97e5..75fc5a17a4 100644
--- a/tests/handlers/test_user_directory.py
+++ b/tests/handlers/test_user_directory.py
@@ -56,7 +56,8 @@ class UserDirectoryTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
config = self.default_config()
- config["update_user_directory"] = True
+ # Re-enables updating the user directory, as that function is needed below.
+ config["update_user_directory_from_worker"] = None
self.appservice = ApplicationService(
token="i_am_an_app_service",
@@ -1045,7 +1046,9 @@ class TestUserDirSearchDisabled(unittest.HomeserverTestCase):
def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
config = self.default_config()
- config["update_user_directory"] = True
+ # Re-enables updating the user directory, as that function is needed below. It
+        # will be forcibly disabled later.
+ config["update_user_directory_from_worker"] = None
hs = self.setup_test_homeserver(config=config)
self.config = hs.config
diff --git a/tests/module_api/test_api.py b/tests/module_api/test_api.py
index 058ca57e55..b0f3f4374d 100644
--- a/tests/module_api/test_api.py
+++ b/tests/module_api/test_api.py
@@ -336,7 +336,8 @@ class ModuleApiTestCase(HomeserverTestCase):
# Test sending local online presence to users from the main process
_test_sending_local_online_presence_to_local_user(self, test_with_workers=False)
- @override_config({"send_federation": True})
+ # Enable federation sending on the main process.
+ @override_config({"federation_sender_instances": None})
def test_send_local_online_presence_to_federation(self):
"""Tests that send_local_presence_to_users sends local online presence to remote users."""
# Create a user who will send presence updates
diff --git a/tests/push/test_bulk_push_rule_evaluator.py b/tests/push/test_bulk_push_rule_evaluator.py
index 594e7937a8..1cd453248e 100644
--- a/tests/push/test_bulk_push_rule_evaluator.py
+++ b/tests/push/test_bulk_push_rule_evaluator.py
@@ -6,10 +6,11 @@ from synapse.rest import admin
from synapse.rest.client import login, register, room
from synapse.types import create_requester
-from tests import unittest
+from tests.test_utils import simple_async_mock
+from tests.unittest import HomeserverTestCase, override_config
-class TestBulkPushRuleEvaluator(unittest.HomeserverTestCase):
+class TestBulkPushRuleEvaluator(HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
@@ -72,3 +73,43 @@ class TestBulkPushRuleEvaluator(unittest.HomeserverTestCase):
bulk_evaluator = BulkPushRuleEvaluator(self.hs)
# should not raise
self.get_success(bulk_evaluator.action_for_events_by_user([(event, context)]))
+
+ @override_config({"push": {"enabled": False}})
+ def test_action_for_event_by_user_disabled_by_config(self) -> None:
+ """Ensure that push rules are not calculated when disabled in the config"""
+ # Create a new user and room.
+ alice = self.register_user("alice", "pass")
+ token = self.login(alice, "pass")
+
+ room_id = self.helper.create_room_as(
+ alice, room_version=RoomVersions.V9.identifier, tok=token
+ )
+
+        # Get hold of the event creation handler and a requester for Alice, which
+        # we need in order to build the test event below. (This test does not need
+        # to alter any power levels.)
+ event_creation_handler = self.hs.get_event_creation_handler()
+ requester = create_requester(alice)
+
+        # Create a new message event; with push disabled, evaluating it below
+        # should not calculate any push actions.
+ event, context = self.get_success(
+ event_creation_handler.create_event(
+ requester,
+ {
+ "type": "m.room.message",
+ "room_id": room_id,
+ "content": {
+ "msgtype": "m.text",
+ "body": "helo",
+ },
+ "sender": alice,
+ },
+ )
+ )
+
+ bulk_evaluator = BulkPushRuleEvaluator(self.hs)
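+        # Mock out the work of calculating push actions so we can assert that it
+        # is skipped when push is disabled.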
+ bulk_evaluator._action_for_event_by_user = simple_async_mock() # type: ignore[assignment]
+ # should not raise
+ self.get_success(bulk_evaluator.action_for_events_by_user([(event, context)]))
+ bulk_evaluator._action_for_event_by_user.assert_not_called()
diff --git a/tests/push/test_email.py b/tests/push/test_email.py
index fd14568f55..57b2f0536e 100644
--- a/tests/push/test_email.py
+++ b/tests/push/test_email.py
@@ -66,7 +66,6 @@ class EmailPusherTests(HomeserverTestCase):
"riot_base_url": None,
}
config["public_baseurl"] = "http://aaa"
- config["start_pushers"] = True
hs = self.setup_test_homeserver(config=config)
diff --git a/tests/push/test_http.py b/tests/push/test_http.py
index b383b8401f..afaafe79aa 100644
--- a/tests/push/test_http.py
+++ b/tests/push/test_http.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Any, Dict, List, Optional, Tuple
+from typing import List, Optional, Tuple
from unittest.mock import Mock
from twisted.internet.defer import Deferred
@@ -41,11 +41,6 @@ class HTTPPusherTests(HomeserverTestCase):
user_id = True
hijack_auth = False
- def default_config(self) -> Dict[str, Any]:
- config = super().default_config()
- config["start_pushers"] = True
- return config
-
def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
self.push_attempts: List[Tuple[Deferred, str, dict]] = []
diff --git a/tests/replication/_base.py b/tests/replication/_base.py
index 3029a16dda..6a7174b333 100644
--- a/tests/replication/_base.py
+++ b/tests/replication/_base.py
@@ -307,7 +307,7 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
stream to the master HS.
Args:
- worker_app: Type of worker, e.g. `synapse.app.federation_sender`.
+ worker_app: Type of worker, e.g. `synapse.app.generic_worker`.
extra_config: Any extra config to use for this instances.
**kwargs: Options that get passed to `self.setup_test_homeserver`,
useful to e.g. pass some mocks for things like `federation_http_client`
diff --git a/tests/replication/tcp/streams/test_federation.py b/tests/replication/tcp/streams/test_federation.py
index ffec06a0d6..bcb82c9c80 100644
--- a/tests/replication/tcp/streams/test_federation.py
+++ b/tests/replication/tcp/streams/test_federation.py
@@ -22,9 +22,8 @@ class FederationStreamTestCase(BaseStreamTestCase):
def _get_worker_hs_config(self) -> dict:
# enable federation sending on the worker
config = super()._get_worker_hs_config()
- # TODO: make it so we don't need both of these
- config["send_federation"] = False
- config["worker_app"] = "synapse.app.federation_sender"
+ config["worker_name"] = "federation_sender1"
+ config["federation_sender_instances"] = ["federation_sender1"]
return config
def test_catchup(self):
diff --git a/tests/replication/test_auth.py b/tests/replication/test_auth.py
index 43a16bb141..5d7a89e0c7 100644
--- a/tests/replication/test_auth.py
+++ b/tests/replication/test_auth.py
@@ -38,7 +38,7 @@ class WorkerAuthenticationTestCase(BaseMultiWorkerStreamTestCase):
def _get_worker_hs_config(self) -> dict:
config = self.default_config()
- config["worker_app"] = "synapse.app.client_reader"
+ config["worker_app"] = "synapse.app.generic_worker"
config["worker_replication_host"] = "testserv"
config["worker_replication_http_port"] = "8765"
@@ -53,7 +53,7 @@ class WorkerAuthenticationTestCase(BaseMultiWorkerStreamTestCase):
4. Return the final request.
"""
- worker_hs = self.make_worker_hs("synapse.app.client_reader")
+ worker_hs = self.make_worker_hs("synapse.app.generic_worker")
site = self._hs_to_site[worker_hs]
channel_1 = make_request(
diff --git a/tests/replication/test_client_reader_shard.py b/tests/replication/test_client_reader_shard.py
index 995097d72c..eb5b376534 100644
--- a/tests/replication/test_client_reader_shard.py
+++ b/tests/replication/test_client_reader_shard.py
@@ -22,20 +22,20 @@ logger = logging.getLogger(__name__)
class ClientReaderTestCase(BaseMultiWorkerStreamTestCase):
- """Test using one or more client readers for registration."""
+ """Test using one or more generic workers for registration."""
servlets = [register.register_servlets]
def _get_worker_hs_config(self) -> dict:
config = self.default_config()
- config["worker_app"] = "synapse.app.client_reader"
+ config["worker_app"] = "synapse.app.generic_worker"
config["worker_replication_host"] = "testserv"
config["worker_replication_http_port"] = "8765"
return config
def test_register_single_worker(self):
- """Test that registration works when using a single client reader worker."""
- worker_hs = self.make_worker_hs("synapse.app.client_reader")
+ """Test that registration works when using a single generic worker."""
+ worker_hs = self.make_worker_hs("synapse.app.generic_worker")
site = self._hs_to_site[worker_hs]
channel_1 = make_request(
@@ -64,9 +64,9 @@ class ClientReaderTestCase(BaseMultiWorkerStreamTestCase):
self.assertEqual(channel_2.json_body["user_id"], "@user:test")
def test_register_multi_worker(self):
- """Test that registration works when using multiple client reader workers."""
- worker_hs_1 = self.make_worker_hs("synapse.app.client_reader")
- worker_hs_2 = self.make_worker_hs("synapse.app.client_reader")
+ """Test that registration works when using multiple generic workers."""
+ worker_hs_1 = self.make_worker_hs("synapse.app.generic_worker")
+ worker_hs_2 = self.make_worker_hs("synapse.app.generic_worker")
site_1 = self._hs_to_site[worker_hs_1]
channel_1 = make_request(
diff --git a/tests/replication/test_federation_ack.py b/tests/replication/test_federation_ack.py
index 26b8bd512a..63b1dd40b5 100644
--- a/tests/replication/test_federation_ack.py
+++ b/tests/replication/test_federation_ack.py
@@ -25,8 +25,9 @@ from tests.unittest import HomeserverTestCase
class FederationAckTestCase(HomeserverTestCase):
def default_config(self) -> dict:
config = super().default_config()
- config["worker_app"] = "synapse.app.federation_sender"
- config["send_federation"] = False
+ config["worker_app"] = "synapse.app.generic_worker"
+ config["worker_name"] = "federation_sender1"
+ config["federation_sender_instances"] = ["federation_sender1"]
return config
def make_homeserver(self, reactor, clock):
diff --git a/tests/replication/test_federation_sender_shard.py b/tests/replication/test_federation_sender_shard.py
index 6104a55aa1..c28073b8f7 100644
--- a/tests/replication/test_federation_sender_shard.py
+++ b/tests/replication/test_federation_sender_shard.py
@@ -27,17 +27,19 @@ logger = logging.getLogger(__name__)
class FederationSenderTestCase(BaseMultiWorkerStreamTestCase):
+ """
+ Various tests for federation sending on workers.
+
+    Federation sending is disabled by default; it is enabled in each test by
+    updating 'federation_sender_instances'.
+ """
+
servlets = [
login.register_servlets,
register_servlets_for_client_rest_resource,
room.register_servlets,
]
- def default_config(self):
- conf = super().default_config()
- conf["send_federation"] = False
- return conf
-
def test_send_event_single_sender(self):
"""Test that using a single federation sender worker correctly sends a
new event.
@@ -46,8 +48,11 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase):
mock_client.put_json.return_value = make_awaitable({})
self.make_worker_hs(
- "synapse.app.federation_sender",
- {"send_federation": False},
+ "synapse.app.generic_worker",
+ {
+ "worker_name": "federation_sender1",
+ "federation_sender_instances": ["federation_sender1"],
+ },
federation_http_client=mock_client,
)
@@ -73,11 +78,13 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase):
mock_client1 = Mock(spec=["put_json"])
mock_client1.put_json.return_value = make_awaitable({})
self.make_worker_hs(
- "synapse.app.federation_sender",
+ "synapse.app.generic_worker",
{
- "send_federation": True,
- "worker_name": "sender1",
- "federation_sender_instances": ["sender1", "sender2"],
+ "worker_name": "federation_sender1",
+ "federation_sender_instances": [
+ "federation_sender1",
+ "federation_sender2",
+ ],
},
federation_http_client=mock_client1,
)
@@ -85,11 +92,13 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase):
mock_client2 = Mock(spec=["put_json"])
mock_client2.put_json.return_value = make_awaitable({})
self.make_worker_hs(
- "synapse.app.federation_sender",
+ "synapse.app.generic_worker",
{
- "send_federation": True,
- "worker_name": "sender2",
- "federation_sender_instances": ["sender1", "sender2"],
+ "worker_name": "federation_sender2",
+ "federation_sender_instances": [
+ "federation_sender1",
+ "federation_sender2",
+ ],
},
federation_http_client=mock_client2,
)
@@ -136,11 +145,13 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase):
mock_client1 = Mock(spec=["put_json"])
mock_client1.put_json.return_value = make_awaitable({})
self.make_worker_hs(
- "synapse.app.federation_sender",
+ "synapse.app.generic_worker",
{
- "send_federation": True,
- "worker_name": "sender1",
- "federation_sender_instances": ["sender1", "sender2"],
+ "worker_name": "federation_sender1",
+ "federation_sender_instances": [
+ "federation_sender1",
+ "federation_sender2",
+ ],
},
federation_http_client=mock_client1,
)
@@ -148,11 +159,13 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase):
mock_client2 = Mock(spec=["put_json"])
mock_client2.put_json.return_value = make_awaitable({})
self.make_worker_hs(
- "synapse.app.federation_sender",
+ "synapse.app.generic_worker",
{
- "send_federation": True,
- "worker_name": "sender2",
- "federation_sender_instances": ["sender1", "sender2"],
+ "worker_name": "federation_sender2",
+ "federation_sender_instances": [
+ "federation_sender1",
+ "federation_sender2",
+ ],
},
federation_http_client=mock_client2,
)
diff --git a/tests/replication/test_pusher_shard.py b/tests/replication/test_pusher_shard.py
index 59fea93e49..ca18ad6553 100644
--- a/tests/replication/test_pusher_shard.py
+++ b/tests/replication/test_pusher_shard.py
@@ -38,11 +38,6 @@ class PusherShardTestCase(BaseMultiWorkerStreamTestCase):
self.other_user_id = self.register_user("otheruser", "pass")
self.other_access_token = self.login("otheruser", "pass")
- def default_config(self):
- conf = super().default_config()
- conf["start_pushers"] = False
- return conf
-
def _create_pusher_and_send_msg(self, localpart):
# Create a user that will get push notifications
user_id = self.register_user(localpart, "pass")
@@ -92,8 +87,8 @@ class PusherShardTestCase(BaseMultiWorkerStreamTestCase):
)
self.make_worker_hs(
- "synapse.app.pusher",
- {"start_pushers": False},
+ "synapse.app.generic_worker",
+ {"worker_name": "pusher1", "pusher_instances": ["pusher1"]},
proxied_blacklisted_http_client=http_client_mock,
)
@@ -122,9 +117,8 @@ class PusherShardTestCase(BaseMultiWorkerStreamTestCase):
)
self.make_worker_hs(
- "synapse.app.pusher",
+ "synapse.app.generic_worker",
{
- "start_pushers": True,
"worker_name": "pusher1",
"pusher_instances": ["pusher1", "pusher2"],
},
@@ -137,9 +131,8 @@ class PusherShardTestCase(BaseMultiWorkerStreamTestCase):
)
self.make_worker_hs(
- "synapse.app.pusher",
+ "synapse.app.generic_worker",
{
- "start_pushers": True,
"worker_name": "pusher2",
"pusher_instances": ["pusher1", "pusher2"],
},
diff --git a/tests/rest/key/v2/test_remote_key_resource.py b/tests/rest/key/v2/test_remote_key_resource.py
index 7f1fba1086..2bb6e27d94 100644
--- a/tests/rest/key/v2/test_remote_key_resource.py
+++ b/tests/rest/key/v2/test_remote_key_resource.py
@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import urllib.parse
from io import BytesIO, StringIO
from typing import Any, Dict, Optional, Union
from unittest.mock import Mock
@@ -65,9 +64,7 @@ class BaseRemoteKeyResourceTestCase(unittest.HomeserverTestCase):
self.assertTrue(ignore_backoff)
self.assertEqual(destination, server_name)
key_id = "%s:%s" % (signing_key.alg, signing_key.version)
- self.assertEqual(
- path, "/_matrix/key/v2/server/%s" % (urllib.parse.quote(key_id),)
- )
+ self.assertEqual(path, "/_matrix/key/v2/server")
response = {
"server_name": server_name,
diff --git a/tests/storage/test_event_push_actions.py b/tests/storage/test_event_push_actions.py
index ee48920f84..5fa8bd2d98 100644
--- a/tests/storage/test_event_push_actions.py
+++ b/tests/storage/test_event_push_actions.py
@@ -156,7 +156,7 @@ class EventPushActionsStoreTestCase(HomeserverTestCase):
last_event_id: str
- def _assert_counts(noitf_count: int, highlight_count: int) -> None:
+ def _assert_counts(notif_count: int, highlight_count: int) -> None:
counts = self.get_success(
self.store.db_pool.runInteraction(
"get-unread-counts",
@@ -168,13 +168,22 @@ class EventPushActionsStoreTestCase(HomeserverTestCase):
self.assertEqual(
counts.main_timeline,
NotifCounts(
- notify_count=noitf_count,
+ notify_count=notif_count,
unread_count=0,
highlight_count=highlight_count,
),
)
self.assertEqual(counts.threads, {})
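+            # Also check the unread counts aggregated by room.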
+ aggregate_counts = self.get_success(
+ self.store.db_pool.runInteraction(
+ "get-aggregate-unread-counts",
+ self.store._get_unread_counts_by_room_for_user_txn,
+ user_id,
+ )
+ )
+ self.assertEqual(aggregate_counts[room_id], notif_count)
+
def _create_event(highlight: bool = False) -> str:
result = self.helper.send_event(
room_id,
@@ -283,7 +292,7 @@ class EventPushActionsStoreTestCase(HomeserverTestCase):
last_event_id: str
def _assert_counts(
- noitf_count: int,
+ notif_count: int,
highlight_count: int,
thread_notif_count: int,
thread_highlight_count: int,
@@ -299,7 +308,7 @@ class EventPushActionsStoreTestCase(HomeserverTestCase):
self.assertEqual(
counts.main_timeline,
NotifCounts(
- notify_count=noitf_count,
+ notify_count=notif_count,
unread_count=0,
highlight_count=highlight_count,
),
@@ -318,6 +327,17 @@ class EventPushActionsStoreTestCase(HomeserverTestCase):
else:
self.assertEqual(counts.threads, {})
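+            # Also check the unread counts aggregated by room.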
+ aggregate_counts = self.get_success(
+ self.store.db_pool.runInteraction(
+ "get-aggregate-unread-counts",
+ self.store._get_unread_counts_by_room_for_user_txn,
+ user_id,
+ )
+ )
+ self.assertEqual(
+ aggregate_counts[room_id], notif_count + thread_notif_count
+ )
+
def _create_event(
highlight: bool = False, thread_id: Optional[str] = None
) -> str:
@@ -454,7 +474,7 @@ class EventPushActionsStoreTestCase(HomeserverTestCase):
last_event_id: str
def _assert_counts(
- noitf_count: int,
+ notif_count: int,
highlight_count: int,
thread_notif_count: int,
thread_highlight_count: int,
@@ -470,7 +490,7 @@ class EventPushActionsStoreTestCase(HomeserverTestCase):
self.assertEqual(
counts.main_timeline,
NotifCounts(
- notify_count=noitf_count,
+ notify_count=notif_count,
unread_count=0,
highlight_count=highlight_count,
),
@@ -489,6 +509,17 @@ class EventPushActionsStoreTestCase(HomeserverTestCase):
else:
self.assertEqual(counts.threads, {})
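+            # Also check the unread counts aggregated by room.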
+ aggregate_counts = self.get_success(
+ self.store.db_pool.runInteraction(
+ "get-aggregate-unread-counts",
+ self.store._get_unread_counts_by_room_for_user_txn,
+ user_id,
+ )
+ )
+ self.assertEqual(
+ aggregate_counts[room_id], notif_count + thread_notif_count
+ )
+
def _create_event(
highlight: bool = False, thread_id: Optional[str] = None
) -> str:
@@ -646,7 +677,7 @@ class EventPushActionsStoreTestCase(HomeserverTestCase):
)
return result["event_id"]
- def _assert_counts(noitf_count: int, thread_notif_count: int) -> None:
+ def _assert_counts(notif_count: int, thread_notif_count: int) -> None:
counts = self.get_success(
self.store.db_pool.runInteraction(
"get-unread-counts",
@@ -658,7 +689,7 @@ class EventPushActionsStoreTestCase(HomeserverTestCase):
self.assertEqual(
counts.main_timeline,
NotifCounts(
- notify_count=noitf_count, unread_count=0, highlight_count=0
+ notify_count=notif_count, unread_count=0, highlight_count=0
),
)
if thread_notif_count:
diff --git a/tests/util/test_async_helpers.py b/tests/util/test_async_helpers.py
index 9d5010bf92..91cac9822a 100644
--- a/tests/util/test_async_helpers.py
+++ b/tests/util/test_async_helpers.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import traceback
+from typing import Generator, List, NoReturn, Optional
from parameterized import parameterized_class
@@ -41,8 +42,8 @@ from tests.unittest import TestCase
class ObservableDeferredTest(TestCase):
- def test_succeed(self):
- origin_d = Deferred()
+ def test_succeed(self) -> None:
+ origin_d: "Deferred[int]" = Deferred()
observable = ObservableDeferred(origin_d)
observer1 = observable.observe()
@@ -52,16 +53,18 @@ class ObservableDeferredTest(TestCase):
self.assertFalse(observer2.called)
# check the first observer is called first
- def check_called_first(res):
+ def check_called_first(res: int) -> int:
self.assertFalse(observer2.called)
return res
observer1.addBoth(check_called_first)
# store the results
- results = [None, None]
+ results: List[Optional[ObservableDeferred[int]]] = [None, None]
- def check_val(res, idx):
+ def check_val(
+ res: ObservableDeferred[int], idx: int
+ ) -> ObservableDeferred[int]:
results[idx] = res
return res
@@ -72,8 +75,8 @@ class ObservableDeferredTest(TestCase):
self.assertEqual(results[0], 123, "observer 1 callback result")
self.assertEqual(results[1], 123, "observer 2 callback result")
- def test_failure(self):
- origin_d = Deferred()
+ def test_failure(self) -> None:
+ origin_d: Deferred = Deferred()
observable = ObservableDeferred(origin_d, consumeErrors=True)
observer1 = observable.observe()
@@ -83,16 +86,16 @@ class ObservableDeferredTest(TestCase):
self.assertFalse(observer2.called)
# check the first observer is called first
- def check_called_first(res):
+ def check_called_first(res: int) -> int:
self.assertFalse(observer2.called)
return res
observer1.addBoth(check_called_first)
# store the results
- results = [None, None]
+ results: List[Optional[ObservableDeferred[str]]] = [None, None]
- def check_val(res, idx):
+ def check_val(res: ObservableDeferred[str], idx: int) -> None:
results[idx] = res
return None
@@ -103,10 +106,12 @@ class ObservableDeferredTest(TestCase):
raise Exception("gah!")
except Exception as e:
origin_d.errback(e)
+ assert results[0] is not None
self.assertEqual(str(results[0].value), "gah!", "observer 1 errback result")
+ assert results[1] is not None
self.assertEqual(str(results[1].value), "gah!", "observer 2 errback result")
- def test_cancellation(self):
+ def test_cancellation(self) -> None:
"""Test that cancelling an observer does not affect other observers."""
origin_d: "Deferred[int]" = Deferred()
observable = ObservableDeferred(origin_d, consumeErrors=True)
@@ -136,37 +141,38 @@ class ObservableDeferredTest(TestCase):
class TimeoutDeferredTest(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.clock = Clock()
- def test_times_out(self):
+ def test_times_out(self) -> None:
"""Basic test case that checks that the original deferred is cancelled and that
the timing-out deferred is errbacked
"""
- cancelled = [False]
+ cancelled = False
- def canceller(_d):
- cancelled[0] = True
+ def canceller(_d: Deferred) -> None:
+ nonlocal cancelled
+ cancelled = True
- non_completing_d = Deferred(canceller)
+ non_completing_d: Deferred = Deferred(canceller)
timing_out_d = timeout_deferred(non_completing_d, 1.0, self.clock)
self.assertNoResult(timing_out_d)
- self.assertFalse(cancelled[0], "deferred was cancelled prematurely")
+ self.assertFalse(cancelled, "deferred was cancelled prematurely")
self.clock.pump((1.0,))
- self.assertTrue(cancelled[0], "deferred was not cancelled by timeout")
+ self.assertTrue(cancelled, "deferred was not cancelled by timeout")
self.failureResultOf(timing_out_d, defer.TimeoutError)
- def test_times_out_when_canceller_throws(self):
+ def test_times_out_when_canceller_throws(self) -> None:
"""Test that we have successfully worked around
https://twistedmatrix.com/trac/ticket/9534"""
- def canceller(_d):
+ def canceller(_d: Deferred) -> None:
raise Exception("can't cancel this deferred")
- non_completing_d = Deferred(canceller)
+ non_completing_d: Deferred = Deferred(canceller)
timing_out_d = timeout_deferred(non_completing_d, 1.0, self.clock)
self.assertNoResult(timing_out_d)
@@ -175,22 +181,24 @@ class TimeoutDeferredTest(TestCase):
self.failureResultOf(timing_out_d, defer.TimeoutError)
- def test_logcontext_is_preserved_on_cancellation(self):
- blocking_was_cancelled = [False]
+ def test_logcontext_is_preserved_on_cancellation(self) -> None:
+ blocking_was_cancelled = False
@defer.inlineCallbacks
- def blocking():
- non_completing_d = Deferred()
+ def blocking() -> Generator["Deferred[object]", object, None]:
+ nonlocal blocking_was_cancelled
+
+ non_completing_d: Deferred = Deferred()
with PreserveLoggingContext():
try:
yield non_completing_d
except CancelledError:
- blocking_was_cancelled[0] = True
+ blocking_was_cancelled = True
raise
with LoggingContext("one") as context_one:
# the errbacks should be run in the test logcontext
- def errback(res, deferred_name):
+ def errback(res: Failure, deferred_name: str) -> Failure:
self.assertIs(
current_context(),
context_one,
@@ -209,7 +217,7 @@ class TimeoutDeferredTest(TestCase):
self.clock.pump((1.0,))
self.assertTrue(
- blocking_was_cancelled[0], "non-completing deferred was not cancelled"
+ blocking_was_cancelled, "non-completing deferred was not cancelled"
)
self.failureResultOf(timing_out_d, defer.TimeoutError)
self.assertIs(current_context(), context_one)
@@ -220,13 +228,13 @@ class _TestException(Exception):
class ConcurrentlyExecuteTest(TestCase):
- def test_limits_runners(self):
+ def test_limits_runners(self) -> None:
"""If we have more tasks than runners, we should get the limit of runners"""
started = 0
waiters = []
processed = []
- async def callback(v):
+ async def callback(v: int) -> None:
# when we first enter, bump the start count
nonlocal started
started += 1
@@ -235,7 +243,7 @@ class ConcurrentlyExecuteTest(TestCase):
processed.append(v)
# wait for the goahead before returning
- d2 = Deferred()
+ d2: "Deferred[int]" = Deferred()
waiters.append(d2)
await d2
@@ -265,16 +273,16 @@ class ConcurrentlyExecuteTest(TestCase):
self.assertCountEqual(processed, [1, 2, 3, 4, 5])
self.successResultOf(d2)
- def test_preserves_stacktraces(self):
+ def test_preserves_stacktraces(self) -> None:
"""Test that the stacktrace from an exception thrown in the callback is preserved"""
- d1 = Deferred()
+ d1: "Deferred[int]" = Deferred()
- async def callback(v):
+ async def callback(v: int) -> None:
# alas, this doesn't work at all without an await here
await d1
raise _TestException("bah")
- async def caller():
+ async def caller() -> None:
try:
await concurrently_execute(callback, [1], 2)
except _TestException as e:
@@ -290,17 +298,17 @@ class ConcurrentlyExecuteTest(TestCase):
d1.callback(0)
self.successResultOf(d2)
- def test_preserves_stacktraces_on_preformed_failure(self):
+ def test_preserves_stacktraces_on_preformed_failure(self) -> None:
"""Test that the stacktrace on a Failure returned by the callback is preserved"""
- d1 = Deferred()
+ d1: "Deferred[int]" = Deferred()
f = Failure(_TestException("bah"))
- async def callback(v):
+ async def callback(v: int) -> None:
# alas, this doesn't work at all without an await here
await d1
await defer.fail(f)
- async def caller():
+ async def caller() -> None:
try:
await concurrently_execute(callback, [1], 2)
except _TestException as e:
@@ -336,7 +344,7 @@ class CancellationWrapperTests(TestCase):
else:
raise ValueError(f"Unsupported wrapper type: {self.wrapper}")
- def test_succeed(self):
+ def test_succeed(self) -> None:
"""Test that the new `Deferred` receives the result."""
deferred: "Deferred[str]" = Deferred()
wrapper_deferred = self.wrap_deferred(deferred)
@@ -346,7 +354,7 @@ class CancellationWrapperTests(TestCase):
self.assertTrue(wrapper_deferred.called)
self.assertEqual("success", self.successResultOf(wrapper_deferred))
- def test_failure(self):
+ def test_failure(self) -> None:
"""Test that the new `Deferred` receives the `Failure`."""
deferred: "Deferred[str]" = Deferred()
wrapper_deferred = self.wrap_deferred(deferred)
@@ -361,7 +369,7 @@ class CancellationWrapperTests(TestCase):
class StopCancellationTests(TestCase):
"""Tests for the `stop_cancellation` function."""
- def test_cancellation(self):
+ def test_cancellation(self) -> None:
"""Test that cancellation of the new `Deferred` leaves the original running."""
deferred: "Deferred[str]" = Deferred()
wrapper_deferred = stop_cancellation(deferred)
@@ -384,7 +392,7 @@ class StopCancellationTests(TestCase):
class DelayCancellationTests(TestCase):
"""Tests for the `delay_cancellation` function."""
- def test_deferred_cancellation(self):
+ def test_deferred_cancellation(self) -> None:
"""Test that cancellation of the new `Deferred` waits for the original."""
deferred: "Deferred[str]" = Deferred()
wrapper_deferred = delay_cancellation(deferred)
@@ -405,12 +413,12 @@ class DelayCancellationTests(TestCase):
# Now that the original `Deferred` has failed, we should get a `CancelledError`.
self.failureResultOf(wrapper_deferred, CancelledError)
- def test_coroutine_cancellation(self):
+ def test_coroutine_cancellation(self) -> None:
"""Test that cancellation of the new `Deferred` waits for the original."""
blocking_deferred: "Deferred[None]" = Deferred()
completion_deferred: "Deferred[None]" = Deferred()
- async def task():
+ async def task() -> NoReturn:
await blocking_deferred
completion_deferred.callback(None)
# Raise an exception. Twisted should consume it, otherwise unwanted
@@ -434,7 +442,7 @@ class DelayCancellationTests(TestCase):
# Now that the original coroutine has failed, we should get a `CancelledError`.
self.failureResultOf(wrapper_deferred, CancelledError)
- def test_suppresses_second_cancellation(self):
+ def test_suppresses_second_cancellation(self) -> None:
"""Test that a second cancellation is suppressed.
Identical to `test_cancellation` except the new `Deferred` is cancelled twice.
@@ -459,7 +467,7 @@ class DelayCancellationTests(TestCase):
# Now that the original `Deferred` has failed, we should get a `CancelledError`.
self.failureResultOf(wrapper_deferred, CancelledError)
- def test_propagates_cancelled_error(self):
+ def test_propagates_cancelled_error(self) -> None:
"""Test that a `CancelledError` from the original `Deferred` gets propagated."""
deferred: "Deferred[str]" = Deferred()
wrapper_deferred = delay_cancellation(deferred)
@@ -472,14 +480,14 @@ class DelayCancellationTests(TestCase):
self.assertTrue(wrapper_deferred.called)
self.assertIs(cancelled_error, self.failureResultOf(wrapper_deferred).value)
- def test_preserves_logcontext(self):
+ def test_preserves_logcontext(self) -> None:
"""Test that logging contexts are preserved."""
blocking_d: "Deferred[None]" = Deferred()
- async def inner():
+ async def inner() -> None:
await make_deferred_yieldable(blocking_d)
- async def outer():
+ async def outer() -> None:
with LoggingContext("c") as c:
try:
await delay_cancellation(inner())
@@ -503,7 +511,7 @@ class DelayCancellationTests(TestCase):
class AwakenableSleeperTests(TestCase):
"Tests AwakenableSleeper"
- def test_sleep(self):
+ def test_sleep(self) -> None:
reactor, _ = get_clock()
sleeper = AwakenableSleeper(reactor)
@@ -518,7 +526,7 @@ class AwakenableSleeperTests(TestCase):
reactor.advance(0.6)
self.assertTrue(d.called)
- def test_explicit_wake(self):
+ def test_explicit_wake(self) -> None:
reactor, _ = get_clock()
sleeper = AwakenableSleeper(reactor)
@@ -535,7 +543,7 @@ class AwakenableSleeperTests(TestCase):
reactor.advance(0.6)
- def test_multiple_sleepers_timeout(self):
+ def test_multiple_sleepers_timeout(self) -> None:
reactor, _ = get_clock()
sleeper = AwakenableSleeper(reactor)
@@ -555,7 +563,7 @@ class AwakenableSleeperTests(TestCase):
reactor.advance(0.6)
self.assertTrue(d2.called)
- def test_multiple_sleepers_wake(self):
+ def test_multiple_sleepers_wake(self) -> None:
reactor, _ = get_clock()
sleeper = AwakenableSleeper(reactor)
diff --git a/tests/util/test_batching_queue.py b/tests/util/test_batching_queue.py
index 07be57d72c..94ef91f645 100644
--- a/tests/util/test_batching_queue.py
+++ b/tests/util/test_batching_queue.py
@@ -11,6 +11,10 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+from typing import List, Tuple
+
+from prometheus_client import Gauge
+
from twisted.internet import defer
from synapse.logging.context import make_deferred_yieldable
@@ -26,7 +30,7 @@ from tests.unittest import TestCase
class BatchingQueueTestCase(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.clock, hs_clock = get_clock()
# We ensure that we remove any existing metrics for "test_queue".
@@ -37,25 +41,27 @@ class BatchingQueueTestCase(TestCase):
except KeyError:
pass
- self._pending_calls = []
- self.queue = BatchingQueue("test_queue", hs_clock, self._process_queue)
+ self._pending_calls: List[Tuple[List[str], defer.Deferred]] = []
+ self.queue: BatchingQueue[str, str] = BatchingQueue(
+ "test_queue", hs_clock, self._process_queue
+ )
- async def _process_queue(self, values):
- d = defer.Deferred()
+ async def _process_queue(self, values: List[str]) -> str:
+ d: "defer.Deferred[str]" = defer.Deferred()
self._pending_calls.append((values, d))
return await make_deferred_yieldable(d)
- def _get_sample_with_name(self, metric, name) -> int:
+ def _get_sample_with_name(self, metric: Gauge, name: str) -> float:
"""For a prometheus metric get the value of the sample that has a
matching "name" label.
"""
- for sample in metric.collect()[0].samples:
+ for sample in next(iter(metric.collect())).samples:
if sample.labels.get("name") == name:
return sample.value
self.fail("Found no matching sample")
- def _assert_metrics(self, queued, keys, in_flight):
+ def _assert_metrics(self, queued: int, keys: int, in_flight: int) -> None:
"""Assert that the metrics are correct"""
sample = self._get_sample_with_name(number_queued, self.queue._name)
@@ -75,7 +81,7 @@ class BatchingQueueTestCase(TestCase):
"number_in_flight",
)
- def test_simple(self):
+ def test_simple(self) -> None:
"""Tests the basic case of calling `add_to_queue` once and having
`_process_queue` return.
"""
@@ -106,7 +112,7 @@ class BatchingQueueTestCase(TestCase):
self._assert_metrics(queued=0, keys=0, in_flight=0)
- def test_batching(self):
+ def test_batching(self) -> None:
"""Test that multiple calls at the same time get batched up into one
call to `_process_queue`.
"""
@@ -134,7 +140,7 @@ class BatchingQueueTestCase(TestCase):
self.assertEqual(self.successResultOf(queue_d2), "bar")
self._assert_metrics(queued=0, keys=0, in_flight=0)
- def test_queuing(self):
+ def test_queuing(self) -> None:
"""Test that we queue up requests while a `_process_queue` is being
called.
"""
@@ -184,7 +190,7 @@ class BatchingQueueTestCase(TestCase):
self.assertEqual(self.successResultOf(queue_d3), "bar2")
self._assert_metrics(queued=0, keys=0, in_flight=0)
- def test_different_keys(self):
+ def test_different_keys(self) -> None:
"""Test that calls to different keys get processed in parallel."""
self.assertFalse(self._pending_calls)
diff --git a/tests/util/test_check_dependencies.py b/tests/util/test_check_dependencies.py
index 6913de24b9..aa20fe6780 100644
--- a/tests/util/test_check_dependencies.py
+++ b/tests/util/test_check_dependencies.py
@@ -1,5 +1,20 @@
+# Copyright 2022 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
from contextlib import contextmanager
-from typing import Generator, Optional
+from os import PathLike
+from typing import Generator, Optional, Union
from unittest.mock import patch
from synapse.util.check_dependencies import (
@@ -12,17 +27,17 @@ from tests.unittest import TestCase
class DummyDistribution(metadata.Distribution):
- def __init__(self, version: object):
+ def __init__(self, version: str):
self._version = version
@property
- def version(self):
+ def version(self) -> str:
return self._version
- def locate_file(self, path):
+ def locate_file(self, path: Union[str, PathLike]) -> PathLike:
raise NotImplementedError()
- def read_text(self, filename):
+ def read_text(self, filename: str) -> None:
raise NotImplementedError()
@@ -30,7 +45,7 @@ old = DummyDistribution("0.1.2")
old_release_candidate = DummyDistribution("0.1.2rc3")
new = DummyDistribution("1.2.3")
new_release_candidate = DummyDistribution("1.2.3rc4")
-distribution_with_no_version = DummyDistribution(None)
+distribution_with_no_version = DummyDistribution(None) # type: ignore[arg-type]
# could probably use stdlib TestCase --- no need for twisted here
@@ -45,7 +60,7 @@ class TestDependencyChecker(TestCase):
If `distribution = None`, we pretend that the package is not installed.
"""
- def mock_distribution(name: str):
+ def mock_distribution(name: str) -> DummyDistribution:
if distribution is None:
raise metadata.PackageNotFoundError
else:
diff --git a/tests/util/test_dict_cache.py b/tests/util/test_dict_cache.py
index e8b6246ab5..acb251bfea 100644
--- a/tests/util/test_dict_cache.py
+++ b/tests/util/test_dict_cache.py
@@ -19,10 +19,12 @@ from tests import unittest
class DictCacheTestCase(unittest.TestCase):
- def setUp(self):
- self.cache = DictionaryCache("foobar", max_entries=10)
+ def setUp(self) -> None:
+ self.cache: DictionaryCache[str, str, str] = DictionaryCache(
+ "foobar", max_entries=10
+ )
- def test_simple_cache_hit_full(self):
+ def test_simple_cache_hit_full(self) -> None:
key = "test_simple_cache_hit_full"
v = self.cache.get(key)
@@ -37,7 +39,7 @@ class DictCacheTestCase(unittest.TestCase):
c = self.cache.get(key)
self.assertEqual(test_value, c.value)
- def test_simple_cache_hit_partial(self):
+ def test_simple_cache_hit_partial(self) -> None:
key = "test_simple_cache_hit_partial"
seq = self.cache.sequence
@@ -47,7 +49,7 @@ class DictCacheTestCase(unittest.TestCase):
c = self.cache.get(key, ["test"])
self.assertEqual(test_value, c.value)
- def test_simple_cache_miss_partial(self):
+ def test_simple_cache_miss_partial(self) -> None:
key = "test_simple_cache_miss_partial"
seq = self.cache.sequence
@@ -57,7 +59,7 @@ class DictCacheTestCase(unittest.TestCase):
c = self.cache.get(key, ["test2"])
self.assertEqual({}, c.value)
- def test_simple_cache_hit_miss_partial(self):
+ def test_simple_cache_hit_miss_partial(self) -> None:
key = "test_simple_cache_hit_miss_partial"
seq = self.cache.sequence
@@ -71,7 +73,7 @@ class DictCacheTestCase(unittest.TestCase):
c = self.cache.get(key, ["test2"])
self.assertEqual({"test2": "test_simple_cache_hit_miss_partial2"}, c.value)
- def test_multi_insert(self):
+ def test_multi_insert(self) -> None:
key = "test_simple_cache_hit_miss_partial"
seq = self.cache.sequence
@@ -92,7 +94,7 @@ class DictCacheTestCase(unittest.TestCase):
)
self.assertEqual(c.full, False)
- def test_invalidation(self):
+ def test_invalidation(self) -> None:
"""Test that the partial dict and full dicts get invalidated
separately.
"""
@@ -106,7 +108,7 @@ class DictCacheTestCase(unittest.TestCase):
# entry for "a" warm.
for i in range(20):
self.cache.get(key, ["a"])
- self.cache.update(seq, f"key{i}", {1: 2})
+ self.cache.update(seq, f"key{i}", {"1": "2"})
# We should have evicted the full dict...
r = self.cache.get(key)
diff --git a/tests/util/test_expiring_cache.py b/tests/util/test_expiring_cache.py
index 7f60aae5ba..9cf920daf8 100644
--- a/tests/util/test_expiring_cache.py
+++ b/tests/util/test_expiring_cache.py
@@ -12,7 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from typing import List, cast
+from synapse.util import Clock
from synapse.util.caches.expiringcache import ExpiringCache
from tests.utils import MockClock
@@ -21,17 +23,21 @@ from .. import unittest
class ExpiringCacheTestCase(unittest.HomeserverTestCase):
- def test_get_set(self):
+ def test_get_set(self) -> None:
clock = MockClock()
- cache = ExpiringCache("test", clock, max_len=1)
+ cache: ExpiringCache[str, str] = ExpiringCache(
+ "test", cast(Clock, clock), max_len=1
+ )
cache["key"] = "value"
self.assertEqual(cache.get("key"), "value")
self.assertEqual(cache["key"], "value")
- def test_eviction(self):
+ def test_eviction(self) -> None:
clock = MockClock()
- cache = ExpiringCache("test", clock, max_len=2)
+ cache: ExpiringCache[str, str] = ExpiringCache(
+ "test", cast(Clock, clock), max_len=2
+ )
cache["key"] = "value"
cache["key2"] = "value2"
@@ -43,9 +49,11 @@ class ExpiringCacheTestCase(unittest.HomeserverTestCase):
self.assertEqual(cache.get("key2"), "value2")
self.assertEqual(cache.get("key3"), "value3")
- def test_iterable_eviction(self):
+ def test_iterable_eviction(self) -> None:
clock = MockClock()
- cache = ExpiringCache("test", clock, max_len=5, iterable=True)
+ cache: ExpiringCache[str, List[int]] = ExpiringCache(
+ "test", cast(Clock, clock), max_len=5, iterable=True
+ )
cache["key"] = [1]
cache["key2"] = [2, 3]
@@ -61,9 +69,11 @@ class ExpiringCacheTestCase(unittest.HomeserverTestCase):
self.assertEqual(cache.get("key3"), [4, 5])
self.assertEqual(cache.get("key4"), [6, 7])
- def test_time_eviction(self):
+ def test_time_eviction(self) -> None:
clock = MockClock()
- cache = ExpiringCache("test", clock, expiry_ms=1000)
+ cache: ExpiringCache[str, int] = ExpiringCache(
+ "test", cast(Clock, clock), expiry_ms=1000
+ )
cache["key"] = 1
clock.advance_time(0.5)
diff --git a/tests/util/test_file_consumer.py b/tests/util/test_file_consumer.py
index 3bb4695405..4f3c983c15 100644
--- a/tests/util/test_file_consumer.py
+++ b/tests/util/test_file_consumer.py
@@ -12,22 +12,28 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
import threading
-from io import StringIO
+from io import BytesIO
+from typing import BinaryIO, Generator, Optional, cast
from unittest.mock import NonCallableMock
-from twisted.internet import defer, reactor
+from zope.interface import implementer
+
+from twisted.internet import defer, reactor as _reactor
+from twisted.internet.interfaces import IPullProducer
+from synapse.types import ISynapseReactor
from synapse.util.file_consumer import BackgroundFileConsumer
from tests import unittest
+reactor = cast(ISynapseReactor, _reactor)
+
class FileConsumerTests(unittest.TestCase):
@defer.inlineCallbacks
- def test_pull_consumer(self):
- string_file = StringIO()
+ def test_pull_consumer(self) -> Generator["defer.Deferred[object]", object, None]:
+ string_file = BytesIO()
consumer = BackgroundFileConsumer(string_file, reactor=reactor)
try:
@@ -35,55 +41,57 @@ class FileConsumerTests(unittest.TestCase):
yield producer.register_with_consumer(consumer)
- yield producer.write_and_wait("Foo")
+ yield producer.write_and_wait(b"Foo")
- self.assertEqual(string_file.getvalue(), "Foo")
+ self.assertEqual(string_file.getvalue(), b"Foo")
- yield producer.write_and_wait("Bar")
+ yield producer.write_and_wait(b"Bar")
- self.assertEqual(string_file.getvalue(), "FooBar")
+ self.assertEqual(string_file.getvalue(), b"FooBar")
finally:
consumer.unregisterProducer()
- yield consumer.wait()
+ yield consumer.wait() # type: ignore[misc]
self.assertTrue(string_file.closed)
@defer.inlineCallbacks
- def test_push_consumer(self):
- string_file = BlockingStringWrite()
- consumer = BackgroundFileConsumer(string_file, reactor=reactor)
+ def test_push_consumer(self) -> Generator["defer.Deferred[object]", object, None]:
+ string_file = BlockingBytesWrite()
+ consumer = BackgroundFileConsumer(cast(BinaryIO, string_file), reactor=reactor)
try:
producer = NonCallableMock(spec_set=[])
consumer.registerProducer(producer, True)
- consumer.write("Foo")
- yield string_file.wait_for_n_writes(1)
+ consumer.write(b"Foo")
+ yield string_file.wait_for_n_writes(1) # type: ignore[misc]
- self.assertEqual(string_file.buffer, "Foo")
+ self.assertEqual(string_file.buffer, b"Foo")
- consumer.write("Bar")
- yield string_file.wait_for_n_writes(2)
+ consumer.write(b"Bar")
+ yield string_file.wait_for_n_writes(2) # type: ignore[misc]
- self.assertEqual(string_file.buffer, "FooBar")
+ self.assertEqual(string_file.buffer, b"FooBar")
finally:
consumer.unregisterProducer()
- yield consumer.wait()
+ yield consumer.wait() # type: ignore[misc]
self.assertTrue(string_file.closed)
@defer.inlineCallbacks
- def test_push_producer_feedback(self):
- string_file = BlockingStringWrite()
- consumer = BackgroundFileConsumer(string_file, reactor=reactor)
+ def test_push_producer_feedback(
+ self,
+ ) -> Generator["defer.Deferred[object]", object, None]:
+ string_file = BlockingBytesWrite()
+ consumer = BackgroundFileConsumer(cast(BinaryIO, string_file), reactor=reactor)
try:
producer = NonCallableMock(spec_set=["pauseProducing", "resumeProducing"])
- resume_deferred = defer.Deferred()
+ resume_deferred: defer.Deferred = defer.Deferred()
producer.resumeProducing.side_effect = lambda: resume_deferred.callback(
None
)
@@ -93,65 +101,72 @@ class FileConsumerTests(unittest.TestCase):
number_writes = 0
with string_file.write_lock:
for _ in range(consumer._PAUSE_ON_QUEUE_SIZE):
- consumer.write("Foo")
+ consumer.write(b"Foo")
number_writes += 1
producer.pauseProducing.assert_called_once()
- yield string_file.wait_for_n_writes(number_writes)
+ yield string_file.wait_for_n_writes(number_writes) # type: ignore[misc]
yield resume_deferred
producer.resumeProducing.assert_called_once()
finally:
consumer.unregisterProducer()
- yield consumer.wait()
+ yield consumer.wait() # type: ignore[misc]
self.assertTrue(string_file.closed)
+@implementer(IPullProducer)
class DummyPullProducer:
- def __init__(self):
- self.consumer = None
- self.deferred = defer.Deferred()
+ def __init__(self) -> None:
+ self.consumer: Optional[BackgroundFileConsumer] = None
+ self.deferred: "defer.Deferred[object]" = defer.Deferred()
- def resumeProducing(self):
+ def resumeProducing(self) -> None:
d = self.deferred
self.deferred = defer.Deferred()
d.callback(None)
- def write_and_wait(self, bytes):
+ def stopProducing(self) -> None:
+ raise RuntimeError("Unexpected call")
+
+ def write_and_wait(self, write_bytes: bytes) -> "defer.Deferred[object]":
+ assert self.consumer is not None
d = self.deferred
- self.consumer.write(bytes)
+ self.consumer.write(write_bytes)
return d
- def register_with_consumer(self, consumer):
+ def register_with_consumer(
+ self, consumer: BackgroundFileConsumer
+ ) -> "defer.Deferred[object]":
d = self.deferred
self.consumer = consumer
self.consumer.registerProducer(self, False)
return d
-class BlockingStringWrite:
- def __init__(self):
- self.buffer = ""
+class BlockingBytesWrite:
+ def __init__(self) -> None:
+ self.buffer = b""
self.closed = False
self.write_lock = threading.Lock()
- self._notify_write_deferred = None
+ self._notify_write_deferred: Optional[defer.Deferred] = None
self._number_of_writes = 0
- def write(self, bytes):
+ def write(self, write_bytes: bytes) -> None:
with self.write_lock:
- self.buffer += bytes
+ self.buffer += write_bytes
self._number_of_writes += 1
reactor.callFromThread(self._notify_write)
- def close(self):
+ def close(self) -> None:
self.closed = True
- def _notify_write(self):
+ def _notify_write(self) -> None:
"Called by write to indicate a write happened"
with self.write_lock:
if not self._notify_write_deferred:
@@ -161,7 +176,9 @@ class BlockingStringWrite:
d.callback(None)
@defer.inlineCallbacks
- def wait_for_n_writes(self, n):
+ def wait_for_n_writes(
+ self, n: int
+ ) -> Generator["defer.Deferred[object]", object, None]:
"Wait for n writes to have happened"
while True:
with self.write_lock:
diff --git a/tests/util/test_itertools.py b/tests/util/test_itertools.py
index 3c0ddd4f18..406c16cdcf 100644
--- a/tests/util/test_itertools.py
+++ b/tests/util/test_itertools.py
@@ -19,7 +19,7 @@ from tests.unittest import TestCase
class ChunkSeqTests(TestCase):
- def test_short_seq(self):
+ def test_short_seq(self) -> None:
parts = chunk_seq("123", 8)
self.assertEqual(
@@ -27,7 +27,7 @@ class ChunkSeqTests(TestCase):
["123"],
)
- def test_long_seq(self):
+ def test_long_seq(self) -> None:
parts = chunk_seq("abcdefghijklmnop", 8)
self.assertEqual(
@@ -35,7 +35,7 @@ class ChunkSeqTests(TestCase):
["abcdefgh", "ijklmnop"],
)
- def test_uneven_parts(self):
+ def test_uneven_parts(self) -> None:
parts = chunk_seq("abcdefghijklmnop", 5)
self.assertEqual(
@@ -43,7 +43,7 @@ class ChunkSeqTests(TestCase):
["abcde", "fghij", "klmno", "p"],
)
- def test_empty_input(self):
+ def test_empty_input(self) -> None:
parts: Iterable[Sequence] = chunk_seq([], 5)
self.assertEqual(
@@ -53,13 +53,13 @@ class ChunkSeqTests(TestCase):
class SortTopologically(TestCase):
- def test_empty(self):
+ def test_empty(self) -> None:
"Test that an empty graph works correctly"
graph: Dict[int, List[int]] = {}
self.assertEqual(list(sorted_topologically([], graph)), [])
- def test_handle_empty_graph(self):
+ def test_handle_empty_graph(self) -> None:
"Test that a graph where a node doesn't have an entry is treated as empty"
graph: Dict[int, List[int]] = {}
@@ -67,7 +67,7 @@ class SortTopologically(TestCase):
# For disconnected nodes the output is simply sorted.
self.assertEqual(list(sorted_topologically([1, 2], graph)), [1, 2])
- def test_disconnected(self):
+ def test_disconnected(self) -> None:
"Test that a graph with no edges work"
graph: Dict[int, List[int]] = {1: [], 2: []}
@@ -75,20 +75,20 @@ class SortTopologically(TestCase):
# For disconnected nodes the output is simply sorted.
self.assertEqual(list(sorted_topologically([1, 2], graph)), [1, 2])
- def test_linear(self):
+ def test_linear(self) -> None:
"Test that a simple `4 -> 3 -> 2 -> 1` graph works"
graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]}
self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])
- def test_subset(self):
+ def test_subset(self) -> None:
"Test that only sorting a subset of the graph works"
graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]}
self.assertEqual(list(sorted_topologically([4, 3], graph)), [3, 4])
- def test_fork(self):
+ def test_fork(self) -> None:
"Test that a forked graph works"
graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]}
@@ -96,13 +96,13 @@ class SortTopologically(TestCase):
# always get the same one.
self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])
- def test_duplicates(self):
+ def test_duplicates(self) -> None:
"Test that a graph with duplicate edges work"
graph: Dict[int, List[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]}
self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])
- def test_multiple_paths(self):
+ def test_multiple_paths(self) -> None:
"Test that a graph with multiple paths between two nodes work"
graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]}
diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py
index 2ad321e184..d64c162e1d 100644
--- a/tests/util/test_logcontext.py
+++ b/tests/util/test_logcontext.py
@@ -1,5 +1,21 @@
+# Copyright 2014-2022 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Callable, Generator, cast
+
import twisted.python.failure
-from twisted.internet import defer, reactor
+from twisted.internet import defer, reactor as _reactor
from synapse.logging.context import (
SENTINEL_CONTEXT,
@@ -10,25 +26,30 @@ from synapse.logging.context import (
nested_logging_context,
run_in_background,
)
+from synapse.types import ISynapseReactor
from synapse.util import Clock
from .. import unittest
+reactor = cast(ISynapseReactor, _reactor)
+
class LoggingContextTestCase(unittest.TestCase):
- def _check_test_key(self, value):
- self.assertEqual(current_context().name, value)
+ def _check_test_key(self, value: str) -> None:
+ context = current_context()
+ assert isinstance(context, LoggingContext)
+ self.assertEqual(context.name, value)
- def test_with_context(self):
+ def test_with_context(self) -> None:
with LoggingContext("test"):
self._check_test_key("test")
@defer.inlineCallbacks
- def test_sleep(self):
+ def test_sleep(self) -> Generator["defer.Deferred[object]", object, None]:
clock = Clock(reactor)
@defer.inlineCallbacks
- def competing_callback():
+ def competing_callback() -> Generator["defer.Deferred[object]", object, None]:
with LoggingContext("competing"):
yield clock.sleep(0)
self._check_test_key("competing")
@@ -39,17 +60,18 @@ class LoggingContextTestCase(unittest.TestCase):
yield clock.sleep(0)
self._check_test_key("one")
- def _test_run_in_background(self, function):
+ def _test_run_in_background(self, function: Callable[[], object]) -> defer.Deferred:
sentinel_context = current_context()
- callback_completed = [False]
+ callback_completed = False
with LoggingContext("one"):
# fire off function, but don't wait on it.
d2 = run_in_background(function)
- def cb(res):
- callback_completed[0] = True
+ def cb(res: object) -> object:
+ nonlocal callback_completed
+ callback_completed = True
return res
d2.addCallback(cb)
@@ -60,8 +82,8 @@ class LoggingContextTestCase(unittest.TestCase):
# the logcontext is left in a sane state.
d2 = defer.Deferred()
- def check_logcontext():
- if not callback_completed[0]:
+ def check_logcontext() -> None:
+ if not callback_completed:
reactor.callLater(0.01, check_logcontext)
return
@@ -78,31 +100,31 @@ class LoggingContextTestCase(unittest.TestCase):
# test is done once d2 finishes
return d2
- def test_run_in_background_with_blocking_fn(self):
+ def test_run_in_background_with_blocking_fn(self) -> defer.Deferred:
@defer.inlineCallbacks
- def blocking_function():
+ def blocking_function() -> Generator["defer.Deferred[object]", object, None]:
yield Clock(reactor).sleep(0)
return self._test_run_in_background(blocking_function)
- def test_run_in_background_with_non_blocking_fn(self):
+ def test_run_in_background_with_non_blocking_fn(self) -> defer.Deferred:
@defer.inlineCallbacks
- def nonblocking_function():
+ def nonblocking_function() -> Generator["defer.Deferred[object]", object, None]:
with PreserveLoggingContext():
yield defer.succeed(None)
return self._test_run_in_background(nonblocking_function)
- def test_run_in_background_with_chained_deferred(self):
+ def test_run_in_background_with_chained_deferred(self) -> defer.Deferred:
# a function which returns a deferred which looks like it has been
# called, but is actually paused
- def testfunc():
+ def testfunc() -> defer.Deferred:
return make_deferred_yieldable(_chained_deferred_function())
return self._test_run_in_background(testfunc)
- def test_run_in_background_with_coroutine(self):
- async def testfunc():
+ def test_run_in_background_with_coroutine(self) -> defer.Deferred:
+ async def testfunc() -> None:
self._check_test_key("one")
d = Clock(reactor).sleep(0)
self.assertIs(current_context(), SENTINEL_CONTEXT)
@@ -111,18 +133,20 @@ class LoggingContextTestCase(unittest.TestCase):
return self._test_run_in_background(testfunc)
- def test_run_in_background_with_nonblocking_coroutine(self):
- async def testfunc():
+ def test_run_in_background_with_nonblocking_coroutine(self) -> defer.Deferred:
+ async def testfunc() -> None:
self._check_test_key("one")
return self._test_run_in_background(testfunc)
@defer.inlineCallbacks
- def test_make_deferred_yieldable(self):
+ def test_make_deferred_yieldable(
+ self,
+ ) -> Generator["defer.Deferred[object]", object, None]:
# a function which returns an incomplete deferred, but doesn't follow
# the synapse rules.
- def blocking_function():
- d = defer.Deferred()
+ def blocking_function() -> defer.Deferred:
+ d: defer.Deferred = defer.Deferred()
reactor.callLater(0, d.callback, None)
return d
@@ -139,7 +163,9 @@ class LoggingContextTestCase(unittest.TestCase):
self._check_test_key("one")
@defer.inlineCallbacks
- def test_make_deferred_yieldable_with_chained_deferreds(self):
+ def test_make_deferred_yieldable_with_chained_deferreds(
+ self,
+ ) -> Generator["defer.Deferred[object]", object, None]:
sentinel_context = current_context()
with LoggingContext("one"):
@@ -152,7 +178,7 @@ class LoggingContextTestCase(unittest.TestCase):
# now it should be restored
self._check_test_key("one")
- def test_nested_logging_context(self):
+ def test_nested_logging_context(self) -> None:
with LoggingContext("foo"):
nested_context = nested_logging_context(suffix="bar")
self.assertEqual(nested_context.name, "foo-bar")
@@ -161,11 +187,11 @@ class LoggingContextTestCase(unittest.TestCase):
# a function which returns a deferred which has been "called", but
# which had a function which returned another incomplete deferred on
# its callback list, so won't yet call any other new callbacks.
-def _chained_deferred_function():
+def _chained_deferred_function() -> defer.Deferred:
d = defer.succeed(None)
- def cb(res):
- d2 = defer.Deferred()
+ def cb(res: object) -> defer.Deferred:
+ d2: defer.Deferred = defer.Deferred()
reactor.callLater(0, d2.callback, res)
return d2
diff --git a/tests/util/test_logformatter.py b/tests/util/test_logformatter.py
index a2e08281e6..0dee69a6fe 100644
--- a/tests/util/test_logformatter.py
+++ b/tests/util/test_logformatter.py
@@ -23,7 +23,7 @@ class TestException(Exception):
class LogFormatterTestCase(unittest.TestCase):
- def test_formatter(self):
+ def test_formatter(self) -> None:
formatter = LogFormatter()
try:
diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py
index 67173a4f5b..1fc5a473f0 100644
--- a/tests/util/test_lrucache.py
+++ b/tests/util/test_lrucache.py
@@ -13,10 +13,11 @@
# limitations under the License.
-from typing import List
+from typing import List, Tuple
from unittest.mock import Mock, patch
from synapse.metrics.jemalloc import JemallocStats
+from synapse.types import JsonDict
from synapse.util.caches.lrucache import LruCache, setup_expire_lru_cache_entries
from synapse.util.caches.treecache import TreeCache
@@ -25,14 +26,14 @@ from tests.unittest import override_config
class LruCacheTestCase(unittest.HomeserverTestCase):
- def test_get_set(self):
- cache = LruCache(1)
+ def test_get_set(self) -> None:
+ cache: LruCache[str, str] = LruCache(1)
cache["key"] = "value"
self.assertEqual(cache.get("key"), "value")
self.assertEqual(cache["key"], "value")
- def test_eviction(self):
- cache = LruCache(2)
+ def test_eviction(self) -> None:
+ cache: LruCache[int, int] = LruCache(2)
cache[1] = 1
cache[2] = 2
@@ -45,8 +46,8 @@ class LruCacheTestCase(unittest.HomeserverTestCase):
self.assertEqual(cache.get(2), 2)
self.assertEqual(cache.get(3), 3)
- def test_setdefault(self):
- cache = LruCache(1)
+ def test_setdefault(self) -> None:
+ cache: LruCache[str, int] = LruCache(1)
self.assertEqual(cache.setdefault("key", 1), 1)
self.assertEqual(cache.get("key"), 1)
self.assertEqual(cache.setdefault("key", 2), 1)
@@ -54,14 +55,15 @@ class LruCacheTestCase(unittest.HomeserverTestCase):
cache["key"] = 2 # Make sure overriding works.
self.assertEqual(cache.get("key"), 2)
- def test_pop(self):
- cache = LruCache(1)
+ def test_pop(self) -> None:
+ cache: LruCache[str, int] = LruCache(1)
cache["key"] = 1
self.assertEqual(cache.pop("key"), 1)
self.assertEqual(cache.pop("key"), None)
- def test_del_multi(self):
- cache = LruCache(4, cache_type=TreeCache)
+ def test_del_multi(self) -> None:
+        # The type here isn't quite correct, as the LruCache type hints don't handle TreeCache well.
+ cache: LruCache[Tuple[str, str], str] = LruCache(4, cache_type=TreeCache)
cache[("animal", "cat")] = "mew"
cache[("animal", "dog")] = "woof"
cache[("vehicles", "car")] = "vroom"
@@ -71,7 +73,7 @@ class LruCacheTestCase(unittest.HomeserverTestCase):
self.assertEqual(cache.get(("animal", "cat")), "mew")
self.assertEqual(cache.get(("vehicles", "car")), "vroom")
- cache.del_multi(("animal",))
+ cache.del_multi(("animal",)) # type: ignore[arg-type]
self.assertEqual(len(cache), 2)
self.assertEqual(cache.get(("animal", "cat")), None)
self.assertEqual(cache.get(("animal", "dog")), None)
@@ -79,22 +81,22 @@ class LruCacheTestCase(unittest.HomeserverTestCase):
self.assertEqual(cache.get(("vehicles", "train")), "chuff")
# Man from del_multi say "Yes".
- def test_clear(self):
- cache = LruCache(1)
+ def test_clear(self) -> None:
+ cache: LruCache[str, int] = LruCache(1)
cache["key"] = 1
cache.clear()
self.assertEqual(len(cache), 0)
@override_config({"caches": {"per_cache_factors": {"mycache": 10}}})
- def test_special_size(self):
- cache = LruCache(10, "mycache")
+ def test_special_size(self) -> None:
+ cache: LruCache = LruCache(10, "mycache")
self.assertEqual(cache.max_size, 100)
class LruCacheCallbacksTestCase(unittest.HomeserverTestCase):
- def test_get(self):
+ def test_get(self) -> None:
m = Mock()
- cache = LruCache(1)
+ cache: LruCache[str, str] = LruCache(1)
cache.set("key", "value")
self.assertFalse(m.called)
@@ -111,9 +113,9 @@ class LruCacheCallbacksTestCase(unittest.HomeserverTestCase):
cache.set("key", "value")
self.assertEqual(m.call_count, 1)
- def test_multi_get(self):
+ def test_multi_get(self) -> None:
m = Mock()
- cache = LruCache(1)
+ cache: LruCache[str, str] = LruCache(1)
cache.set("key", "value")
self.assertFalse(m.called)
@@ -130,9 +132,9 @@ class LruCacheCallbacksTestCase(unittest.HomeserverTestCase):
cache.set("key", "value")
self.assertEqual(m.call_count, 1)
- def test_set(self):
+ def test_set(self) -> None:
m = Mock()
- cache = LruCache(1)
+ cache: LruCache[str, str] = LruCache(1)
cache.set("key", "value", callbacks=[m])
self.assertFalse(m.called)
@@ -146,9 +148,9 @@ class LruCacheCallbacksTestCase(unittest.HomeserverTestCase):
cache.set("key", "value")
self.assertEqual(m.call_count, 1)
- def test_pop(self):
+ def test_pop(self) -> None:
m = Mock()
- cache = LruCache(1)
+ cache: LruCache[str, str] = LruCache(1)
cache.set("key", "value", callbacks=[m])
self.assertFalse(m.called)
@@ -162,12 +164,13 @@ class LruCacheCallbacksTestCase(unittest.HomeserverTestCase):
cache.pop("key")
self.assertEqual(m.call_count, 1)
- def test_del_multi(self):
+ def test_del_multi(self) -> None:
m1 = Mock()
m2 = Mock()
m3 = Mock()
m4 = Mock()
- cache = LruCache(4, cache_type=TreeCache)
+        # The type here isn't quite correct, as the LruCache type hints don't handle TreeCache well.
+ cache: LruCache[Tuple[str, str], str] = LruCache(4, cache_type=TreeCache)
cache.set(("a", "1"), "value", callbacks=[m1])
cache.set(("a", "2"), "value", callbacks=[m2])
@@ -179,17 +182,17 @@ class LruCacheCallbacksTestCase(unittest.HomeserverTestCase):
self.assertEqual(m3.call_count, 0)
self.assertEqual(m4.call_count, 0)
- cache.del_multi(("a",))
+ cache.del_multi(("a",)) # type: ignore[arg-type]
self.assertEqual(m1.call_count, 1)
self.assertEqual(m2.call_count, 1)
self.assertEqual(m3.call_count, 0)
self.assertEqual(m4.call_count, 0)
- def test_clear(self):
+ def test_clear(self) -> None:
m1 = Mock()
m2 = Mock()
- cache = LruCache(5)
+ cache: LruCache[str, str] = LruCache(5)
cache.set("key1", "value", callbacks=[m1])
cache.set("key2", "value", callbacks=[m2])
@@ -202,11 +205,11 @@ class LruCacheCallbacksTestCase(unittest.HomeserverTestCase):
self.assertEqual(m1.call_count, 1)
self.assertEqual(m2.call_count, 1)
- def test_eviction(self):
+ def test_eviction(self) -> None:
m1 = Mock(name="m1")
m2 = Mock(name="m2")
m3 = Mock(name="m3")
- cache = LruCache(2)
+ cache: LruCache[str, str] = LruCache(2)
cache.set("key1", "value", callbacks=[m1])
cache.set("key2", "value", callbacks=[m2])
@@ -241,8 +244,8 @@ class LruCacheCallbacksTestCase(unittest.HomeserverTestCase):
class LruCacheSizedTestCase(unittest.HomeserverTestCase):
- def test_evict(self):
- cache = LruCache(5, size_callback=len)
+ def test_evict(self) -> None:
+ cache: LruCache[str, List[int]] = LruCache(5, size_callback=len)
cache["key1"] = [0]
cache["key2"] = [1, 2]
cache["key3"] = [3]
@@ -269,6 +272,7 @@ class LruCacheSizedTestCase(unittest.HomeserverTestCase):
cache["key1"] = []
self.assertEqual(len(cache), 0)
+ assert isinstance(cache.cache, dict)
cache.cache["key1"].drop_from_cache()
self.assertIsNone(
cache.pop("key1"), "Cache entry should have been evicted but wasn't"
@@ -278,17 +282,17 @@ class LruCacheSizedTestCase(unittest.HomeserverTestCase):
class TimeEvictionTestCase(unittest.HomeserverTestCase):
"""Test that time based eviction works correctly."""
- def default_config(self):
+ def default_config(self) -> JsonDict:
config = super().default_config()
config.setdefault("caches", {})["expiry_time"] = "30m"
return config
- def test_evict(self):
+ def test_evict(self) -> None:
setup_expire_lru_cache_entries(self.hs)
- cache = LruCache(5, clock=self.hs.get_clock())
+ cache: LruCache[str, int] = LruCache(5, clock=self.hs.get_clock())
# Check that we evict entries we haven't accessed for 30 minutes.
cache["key1"] = 1
@@ -332,7 +336,7 @@ class MemoryEvictionTestCase(unittest.HomeserverTestCase):
}
)
@patch("synapse.util.caches.lrucache.get_jemalloc_stats")
- def test_evict_memory(self, jemalloc_interface) -> None:
+ def test_evict_memory(self, jemalloc_interface: Mock) -> None:
mock_jemalloc_class = Mock(spec=JemallocStats)
jemalloc_interface.return_value = mock_jemalloc_class
@@ -340,7 +344,7 @@ class MemoryEvictionTestCase(unittest.HomeserverTestCase):
mock_jemalloc_class.get_stat.return_value = 924288000
setup_expire_lru_cache_entries(self.hs)
- cache = LruCache(4, clock=self.hs.get_clock())
+ cache: LruCache[str, int] = LruCache(4, clock=self.hs.get_clock())
cache["key1"] = 1
cache["key2"] = 2
diff --git a/tests/util/test_macaroons.py b/tests/util/test_macaroons.py
index 40754a4711..f68377a05a 100644
--- a/tests/util/test_macaroons.py
+++ b/tests/util/test_macaroons.py
@@ -21,14 +21,14 @@ from tests.unittest import TestCase
class MacaroonGeneratorTestCase(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.reactor, hs_clock = get_clock()
self.macaroon_generator = MacaroonGenerator(hs_clock, "tesths", b"verysecret")
self.other_macaroon_generator = MacaroonGenerator(
hs_clock, "tesths", b"anothersecretkey"
)
- def test_guest_access_token(self):
+ def test_guest_access_token(self) -> None:
"""Test the generation and verification of guest access tokens"""
token = self.macaroon_generator.generate_guest_access_token("@user:tesths")
user_id = self.macaroon_generator.verify_guest_token(token)
@@ -47,7 +47,7 @@ class MacaroonGeneratorTestCase(TestCase):
with self.assertRaises(MacaroonVerificationFailedException):
self.macaroon_generator.verify_guest_token(token)
- def test_delete_pusher_token(self):
+ def test_delete_pusher_token(self) -> None:
"""Test the generation and verification of delete_pusher tokens"""
token = self.macaroon_generator.generate_delete_pusher_token(
"@user:tesths", "m.mail", "john@example.com"
@@ -84,7 +84,7 @@ class MacaroonGeneratorTestCase(TestCase):
)
self.assertEqual(user_id, "@user:tesths")
- def test_oidc_session_token(self):
+ def test_oidc_session_token(self) -> None:
"""Test the generation and verification of OIDC session cookies"""
state = "arandomstate"
session_data = OidcSessionData(
diff --git a/tests/util/test_ratelimitutils.py b/tests/util/test_ratelimitutils.py
index 89d8656634..5b327b390e 100644
--- a/tests/util/test_ratelimitutils.py
+++ b/tests/util/test_ratelimitutils.py
@@ -13,16 +13,19 @@
# limitations under the License.
from typing import Optional
+from twisted.internet.defer import Deferred
+
from synapse.config.homeserver import HomeServerConfig
+from synapse.config.ratelimiting import FederationRatelimitSettings
from synapse.util.ratelimitutils import FederationRateLimiter
-from tests.server import get_clock
+from tests.server import ThreadedMemoryReactorClock, get_clock
from tests.unittest import TestCase
from tests.utils import default_config
class FederationRateLimiterTestCase(TestCase):
- def test_ratelimit(self):
+ def test_ratelimit(self) -> None:
"""A simple test with the default values"""
reactor, clock = get_clock()
rc_config = build_rc_config()
@@ -32,7 +35,7 @@ class FederationRateLimiterTestCase(TestCase):
# shouldn't block
self.successResultOf(d1)
- def test_concurrent_limit(self):
+ def test_concurrent_limit(self) -> None:
"""Test what happens when we hit the concurrent limit"""
reactor, clock = get_clock()
rc_config = build_rc_config({"rc_federation": {"concurrent": 2}})
@@ -56,7 +59,7 @@ class FederationRateLimiterTestCase(TestCase):
cm2.__exit__(None, None, None)
self.successResultOf(d3)
- def test_sleep_limit(self):
+ def test_sleep_limit(self) -> None:
"""Test what happens when we hit the sleep limit"""
reactor, clock = get_clock()
rc_config = build_rc_config(
@@ -79,7 +82,7 @@ class FederationRateLimiterTestCase(TestCase):
self.assertAlmostEqual(sleep_time, 500, places=3)
-def _await_resolution(reactor, d):
+def _await_resolution(reactor: ThreadedMemoryReactorClock, d: Deferred) -> float:
"""advance the clock until the deferred completes.
Returns the number of milliseconds it took to complete.
@@ -90,7 +93,7 @@ def _await_resolution(reactor, d):
return (reactor.seconds() - start_time) * 1000
-def build_rc_config(settings: Optional[dict] = None):
+def build_rc_config(settings: Optional[dict] = None) -> FederationRatelimitSettings:
config_dict = default_config("test")
config_dict.update(settings or {})
config = HomeServerConfig()
diff --git a/tests/util/test_retryutils.py b/tests/util/test_retryutils.py
index 26cb71c640..9529ee53c8 100644
--- a/tests/util/test_retryutils.py
+++ b/tests/util/test_retryutils.py
@@ -22,7 +22,7 @@ from tests.unittest import HomeserverTestCase
class RetryLimiterTestCase(HomeserverTestCase):
- def test_new_destination(self):
+ def test_new_destination(self) -> None:
"""A happy-path case with a new destination and a successful operation"""
store = self.hs.get_datastores().main
limiter = self.get_success(get_retry_limiter("test_dest", self.clock, store))
@@ -36,7 +36,7 @@ class RetryLimiterTestCase(HomeserverTestCase):
new_timings = self.get_success(store.get_destination_retry_timings("test_dest"))
self.assertIsNone(new_timings)
- def test_limiter(self):
+ def test_limiter(self) -> None:
"""General test case which walks through the process of a failing request"""
store = self.hs.get_datastores().main
diff --git a/tests/util/test_rwlock.py b/tests/util/test_rwlock.py
index 5da04362a9..bc93de62eb 100644
--- a/tests/util/test_rwlock.py
+++ b/tests/util/test_rwlock.py
@@ -49,7 +49,7 @@ class ReadWriteLockTestCase(unittest.TestCase):
acquired_d: "Deferred[None]" = Deferred()
unblock_d: "Deferred[None]" = Deferred()
- async def reader_or_writer():
+ async def reader_or_writer() -> str:
async with read_or_write(key):
acquired_d.callback(None)
await unblock_d
@@ -134,7 +134,7 @@ class ReadWriteLockTestCase(unittest.TestCase):
d.called, msg="deferred %d was unexpectedly resolved" % (i + n)
)
- def test_rwlock(self):
+ def test_rwlock(self) -> None:
rwlock = ReadWriteLock()
key = "key"
@@ -197,7 +197,7 @@ class ReadWriteLockTestCase(unittest.TestCase):
_, acquired_d = self._start_nonblocking_reader(rwlock, key, "last reader")
self.assertTrue(acquired_d.called)
- def test_lock_handoff_to_nonblocking_writer(self):
+ def test_lock_handoff_to_nonblocking_writer(self) -> None:
"""Test a writer handing the lock to another writer that completes instantly."""
rwlock = ReadWriteLock()
key = "key"
@@ -216,7 +216,7 @@ class ReadWriteLockTestCase(unittest.TestCase):
d3, _ = self._start_nonblocking_writer(rwlock, key, "write 3 completed")
self.assertTrue(d3.called)
- def test_cancellation_while_holding_read_lock(self):
+ def test_cancellation_while_holding_read_lock(self) -> None:
"""Test cancellation while holding a read lock.
A waiting writer should be given the lock when the reader holding the lock is
@@ -242,7 +242,7 @@ class ReadWriteLockTestCase(unittest.TestCase):
)
self.assertEqual("write completed", self.successResultOf(writer_d))
- def test_cancellation_while_holding_write_lock(self):
+ def test_cancellation_while_holding_write_lock(self) -> None:
"""Test cancellation while holding a write lock.
A waiting reader should be given the lock when the writer holding the lock is
@@ -268,7 +268,7 @@ class ReadWriteLockTestCase(unittest.TestCase):
)
self.assertEqual("read completed", self.successResultOf(reader_d))
- def test_cancellation_while_waiting_for_read_lock(self):
+ def test_cancellation_while_waiting_for_read_lock(self) -> None:
"""Test cancellation while waiting for a read lock.
Tests that cancelling a waiting reader:
@@ -319,7 +319,7 @@ class ReadWriteLockTestCase(unittest.TestCase):
)
self.assertEqual("write 2 completed", self.successResultOf(writer2_d))
- def test_cancellation_while_waiting_for_write_lock(self):
+ def test_cancellation_while_waiting_for_write_lock(self) -> None:
"""Test cancellation while waiting for a write lock.
Tests that cancelling a waiting writer:
diff --git a/tests/util/test_stream_change_cache.py b/tests/util/test_stream_change_cache.py
index 9ed01f7e0c..0305741c99 100644
--- a/tests/util/test_stream_change_cache.py
+++ b/tests/util/test_stream_change_cache.py
@@ -8,7 +8,7 @@ class StreamChangeCacheTests(unittest.HomeserverTestCase):
Tests for StreamChangeCache.
"""
- def test_prefilled_cache(self):
+ def test_prefilled_cache(self) -> None:
"""
        Providing a prefilled cache to StreamChangeCache will result in a cache
        populated with the prefilled entries.
@@ -16,7 +16,7 @@ class StreamChangeCacheTests(unittest.HomeserverTestCase):
cache = StreamChangeCache("#test", 1, prefilled_cache={"user@foo.com": 2})
self.assertTrue(cache.has_entity_changed("user@foo.com", 1))
- def test_has_entity_changed(self):
+ def test_has_entity_changed(self) -> None:
"""
StreamChangeCache.entity_has_changed will mark entities as changed, and
has_entity_changed will observe the changed entities.
@@ -51,8 +51,10 @@ class StreamChangeCacheTests(unittest.HomeserverTestCase):
# return True, whether it's a known entity or not.
self.assertTrue(cache.has_entity_changed("user@foo.com", 0))
self.assertTrue(cache.has_entity_changed("not@here.website", 0))
+ self.assertTrue(cache.has_entity_changed("user@foo.com", 3))
+ self.assertTrue(cache.has_entity_changed("not@here.website", 3))
- def test_entity_has_changed_pops_off_start(self):
+ def test_entity_has_changed_pops_off_start(self) -> None:
"""
StreamChangeCache.entity_has_changed will respect the max size and
purge the oldest items upon reaching that max size.
@@ -65,15 +67,16 @@ class StreamChangeCacheTests(unittest.HomeserverTestCase):
# The cache is at the max size, 2
self.assertEqual(len(cache._cache), 2)
+ # The cache's earliest known position is 2.
+ self.assertEqual(cache._earliest_known_stream_pos, 2)
# The oldest item has been popped off
self.assertTrue("user@foo.com" not in cache._entity_to_key)
self.assertEqual(
- cache.get_all_entities_changed(2),
- ["bar@baz.net", "user@elsewhere.org"],
+ cache.get_all_entities_changed(3).entities, ["user@elsewhere.org"]
)
- self.assertIsNone(cache.get_all_entities_changed(1))
+ self.assertFalse(cache.get_all_entities_changed(2).hit)
# If we update an existing entity, it keeps the two existing entities
cache.entity_has_changed("bar@baz.net", 5)
@@ -81,12 +84,12 @@ class StreamChangeCacheTests(unittest.HomeserverTestCase):
{"bar@baz.net", "user@elsewhere.org"}, set(cache._entity_to_key)
)
self.assertEqual(
- cache.get_all_entities_changed(2),
+ cache.get_all_entities_changed(3).entities,
["user@elsewhere.org", "bar@baz.net"],
)
- self.assertIsNone(cache.get_all_entities_changed(1))
+ self.assertFalse(cache.get_all_entities_changed(2).hit)
- def test_get_all_entities_changed(self):
+ def test_get_all_entities_changed(self) -> None:
"""
StreamChangeCache.get_all_entities_changed will return all changed
entities since the given position. If the position is before the start
@@ -99,28 +102,17 @@ class StreamChangeCacheTests(unittest.HomeserverTestCase):
cache.entity_has_changed("anotheruser@foo.com", 3)
cache.entity_has_changed("user@elsewhere.org", 4)
- r = cache.get_all_entities_changed(1)
-
- # either of these are valid
- ok1 = [
- "user@foo.com",
- "bar@baz.net",
- "anotheruser@foo.com",
- "user@elsewhere.org",
- ]
- ok2 = [
- "user@foo.com",
- "anotheruser@foo.com",
- "bar@baz.net",
- "user@elsewhere.org",
- ]
- self.assertTrue(r == ok1 or r == ok2)
-
r = cache.get_all_entities_changed(2)
- self.assertTrue(r == ok1[1:] or r == ok2[1:])
- self.assertEqual(cache.get_all_entities_changed(3), ["user@elsewhere.org"])
- self.assertEqual(cache.get_all_entities_changed(0), None)
+        # Results are ordered by stream position, but two entities changed at the
+        # same position, so either of these orderings is valid.
+ ok1 = ["bar@baz.net", "anotheruser@foo.com", "user@elsewhere.org"]
+ ok2 = ["anotheruser@foo.com", "bar@baz.net", "user@elsewhere.org"]
+ self.assertTrue(r.entities == ok1 or r.entities == ok2)
+
+ self.assertEqual(
+ cache.get_all_entities_changed(3).entities, ["user@elsewhere.org"]
+ )
+ self.assertFalse(cache.get_all_entities_changed(1).hit)
        # ... later, things get more updates
cache.entity_has_changed("user@foo.com", 5)
@@ -140,9 +132,9 @@ class StreamChangeCacheTests(unittest.HomeserverTestCase):
"anotheruser@foo.com",
]
r = cache.get_all_entities_changed(3)
- self.assertTrue(r == ok1 or r == ok2)
+ self.assertTrue(r.entities == ok1 or r.entities == ok2)
- def test_has_any_entity_changed(self):
+ def test_has_any_entity_changed(self) -> None:
"""
StreamChangeCache.has_any_entity_changed will return True if any
entities have been changed since the provided stream position, and
@@ -168,7 +160,7 @@ class StreamChangeCacheTests(unittest.HomeserverTestCase):
self.assertFalse(cache.has_any_entity_changed(2))
self.assertFalse(cache.has_any_entity_changed(3))
- def test_get_entities_changed(self):
+ def test_get_entities_changed(self) -> None:
"""
StreamChangeCache.get_entities_changed will return the entities in the
given list that have changed since the provided stream ID. If the
@@ -228,7 +220,7 @@ class StreamChangeCacheTests(unittest.HomeserverTestCase):
{"bar@baz.net"},
)
- def test_max_pos(self):
+ def test_max_pos(self) -> None:
"""
StreamChangeCache.get_max_pos_of_last_change will return the most
recent point where the entity could have changed. If the entity is not
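
The assertions above reflect that `get_all_entities_changed` now returns a result object with `hit` and `entities` attributes rather than a bare list or `None`. A hedged sketch of how a caller might consume that result follows; the helper name and the import path are assumptions, and only the two attributes come from the diff itself.

from typing import Collection, Optional

from synapse.util.caches.stream_change_cache import StreamChangeCache

def entities_changed_since(
    cache: StreamChangeCache, stream_pos: int
) -> Optional[Collection[str]]:
    """Return entities changed after `stream_pos`, or None on a cache miss."""
    result = cache.get_all_entities_changed(stream_pos)
    if not result.hit:
        # The position falls outside the window the cache can answer for,
        # so the caller must assume any entity may have changed.
        return None
    return result.entities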
diff --git a/tests/util/test_stringutils.py b/tests/util/test_stringutils.py
index ad4dd7f007..f137e05191 100644
--- a/tests/util/test_stringutils.py
+++ b/tests/util/test_stringutils.py
@@ -19,7 +19,7 @@ from .. import unittest
class StringUtilsTestCase(unittest.TestCase):
- def test_client_secret_regex(self):
+ def test_client_secret_regex(self) -> None:
"""Ensure that client_secret does not contain illegal characters"""
good = [
"abcde12345",
@@ -46,7 +46,7 @@ class StringUtilsTestCase(unittest.TestCase):
with self.assertRaises(SynapseError):
assert_valid_client_secret(client_secret)
- def test_base62_encode(self):
+ def test_base62_encode(self) -> None:
self.assertEqual("0", base62_encode(0))
self.assertEqual("10", base62_encode(62))
self.assertEqual("1c", base62_encode(100))
diff --git a/tests/util/test_threepids.py b/tests/util/test_threepids.py
index d957b953bb..3b35b8e4ec 100644
--- a/tests/util/test_threepids.py
+++ b/tests/util/test_threepids.py
@@ -18,31 +18,31 @@ from tests.unittest import HomeserverTestCase
class CanonicaliseEmailTests(HomeserverTestCase):
- def test_no_at(self):
+ def test_no_at(self) -> None:
with self.assertRaises(ValueError):
canonicalise_email("address-without-at.bar")
- def test_two_at(self):
+ def test_two_at(self) -> None:
with self.assertRaises(ValueError):
canonicalise_email("foo@foo@test.bar")
- def test_bad_format(self):
+ def test_bad_format(self) -> None:
with self.assertRaises(ValueError):
canonicalise_email("user@bad.example.net@good.example.com")
- def test_valid_format(self):
+ def test_valid_format(self) -> None:
self.assertEqual(canonicalise_email("foo@test.bar"), "foo@test.bar")
- def test_domain_to_lower(self):
+ def test_domain_to_lower(self) -> None:
self.assertEqual(canonicalise_email("foo@TEST.BAR"), "foo@test.bar")
- def test_domain_with_umlaut(self):
+ def test_domain_with_umlaut(self) -> None:
self.assertEqual(canonicalise_email("foo@Öumlaut.com"), "foo@öumlaut.com")
- def test_address_casefold(self):
+ def test_address_casefold(self) -> None:
self.assertEqual(
canonicalise_email("Strauß@Example.com"), "strauss@example.com"
)
- def test_address_trim(self):
+ def test_address_trim(self) -> None:
self.assertEqual(canonicalise_email(" foo@test.bar "), "foo@test.bar")
diff --git a/tests/util/test_treecache.py b/tests/util/test_treecache.py
index 567cb18468..fe3b4dc6a4 100644
--- a/tests/util/test_treecache.py
+++ b/tests/util/test_treecache.py
@@ -19,7 +19,7 @@ from .. import unittest
class TreeCacheTestCase(unittest.TestCase):
- def test_get_set_onelevel(self):
+ def test_get_set_onelevel(self) -> None:
cache = TreeCache()
cache[("a",)] = "A"
cache[("b",)] = "B"
@@ -27,7 +27,7 @@ class TreeCacheTestCase(unittest.TestCase):
self.assertEqual(cache.get(("b",)), "B")
self.assertEqual(len(cache), 2)
- def test_pop_onelevel(self):
+ def test_pop_onelevel(self) -> None:
cache = TreeCache()
cache[("a",)] = "A"
cache[("b",)] = "B"
@@ -36,7 +36,7 @@ class TreeCacheTestCase(unittest.TestCase):
self.assertEqual(cache.get(("b",)), "B")
self.assertEqual(len(cache), 1)
- def test_get_set_twolevel(self):
+ def test_get_set_twolevel(self) -> None:
cache = TreeCache()
cache[("a", "a")] = "AA"
cache[("a", "b")] = "AB"
@@ -46,7 +46,7 @@ class TreeCacheTestCase(unittest.TestCase):
self.assertEqual(cache.get(("b", "a")), "BA")
self.assertEqual(len(cache), 3)
- def test_pop_twolevel(self):
+ def test_pop_twolevel(self) -> None:
cache = TreeCache()
cache[("a", "a")] = "AA"
cache[("a", "b")] = "AB"
@@ -58,7 +58,7 @@ class TreeCacheTestCase(unittest.TestCase):
self.assertEqual(cache.pop(("b", "a")), None)
self.assertEqual(len(cache), 1)
- def test_pop_mixedlevel(self):
+ def test_pop_mixedlevel(self) -> None:
cache = TreeCache()
cache[("a", "a")] = "AA"
cache[("a", "b")] = "AB"
@@ -72,14 +72,14 @@ class TreeCacheTestCase(unittest.TestCase):
self.assertEqual({"AA", "AB"}, set(iterate_tree_cache_entry(popped)))
- def test_clear(self):
+ def test_clear(self) -> None:
cache = TreeCache()
cache[("a",)] = "A"
cache[("b",)] = "B"
cache.clear()
self.assertEqual(len(cache), 0)
- def test_contains(self):
+ def test_contains(self) -> None:
cache = TreeCache()
cache[("a",)] = "A"
self.assertTrue(("a",) in cache)
diff --git a/tests/util/test_wheel_timer.py b/tests/util/test_wheel_timer.py
index 0d5039de04..c9d22b6d8c 100644
--- a/tests/util/test_wheel_timer.py
+++ b/tests/util/test_wheel_timer.py
@@ -18,8 +18,8 @@ from .. import unittest
class WheelTimerTestCase(unittest.TestCase):
- def test_single_insert_fetch(self):
- wheel = WheelTimer(bucket_size=5)
+ def test_single_insert_fetch(self) -> None:
+ wheel: WheelTimer[object] = WheelTimer(bucket_size=5)
obj = object()
wheel.insert(100, obj, 150)
@@ -32,8 +32,8 @@ class WheelTimerTestCase(unittest.TestCase):
self.assertListEqual(wheel.fetch(156), [obj])
self.assertListEqual(wheel.fetch(170), [])
- def test_multi_insert(self):
- wheel = WheelTimer(bucket_size=5)
+ def test_multi_insert(self) -> None:
+ wheel: WheelTimer[object] = WheelTimer(bucket_size=5)
obj1 = object()
obj2 = object()
@@ -50,15 +50,15 @@ class WheelTimerTestCase(unittest.TestCase):
self.assertListEqual(wheel.fetch(200), [obj3])
self.assertListEqual(wheel.fetch(210), [])
- def test_insert_past(self):
- wheel = WheelTimer(bucket_size=5)
+ def test_insert_past(self) -> None:
+ wheel: WheelTimer[object] = WheelTimer(bucket_size=5)
obj = object()
wheel.insert(100, obj, 50)
self.assertListEqual(wheel.fetch(120), [obj])
- def test_insert_past_multi(self):
- wheel = WheelTimer(bucket_size=5)
+ def test_insert_past_multi(self) -> None:
+ wheel: WheelTimer[object] = WheelTimer(bucket_size=5)
obj1 = object()
obj2 = object()
diff --git a/tests/utils.py b/tests/utils.py
index 045a8b5fa7..d76bf9716a 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -125,7 +125,8 @@ def default_config(
"""
config_dict = {
"server_name": name,
- "send_federation": False,
+ # Setting this to an empty list turns off federation sending.
+ "federation_sender_instances": [],
"media_store_path": "media",
# the test signing key is just an arbitrary ed25519 key to keep the config
# parser happy
@@ -183,8 +184,9 @@ def default_config(
# rooms will fail.
"default_room_version": DEFAULT_ROOM_VERSION,
# disable user directory updates, because they get done in the
- # background, which upsets the test runner.
- "update_user_directory": False,
+ # background, which upsets the test runner. Setting this to an
+ # (obviously) fake worker name disables updating the user directory.
+ "update_user_directory_from_worker": "does_not_exist_worker_name",
"caches": {"global_factor": 1, "sync_response_cache_duration": 0},
"listeners": [{"port": 0, "type": "http"}],
}