Diffstat (limited to 'synapse')
-rw-r--r--   synapse/app/_base.py                                      4
-rw-r--r--   synapse/config/server.py                                  6
-rw-r--r--   synapse/federation/federation_server.py                   4
-rw-r--r--   synapse/federation/transport/client.py                   10
-rw-r--r--   synapse/handlers/auth.py                                  2
-rw-r--r--   synapse/handlers/oidc.py                                  2
-rw-r--r--   synapse/handlers/search.py                                6
-rw-r--r--   synapse/http/server.py                                    4
-rw-r--r--   synapse/module_api/__init__.py                            4
-rw-r--r--   synapse/storage/databases/main/monthly_active_users.py    6
-rw-r--r--   synapse/storage/prepare_database.py                       4
-rw-r--r--   synapse/util/caches/ttlcache.py                           2
12 files changed, 27 insertions, 27 deletions
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 37321f9133..d28b87a3f4 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -48,7 +48,6 @@ from twisted.logger import LoggingFile, LogLevel
 from twisted.protocols.tls import TLSMemoryBIOFactory
 from twisted.python.threadpool import ThreadPool
 
-import synapse
 from synapse.api.constants import MAX_PDU_SIZE
 from synapse.app import check_bind_error
 from synapse.app.phone_stats_home import start_phone_stats_home
@@ -60,6 +59,7 @@ from synapse.events.spamcheck import load_legacy_spam_checkers
 from synapse.events.third_party_rules import load_legacy_third_party_event_rules
 from synapse.handlers.auth import load_legacy_password_auth_providers
 from synapse.logging.context import PreserveLoggingContext
+from synapse.logging.opentracing import init_tracer
 from synapse.metrics import install_gc_manager, register_threadpool
 from synapse.metrics.background_process_metrics import wrap_as_background_process
 from synapse.metrics.jemalloc import setup_jemalloc_stats
@@ -431,7 +431,7 @@ async def start(hs: "HomeServer") -> None:
     refresh_certificate(hs)
 
     # Start the tracer
-    synapse.logging.opentracing.init_tracer(hs)  # type: ignore[attr-defined]  # noqa
+    init_tracer(hs)  # noqa
 
     # Instantiate the modules so they can register their web resources to the module API
     # before we start the listeners.
diff --git a/synapse/config/server.py b/synapse/config/server.py
index d771045b52..b6cd326416 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -186,7 +186,7 @@ KNOWN_RESOURCES = {
 class HttpResourceConfig:
     names: List[str] = attr.ib(
         factory=list,
-        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),  # type: ignore
+        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),
     )
     compress: bool = attr.ib(
         default=False,
@@ -231,9 +231,7 @@ class ManholeConfig:
 class LimitRemoteRoomsConfig:
     enabled: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
     complexity: Union[float, int] = attr.ib(
-        validator=attr.validators.instance_of(
-            (float, int)  # type: ignore[arg-type]  # noqa
-        ),
+        validator=attr.validators.instance_of((float, int)),  # noqa
         default=1.0,
     )
     complexity_error: str = attr.ib(
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index beab1227b8..884b5d60b4 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -268,8 +268,8 @@ class FederationServer(FederationBase):
             transaction_id=transaction_id,
             destination=destination,
             origin=origin,
-            origin_server_ts=transaction_data.get("origin_server_ts"),  # type: ignore
-            pdus=transaction_data.get("pdus"),  # type: ignore
+            origin_server_ts=transaction_data.get("origin_server_ts"),  # type: ignore[arg-type]
+            pdus=transaction_data.get("pdus"),
             edus=transaction_data.get("edus"),
         )
 
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index 1421050b9a..9ce06dfa28 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -229,21 +229,21 @@ class TransportLayerClient:
         """
         logger.debug(
             "send_data dest=%s, txid=%s",
-            transaction.destination,  # type: ignore
-            transaction.transaction_id,  # type: ignore
+            transaction.destination,
+            transaction.transaction_id,
         )
 
-        if transaction.destination == self.server_name:  # type: ignore
+        if transaction.destination == self.server_name:
            raise RuntimeError("Transport layer cannot send to itself!")
 
         # FIXME: This is only used by the tests. The actual json sent is
         # generated by the json_data_callback.
         json_data = transaction.get_dict()
 
-        path = _create_v1_path("/send/%s", transaction.transaction_id)  # type: ignore
+        path = _create_v1_path("/send/%s", transaction.transaction_id)
 
         return await self.client.put_json(
-            transaction.destination,  # type: ignore
+            transaction.destination,
             path=path,
             data=json_data,
             json_data_callback=json_data_callback,
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 86991d26ce..22678d486d 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -481,7 +481,7 @@ class AuthHandler:
         sid = authdict["session"]
 
         # Convert the URI and method to strings.
-        uri = request.uri.decode("utf-8")  # type: ignore
+        uri = request.uri.decode("utf-8")
         method = request.method.decode("utf-8")
 
         # If there's no session ID, create a new session.
diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py
index 724b9cfcb4..f6ffb7d18d 100644
--- a/synapse/handlers/oidc.py
+++ b/synapse/handlers/oidc.py
@@ -966,7 +966,7 @@ class OidcProvider:
                 "Mapping provider does not support de-duplicating Matrix IDs"
             )
 
-        attributes = await self._user_mapping_provider.map_user_attributes(  # type: ignore
+        attributes = await self._user_mapping_provider.map_user_attributes(
             userinfo, token
         )
 
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index 102dd4b57d..5619f8f50e 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -357,7 +357,7 @@ class SearchHandler:
             itertools.chain(
                 # The events_before and events_after for each context.
                 itertools.chain.from_iterable(
-                    itertools.chain(context["events_before"], context["events_after"])  # type: ignore[arg-type]
+                    itertools.chain(context["events_before"], context["events_after"])
                     for context in contexts.values()
                 ),
                 # The returned events.
@@ -373,10 +373,10 @@ class SearchHandler:
 
         for context in contexts.values():
             context["events_before"] = self._event_serializer.serialize_events(
-                context["events_before"], time_now, bundle_aggregations=aggregations  # type: ignore[arg-type]
+                context["events_before"], time_now, bundle_aggregations=aggregations
             )
             context["events_after"] = self._event_serializer.serialize_events(
-                context["events_after"], time_now, bundle_aggregations=aggregations  # type: ignore[arg-type]
+                context["events_after"], time_now, bundle_aggregations=aggregations
             )
 
         results = [
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 31ca841889..b8a7a0f5df 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -295,7 +295,7 @@ class _AsyncResource(resource.Resource, metaclass=abc.ABCMeta):
             if isawaitable(raw_callback_return):
                 callback_return = await raw_callback_return
             else:
-                callback_return = raw_callback_return  # type: ignore
+                callback_return = raw_callback_return
 
             return callback_return
 
@@ -469,7 +469,7 @@ class JsonResource(DirectServeJsonResource):
         if isinstance(raw_callback_return, (defer.Deferred, types.CoroutineType)):
             callback_return = await raw_callback_return
         else:
-            callback_return = raw_callback_return  # type: ignore
+            callback_return = raw_callback_return
 
         return callback_return
 
diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py
index 8f9e629274..7e6e23db73 100644
--- a/synapse/module_api/__init__.py
+++ b/synapse/module_api/__init__.py
@@ -109,6 +109,7 @@ from synapse.storage.state import StateFilter
 from synapse.types import (
     DomainSpecificString,
     JsonDict,
+    JsonMapping,
     Requester,
     StateMap,
     UserID,
@@ -151,6 +152,7 @@ __all__ = [
     "PRESENCE_ALL_USERS",
     "LoginResponse",
     "JsonDict",
+    "JsonMapping",
     "EventBase",
     "StateMap",
     "ProfileInfo",
@@ -1419,7 +1421,7 @@ class AccountDataManager:
             f"{user_id} is not local to this homeserver; can't access account data for remote users."
         )
 
-    async def get_global(self, user_id: str, data_type: str) -> Optional[JsonDict]:
+    async def get_global(self, user_id: str, data_type: str) -> Optional[JsonMapping]:
         """
         Gets some global account data, of a specified type, for the specified user.
 
diff --git a/synapse/storage/databases/main/monthly_active_users.py b/synapse/storage/databases/main/monthly_active_users.py
index 4f1c22c71b..5beb8f1d4b 100644
--- a/synapse/storage/databases/main/monthly_active_users.py
+++ b/synapse/storage/databases/main/monthly_active_users.py
@@ -232,10 +232,10 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
             # is racy.
             # Have resolved to invalidate the whole cache for now and do
             # something about it if and when the perf becomes significant
-            self._invalidate_all_cache_and_stream(  # type: ignore[attr-defined]
+            self._invalidate_all_cache_and_stream(
                 txn, self.user_last_seen_monthly_active
             )
-            self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())  # type: ignore[attr-defined]
+            self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())
 
         reserved_users = await self.get_registered_reserved_users()
         await self.db_pool.runInteraction(
@@ -363,7 +363,7 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
 
         if self._limit_usage_by_mau or self._mau_stats_only:
             # Trial users and guests should not be included as part of MAU group
-            is_guest = await self.is_guest(user_id)  # type: ignore[attr-defined]
+            is_guest = await self.is_guest(user_id)
             if is_guest:
                 return
             is_trial = await self.is_trial_user(user_id)
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index e3153d1a4a..546d6bae6e 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -501,11 +501,11 @@ def _upgrade_existing_database(
                 if hasattr(module, "run_create"):
                     logger.info("Running %s:run_create", relative_path)
-                    module.run_create(cur, database_engine)  # type: ignore
+                    module.run_create(cur, database_engine)
 
                 if not is_empty and hasattr(module, "run_upgrade"):
                     logger.info("Running %s:run_upgrade", relative_path)
-                    module.run_upgrade(cur, database_engine, config=config)  # type: ignore
+                    module.run_upgrade(cur, database_engine, config=config)
 
             elif ext == ".pyc" or file_name == "__pycache__":
                 # Sometimes .pyc files turn up anyway even though we've
                 # disabled their generation; e.g. from distribution package
diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py
index 0b9ac26b69..f6b3ee31e4 100644
--- a/synapse/util/caches/ttlcache.py
+++ b/synapse/util/caches/ttlcache.py
@@ -107,7 +107,7 @@ class TTLCache(Generic[KT, VT]):
         self._metrics.inc_hits()
         return e.value, e.expiry_time, e.ttl
 
-    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:  # type: ignore
+    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:
         """Remove a value from the cache
 
         If key is in the cache, remove it and return its value, else return default.
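Most hunks above simply delete `# type: ignore` comments that appear to no longer be needed, while `federation_server.py` narrows a bare ignore to the error-code-scoped `# type: ignore[arg-type]`. A minimal standalone sketch of that distinction (not Synapse code); mypy's `--warn-unused-ignores` flag is the mechanism that reports ignores once they become stale:

```python
from typing import Optional


def set_timestamp(ts: int) -> None:
    print(ts)


maybe_ts: Optional[int] = None

# A bare ignore suppresses every mypy error on the line, including new ones
# introduced by later edits:
set_timestamp(maybe_ts)  # type: ignore

# Scoping the ignore to one error code keeps unrelated mistakes visible, and
# `mypy --warn-unused-ignores` flags the comment once it is no longer needed:
set_timestamp(maybe_ts)  # type: ignore[arg-type]
```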
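The `config/server.py` hunk keeps `attr.validators.instance_of((float, int))` but drops the ignore and the awkward line split, which suggests the attrs stubs now accept a tuple of types there. A small self-contained sketch of that validator form (hypothetical class name, not the real config class):

```python
from typing import Union

import attr


@attr.s(auto_attribs=True)
class LimitRemoteRooms:
    # instance_of() accepts a tuple of types, mirroring isinstance(); the
    # removed "# type: ignore[arg-type]" implies older stubs rejected this form.
    complexity: Union[float, int] = attr.ib(
        validator=attr.validators.instance_of((float, int)),
        default=1.0,
    )


print(LimitRemoteRooms(complexity=2).complexity)  # an int passes the validator
# LimitRemoteRooms(complexity="high") would raise TypeError from the validator.
```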
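The `module_api/__init__.py` hunk changes `AccountDataManager.get_global` to return `Optional[JsonMapping]` instead of `Optional[JsonDict]` and re-exports `JsonMapping`. Assuming `JsonMapping` is a read-only `Mapping` alias (its definition is not shown in this diff), the practical effect for module authors is that in-place mutation is no longer part of the contract; a generic sketch with hypothetical function names:

```python
from typing import Any, Dict, Mapping


def get_global_as_dict() -> Dict[str, Any]:
    # Callers may assume they can mutate the returned dict in place.
    return {"setting": True}


def get_global_as_mapping() -> Mapping[str, Any]:
    # Mapping advertises a read-only view; mypy rejects item assignment.
    return {"setting": True}


data = get_global_as_mapping()
print(data["setting"])
# data["setting"] = False  # mypy error: Mapping does not support item assignment
```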