diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py
index 9df4edee38..f473294070 100644
--- a/synapse/storage/background_updates.py
+++ b/synapse/storage/background_updates.py
@@ -836,9 +836,9 @@ class BackgroundUpdater:
c.execute(sql)
if isinstance(self.db_pool.engine, engines.PostgresEngine):
- runner: Optional[
- Callable[[LoggingDatabaseConnection], None]
- ] = create_index_psql
+ runner: Optional[Callable[[LoggingDatabaseConnection], None]] = (
+ create_index_psql
+ )
elif psql_only:
runner = None
else:
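Every hunk in this patch is a mechanical reformat, consistent with the 2024 stable style of the Black formatter: annotated assignments keep the annotation on one line and parenthesize the right-hand side, conditional expressions are parenthesized when they split, and `...` stub bodies collapse onto the definition line. A minimal sketch of the first two patterns (names here are illustrative, not from the patch):

```python
from typing import Callable, Optional

def create_index(conn: object) -> None:
    """Stand-in for a real callback such as create_index_psql."""

# Old style: the annotation exploded across lines, value on the closing line.
runner_old: Optional[
    Callable[[object], None]
] = create_index

# New style: annotation on one line, right-hand side parenthesized.
runner_new: Optional[Callable[[object], None]] = (
    create_index
)

# Conditional expressions get the same treatment when they need to split:
backwards = True
from_token, to_token = 1, 10
lower_token = (
    to_token if backwards else from_token
)
```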
diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py
index 69d5999c0a..84699a2ee1 100644
--- a/synapse/storage/controllers/persist_events.py
+++ b/synapse/storage/controllers/persist_events.py
@@ -773,9 +773,9 @@ class EventsPersistenceStorageController:
)
# Remove any events which are prev_events of any existing events.
- existing_prevs: Collection[
- str
- ] = await self.persist_events_store._get_events_which_are_prevs(result)
+ existing_prevs: Collection[str] = (
+ await self.persist_events_store._get_events_which_are_prevs(result)
+ )
result.difference_update(existing_prevs)
# Finally handle the case where the new events have soft-failed prev
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index 8dc9080842..d7d202f028 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -111,8 +111,7 @@ class _PoolConnection(Connection):
A Connection from twisted.enterprise.adbapi.Connection.
"""
- def reconnect(self) -> None:
- ...
+ def reconnect(self) -> None: ...
def make_pool(
@@ -1603,8 +1602,7 @@ class DatabasePool:
retcols: Collection[str],
allow_none: Literal[False] = False,
desc: str = "simple_select_one",
- ) -> Tuple[Any, ...]:
- ...
+ ) -> Tuple[Any, ...]: ...
@overload
async def simple_select_one(
@@ -1614,8 +1612,7 @@ class DatabasePool:
retcols: Collection[str],
allow_none: Literal[True] = True,
desc: str = "simple_select_one",
- ) -> Optional[Tuple[Any, ...]]:
- ...
+ ) -> Optional[Tuple[Any, ...]]: ...
async def simple_select_one(
self,
@@ -1654,8 +1651,7 @@ class DatabasePool:
retcol: str,
allow_none: Literal[False] = False,
desc: str = "simple_select_one_onecol",
- ) -> Any:
- ...
+ ) -> Any: ...
@overload
async def simple_select_one_onecol(
@@ -1665,8 +1661,7 @@ class DatabasePool:
retcol: str,
allow_none: Literal[True] = True,
desc: str = "simple_select_one_onecol",
- ) -> Optional[Any]:
- ...
+ ) -> Optional[Any]: ...
async def simple_select_one_onecol(
self,
@@ -1706,8 +1701,7 @@ class DatabasePool:
keyvalues: Dict[str, Any],
retcol: str,
allow_none: Literal[False] = False,
- ) -> Any:
- ...
+ ) -> Any: ...
@overload
@classmethod
@@ -1718,8 +1712,7 @@ class DatabasePool:
keyvalues: Dict[str, Any],
retcol: str,
allow_none: Literal[True] = True,
- ) -> Optional[Any]:
- ...
+ ) -> Optional[Any]: ...
@classmethod
def simple_select_one_onecol_txn(
@@ -2501,8 +2494,7 @@ def make_tuple_in_list_sql_clause(
database_engine: BaseDatabaseEngine,
columns: Tuple[str, str],
iterable: Collection[Tuple[Any, Any]],
-) -> Tuple[str, list]:
- ...
+) -> Tuple[str, list]: ...
def make_tuple_in_list_sql_clause(
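The `allow_none` overloads collapsed above rely on `Literal` flag types so callers get precise return types: passing `allow_none=True` yields an `Optional` result, while the default is expected to raise rather than return `None`. A self-contained sketch of the pattern (toy names, not the real `simple_select_one`):

```python
from typing import Any, Literal, Optional, Tuple, overload

@overload
def select_one(allow_none: Literal[False] = False) -> Tuple[Any, ...]: ...
@overload
def select_one(allow_none: Literal[True] = True) -> Optional[Tuple[Any, ...]]: ...

def select_one(allow_none: bool = False) -> Optional[Tuple[Any, ...]]:
    row: Optional[Tuple[Any, ...]] = ("a-fake-row",)  # stand-in for the query
    if row is None and not allow_none:
        raise RuntimeError("no row found")
    return row

row = select_one()                       # typed Tuple[Any, ...]
maybe_row = select_one(allow_none=True)  # typed Optional[Tuple[Any, ...]]
```

Because `Literal[False]` and `Literal[True]` are disjoint, mypy does not flag the overloads as overlapping even though only the default differs.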
diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py
index 3e011f3340..8dbcb3f5a0 100644
--- a/synapse/storage/databases/main/devices.py
+++ b/synapse/storage/databases/main/devices.py
@@ -1701,9 +1701,9 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
# Map of (user_id, device_id) -> bool. If there is an entry that implies
# the device exists.
- self.device_id_exists_cache: LruCache[
- Tuple[str, str], Literal[True]
- ] = LruCache(cache_name="device_id_exists", max_size=10000)
+ self.device_id_exists_cache: LruCache[Tuple[str, str], Literal[True]] = (
+ LruCache(cache_name="device_id_exists", max_size=10000)
+ )
async def store_device(
self,
diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py
index c96371a0d3..b219ea70ee 100644
--- a/synapse/storage/databases/main/end_to_end_keys.py
+++ b/synapse/storage/databases/main/end_to_end_keys.py
@@ -256,8 +256,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
self,
query_list: Collection[Tuple[str, Optional[str]]],
include_all_devices: Literal[False] = False,
- ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]:
- ...
+ ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ...
@overload
async def get_e2e_device_keys_and_signatures(
@@ -265,8 +264,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
query_list: Collection[Tuple[str, Optional[str]]],
include_all_devices: bool = False,
include_deleted_devices: Literal[False] = False,
- ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]:
- ...
+ ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ...
@overload
async def get_e2e_device_keys_and_signatures(
@@ -274,8 +272,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
query_list: Collection[Tuple[str, Optional[str]]],
include_all_devices: Literal[True],
include_deleted_devices: Literal[True],
- ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]:
- ...
+ ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]: ...
@trace
@cancellable
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index d5942a10b2..a6fda3f43c 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -1292,9 +1292,9 @@ class PersistEventsStore:
Returns:
filtered list
"""
- new_events_and_contexts: OrderedDict[
- str, Tuple[EventBase, EventContext]
- ] = OrderedDict()
+ new_events_and_contexts: OrderedDict[str, Tuple[EventBase, EventContext]] = (
+ OrderedDict()
+ )
for event, context in events_and_contexts:
prev_event_context = new_events_and_contexts.get(event.event_id)
if prev_event_context:
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index 9c3775bb7c..4c09567a7a 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -263,13 +263,13 @@ class EventsWorkerStore(SQLBaseStore):
5 * 60 * 1000,
)
- self._get_event_cache: AsyncLruCache[
- Tuple[str], EventCacheEntry
- ] = AsyncLruCache(
- cache_name="*getEvent*",
- max_size=hs.config.caches.event_cache_size,
- # `extra_index_cb` Returns a tuple as that is the key type
- extra_index_cb=lambda _, v: (v.event.room_id,),
+ self._get_event_cache: AsyncLruCache[Tuple[str], EventCacheEntry] = (
+ AsyncLruCache(
+ cache_name="*getEvent*",
+ max_size=hs.config.caches.event_cache_size,
+ # `extra_index_cb` returns a tuple, as that is the key type
+ extra_index_cb=lambda _, v: (v.event.room_id,),
+ )
)
# Map from event ID to a deferred that will result in a map from event
@@ -459,8 +459,7 @@ class EventsWorkerStore(SQLBaseStore):
allow_rejected: bool = ...,
allow_none: Literal[False] = ...,
check_room_id: Optional[str] = ...,
- ) -> EventBase:
- ...
+ ) -> EventBase: ...
@overload
async def get_event(
@@ -471,8 +470,7 @@ class EventsWorkerStore(SQLBaseStore):
allow_rejected: bool = ...,
allow_none: Literal[True] = ...,
check_room_id: Optional[str] = ...,
- ) -> Optional[EventBase]:
- ...
+ ) -> Optional[EventBase]: ...
@cancellable
async def get_event(
@@ -800,9 +798,9 @@ class EventsWorkerStore(SQLBaseStore):
# to all the events we pulled from the DB (this will result in this
# function returning more events than requested, but that can happen
# already due to `_get_events_from_db`).
- fetching_deferred: ObservableDeferred[
- Dict[str, EventCacheEntry]
- ] = ObservableDeferred(defer.Deferred(), consumeErrors=True)
+ fetching_deferred: ObservableDeferred[Dict[str, EventCacheEntry]] = (
+ ObservableDeferred(defer.Deferred(), consumeErrors=True)
+ )
for event_id in missing_events_ids:
self._current_event_fetches[event_id] = fetching_deferred
@@ -1871,9 +1869,9 @@ class EventsWorkerStore(SQLBaseStore):
" LIMIT ?"
)
txn.execute(sql, (-last_id, -current_id, instance_name, limit))
- new_event_updates: List[
- Tuple[int, Tuple[str, str, str, str, str, str]]
- ] = []
+ new_event_updates: List[Tuple[int, Tuple[str, str, str, str, str, str]]] = (
+ []
+ )
row: Tuple[int, str, str, str, str, str, str]
# Type safety: iterating over `txn` yields `Tuple`, i.e.
# `Tuple[Any, ...]` of arbitrary length. Mypy detects assigning a
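The `fetching_deferred` hunk above is part of a read-coalescing scheme: when several callers ask for the same missing event, only the first issues a database query and the rest wait on a shared deferred. A rough asyncio analogue of that idea (Synapse itself uses Twisted's Deferred via its ObservableDeferred wrapper; everything below is a simplified stand-in):

```python
import asyncio
from typing import Dict

class EventFetcher:
    def __init__(self) -> None:
        # Map from event ID to the future of an in-flight fetch.
        self._in_flight: Dict[str, "asyncio.Future[dict]"] = {}

    async def get_event(self, event_id: str) -> dict:
        existing = self._in_flight.get(event_id)
        if existing is not None:
            return await existing  # piggyback on the in-flight fetch

        fut: "asyncio.Future[dict]" = asyncio.get_running_loop().create_future()
        self._in_flight[event_id] = fut
        try:
            await asyncio.sleep(0)          # stand-in for the DB round trip
            event = {"event_id": event_id}
        except Exception as exc:
            fut.set_exception(exc)          # propagate to the waiters too
            raise
        else:
            fut.set_result(event)
            return event
        finally:
            # Always drop the entry so a failed fetch is never cached.
            self._in_flight.pop(event_id, None)

async def main() -> None:
    fetcher = EventFetcher()
    a, b = await asyncio.gather(
        fetcher.get_event("$e1"), fetcher.get_event("$e1")
    )
    assert a == b == {"event_id": "$e1"}

asyncio.run(main())
```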
diff --git a/synapse/storage/databases/main/lock.py b/synapse/storage/databases/main/lock.py
index 0794cc6d25..8277ad8c33 100644
--- a/synapse/storage/databases/main/lock.py
+++ b/synapse/storage/databases/main/lock.py
@@ -79,9 +79,9 @@ class LockStore(SQLBaseStore):
# A map from `(lock_name, lock_key)` to lock that we think we
# currently hold.
- self._live_lock_tokens: WeakValueDictionary[
- Tuple[str, str], Lock
- ] = WeakValueDictionary()
+ self._live_lock_tokens: WeakValueDictionary[Tuple[str, str], Lock] = (
+ WeakValueDictionary()
+ )
# A map from `(lock_name, lock_key, token)` to read/write lock that we
# think we currently hold. For a given lock_name/lock_key, there can be
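`_live_lock_tokens` is a `WeakValueDictionary`, so this "locks we think we hold" bookkeeping cannot keep a released lock alive: once the last strong reference to a lock goes away, its entry disappears on its own. A small demonstration of that property (the `Lock` class here is a stand-in, not Synapse's):

```python
import gc
from typing import Tuple
from weakref import WeakValueDictionary

class Lock:
    """Stand-in for the real lock object."""

live_locks: "WeakValueDictionary[Tuple[str, str], Lock]" = WeakValueDictionary()

lock = Lock()
live_locks[("lock_name", "lock_key")] = lock
assert ("lock_name", "lock_key") in live_locks

del lock      # drop the last strong reference
gc.collect()  # make collection deterministic for the demo
assert ("lock_name", "lock_key") not in live_locks
```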
diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py
index b5ed1bf9c8..6128332af8 100644
--- a/synapse/storage/databases/main/media_repository.py
+++ b/synapse/storage/databases/main/media_repository.py
@@ -158,9 +158,9 @@ class MediaRepositoryBackgroundUpdateStore(SQLBaseStore):
)
if hs.config.media.can_load_media_repo:
- self.unused_expiration_time: Optional[
- int
- ] = hs.config.media.unused_expiration_time
+ self.unused_expiration_time: Optional[int] = (
+ hs.config.media.unused_expiration_time
+ )
else:
self.unused_expiration_time = None
diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py
index 8a426d2875..d513c42530 100644
--- a/synapse/storage/databases/main/receipts.py
+++ b/synapse/storage/databases/main/receipts.py
@@ -394,9 +394,9 @@ class ReceiptsWorkerStore(SQLBaseStore):
content: JsonDict = {}
for receipt_type, user_id, event_id, data in rows:
- content.setdefault(event_id, {}).setdefault(receipt_type, {})[
- user_id
- ] = db_to_json(data)
+ content.setdefault(event_id, {}).setdefault(receipt_type, {})[user_id] = (
+ db_to_json(data)
+ )
return [{"type": EduTypes.RECEIPT, "room_id": room_id, "content": content}]
@@ -483,9 +483,9 @@ class ReceiptsWorkerStore(SQLBaseStore):
if user_id in receipt_type_dict: # existing receipt
# is the existing receipt threaded and we are currently processing an unthreaded one?
if "thread_id" in receipt_type_dict[user_id] and not thread_id:
- receipt_type_dict[
- user_id
- ] = receipt_data # replace with unthreaded one
+ receipt_type_dict[user_id] = (
+ receipt_data # replace with unthreaded one
+ )
else: # receipt does not exist, just set it
receipt_type_dict[user_id] = receipt_data
if thread_id:
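The reformatted assignment in the first receipts hunk builds a three-level mapping of event_id -> receipt_type -> user_id -> receipt data; chaining `setdefault` creates each intermediate dict on first use. A standalone sketch, with `json.loads` standing in for `db_to_json`:

```python
import json
from typing import Any, Dict

content: Dict[str, Dict[str, Dict[str, Any]]] = {}
rows = [("m.read", "@alice:example.org", "$event1", '{"ts": 1}')]

for receipt_type, user_id, event_id, data in rows:
    content.setdefault(event_id, {}).setdefault(receipt_type, {})[user_id] = (
        json.loads(data)  # db_to_json is roughly json.loads here
    )

assert content == {"$event1": {"m.read": {"@alice:example.org": {"ts": 1}}}}
```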
diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py
index 3220d515d9..b2a67aff89 100644
--- a/synapse/storage/databases/main/state.py
+++ b/synapse/storage/databases/main/state.py
@@ -768,12 +768,10 @@ class StateMapWrapper(Dict[StateKey, str]):
return super().__getitem__(key)
@overload
- def get(self, key: Tuple[str, str]) -> Optional[str]:
- ...
+ def get(self, key: Tuple[str, str]) -> Optional[str]: ...
@overload
- def get(self, key: Tuple[str, str], default: Union[str, _T]) -> Union[str, _T]:
- ...
+ def get(self, key: Tuple[str, str], default: Union[str, _T]) -> Union[str, _T]: ...
def get(
self, key: StateKey, default: Union[str, _T, None] = None
diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py
index 19041cc35b..7ab6003f61 100644
--- a/synapse/storage/databases/main/stream.py
+++ b/synapse/storage/databases/main/stream.py
@@ -988,8 +988,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
txn: LoggingTransaction,
event_id: str,
allow_none: Literal[False] = False,
- ) -> int:
- ...
+ ) -> int: ...
@overload
def get_stream_id_for_event_txn(
@@ -997,8 +996,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
txn: LoggingTransaction,
event_id: str,
allow_none: bool = False,
- ) -> Optional[int]:
- ...
+ ) -> Optional[int]: ...
def get_stream_id_for_event_txn(
self,
@@ -1476,12 +1474,12 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
_EventDictReturn(event_id, topological_ordering, stream_ordering)
for event_id, instance_name, topological_ordering, stream_ordering in txn
if _filter_results(
- lower_token=to_token
- if direction == Direction.BACKWARDS
- else from_token,
- upper_token=from_token
- if direction == Direction.BACKWARDS
- else to_token,
+ lower_token=(
+ to_token if direction == Direction.BACKWARDS else from_token
+ ),
+ upper_token=(
+ from_token if direction == Direction.BACKWARDS else to_token
+ ),
instance_name=instance_name,
topological_ordering=topological_ordering,
stream_ordering=stream_ordering,
diff --git a/synapse/storage/databases/main/task_scheduler.py b/synapse/storage/databases/main/task_scheduler.py
index 7b95616432..4956870b1a 100644
--- a/synapse/storage/databases/main/task_scheduler.py
+++ b/synapse/storage/databases/main/task_scheduler.py
@@ -136,12 +136,12 @@ class TaskSchedulerWorkerStore(SQLBaseStore):
"status": task.status,
"timestamp": task.timestamp,
"resource_id": task.resource_id,
- "params": None
- if task.params is None
- else json_encoder.encode(task.params),
- "result": None
- if task.result is None
- else json_encoder.encode(task.result),
+ "params": (
+ None if task.params is None else json_encoder.encode(task.params)
+ ),
+ "result": (
+ None if task.result is None else json_encoder.encode(task.result)
+ ),
"error": task.error,
},
desc="insert_scheduled_task",
diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py
index a1c4b8c6c3..0513e7dc06 100644
--- a/synapse/storage/databases/main/user_directory.py
+++ b/synapse/storage/databases/main/user_directory.py
@@ -745,9 +745,11 @@ class UserDirectoryBackgroundUpdateStore(StateDeltasStore):
p.user_id,
get_localpart_from_id(p.user_id),
get_domain_from_id(p.user_id),
- _filter_text_for_index(p.display_name)
- if p.display_name
- else None,
+ (
+ _filter_text_for_index(p.display_name)
+ if p.display_name
+ else None
+ ),
)
for p in profiles
],
diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py
index e64495ba8d..d4ac74c1ee 100644
--- a/synapse/storage/databases/state/store.py
+++ b/synapse/storage/databases/state/store.py
@@ -120,11 +120,11 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
# TODO: this hasn't been tuned yet
50000,
)
- self._state_group_members_cache: DictionaryCache[
- int, StateKey, str
- ] = DictionaryCache(
- "*stateGroupMembersCache*",
- 500000,
+ self._state_group_members_cache: DictionaryCache[int, StateKey, str] = (
+ DictionaryCache(
+ "*stateGroupMembersCache*",
+ 500000,
+ )
)
def get_max_state_group_txn(txn: Cursor) -> int:
diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py
index 8c29236b59..ad222e7e2d 100644
--- a/synapse/storage/engines/_base.py
+++ b/synapse/storage/engines/_base.py
@@ -48,8 +48,7 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM
@property
@abc.abstractmethod
- def single_threaded(self) -> bool:
- ...
+ def single_threaded(self) -> bool: ...
@property
@abc.abstractmethod
@@ -68,8 +67,7 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM
@abc.abstractmethod
def check_database(
self, db_conn: ConnectionType, allow_outdated_version: bool = False
- ) -> None:
- ...
+ ) -> None: ...
@abc.abstractmethod
def check_new_database(self, txn: CursorType) -> None:
@@ -79,27 +77,22 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM
...
@abc.abstractmethod
- def convert_param_style(self, sql: str) -> str:
- ...
+ def convert_param_style(self, sql: str) -> str: ...
# This method would ideally take a plain ConnectionType, but it seems that
# the Sqlite engine expects to use LoggingDatabaseConnection.cursor
# instead of sqlite3.Connection.cursor: only the former takes a txn_name.
@abc.abstractmethod
- def on_new_connection(self, db_conn: "LoggingDatabaseConnection") -> None:
- ...
+ def on_new_connection(self, db_conn: "LoggingDatabaseConnection") -> None: ...
@abc.abstractmethod
- def is_deadlock(self, error: Exception) -> bool:
- ...
+ def is_deadlock(self, error: Exception) -> bool: ...
@abc.abstractmethod
- def is_connection_closed(self, conn: ConnectionType) -> bool:
- ...
+ def is_connection_closed(self, conn: ConnectionType) -> bool: ...
@abc.abstractmethod
- def lock_table(self, txn: Cursor, table: str) -> None:
- ...
+ def lock_table(self, txn: Cursor, table: str) -> None: ...
@property
@abc.abstractmethod
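Collapsing the `...` bodies onto the definition line is purely cosmetic: the Ellipsis expression is the same statement either way, and `@abc.abstractmethod` still forces subclasses to override. A quick check of that equivalence (hypothetical engine names):

```python
import abc

class Engine(abc.ABC):
    @abc.abstractmethod
    def is_deadlock(self, error: Exception) -> bool: ...

    @property
    @abc.abstractmethod
    def single_threaded(self) -> bool: ...

class SqliteEngine(Engine):
    def is_deadlock(self, error: Exception) -> bool:
        return False  # toy classification for the example

    @property
    def single_threaded(self) -> bool:
        return True

SqliteEngine()  # fine: every abstract stub is overridden
try:
    Engine()  # type: ignore[abstract]
except TypeError:
    pass  # one-line `...` stubs still block direct instantiation
```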
diff --git a/synapse/storage/types.py b/synapse/storage/types.py
index b4e0a8f576..74f60cc590 100644
--- a/synapse/storage/types.py
+++ b/synapse/storage/types.py
@@ -42,20 +42,17 @@ SQLQueryParameters = Union[Sequence[Any], Mapping[str, Any]]
class Cursor(Protocol):
- def execute(self, sql: str, parameters: SQLQueryParameters = ...) -> Any:
- ...
+ def execute(self, sql: str, parameters: SQLQueryParameters = ...) -> Any: ...
- def executemany(self, sql: str, parameters: Sequence[SQLQueryParameters]) -> Any:
- ...
+ def executemany(
+ self, sql: str, parameters: Sequence[SQLQueryParameters]
+ ) -> Any: ...
- def fetchone(self) -> Optional[Tuple]:
- ...
+ def fetchone(self) -> Optional[Tuple]: ...
- def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]:
- ...
+ def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]: ...
- def fetchall(self) -> List[Tuple]:
- ...
+ def fetchall(self) -> List[Tuple]: ...
@property
def description(
@@ -70,36 +67,28 @@ class Cursor(Protocol):
def rowcount(self) -> int:
return 0
- def __iter__(self) -> Iterator[Tuple]:
- ...
+ def __iter__(self) -> Iterator[Tuple]: ...
- def close(self) -> None:
- ...
+ def close(self) -> None: ...
class Connection(Protocol):
- def cursor(self) -> Cursor:
- ...
+ def cursor(self) -> Cursor: ...
- def close(self) -> None:
- ...
+ def close(self) -> None: ...
- def commit(self) -> None:
- ...
+ def commit(self) -> None: ...
- def rollback(self) -> None:
- ...
+ def rollback(self) -> None: ...
- def __enter__(self) -> "Connection":
- ...
+ def __enter__(self) -> "Connection": ...
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
- ) -> Optional[bool]:
- ...
+ ) -> Optional[bool]: ...
class DBAPI2Module(Protocol):
@@ -129,24 +118,20 @@ class DBAPI2Module(Protocol):
# explain why this is necessary for safety. TL;DR: we shouldn't be able to write
# to `x`, only read from it. See also https://github.com/python/mypy/issues/6002 .
@property
- def Warning(self) -> Type[Exception]:
- ...
+ def Warning(self) -> Type[Exception]: ...
@property
- def Error(self) -> Type[Exception]:
- ...
+ def Error(self) -> Type[Exception]: ...
# Errors are divided into `InterfaceError`s (something went wrong in the database
# driver) and `DatabaseError`s (something went wrong in the database). These are
# both subclasses of `Error`, but we can't currently express this in type
# annotations due to https://github.com/python/mypy/issues/8397
@property
- def InterfaceError(self) -> Type[Exception]:
- ...
+ def InterfaceError(self) -> Type[Exception]: ...
@property
- def DatabaseError(self) -> Type[Exception]:
- ...
+ def DatabaseError(self) -> Type[Exception]: ...
# Everything below is a subclass of `DatabaseError`.
@@ -155,8 +140,7 @@ class DBAPI2Module(Protocol):
# - An invalid date time was provided.
# - A string contained a null code point.
@property
- def DataError(self) -> Type[Exception]:
- ...
+ def DataError(self) -> Type[Exception]: ...
# Roughly: something went wrong in the database, but it's not within the application
# programmer's control. Examples:
@@ -167,21 +151,18 @@ class DBAPI2Module(Protocol):
# - The database ran out of resources, such as storage, memory, connections, etc.
# - The database encountered an error from the operating system.
@property
- def OperationalError(self) -> Type[Exception]:
- ...
+ def OperationalError(self) -> Type[Exception]: ...
# Roughly: we've given the database data which breaks a rule we asked it to enforce.
# Examples:
# - Stop, criminal scum! You violated the foreign key constraint
# - Also check constraints, non-null constraints, etc.
@property
- def IntegrityError(self) -> Type[Exception]:
- ...
+ def IntegrityError(self) -> Type[Exception]: ...
# Roughly: something went wrong within the database server itself.
@property
- def InternalError(self) -> Type[Exception]:
- ...
+ def InternalError(self) -> Type[Exception]: ...
# Roughly: the application did something silly that needs to be fixed. Examples:
# - We don't have permissions to do something.
@@ -189,13 +170,11 @@ class DBAPI2Module(Protocol):
# - We tried to use a reserved name.
# - We referred to a column that doesn't exist.
@property
- def ProgrammingError(self) -> Type[Exception]:
- ...
+ def ProgrammingError(self) -> Type[Exception]: ...
# Roughly: we've tried to do something that this database doesn't support.
@property
- def NotSupportedError(self) -> Type[Exception]:
- ...
+ def NotSupportedError(self) -> Type[Exception]: ...
# We originally wrote
# def connect(self, *args, **kwargs) -> Connection: ...
@@ -204,8 +183,7 @@ class DBAPI2Module(Protocol):
# psycopg2.connect doesn't have a mandatory positional argument. Instead, we use
# the following slightly unusual workaround.
@property
- def connect(self) -> Callable[..., Connection]:
- ...
+ def connect(self) -> Callable[..., Connection]: ...
__all__ = ["Cursor", "Connection", "DBAPI2Module"]
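As the comments in this file note, the exception classes are declared as read-only properties so that a conforming module may expose subclasses of `Exception` without mypy demanding a writable, invariant attribute (python/mypy#6002), and `connect` is a property returning a callable because drivers disagree on its positional arguments. A compact sketch, showing a real driver module satisfying such a protocol structurally:

```python
import sqlite3
from typing import Any, Callable, Protocol, Type

class DBAPI2Like(Protocol):
    @property
    def Error(self) -> Type[Exception]: ...

    @property
    def connect(self) -> Callable[..., Any]: ...

def error_name(mod: DBAPI2Like) -> str:
    return mod.Error.__name__

# Modules can satisfy protocols structurally; sqlite3 has both members.
print(error_name(sqlite3))  # -> Error
conn = sqlite3.connect(":memory:")
conn.close()
```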