Diffstat
-rw-r--r--  changelog.d/11692.misc | 1
-rw-r--r--  synapse/api/room_versions.py | 30
-rw-r--r--  synapse/config/emailconfig.py | 24
-rw-r--r--  synapse/config/server.py | 4
-rw-r--r--  synapse/config/workers.py | 24
-rw-r--r--  synapse/crypto/keyring.py | 18
-rw-r--r--  synapse/events/snapshot.py | 18
-rw-r--r--  synapse/federation/sender/per_destination_queue.py | 12
-rw-r--r--  synapse/handlers/auth.py | 14
-rw-r--r--  synapse/handlers/e2e_keys.py | 10
-rw-r--r--  synapse/handlers/sso.py | 32
-rw-r--r--  synapse/http/connectproxyclient.py | 4
-rw-r--r--  synapse/http/matrixfederationclient.py | 18
-rw-r--r--  synapse/http/site.py | 4
-rw-r--r--  synapse/logging/_remote.py | 10
-rw-r--r--  synapse/logging/context.py | 20
-rw-r--r--  synapse/logging/opentracing.py | 2
-rw-r--r--  synapse/metrics/__init__.py | 20
-rw-r--r--  synapse/notifier.py | 14
-rw-r--r--  synapse/push/__init__.py | 38
-rw-r--r--  synapse/push/bulk_push_rule_evaluator.py | 18
-rw-r--r--  synapse/replication/tcp/streams/events.py | 34
-rw-r--r--  synapse/rest/media/v1/media_storage.py | 6
-rw-r--r--  synapse/state/__init__.py | 8
-rw-r--r--  synapse/storage/database.py | 8
-rw-r--r--  synapse/storage/databases/main/end_to_end_keys.py | 6
-rw-r--r--  synapse/storage/databases/main/events.py | 14
-rw-r--r--  synapse/storage/databases/main/events_bg_updates.py | 12
-rw-r--r--  synapse/storage/databases/main/registration.py | 18
-rw-r--r--  synapse/storage/databases/main/roommember.py | 6
-rw-r--r--  synapse/storage/databases/main/ui_auth.py | 12
-rw-r--r--  synapse/storage/keys.py | 6
-rw-r--r--  synapse/storage/prepare_database.py | 6
-rw-r--r--  synapse/storage/relations.py | 20
-rw-r--r--  synapse/storage/state.py | 6
-rw-r--r--  synapse/storage/util/id_generators.py | 8
-rw-r--r--  synapse/streams/config.py | 10
-rw-r--r--  synapse/types.py | 75
-rw-r--r--  synapse/util/async_helpers.py | 6
-rw-r--r--  synapse/util/caches/dictionary_cache.py | 11
40 files changed, 300 insertions, 307 deletions
diff --git a/changelog.d/11692.misc b/changelog.d/11692.misc
new file mode 100644
index 0000000000..0cdfca54e7
--- /dev/null
+++ b/changelog.d/11692.misc
@@ -0,0 +1 @@
+Use `auto_attribs` and native type hints for attrs classes.
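
The change described in this changelog entry follows one mechanical pattern in every file below: drop the `type=` argument to `attr.ib()` and move the type into a native annotation under `auto_attribs=True`. A minimal sketch of the two equivalent styles (the `Example*` classes here are illustrative, not taken from Synapse):

    import attr

    # Old style: the type is passed to attr.ib() and is opaque to most type checkers.
    @attr.s(slots=True, frozen=True)
    class ExampleOld:
        name = attr.ib(type=str)
        count = attr.ib(type=int, default=0)

    # New style: auto_attribs=True turns every annotated class attribute into a field,
    # so the annotation carries the type and the assignment carries the default.
    @attr.s(slots=True, frozen=True, auto_attribs=True)
    class ExampleNew:
        name: str
        count: int = 0

    assert ExampleOld("a", 1) == ExampleOld("a", 1)
    assert ExampleNew("a").count == 0
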
diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py
index 0a895bba48..a747a40814 100644
--- a/synapse/api/room_versions.py
+++ b/synapse/api/room_versions.py
@@ -46,41 +46,41 @@ class RoomDisposition:
     UNSTABLE = "unstable"
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class RoomVersion:
     """An object which describes the unique attributes of a room version."""
 
-    identifier = attr.ib(type=str)  # the identifier for this version
-    disposition = attr.ib(type=str)  # one of the RoomDispositions
-    event_format = attr.ib(type=int)  # one of the EventFormatVersions
-    state_res = attr.ib(type=int)  # one of the StateResolutionVersions
-    enforce_key_validity = attr.ib(type=bool)
+    identifier: str  # the identifier for this version
+    disposition: str  # one of the RoomDispositions
+    event_format: int  # one of the EventFormatVersions
+    state_res: int  # one of the StateResolutionVersions
+    enforce_key_validity: bool
 
     # Before MSC2432, m.room.aliases had special auth rules and redaction rules
-    special_case_aliases_auth = attr.ib(type=bool)
+    special_case_aliases_auth: bool
     # Strictly enforce canonicaljson, do not allow:
     # * Integers outside the range of [-2 ^ 53 + 1, 2 ^ 53 - 1]
     # * Floats
     # * NaN, Infinity, -Infinity
-    strict_canonicaljson = attr.ib(type=bool)
+    strict_canonicaljson: bool
     # MSC2209: Check 'notifications' key while verifying
     # m.room.power_levels auth rules.
-    limit_notifications_power_levels = attr.ib(type=bool)
+    limit_notifications_power_levels: bool
     # MSC2174/MSC2176: Apply updated redaction rules algorithm.
-    msc2176_redaction_rules = attr.ib(type=bool)
+    msc2176_redaction_rules: bool
     # MSC3083: Support the 'restricted' join_rule.
-    msc3083_join_rules = attr.ib(type=bool)
+    msc3083_join_rules: bool
     # MSC3375: Support for the proper redaction rules for MSC3083. This mustn't
     #          be enabled if MSC3083 is not.
-    msc3375_redaction_rules = attr.ib(type=bool)
+    msc3375_redaction_rules: bool
     # MSC2403: Allows join_rules to be set to 'knock', changes auth rules to allow sending
     # m.room.membership event with membership 'knock'.
-    msc2403_knocking = attr.ib(type=bool)
+    msc2403_knocking: bool
     # MSC2716: Adds m.room.power_levels -> content.historical field to control
     # whether "insertion", "chunk", "marker" events can be sent
-    msc2716_historical = attr.ib(type=bool)
+    msc2716_historical: bool
     # MSC2716: Adds support for redacting "insertion", "chunk", and "marker" events
-    msc2716_redactions = attr.ib(type=bool)
+    msc2716_redactions: bool
 
 
 class RoomVersions:
diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py
index 510b647c63..949d7dd5ac 100644
--- a/synapse/config/emailconfig.py
+++ b/synapse/config/emailconfig.py
@@ -55,19 +55,19 @@ https://matrix-org.github.io/synapse/latest/templates.html
 ---------------------------------------------------------------------------------------"""
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class EmailSubjectConfig:
-    message_from_person_in_room = attr.ib(type=str)
-    message_from_person = attr.ib(type=str)
-    messages_from_person = attr.ib(type=str)
-    messages_in_room = attr.ib(type=str)
-    messages_in_room_and_others = attr.ib(type=str)
-    messages_from_person_and_others = attr.ib(type=str)
-    invite_from_person = attr.ib(type=str)
-    invite_from_person_to_room = attr.ib(type=str)
-    invite_from_person_to_space = attr.ib(type=str)
-    password_reset = attr.ib(type=str)
-    email_validation = attr.ib(type=str)
+    message_from_person_in_room: str
+    message_from_person: str
+    messages_from_person: str
+    messages_in_room: str
+    messages_in_room_and_others: str
+    messages_from_person_and_others: str
+    invite_from_person: str
+    invite_from_person_to_room: str
+    invite_from_person_to_space: str
+    password_reset: str
+    email_validation: str
 
 
 class EmailConfig(Config):
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 1de2dea9b0..2c2b461cac 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -200,8 +200,8 @@ class HttpListenerConfig:
     """Object describing the http-specific parts of the config of a listener"""
 
     x_forwarded: bool = False
-    resources: List[HttpResourceConfig] = attr.ib(factory=list)
-    additional_resources: Dict[str, dict] = attr.ib(factory=dict)
+    resources: List[HttpResourceConfig] = attr.Factory(list)
+    additional_resources: Dict[str, dict] = attr.Factory(dict)
     tag: Optional[str] = None
 
 
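
In the hunk above, defaults that build a fresh mutable container switch from `attr.ib(factory=list)` to `attr.Factory(list)` on the right-hand side of the annotation, which is the equivalent spelling under `auto_attribs`. A small illustrative sketch (the `ListenerSketch` class is hypothetical, not Synapse's config class):

    import attr
    from typing import Dict, List

    @attr.s(slots=True, auto_attribs=True)
    class ListenerSketch:
        tag: str = "main"
        # attr.Factory(list) gives each instance its own list, just as factory=list did.
        resources: List[str] = attr.Factory(list)
        additional: Dict[str, str] = attr.Factory(dict)

    a, b = ListenerSketch(), ListenerSketch()
    a.resources.append("media")
    assert b.resources == []
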
diff --git a/synapse/config/workers.py b/synapse/config/workers.py
index 576f519188..bdaba6db37 100644
--- a/synapse/config/workers.py
+++ b/synapse/config/workers.py
@@ -51,12 +51,12 @@ def _instance_to_list_converter(obj: Union[str, List[str]]) -> List[str]:
     return obj
 
 
-@attr.s
+@attr.s(auto_attribs=True)
 class InstanceLocationConfig:
     """The host and port to talk to an instance via HTTP replication."""
 
-    host = attr.ib(type=str)
-    port = attr.ib(type=int)
+    host: str
+    port: int
 
 
 @attr.s
@@ -77,34 +77,28 @@ class WriterLocations:
             can only be a single instance.
     """
 
-    events = attr.ib(
+    events: List[str] = attr.ib(
         default=["master"],
-        type=List[str],
         converter=_instance_to_list_converter,
     )
-    typing = attr.ib(
+    typing: List[str] = attr.ib(
         default=["master"],
-        type=List[str],
         converter=_instance_to_list_converter,
     )
-    to_device = attr.ib(
+    to_device: List[str] = attr.ib(
         default=["master"],
-        type=List[str],
         converter=_instance_to_list_converter,
     )
-    account_data = attr.ib(
+    account_data: List[str] = attr.ib(
         default=["master"],
-        type=List[str],
         converter=_instance_to_list_converter,
     )
-    receipts = attr.ib(
+    receipts: List[str] = attr.ib(
         default=["master"],
-        type=List[str],
         converter=_instance_to_list_converter,
     )
-    presence = attr.ib(
+    presence: List[str] = attr.ib(
         default=["master"],
-        type=List[str],
         converter=_instance_to_list_converter,
     )
 
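
Fields that need per-field options, such as the `converter` above, keep an explicit `attr.ib()` on the right-hand side; only the type moves into the annotation. A hedged sketch of the same shape (`WritersSketch` and `_to_list` are illustrative stand-ins for the classes in this file):

    import attr
    from typing import List, Union

    def _to_list(obj: Union[str, List[str]]) -> List[str]:
        # Mirrors _instance_to_list_converter: accept a single name or a list of names.
        return [obj] if isinstance(obj, str) else obj

    @attr.s(auto_attribs=True)
    class WritersSketch:
        # The annotation supplies List[str]; attr.ib() still attaches the converter
        # and the default.
        events: List[str] = attr.ib(default=["master"], converter=_to_list)

    assert WritersSketch("worker1").events == ["worker1"]
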
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index 993b04099e..72d4a69aac 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -58,7 +58,7 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
 
 
-@attr.s(slots=True, cmp=False)
+@attr.s(slots=True, frozen=True, cmp=False, auto_attribs=True)
 class VerifyJsonRequest:
     """
     A request to verify a JSON object.
@@ -78,10 +78,10 @@ class VerifyJsonRequest:
         key_ids: The set of key_ids that could be used to verify the JSON object
     """
 
-    server_name = attr.ib(type=str)
-    get_json_object = attr.ib(type=Callable[[], JsonDict])
-    minimum_valid_until_ts = attr.ib(type=int)
-    key_ids = attr.ib(type=List[str])
+    server_name: str
+    get_json_object: Callable[[], JsonDict]
+    minimum_valid_until_ts: int
+    key_ids: List[str]
 
     @staticmethod
     def from_json_object(
@@ -124,7 +124,7 @@ class KeyLookupError(ValueError):
     pass
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class _FetchKeyRequest:
     """A request for keys for a given server.
 
@@ -138,9 +138,9 @@ class _FetchKeyRequest:
         key_ids: The IDs of the keys to attempt to fetch
     """
 
-    server_name = attr.ib(type=str)
-    minimum_valid_until_ts = attr.ib(type=int)
-    key_ids = attr.ib(type=List[str])
+    server_name: str
+    minimum_valid_until_ts: int
+    key_ids: List[str]
 
 
 class Keyring:
diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py
index f251402ed8..0eab1aefd6 100644
--- a/synapse/events/snapshot.py
+++ b/synapse/events/snapshot.py
@@ -28,7 +28,7 @@ if TYPE_CHECKING:
     from synapse.storage.databases.main import DataStore
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class EventContext:
     """
     Holds information relevant to persisting an event
@@ -103,15 +103,15 @@ class EventContext:
             accessed via get_prev_state_ids.
     """
 
-    rejected = attr.ib(default=False, type=Union[bool, str])
-    _state_group = attr.ib(default=None, type=Optional[int])
-    state_group_before_event = attr.ib(default=None, type=Optional[int])
-    prev_group = attr.ib(default=None, type=Optional[int])
-    delta_ids = attr.ib(default=None, type=Optional[StateMap[str]])
-    app_service = attr.ib(default=None, type=Optional[ApplicationService])
+    rejected: Union[bool, str] = False
+    _state_group: Optional[int] = None
+    state_group_before_event: Optional[int] = None
+    prev_group: Optional[int] = None
+    delta_ids: Optional[StateMap[str]] = None
+    app_service: Optional[ApplicationService] = None
 
-    _current_state_ids = attr.ib(default=None, type=Optional[StateMap[str]])
-    _prev_state_ids = attr.ib(default=None, type=Optional[StateMap[str]])
+    _current_state_ids: Optional[StateMap[str]] = None
+    _prev_state_ids: Optional[StateMap[str]] = None
 
     @staticmethod
     def with_state(
diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py
index 391b30fbb5..8152e80b88 100644
--- a/synapse/federation/sender/per_destination_queue.py
+++ b/synapse/federation/sender/per_destination_queue.py
@@ -607,18 +607,18 @@ class PerDestinationQueue:
         self._pending_pdus = []
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _TransactionQueueManager:
     """A helper async context manager for pulling stuff off the queues and
     tracking what was last successfully sent, etc.
     """
 
-    queue = attr.ib(type=PerDestinationQueue)
+    queue: PerDestinationQueue
 
-    _device_stream_id = attr.ib(type=Optional[int], default=None)
-    _device_list_id = attr.ib(type=Optional[int], default=None)
-    _last_stream_ordering = attr.ib(type=Optional[int], default=None)
-    _pdus = attr.ib(type=List[EventBase], factory=list)
+    _device_stream_id: Optional[int] = None
+    _device_list_id: Optional[int] = None
+    _last_stream_ordering: Optional[int] = None
+    _pdus: List[EventBase] = attr.Factory(list)
 
     async def __aenter__(self) -> Tuple[List[EventBase], List[Edu]]:
         # First we calculate the EDUs we want to send, if any.
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 84724b207c..2389c9ac52 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -168,25 +168,25 @@ def login_id_phone_to_thirdparty(identifier: JsonDict) -> Dict[str, str]:
     }
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class SsoLoginExtraAttributes:
     """Data we track about SAML2 sessions"""
 
     # time the session was created, in milliseconds
-    creation_time = attr.ib(type=int)
-    extra_attributes = attr.ib(type=JsonDict)
+    creation_time: int
+    extra_attributes: JsonDict
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class LoginTokenAttributes:
     """Data we store in a short-term login token"""
 
-    user_id = attr.ib(type=str)
+    user_id: str
 
-    auth_provider_id = attr.ib(type=str)
+    auth_provider_id: str
     """The SSO Identity Provider that the user authenticated with, to get this token."""
 
-    auth_provider_session_id = attr.ib(type=Optional[str])
+    auth_provider_session_id: Optional[str]
     """The session ID advertised by the SSO Identity Provider."""
 
 
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index 14360b4e40..d4dfddf63f 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -1321,14 +1321,14 @@ def _one_time_keys_match(old_key_json: str, new_key: JsonDict) -> bool:
     return old_key == new_key_copy
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class SignatureListItem:
     """An item in the signature list as used by upload_signatures_for_device_keys."""
 
-    signing_key_id = attr.ib(type=str)
-    target_user_id = attr.ib(type=str)
-    target_device_id = attr.ib(type=str)
-    signature = attr.ib(type=JsonDict)
+    signing_key_id: str
+    target_user_id: str
+    target_device_id: str
+    signature: JsonDict
 
 
 class SigningKeyEduUpdater:
diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py
index 65c27bc64a..0bb8b0929e 100644
--- a/synapse/handlers/sso.py
+++ b/synapse/handlers/sso.py
@@ -126,45 +126,45 @@ class SsoIdentityProvider(Protocol):
         raise NotImplementedError()
 
 
-@attr.s
+@attr.s(auto_attribs=True)
 class UserAttributes:
     # the localpart of the mxid that the mapper has assigned to the user.
     # if `None`, the mapper has not picked a userid, and the user should be prompted to
     # enter one.
-    localpart = attr.ib(type=Optional[str])
-    display_name = attr.ib(type=Optional[str], default=None)
-    emails = attr.ib(type=Collection[str], default=attr.Factory(list))
+    localpart: Optional[str]
+    display_name: Optional[str] = None
+    emails: Collection[str] = attr.Factory(list)
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class UsernameMappingSession:
     """Data we track about SSO sessions"""
 
     # A unique identifier for this SSO provider, e.g.  "oidc" or "saml".
-    auth_provider_id = attr.ib(type=str)
+    auth_provider_id: str
 
     # user ID on the IdP server
-    remote_user_id = attr.ib(type=str)
+    remote_user_id: str
 
     # attributes returned by the ID mapper
-    display_name = attr.ib(type=Optional[str])
-    emails = attr.ib(type=Collection[str])
+    display_name: Optional[str]
+    emails: Collection[str]
 
     # An optional dictionary of extra attributes to be provided to the client in the
     # login response.
-    extra_login_attributes = attr.ib(type=Optional[JsonDict])
+    extra_login_attributes: Optional[JsonDict]
 
     # where to redirect the client back to
-    client_redirect_url = attr.ib(type=str)
+    client_redirect_url: str
 
     # expiry time for the session, in milliseconds
-    expiry_time_ms = attr.ib(type=int)
+    expiry_time_ms: int
 
     # choices made by the user
-    chosen_localpart = attr.ib(type=Optional[str], default=None)
-    use_display_name = attr.ib(type=bool, default=True)
-    emails_to_use = attr.ib(type=Collection[str], default=())
-    terms_accepted_version = attr.ib(type=Optional[str], default=None)
+    chosen_localpart: Optional[str] = None
+    use_display_name: bool = True
+    emails_to_use: Collection[str] = ()
+    terms_accepted_version: Optional[str] = None
 
 
 # the HTTP cookie used to track the mapping session id
diff --git a/synapse/http/connectproxyclient.py b/synapse/http/connectproxyclient.py
index fbafffd69b..203e995bb7 100644
--- a/synapse/http/connectproxyclient.py
+++ b/synapse/http/connectproxyclient.py
@@ -32,9 +32,9 @@ class ProxyConnectError(ConnectError):
     pass
 
 
-@attr.s
+@attr.s(auto_attribs=True)
 class ProxyCredentials:
-    username_password = attr.ib(type=bytes)
+    username_password: bytes
 
     def as_proxy_authorization_value(self) -> bytes:
         """
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index deedde0b5b..2e668363b2 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -123,37 +123,37 @@ class ByteParser(ByteWriteable, Generic[T], abc.ABC):
         pass
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class MatrixFederationRequest:
-    method = attr.ib(type=str)
+    method: str
     """HTTP method
     """
 
-    path = attr.ib(type=str)
+    path: str
     """HTTP path
     """
 
-    destination = attr.ib(type=str)
+    destination: str
     """The remote server to send the HTTP request to.
     """
 
-    json = attr.ib(default=None, type=Optional[JsonDict])
+    json: Optional[JsonDict] = None
     """JSON to send in the body.
     """
 
-    json_callback = attr.ib(default=None, type=Optional[Callable[[], JsonDict]])
+    json_callback: Optional[Callable[[], JsonDict]] = None
     """A callback to generate the JSON.
     """
 
-    query = attr.ib(default=None, type=Optional[dict])
+    query: Optional[dict] = None
     """Query arguments.
     """
 
-    txn_id = attr.ib(default=None, type=Optional[str])
+    txn_id: Optional[str] = None
     """Unique ID for this request (for logging)
     """
 
-    uri = attr.ib(init=False, type=bytes)
+    uri: bytes = attr.ib(init=False)
     """The URI of this request
     """
 
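
`MatrixFederationRequest.uri` above keeps `attr.ib(init=False)` so the field stays out of the generated `__init__` while the annotation still records its type. A minimal sketch of that combination (the `RequestSketch` class and its fields are hypothetical, not Synapse's actual logic):

    import attr

    @attr.s(slots=True, frozen=True, auto_attribs=True)
    class RequestSketch:
        method: str
        path: str
        # Not a constructor argument; here it simply takes its default value.
        attempts: int = attr.ib(init=False, default=0)

    r = RequestSketch("GET", "/version")
    assert r.attempts == 0
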
diff --git a/synapse/http/site.py b/synapse/http/site.py
index 80f7a2ff58..c180a1d323 100644
--- a/synapse/http/site.py
+++ b/synapse/http/site.py
@@ -534,9 +534,9 @@ class XForwardedForRequest(SynapseRequest):
 
 
 @implementer(IAddress)
-@attr.s(frozen=True, slots=True)
+@attr.s(frozen=True, slots=True, auto_attribs=True)
 class _XForwardedForAddress:
-    host = attr.ib(type=str)
+    host: str
 
 
 class SynapseSite(Site):
diff --git a/synapse/logging/_remote.py b/synapse/logging/_remote.py
index 8202d0494d..475756f1db 100644
--- a/synapse/logging/_remote.py
+++ b/synapse/logging/_remote.py
@@ -39,7 +39,7 @@ from twisted.python.failure import Failure
 logger = logging.getLogger(__name__)
 
 
-@attr.s
+@attr.s(slots=True, auto_attribs=True)
 @implementer(IPushProducer)
 class LogProducer:
     """
@@ -54,10 +54,10 @@ class LogProducer:
 
     # This is essentially ITCPTransport, but that is missing certain fields
     # (connected and registerProducer) which are part of the implementation.
-    transport = attr.ib(type=Connection)
-    _format = attr.ib(type=Callable[[logging.LogRecord], str])
-    _buffer = attr.ib(type=deque)
-    _paused = attr.ib(default=False, type=bool, init=False)
+    transport: Connection
+    _format: Callable[[logging.LogRecord], str]
+    _buffer: Deque[logging.LogRecord]
+    _paused: bool = attr.ib(default=False, init=False)
 
     def pauseProducing(self):
         self._paused = True
diff --git a/synapse/logging/context.py b/synapse/logging/context.py
index d4ee893376..c31c2960ad 100644
--- a/synapse/logging/context.py
+++ b/synapse/logging/context.py
@@ -193,7 +193,7 @@ class ContextResourceUsage:
         return res
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class ContextRequest:
     """
     A bundle of attributes from the SynapseRequest object.
@@ -205,15 +205,15 @@ class ContextRequest:
       their children.
     """
 
-    request_id = attr.ib(type=str)
-    ip_address = attr.ib(type=str)
-    site_tag = attr.ib(type=str)
-    requester = attr.ib(type=Optional[str])
-    authenticated_entity = attr.ib(type=Optional[str])
-    method = attr.ib(type=str)
-    url = attr.ib(type=str)
-    protocol = attr.ib(type=str)
-    user_agent = attr.ib(type=str)
+    request_id: str
+    ip_address: str
+    site_tag: str
+    requester: Optional[str]
+    authenticated_entity: Optional[str]
+    method: str
+    url: str
+    protocol: str
+    user_agent: str
 
 
 LoggingContextOrSentinel = Union["LoggingContext", "_Sentinel"]
diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py
index 622445e9f4..5672d60de3 100644
--- a/synapse/logging/opentracing.py
+++ b/synapse/logging/opentracing.py
@@ -251,7 +251,7 @@ try:
     class _WrappedRustReporter(BaseReporter):
         """Wrap the reporter to ensure `report_span` never throws."""
 
-        _reporter = attr.ib(type=Reporter, default=attr.Factory(Reporter))
+        _reporter: Reporter = attr.Factory(Reporter)
 
         def set_process(self, *args, **kwargs):
             return self._reporter.set_process(*args, **kwargs)
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index ceef57ad88..269c2b989e 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -76,19 +76,17 @@ class RegistryProxy:
                 yield metric
 
 
-@attr.s(slots=True, hash=True)
+@attr.s(slots=True, hash=True, auto_attribs=True)
 class LaterGauge:
 
-    name = attr.ib(type=str)
-    desc = attr.ib(type=str)
-    labels = attr.ib(hash=False, type=Optional[Iterable[str]])
+    name: str
+    desc: str
+    labels: Optional[Iterable[str]] = attr.ib(hash=False)
     # callback: should either return a value (if there are no labels for this metric),
     # or dict mapping from a label tuple to a value
-    caller = attr.ib(
-        type=Callable[
-            [], Union[Mapping[Tuple[str, ...], Union[int, float]], Union[int, float]]
-        ]
-    )
+    caller: Callable[
+        [], Union[Mapping[Tuple[str, ...], Union[int, float]], Union[int, float]]
+    ]
 
     def collect(self) -> Iterable[Metric]:
 
@@ -157,7 +155,9 @@ class InFlightGauge(Generic[MetricsEntry]):
         # Create a class which have the sub_metrics values as attributes, which
         # default to 0 on initialization. Used to pass to registered callbacks.
         self._metrics_class: Type[MetricsEntry] = attr.make_class(
-            "_MetricsEntry", attrs={x: attr.ib(0) for x in sub_metrics}, slots=True
+            "_MetricsEntry",
+            attrs={x: attr.ib(default=0) for x in sub_metrics},
+            slots=True,
         )
 
         # Counts number of in flight blocks for a given set of label values
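
`LaterGauge.labels` above keeps an `attr.ib(hash=False)` so that field stays out of the generated `__hash__` even though its type now lives in the annotation; the second hunk also spells the `attr.make_class` default as `default=0` instead of relying on the positional argument. A sketch of the per-field `hash=False` option (the `GaugeSketch` class is illustrative):

    import attr
    from typing import Optional, Tuple

    @attr.s(slots=True, hash=True, auto_attribs=True)
    class GaugeSketch:
        name: str
        # Excluded from __hash__, so only `name` contributes to the hash.
        labels: Optional[Tuple[str, ...]] = attr.ib(hash=False, default=None)

    assert hash(GaugeSketch("g", ("a",))) == hash(GaugeSketch("g", ("b",)))
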
diff --git a/synapse/notifier.py b/synapse/notifier.py
index bbabdb0587..41fd94d772 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -193,15 +193,15 @@ class EventStreamResult:
         return bool(self.events)
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class _PendingRoomEventEntry:
-    event_pos = attr.ib(type=PersistedEventPosition)
-    extra_users = attr.ib(type=Collection[UserID])
+    event_pos: PersistedEventPosition
+    extra_users: Collection[UserID]
 
-    room_id = attr.ib(type=str)
-    type = attr.ib(type=str)
-    state_key = attr.ib(type=Optional[str])
-    membership = attr.ib(type=Optional[str])
+    room_id: str
+    type: str
+    state_key: Optional[str]
+    membership: Optional[str]
 
 
 class Notifier:
diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py
index 820f6f3f7e..5176a1c186 100644
--- a/synapse/push/__init__.py
+++ b/synapse/push/__init__.py
@@ -23,25 +23,25 @@ if TYPE_CHECKING:
     from synapse.server import HomeServer
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class PusherConfig:
     """Parameters necessary to configure a pusher."""
 
-    id = attr.ib(type=Optional[str])
-    user_name = attr.ib(type=str)
-    access_token = attr.ib(type=Optional[int])
-    profile_tag = attr.ib(type=str)
-    kind = attr.ib(type=str)
-    app_id = attr.ib(type=str)
-    app_display_name = attr.ib(type=str)
-    device_display_name = attr.ib(type=str)
-    pushkey = attr.ib(type=str)
-    ts = attr.ib(type=int)
-    lang = attr.ib(type=Optional[str])
-    data = attr.ib(type=Optional[JsonDict])
-    last_stream_ordering = attr.ib(type=int)
-    last_success = attr.ib(type=Optional[int])
-    failing_since = attr.ib(type=Optional[int])
+    id: Optional[str]
+    user_name: str
+    access_token: Optional[int]
+    profile_tag: str
+    kind: str
+    app_id: str
+    app_display_name: str
+    device_display_name: str
+    pushkey: str
+    ts: int
+    lang: Optional[str]
+    data: Optional[JsonDict]
+    last_stream_ordering: int
+    last_success: Optional[int]
+    failing_since: Optional[int]
 
     def as_dict(self) -> Dict[str, Any]:
         """Information that can be retrieved about a pusher after creation."""
@@ -57,12 +57,12 @@ class PusherConfig:
         }
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class ThrottleParams:
     """Parameters for controlling the rate of sending pushes via email."""
 
-    last_sent_ts = attr.ib(type=int)
-    throttle_ms = attr.ib(type=int)
+    last_sent_ts: int
+    throttle_ms: int
 
 
 class Pusher(metaclass=abc.ABCMeta):
diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py
index 009d8e77b0..bee660893b 100644
--- a/synapse/push/bulk_push_rule_evaluator.py
+++ b/synapse/push/bulk_push_rule_evaluator.py
@@ -298,7 +298,7 @@ RulesByUser = Dict[str, List[Rule]]
 StateGroup = Union[object, int]
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class RulesForRoomData:
     """The data stored in the cache by `RulesForRoom`.
 
@@ -307,29 +307,29 @@ class RulesForRoomData:
     """
 
     # event_id -> (user_id, state)
-    member_map = attr.ib(type=MemberMap, factory=dict)
+    member_map: MemberMap = attr.Factory(dict)
     # user_id -> rules
-    rules_by_user = attr.ib(type=RulesByUser, factory=dict)
+    rules_by_user: RulesByUser = attr.Factory(dict)
 
     # The last state group we updated the caches for. If the state_group of
     # a new event comes along, we know that we can just return the cached
     # result.
     # On invalidation of the rules themselves (if the user changes them),
     # we invalidate everything and set state_group to `object()`
-    state_group = attr.ib(type=StateGroup, factory=object)
+    state_group: StateGroup = attr.Factory(object)
 
     # A sequence number to keep track of when we're allowed to update the
     # cache. We bump the sequence number when we invalidate the cache. If
     # the sequence number changes while we're calculating stuff we should
     # not update the cache with it.
-    sequence = attr.ib(type=int, default=0)
+    sequence: int = 0
 
     # A cache of user_ids that we *know* aren't interesting, e.g. user_ids
     # owned by AS's, or remote users, etc. (I.e. users we will never need to
     # calculate push for)
     # These never need to be invalidated as we will never set up push for
     # them.
-    uninteresting_user_set = attr.ib(type=Set[str], factory=set)
+    uninteresting_user_set: Set[str] = attr.Factory(set)
 
 
 class RulesForRoom:
@@ -553,7 +553,7 @@ class RulesForRoom:
             self.data.state_group = state_group
 
 
-@attr.attrs(slots=True, frozen=True)
+@attr.attrs(slots=True, frozen=True, auto_attribs=True)
 class _Invalidation:
     # _Invalidation is passed as an `on_invalidate` callback to bulk_get_push_rules,
     # which means that it is stored on the bulk_get_push_rules cache entry. In order
@@ -564,8 +564,8 @@ class _Invalidation:
     # attrs provides suitable __hash__ and __eq__ methods, provided we remember to
     # set `frozen=True`.
 
-    cache = attr.ib(type=LruCache)
-    room_id = attr.ib(type=str)
+    cache: LruCache
+    room_id: str
 
     def __call__(self) -> None:
         rules_data = self.cache.get(self.room_id, None, update_metrics=False)
diff --git a/synapse/replication/tcp/streams/events.py b/synapse/replication/tcp/streams/events.py
index a390cfcb74..4f4f1ad453 100644
--- a/synapse/replication/tcp/streams/events.py
+++ b/synapse/replication/tcp/streams/events.py
@@ -50,12 +50,12 @@ data part are:
 """
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class EventsStreamRow:
     """A parsed row from the events replication stream"""
 
-    type = attr.ib()  # str: the TypeId of one of the *EventsStreamRows
-    data = attr.ib()  # BaseEventsStreamRow
+    type: str  # the TypeId of one of the *EventsStreamRows
+    data: "BaseEventsStreamRow"
 
 
 class BaseEventsStreamRow:
@@ -79,28 +79,28 @@ class BaseEventsStreamRow:
         return cls(*data)
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class EventsStreamEventRow(BaseEventsStreamRow):
     TypeId = "ev"
 
-    event_id = attr.ib(type=str)
-    room_id = attr.ib(type=str)
-    type = attr.ib(type=str)
-    state_key = attr.ib(type=Optional[str])
-    redacts = attr.ib(type=Optional[str])
-    relates_to = attr.ib(type=Optional[str])
-    membership = attr.ib(type=Optional[str])
-    rejected = attr.ib(type=bool)
+    event_id: str
+    room_id: str
+    type: str
+    state_key: Optional[str]
+    redacts: Optional[str]
+    relates_to: Optional[str]
+    membership: Optional[str]
+    rejected: bool
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class EventsStreamCurrentStateRow(BaseEventsStreamRow):
     TypeId = "state"
 
-    room_id = attr.ib()  # str
-    type = attr.ib()  # str
-    state_key = attr.ib()  # str
-    event_id = attr.ib()  # str, optional
+    room_id: str
+    type: str
+    state_key: str
+    event_id: Optional[str]
 
 
 _EventRows: Tuple[Type[BaseEventsStreamRow], ...] = (
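
`EventsStreamRow.data` above is annotated with the string `"BaseEventsStreamRow"` because that class is defined later in the module; `auto_attribs` accepts such forward references without needing to resolve them. A small illustrative sketch (`RowSketch` and `PayloadSketch` are hypothetical names):

    import attr

    @attr.s(slots=True, frozen=True, auto_attribs=True)
    class RowSketch:
        type: str
        # A string forward reference to a class defined further down the module.
        data: "PayloadSketch"

    @attr.s(slots=True, frozen=True, auto_attribs=True)
    class PayloadSketch:
        value: int

    assert RowSketch("ev", PayloadSketch(1)).data.value == 1
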
diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py
index fca239d8c7..9f6c251caf 100644
--- a/synapse/rest/media/v1/media_storage.py
+++ b/synapse/rest/media/v1/media_storage.py
@@ -343,7 +343,7 @@ class SpamMediaException(NotFoundError):
     """
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class ReadableFileWrapper:
     """Wrapper that allows reading a file in chunks, yielding to the reactor,
     and writing to a callback.
@@ -354,8 +354,8 @@ class ReadableFileWrapper:
 
     CHUNK_SIZE = 2 ** 14
 
-    clock = attr.ib(type=Clock)
-    path = attr.ib(type=str)
+    clock: Clock
+    path: str
 
     async def write_chunks_to(self, callback: Callable[[bytes], None]) -> None:
         """Reads the file in chunks and calls the callback with each chunk."""
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index 69ac8c3423..923e31587e 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -450,19 +450,19 @@ class StateHandler:
         return {key: state_map[ev_id] for key, ev_id in new_state.items()}
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _StateResMetrics:
     """Keeps track of some usage metrics about state res."""
 
     # System and User CPU time, in seconds
-    cpu_time = attr.ib(type=float, default=0.0)
+    cpu_time: float = 0.0
 
     # time spent on database transactions (excluding scheduling time). This roughly
     # corresponds to the amount of work done on the db server, excluding event fetches.
-    db_time = attr.ib(type=float, default=0.0)
+    db_time: float = 0.0
 
     # number of events fetched from the db.
-    db_events = attr.ib(type=int, default=0)
+    db_events: int = 0
 
 
 _biggest_room_by_cpu_counter = Counter(
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index 2cacc7dd6c..a27cc3605c 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -143,7 +143,7 @@ def make_conn(
     return db_conn
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class LoggingDatabaseConnection:
     """A wrapper around a database connection that returns `LoggingTransaction`
     as its cursor class.
@@ -151,9 +151,9 @@ class LoggingDatabaseConnection:
     This is mainly used on startup to ensure that queries get logged correctly
     """
 
-    conn = attr.ib(type=Connection)
-    engine = attr.ib(type=BaseDatabaseEngine)
-    default_txn_name = attr.ib(type=str)
+    conn: Connection
+    engine: BaseDatabaseEngine
+    default_txn_name: str
 
     def cursor(
         self, *, txn_name=None, after_callbacks=None, exception_callbacks=None
diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py
index 57b5ffbad3..86cab97563 100644
--- a/synapse/storage/databases/main/end_to_end_keys.py
+++ b/synapse/storage/databases/main/end_to_end_keys.py
@@ -50,16 +50,16 @@ if TYPE_CHECKING:
     from synapse.server import HomeServer
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class DeviceKeyLookupResult:
     """The type returned by get_e2e_device_keys_and_signatures"""
 
-    display_name = attr.ib(type=Optional[str])
+    display_name: Optional[str]
 
     # the key data from e2e_device_keys_json. Typically includes fields like
     # "algorithm", "keys" (including the curve25519 identity key and the ed25519 signing
     # key) and "signatures" (a map from (user id) to (key id/device_id) to signature.)
-    keys = attr.ib(type=Optional[JsonDict])
+    keys: Optional[JsonDict]
 
 
 class EndToEndKeyBackgroundStore(SQLBaseStore):
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index dd255aefb9..cce2305597 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -69,7 +69,7 @@ event_counter = Counter(
 )
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class DeltaState:
     """Deltas to use to update the `current_state_events` table.
 
@@ -80,9 +80,9 @@ class DeltaState:
             should e.g. be removed from `current_state_events` table.
     """
 
-    to_delete = attr.ib(type=List[Tuple[str, str]])
-    to_insert = attr.ib(type=StateMap[str])
-    no_longer_in_room = attr.ib(type=bool, default=False)
+    to_delete: List[Tuple[str, str]]
+    to_insert: StateMap[str]
+    no_longer_in_room: bool = False
 
 
 class PersistEventsStore:
@@ -2226,17 +2226,17 @@ class PersistEventsStore:
         )
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _LinkMap:
     """A helper type for tracking links between chains."""
 
     # Stores the set of links as nested maps: source chain ID -> target chain ID
     # -> source sequence number -> target sequence number.
-    maps = attr.ib(type=Dict[int, Dict[int, Dict[int, int]]], factory=dict)
+    maps: Dict[int, Dict[int, Dict[int, int]]] = attr.Factory(dict)
 
     # Stores the links that have been added (with new set to true), as tuples of
     # `(source chain ID, source sequence no, target chain ID, target sequence no.)`
-    additions = attr.ib(type=Set[Tuple[int, int, int, int]], factory=set)
+    additions: Set[Tuple[int, int, int, int]] = attr.Factory(set)
 
     def add_link(
         self,
diff --git a/synapse/storage/databases/main/events_bg_updates.py b/synapse/storage/databases/main/events_bg_updates.py
index a68f14ba48..0a96664caf 100644
--- a/synapse/storage/databases/main/events_bg_updates.py
+++ b/synapse/storage/databases/main/events_bg_updates.py
@@ -65,22 +65,22 @@ class _BackgroundUpdates:
     REPLACE_STREAM_ORDERING_COLUMN = "replace_stream_ordering_column"
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class _CalculateChainCover:
     """Return value for _calculate_chain_cover_txn."""
 
     # The last room_id/depth/stream processed.
-    room_id = attr.ib(type=str)
-    depth = attr.ib(type=int)
-    stream = attr.ib(type=int)
+    room_id: str
+    depth: int
+    stream: int
 
     # Number of rows processed
-    processed_count = attr.ib(type=int)
+    processed_count: int
 
     # Map from room_id to last depth/stream processed for each room that we have
     # processed all events for (i.e. the rooms we can flip the
     # `has_auth_chain_index` for)
-    finished_room_map = attr.ib(type=Dict[str, Tuple[int, int]])
+    finished_room_map: Dict[str, Tuple[int, int]]
 
 
 class EventsBackgroundUpdatesStore(SQLBaseStore):
diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py
index 4175c82a25..aac94fa464 100644
--- a/synapse/storage/databases/main/registration.py
+++ b/synapse/storage/databases/main/registration.py
@@ -51,7 +51,7 @@ class ExternalIDReuseException(Exception):
     pass
 
 
-@attr.s(frozen=True, slots=True)
+@attr.s(frozen=True, slots=True, auto_attribs=True)
 class TokenLookupResult:
     """Result of looking up an access token.
 
@@ -69,14 +69,14 @@ class TokenLookupResult:
             cached.
     """
 
-    user_id = attr.ib(type=str)
-    is_guest = attr.ib(type=bool, default=False)
-    shadow_banned = attr.ib(type=bool, default=False)
-    token_id = attr.ib(type=Optional[int], default=None)
-    device_id = attr.ib(type=Optional[str], default=None)
-    valid_until_ms = attr.ib(type=Optional[int], default=None)
-    token_owner = attr.ib(type=str)
-    token_used = attr.ib(type=bool, default=False)
+    user_id: str
+    is_guest: bool = False
+    shadow_banned: bool = False
+    token_id: Optional[int] = None
+    device_id: Optional[str] = None
+    valid_until_ms: Optional[int] = None
+    token_owner: str = attr.ib()
+    token_used: bool = False
 
     # Make the token owner default to the user ID, which is the common case.
     @token_owner.default
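
`token_owner` above keeps a bare `attr.ib()` because its default is supplied by the `@token_owner.default` decorator, which needs a field object to attach to. A hedged sketch of that pattern (`TokenSketch` is a hypothetical stand-in for `TokenLookupResult`):

    import attr

    @attr.s(frozen=True, slots=True, auto_attribs=True)
    class TokenSketch:
        user_id: str
        owner: str = attr.ib()

        @owner.default
        def _default_owner(self) -> str:
            # Default the owner to the user ID, the common case noted in the comment above.
            return self.user_id

    assert TokenSketch("@alice:example.com").owner == "@alice:example.com"
    assert TokenSketch("@alice:example.com", owner="@bob:example.com").owner == "@bob:example.com"
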
diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py
index cda80d6511..4489732fda 100644
--- a/synapse/storage/databases/main/roommember.py
+++ b/synapse/storage/databases/main/roommember.py
@@ -1177,18 +1177,18 @@ class RoomMemberStore(RoomMemberWorkerStore, RoomMemberBackgroundUpdateStore):
         await self.db_pool.runInteraction("forget_membership", f)
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _JoinedHostsCache:
     """The cached data used by the `_get_joined_hosts_cache`."""
 
     # Dict of host to the set of their users in the room at the state group.
-    hosts_to_joined_users = attr.ib(type=Dict[str, Set[str]], factory=dict)
+    hosts_to_joined_users: Dict[str, Set[str]] = attr.Factory(dict)
 
     # The state group `hosts_to_joined_users` is derived from. Will be an object
     # if the instance is newly created or if the state is not based on a state
     # group. (An object is used as a sentinel value to ensure that it never is
     # equal to anything else).
-    state_group = attr.ib(type=Union[object, int], factory=object)
+    state_group: Union[object, int] = attr.Factory(object)
 
     def __len__(self):
         return sum(len(v) for v in self.hosts_to_joined_users.values())
diff --git a/synapse/storage/databases/main/ui_auth.py b/synapse/storage/databases/main/ui_auth.py
index a1a1a6a14a..2d339b6008 100644
--- a/synapse/storage/databases/main/ui_auth.py
+++ b/synapse/storage/databases/main/ui_auth.py
@@ -23,19 +23,19 @@ from synapse.types import JsonDict
 from synapse.util import json_encoder, stringutils
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class UIAuthSessionData:
-    session_id = attr.ib(type=str)
+    session_id: str
     # The dictionary from the client root level, not the 'auth' key.
-    clientdict = attr.ib(type=JsonDict)
+    clientdict: JsonDict
     # The URI and method the session was initiated with. These are checked at
     # each stage of the authentication to ensure that the asked for operation
     # has not changed.
-    uri = attr.ib(type=str)
-    method = attr.ib(type=str)
+    uri: str
+    method: str
     # A string description of the operation that the current authentication is
     # authorising.
-    description = attr.ib(type=str)
+    description: str
 
 
 class UIAuthWorkerStore(SQLBaseStore):
diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py
index 540adb8781..71584f3f74 100644
--- a/synapse/storage/keys.py
+++ b/synapse/storage/keys.py
@@ -21,7 +21,7 @@ from signedjson.types import VerifyKey
 logger = logging.getLogger(__name__)
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class FetchKeyResult:
-    verify_key = attr.ib(type=VerifyKey)  # the key itself
-    valid_until_ts = attr.ib(type=int)  # how long we can use this key for
+    verify_key: VerifyKey  # the key itself
+    valid_until_ts: int  # how long we can use this key for
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index e45adfcb55..1823e18720 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -696,7 +696,7 @@ def _get_or_create_schema_state(
     )
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _DirectoryListing:
     """Helper class to store schema file name and the
     absolute path to it.
@@ -705,5 +705,5 @@ class _DirectoryListing:
     `file_name` attr is kept first.
     """
 
-    file_name = attr.ib(type=str)
-    absolute_path = attr.ib(type=str)
+    file_name: str
+    absolute_path: str
diff --git a/synapse/storage/relations.py b/synapse/storage/relations.py
index 10a46b5e82..b1536c1ca4 100644
--- a/synapse/storage/relations.py
+++ b/synapse/storage/relations.py
@@ -23,7 +23,7 @@ from synapse.types import JsonDict
 logger = logging.getLogger(__name__)
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class PaginationChunk:
     """Returned by relation pagination APIs.
 
@@ -35,9 +35,9 @@ class PaginationChunk:
             None then there are no previous results.
     """
 
-    chunk = attr.ib(type=List[JsonDict])
-    next_batch = attr.ib(type=Optional[Any], default=None)
-    prev_batch = attr.ib(type=Optional[Any], default=None)
+    chunk: List[JsonDict]
+    next_batch: Optional[Any] = None
+    prev_batch: Optional[Any] = None
 
     def to_dict(self) -> Dict[str, Any]:
         d = {"chunk": self.chunk}
@@ -51,7 +51,7 @@ class PaginationChunk:
         return d
 
 
-@attr.s(frozen=True, slots=True)
+@attr.s(frozen=True, slots=True, auto_attribs=True)
 class RelationPaginationToken:
     """Pagination token for relation pagination API.
 
@@ -64,8 +64,8 @@ class RelationPaginationToken:
         stream: The stream ordering of the boundary event.
     """
 
-    topological = attr.ib(type=int)
-    stream = attr.ib(type=int)
+    topological: int
+    stream: int
 
     @staticmethod
     def from_string(string: str) -> "RelationPaginationToken":
@@ -82,7 +82,7 @@ class RelationPaginationToken:
         return attr.astuple(self)
 
 
-@attr.s(frozen=True, slots=True)
+@attr.s(frozen=True, slots=True, auto_attribs=True)
 class AggregationPaginationToken:
     """Pagination token for relation aggregation pagination API.
 
@@ -94,8 +94,8 @@ class AggregationPaginationToken:
         stream: The MAX stream ordering in the boundary group.
     """
 
-    count = attr.ib(type=int)
-    stream = attr.ib(type=int)
+    count: int
+    stream: int
 
     @staticmethod
     def from_string(string: str) -> "AggregationPaginationToken":
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index b5ba1560d1..df8b2f1088 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -45,7 +45,7 @@ logger = logging.getLogger(__name__)
 T = TypeVar("T")
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class StateFilter:
     """A filter used when querying for state.
 
@@ -58,8 +58,8 @@ class StateFilter:
             appear in `types`.
     """
 
-    types = attr.ib(type="frozendict[str, Optional[FrozenSet[str]]]")
-    include_others = attr.ib(default=False, type=bool)
+    types: "frozendict[str, Optional[FrozenSet[str]]]"
+    include_others: bool = False
 
     def __attrs_post_init__(self):
         # If `include_others` is set we canonicalise the filter by removing
diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py
index b8112e1c05..3c13859faa 100644
--- a/synapse/storage/util/id_generators.py
+++ b/synapse/storage/util/id_generators.py
@@ -762,13 +762,13 @@ class _AsyncCtxManagerWrapper(Generic[T]):
         return self.inner.__exit__(exc_type, exc, tb)
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _MultiWriterCtxManager:
     """Async context manager returned by MultiWriterIdGenerator"""
 
-    id_gen = attr.ib(type=MultiWriterIdGenerator)
-    multiple_ids = attr.ib(type=Optional[int], default=None)
-    stream_ids = attr.ib(type=List[int], factory=list)
+    id_gen: MultiWriterIdGenerator
+    multiple_ids: Optional[int] = None
+    stream_ids: List[int] = attr.Factory(list)
 
     async def __aenter__(self) -> Union[int, List[int]]:
         # It's safe to run this in autocommit mode as fetching values from a
diff --git a/synapse/streams/config.py b/synapse/streams/config.py
index c08d591f29..b52723e2b8 100644
--- a/synapse/streams/config.py
+++ b/synapse/streams/config.py
@@ -28,14 +28,14 @@ logger = logging.getLogger(__name__)
 MAX_LIMIT = 1000
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class PaginationConfig:
     """A configuration object which stores pagination parameters."""
 
-    from_token = attr.ib(type=Optional[StreamToken])
-    to_token = attr.ib(type=Optional[StreamToken])
-    direction = attr.ib(type=str)
-    limit = attr.ib(type=Optional[int])
+    from_token: Optional[StreamToken]
+    to_token: Optional[StreamToken]
+    direction: str
+    limit: Optional[int]
 
     @classmethod
     async def from_request(
diff --git a/synapse/types.py b/synapse/types.py
index 74a2c51857..f89fb216a6 100644
--- a/synapse/types.py
+++ b/synapse/types.py
@@ -20,6 +20,7 @@ from typing import (
     Any,
     ClassVar,
     Dict,
+    List,
     Mapping,
     Match,
     MutableMapping,
@@ -80,7 +81,7 @@ class ISynapseReactor(
     """The interfaces necessary for Synapse to function."""
 
 
-@attr.s(frozen=True, slots=True)
+@attr.s(frozen=True, slots=True, auto_attribs=True)
 class Requester:
     """
     Represents the user making a request
@@ -98,13 +99,13 @@ class Requester:
             "puppeting" the user.
     """
 
-    user = attr.ib(type="UserID")
-    access_token_id = attr.ib(type=Optional[int])
-    is_guest = attr.ib(type=bool)
-    shadow_banned = attr.ib(type=bool)
-    device_id = attr.ib(type=Optional[str])
-    app_service = attr.ib(type=Optional["ApplicationService"])
-    authenticated_entity = attr.ib(type=str)
+    user: "UserID"
+    access_token_id: Optional[int]
+    is_guest: bool
+    shadow_banned: bool
+    device_id: Optional[str]
+    app_service: Optional["ApplicationService"]
+    authenticated_entity: str
 
     def serialize(self):
         """Converts self to a type that can be serialized as JSON, and then
@@ -211,7 +212,7 @@ def get_localpart_from_id(string: str) -> str:
 DS = TypeVar("DS", bound="DomainSpecificString")
 
 
-@attr.s(slots=True, frozen=True, repr=False)
+@attr.s(slots=True, frozen=True, repr=False, auto_attribs=True)
 class DomainSpecificString(metaclass=abc.ABCMeta):
     """Common base class among ID/name strings that have a local part and a
     domain name, prefixed with a sigil.
@@ -224,8 +225,8 @@ class DomainSpecificString(metaclass=abc.ABCMeta):
 
     SIGIL: ClassVar[str] = abc.abstractproperty()  # type: ignore
 
-    localpart = attr.ib(type=str)
-    domain = attr.ib(type=str)
+    localpart: str
+    domain: str
 
     # Because this is a frozen class, it is deeply immutable.
     def __copy__(self):
@@ -461,14 +462,12 @@ class RoomStreamToken:
     attributes, must be hashable.
     """
 
-    topological = attr.ib(
-        type=Optional[int],
+    topological: Optional[int] = attr.ib(
         validator=attr.validators.optional(attr.validators.instance_of(int)),
     )
-    stream = attr.ib(type=int, validator=attr.validators.instance_of(int))
+    stream: int = attr.ib(validator=attr.validators.instance_of(int))
 
-    instance_map = attr.ib(
-        type="frozendict[str, int]",
+    instance_map: "frozendict[str, int]" = attr.ib(
         factory=frozendict,
         validator=attr.validators.deep_mapping(
             key_validator=attr.validators.instance_of(str),
@@ -477,7 +476,7 @@ class RoomStreamToken:
         ),
     )
 
-    def __attrs_post_init__(self):
+    def __attrs_post_init__(self) -> None:
         """Validates that both `topological` and `instance_map` aren't set."""
 
         if self.instance_map and self.topological:
@@ -593,7 +592,7 @@ class RoomStreamToken:
             return "s%d" % (self.stream,)
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class StreamToken:
     """A collection of positions within multiple streams.
 
@@ -601,20 +600,20 @@ class StreamToken:
     must be hashable.
     """
 
-    room_key = attr.ib(
-        type=RoomStreamToken, validator=attr.validators.instance_of(RoomStreamToken)
+    room_key: RoomStreamToken = attr.ib(
+        validator=attr.validators.instance_of(RoomStreamToken)
     )
-    presence_key = attr.ib(type=int)
-    typing_key = attr.ib(type=int)
-    receipt_key = attr.ib(type=int)
-    account_data_key = attr.ib(type=int)
-    push_rules_key = attr.ib(type=int)
-    to_device_key = attr.ib(type=int)
-    device_list_key = attr.ib(type=int)
-    groups_key = attr.ib(type=int)
+    presence_key: int
+    typing_key: int
+    receipt_key: int
+    account_data_key: int
+    push_rules_key: int
+    to_device_key: int
+    device_list_key: int
+    groups_key: int
 
     _SEPARATOR = "_"
-    START: "StreamToken"
+    START: ClassVar["StreamToken"]
 
     @classmethod
     async def from_string(cls, store: "DataStore", string: str) -> "StreamToken":
@@ -674,7 +673,7 @@ class StreamToken:
 StreamToken.START = StreamToken(RoomStreamToken(None, 0), 0, 0, 0, 0, 0, 0, 0, 0)
 
 
-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class PersistedEventPosition:
     """Position of a newly persisted event with instance that persisted it.
 
@@ -682,8 +681,8 @@ class PersistedEventPosition:
     RoomStreamToken.
     """
 
-    instance_name = attr.ib(type=str)
-    stream = attr.ib(type=int)
+    instance_name: str
+    stream: int
 
     def persisted_after(self, token: RoomStreamToken) -> bool:
         return token.get_stream_pos_for_instance(self.instance_name) < self.stream
@@ -733,15 +732,15 @@ class ThirdPartyInstanceID:
     __str__ = to_string
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class ReadReceipt:
     """Information about a read-receipt"""
 
-    room_id = attr.ib()
-    receipt_type = attr.ib()
-    user_id = attr.ib()
-    event_ids = attr.ib()
-    data = attr.ib()
+    room_id: str
+    receipt_type: str
+    user_id: str
+    event_ids: List[str]
+    data: JsonDict
 
 
 def get_verify_key_from_cross_signing_key(key_info):
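
Two details from the `synapse/types.py` hunks above are worth spelling out: fields with validators keep an `attr.ib(validator=...)` alongside the annotation, and `StreamToken.START` gains a `ClassVar` annotation because under `auto_attribs` a bare annotation would otherwise become an instance field. A hedged sketch of both points (the `TokenPositionSketch` class is hypothetical):

    import attr
    from typing import ClassVar, Optional

    @attr.s(slots=True, frozen=True, auto_attribs=True)
    class TokenPositionSketch:
        stream: int = attr.ib(validator=attr.validators.instance_of(int))
        topological: Optional[int] = attr.ib(
            default=None,
            validator=attr.validators.optional(attr.validators.instance_of(int)),
        )

        # ClassVar annotations are skipped by auto_attribs, so this stays a class attribute.
        START: ClassVar["TokenPositionSketch"]

    TokenPositionSketch.START = TokenPositionSketch(0)
    assert TokenPositionSketch.START.topological is None
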
diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py
index 150a04b53e..3f7299aff7 100644
--- a/synapse/util/async_helpers.py
+++ b/synapse/util/async_helpers.py
@@ -309,12 +309,12 @@ def gather_results(  # type: ignore[misc]
     return deferred.addCallback(tuple)
 
 
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _LinearizerEntry:
     # The number of things executing.
-    count = attr.ib(type=int)
+    count: int
     # Deferreds for the things blocked from executing.
-    deferreds = attr.ib(type=collections.OrderedDict)
+    deferreds: collections.OrderedDict
 
 
 class Linearizer:
diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py
index 485ddb1893..d267703df0 100644
--- a/synapse/util/caches/dictionary_cache.py
+++ b/synapse/util/caches/dictionary_cache.py
@@ -33,7 +33,7 @@ DV = TypeVar("DV")
 
 # This class can't be generic because it uses slots with attrs.
 # See: https://github.com/python-attrs/attrs/issues/313
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class DictionaryEntry:  # should be: Generic[DKT, DV].
     """Returned when getting an entry from the cache
 
@@ -41,14 +41,13 @@ class DictionaryEntry:  # should be: Generic[DKT, DV].
         full: Whether the cache has the full dict or just some keys.
             If not full then not all requested keys will necessarily be present
             in `value`
-        known_absent: Keys that were looked up in the dict and were not
-            there.
+        known_absent: Keys that were looked up in the dict and were not there.
         value: The full or partial dict value
     """
 
-    full = attr.ib(type=bool)
-    known_absent = attr.ib(type=Set[Any])  # should be: Set[DKT]
-    value = attr.ib(type=Dict[Any, Any])  # should be: Dict[DKT, DV]
+    full: bool
+    known_absent: Set[Any]  # should be: Set[DKT]
+    value: Dict[Any, Any]  # should be: Dict[DKT, DV]
 
     def __len__(self) -> int:
         return len(self.value)