diff --git a/CHANGES.md b/CHANGES.md
index 62ea684e58..0e5e052951 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,30 @@
+Synapse 1.40.0 (2021-08-10)
+===========================
+
+No significant changes.
+
+
+Synapse 1.40.0rc3 (2021-08-09)
+==============================
+
+Features
+--------
+
+- Support [MSC3289: room version 8](https://github.com/matrix-org/matrix-doc/pull/3289). ([\#10449](https://github.com/matrix-org/synapse/issues/10449))
+
+
+Bugfixes
+--------
+
+- Mark the experimental room version from [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) as unstable. ([\#10449](https://github.com/matrix-org/synapse/issues/10449))
+
+
+Improved Documentation
+----------------------
+
+- Fix broken links in `upgrade.md`. Contributed by @dklimpel. ([\#10543](https://github.com/matrix-org/synapse/issues/10543))
+
+
Synapse 1.40.0rc2 (2021-08-04)
==============================
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index e7eef23419..4486a4b2cd 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -252,6 +252,7 @@ To prepare a Pull Request, please:
4. on GitHub, [create the Pull Request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request);
5. add a [changelog entry](#changelog) and push it to your Pull Request;
6. for most contributors, that's all - however, if you are a member of the organization `matrix-org`, on GitHub, please request a review from `matrix.org / Synapse Core`.
+7. if you need to update your PR, please avoid rebasing and just add new commits to your branch.
## Changelog
diff --git a/changelog.d/10475.feature b/changelog.d/10475.feature
new file mode 100644
index 0000000000..52eab11b03
--- /dev/null
+++ b/changelog.d/10475.feature
@@ -0,0 +1 @@
+Add support for sending federation requests through a proxy. Contributed by @Bubu and @dklimpel.
\ No newline at end of file
diff --git a/changelog.d/10527.misc b/changelog.d/10527.misc
index 3cf22f9daf..ffc4e4289c 100644
--- a/changelog.d/10527.misc
+++ b/changelog.d/10527.misc
@@ -1 +1 @@
-Prepare for the new spaces summary endpoint (updates to [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946)).
+Add pagination to the spaces summary based on updates to [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946).
diff --git a/changelog.d/10530.misc b/changelog.d/10530.misc
index 3cf22f9daf..ffc4e4289c 100644
--- a/changelog.d/10530.misc
+++ b/changelog.d/10530.misc
@@ -1 +1 @@
-Prepare for the new spaces summary endpoint (updates to [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946)).
+Add pagination to the spaces summary based on updates to [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946).
diff --git a/changelog.d/10538.feature b/changelog.d/10538.feature
new file mode 100644
index 0000000000..120c8e8ca0
--- /dev/null
+++ b/changelog.d/10538.feature
@@ -0,0 +1 @@
+Add support for new redaction rules for historical events specified in [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716).
diff --git a/changelog.d/10543.doc b/changelog.d/10543.doc
deleted file mode 100644
index 6c06722eb4..0000000000
--- a/changelog.d/10543.doc
+++ /dev/null
@@ -1 +0,0 @@
-Fix broken links in `upgrade.md`. Contributed by @dklimpel.
diff --git a/changelog.d/10549.feature b/changelog.d/10549.feature
new file mode 100644
index 0000000000..ffc4e4289c
--- /dev/null
+++ b/changelog.d/10549.feature
@@ -0,0 +1 @@
+Add pagination to the spaces summary based on updates to [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946).
diff --git a/changelog.d/10550.bugfix b/changelog.d/10550.bugfix
new file mode 100644
index 0000000000..2e1b7c8bbb
--- /dev/null
+++ b/changelog.d/10550.bugfix
@@ -0,0 +1 @@
+Fix a longstanding bug which caused the user "status" to be reset when the user went offline. Contributed by @dklimpel.
diff --git a/changelog.d/10551.doc b/changelog.d/10551.doc
new file mode 100644
index 0000000000..4a2b0785bf
--- /dev/null
+++ b/changelog.d/10551.doc
@@ -0,0 +1 @@
+Updated the reverse proxy documentation to highlight the homeserver configuration that is needed to make Synapse aware that it is intentionally reverse proxied.
diff --git a/changelog.d/10552.misc b/changelog.d/10552.misc
new file mode 100644
index 0000000000..fc5f6aea5f
--- /dev/null
+++ b/changelog.d/10552.misc
@@ -0,0 +1 @@
+Update `/batch_send` endpoint to only return `state_events` created by the `state_events_from_before` passed in.
diff --git a/changelog.d/10558.feature b/changelog.d/10558.feature
new file mode 100644
index 0000000000..1f461bc70a
--- /dev/null
+++ b/changelog.d/10558.feature
@@ -0,0 +1 @@
+Admin API to delete several media for a specific user. Contributed by @dklimpel.
diff --git a/changelog.d/10560.feature b/changelog.d/10560.feature
new file mode 100644
index 0000000000..ffc4e4289c
--- /dev/null
+++ b/changelog.d/10560.feature
@@ -0,0 +1 @@
+Add pagination to the spaces summary based on updates to [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946).
diff --git a/changelog.d/10563.misc b/changelog.d/10563.misc
new file mode 100644
index 0000000000..8e4e90c8f4
--- /dev/null
+++ b/changelog.d/10563.misc
@@ -0,0 +1 @@
+Update contributing.md to warn against rebasing an open PR.
diff --git a/changelog.d/10572.misc b/changelog.d/10572.misc
new file mode 100644
index 0000000000..008d7be444
--- /dev/null
+++ b/changelog.d/10572.misc
@@ -0,0 +1 @@
+Clarify error message when failing to join a restricted room.
diff --git a/changelog.d/10574.feature b/changelog.d/10574.feature
new file mode 100644
index 0000000000..ffc4e4289c
--- /dev/null
+++ b/changelog.d/10574.feature
@@ -0,0 +1 @@
+Add pagination to the spaces summary based on updates to [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946).
diff --git a/changelog.d/10575.feature b/changelog.d/10575.feature
new file mode 100644
index 0000000000..ffc4e4289c
--- /dev/null
+++ b/changelog.d/10575.feature
@@ -0,0 +1 @@
+Add pagination to the spaces summary based on updates to [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946).
diff --git a/changelog.d/10579.feature b/changelog.d/10579.feature
new file mode 100644
index 0000000000..ffc4e4289c
--- /dev/null
+++ b/changelog.d/10579.feature
@@ -0,0 +1 @@
+Add pagination to the spaces summary based on updates to [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946).
diff --git a/changelog.d/10580.bugfix b/changelog.d/10580.bugfix
new file mode 100644
index 0000000000..f8da7382b7
--- /dev/null
+++ b/changelog.d/10580.bugfix
@@ -0,0 +1 @@
+Allow public rooms to be previewed in the spaces summary APIs from [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946).
diff --git a/debian/changelog b/debian/changelog
index 39d9c8169e..e101423fe4 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,15 @@
+matrix-synapse-py3 (1.40.0) stable; urgency=medium
+
+ * New synapse release 1.40.0.
+
+ -- Synapse Packaging team <packages@matrix.org> Tue, 10 Aug 2021 13:50:48 +0100
+
+matrix-synapse-py3 (1.40.0~rc3) stable; urgency=medium
+
+ * New synapse release 1.40.0~rc3.
+
+ -- Synapse Packaging team <packages@matrix.org> Mon, 09 Aug 2021 13:41:08 +0100
+
matrix-synapse-py3 (1.40.0~rc2) stable; urgency=medium
* New synapse release 1.40.0~rc2.
diff --git a/docs/admin_api/media_admin_api.md b/docs/admin_api/media_admin_api.md
index 61bed1e0d5..ea05bd6e44 100644
--- a/docs/admin_api/media_admin_api.md
+++ b/docs/admin_api/media_admin_api.md
@@ -12,6 +12,7 @@
- [Delete local media](#delete-local-media)
* [Delete a specific local media](#delete-a-specific-local-media)
* [Delete local media by date or size](#delete-local-media-by-date-or-size)
+ * [Delete media uploaded by a user](#delete-media-uploaded-by-a-user)
- [Purge Remote Media API](#purge-remote-media-api)
# Querying media
@@ -47,7 +48,8 @@ The API returns a JSON body like the following:
## List all media uploaded by a user
Listing all media that has been uploaded by a local user can be achieved through
-the use of the [List media of a user](user_admin_api.md#list-media-of-a-user)
+the use of the
+[List media uploaded by a user](user_admin_api.md#list-media-uploaded-by-a-user)
Admin API.
# Quarantine media
@@ -281,6 +283,11 @@ The following fields are returned in the JSON response body:
* `deleted_media`: an array of strings - List of deleted `media_id`
* `total`: integer - Total number of deleted `media_id`
+## Delete media uploaded by a user
+
+You can find details of how to delete multiple media uploaded by a user in
+[User Admin API](user_admin_api.md#delete-media-uploaded-by-a-user).
+
# Purge Remote Media API
The purge remote media API allows server admins to purge old cached remote media.
diff --git a/docs/admin_api/user_admin_api.md b/docs/admin_api/user_admin_api.md
index 160899754e..33811f5bbb 100644
--- a/docs/admin_api/user_admin_api.md
+++ b/docs/admin_api/user_admin_api.md
@@ -443,8 +443,9 @@ The following fields are returned in the JSON response body:
- `joined_rooms` - An array of `room_id`.
- `total` - Number of rooms.
+## User media
-## List media of a user
+### List media uploaded by a user
Gets a list of all local media that a specific `user_id` has created.
By default, the response is ordered by descending creation date and ascending media ID.
The newest media is on top. You can change the order with parameters
@@ -543,7 +544,6 @@ The following fields are returned in the JSON response body:
- `media` - An array of objects, each containing information about a media.
Media objects contain the following fields:
-
- `created_ts` - integer - Timestamp when the content was uploaded in ms.
- `last_access_ts` - integer - Timestamp when the content was last accessed in ms.
- `media_id` - string - The id used to refer to the media.
@@ -551,13 +551,58 @@ The following fields are returned in the JSON response body:
- `media_type` - string - The MIME-type of the media.
- `quarantined_by` - string - The user ID that initiated the quarantine request
for this media.
-
- `safe_from_quarantine` - bool - Status if this media is safe from quarantining.
- `upload_name` - string - The name the media was uploaded with.
-
- `next_token`: integer - Indication for pagination. See above.
- `total` - integer - Total number of media.
+### Delete media uploaded by a user
+
+This API deletes the *local* media from the disk of your own server
+that a specific `user_id` has created. This includes any local thumbnails.
+
+This API will not affect media that has been uploaded to external
+media repositories (e.g. https://github.com/turt2live/matrix-media-repo/).
+
+By default, the API deletes media ordered by descending creation date and ascending media ID.
+The newest media is deleted first. You can change the order with parameters
+`order_by` and `dir`. If no `limit` is set the API deletes `100` files per request.
+
+The API is:
+
+```
+DELETE /_synapse/admin/v1/users/<user_id>/media
+```
+
+To use it, you will need to authenticate by providing an `access_token` for a
+server admin: [Admin API](../usage/administration/admin_api)
+
+A response body like the following is returned:
+
+```json
+{
+ "deleted_media": [
+ "abcdefghijklmnopqrstuvwx"
+ ],
+ "total": 1
+}
+```
+
+The following fields are returned in the JSON response body:
+
+* `deleted_media`: an array of strings - List of deleted `media_id`
+* `total`: integer - Total number of deleted `media_id`
+
+**Note**: There is no `next_token`. This is not useful for deleting media, because
+after deleting media the remaining media have a new order.
+
+**Parameters**
+
+This API has the same parameters as
+[List media uploaded by a user](#list-media-uploaded-by-a-user).
+With the parameters you can for example limit the number of files to delete at once or
+delete largest/smallest or newest/oldest files first.
+
## Login as a user
Get an access token that can be used to authenticate as that user. Useful for
@@ -1012,4 +1057,3 @@ The following parameters should be set in the URL:
- `user_id` - The fully qualified MXID: for example, `@user:server.com`. The user must
be local.
-
diff --git a/docs/reverse_proxy.md b/docs/reverse_proxy.md
index 76bb45aff2..5f8d20129e 100644
--- a/docs/reverse_proxy.md
+++ b/docs/reverse_proxy.md
@@ -33,6 +33,19 @@ Let's assume that we expect clients to connect to our server at
`https://example.com:8448`. The following sections detail the configuration of
the reverse proxy and the homeserver.
+
+## Homeserver Configuration
+
+The HTTP configuration will need to be updated for Synapse to correctly record
+client IP addresses and generate redirect URLs while behind a reverse proxy.
+
+In `homeserver.yaml` set `x_forwarded: true` in the port 8008 section and
+consider setting `bind_addresses: ['127.0.0.1']` so that the server only
+listens to traffic on localhost. (Do not change `bind_addresses` to `127.0.0.1`
+when using a containerized Synapse, as that will prevent it from responding
+to proxied traffic.)
+
+
## Reverse-proxy configuration examples
**NOTE**: You only need one of these.
@@ -239,16 +252,6 @@ relay "matrix_federation" {
}
```
-## Homeserver Configuration
-
-You will also want to set `bind_addresses: ['127.0.0.1']` and
-`x_forwarded: true` for port 8008 in `homeserver.yaml` to ensure that
-client IP addresses are recorded correctly.
-
-Having done so, you can then use `https://matrix.example.com` (instead
-of `https://matrix.example.com:8448`) as the "Custom server" when
-connecting to Synapse from a client.
-
## Health check endpoint
diff --git a/docs/setup/forward_proxy.md b/docs/setup/forward_proxy.md
index a0720ab342..494c14893b 100644
--- a/docs/setup/forward_proxy.md
+++ b/docs/setup/forward_proxy.md
@@ -45,18 +45,18 @@ The proxy will be **used** for:
- recaptcha validation
- CAS auth validation
- OpenID Connect
+- Outbound federation
- Federation (checking public key revocation)
+- Fetching public keys of other servers
+- Downloading remote media
It will **not be used** for:
- Application Services
- Identity servers
-- Outbound federation
- In worker configurations
- connections between workers
- connections from workers to Redis
-- Fetching public keys of other servers
-- Downloading remote media
## Troubleshooting
diff --git a/docs/upgrade.md b/docs/upgrade.md
index ce9167e6de..8831c9d6cf 100644
--- a/docs/upgrade.md
+++ b/docs/upgrade.md
@@ -86,6 +86,33 @@ process, for example:
```
+# Upgrading to v1.xx.0
+
+## Add support for routing outbound HTTP requests via a proxy for federation
+
+Since Synapse 1.6.0 (2019-11-26) you can set a proxy for outbound HTTP requests via
+http_proxy/https_proxy environment variables. This proxy was set for:
+- push
+- url previews
+- phone-home stats
+- recaptcha validation
+- CAS auth validation
+- OpenID Connect
+- Federation (checking public key revocation)
+
+In this version we have added support for outbound requests for:
+- Outbound federation
+- Downloading remote media
+- Fetching public keys of other servers
+
+These requests use the same proxy configuration. If you have a proxy configuration, we
+recommend verifying it. It may be necessary to adjust the `no_proxy`
+environment variable.
+
+See [using a forward proxy with Synapse documentation](setup/forward_proxy.md) for
+details.
+
+
# Upgrading to v1.39.0
## Deprecation of the current third-party rules module interface
diff --git a/mypy.ini b/mypy.ini
index 8717ae738e..5d6cd557bc 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -86,6 +86,7 @@ files =
tests/test_event_auth.py,
tests/test_utils,
tests/handlers/test_password_providers.py,
+ tests/handlers/test_space_summary.py,
tests/rest/client/v1/test_login.py,
tests/rest/client/v2_alpha/test_auth.py,
tests/util/test_itertools.py,
diff --git a/synapse/__init__.py b/synapse/__init__.py
index da52463531..919293cd80 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -47,7 +47,7 @@ try:
except ImportError:
pass
-__version__ = "1.40.0rc2"
+__version__ = "1.40.0"
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
# We import here so that we don't have to install a bunch of deps when
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index a986fdb47a..e0e24fddac 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -62,7 +62,7 @@ class JoinRules:
INVITE = "invite"
PRIVATE = "private"
# As defined for MSC3083.
- MSC3083_RESTRICTED = "restricted"
+ RESTRICTED = "restricted"
class RestrictedJoinRuleTypes:
diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py
index bc678efe49..11280c4462 100644
--- a/synapse/api/room_versions.py
+++ b/synapse/api/room_versions.py
@@ -76,6 +76,8 @@ class RoomVersion:
# MSC2716: Adds m.room.power_levels -> content.historical field to control
# whether "insertion", "chunk", "marker" events can be sent
msc2716_historical = attr.ib(type=bool)
+ # MSC2716: Adds support for redacting "insertion", "chunk", and "marker" events
+ msc2716_redactions = attr.ib(type=bool)
class RoomVersions:
@@ -92,6 +94,7 @@ class RoomVersions:
msc3083_join_rules=False,
msc2403_knocking=False,
msc2716_historical=False,
+ msc2716_redactions=False,
)
V2 = RoomVersion(
"2",
@@ -106,6 +109,7 @@ class RoomVersions:
msc3083_join_rules=False,
msc2403_knocking=False,
msc2716_historical=False,
+ msc2716_redactions=False,
)
V3 = RoomVersion(
"3",
@@ -120,6 +124,7 @@ class RoomVersions:
msc3083_join_rules=False,
msc2403_knocking=False,
msc2716_historical=False,
+ msc2716_redactions=False,
)
V4 = RoomVersion(
"4",
@@ -134,6 +139,7 @@ class RoomVersions:
msc3083_join_rules=False,
msc2403_knocking=False,
msc2716_historical=False,
+ msc2716_redactions=False,
)
V5 = RoomVersion(
"5",
@@ -148,6 +154,7 @@ class RoomVersions:
msc3083_join_rules=False,
msc2403_knocking=False,
msc2716_historical=False,
+ msc2716_redactions=False,
)
V6 = RoomVersion(
"6",
@@ -162,6 +169,7 @@ class RoomVersions:
msc3083_join_rules=False,
msc2403_knocking=False,
msc2716_historical=False,
+ msc2716_redactions=False,
)
MSC2176 = RoomVersion(
"org.matrix.msc2176",
@@ -176,10 +184,11 @@ class RoomVersions:
msc3083_join_rules=False,
msc2403_knocking=False,
msc2716_historical=False,
+ msc2716_redactions=False,
)
- MSC3083 = RoomVersion(
- "org.matrix.msc3083.v2",
- RoomDisposition.UNSTABLE,
+ V7 = RoomVersion(
+ "7",
+ RoomDisposition.STABLE,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
@@ -187,12 +196,13 @@ class RoomVersions:
strict_canonicaljson=True,
limit_notifications_power_levels=True,
msc2176_redaction_rules=False,
- msc3083_join_rules=True,
- msc2403_knocking=False,
+ msc3083_join_rules=False,
+ msc2403_knocking=True,
msc2716_historical=False,
+ msc2716_redactions=False,
)
- V7 = RoomVersion(
- "7",
+ V8 = RoomVersion(
+ "8",
RoomDisposition.STABLE,
EventFormatVersions.V3,
StateResolutionVersions.V2,
@@ -201,13 +211,29 @@ class RoomVersions:
strict_canonicaljson=True,
limit_notifications_power_levels=True,
msc2176_redaction_rules=False,
- msc3083_join_rules=False,
+ msc3083_join_rules=True,
msc2403_knocking=True,
msc2716_historical=False,
+ msc2716_redactions=False,
)
MSC2716 = RoomVersion(
"org.matrix.msc2716",
- RoomDisposition.STABLE,
+ RoomDisposition.UNSTABLE,
+ EventFormatVersions.V3,
+ StateResolutionVersions.V2,
+ enforce_key_validity=True,
+ special_case_aliases_auth=False,
+ strict_canonicaljson=True,
+ limit_notifications_power_levels=True,
+ msc2176_redaction_rules=False,
+ msc3083_join_rules=False,
+ msc2403_knocking=True,
+ msc2716_historical=True,
+ msc2716_redactions=False,
+ )
+ MSC2716v2 = RoomVersion(
+ "org.matrix.msc2716v2",
+ RoomDisposition.UNSTABLE,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
@@ -218,6 +244,7 @@ class RoomVersions:
msc3083_join_rules=False,
msc2403_knocking=True,
msc2716_historical=True,
+ msc2716_redactions=True,
)
@@ -231,9 +258,9 @@ KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
RoomVersions.V5,
RoomVersions.V6,
RoomVersions.MSC2176,
- RoomVersions.MSC3083,
RoomVersions.V7,
RoomVersions.MSC2716,
+ RoomVersions.V8,
)
}
diff --git a/synapse/event_auth.py b/synapse/event_auth.py
index 4c92e9a2d4..c3a0c10499 100644
--- a/synapse/event_auth.py
+++ b/synapse/event_auth.py
@@ -370,10 +370,7 @@ def _is_membership_change_allowed(
raise AuthError(403, "You are banned from this room")
elif join_rule == JoinRules.PUBLIC:
pass
- elif (
- room_version.msc3083_join_rules
- and join_rule == JoinRules.MSC3083_RESTRICTED
- ):
+ elif room_version.msc3083_join_rules and join_rule == JoinRules.RESTRICTED:
# This is the same as public, but the event must contain a reference
# to the server who authorised the join. If the event does not contain
# the proper content it is rejected.
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index a0c07f62f4..b6da2f60af 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -17,7 +17,7 @@ from typing import Any, Mapping, Union
from frozendict import frozendict
-from synapse.api.constants import EventTypes, RelationTypes
+from synapse.api.constants import EventContentFields, EventTypes, RelationTypes
from synapse.api.errors import Codes, SynapseError
from synapse.api.room_versions import RoomVersion
from synapse.util.async_helpers import yieldable_gather_results
@@ -135,6 +135,12 @@ def prune_event_dict(room_version: RoomVersion, event_dict: dict) -> dict:
add_fields("history_visibility")
elif event_type == EventTypes.Redaction and room_version.msc2176_redaction_rules:
add_fields("redacts")
+ elif room_version.msc2716_redactions and event_type == EventTypes.MSC2716_INSERTION:
+ add_fields(EventContentFields.MSC2716_NEXT_CHUNK_ID)
+ elif room_version.msc2716_redactions and event_type == EventTypes.MSC2716_CHUNK:
+ add_fields(EventContentFields.MSC2716_CHUNK_ID)
+ elif room_version.msc2716_redactions and event_type == EventTypes.MSC2716_MARKER:
+ add_fields(EventContentFields.MSC2716_MARKER_INSERTION)
allowed_fields = {k: v for k, v in event_dict.items() if k in allowed_keys}
diff --git a/synapse/handlers/event_auth.py b/synapse/handlers/event_auth.py
index 53fac1f8a3..4288ffff09 100644
--- a/synapse/handlers/event_auth.py
+++ b/synapse/handlers/event_auth.py
@@ -213,7 +213,7 @@ class EventAuthHandler:
raise AuthError(
403,
- "You do not belong to any of the required rooms to join this room.",
+ "You do not belong to any of the required rooms/spaces to join this room.",
)
async def has_restricted_join_rules(
@@ -240,7 +240,7 @@ class EventAuthHandler:
# If the join rule is not restricted, this doesn't apply.
join_rules_event = await self._store.get_event(join_rules_event_id)
- return join_rules_event.content.get("join_rule") == JoinRules.MSC3083_RESTRICTED
+ return join_rules_event.content.get("join_rule") == JoinRules.RESTRICTED
async def get_rooms_that_allow_join(
self, state_ids: StateMap[str]
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 016c5df2ca..7ca14e1d84 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -1184,8 +1184,7 @@ class PresenceHandler(BasePresenceHandler):
new_fields = {"state": presence}
if not ignore_status_msg:
- msg = status_msg if presence != PresenceState.OFFLINE else None
- new_fields["status_msg"] = msg
+ new_fields["status_msg"] = status_msg
if presence == PresenceState.ONLINE or (
presence == PresenceState.BUSY and self._busy_presence_enabled
@@ -1478,7 +1477,7 @@ def format_user_presence_state(
content["user_id"] = state.user_id
if state.last_active_ts:
content["last_active_ago"] = now - state.last_active_ts
- if state.status_msg and state.state != PresenceState.OFFLINE:
+ if state.status_msg:
content["status_msg"] = state.status_msg
if state.state == PresenceState.ONLINE:
content["currently_active"] = state.currently_active
@@ -1840,9 +1839,7 @@ def handle_timeout(
# don't set them as offline.
sync_or_active = max(state.last_user_sync_ts, state.last_active_ts)
if now - sync_or_active > SYNC_ONLINE_TIMEOUT:
- state = state.copy_and_replace(
- state=PresenceState.OFFLINE, status_msg=None
- )
+ state = state.copy_and_replace(state=PresenceState.OFFLINE)
changed = True
else:
# We expect to be poked occasionally by the other side.
@@ -1850,7 +1847,7 @@ def handle_timeout(
# no one gets stuck online forever.
if now - state.last_federation_update_ts > FEDERATION_TIMEOUT:
# The other side seems to have disappeared.
- state = state.copy_and_replace(state=PresenceState.OFFLINE, status_msg=None)
+ state = state.copy_and_replace(state=PresenceState.OFFLINE)
changed = True
return state if changed else None
diff --git a/synapse/handlers/space_summary.py b/synapse/handlers/space_summary.py
index 2517f278b6..d0060f9046 100644
--- a/synapse/handlers/space_summary.py
+++ b/synapse/handlers/space_summary.py
@@ -18,7 +18,7 @@ import re
from collections import deque
from typing import (
TYPE_CHECKING,
- Collection,
+ Deque,
Dict,
Iterable,
List,
@@ -38,9 +38,12 @@ from synapse.api.constants import (
Membership,
RoomTypes,
)
+from synapse.api.errors import AuthError, Codes, SynapseError
from synapse.events import EventBase
from synapse.events.utils import format_event_for_client_v2
from synapse.types import JsonDict
+from synapse.util.caches.response_cache import ResponseCache
+from synapse.util.stringutils import random_string
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -57,16 +60,73 @@ MAX_ROOMS_PER_SPACE = 50
MAX_SERVERS_PER_SPACE = 3
+@attr.s(slots=True, frozen=True, auto_attribs=True)
+class _PaginationKey:
+ """The key used to find unique pagination session."""
+
+ # The first three entries match the request parameters (and cannot change
+ # during a pagination session).
+ room_id: str
+ suggested_only: bool
+ max_depth: Optional[int]
+ # The randomly generated token.
+ token: str
+
+
+@attr.s(slots=True, frozen=True, auto_attribs=True)
+class _PaginationSession:
+ """The information that is stored for pagination."""
+
+ # The time the pagination session was created, in milliseconds.
+ creation_time_ms: int
+ # The queue of rooms which are still to process.
+ room_queue: Deque["_RoomQueueEntry"]
+ # A set of rooms which have been processed.
+ processed_rooms: Set[str]
+
+
class SpaceSummaryHandler:
+ # The time a pagination session remains valid for.
+ _PAGINATION_SESSION_VALIDITY_PERIOD_MS = 5 * 60 * 1000
+
def __init__(self, hs: "HomeServer"):
self._clock = hs.get_clock()
- self._auth = hs.get_auth()
self._event_auth_handler = hs.get_event_auth_handler()
self._store = hs.get_datastore()
self._event_serializer = hs.get_event_client_serializer()
self._server_name = hs.hostname
self._federation_client = hs.get_federation_client()
+ # A map of query information to the current pagination state.
+ #
+ # TODO Allow for multiple workers to share this data.
+ # TODO Expire pagination tokens.
+ self._pagination_sessions: Dict[_PaginationKey, _PaginationSession] = {}
+
+ # If a user tries to fetch the same page multiple times in quick succession,
+ # only process the first attempt and return its result to subsequent requests.
+ self._pagination_response_cache: ResponseCache[
+ Tuple[str, bool, Optional[int], Optional[int], Optional[str]]
+ ] = ResponseCache(
+ hs.get_clock(),
+ "get_room_hierarchy",
+ )
+
+ def _expire_pagination_sessions(self):
+ """Expire pagination session which are old."""
+ expire_before = (
+ self._clock.time_msec() - self._PAGINATION_SESSION_VALIDITY_PERIOD_MS
+ )
+ to_expire = []
+
+ for key, value in self._pagination_sessions.items():
+ if value.creation_time_ms < expire_before:
+ to_expire.append(key)
+
+ for key in to_expire:
+ logger.debug("Expiring pagination session id %s", key)
+ del self._pagination_sessions[key]
+
async def get_space_summary(
self,
requester: str,
@@ -92,9 +152,13 @@ class SpaceSummaryHandler:
Returns:
summary dict to return
"""
- # first of all, check that the user is in the room in question (or it's
- # world-readable)
- await self._auth.check_user_in_room_or_world_readable(room_id, requester)
+ # First of all, check that the room is accessible.
+ if not await self._is_local_room_accessible(room_id, requester):
+ raise AuthError(
+ 403,
+ "User %s not in room %s, and room previews are disabled"
+ % (requester, room_id),
+ )
# the queue of rooms to process
room_queue = deque((_RoomQueueEntry(room_id, ()),))
@@ -130,7 +194,7 @@ class SpaceSummaryHandler:
requester, None, room_id, suggested_only, max_children
)
- events: Collection[JsonDict] = []
+ events: Sequence[JsonDict] = []
if room_entry:
rooms_result.append(room_entry.room)
events = room_entry.children
@@ -158,48 +222,10 @@ class SpaceSummaryHandler:
room = room_entry.room
fed_room_id = room_entry.room_id
- # The room should only be included in the summary if:
- # a. the user is in the room;
- # b. the room is world readable; or
- # c. the user could join the room, e.g. the join rules
- # are set to public or the user is in a space that
- # has been granted access to the room.
- #
- # Note that we know the user is not in the root room (which is
- # why the remote call was made in the first place), but the user
- # could be in one of the children rooms and we just didn't know
- # about the link.
-
- # The API doesn't return the room version so assume that a
- # join rule of knock is valid.
- include_room = (
- room.get("join_rules") in (JoinRules.PUBLIC, JoinRules.KNOCK)
- or room.get("world_readable") is True
- )
-
- # Check if the user is a member of any of the allowed spaces
- # from the response.
- allowed_rooms = room.get("allowed_room_ids") or room.get(
- "allowed_spaces"
- )
- if (
- not include_room
- and allowed_rooms
- and isinstance(allowed_rooms, list)
- ):
- include_room = await self._event_auth_handler.is_user_in_rooms(
- allowed_rooms, requester
- )
-
- # Finally, if this isn't the requested room, check ourselves
- # if we can access the room.
- if not include_room and fed_room_id != queue_entry.room_id:
- include_room = await self._is_room_accessible(
- fed_room_id, requester, None
- )
-
# The user can see the room, include it!
- if include_room:
+ if await self._is_remote_room_accessible(
+ requester, fed_room_id, room
+ ):
# Before returning to the client, remove the allowed_room_ids
# and allowed_spaces keys.
room.pop("allowed_room_ids", None)
@@ -245,6 +271,158 @@ class SpaceSummaryHandler:
return {"rooms": rooms_result, "events": events_result}
+ async def get_room_hierarchy(
+ self,
+ requester: str,
+ requested_room_id: str,
+ suggested_only: bool = False,
+ max_depth: Optional[int] = None,
+ limit: Optional[int] = None,
+ from_token: Optional[str] = None,
+ ) -> JsonDict:
+ """
+ Implementation of the room hierarchy C-S API.
+
+ Args:
+ requester: The user ID of the user making this request.
+ requested_room_id: The room ID to start the hierarchy at (the "root" room).
+ suggested_only: Whether we should only return children with the "suggested"
+ flag set.
+ max_depth: The maximum depth in the tree to explore, must be a
+ non-negative integer.
+
+ 0 would correspond to just the root room, 1 would include just
+ the root room's children, etc.
+ limit: An optional limit on the number of rooms to return per
+ page. Must be a positive integer.
+ from_token: An optional pagination token.
+
+ Returns:
+ The JSON hierarchy dictionary.
+ """
+ # If a user tries to fetch the same page multiple times in quick succession,
+ # only process the first attempt and return its result to subsequent requests.
+ #
+ # This is due to the pagination process mutating internal state, attempting
+ # to process multiple requests for the same page will result in errors.
+ return await self._pagination_response_cache.wrap(
+ (requested_room_id, suggested_only, max_depth, limit, from_token),
+ self._get_room_hierarchy,
+ requester,
+ requested_room_id,
+ suggested_only,
+ max_depth,
+ limit,
+ from_token,
+ )
+
+ async def _get_room_hierarchy(
+ self,
+ requester: str,
+ requested_room_id: str,
+ suggested_only: bool = False,
+ max_depth: Optional[int] = None,
+ limit: Optional[int] = None,
+ from_token: Optional[str] = None,
+ ) -> JsonDict:
+ """See docstring for SpaceSummaryHandler.get_room_hierarchy."""
+
+ # First of all, check that the room is accessible.
+ if not await self._is_local_room_accessible(requested_room_id, requester):
+ raise AuthError(
+ 403,
+ "User %s not in room %s, and room previews are disabled"
+ % (requester, requested_room_id),
+ )
+
+ # If this is continuing a previous session, pull the persisted data.
+ if from_token:
+ self._expire_pagination_sessions()
+
+ pagination_key = _PaginationKey(
+ requested_room_id, suggested_only, max_depth, from_token
+ )
+ if pagination_key not in self._pagination_sessions:
+ raise SynapseError(400, "Unknown pagination token", Codes.INVALID_PARAM)
+
+ # Load the previous state.
+ pagination_session = self._pagination_sessions[pagination_key]
+ room_queue = pagination_session.room_queue
+ processed_rooms = pagination_session.processed_rooms
+ else:
+ # the queue of rooms to process
+ room_queue = deque((_RoomQueueEntry(requested_room_id, ()),))
+
+ # Rooms we have already processed.
+ processed_rooms = set()
+
+ rooms_result: List[JsonDict] = []
+
+ # Cap the limit to a server-side maximum.
+ if limit is None:
+ limit = MAX_ROOMS
+ else:
+ limit = min(limit, MAX_ROOMS)
+
+ # Iterate through the queue until we reach the limit or run out of
+ # rooms to include.
+ while room_queue and len(rooms_result) < limit:
+ queue_entry = room_queue.popleft()
+ room_id = queue_entry.room_id
+ current_depth = queue_entry.depth
+ if room_id in processed_rooms:
+ # already done this room
+ continue
+
+ logger.debug("Processing room %s", room_id)
+
+ is_in_room = await self._store.is_host_joined(room_id, self._server_name)
+ if is_in_room:
+ room_entry = await self._summarize_local_room(
+ requester,
+ None,
+ room_id,
+ suggested_only,
+ # TODO Handle max children.
+ max_children=None,
+ )
+
+ if room_entry:
+ rooms_result.append(room_entry.as_json())
+
+ # Add the child to the queue. We have already validated
+ # that the vias are a list of server names.
+ #
+ # If the current depth is the maximum depth, do not queue
+ # more entries.
+ if max_depth is None or current_depth < max_depth:
+ room_queue.extendleft(
+ _RoomQueueEntry(
+ ev["state_key"], ev["content"]["via"], current_depth + 1
+ )
+ for ev in reversed(room_entry.children)
+ )
+
+ processed_rooms.add(room_id)
+ else:
+ # TODO Federation.
+ pass
+
+ result: JsonDict = {"rooms": rooms_result}
+
+ # If there's additional data, generate a pagination token (and persist state).
+ if room_queue:
+ next_batch = random_string(24)
+ result["next_batch"] = next_batch
+ pagination_key = _PaginationKey(
+ requested_room_id, suggested_only, max_depth, next_batch
+ )
+ self._pagination_sessions[pagination_key] = _PaginationSession(
+ self._clock.time_msec(), room_queue, processed_rooms
+ )
+
+ return result
+
async def federation_space_summary(
self,
origin: str,
@@ -336,7 +514,7 @@ class SpaceSummaryHandler:
Returns:
A room entry if the room should be returned. None, otherwise.
"""
- if not await self._is_room_accessible(room_id, requester, origin):
+ if not await self._is_local_room_accessible(room_id, requester, origin):
return None
room_entry = await self._build_room_entry(room_id, for_federation=bool(origin))
@@ -438,8 +616,8 @@ class SpaceSummaryHandler:
return results
- async def _is_room_accessible(
- self, room_id: str, requester: Optional[str], origin: Optional[str]
+ async def _is_local_room_accessible(
+ self, room_id: str, requester: Optional[str], origin: Optional[str] = None
) -> bool:
"""
Calculate whether the room should be shown in the spaces summary.
@@ -550,6 +728,51 @@ class SpaceSummaryHandler:
)
return False
+ async def _is_remote_room_accessible(
+ self, requester: str, room_id: str, room: JsonDict
+ ) -> bool:
+ """
+ Calculate whether the room received over federation should be shown in the spaces summary.
+
+ It should be included if:
+
+ * The requester is joined or can join the room (per MSC3173).
+ * The history visibility is set to world readable.
+
+ Note that the local server is not in the requested room (which is why the
+ remote call was made in the first place), but the user could have access
+ due to an invite, etc.
+
+ Args:
+ requester: The user requesting the summary.
+ room_id: The room ID returned over federation.
+ room: The summary of the child room returned over federation.
+
+ Returns:
+ True if the room should be included in the spaces summary.
+ """
+ # The API doesn't return the room version so assume that a
+ # join rule of knock is valid.
+ if (
+ room.get("join_rules") in (JoinRules.PUBLIC, JoinRules.KNOCK)
+ or room.get("world_readable") is True
+ ):
+ return True
+
+ # Check if the user is a member of any of the allowed spaces
+ # from the response.
+ allowed_rooms = room.get("allowed_room_ids") or room.get("allowed_spaces")
+ if allowed_rooms and isinstance(allowed_rooms, list):
+ if await self._event_auth_handler.is_user_in_rooms(
+ allowed_rooms, requester
+ ):
+ return True
+
+ # Finally, check locally if we can access the room. The user might
+ # already be in the room (if it was a child room), or there might be a
+ # pending invite, etc.
+ return await self._is_local_room_accessible(room_id, requester)
+
async def _build_room_entry(self, room_id: str, for_federation: bool) -> JsonDict:
"""
Generate en entry suitable for the 'rooms' list in the summary response.
@@ -565,7 +788,7 @@ class SpaceSummaryHandler:
stats = await self._store.get_room_with_stats(room_id)
# currently this should be impossible because we call
- # check_user_in_room_or_world_readable on the room before we get here, so
+ # _is_local_room_accessible on the room before we get here, so
# there should always be an entry
assert stats is not None, "unable to retrieve stats for %s" % (room_id,)
@@ -645,6 +868,7 @@ class SpaceSummaryHandler:
class _RoomQueueEntry:
room_id: str
via: Sequence[str]
+ depth: int = 0
@attr.s(frozen=True, slots=True, auto_attribs=True)
@@ -655,7 +879,12 @@ class _RoomEntry:
# An iterable of the sorted, stripped children events for children of this room.
#
# This may not include all children.
- children: Collection[JsonDict] = ()
+ children: Sequence[JsonDict] = ()
+
+ def as_json(self) -> JsonDict:
+ result = dict(self.room)
+ result["children_state"] = self.children
+ return result
def _has_valid_via(e: EventBase) -> bool:
diff --git a/synapse/http/connectproxyclient.py b/synapse/http/connectproxyclient.py
index 17e1c5abb1..c577142268 100644
--- a/synapse/http/connectproxyclient.py
+++ b/synapse/http/connectproxyclient.py
@@ -12,8 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import base64
import logging
+from typing import Optional
+import attr
from zope.interface import implementer
from twisted.internet import defer, protocol
@@ -21,7 +24,6 @@ from twisted.internet.error import ConnectError
from twisted.internet.interfaces import IReactorCore, IStreamClientEndpoint
from twisted.internet.protocol import ClientFactory, Protocol, connectionDone
from twisted.web import http
-from twisted.web.http_headers import Headers
logger = logging.getLogger(__name__)
@@ -30,6 +32,22 @@ class ProxyConnectError(ConnectError):
pass
+@attr.s
+class ProxyCredentials:
+ username_password = attr.ib(type=bytes)
+
+ def as_proxy_authorization_value(self) -> bytes:
+ """
+ Return the value for a Proxy-Authorization header (i.e. 'Basic abdef==').
+
+ Returns:
+ A transformation of the authentication string into the encoded value for
+ a Proxy-Authorization header.
+ """
+ # Encode as base64 and prepend the authorization type
+ return b"Basic " + base64.encodebytes(self.username_password)
+
+
@implementer(IStreamClientEndpoint)
class HTTPConnectProxyEndpoint:
"""An Endpoint implementation which will send a CONNECT request to an http proxy
@@ -46,7 +64,7 @@ class HTTPConnectProxyEndpoint:
proxy_endpoint: the endpoint to use to connect to the proxy
host: hostname that we want to CONNECT to
port: port that we want to connect to
- headers: Extra HTTP headers to include in the CONNECT request
+ proxy_creds: credentials to authenticate at proxy
"""
def __init__(
@@ -55,20 +73,20 @@ class HTTPConnectProxyEndpoint:
proxy_endpoint: IStreamClientEndpoint,
host: bytes,
port: int,
- headers: Headers,
+ proxy_creds: Optional[ProxyCredentials],
):
self._reactor = reactor
self._proxy_endpoint = proxy_endpoint
self._host = host
self._port = port
- self._headers = headers
+ self._proxy_creds = proxy_creds
def __repr__(self):
return "<HTTPConnectProxyEndpoint %s>" % (self._proxy_endpoint,)
def connect(self, protocolFactory: ClientFactory):
f = HTTPProxiedClientFactory(
- self._host, self._port, protocolFactory, self._headers
+ self._host, self._port, protocolFactory, self._proxy_creds
)
d = self._proxy_endpoint.connect(f)
# once the tcp socket connects successfully, we need to wait for the
@@ -87,7 +105,7 @@ class HTTPProxiedClientFactory(protocol.ClientFactory):
dst_host: hostname that we want to CONNECT to
dst_port: port that we want to connect to
wrapped_factory: The original Factory
- headers: Extra HTTP headers to include in the CONNECT request
+ proxy_creds: credentials to authenticate at proxy
"""
def __init__(
@@ -95,12 +113,12 @@ class HTTPProxiedClientFactory(protocol.ClientFactory):
dst_host: bytes,
dst_port: int,
wrapped_factory: ClientFactory,
- headers: Headers,
+ proxy_creds: Optional[ProxyCredentials],
):
self.dst_host = dst_host
self.dst_port = dst_port
self.wrapped_factory = wrapped_factory
- self.headers = headers
+ self.proxy_creds = proxy_creds
self.on_connection = defer.Deferred()
def startedConnecting(self, connector):
@@ -114,7 +132,7 @@ class HTTPProxiedClientFactory(protocol.ClientFactory):
self.dst_port,
wrapped_protocol,
self.on_connection,
- self.headers,
+ self.proxy_creds,
)
def clientConnectionFailed(self, connector, reason):
@@ -145,7 +163,7 @@ class HTTPConnectProtocol(protocol.Protocol):
connected_deferred: a Deferred which will be callbacked with
wrapped_protocol when the CONNECT completes
- headers: Extra HTTP headers to include in the CONNECT request
+ proxy_creds: credentials to authenticate at proxy
"""
def __init__(
@@ -154,16 +172,16 @@ class HTTPConnectProtocol(protocol.Protocol):
port: int,
wrapped_protocol: Protocol,
connected_deferred: defer.Deferred,
- headers: Headers,
+ proxy_creds: Optional[ProxyCredentials],
):
self.host = host
self.port = port
self.wrapped_protocol = wrapped_protocol
self.connected_deferred = connected_deferred
- self.headers = headers
+ self.proxy_creds = proxy_creds
self.http_setup_client = HTTPConnectSetupClient(
- self.host, self.port, self.headers
+ self.host, self.port, self.proxy_creds
)
self.http_setup_client.on_connected.addCallback(self.proxyConnected)
@@ -205,30 +223,38 @@ class HTTPConnectSetupClient(http.HTTPClient):
Args:
host: The hostname to send in the CONNECT message
port: The port to send in the CONNECT message
- headers: Extra headers to send with the CONNECT message
+ proxy_creds: credentials to authenticate at proxy
"""
- def __init__(self, host: bytes, port: int, headers: Headers):
+ def __init__(
+ self,
+ host: bytes,
+ port: int,
+ proxy_creds: Optional[ProxyCredentials],
+ ):
self.host = host
self.port = port
- self.headers = headers
+ self.proxy_creds = proxy_creds
self.on_connected = defer.Deferred()
def connectionMade(self):
logger.debug("Connected to proxy, sending CONNECT")
self.sendCommand(b"CONNECT", b"%s:%d" % (self.host, self.port))
- # Send any additional specified headers
- for name, values in self.headers.getAllRawHeaders():
- for value in values:
- self.sendHeader(name, value)
+ # Determine whether we need to set Proxy-Authorization headers
+ if self.proxy_creds:
+ # Set a Proxy-Authorization header
+ self.sendHeader(
+ b"Proxy-Authorization",
+ self.proxy_creds.as_proxy_authorization_value(),
+ )
self.endHeaders()
def handleStatus(self, version: bytes, status: bytes, message: bytes):
logger.debug("Got Status: %s %s %s", status, message, version)
if status != b"200":
- raise ProxyConnectError("Unexpected status on CONNECT: %s" % status)
+ raise ProxyConnectError(f"Unexpected status on CONNECT: {status!s}")
def handleEndHeaders(self):
logger.debug("End Headers")
diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py
index c16b7f10e6..1238bfd287 100644
--- a/synapse/http/federation/matrix_federation_agent.py
+++ b/synapse/http/federation/matrix_federation_agent.py
@@ -14,6 +14,10 @@
import logging
import urllib.parse
from typing import Any, Generator, List, Optional
+from urllib.request import ( # type: ignore[attr-defined]
+ getproxies_environment,
+ proxy_bypass_environment,
+)
from netaddr import AddrFormatError, IPAddress, IPSet
from zope.interface import implementer
@@ -30,9 +34,12 @@ from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent, IAgentEndpointFactory, IBodyProducer, IResponse
from synapse.crypto.context_factory import FederationPolicyForHTTPS
-from synapse.http.client import BlacklistingAgentWrapper
+from synapse.http import proxyagent
+from synapse.http.client import BlacklistingAgentWrapper, BlacklistingReactorWrapper
+from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint
from synapse.http.federation.srv_resolver import Server, SrvResolver
from synapse.http.federation.well_known_resolver import WellKnownResolver
+from synapse.http.proxyagent import ProxyAgent
from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.types import ISynapseReactor
from synapse.util import Clock
@@ -57,6 +64,14 @@ class MatrixFederationAgent:
user_agent:
The user agent header to use for federation requests.
+ ip_whitelist: Allowed IP addresses.
+
+ ip_blacklist: Disallowed IP addresses.
+
+ proxy_reactor: twisted reactor to use for connections to the proxy server.
+ 'reactor' might have some blacklisting applied (i.e. for DNS queries),
+ but we need unblocked access to the proxy.
+
_srv_resolver:
SrvResolver implementation to use for looking up SRV records. None
to use a default implementation.
@@ -71,11 +86,18 @@ class MatrixFederationAgent:
reactor: ISynapseReactor,
tls_client_options_factory: Optional[FederationPolicyForHTTPS],
user_agent: bytes,
+ ip_whitelist: IPSet,
ip_blacklist: IPSet,
_srv_resolver: Optional[SrvResolver] = None,
_well_known_resolver: Optional[WellKnownResolver] = None,
):
- self._reactor = reactor
+ # proxy_reactor is not blacklisted
+ proxy_reactor = reactor
+
+ # We need to use a DNS resolver which filters out blacklisted IP
+ # addresses, to prevent DNS rebinding.
+ reactor = BlacklistingReactorWrapper(reactor, ip_whitelist, ip_blacklist)
+
self._clock = Clock(reactor)
self._pool = HTTPConnectionPool(reactor)
self._pool.retryAutomatically = False
@@ -83,24 +105,27 @@ class MatrixFederationAgent:
self._pool.cachedConnectionTimeout = 2 * 60
self._agent = Agent.usingEndpointFactory(
- self._reactor,
+ reactor,
MatrixHostnameEndpointFactory(
- reactor, tls_client_options_factory, _srv_resolver
+ reactor,
+ proxy_reactor,
+ tls_client_options_factory,
+ _srv_resolver,
),
pool=self._pool,
)
self.user_agent = user_agent
if _well_known_resolver is None:
- # Note that the name resolver has already been wrapped in a
- # IPBlacklistingResolver by MatrixFederationHttpClient.
_well_known_resolver = WellKnownResolver(
- self._reactor,
+ reactor,
agent=BlacklistingAgentWrapper(
- Agent(
- self._reactor,
+ ProxyAgent(
+ reactor,
+ proxy_reactor,
pool=self._pool,
contextFactory=tls_client_options_factory,
+ use_proxy=True,
),
ip_blacklist=ip_blacklist,
),
@@ -200,10 +225,12 @@ class MatrixHostnameEndpointFactory:
def __init__(
self,
reactor: IReactorCore,
+ proxy_reactor: IReactorCore,
tls_client_options_factory: Optional[FederationPolicyForHTTPS],
srv_resolver: Optional[SrvResolver],
):
self._reactor = reactor
+ self._proxy_reactor = proxy_reactor
self._tls_client_options_factory = tls_client_options_factory
if srv_resolver is None:
@@ -211,9 +238,10 @@ class MatrixHostnameEndpointFactory:
self._srv_resolver = srv_resolver
- def endpointForURI(self, parsed_uri):
+ def endpointForURI(self, parsed_uri: URI):
return MatrixHostnameEndpoint(
self._reactor,
+ self._proxy_reactor,
self._tls_client_options_factory,
self._srv_resolver,
parsed_uri,
@@ -227,23 +255,45 @@ class MatrixHostnameEndpoint:
Args:
reactor: twisted reactor to use for underlying requests
+ proxy_reactor: twisted reactor to use for connections to the proxy server.
+ 'reactor' might have some blacklisting applied (i.e. for DNS queries),
+ but we need unblocked access to the proxy.
tls_client_options_factory:
factory to use for fetching client tls options, or none to disable TLS.
srv_resolver: The SRV resolver to use
parsed_uri: The parsed URI that we're wanting to connect to.
+
+ Raises:
+ ValueError if the environment variables contain an invalid proxy specification.
+ RuntimeError if no tls_options_factory is given for a https connection
"""
def __init__(
self,
reactor: IReactorCore,
+ proxy_reactor: IReactorCore,
tls_client_options_factory: Optional[FederationPolicyForHTTPS],
srv_resolver: SrvResolver,
parsed_uri: URI,
):
self._reactor = reactor
-
self._parsed_uri = parsed_uri
+ # http_proxy is not needed because federation is always over TLS
+ proxies = getproxies_environment()
+ https_proxy = proxies["https"].encode() if "https" in proxies else None
+ self.no_proxy = proxies["no"] if "no" in proxies else None
+
+ # endpoint and credentials to use to connect to the outbound https proxy, if any.
+ (
+ self._https_proxy_endpoint,
+ self._https_proxy_creds,
+ ) = proxyagent.http_proxy_endpoint(
+ https_proxy,
+ proxy_reactor,
+ tls_client_options_factory,
+ )
+
# set up the TLS connection params
#
# XXX disabling TLS is really only supported here for the benefit of the
@@ -273,9 +323,33 @@ class MatrixHostnameEndpoint:
host = server.host
port = server.port
+ should_skip_proxy = False
+ if self.no_proxy is not None:
+ should_skip_proxy = proxy_bypass_environment(
+ host.decode(),
+ proxies={"no": self.no_proxy},
+ )
+
+ endpoint: IStreamClientEndpoint
try:
- logger.debug("Connecting to %s:%i", host.decode("ascii"), port)
- endpoint = HostnameEndpoint(self._reactor, host, port)
+ if self._https_proxy_endpoint and not should_skip_proxy:
+ logger.debug(
+ "Connecting to %s:%i via %s",
+ host.decode("ascii"),
+ port,
+ self._https_proxy_endpoint,
+ )
+ endpoint = HTTPConnectProxyEndpoint(
+ self._reactor,
+ self._https_proxy_endpoint,
+ host,
+ port,
+ proxy_creds=self._https_proxy_creds,
+ )
+ else:
+ logger.debug("Connecting to %s:%i", host.decode("ascii"), port)
+ # not using a proxy
+ endpoint = HostnameEndpoint(self._reactor, host, port)
if self._tls_options:
endpoint = wrapClientTLS(self._tls_options, endpoint)
result = await make_deferred_yieldable(
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 2efa15bf04..2e9898997c 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -59,7 +59,6 @@ from synapse.api.errors import (
from synapse.http import QuieterFileBodyProducer
from synapse.http.client import (
BlacklistingAgentWrapper,
- BlacklistingReactorWrapper,
BodyExceededMaxSize,
ByteWriteable,
encode_query_args,
@@ -69,7 +68,7 @@ from synapse.http.federation.matrix_federation_agent import MatrixFederationAgen
from synapse.logging import opentracing
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import set_tag, start_active_span, tags
-from synapse.types import ISynapseReactor, JsonDict
+from synapse.types import JsonDict
from synapse.util import json_decoder
from synapse.util.async_helpers import timeout_deferred
from synapse.util.metrics import Measure
@@ -325,13 +324,7 @@ class MatrixFederationHttpClient:
self.signing_key = hs.signing_key
self.server_name = hs.hostname
- # We need to use a DNS resolver which filters out blacklisted IP
- # addresses, to prevent DNS rebinding.
- self.reactor: ISynapseReactor = BlacklistingReactorWrapper(
- hs.get_reactor(),
- hs.config.federation_ip_range_whitelist,
- hs.config.federation_ip_range_blacklist,
- )
+ self.reactor = hs.get_reactor()
user_agent = hs.version_string
if hs.config.user_agent_suffix:
@@ -342,6 +335,7 @@ class MatrixFederationHttpClient:
self.reactor,
tls_client_options_factory,
user_agent,
+ hs.config.federation_ip_range_whitelist,
hs.config.federation_ip_range_blacklist,
)
diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py
index 19e987f118..a3f31452d0 100644
--- a/synapse/http/proxyagent.py
+++ b/synapse/http/proxyagent.py
@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import base64
import logging
import re
from typing import Any, Dict, Optional, Tuple
@@ -21,7 +20,6 @@ from urllib.request import ( # type: ignore[attr-defined]
proxy_bypass_environment,
)
-import attr
from zope.interface import implementer
from twisted.internet import defer
@@ -38,7 +36,7 @@ from twisted.web.error import SchemeNotSupported
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent, IBodyProducer, IPolicyForHTTPS
-from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint
+from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint, ProxyCredentials
from synapse.types import ISynapseReactor
logger = logging.getLogger(__name__)
@@ -46,22 +44,6 @@ logger = logging.getLogger(__name__)
_VALID_URI = re.compile(br"\A[\x21-\x7e]+\Z")
-@attr.s
-class ProxyCredentials:
- username_password = attr.ib(type=bytes)
-
- def as_proxy_authorization_value(self) -> bytes:
- """
- Return the value for a Proxy-Authorization header (i.e. 'Basic abdef==').
-
- Returns:
- A transformation of the authentication string the encoded value for
- a Proxy-Authorization header.
- """
- # Encode as base64 and prepend the authorization type
- return b"Basic " + base64.encodebytes(self.username_password)
-
-
@implementer(IAgent)
class ProxyAgent(_AgentBase):
"""An Agent implementation which will use an HTTP proxy if one was requested
@@ -95,6 +77,7 @@ class ProxyAgent(_AgentBase):
Raises:
ValueError if use_proxy is set and the environment variables
contain an invalid proxy specification.
+ RuntimeError if no tls_options_factory is given for a https connection
"""
def __init__(
@@ -131,11 +114,11 @@ class ProxyAgent(_AgentBase):
https_proxy = proxies["https"].encode() if "https" in proxies else None
no_proxy = proxies["no"] if "no" in proxies else None
- self.http_proxy_endpoint, self.http_proxy_creds = _http_proxy_endpoint(
+ self.http_proxy_endpoint, self.http_proxy_creds = http_proxy_endpoint(
http_proxy, self.proxy_reactor, contextFactory, **self._endpoint_kwargs
)
- self.https_proxy_endpoint, self.https_proxy_creds = _http_proxy_endpoint(
+ self.https_proxy_endpoint, self.https_proxy_creds = http_proxy_endpoint(
https_proxy, self.proxy_reactor, contextFactory, **self._endpoint_kwargs
)
@@ -224,22 +207,12 @@ class ProxyAgent(_AgentBase):
and self.https_proxy_endpoint
and not should_skip_proxy
):
- connect_headers = Headers()
-
- # Determine whether we need to set Proxy-Authorization headers
- if self.https_proxy_creds:
- # Set a Proxy-Authorization header
- connect_headers.addRawHeader(
- b"Proxy-Authorization",
- self.https_proxy_creds.as_proxy_authorization_value(),
- )
-
endpoint = HTTPConnectProxyEndpoint(
self.proxy_reactor,
self.https_proxy_endpoint,
parsed_uri.host,
parsed_uri.port,
- headers=connect_headers,
+ self.https_proxy_creds,
)
else:
# not using a proxy
@@ -268,10 +241,10 @@ class ProxyAgent(_AgentBase):
)
-def _http_proxy_endpoint(
+def http_proxy_endpoint(
proxy: Optional[bytes],
reactor: IReactorCore,
- tls_options_factory: IPolicyForHTTPS,
+ tls_options_factory: Optional[IPolicyForHTTPS],
**kwargs,
) -> Tuple[Optional[IStreamClientEndpoint], Optional[ProxyCredentials]]:
"""Parses an http proxy setting and returns an endpoint for the proxy
@@ -294,6 +267,7 @@ def _http_proxy_endpoint(
Raise:
ValueError if proxy has no hostname or unsupported scheme.
+ RuntimeError if no tls_options_factory is given for a https connection
"""
if proxy is None:
return None, None
@@ -305,8 +279,13 @@ def _http_proxy_endpoint(
proxy_endpoint = HostnameEndpoint(reactor, host, port, **kwargs)
if scheme == b"https":
- tls_options = tls_options_factory.creatorForNetloc(host, port)
- proxy_endpoint = wrapClientTLS(tls_options, proxy_endpoint)
+ if tls_options_factory:
+ tls_options = tls_options_factory.creatorForNetloc(host, port)
+ proxy_endpoint = wrapClientTLS(tls_options, proxy_endpoint)
+ else:
+ raise RuntimeError(
+ f"No TLS options for a https connection via proxy {proxy!s}"
+ )
return proxy_endpoint, credentials
diff --git a/synapse/rest/admin/media.py b/synapse/rest/admin/media.py
index 0a19a333d7..5f0555039d 100644
--- a/synapse/rest/admin/media.py
+++ b/synapse/rest/admin/media.py
@@ -259,7 +259,9 @@ class DeleteMediaByID(RestServlet):
logging.info("Deleting local media by ID: %s", media_id)
- deleted_media, total = await self.media_repository.delete_local_media(media_id)
+ deleted_media, total = await self.media_repository.delete_local_media_ids(
+ [media_id]
+ )
return 200, {"deleted_media": deleted_media, "total": total}
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py
index eef76ab18a..41f21ba118 100644
--- a/synapse/rest/admin/users.py
+++ b/synapse/rest/admin/users.py
@@ -172,7 +172,7 @@ class UserRestServletV2(RestServlet):
target_user = UserID.from_string(user_id)
if not self.hs.is_mine(target_user):
- raise SynapseError(400, "Can only lookup local users")
+ raise SynapseError(400, "Can only look up local users")
ret = await self.admin_handler.get_user(target_user)
@@ -796,7 +796,7 @@ class PushersRestServlet(RestServlet):
await assert_requester_is_admin(self.auth, request)
if not self.is_mine(UserID.from_string(user_id)):
- raise SynapseError(400, "Can only lookup local users")
+ raise SynapseError(400, "Can only look up local users")
if not await self.store.get_user_by_id(user_id):
raise NotFoundError("User not found")
@@ -811,10 +811,10 @@ class PushersRestServlet(RestServlet):
class UserMediaRestServlet(RestServlet):
"""
Gets information about all uploaded local media for a specific `user_id`.
+ With a DELETE request you can delete all of this media.
Example:
- http://localhost:8008/_synapse/admin/v1/users/
- @user:server/media
+ http://localhost:8008/_synapse/admin/v1/users/@user:server/media
Args:
The parameters `from` and `limit` are required for pagination.
@@ -830,6 +830,7 @@ class UserMediaRestServlet(RestServlet):
self.is_mine = hs.is_mine
self.auth = hs.get_auth()
self.store = hs.get_datastore()
+ self.media_repository = hs.get_media_repository()
async def on_GET(
self, request: SynapseRequest, user_id: str
@@ -840,7 +841,7 @@ class UserMediaRestServlet(RestServlet):
await assert_requester_is_admin(self.auth, request)
if not self.is_mine(UserID.from_string(user_id)):
- raise SynapseError(400, "Can only lookup local users")
+ raise SynapseError(400, "Can only look up local users")
user = await self.store.get_user_by_id(user_id)
if user is None:
@@ -898,6 +899,73 @@ class UserMediaRestServlet(RestServlet):
return 200, ret
+ async def on_DELETE(
+ self, request: SynapseRequest, user_id: str
+ ) -> Tuple[int, JsonDict]:
+ # This will always be set by the time Twisted calls us.
+ assert request.args is not None
+
+ await assert_requester_is_admin(self.auth, request)
+
+ if not self.is_mine(UserID.from_string(user_id)):
+ raise SynapseError(400, "Can only look up local users")
+
+ user = await self.store.get_user_by_id(user_id)
+ if user is None:
+ raise NotFoundError("Unknown user")
+
+ start = parse_integer(request, "from", default=0)
+ limit = parse_integer(request, "limit", default=100)
+
+ if start < 0:
+ raise SynapseError(
+ 400,
+ "Query parameter from must be a string representing a positive integer.",
+ errcode=Codes.INVALID_PARAM,
+ )
+
+ if limit < 0:
+ raise SynapseError(
+ 400,
+ "Query parameter limit must be a string representing a positive integer.",
+ errcode=Codes.INVALID_PARAM,
+ )
+
+ # If neither `order_by` nor `dir` is set, set the default order
+ # to newest media is on top for backward compatibility.
+ if b"order_by" not in request.args and b"dir" not in request.args:
+ order_by = MediaSortOrder.CREATED_TS.value
+ direction = "b"
+ else:
+ order_by = parse_string(
+ request,
+ "order_by",
+ default=MediaSortOrder.CREATED_TS.value,
+ allowed_values=(
+ MediaSortOrder.MEDIA_ID.value,
+ MediaSortOrder.UPLOAD_NAME.value,
+ MediaSortOrder.CREATED_TS.value,
+ MediaSortOrder.LAST_ACCESS_TS.value,
+ MediaSortOrder.MEDIA_LENGTH.value,
+ MediaSortOrder.MEDIA_TYPE.value,
+ MediaSortOrder.QUARANTINED_BY.value,
+ MediaSortOrder.SAFE_FROM_QUARANTINE.value,
+ ),
+ )
+ direction = parse_string(
+ request, "dir", default="f", allowed_values=("f", "b")
+ )
+
+ media, _ = await self.store.get_local_media_by_user_paginate(
+ start, limit, user_id, order_by, direction
+ )
+
+ deleted_media, total = await self.media_repository.delete_local_media_ids(
+ ([row["media_id"] for row in media])
+ )
+
+ return 200, {"deleted_media": deleted_media, "total": total}
+
class UserTokenRestServlet(RestServlet):
"""An admin API for logging in as a user.
@@ -1017,7 +1085,7 @@ class RateLimitRestServlet(RestServlet):
await assert_requester_is_admin(self.auth, request)
if not self.hs.is_mine_id(user_id):
- raise SynapseError(400, "Can only lookup local users")
+ raise SynapseError(400, "Can only look up local users")
if not await self.store.get_user_by_id(user_id):
raise NotFoundError("User not found")
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index f887970b76..f1bc43be2d 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -437,6 +437,7 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
prev_state_ids = list(prev_state_map.values())
auth_event_ids = prev_state_ids
+ state_events_at_start = []
for state_event in body["state_events_at_start"]:
assert_params_in_dict(
state_event, ["type", "origin_server_ts", "content", "sender"]
@@ -502,6 +503,7 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
)
event_id = event.event_id
+ state_events_at_start.append(event_id)
auth_event_ids.append(event_id)
events_to_create = body["events"]
@@ -651,7 +653,7 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
event_ids.append(base_insertion_event.event_id)
return 200, {
- "state_events": auth_event_ids,
+ "state_events": state_events_at_start,
"events": event_ids,
"next_chunk_id": insertion_event["content"][
EventContentFields.MSC2716_NEXT_CHUNK_ID
@@ -1445,6 +1447,46 @@ class RoomSpaceSummaryRestServlet(RestServlet):
)
+class RoomHierarchyRestServlet(RestServlet):
+ PATTERNS = (
+ re.compile(
+ "^/_matrix/client/unstable/org.matrix.msc2946"
+ "/rooms/(?P<room_id>[^/]*)/hierarchy$"
+ ),
+ )
+
+ def __init__(self, hs: "HomeServer"):
+ super().__init__()
+ self._auth = hs.get_auth()
+ self._space_summary_handler = hs.get_space_summary_handler()
+
+ async def on_GET(
+ self, request: SynapseRequest, room_id: str
+ ) -> Tuple[int, JsonDict]:
+ requester = await self._auth.get_user_by_req(request, allow_guest=True)
+
+ max_depth = parse_integer(request, "max_depth")
+ if max_depth is not None and max_depth < 0:
+ raise SynapseError(
+ 400, "'max_depth' must be a non-negative integer", Codes.BAD_JSON
+ )
+
+ limit = parse_integer(request, "limit")
+ if limit is not None and limit <= 0:
+ raise SynapseError(
+ 400, "'limit' must be a positive integer", Codes.BAD_JSON
+ )
+
+ return 200, await self._space_summary_handler.get_room_hierarchy(
+ requester.user.to_string(),
+ room_id,
+ suggested_only=parse_boolean(request, "suggested_only", default=False),
+ max_depth=max_depth,
+ limit=limit,
+ from_token=parse_string(request, "from"),
+ )
+
+
def register_servlets(hs: "HomeServer", http_server, is_worker=False):
msc2716_enabled = hs.config.experimental.msc2716_enabled
@@ -1463,6 +1505,7 @@ def register_servlets(hs: "HomeServer", http_server, is_worker=False):
RoomTypingRestServlet(hs).register(http_server)
RoomEventContextServlet(hs).register(http_server)
RoomSpaceSummaryRestServlet(hs).register(http_server)
+ RoomHierarchyRestServlet(hs).register(http_server)
RoomEventServlet(hs).register(http_server)
JoinedRoomsRestServlet(hs).register(http_server)
RoomAliasListServlet(hs).register(http_server)
diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index 4f702f890c..0f5ce41ff8 100644
--- a/synapse/rest/media/v1/media_repository.py
+++ b/synapse/rest/media/v1/media_repository.py
@@ -836,7 +836,9 @@ class MediaRepository:
return {"deleted": deleted}
- async def delete_local_media(self, media_id: str) -> Tuple[List[str], int]:
+ async def delete_local_media_ids(
+ self, media_ids: List[str]
+ ) -> Tuple[List[str], int]:
"""
Delete the given local or remote media ID from this server
@@ -845,7 +847,7 @@ class MediaRepository:
Returns:
A tuple of (list of deleted media IDs, total deleted media IDs).
"""
- return await self._remove_local_media_from_disk([media_id])
+ return await self._remove_local_media_from_disk(media_ids)
async def delete_old_local_media(
self,
diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py
index e2a5fc018c..7a826c086e 100644
--- a/tests/events/test_utils.py
+++ b/tests/events/test_utils.py
@@ -341,7 +341,7 @@ class PruneEventTestCase(unittest.TestCase):
"signatures": {},
"unsigned": {},
},
- room_version=RoomVersions.MSC3083,
+ room_version=RoomVersions.V8,
)
diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index 18e92e90d7..29845a80da 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
+from typing import Optional
from unittest.mock import Mock, call
from signedjson.key import generate_signing_key
@@ -339,8 +339,11 @@ class PresenceUpdateTestCase(unittest.HomeserverTestCase):
class PresenceTimeoutTestCase(unittest.TestCase):
+ """Tests different timers and that the timer does not change `status_msg` of user."""
+
def test_idle_timer(self):
user_id = "@foo:bar"
+ status_msg = "I'm here!"
now = 5000000
state = UserPresenceState.default(user_id)
@@ -348,12 +351,14 @@ class PresenceTimeoutTestCase(unittest.TestCase):
state=PresenceState.ONLINE,
last_active_ts=now - IDLE_TIMER - 1,
last_user_sync_ts=now,
+ status_msg=status_msg,
)
new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
self.assertIsNotNone(new_state)
self.assertEquals(new_state.state, PresenceState.UNAVAILABLE)
+ self.assertEquals(new_state.status_msg, status_msg)
def test_busy_no_idle(self):
"""
@@ -361,6 +366,7 @@ class PresenceTimeoutTestCase(unittest.TestCase):
presence state into unavailable.
"""
user_id = "@foo:bar"
+ status_msg = "I'm here!"
now = 5000000
state = UserPresenceState.default(user_id)
@@ -368,15 +374,18 @@ class PresenceTimeoutTestCase(unittest.TestCase):
state=PresenceState.BUSY,
last_active_ts=now - IDLE_TIMER - 1,
last_user_sync_ts=now,
+ status_msg=status_msg,
)
new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
self.assertIsNotNone(new_state)
self.assertEquals(new_state.state, PresenceState.BUSY)
+ self.assertEquals(new_state.status_msg, status_msg)
def test_sync_timeout(self):
user_id = "@foo:bar"
+ status_msg = "I'm here!"
now = 5000000
state = UserPresenceState.default(user_id)
@@ -384,15 +393,18 @@ class PresenceTimeoutTestCase(unittest.TestCase):
state=PresenceState.ONLINE,
last_active_ts=0,
last_user_sync_ts=now - SYNC_ONLINE_TIMEOUT - 1,
+ status_msg=status_msg,
)
new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
self.assertIsNotNone(new_state)
self.assertEquals(new_state.state, PresenceState.OFFLINE)
+ self.assertEquals(new_state.status_msg, status_msg)
def test_sync_online(self):
user_id = "@foo:bar"
+ status_msg = "I'm here!"
now = 5000000
state = UserPresenceState.default(user_id)
@@ -400,6 +412,7 @@ class PresenceTimeoutTestCase(unittest.TestCase):
state=PresenceState.ONLINE,
last_active_ts=now - SYNC_ONLINE_TIMEOUT - 1,
last_user_sync_ts=now - SYNC_ONLINE_TIMEOUT - 1,
+ status_msg=status_msg,
)
new_state = handle_timeout(
@@ -408,9 +421,11 @@ class PresenceTimeoutTestCase(unittest.TestCase):
self.assertIsNotNone(new_state)
self.assertEquals(new_state.state, PresenceState.ONLINE)
+ self.assertEquals(new_state.status_msg, status_msg)
def test_federation_ping(self):
user_id = "@foo:bar"
+ status_msg = "I'm here!"
now = 5000000
state = UserPresenceState.default(user_id)
@@ -419,12 +434,13 @@ class PresenceTimeoutTestCase(unittest.TestCase):
last_active_ts=now,
last_user_sync_ts=now,
last_federation_update_ts=now - FEDERATION_PING_INTERVAL - 1,
+ status_msg=status_msg,
)
new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
self.assertIsNotNone(new_state)
- self.assertEquals(new_state, new_state)
+ self.assertEquals(state, new_state)
def test_no_timeout(self):
user_id = "@foo:bar"
@@ -444,6 +460,7 @@ class PresenceTimeoutTestCase(unittest.TestCase):
def test_federation_timeout(self):
user_id = "@foo:bar"
+ status_msg = "I'm here!"
now = 5000000
state = UserPresenceState.default(user_id)
@@ -452,6 +469,7 @@ class PresenceTimeoutTestCase(unittest.TestCase):
last_active_ts=now,
last_user_sync_ts=now,
last_federation_update_ts=now - FEDERATION_TIMEOUT - 1,
+ status_msg=status_msg,
)
new_state = handle_timeout(
@@ -460,9 +478,11 @@ class PresenceTimeoutTestCase(unittest.TestCase):
self.assertIsNotNone(new_state)
self.assertEquals(new_state.state, PresenceState.OFFLINE)
+ self.assertEquals(new_state.status_msg, status_msg)
def test_last_active(self):
user_id = "@foo:bar"
+ status_msg = "I'm here!"
now = 5000000
state = UserPresenceState.default(user_id)
@@ -471,6 +491,7 @@ class PresenceTimeoutTestCase(unittest.TestCase):
last_active_ts=now - LAST_ACTIVE_GRANULARITY - 1,
last_user_sync_ts=now,
last_federation_update_ts=now,
+ status_msg=status_msg,
)
new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
@@ -516,6 +537,144 @@ class PresenceHandlerTestCase(unittest.HomeserverTestCase):
)
self.assertEqual(state.state, PresenceState.OFFLINE)
+ def test_user_goes_offline_by_timeout_status_msg_remain(self):
+ """Test that if a user doesn't update the records for a while
+ the user's presence goes `OFFLINE` because of timeout and `status_msg` remains.
+ """
+ user_id = "@test:server"
+ status_msg = "I'm here!"
+
+ # Mark user as online
+ self._set_presencestate_with_status_msg(
+ user_id, PresenceState.ONLINE, status_msg
+ )
+
+ # Check that if we wait a while without telling the handler the user has
+ # stopped syncing that their presence state doesn't get timed out.
+ self.reactor.advance(SYNC_ONLINE_TIMEOUT / 2)
+
+ state = self.get_success(
+ self.presence_handler.get_state(UserID.from_string(user_id))
+ )
+ self.assertEqual(state.state, PresenceState.ONLINE)
+ self.assertEqual(state.status_msg, status_msg)
+
+ # Check that if the timeout fires, then the syncing user gets timed out
+ self.reactor.advance(SYNC_ONLINE_TIMEOUT)
+
+ state = self.get_success(
+ self.presence_handler.get_state(UserID.from_string(user_id))
+ )
+ # status_msg should remain even after going offline
+ self.assertEqual(state.state, PresenceState.OFFLINE)
+ self.assertEqual(state.status_msg, status_msg)
+
+ def test_user_goes_offline_manually_with_no_status_msg(self):
+ """Test that if a user changes presence manually to `OFFLINE`
+ and no status is set, that `status_msg` is `None`.
+ """
+ user_id = "@test:server"
+ status_msg = "I'm here!"
+
+ # Mark user as online
+ self._set_presencestate_with_status_msg(
+ user_id, PresenceState.ONLINE, status_msg
+ )
+
+ # Mark user as offline
+ self.get_success(
+ self.presence_handler.set_state(
+ UserID.from_string(user_id), {"presence": PresenceState.OFFLINE}
+ )
+ )
+
+ state = self.get_success(
+ self.presence_handler.get_state(UserID.from_string(user_id))
+ )
+ self.assertEqual(state.state, PresenceState.OFFLINE)
+ self.assertEqual(state.status_msg, None)
+
+ def test_user_goes_offline_manually_with_status_msg(self):
+ """Test that if a user changes presence manually to `OFFLINE`
+ and a status is set, that `status_msg` appears.
+ """
+ user_id = "@test:server"
+ status_msg = "I'm here!"
+
+ # Mark user as online
+ self._set_presencestate_with_status_msg(
+ user_id, PresenceState.ONLINE, status_msg
+ )
+
+ # Mark user as offline
+ self._set_presencestate_with_status_msg(
+ user_id, PresenceState.OFFLINE, "And now here."
+ )
+
+ def test_user_reset_online_with_no_status(self):
+ """Test that if a user sets the presence manually again
+ and no status is set, that `status_msg` is `None`.
+ """
+ user_id = "@test:server"
+ status_msg = "I'm here!"
+
+ # Mark user as online
+ self._set_presencestate_with_status_msg(
+ user_id, PresenceState.ONLINE, status_msg
+ )
+
+ # Mark user as online again
+ self.get_success(
+ self.presence_handler.set_state(
+ UserID.from_string(user_id), {"presence": PresenceState.ONLINE}
+ )
+ )
+
+ state = self.get_success(
+ self.presence_handler.get_state(UserID.from_string(user_id))
+ )
+ # status_msg should be reset after presence is set again without one
+ self.assertEqual(state.state, PresenceState.ONLINE)
+ self.assertEqual(state.status_msg, None)
+
+ def test_set_presence_with_status_msg_none(self):
+ """Test that if a user sets the presence manually again
+ and status is `None`, that `status_msg` is `None`.
+ """
+ user_id = "@test:server"
+ status_msg = "I'm here!"
+
+ # Mark user as online
+ self._set_presencestate_with_status_msg(
+ user_id, PresenceState.ONLINE, status_msg
+ )
+
+ # Mark user as online and `status_msg = None`
+ self._set_presencestate_with_status_msg(user_id, PresenceState.ONLINE, None)
+
+ def _set_presencestate_with_status_msg(
+ self, user_id: str, state: PresenceState, status_msg: Optional[str]
+ ):
+ """Set a PresenceState and status_msg and check the result.
+
+ Args:
+ user_id: User for whom the status is to be set.
+ state: The new PresenceState.
+ status_msg: Status message that is to be set.
+ """
+ self.get_success(
+ self.presence_handler.set_state(
+ UserID.from_string(user_id),
+ {"presence": state, "status_msg": status_msg},
+ )
+ )
+
+ new_state = self.get_success(
+ self.presence_handler.get_state(UserID.from_string(user_id))
+ )
+ self.assertEqual(new_state.state, state)
+ self.assertEqual(new_state.status_msg, status_msg)
+
class PresenceFederationQueueTestCase(unittest.HomeserverTestCase):
def prepare(self, reactor, clock, hs):
diff --git a/tests/handlers/test_space_summary.py b/tests/handlers/test_space_summary.py
index f982a8c8b4..83c2bdd8f9 100644
--- a/tests/handlers/test_space_summary.py
+++ b/tests/handlers/test_space_summary.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Any, Iterable, Optional, Tuple
+from typing import Any, Iterable, List, Optional, Tuple
from unittest import mock
from synapse.api.constants import (
@@ -23,14 +23,14 @@ from synapse.api.constants import (
RestrictedJoinRuleTypes,
RoomTypes,
)
-from synapse.api.errors import AuthError
+from synapse.api.errors import AuthError, SynapseError
from synapse.api.room_versions import RoomVersions
from synapse.events import make_event_from_dict
from synapse.handlers.space_summary import _child_events_comparison_key, _RoomEntry
from synapse.rest import admin
from synapse.rest.client.v1 import login, room
from synapse.server import HomeServer
-from synapse.types import JsonDict
+from synapse.types import JsonDict, UserID
from tests import unittest
@@ -123,30 +123,111 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
self.room = self.helper.create_room_as(self.user, tok=self.token)
self._add_child(self.space, self.room, self.token)
- def _add_child(self, space_id: str, room_id: str, token: str) -> None:
+ def _add_child(
+ self, space_id: str, room_id: str, token: str, order: Optional[str] = None
+ ) -> None:
"""Add a child room to a space."""
+ content: JsonDict = {"via": [self.hs.hostname]}
+ if order is not None:
+ content["order"] = order
self.helper.send_state(
space_id,
event_type=EventTypes.SpaceChild,
- body={"via": [self.hs.hostname]},
+ body=content,
tok=token,
state_key=room_id,
)
- def _assert_rooms(self, result: JsonDict, rooms: Iterable[str]) -> None:
- """Assert that the expected room IDs are in the response."""
- self.assertCountEqual([room.get("room_id") for room in result["rooms"]], rooms)
-
- def _assert_events(
- self, result: JsonDict, events: Iterable[Tuple[str, str]]
+ def _assert_rooms(
+ self, result: JsonDict, rooms_and_children: Iterable[Tuple[str, Iterable[str]]]
) -> None:
- """Assert that the expected parent / child room IDs are in the response."""
+ """
+ Assert that the expected room IDs and events are in the response.
+
+ Args:
+ result: The result from the API call.
+ rooms_and_children: An iterable of tuples where each tuple is:
+ The expected room ID.
+ The expected IDs of any children rooms.
+ """
+ room_ids = []
+ children_ids = []
+ for room_id, children in rooms_and_children:
+ room_ids.append(room_id)
+ if children:
+ children_ids.extend([(room_id, child_id) for child_id in children])
+ self.assertCountEqual(
+ [room.get("room_id") for room in result["rooms"]], room_ids
+ )
self.assertCountEqual(
[
(event.get("room_id"), event.get("state_key"))
for event in result["events"]
],
- events,
+ children_ids,
+ )
+
+ def _assert_hierarchy(
+ self, result: JsonDict, rooms_and_children: Iterable[Tuple[str, Iterable[str]]]
+ ) -> None:
+ """
+ Assert that the expected room IDs are in the response.
+
+ Args:
+ result: The result from the API call.
+ rooms_and_children: An iterable of tuples where each tuple is:
+ The expected room ID.
+ The expected IDs of any children rooms.
+ """
+ result_room_ids = []
+ result_children_ids = []
+ for result_room in result["rooms"]:
+ result_room_ids.append(result_room["room_id"])
+ result_children_ids.append(
+ [
+ (cs["room_id"], cs["state_key"])
+ for cs in result_room.get("children_state")
+ ]
+ )
+
+ room_ids = []
+ children_ids = []
+ for room_id, children in rooms_and_children:
+ room_ids.append(room_id)
+ children_ids.append([(room_id, child_id) for child_id in children])
+
+ # Note that order matters.
+ self.assertEqual(result_room_ids, room_ids)
+ self.assertEqual(result_children_ids, children_ids)
+
+ def _poke_fed_invite(self, room_id: str, from_user: str) -> None:
+ """
+ Creates an invite (as if received over federation) for the room from the
+ given remote user.
+
+ Args:
+ room_id: The room ID to issue an invite for.
+ from_user: The user to invite from.
+ """
+ # Poke an invite over federation into the database.
+ fed_handler = self.hs.get_federation_handler()
+ fed_hostname = UserID.from_string(from_user).domain
+ event = make_event_from_dict(
+ {
+ "room_id": room_id,
+ "event_id": "!abcd:" + fed_hostname,
+ "type": EventTypes.Member,
+ "sender": from_user,
+ "state_key": self.user,
+ "content": {"membership": Membership.INVITE},
+ "prev_events": [],
+ "auth_events": [],
+ "depth": 1,
+ "origin_server_ts": 1234,
+ }
+ )
+ self.get_success(
+ fed_handler.on_invite_request(fed_hostname, event, RoomVersions.V6)
)
def test_simple_space(self):
@@ -154,16 +235,36 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
result = self.get_success(self.handler.get_space_summary(self.user, self.space))
# The result should have the space and the room in it, along with a link
# from space -> room.
- self._assert_rooms(result, [self.space, self.room])
- self._assert_events(result, [(self.space, self.room)])
+ expected = [(self.space, [self.room]), (self.room, ())]
+ self._assert_rooms(result, expected)
+
+ result = self.get_success(
+ self.handler.get_room_hierarchy(self.user, self.space)
+ )
+ self._assert_hierarchy(result, expected)
def test_visibility(self):
"""A user not in a space cannot inspect it."""
user2 = self.register_user("user2", "pass")
token2 = self.login("user2", "pass")
- # The user cannot see the space.
+ # The user can see the space since it is publicly joinable.
+ result = self.get_success(self.handler.get_space_summary(user2, self.space))
+ expected = [(self.space, [self.room]), (self.room, ())]
+ self._assert_rooms(result, expected)
+
+ result = self.get_success(self.handler.get_room_hierarchy(user2, self.space))
+ self._assert_hierarchy(result, expected)
+
+ # If the space is made invite-only, it should no longer be viewable.
+ self.helper.send_state(
+ self.space,
+ event_type=EventTypes.JoinRules,
+ body={"join_rule": JoinRules.INVITE},
+ tok=self.token,
+ )
self.get_failure(self.handler.get_space_summary(user2, self.space), AuthError)
+ self.get_failure(self.handler.get_room_hierarchy(user2, self.space), AuthError)
# If the space is made world-readable it should return a result.
self.helper.send_state(
@@ -173,8 +274,10 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
tok=self.token,
)
result = self.get_success(self.handler.get_space_summary(user2, self.space))
- self._assert_rooms(result, [self.space, self.room])
- self._assert_events(result, [(self.space, self.room)])
+ self._assert_rooms(result, expected)
+
+ result = self.get_success(self.handler.get_room_hierarchy(user2, self.space))
+ self._assert_hierarchy(result, expected)
# Make it not world-readable again and confirm it results in an error.
self.helper.send_state(
@@ -184,12 +287,26 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
tok=self.token,
)
self.get_failure(self.handler.get_space_summary(user2, self.space), AuthError)
+ self.get_failure(self.handler.get_room_hierarchy(user2, self.space), AuthError)
# Join the space and results should be returned.
+ self.helper.invite(self.space, targ=user2, tok=self.token)
self.helper.join(self.space, user2, tok=token2)
result = self.get_success(self.handler.get_space_summary(user2, self.space))
- self._assert_rooms(result, [self.space, self.room])
- self._assert_events(result, [(self.space, self.room)])
+ self._assert_rooms(result, expected)
+
+ result = self.get_success(self.handler.get_room_hierarchy(user2, self.space))
+ self._assert_hierarchy(result, expected)
+
+ # Attempting to view an unknown room returns the same error.
+ self.get_failure(
+ self.handler.get_space_summary(user2, "#not-a-space:" + self.hs.hostname),
+ AuthError,
+ )
+ self.get_failure(
+ self.handler.get_room_hierarchy(user2, "#not-a-space:" + self.hs.hostname),
+ AuthError,
+ )
def _create_room_with_join_rule(
self, join_rule: str, room_version: Optional[str] = None, **extra_content
@@ -231,13 +348,13 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
invited_room = self._create_room_with_join_rule(JoinRules.INVITE)
self.helper.invite(invited_room, targ=user2, tok=self.token)
restricted_room = self._create_room_with_join_rule(
- JoinRules.MSC3083_RESTRICTED,
- room_version=RoomVersions.MSC3083.identifier,
+ JoinRules.RESTRICTED,
+ room_version=RoomVersions.V8.identifier,
allow=[],
)
restricted_accessible_room = self._create_room_with_join_rule(
- JoinRules.MSC3083_RESTRICTED,
- room_version=RoomVersions.MSC3083.identifier,
+ JoinRules.RESTRICTED,
+ room_version=RoomVersions.V8.identifier,
allow=[
{
"type": RestrictedJoinRuleTypes.ROOM_MEMBERSHIP,
@@ -260,34 +377,33 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
# Join the space.
self.helper.join(self.space, user2, tok=token2)
result = self.get_success(self.handler.get_space_summary(user2, self.space))
-
- self._assert_rooms(
- result,
- [
+ expected = [
+ (
self.space,
- self.room,
- public_room,
- knock_room,
- invited_room,
- restricted_accessible_room,
- world_readable_room,
- joined_room,
- ],
- )
- self._assert_events(
- result,
- [
- (self.space, self.room),
- (self.space, public_room),
- (self.space, knock_room),
- (self.space, not_invited_room),
- (self.space, invited_room),
- (self.space, restricted_room),
- (self.space, restricted_accessible_room),
- (self.space, world_readable_room),
- (self.space, joined_room),
- ],
- )
+ [
+ self.room,
+ public_room,
+ knock_room,
+ not_invited_room,
+ invited_room,
+ restricted_room,
+ restricted_accessible_room,
+ world_readable_room,
+ joined_room,
+ ],
+ ),
+ (self.room, ()),
+ (public_room, ()),
+ (knock_room, ()),
+ (invited_room, ()),
+ (restricted_accessible_room, ()),
+ (world_readable_room, ()),
+ (joined_room, ()),
+ ]
+ self._assert_rooms(result, expected)
+
+ result = self.get_success(self.handler.get_room_hierarchy(user2, self.space))
+ self._assert_hierarchy(result, expected)
def test_complex_space(self):
"""
@@ -319,18 +435,143 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
result = self.get_success(self.handler.get_space_summary(self.user, self.space))
# The result should include each room a single time and each link.
- self._assert_rooms(result, [self.space, self.room, subspace, subroom])
- self._assert_events(
- result,
- [
- (self.space, self.room),
- (self.space, room2),
- (self.space, subspace),
- (subspace, subroom),
- (subspace, self.room),
- (subspace, room2),
- ],
+ expected = [
+ (self.space, [self.room, room2, subspace]),
+ (self.room, ()),
+ (subspace, [subroom, self.room, room2]),
+ (subroom, ()),
+ ]
+ self._assert_rooms(result, expected)
+
+ result = self.get_success(
+ self.handler.get_room_hierarchy(self.user, self.space)
+ )
+ self._assert_hierarchy(result, expected)
+
+ def test_pagination(self):
+ """Test simple pagination works."""
+ room_ids = []
+ for i in range(1, 10):
+ room = self.helper.create_room_as(self.user, tok=self.token)
+ self._add_child(self.space, room, self.token, order=str(i))
+ room_ids.append(room)
+ # The room created initially doesn't have an order, so comes last.
+ room_ids.append(self.room)
+
+ result = self.get_success(
+ self.handler.get_room_hierarchy(self.user, self.space, limit=7)
+ )
+ # The result should have the space and all of the links, plus some of the
+ # rooms and a pagination token.
+ expected: List[Tuple[str, Iterable[str]]] = [(self.space, room_ids)]
+ expected += [(room_id, ()) for room_id in room_ids[:6]]
+ self._assert_hierarchy(result, expected)
+ self.assertIn("next_batch", result)
+
+ # Check the next page.
+ result = self.get_success(
+ self.handler.get_room_hierarchy(
+ self.user, self.space, limit=5, from_token=result["next_batch"]
+ )
+ )
+ # The result should have the remaining rooms in it (and no further
+ # pagination token).
+ expected = [(room_id, ()) for room_id in room_ids[6:]]
+ self._assert_hierarchy(result, expected)
+ self.assertNotIn("next_batch", result)
+
+ def test_invalid_pagination_token(self):
+ """Test that an invalid or mismatched pagination token is rejected."""
+ room_ids = []
+ for i in range(1, 10):
+ room = self.helper.create_room_as(self.user, tok=self.token)
+ self._add_child(self.space, room, self.token, order=str(i))
+ room_ids.append(room)
+ # The room created initially doesn't have an order, so comes last.
+ room_ids.append(self.room)
+
+ result = self.get_success(
+ self.handler.get_room_hierarchy(self.user, self.space, limit=7)
+ )
+ self.assertIn("next_batch", result)
+
+ # Changing the room ID, suggested-only, or max-depth causes an error.
+ self.get_failure(
+ self.handler.get_room_hierarchy(
+ self.user, self.room, from_token=result["next_batch"]
+ ),
+ SynapseError,
+ )
+ self.get_failure(
+ self.handler.get_room_hierarchy(
+ self.user,
+ self.space,
+ suggested_only=True,
+ from_token=result["next_batch"],
+ ),
+ SynapseError,
+ )
+ self.get_failure(
+ self.handler.get_room_hierarchy(
+ self.user, self.space, max_depth=0, from_token=result["next_batch"]
+ ),
+ SynapseError,
+ )
+
+ # An invalid token is ignored.
+ self.get_failure(
+ self.handler.get_room_hierarchy(self.user, self.space, from_token="foo"),
+ SynapseError,
+ )
+
+ def test_max_depth(self):
+ """Create a deep tree to test the max depth against."""
+ spaces = [self.space]
+ rooms = [self.room]
+ for _ in range(5):
+ spaces.append(
+ self.helper.create_room_as(
+ self.user,
+ tok=self.token,
+ extra_content={
+ "creation_content": {
+ EventContentFields.ROOM_TYPE: RoomTypes.SPACE
+ }
+ },
+ )
+ )
+ self._add_child(spaces[-2], spaces[-1], self.token)
+ rooms.append(self.helper.create_room_as(self.user, tok=self.token))
+ self._add_child(spaces[-1], rooms[-1], self.token)
+
+ # Test just the space itself.
+ result = self.get_success(
+ self.handler.get_room_hierarchy(self.user, self.space, max_depth=0)
)
+ expected: List[Tuple[str, Iterable[str]]] = [(spaces[0], [rooms[0], spaces[1]])]
+ self._assert_hierarchy(result, expected)
+
+ # A single additional layer.
+ result = self.get_success(
+ self.handler.get_room_hierarchy(self.user, self.space, max_depth=1)
+ )
+ expected += [
+ (rooms[0], ()),
+ (spaces[1], [rooms[1], spaces[2]]),
+ ]
+ self._assert_hierarchy(result, expected)
+
+ # A few layers.
+ result = self.get_success(
+ self.handler.get_room_hierarchy(self.user, self.space, max_depth=3)
+ )
+ expected += [
+ (rooms[1], ()),
+ (spaces[2], [rooms[2], spaces[3]]),
+ (rooms[2], ()),
+ (spaces[3], [rooms[3], spaces[4]]),
+ ]
+ self._assert_hierarchy(result, expected)
def test_fed_complex(self):
"""
@@ -387,15 +628,13 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
self.handler.get_space_summary(self.user, self.space)
)
- self._assert_rooms(result, [self.space, self.room, subspace, subroom])
- self._assert_events(
- result,
- [
- (self.space, self.room),
- (self.space, subspace),
- (subspace, subroom),
- ],
- )
+ expected = [
+ (self.space, [self.room, subspace]),
+ (self.room, ()),
+ (subspace, [subroom]),
+ (subroom, ()),
+ ]
+ self._assert_rooms(result, expected)
def test_fed_filtering(self):
"""
@@ -416,24 +655,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
joined_room = self.helper.create_room_as(self.user, tok=self.token)
# Poke an invite over federation into the database.
- fed_handler = self.hs.get_federation_handler()
- event = make_event_from_dict(
- {
- "room_id": invited_room,
- "event_id": "!abcd:" + fed_hostname,
- "type": EventTypes.Member,
- "sender": "@remote:" + fed_hostname,
- "state_key": self.user,
- "content": {"membership": Membership.INVITE},
- "prev_events": [],
- "auth_events": [],
- "depth": 1,
- "origin_server_ts": 1234,
- }
- )
- self.get_success(
- fed_handler.on_invite_request(fed_hostname, event, RoomVersions.V6)
- )
+ self._poke_fed_invite(invited_room, "@remote:" + fed_hostname)
async def summarize_remote_room(
_self, room, suggested_only, max_children, exclude_rooms
@@ -477,7 +699,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
{
"room_id": restricted_room,
"world_readable": False,
- "join_rules": JoinRules.MSC3083_RESTRICTED,
+ "join_rules": JoinRules.RESTRICTED,
"allowed_spaces": [],
},
),
@@ -486,7 +708,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
{
"room_id": restricted_accessible_room,
"world_readable": False,
- "join_rules": JoinRules.MSC3083_RESTRICTED,
+ "join_rules": JoinRules.RESTRICTED,
"allowed_spaces": [self.room],
},
),
@@ -541,32 +763,73 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
self.handler.get_space_summary(self.user, self.space)
)
- self._assert_rooms(
- result,
- [
- self.space,
- self.room,
+ expected = [
+ (self.space, [self.room, subspace]),
+ (self.room, ()),
+ (
subspace,
- public_room,
- knock_room,
- invited_room,
- restricted_accessible_room,
- world_readable_room,
- joined_room,
- ],
- )
- self._assert_events(
- result,
- [
- (self.space, self.room),
- (self.space, subspace),
- (subspace, public_room),
- (subspace, knock_room),
- (subspace, not_invited_room),
- (subspace, invited_room),
- (subspace, restricted_room),
- (subspace, restricted_accessible_room),
- (subspace, world_readable_room),
- (subspace, joined_room),
- ],
- )
+ [
+ public_room,
+ knock_room,
+ not_invited_room,
+ invited_room,
+ restricted_room,
+ restricted_accessible_room,
+ world_readable_room,
+ joined_room,
+ ],
+ ),
+ (public_room, ()),
+ (knock_room, ()),
+ (invited_room, ()),
+ (restricted_accessible_room, ()),
+ (world_readable_room, ()),
+ (joined_room, ()),
+ ]
+ self._assert_rooms(result, expected)
+
+ def test_fed_invited(self):
+ """
+ A room which the user was invited to should be included in the response.
+
+ This differs from test_fed_filtering in that the room itself is being
+ queried over federation, instead of it being included as a sub-room of
+ a space in the response.
+ """
+ fed_hostname = self.hs.hostname + "2"
+ fed_room = "#subroom:" + fed_hostname
+
+ # Poke an invite over federation into the database.
+ self._poke_fed_invite(fed_room, "@remote:" + fed_hostname)
+
+ async def summarize_remote_room(
+ _self, room, suggested_only, max_children, exclude_rooms
+ ):
+ return [
+ _RoomEntry(
+ fed_room,
+ {
+ "room_id": fed_room,
+ "world_readable": False,
+ "join_rules": JoinRules.INVITE,
+ },
+ ),
+ ]
+
+ # Add a room to the space which is on another server.
+ self._add_child(self.space, fed_room, self.token)
+
+ with mock.patch(
+ "synapse.handlers.space_summary.SpaceSummaryHandler._summarize_remote_room",
+ new=summarize_remote_room,
+ ):
+ result = self.get_success(
+ self.handler.get_space_summary(self.user, self.space)
+ )
+
+ expected = [
+ (self.space, [self.room, fed_room]),
+ (self.room, ()),
+ (fed_room, ()),
+ ]
+ self._assert_rooms(result, expected)
diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py
index a37bce08c3..992d8f94fd 100644
--- a/tests/http/federation/test_matrix_federation_agent.py
+++ b/tests/http/federation/test_matrix_federation_agent.py
@@ -11,9 +11,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+import base64
import logging
-from typing import Optional
-from unittest.mock import Mock
+import os
+from typing import Iterable, Optional
+from unittest.mock import Mock, patch
import treq
from netaddr import IPSet
@@ -22,11 +24,12 @@ from zope.interface import implementer
from twisted.internet import defer
from twisted.internet._sslverify import ClientTLSOptions, OpenSSLCertificateOptions
+from twisted.internet.interfaces import IProtocolFactory
from twisted.internet.protocol import Factory
-from twisted.protocols.tls import TLSMemoryBIOFactory
+from twisted.protocols.tls import TLSMemoryBIOFactory, TLSMemoryBIOProtocol
from twisted.web._newclient import ResponseNeverReceived
from twisted.web.client import Agent
-from twisted.web.http import HTTPChannel
+from twisted.web.http import HTTPChannel, Request
from twisted.web.http_headers import Headers
from twisted.web.iweb import IPolicyForHTTPS
@@ -49,24 +52,6 @@ from tests.utils import default_config
logger = logging.getLogger(__name__)
-test_server_connection_factory = None
-
-
-def get_connection_factory():
- # this needs to happen once, but not until we are ready to run the first test
- global test_server_connection_factory
- if test_server_connection_factory is None:
- test_server_connection_factory = TestServerTLSConnectionFactory(
- sanlist=[
- b"DNS:testserv",
- b"DNS:target-server",
- b"DNS:xn--bcher-kva.com",
- b"IP:1.2.3.4",
- b"IP:::1",
- ]
- )
- return test_server_connection_factory
-
# Once Async Mocks or lambdas are supported this can go away.
def generate_resolve_service(result):
@@ -100,24 +85,38 @@ class MatrixFederationAgentTests(unittest.TestCase):
had_well_known_cache=self.had_well_known_cache,
)
- self.agent = MatrixFederationAgent(
- reactor=self.reactor,
- tls_client_options_factory=self.tls_factory,
- user_agent="test-agent", # Note that this is unused since _well_known_resolver is provided.
- ip_blacklist=IPSet(),
- _srv_resolver=self.mock_resolver,
- _well_known_resolver=self.well_known_resolver,
- )
-
- def _make_connection(self, client_factory, expected_sni):
+ def _make_connection(
+ self,
+ client_factory: IProtocolFactory,
+ ssl: bool = True,
+ expected_sni: bytes = None,
+ tls_sanlist: Optional[Iterable[bytes]] = None,
+ ) -> HTTPChannel:
"""Builds a test server, and completes the outgoing client connection
+ Args:
+            client_factory: the factory that the
+ application is trying to use to make the outbound connection. We will
+ invoke it to build the client Protocol
+
+ ssl: If true, we will expect an ssl connection and wrap
+ server_factory with a TLSMemoryBIOFactory
+                Set to False only when the proxy expects a plain HTTP connection;
+                federation requests themselves always use HTTPS.
+
+ expected_sni: the expected SNI value
+
+ tls_sanlist: list of SAN entries for the TLS cert presented by the server.
Returns:
- HTTPChannel: the test server
+ the server Protocol returned by server_factory
"""
# build the test server
- server_tls_protocol = _build_test_server(get_connection_factory())
+ server_factory = _get_test_protocol_factory()
+ if ssl:
+ server_factory = _wrap_server_factory_for_tls(server_factory, tls_sanlist)
+
+ server_protocol = server_factory.buildProtocol(None)
# now, tell the client protocol factory to build the client protocol (it will be a
# _WrappingProtocol, around a TLSMemoryBIOProtocol, around an
@@ -128,35 +127,39 @@ class MatrixFederationAgentTests(unittest.TestCase):
# stubbing that out here.
client_protocol = client_factory.buildProtocol(None)
client_protocol.makeConnection(
- FakeTransport(server_tls_protocol, self.reactor, client_protocol)
+ FakeTransport(server_protocol, self.reactor, client_protocol)
)
- # tell the server tls protocol to send its stuff back to the client, too
- server_tls_protocol.makeConnection(
- FakeTransport(client_protocol, self.reactor, server_tls_protocol)
+ # tell the server protocol to send its stuff back to the client, too
+ server_protocol.makeConnection(
+ FakeTransport(client_protocol, self.reactor, server_protocol)
)
- # grab a hold of the TLS connection, in case it gets torn down
- server_tls_connection = server_tls_protocol._tlsConnection
-
- # fish the test server back out of the server-side TLS protocol.
- http_protocol = server_tls_protocol.wrappedProtocol
+ if ssl:
+ # fish the test server back out of the server-side TLS protocol.
+ http_protocol = server_protocol.wrappedProtocol
+ # grab a hold of the TLS connection, in case it gets torn down
+ tls_connection = server_protocol._tlsConnection
+ else:
+ http_protocol = server_protocol
+ tls_connection = None
- # give the reactor a pump to get the TLS juices flowing.
- self.reactor.pump((0.1,))
+ # give the reactor a pump to get the TLS juices flowing (if needed)
+ self.reactor.advance(0)
# check the SNI
- server_name = server_tls_connection.get_servername()
- self.assertEqual(
- server_name,
- expected_sni,
- "Expected SNI %s but got %s" % (expected_sni, server_name),
- )
+ if expected_sni is not None:
+ server_name = tls_connection.get_servername()
+ self.assertEqual(
+ server_name,
+ expected_sni,
+ f"Expected SNI {expected_sni!s} but got {server_name!s}",
+ )
return http_protocol
@defer.inlineCallbacks
- def _make_get_request(self, uri):
+ def _make_get_request(self, uri: bytes):
"""
Sends a simple GET request via the agent, and checks its logcontext management
"""
@@ -180,20 +183,20 @@ class MatrixFederationAgentTests(unittest.TestCase):
def _handle_well_known_connection(
self,
- client_factory,
- expected_sni,
- content,
+ client_factory: IProtocolFactory,
+ expected_sni: bytes,
+ content: bytes,
response_headers: Optional[dict] = None,
- ):
+ ) -> HTTPChannel:
"""Handle an outgoing HTTPs connection: wire it up to a server, check that the
request is for a .well-known, and send the response.
Args:
- client_factory (IProtocolFactory): outgoing connection
- expected_sni (bytes): SNI that we expect the outgoing connection to send
- content (bytes): content to send back as the .well-known
+ client_factory: outgoing connection
+ expected_sni: SNI that we expect the outgoing connection to send
+ content: content to send back as the .well-known
Returns:
- HTTPChannel: server impl
+ server impl
"""
# make the connection for .well-known
well_known_server = self._make_connection(
@@ -209,7 +212,10 @@ class MatrixFederationAgentTests(unittest.TestCase):
return well_known_server
def _send_well_known_response(
- self, request, content, headers: Optional[dict] = None
+ self,
+ request: Request,
+ content: bytes,
+ headers: Optional[dict] = None,
):
"""Check that an incoming request looks like a valid .well-known request, and
send back the response.
@@ -225,10 +231,37 @@ class MatrixFederationAgentTests(unittest.TestCase):
self.reactor.pump((0.1,))
- def test_get(self):
+ def _make_agent(self) -> MatrixFederationAgent:
"""
- happy-path test of a GET request with an explicit port
+        If a proxy server is set, the MatrixFederationAgent must be created after
+        the proxy environment variables are patched, as setUp runs too early to see them.
"""
+ return MatrixFederationAgent(
+ reactor=self.reactor,
+ tls_client_options_factory=self.tls_factory,
+ user_agent="test-agent", # Note that this is unused since _well_known_resolver is provided.
+ ip_whitelist=IPSet(),
+ ip_blacklist=IPSet(),
+ _srv_resolver=self.mock_resolver,
+ _well_known_resolver=self.well_known_resolver,
+ )
+
+ def test_get(self):
+ """happy-path test of a GET request with an explicit port"""
+ self._do_get()
+
+ @patch.dict(
+ os.environ,
+ {"https_proxy": "proxy.com", "no_proxy": "testserv"},
+ )
+ def test_get_bypass_proxy(self):
+ """test of a GET request with an explicit port and bypass proxy"""
+ self._do_get()
+
+ def _do_get(self):
+ """test of a GET request with an explicit port"""
+ self.agent = self._make_agent()
+
self.reactor.lookups["testserv"] = "1.2.3.4"
test_d = self._make_get_request(b"matrix://testserv:8448/foo/bar")
@@ -282,10 +315,188 @@ class MatrixFederationAgentTests(unittest.TestCase):
json = self.successResultOf(treq.json_content(response))
self.assertEqual(json, {"a": 1})
+ @patch.dict(
+ os.environ, {"https_proxy": "http://proxy.com", "no_proxy": "unused.com"}
+ )
+ def test_get_via_http_proxy(self):
+ """test for federation request through a http proxy"""
+ self._do_get_via_proxy(expect_proxy_ssl=False, expected_auth_credentials=None)
+
+ @patch.dict(
+ os.environ,
+ {"https_proxy": "http://user:pass@proxy.com", "no_proxy": "unused.com"},
+ )
+ def test_get_via_http_proxy_with_auth(self):
+ """test for federation request through a http proxy with authentication"""
+ self._do_get_via_proxy(
+ expect_proxy_ssl=False, expected_auth_credentials=b"user:pass"
+ )
+
+ @patch.dict(
+ os.environ, {"https_proxy": "https://proxy.com", "no_proxy": "unused.com"}
+ )
+ def test_get_via_https_proxy(self):
+ """test for federation request through a https proxy"""
+ self._do_get_via_proxy(expect_proxy_ssl=True, expected_auth_credentials=None)
+
+ @patch.dict(
+ os.environ,
+ {"https_proxy": "https://user:pass@proxy.com", "no_proxy": "unused.com"},
+ )
+ def test_get_via_https_proxy_with_auth(self):
+ """test for federation request through a https proxy with authentication"""
+ self._do_get_via_proxy(
+ expect_proxy_ssl=True, expected_auth_credentials=b"user:pass"
+ )
+
+ def _do_get_via_proxy(
+ self,
+ expect_proxy_ssl: bool = False,
+ expected_auth_credentials: Optional[bytes] = None,
+ ):
+ """Send a https federation request via an agent and check that it is correctly
+ received at the proxy and client. The proxy can use either http or https.
+ Args:
+ expect_proxy_ssl: True if we expect the request to connect to the proxy via https.
+ expected_auth_credentials: credentials we expect to be presented to authenticate at the proxy
+ """
+ self.agent = self._make_agent()
+
+ self.reactor.lookups["testserv"] = "1.2.3.4"
+ self.reactor.lookups["proxy.com"] = "9.9.9.9"
+ test_d = self._make_get_request(b"matrix://testserv:8448/foo/bar")
+
+ # Nothing happened yet
+ self.assertNoResult(test_d)
+
+ # Make sure treq is trying to connect
+ clients = self.reactor.tcpClients
+ self.assertEqual(len(clients), 1)
+ (host, port, client_factory, _timeout, _bindAddress) = clients[0]
+ # make sure we are connecting to the proxy
+ self.assertEqual(host, "9.9.9.9")
+ self.assertEqual(port, 1080)
+
+ # make a test server to act as the proxy, and wire up the client
+ proxy_server = self._make_connection(
+ client_factory,
+ ssl=expect_proxy_ssl,
+ tls_sanlist=[b"DNS:proxy.com"] if expect_proxy_ssl else None,
+ expected_sni=b"proxy.com" if expect_proxy_ssl else None,
+ )
+
+ assert isinstance(proxy_server, HTTPChannel)
+
+ # now there should be a pending CONNECT request
+ self.assertEqual(len(proxy_server.requests), 1)
+
+ request = proxy_server.requests[0]
+ self.assertEqual(request.method, b"CONNECT")
+ self.assertEqual(request.path, b"testserv:8448")
+
+ # Check whether auth credentials have been supplied to the proxy
+ proxy_auth_header_values = request.requestHeaders.getRawHeaders(
+ b"Proxy-Authorization"
+ )
+
+ if expected_auth_credentials is not None:
+ # Compute the correct header value for Proxy-Authorization
+ encoded_credentials = base64.b64encode(expected_auth_credentials)
+ expected_header_value = b"Basic " + encoded_credentials
+
+ # Validate the header's value
+ self.assertIn(expected_header_value, proxy_auth_header_values)
+ else:
+ # Check that the Proxy-Authorization header has not been supplied to the proxy
+ self.assertIsNone(proxy_auth_header_values)
+
+ # tell the proxy server not to close the connection
+ proxy_server.persistent = True
+
+ request.finish()
+
+ # now we make another test server to act as the upstream HTTP server.
+ server_ssl_protocol = _wrap_server_factory_for_tls(
+ _get_test_protocol_factory()
+ ).buildProtocol(None)
+
+ # Tell the HTTP server to send outgoing traffic back via the proxy's transport.
+ proxy_server_transport = proxy_server.transport
+ server_ssl_protocol.makeConnection(proxy_server_transport)
+
+ # ... and replace the protocol on the proxy's transport with the
+ # TLSMemoryBIOProtocol for the test server, so that incoming traffic
+ # to the proxy gets sent over to the HTTP(s) server.
+
+ # See also comment at `_do_https_request_via_proxy`
+ # in ../test_proxyagent.py for more details
+ if expect_proxy_ssl:
+ assert isinstance(proxy_server_transport, TLSMemoryBIOProtocol)
+ proxy_server_transport.wrappedProtocol = server_ssl_protocol
+ else:
+ assert isinstance(proxy_server_transport, FakeTransport)
+ client_protocol = proxy_server_transport.other
+ c2s_transport = client_protocol.transport
+ c2s_transport.other = server_ssl_protocol
+
+ self.reactor.advance(0)
+
+ server_name = server_ssl_protocol._tlsConnection.get_servername()
+ expected_sni = b"testserv"
+ self.assertEqual(
+ server_name,
+ expected_sni,
+ f"Expected SNI {expected_sni!s} but got {server_name!s}",
+ )
+
+ # now there should be a pending request
+ http_server = server_ssl_protocol.wrappedProtocol
+ self.assertEqual(len(http_server.requests), 1)
+
+ request = http_server.requests[0]
+ self.assertEqual(request.method, b"GET")
+ self.assertEqual(request.path, b"/foo/bar")
+ self.assertEqual(
+ request.requestHeaders.getRawHeaders(b"host"), [b"testserv:8448"]
+ )
+ self.assertEqual(
+ request.requestHeaders.getRawHeaders(b"user-agent"), [b"test-agent"]
+ )
+ # Check that the destination server DID NOT receive proxy credentials
+ self.assertIsNone(request.requestHeaders.getRawHeaders(b"Proxy-Authorization"))
+ content = request.content.read()
+ self.assertEqual(content, b"")
+
+ # Deferred is still without a result
+ self.assertNoResult(test_d)
+
+ # send the headers
+ request.responseHeaders.setRawHeaders(b"Content-Type", [b"application/json"])
+ request.write("")
+
+ self.reactor.pump((0.1,))
+
+ response = self.successResultOf(test_d)
+
+ # that should give us a Response object
+ self.assertEqual(response.code, 200)
+
+ # Send the body
+ request.write('{ "a": 1 }'.encode("ascii"))
+ request.finish()
+
+ self.reactor.pump((0.1,))
+
+ # check it can be read
+ json = self.successResultOf(treq.json_content(response))
+ self.assertEqual(json, {"a": 1})
+
def test_get_ip_address(self):
"""
Test the behaviour when the server name contains an explicit IP (with no port)
"""
+ self.agent = self._make_agent()
+
# there will be a getaddrinfo on the IP
self.reactor.lookups["1.2.3.4"] = "1.2.3.4"
@@ -320,6 +531,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
Test the behaviour when the server name contains an explicit IPv6 address
(with no port)
"""
+ self.agent = self._make_agent()
# there will be a getaddrinfo on the IP
self.reactor.lookups["::1"] = "::1"
@@ -355,6 +567,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
Test the behaviour when the server name contains an explicit IPv6 address
(with explicit port)
"""
+ self.agent = self._make_agent()
# there will be a getaddrinfo on the IP
self.reactor.lookups["::1"] = "::1"
@@ -389,6 +602,8 @@ class MatrixFederationAgentTests(unittest.TestCase):
"""
Test the behaviour when the certificate on the server doesn't match the hostname
"""
+ self.agent = self._make_agent()
+
self.mock_resolver.resolve_service.side_effect = generate_resolve_service([])
self.reactor.lookups["testserv1"] = "1.2.3.4"
@@ -441,6 +656,8 @@ class MatrixFederationAgentTests(unittest.TestCase):
Test the behaviour when the server name contains an explicit IP, but
the server cert doesn't cover it
"""
+ self.agent = self._make_agent()
+
# there will be a getaddrinfo on the IP
self.reactor.lookups["1.2.3.5"] = "1.2.3.5"
@@ -471,6 +688,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
"""
Test the behaviour when the server name has no port, no SRV, and no well-known
"""
+ self.agent = self._make_agent()
self.mock_resolver.resolve_service.side_effect = generate_resolve_service([])
self.reactor.lookups["testserv"] = "1.2.3.4"
@@ -524,6 +742,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
def test_get_well_known(self):
"""Test the behaviour when the .well-known delegates elsewhere"""
+ self.agent = self._make_agent()
self.mock_resolver.resolve_service.side_effect = generate_resolve_service([])
self.reactor.lookups["testserv"] = "1.2.3.4"
@@ -587,6 +806,8 @@ class MatrixFederationAgentTests(unittest.TestCase):
"""Test the behaviour when the server name has no port and no SRV record, but
the .well-known has a 300 redirect
"""
+ self.agent = self._make_agent()
+
self.mock_resolver.resolve_service.side_effect = generate_resolve_service([])
self.reactor.lookups["testserv"] = "1.2.3.4"
self.reactor.lookups["target-server"] = "1::f"
@@ -675,6 +896,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
"""
Test the behaviour when the server name has an *invalid* well-known (and no SRV)
"""
+ self.agent = self._make_agent()
self.mock_resolver.resolve_service.side_effect = generate_resolve_service([])
self.reactor.lookups["testserv"] = "1.2.3.4"
@@ -743,6 +965,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
reactor=self.reactor,
tls_client_options_factory=tls_factory,
user_agent=b"test-agent", # This is unused since _well_known_resolver is passed below.
+ ip_whitelist=IPSet(),
ip_blacklist=IPSet(),
_srv_resolver=self.mock_resolver,
_well_known_resolver=WellKnownResolver(
@@ -780,6 +1003,8 @@ class MatrixFederationAgentTests(unittest.TestCase):
"""
Test the behaviour when there is a single SRV record
"""
+ self.agent = self._make_agent()
+
self.mock_resolver.resolve_service.side_effect = generate_resolve_service(
[Server(host=b"srvtarget", port=8443)]
)
@@ -820,6 +1045,8 @@ class MatrixFederationAgentTests(unittest.TestCase):
"""Test the behaviour when the .well-known redirects to a place where there
is a SRV.
"""
+ self.agent = self._make_agent()
+
self.reactor.lookups["testserv"] = "1.2.3.4"
self.reactor.lookups["srvtarget"] = "5.6.7.8"
@@ -876,6 +1103,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
def test_idna_servername(self):
"""test the behaviour when the server name has idna chars in"""
+ self.agent = self._make_agent()
self.mock_resolver.resolve_service.side_effect = generate_resolve_service([])
@@ -937,6 +1165,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
def test_idna_srv_target(self):
"""test the behaviour when the target of a SRV record has idna chars"""
+ self.agent = self._make_agent()
self.mock_resolver.resolve_service.side_effect = generate_resolve_service(
[Server(host=b"xn--trget-3qa.com", port=8443)] # târget.com
@@ -1140,6 +1369,8 @@ class MatrixFederationAgentTests(unittest.TestCase):
def test_srv_fallbacks(self):
"""Test that other SRV results are tried if the first one fails."""
+ self.agent = self._make_agent()
+
self.mock_resolver.resolve_service.side_effect = generate_resolve_service(
[
Server(host=b"target.com", port=8443),
@@ -1266,34 +1497,49 @@ def _check_logcontext(context):
raise AssertionError("Expected logcontext %s but was %s" % (context, current))
-def _build_test_server(connection_creator):
- """Construct a test server
-
- This builds an HTTP channel, wrapped with a TLSMemoryBIOProtocol
-
+def _wrap_server_factory_for_tls(
+ factory: IProtocolFactory, sanlist: Iterable[bytes] = None
+) -> IProtocolFactory:
+ """Wrap an existing Protocol Factory with a test TLSMemoryBIOFactory
+ The resultant factory will create a TLS server which presents a certificate
+ signed by our test CA, valid for the domains in `sanlist`
Args:
- connection_creator (IOpenSSLServerConnectionCreator): thing to build
- SSL connections
- sanlist (list[bytes]): list of the SAN entries for the cert returned
- by the server
+ factory: protocol factory to wrap
+ sanlist: list of domains the cert should be valid for
+ Returns:
+ interfaces.IProtocolFactory
+ """
+ if sanlist is None:
+ sanlist = [
+ b"DNS:testserv",
+ b"DNS:target-server",
+ b"DNS:xn--bcher-kva.com",
+ b"IP:1.2.3.4",
+ b"IP:::1",
+ ]
+
+ connection_creator = TestServerTLSConnectionFactory(sanlist=sanlist)
+ return TLSMemoryBIOFactory(
+ connection_creator, isClient=False, wrappedFactory=factory
+ )
+
+def _get_test_protocol_factory() -> IProtocolFactory:
+ """Get a protocol Factory which will build an HTTPChannel
Returns:
- TLSMemoryBIOProtocol
+ interfaces.IProtocolFactory
"""
server_factory = Factory.forProtocol(HTTPChannel)
+
# Request.finish expects the factory to have a 'log' method.
server_factory.log = _log_request
- server_tls_factory = TLSMemoryBIOFactory(
- connection_creator, isClient=False, wrappedFactory=server_factory
- )
-
- return server_tls_factory.buildProtocol(None)
+ return server_factory
-def _log_request(request):
+def _log_request(request: str):
"""Implements Factory.log, which is expected by Request.finish"""
- logger.info("Completed request %s", request)
+ logger.info(f"Completed request {request}")
@implementer(IPolicyForHTTPS)
diff --git a/tests/http/test_proxyagent.py b/tests/http/test_proxyagent.py
index e5865c161d..2db77c6a73 100644
--- a/tests/http/test_proxyagent.py
+++ b/tests/http/test_proxyagent.py
@@ -29,7 +29,8 @@ from twisted.protocols.tls import TLSMemoryBIOFactory, TLSMemoryBIOProtocol
from twisted.web.http import HTTPChannel
from synapse.http.client import BlacklistingReactorWrapper
-from synapse.http.proxyagent import ProxyAgent, ProxyCredentials, parse_proxy
+from synapse.http.connectproxyclient import ProxyCredentials
+from synapse.http.proxyagent import ProxyAgent, parse_proxy
from tests.http import TestServerTLSConnectionFactory, get_test_https_policy
from tests.server import FakeTransport, ThreadedMemoryReactorClock
@@ -392,7 +393,9 @@ class MatrixFederationAgentTests(TestCase):
"""
Tests that requests can be made through a proxy.
"""
- self._do_http_request_via_proxy(ssl=False, auth_credentials=None)
+ self._do_http_request_via_proxy(
+ expect_proxy_ssl=False, expected_auth_credentials=None
+ )
@patch.dict(
os.environ,
@@ -402,13 +405,17 @@ class MatrixFederationAgentTests(TestCase):
"""
Tests that authenticated requests can be made through a proxy.
"""
- self._do_http_request_via_proxy(ssl=False, auth_credentials=b"bob:pinkponies")
+ self._do_http_request_via_proxy(
+ expect_proxy_ssl=False, expected_auth_credentials=b"bob:pinkponies"
+ )
@patch.dict(
os.environ, {"http_proxy": "https://proxy.com:8888", "no_proxy": "unused.com"}
)
def test_http_request_via_https_proxy(self):
- self._do_http_request_via_proxy(ssl=True, auth_credentials=None)
+ self._do_http_request_via_proxy(
+ expect_proxy_ssl=True, expected_auth_credentials=None
+ )
@patch.dict(
os.environ,
@@ -418,12 +425,16 @@ class MatrixFederationAgentTests(TestCase):
},
)
def test_http_request_via_https_proxy_with_auth(self):
- self._do_http_request_via_proxy(ssl=True, auth_credentials=b"bob:pinkponies")
+ self._do_http_request_via_proxy(
+ expect_proxy_ssl=True, expected_auth_credentials=b"bob:pinkponies"
+ )
@patch.dict(os.environ, {"https_proxy": "proxy.com", "no_proxy": "unused.com"})
def test_https_request_via_proxy(self):
"""Tests that TLS-encrypted requests can be made through a proxy"""
- self._do_https_request_via_proxy(ssl=False, auth_credentials=None)
+ self._do_https_request_via_proxy(
+ expect_proxy_ssl=False, expected_auth_credentials=None
+ )
@patch.dict(
os.environ,
@@ -431,14 +442,18 @@ class MatrixFederationAgentTests(TestCase):
)
def test_https_request_via_proxy_with_auth(self):
"""Tests that authenticated, TLS-encrypted requests can be made through a proxy"""
- self._do_https_request_via_proxy(ssl=False, auth_credentials=b"bob:pinkponies")
+ self._do_https_request_via_proxy(
+ expect_proxy_ssl=False, expected_auth_credentials=b"bob:pinkponies"
+ )
@patch.dict(
os.environ, {"https_proxy": "https://proxy.com", "no_proxy": "unused.com"}
)
def test_https_request_via_https_proxy(self):
"""Tests that TLS-encrypted requests can be made through a proxy"""
- self._do_https_request_via_proxy(ssl=True, auth_credentials=None)
+ self._do_https_request_via_proxy(
+ expect_proxy_ssl=True, expected_auth_credentials=None
+ )
@patch.dict(
os.environ,
@@ -446,20 +461,22 @@ class MatrixFederationAgentTests(TestCase):
)
def test_https_request_via_https_proxy_with_auth(self):
"""Tests that authenticated, TLS-encrypted requests can be made through a proxy"""
- self._do_https_request_via_proxy(ssl=True, auth_credentials=b"bob:pinkponies")
+ self._do_https_request_via_proxy(
+ expect_proxy_ssl=True, expected_auth_credentials=b"bob:pinkponies"
+ )
def _do_http_request_via_proxy(
self,
- ssl: bool = False,
- auth_credentials: Optional[bytes] = None,
+ expect_proxy_ssl: bool = False,
+ expected_auth_credentials: Optional[bytes] = None,
):
"""Send a http request via an agent and check that it is correctly received at
the proxy. The proxy can use either http or https.
Args:
- ssl: True if we expect the request to connect via https to proxy
- auth_credentials: credentials to authenticate at proxy
+ expect_proxy_ssl: True if we expect the request to connect via https to proxy
+ expected_auth_credentials: credentials to authenticate at proxy
"""
- if ssl:
+ if expect_proxy_ssl:
agent = ProxyAgent(
self.reactor, use_proxy=True, contextFactory=get_test_https_policy()
)
@@ -480,9 +497,9 @@ class MatrixFederationAgentTests(TestCase):
http_server = self._make_connection(
client_factory,
_get_test_protocol_factory(),
- ssl=ssl,
- tls_sanlist=[b"DNS:proxy.com"] if ssl else None,
- expected_sni=b"proxy.com" if ssl else None,
+ ssl=expect_proxy_ssl,
+ tls_sanlist=[b"DNS:proxy.com"] if expect_proxy_ssl else None,
+ expected_sni=b"proxy.com" if expect_proxy_ssl else None,
)
# the FakeTransport is async, so we need to pump the reactor
@@ -498,9 +515,9 @@ class MatrixFederationAgentTests(TestCase):
b"Proxy-Authorization"
)
- if auth_credentials is not None:
+ if expected_auth_credentials is not None:
# Compute the correct header value for Proxy-Authorization
- encoded_credentials = base64.b64encode(auth_credentials)
+ encoded_credentials = base64.b64encode(expected_auth_credentials)
expected_header_value = b"Basic " + encoded_credentials
# Validate the header's value
@@ -523,14 +540,14 @@ class MatrixFederationAgentTests(TestCase):
def _do_https_request_via_proxy(
self,
- ssl: bool = False,
- auth_credentials: Optional[bytes] = None,
+ expect_proxy_ssl: bool = False,
+ expected_auth_credentials: Optional[bytes] = None,
):
"""Send a https request via an agent and check that it is correctly received at
the proxy and client. The proxy can use either http or https.
Args:
- ssl: True if we expect the request to connect via https to proxy
- auth_credentials: credentials to authenticate at proxy
+ expect_proxy_ssl: True if we expect the request to connect via https to proxy
+ expected_auth_credentials: credentials to authenticate at proxy
"""
agent = ProxyAgent(
self.reactor,
@@ -552,9 +569,9 @@ class MatrixFederationAgentTests(TestCase):
proxy_server = self._make_connection(
client_factory,
_get_test_protocol_factory(),
- ssl=ssl,
- tls_sanlist=[b"DNS:proxy.com"] if ssl else None,
- expected_sni=b"proxy.com" if ssl else None,
+ ssl=expect_proxy_ssl,
+ tls_sanlist=[b"DNS:proxy.com"] if expect_proxy_ssl else None,
+ expected_sni=b"proxy.com" if expect_proxy_ssl else None,
)
assert isinstance(proxy_server, HTTPChannel)
@@ -570,9 +587,9 @@ class MatrixFederationAgentTests(TestCase):
b"Proxy-Authorization"
)
- if auth_credentials is not None:
+ if expected_auth_credentials is not None:
# Compute the correct header value for Proxy-Authorization
- encoded_credentials = base64.b64encode(auth_credentials)
+ encoded_credentials = base64.b64encode(expected_auth_credentials)
expected_header_value = b"Basic " + encoded_credentials
# Validate the header's value
@@ -606,7 +623,7 @@ class MatrixFederationAgentTests(TestCase):
# Protocol to implement the proxy, which starts out by forwarding to an
# HTTPChannel (to implement the CONNECT command) and can then be switched
# into a mode where it forwards its traffic to another Protocol.)
- if ssl:
+ if expect_proxy_ssl:
assert isinstance(proxy_server_transport, TLSMemoryBIOProtocol)
proxy_server_transport.wrappedProtocol = server_ssl_protocol
else:
diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py
index 42f50c0921..13fab5579b 100644
--- a/tests/rest/admin/test_user.py
+++ b/tests/rest/admin/test_user.py
@@ -15,17 +15,21 @@
import hashlib
import hmac
import json
+import os
import urllib.parse
from binascii import unhexlify
from typing import List, Optional
from unittest.mock import Mock, patch
+from parameterized import parameterized
+
import synapse.rest.admin
from synapse.api.constants import UserTypes
from synapse.api.errors import Codes, HttpResponseException, ResourceLimitError
from synapse.api.room_versions import RoomVersions
from synapse.rest.client.v1 import login, logout, profile, room
from synapse.rest.client.v2_alpha import devices, sync
+from synapse.rest.media.v1.filepath import MediaFilePaths
from synapse.types import JsonDict, UserID
from tests import unittest
@@ -72,7 +76,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
channel = self.make_request("POST", self.url, b"{}")
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual(
"Shared secret registration is not enabled", channel.json_body["error"]
)
@@ -104,7 +108,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
body = json.dumps({"nonce": nonce})
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("username must be specified", channel.json_body["error"])
# 61 seconds
@@ -112,7 +116,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("unrecognised nonce", channel.json_body["error"])
def test_register_incorrect_nonce(self):
@@ -166,7 +170,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
)
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual("@bob:test", channel.json_body["user_id"])
def test_nonce_reuse(self):
@@ -191,13 +195,13 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
)
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual("@bob:test", channel.json_body["user_id"])
# Now, try and reuse it
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("unrecognised nonce", channel.json_body["error"])
def test_missing_parts(self):
@@ -219,7 +223,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
body = json.dumps({})
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("nonce must be specified", channel.json_body["error"])
#
@@ -230,28 +234,28 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
body = json.dumps({"nonce": nonce()})
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("username must be specified", channel.json_body["error"])
# Must be a string
body = json.dumps({"nonce": nonce(), "username": 1234})
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("Invalid username", channel.json_body["error"])
# Must not have null bytes
body = json.dumps({"nonce": nonce(), "username": "abcd\u0000"})
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("Invalid username", channel.json_body["error"])
# Must not have null bytes
body = json.dumps({"nonce": nonce(), "username": "a" * 1000})
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("Invalid username", channel.json_body["error"])
#
@@ -262,28 +266,28 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
body = json.dumps({"nonce": nonce(), "username": "a"})
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("password must be specified", channel.json_body["error"])
# Must be a string
body = json.dumps({"nonce": nonce(), "username": "a", "password": 1234})
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("Invalid password", channel.json_body["error"])
# Must not have null bytes
body = json.dumps({"nonce": nonce(), "username": "a", "password": "abcd\u0000"})
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("Invalid password", channel.json_body["error"])
# Super long
body = json.dumps({"nonce": nonce(), "username": "a", "password": "A" * 1000})
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("Invalid password", channel.json_body["error"])
#
@@ -301,7 +305,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
)
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("Invalid user type", channel.json_body["error"])
def test_displayname(self):
@@ -322,11 +326,11 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
)
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual("@bob1:test", channel.json_body["user_id"])
channel = self.make_request("GET", "/profile/@bob1:test/displayname")
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual("bob1", channel.json_body["displayname"])
# displayname is None
@@ -348,11 +352,11 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
)
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual("@bob2:test", channel.json_body["user_id"])
channel = self.make_request("GET", "/profile/@bob2:test/displayname")
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual("bob2", channel.json_body["displayname"])
# displayname is empty
@@ -374,7 +378,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
)
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual("@bob3:test", channel.json_body["user_id"])
channel = self.make_request("GET", "/profile/@bob3:test/displayname")
@@ -399,11 +403,11 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
)
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual("@bob4:test", channel.json_body["user_id"])
channel = self.make_request("GET", "/profile/@bob4:test/displayname")
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual("Bob's Name", channel.json_body["displayname"])
@override_config(
@@ -449,7 +453,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
)
channel = self.make_request("POST", self.url, body.encode("utf8"))
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual("@bob:test", channel.json_body["user_id"])
@@ -638,7 +642,7 @@ class UsersListTestCase(unittest.HomeserverTestCase):
access_token=self.admin_user_tok,
)
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual(Codes.UNKNOWN, channel.json_body["errcode"])
# invalid search order
@@ -1085,7 +1089,7 @@ class DeactivateAccountTestCase(unittest.HomeserverTestCase):
content={"erase": False},
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
# Get user
channel = self.make_request(
@@ -2180,7 +2184,7 @@ class PushersRestTestCase(unittest.HomeserverTestCase):
)
self.assertEqual(400, channel.code, msg=channel.json_body)
- self.assertEqual("Can only lookup local users", channel.json_body["error"])
+ self.assertEqual("Can only look up local users", channel.json_body["error"])
def test_get_pushers(self):
"""
@@ -2249,6 +2253,7 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.media_repo = hs.get_media_repository_resource()
+ self.filepaths = MediaFilePaths(hs.config.media_store_path)
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
@@ -2258,37 +2263,34 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
self.other_user
)
- def test_no_auth(self):
- """
- Try to list media of an user without authentication.
- """
- channel = self.make_request("GET", self.url, b"{}")
+ @parameterized.expand(["GET", "DELETE"])
+ def test_no_auth(self, method: str):
+ """Try to list media of a user without authentication."""
+ channel = self.make_request(method, self.url, {})
- self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(401, channel.code, msg=channel.json_body)
self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])
- def test_requester_is_no_admin(self):
- """
- If the user is not a server admin, an error is returned.
- """
+ @parameterized.expand(["GET", "DELETE"])
+ def test_requester_is_no_admin(self, method: str):
+ """If the user is not a server admin, an error is returned."""
other_user_token = self.login("user", "pass")
channel = self.make_request(
- "GET",
+ method,
self.url,
access_token=other_user_token,
)
- self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(403, channel.code, msg=channel.json_body)
self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
- def test_user_does_not_exist(self):
- """
- Tests that a lookup for a user that does not exist returns a 404
- """
+ @parameterized.expand(["GET", "DELETE"])
+ def test_user_does_not_exist(self, method: str):
+ """Tests that a lookup for a user that does not exist returns a 404"""
url = "/_synapse/admin/v1/users/@unknown_person:test/media"
channel = self.make_request(
- "GET",
+ method,
url,
access_token=self.admin_user_tok,
)
@@ -2296,25 +2298,22 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
self.assertEqual(404, channel.code, msg=channel.json_body)
self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])
- def test_user_is_not_local(self):
- """
- Tests that a lookup for a user that is not a local returns a 400
- """
+ @parameterized.expand(["GET", "DELETE"])
+ def test_user_is_not_local(self, method: str):
+ """Tests that a lookup for a user that is not a local returns a 400"""
url = "/_synapse/admin/v1/users/@unknown_person:unknown_domain/media"
channel = self.make_request(
- "GET",
+ method,
url,
access_token=self.admin_user_tok,
)
self.assertEqual(400, channel.code, msg=channel.json_body)
- self.assertEqual("Can only lookup local users", channel.json_body["error"])
+ self.assertEqual("Can only look up local users", channel.json_body["error"])
- def test_limit(self):
- """
- Testing list of media with limit
- """
+ def test_limit_GET(self):
+ """Testing list of media with limit"""
number_media = 20
other_user_tok = self.login("user", "pass")
@@ -2326,16 +2325,31 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
access_token=self.admin_user_tok,
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual(channel.json_body["total"], number_media)
self.assertEqual(len(channel.json_body["media"]), 5)
self.assertEqual(channel.json_body["next_token"], 5)
self._check_fields(channel.json_body["media"])
- def test_from(self):
- """
- Testing list of media with a defined starting point (from)
- """
+ def test_limit_DELETE(self):
+ """Testing delete of media with limit"""
+
+ number_media = 20
+ other_user_tok = self.login("user", "pass")
+ self._create_media_for_user(other_user_tok, number_media)
+
+ channel = self.make_request(
+ "DELETE",
+ self.url + "?limit=5",
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, channel.code, msg=channel.json_body)
+ self.assertEqual(channel.json_body["total"], 5)
+ self.assertEqual(len(channel.json_body["deleted_media"]), 5)
+
+ def test_from_GET(self):
+ """Testing list of media with a defined starting point (from)"""
number_media = 20
other_user_tok = self.login("user", "pass")
@@ -2347,16 +2361,31 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
access_token=self.admin_user_tok,
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual(channel.json_body["total"], number_media)
self.assertEqual(len(channel.json_body["media"]), 15)
self.assertNotIn("next_token", channel.json_body)
self._check_fields(channel.json_body["media"])
- def test_limit_and_from(self):
- """
- Testing list of media with a defined starting point and limit
- """
+ def test_from_DELETE(self):
+ """Testing delete of media with a defined starting point (from)"""
+
+ number_media = 20
+ other_user_tok = self.login("user", "pass")
+ self._create_media_for_user(other_user_tok, number_media)
+
+ channel = self.make_request(
+ "DELETE",
+ self.url + "?from=5",
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, channel.code, msg=channel.json_body)
+ self.assertEqual(channel.json_body["total"], 15)
+ self.assertEqual(len(channel.json_body["deleted_media"]), 15)
+
+ def test_limit_and_from_GET(self):
+ """Testing list of media with a defined starting point and limit"""
number_media = 20
other_user_tok = self.login("user", "pass")
@@ -2368,59 +2397,78 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
access_token=self.admin_user_tok,
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual(channel.json_body["total"], number_media)
self.assertEqual(channel.json_body["next_token"], 15)
self.assertEqual(len(channel.json_body["media"]), 10)
self._check_fields(channel.json_body["media"])
- def test_invalid_parameter(self):
- """
- If parameters are invalid, an error is returned.
- """
+ def test_limit_and_from_DELETE(self):
+ """Testing delete of media with a defined starting point and limit"""
+
+ number_media = 20
+ other_user_tok = self.login("user", "pass")
+ self._create_media_for_user(other_user_tok, number_media)
+
+ channel = self.make_request(
+ "DELETE",
+ self.url + "?from=5&limit=10",
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, channel.code, msg=channel.json_body)
+ self.assertEqual(channel.json_body["total"], 10)
+ self.assertEqual(len(channel.json_body["deleted_media"]), 10)
+
+ @parameterized.expand(["GET", "DELETE"])
+ def test_invalid_parameter(self, method: str):
+ """If parameters are invalid, an error is returned."""
# unkown order_by
channel = self.make_request(
- "GET",
+ method,
self.url + "?order_by=bar",
access_token=self.admin_user_tok,
)
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual(Codes.UNKNOWN, channel.json_body["errcode"])
# invalid search order
channel = self.make_request(
- "GET",
+ method,
self.url + "?dir=bar",
access_token=self.admin_user_tok,
)
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual(Codes.UNKNOWN, channel.json_body["errcode"])
# negative limit
channel = self.make_request(
- "GET",
+ method,
self.url + "?limit=-5",
access_token=self.admin_user_tok,
)
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
# negative from
channel = self.make_request(
- "GET",
+ method,
self.url + "?from=-5",
access_token=self.admin_user_tok,
)
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
def test_next_token(self):
"""
Testing that `next_token` appears at the right place
+
+ When deleting media, `next_token` is not useful because
+ the remaining media are reordered after each deletion.
"""
number_media = 20
@@ -2435,7 +2483,7 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
access_token=self.admin_user_tok,
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual(channel.json_body["total"], number_media)
self.assertEqual(len(channel.json_body["media"]), number_media)
self.assertNotIn("next_token", channel.json_body)
@@ -2448,7 +2496,7 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
access_token=self.admin_user_tok,
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual(channel.json_body["total"], number_media)
self.assertEqual(len(channel.json_body["media"]), number_media)
self.assertNotIn("next_token", channel.json_body)
@@ -2461,7 +2509,7 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
access_token=self.admin_user_tok,
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual(channel.json_body["total"], number_media)
self.assertEqual(len(channel.json_body["media"]), 19)
self.assertEqual(channel.json_body["next_token"], 19)
@@ -2475,12 +2523,12 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
access_token=self.admin_user_tok,
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual(channel.json_body["total"], number_media)
self.assertEqual(len(channel.json_body["media"]), 1)
self.assertNotIn("next_token", channel.json_body)
- def test_user_has_no_media(self):
+ def test_user_has_no_media_GET(self):
"""
Tests that a normal lookup for media is successfully
if user has no media created
@@ -2496,11 +2544,24 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
self.assertEqual(0, channel.json_body["total"])
self.assertEqual(0, len(channel.json_body["media"]))
- def test_get_media(self):
+ def test_user_has_no_media_DELETE(self):
"""
- Tests that a normal lookup for media is successfully
- Tests that a delete is successful if the user has no media
"""
+ channel = self.make_request(
+ "DELETE",
+ self.url,
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, channel.code, msg=channel.json_body)
+ self.assertEqual(0, channel.json_body["total"])
+ self.assertEqual(0, len(channel.json_body["deleted_media"]))
+
+ def test_get_media(self):
+ """Tests that a normal lookup for media is successful"""
+
number_media = 5
other_user_tok = self.login("user", "pass")
self._create_media_for_user(other_user_tok, number_media)
@@ -2517,6 +2578,35 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
self.assertNotIn("next_token", channel.json_body)
self._check_fields(channel.json_body["media"])
+ def test_delete_media(self):
+ """Tests that a normal delete of media is successful"""
+
+ number_media = 5
+ other_user_tok = self.login("user", "pass")
+ media_ids = self._create_media_for_user(other_user_tok, number_media)
+
+ # Test if the file exists
+ local_paths = []
+ for media_id in media_ids:
+ local_path = self.filepaths.local_media_filepath(media_id)
+ self.assertTrue(os.path.exists(local_path))
+ local_paths.append(local_path)
+
+ channel = self.make_request(
+ "DELETE",
+ self.url,
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, channel.code, msg=channel.json_body)
+ self.assertEqual(number_media, channel.json_body["total"])
+ self.assertEqual(number_media, len(channel.json_body["deleted_media"]))
+ self.assertCountEqual(channel.json_body["deleted_media"], media_ids)
+
+ # Test if the file is deleted
+ for local_path in local_paths:
+ self.assertFalse(os.path.exists(local_path))
+
def test_order_by(self):
"""
Testing order list with parameter `order_by`
@@ -2622,13 +2712,16 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
[media2] + sorted([media1, media3]), "safe_from_quarantine", "b"
)
- def _create_media_for_user(self, user_token: str, number_media: int):
+ def _create_media_for_user(self, user_token: str, number_media: int) -> List[str]:
"""
Create a number of media for a specific user
Args:
user_token: Access token of the user
number_media: Number of media to be created for the user
+ Returns:
+ List of created media ID
"""
+ media_ids = []
for _ in range(number_media):
# file size is 67 Byte
image_data = unhexlify(
@@ -2637,7 +2730,9 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
b"0a2db40000000049454e44ae426082"
)
- self._create_media_and_access(user_token, image_data)
+ media_ids.append(self._create_media_and_access(user_token, image_data))
+
+ return media_ids
def _create_media_and_access(
self,
@@ -2680,7 +2775,7 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
200,
channel.code,
msg=(
- "Expected to receive a 200 on accessing media: %s" % server_and_media_id
+ f"Expected to receive a 200 on accessing media: {server_and_media_id}"
),
)
@@ -2718,12 +2813,12 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
url = self.url + "?"
if order_by is not None:
- url += "order_by=%s&" % (order_by,)
+ url += f"order_by={order_by}&"
if dir is not None and dir in ("b", "f"):
- url += "dir=%s" % (dir,)
+ url += f"dir={dir}"
channel = self.make_request(
"GET",
- url.encode("ascii"),
+ url,
access_token=self.admin_user_tok,
)
self.assertEqual(200, channel.code, msg=channel.json_body)
@@ -2762,7 +2857,7 @@ class UserTokenRestTestCase(unittest.HomeserverTestCase):
channel = self.make_request(
"POST", self.url, b"{}", access_token=self.admin_user_tok
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
return channel.json_body["access_token"]
def test_no_auth(self):
@@ -2803,7 +2898,7 @@ class UserTokenRestTestCase(unittest.HomeserverTestCase):
channel = self.make_request(
"GET", "devices", b"{}", access_token=self.other_user_tok
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
# We should only see the one device (from the login in `prepare`)
self.assertEqual(len(channel.json_body["devices"]), 1)
@@ -2815,11 +2910,11 @@ class UserTokenRestTestCase(unittest.HomeserverTestCase):
# Test that we can successfully make a request
channel = self.make_request("GET", "devices", b"{}", access_token=puppet_token)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
# Logout with the puppet token
channel = self.make_request("POST", "logout", b"{}", access_token=puppet_token)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
# The puppet token should no longer work
channel = self.make_request("GET", "devices", b"{}", access_token=puppet_token)
@@ -2829,7 +2924,7 @@ class UserTokenRestTestCase(unittest.HomeserverTestCase):
channel = self.make_request(
"GET", "devices", b"{}", access_token=self.other_user_tok
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
def test_user_logout_all(self):
"""Tests that the target user calling `/logout/all` does *not* expire
@@ -2840,17 +2935,17 @@ class UserTokenRestTestCase(unittest.HomeserverTestCase):
# Test that we can successfully make a request
channel = self.make_request("GET", "devices", b"{}", access_token=puppet_token)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
# Logout all with the real user token
channel = self.make_request(
"POST", "logout/all", b"{}", access_token=self.other_user_tok
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
# The puppet token should still work
channel = self.make_request("GET", "devices", b"{}", access_token=puppet_token)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
# .. but the real user's tokens shouldn't
channel = self.make_request(
@@ -2867,13 +2962,13 @@ class UserTokenRestTestCase(unittest.HomeserverTestCase):
# Test that we can successfully make a request
channel = self.make_request("GET", "devices", b"{}", access_token=puppet_token)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
# Logout all with the admin user token
channel = self.make_request(
"POST", "logout/all", b"{}", access_token=self.admin_user_tok
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
# The puppet token should no longer work
channel = self.make_request("GET", "devices", b"{}", access_token=puppet_token)
@@ -2883,7 +2978,7 @@ class UserTokenRestTestCase(unittest.HomeserverTestCase):
channel = self.make_request(
"GET", "devices", b"{}", access_token=self.other_user_tok
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
@unittest.override_config(
{
@@ -3243,7 +3338,7 @@ class RateLimitTestCase(unittest.HomeserverTestCase):
)
self.assertEqual(400, channel.code, msg=channel.json_body)
- self.assertEqual("Can only lookup local users", channel.json_body["error"])
+ self.assertEqual("Can only look up local users", channel.json_body["error"])
channel = self.make_request(
"POST",
@@ -3279,7 +3374,7 @@ class RateLimitTestCase(unittest.HomeserverTestCase):
content={"messages_per_second": "string"},
)
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
# messages_per_second is negative
@@ -3290,7 +3385,7 @@ class RateLimitTestCase(unittest.HomeserverTestCase):
content={"messages_per_second": -1},
)
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
# burst_count is a string
@@ -3301,7 +3396,7 @@ class RateLimitTestCase(unittest.HomeserverTestCase):
content={"burst_count": "string"},
)
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
# burst_count is negative
@@ -3312,7 +3407,7 @@ class RateLimitTestCase(unittest.HomeserverTestCase):
content={"burst_count": -1},
)
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
def test_return_zero_when_null(self):
@@ -3337,7 +3432,7 @@ class RateLimitTestCase(unittest.HomeserverTestCase):
self.url,
access_token=self.admin_user_tok,
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual(0, channel.json_body["messages_per_second"])
self.assertEqual(0, channel.json_body["burst_count"])
@@ -3351,7 +3446,7 @@ class RateLimitTestCase(unittest.HomeserverTestCase):
self.url,
access_token=self.admin_user_tok,
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertNotIn("messages_per_second", channel.json_body)
self.assertNotIn("burst_count", channel.json_body)
@@ -3362,7 +3457,7 @@ class RateLimitTestCase(unittest.HomeserverTestCase):
access_token=self.admin_user_tok,
content={"messages_per_second": 10, "burst_count": 11},
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual(10, channel.json_body["messages_per_second"])
self.assertEqual(11, channel.json_body["burst_count"])
@@ -3373,7 +3468,7 @@ class RateLimitTestCase(unittest.HomeserverTestCase):
access_token=self.admin_user_tok,
content={"messages_per_second": 20, "burst_count": 21},
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual(20, channel.json_body["messages_per_second"])
self.assertEqual(21, channel.json_body["burst_count"])
@@ -3383,7 +3478,7 @@ class RateLimitTestCase(unittest.HomeserverTestCase):
self.url,
access_token=self.admin_user_tok,
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertEqual(20, channel.json_body["messages_per_second"])
self.assertEqual(21, channel.json_body["burst_count"])
@@ -3393,7 +3488,7 @@ class RateLimitTestCase(unittest.HomeserverTestCase):
self.url,
access_token=self.admin_user_tok,
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertNotIn("messages_per_second", channel.json_body)
self.assertNotIn("burst_count", channel.json_body)
@@ -3403,6 +3498,6 @@ class RateLimitTestCase(unittest.HomeserverTestCase):
self.url,
access_token=self.admin_user_tok,
)
- self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
self.assertNotIn("messages_per_second", channel.json_body)
self.assertNotIn("burst_count", channel.json_body)
diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py
index fc2d35596e..954ad1a1fd 100644
--- a/tests/rest/client/v1/utils.py
+++ b/tests/rest/client/v1/utils.py
@@ -47,10 +47,10 @@ class RestHelper:
def create_room_as(
self,
- room_creator: str = None,
+ room_creator: Optional[str] = None,
is_public: bool = True,
- room_version: str = None,
- tok: str = None,
+ room_version: Optional[str] = None,
+ tok: Optional[str] = None,
expect_code: int = 200,
extra_content: Optional[Dict] = None,
custom_headers: Optional[
diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py
index e5550aec4d..6ebd01bcbe 100644
--- a/tests/test_event_auth.py
+++ b/tests/test_event_auth.py
@@ -384,7 +384,7 @@ class EventAuthTestCase(unittest.TestCase):
},
)
event_auth.check(
- RoomVersions.MSC3083,
+ RoomVersions.V8,
authorised_join_event,
auth_events,
do_sig_check=False,
@@ -400,7 +400,7 @@ class EventAuthTestCase(unittest.TestCase):
"@inviter:foo.test"
)
event_auth.check(
- RoomVersions.MSC3083,
+ RoomVersions.V8,
_join_event(
pleb,
additional_content={
@@ -414,7 +414,7 @@ class EventAuthTestCase(unittest.TestCase):
# A join which is missing an authorised server is rejected.
with self.assertRaises(AuthError):
event_auth.check(
- RoomVersions.MSC3083,
+ RoomVersions.V8,
_join_event(pleb),
auth_events,
do_sig_check=False,
@@ -427,7 +427,7 @@ class EventAuthTestCase(unittest.TestCase):
)
with self.assertRaises(AuthError):
event_auth.check(
- RoomVersions.MSC3083,
+ RoomVersions.V8,
_join_event(
pleb,
additional_content={
@@ -442,7 +442,7 @@ class EventAuthTestCase(unittest.TestCase):
# *would* be valid, but is sent be a different user.)
with self.assertRaises(AuthError):
event_auth.check(
- RoomVersions.MSC3083,
+ RoomVersions.V8,
_member_event(
pleb,
"join",
@@ -459,7 +459,7 @@ class EventAuthTestCase(unittest.TestCase):
auth_events[("m.room.member", pleb)] = _member_event(pleb, "ban")
with self.assertRaises(AuthError):
event_auth.check(
- RoomVersions.MSC3083,
+ RoomVersions.V8,
authorised_join_event,
auth_events,
do_sig_check=False,
@@ -468,7 +468,7 @@ class EventAuthTestCase(unittest.TestCase):
# A user who left can re-join.
auth_events[("m.room.member", pleb)] = _member_event(pleb, "leave")
event_auth.check(
- RoomVersions.MSC3083,
+ RoomVersions.V8,
authorised_join_event,
auth_events,
do_sig_check=False,
@@ -478,7 +478,7 @@ class EventAuthTestCase(unittest.TestCase):
# be authorised since the user is already joined.)
auth_events[("m.room.member", pleb)] = _member_event(pleb, "join")
event_auth.check(
- RoomVersions.MSC3083,
+ RoomVersions.V8,
_join_event(pleb),
auth_events,
do_sig_check=False,
@@ -490,7 +490,7 @@ class EventAuthTestCase(unittest.TestCase):
pleb, "invite", sender=creator
)
event_auth.check(
- RoomVersions.MSC3083,
+ RoomVersions.V8,
_join_event(pleb),
auth_events,
do_sig_check=False,
|