diff --git a/tests/handlers/test_cas.py b/tests/handlers/test_cas.py
index bd7a1b6891..c37bb6440e 100644
--- a/tests/handlers/test_cas.py
+++ b/tests/handlers/test_cas.py
@@ -118,4 +118,4 @@ class CasHandlerTestCase(HomeserverTestCase):
def _mock_request():
"""Returns a mock which will stand in as a SynapseRequest"""
- return Mock(spec=["getClientIP", "get_user_agent"])
+ return Mock(spec=["getClientIP", "getHeader"])
diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py
index f5df657814..b3dfa40d25 100644
--- a/tests/handlers/test_oidc.py
+++ b/tests/handlers/test_oidc.py
@@ -13,20 +13,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
-import re
-from typing import Dict
-from urllib.parse import parse_qs, urlencode, urlparse
+from typing import Optional
+from urllib.parse import parse_qs, urlparse
from mock import ANY, Mock, patch
import pymacaroons
-from twisted.web.resource import Resource
-
-from synapse.api.errors import RedirectException
from synapse.handlers.sso import MappingException
-from synapse.rest.client.v1 import login
-from synapse.rest.synapse.client.pick_username import pick_username_resource
from synapse.server import HomeServer
from synapse.types import UserID
@@ -151,6 +145,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
hs = self.setup_test_homeserver(proxied_http_client=self.http_client)
self.handler = hs.get_oidc_handler()
+ self.provider = self.handler._providers["oidc"]
sso_handler = hs.get_sso_handler()
# Mock the render error method.
self.render_error = Mock(return_value=None)
@@ -162,9 +157,10 @@ class OidcHandlerTestCase(HomeserverTestCase):
return hs
def metadata_edit(self, values):
- return patch.dict(self.handler._provider_metadata, values)
+ return patch.dict(self.provider._provider_metadata, values)
def assertRenderedError(self, error, error_description=None):
+ self.render_error.assert_called_once()
args = self.render_error.call_args[0]
self.assertEqual(args[1], error)
if error_description is not None:
@@ -175,15 +171,15 @@ class OidcHandlerTestCase(HomeserverTestCase):
def test_config(self):
"""Basic config correctly sets up the callback URL and client auth correctly."""
- self.assertEqual(self.handler._callback_url, CALLBACK_URL)
- self.assertEqual(self.handler._client_auth.client_id, CLIENT_ID)
- self.assertEqual(self.handler._client_auth.client_secret, CLIENT_SECRET)
+ self.assertEqual(self.provider._callback_url, CALLBACK_URL)
+ self.assertEqual(self.provider._client_auth.client_id, CLIENT_ID)
+ self.assertEqual(self.provider._client_auth.client_secret, CLIENT_SECRET)
@override_config({"oidc_config": {"discover": True}})
def test_discovery(self):
"""The handler should discover the endpoints from OIDC discovery document."""
# This would throw if some metadata were invalid
- metadata = self.get_success(self.handler.load_metadata())
+ metadata = self.get_success(self.provider.load_metadata())
self.http_client.get_json.assert_called_once_with(WELL_KNOWN)
self.assertEqual(metadata.issuer, ISSUER)
@@ -195,47 +191,47 @@ class OidcHandlerTestCase(HomeserverTestCase):
# subsequent calls should be cached
self.http_client.reset_mock()
- self.get_success(self.handler.load_metadata())
+ self.get_success(self.provider.load_metadata())
self.http_client.get_json.assert_not_called()
@override_config({"oidc_config": COMMON_CONFIG})
def test_no_discovery(self):
"""When discovery is disabled, it should not try to load from discovery document."""
- self.get_success(self.handler.load_metadata())
+ self.get_success(self.provider.load_metadata())
self.http_client.get_json.assert_not_called()
@override_config({"oidc_config": COMMON_CONFIG})
def test_load_jwks(self):
"""JWKS loading is done once (then cached) if used."""
- jwks = self.get_success(self.handler.load_jwks())
+ jwks = self.get_success(self.provider.load_jwks())
self.http_client.get_json.assert_called_once_with(JWKS_URI)
self.assertEqual(jwks, {"keys": []})
        # subsequent calls should be cached…
self.http_client.reset_mock()
- self.get_success(self.handler.load_jwks())
+ self.get_success(self.provider.load_jwks())
self.http_client.get_json.assert_not_called()
        # …unless forced
self.http_client.reset_mock()
- self.get_success(self.handler.load_jwks(force=True))
+ self.get_success(self.provider.load_jwks(force=True))
self.http_client.get_json.assert_called_once_with(JWKS_URI)
# Throw if the JWKS uri is missing
with self.metadata_edit({"jwks_uri": None}):
- self.get_failure(self.handler.load_jwks(force=True), RuntimeError)
+ self.get_failure(self.provider.load_jwks(force=True), RuntimeError)
# Return empty key set if JWKS are not used
- self.handler._scopes = [] # not asking the openid scope
+ self.provider._scopes = [] # not asking the openid scope
self.http_client.get_json.reset_mock()
- jwks = self.get_success(self.handler.load_jwks(force=True))
+ jwks = self.get_success(self.provider.load_jwks(force=True))
self.http_client.get_json.assert_not_called()
self.assertEqual(jwks, {"keys": []})
@override_config({"oidc_config": COMMON_CONFIG})
def test_validate_config(self):
"""Provider metadatas are extensively validated."""
- h = self.handler
+ h = self.provider
# Default test config does not throw
h._validate_metadata()
@@ -314,13 +310,13 @@ class OidcHandlerTestCase(HomeserverTestCase):
"""Provider metadata validation can be disabled by config."""
with self.metadata_edit({"issuer": "http://insecure"}):
# This should not throw
- self.handler._validate_metadata()
+ self.provider._validate_metadata()
def test_redirect_request(self):
"""The redirect request has the right arguments & generates a valid session cookie."""
req = Mock(spec=["addCookie"])
url = self.get_success(
- self.handler.handle_redirect_request(req, b"http://client/redirect")
+ self.provider.handle_redirect_request(req, b"http://client/redirect")
)
url = urlparse(url)
auth_endpoint = urlparse(AUTHORIZATION_ENDPOINT)
@@ -349,9 +345,13 @@ class OidcHandlerTestCase(HomeserverTestCase):
cookie = args[1]
macaroon = pymacaroons.Macaroon.deserialize(cookie)
- state = self.handler._get_value_from_macaroon(macaroon, "state")
- nonce = self.handler._get_value_from_macaroon(macaroon, "nonce")
- redirect = self.handler._get_value_from_macaroon(
+ state = self.handler._token_generator._get_value_from_macaroon(
+ macaroon, "state"
+ )
+ nonce = self.handler._token_generator._get_value_from_macaroon(
+ macaroon, "nonce"
+ )
+ redirect = self.handler._token_generator._get_value_from_macaroon(
macaroon, "client_redirect_url"
)
@@ -384,7 +384,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
# ensure that we are correctly testing the fallback when "get_extra_attributes"
# is not implemented.
- mapping_provider = self.handler._user_mapping_provider
+ mapping_provider = self.provider._user_mapping_provider
with self.assertRaises(AttributeError):
_ = mapping_provider.get_extra_attributes
@@ -399,9 +399,9 @@ class OidcHandlerTestCase(HomeserverTestCase):
"username": username,
}
expected_user_id = "@%s:%s" % (username, self.hs.hostname)
- self.handler._exchange_code = simple_async_mock(return_value=token)
- self.handler._parse_id_token = simple_async_mock(return_value=userinfo)
- self.handler._fetch_userinfo = simple_async_mock(return_value=userinfo)
+ self.provider._exchange_code = simple_async_mock(return_value=token)
+ self.provider._parse_id_token = simple_async_mock(return_value=userinfo)
+ self.provider._fetch_userinfo = simple_async_mock(return_value=userinfo)
auth_handler = self.hs.get_auth_handler()
auth_handler.complete_sso_login = simple_async_mock()
@@ -411,12 +411,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
client_redirect_url = "http://client/redirect"
user_agent = "Browser"
ip_address = "10.0.0.1"
- session = self.handler._generate_oidc_session_token(
- state=state,
- nonce=nonce,
- client_redirect_url=client_redirect_url,
- ui_auth_session_id=None,
- )
+ session = self._generate_oidc_session_token(state, nonce, client_redirect_url)
request = _build_callback_request(
code, state, session, user_agent=user_agent, ip_address=ip_address
)
@@ -426,14 +421,14 @@ class OidcHandlerTestCase(HomeserverTestCase):
auth_handler.complete_sso_login.assert_called_once_with(
expected_user_id, request, client_redirect_url, None,
)
- self.handler._exchange_code.assert_called_once_with(code)
- self.handler._parse_id_token.assert_called_once_with(token, nonce=nonce)
- self.handler._fetch_userinfo.assert_not_called()
+ self.provider._exchange_code.assert_called_once_with(code)
+ self.provider._parse_id_token.assert_called_once_with(token, nonce=nonce)
+ self.provider._fetch_userinfo.assert_not_called()
self.render_error.assert_not_called()
# Handle mapping errors
with patch.object(
- self.handler,
+ self.provider,
"_remote_id_from_userinfo",
new=Mock(side_effect=MappingException()),
):
@@ -441,36 +436,36 @@ class OidcHandlerTestCase(HomeserverTestCase):
self.assertRenderedError("mapping_error")
# Handle ID token errors
- self.handler._parse_id_token = simple_async_mock(raises=Exception())
+ self.provider._parse_id_token = simple_async_mock(raises=Exception())
self.get_success(self.handler.handle_oidc_callback(request))
self.assertRenderedError("invalid_token")
auth_handler.complete_sso_login.reset_mock()
- self.handler._exchange_code.reset_mock()
- self.handler._parse_id_token.reset_mock()
- self.handler._fetch_userinfo.reset_mock()
+ self.provider._exchange_code.reset_mock()
+ self.provider._parse_id_token.reset_mock()
+ self.provider._fetch_userinfo.reset_mock()
# With userinfo fetching
- self.handler._scopes = [] # do not ask the "openid" scope
+ self.provider._scopes = [] # do not ask the "openid" scope
self.get_success(self.handler.handle_oidc_callback(request))
auth_handler.complete_sso_login.assert_called_once_with(
expected_user_id, request, client_redirect_url, None,
)
- self.handler._exchange_code.assert_called_once_with(code)
- self.handler._parse_id_token.assert_not_called()
- self.handler._fetch_userinfo.assert_called_once_with(token)
+ self.provider._exchange_code.assert_called_once_with(code)
+ self.provider._parse_id_token.assert_not_called()
+ self.provider._fetch_userinfo.assert_called_once_with(token)
self.render_error.assert_not_called()
# Handle userinfo fetching error
- self.handler._fetch_userinfo = simple_async_mock(raises=Exception())
+ self.provider._fetch_userinfo = simple_async_mock(raises=Exception())
self.get_success(self.handler.handle_oidc_callback(request))
self.assertRenderedError("fetch_error")
# Handle code exchange failure
from synapse.handlers.oidc_handler import OidcError
- self.handler._exchange_code = simple_async_mock(
+ self.provider._exchange_code = simple_async_mock(
raises=OidcError("invalid_request")
)
self.get_success(self.handler.handle_oidc_callback(request))
@@ -500,11 +495,8 @@ class OidcHandlerTestCase(HomeserverTestCase):
self.assertRenderedError("invalid_session")
# Mismatching session
- session = self.handler._generate_oidc_session_token(
- state="state",
- nonce="nonce",
- client_redirect_url="http://client/redirect",
- ui_auth_session_id=None,
+ session = self._generate_oidc_session_token(
+ state="state", nonce="nonce", client_redirect_url="http://client/redirect",
)
request.args = {}
request.args[b"state"] = [b"mismatching state"]
@@ -528,7 +520,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
return_value=FakeResponse(code=200, phrase=b"OK", body=token_json)
)
code = "code"
- ret = self.get_success(self.handler._exchange_code(code))
+ ret = self.get_success(self.provider._exchange_code(code))
kwargs = self.http_client.request.call_args[1]
self.assertEqual(ret, token)
@@ -552,7 +544,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
)
from synapse.handlers.oidc_handler import OidcError
- exc = self.get_failure(self.handler._exchange_code(code), OidcError)
+ exc = self.get_failure(self.provider._exchange_code(code), OidcError)
self.assertEqual(exc.value.error, "foo")
self.assertEqual(exc.value.error_description, "bar")
@@ -562,7 +554,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
code=500, phrase=b"Internal Server Error", body=b"Not JSON",
)
)
- exc = self.get_failure(self.handler._exchange_code(code), OidcError)
+ exc = self.get_failure(self.provider._exchange_code(code), OidcError)
self.assertEqual(exc.value.error, "server_error")
# Internal server error with JSON body
@@ -574,14 +566,14 @@ class OidcHandlerTestCase(HomeserverTestCase):
)
)
- exc = self.get_failure(self.handler._exchange_code(code), OidcError)
+ exc = self.get_failure(self.provider._exchange_code(code), OidcError)
self.assertEqual(exc.value.error, "internal_server_error")
# 4xx error without "error" field
self.http_client.request = simple_async_mock(
return_value=FakeResponse(code=400, phrase=b"Bad request", body=b"{}",)
)
- exc = self.get_failure(self.handler._exchange_code(code), OidcError)
+ exc = self.get_failure(self.provider._exchange_code(code), OidcError)
self.assertEqual(exc.value.error, "server_error")
# 2xx error with "error" field
@@ -590,7 +582,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
code=200, phrase=b"OK", body=b'{"error": "some_error"}',
)
)
- exc = self.get_failure(self.handler._exchange_code(code), OidcError)
+ exc = self.get_failure(self.provider._exchange_code(code), OidcError)
self.assertEqual(exc.value.error, "some_error")
@override_config(
@@ -616,18 +608,15 @@ class OidcHandlerTestCase(HomeserverTestCase):
"username": "foo",
"phone": "1234567",
}
- self.handler._exchange_code = simple_async_mock(return_value=token)
- self.handler._parse_id_token = simple_async_mock(return_value=userinfo)
+ self.provider._exchange_code = simple_async_mock(return_value=token)
+ self.provider._parse_id_token = simple_async_mock(return_value=userinfo)
auth_handler = self.hs.get_auth_handler()
auth_handler.complete_sso_login = simple_async_mock()
state = "state"
client_redirect_url = "http://client/redirect"
- session = self.handler._generate_oidc_session_token(
- state=state,
- nonce="nonce",
- client_redirect_url=client_redirect_url,
- ui_auth_session_id=None,
+ session = self._generate_oidc_session_token(
+ state=state, nonce="nonce", client_redirect_url=client_redirect_url,
)
request = _build_callback_request("code", state, session)
@@ -841,116 +830,25 @@ class OidcHandlerTestCase(HomeserverTestCase):
self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
self.assertRenderedError("mapping_error", "localpart is invalid: ")
+ def _generate_oidc_session_token(
+ self,
+ state: str,
+ nonce: str,
+ client_redirect_url: str,
+ ui_auth_session_id: Optional[str] = None,
+ ) -> str:
+ from synapse.handlers.oidc_handler import OidcSessionData
-class UsernamePickerTestCase(HomeserverTestCase):
- if not HAS_OIDC:
- skip = "requires OIDC"
-
- servlets = [login.register_servlets]
-
- def default_config(self):
- config = super().default_config()
- config["public_baseurl"] = BASE_URL
- oidc_config = {
- "enabled": True,
- "client_id": CLIENT_ID,
- "client_secret": CLIENT_SECRET,
- "issuer": ISSUER,
- "scopes": SCOPES,
- "user_mapping_provider": {
- "config": {"display_name_template": "{{ user.displayname }}"}
- },
- }
-
- # Update this config with what's in the default config so that
- # override_config works as expected.
- oidc_config.update(config.get("oidc_config", {}))
- config["oidc_config"] = oidc_config
-
- # whitelist this client URI so we redirect straight to it rather than
- # serving a confirmation page
- config["sso"] = {"client_whitelist": ["https://whitelisted.client"]}
- return config
-
- def create_resource_dict(self) -> Dict[str, Resource]:
- d = super().create_resource_dict()
- d["/_synapse/client/pick_username"] = pick_username_resource(self.hs)
- return d
-
- def test_username_picker(self):
- """Test the happy path of a username picker flow."""
- client_redirect_url = "https://whitelisted.client"
-
- # first of all, mock up an OIDC callback to the OidcHandler, which should
- # raise a RedirectException
- userinfo = {"sub": "tester", "displayname": "Jonny"}
- f = self.get_failure(
- _make_callback_with_userinfo(
- self.hs, userinfo, client_redirect_url=client_redirect_url
+ return self.handler._token_generator.generate_oidc_session_token(
+ state=state,
+ session_data=OidcSessionData(
+ idp_id="oidc",
+ nonce=nonce,
+ client_redirect_url=client_redirect_url,
+ ui_auth_session_id=ui_auth_session_id,
),
- RedirectException,
- )
-
- # check the Location and cookies returned by the RedirectException
- self.assertEqual(f.value.location, b"/_synapse/client/pick_username")
- cookieheader = f.value.cookies[0]
- regex = re.compile(b"^username_mapping_session=([a-zA-Z]+);")
- m = regex.search(cookieheader)
- if not m:
- self.fail("cookie header %s does not match %s" % (cookieheader, regex))
-
- # introspect the sso handler a bit to check that the username mapping session
- # looks ok.
- session_id = m.group(1).decode("ascii")
- username_mapping_sessions = self.hs.get_sso_handler()._username_mapping_sessions
- self.assertIn(
- session_id, username_mapping_sessions, "session id not found in map"
- )
- session = username_mapping_sessions[session_id]
- self.assertEqual(session.remote_user_id, "tester")
- self.assertEqual(session.display_name, "Jonny")
- self.assertEqual(session.client_redirect_url, client_redirect_url)
-
- # the expiry time should be about 15 minutes away
- expected_expiry = self.clock.time_msec() + (15 * 60 * 1000)
- self.assertApproximates(session.expiry_time_ms, expected_expiry, tolerance=1000)
-
- # Now, submit a username to the username picker, which should serve a redirect
- # back to the client
- submit_path = f.value.location + b"/submit"
- content = urlencode({b"username": b"bobby"}).encode("utf8")
- chan = self.make_request(
- "POST",
- path=submit_path,
- content=content,
- content_is_form=True,
- custom_headers=[
- ("Cookie", cookieheader),
- # old versions of twisted don't do form-parsing without a valid
- # content-length header.
- ("Content-Length", str(len(content))),
- ],
- )
- self.assertEqual(chan.code, 302, chan.result)
- location_headers = chan.headers.getRawHeaders("Location")
- # ensure that the returned location starts with the requested redirect URL
- self.assertEqual(
- location_headers[0][: len(client_redirect_url)], client_redirect_url
)
- # fish the login token out of the returned redirect uri
- parts = urlparse(location_headers[0])
- query = parse_qs(parts.query)
- login_token = query["loginToken"][0]
-
- # finally, submit the matrix login token to the login API, which gives us our
- # matrix access token, mxid, and device id.
- chan = self.make_request(
- "POST", "/login", content={"type": "m.login.token", "token": login_token},
- )
- self.assertEqual(chan.code, 200, chan.result)
- self.assertEqual(chan.json_body["user_id"], "@bobby:test")
-
async def _make_callback_with_userinfo(
hs: HomeServer, userinfo: dict, client_redirect_url: str = "http://client/redirect"
@@ -965,17 +863,20 @@ async def _make_callback_with_userinfo(
userinfo: the OIDC userinfo dict
client_redirect_url: the URL to redirect to on success.
"""
+ from synapse.handlers.oidc_handler import OidcSessionData
+
handler = hs.get_oidc_handler()
- handler._exchange_code = simple_async_mock(return_value={})
- handler._parse_id_token = simple_async_mock(return_value=userinfo)
- handler._fetch_userinfo = simple_async_mock(return_value=userinfo)
+ provider = handler._providers["oidc"]
+ provider._exchange_code = simple_async_mock(return_value={})
+ provider._parse_id_token = simple_async_mock(return_value=userinfo)
+ provider._fetch_userinfo = simple_async_mock(return_value=userinfo)
state = "state"
- session = handler._generate_oidc_session_token(
+ session = handler._token_generator.generate_oidc_session_token(
state=state,
- nonce="nonce",
- client_redirect_url=client_redirect_url,
- ui_auth_session_id=None,
+ session_data=OidcSessionData(
+ idp_id="oidc", nonce="nonce", client_redirect_url=client_redirect_url,
+ ),
)
request = _build_callback_request("code", state, session)
@@ -1011,7 +912,7 @@ def _build_callback_request(
"addCookie",
"requestHeaders",
"getClientIP",
- "get_user_agent",
+ "getHeader",
]
)
@@ -1020,5 +921,4 @@ def _build_callback_request(
request.args[b"code"] = [code.encode("utf-8")]
request.args[b"state"] = [state.encode("utf-8")]
request.getClientIP.return_value = ip_address
- request.get_user_agent.return_value = user_agent
return request
diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py
index 919547556b..022943a10a 100644
--- a/tests/handlers/test_profile.py
+++ b/tests/handlers/test_profile.py
@@ -105,6 +105,21 @@ class ProfileTestCase(unittest.TestCase):
"Frank",
)
+ # Set displayname to an empty string
+ yield defer.ensureDeferred(
+ self.handler.set_displayname(
+ self.frank, synapse.types.create_requester(self.frank), ""
+ )
+ )
+
+ self.assertIsNone(
+ (
+ yield defer.ensureDeferred(
+ self.store.get_profile_displayname(self.frank.localpart)
+ )
+ )
+ )
+
@defer.inlineCallbacks
def test_set_my_name_if_disabled(self):
self.hs.config.enable_set_displayname = False
@@ -223,6 +238,21 @@ class ProfileTestCase(unittest.TestCase):
"http://my.server/me.png",
)
+ # Set avatar to an empty string
+ yield defer.ensureDeferred(
+ self.handler.set_avatar_url(
+ self.frank, synapse.types.create_requester(self.frank), "",
+ )
+ )
+
+ self.assertIsNone(
+ (
+ yield defer.ensureDeferred(
+ self.store.get_profile_avatar_url(self.frank.localpart)
+ )
+ ),
+ )
+
@defer.inlineCallbacks
def test_set_my_avatar_if_disabled(self):
self.hs.config.enable_set_avatar_url = False
diff --git a/tests/handlers/test_saml.py b/tests/handlers/test_saml.py
index 548038214b..261c7083d1 100644
--- a/tests/handlers/test_saml.py
+++ b/tests/handlers/test_saml.py
@@ -262,4 +262,4 @@ class SamlHandlerTestCase(HomeserverTestCase):
def _mock_request():
"""Returns a mock which will stand in as a SynapseRequest"""
- return Mock(spec=["getClientIP", "get_user_agent"])
+ return Mock(spec=["getClientIP", "getHeader"])
diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py
index 4e51839d0f..686012dd25 100644
--- a/tests/http/federation/test_matrix_federation_agent.py
+++ b/tests/http/federation/test_matrix_federation_agent.py
@@ -1095,7 +1095,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
# Expire both caches and repeat the request
self.reactor.pump((10000.0,))
- # Repated the request, this time it should fail if the lookup fails.
+ # Repeat the request, this time it should fail if the lookup fails.
fetch_d = defer.ensureDeferred(
self.well_known_resolver.get_well_known(b"testserv")
)
@@ -1130,7 +1130,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
content=b'{ "m.server": "' + (b"a" * WELL_KNOWN_MAX_SIZE) + b'" }',
)
- # The result is sucessful, but disabled delegation.
+ # The result is successful, but disabled delegation.
r = self.successResultOf(fetch_d)
self.assertIsNone(r.delegated_server)
diff --git a/tests/http/test_client.py b/tests/http/test_client.py
new file mode 100644
index 0000000000..f17c122e93
--- /dev/null
+++ b/tests/http/test_client.py
@@ -0,0 +1,101 @@
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from io import BytesIO
+
+from mock import Mock
+
+from twisted.python.failure import Failure
+from twisted.web.client import ResponseDone
+
+from synapse.http.client import BodyExceededMaxSize, read_body_with_max_size
+
+from tests.unittest import TestCase
+
+
+class ReadBodyWithMaxSizeTests(TestCase):
+ def setUp(self):
+        """Start reading the body; stores the response, result and protocol on self."""
+ self.response = Mock()
+ self.result = BytesIO()
+ self.deferred = read_body_with_max_size(self.response, self.result, 6)
+
+ # Fish the protocol out of the response.
+ self.protocol = self.response.deliverBody.call_args[0][0]
+ self.protocol.transport = Mock()
+
+ def _cleanup_error(self):
+ """Ensure that the error in the Deferred is handled gracefully."""
+ called = [False]
+
+ def errback(f):
+ called[0] = True
+
+ self.deferred.addErrback(errback)
+ self.assertTrue(called[0])
+
+ def test_no_error(self):
+ """A response that is NOT too large."""
+
+ # Start sending data.
+ self.protocol.dataReceived(b"12345")
+ # Close the connection.
+ self.protocol.connectionLost(Failure(ResponseDone()))
+
+ self.assertEqual(self.result.getvalue(), b"12345")
+ self.assertEqual(self.deferred.result, 5)
+
+ def test_too_large(self):
+ """A response which is too large raises an exception."""
+
+ # Start sending data.
+ self.protocol.dataReceived(b"1234567890")
+ # Close the connection.
+ self.protocol.connectionLost(Failure(ResponseDone()))
+
+ self.assertEqual(self.result.getvalue(), b"1234567890")
+ self.assertIsInstance(self.deferred.result, Failure)
+ self.assertIsInstance(self.deferred.result.value, BodyExceededMaxSize)
+ self._cleanup_error()
+
+ def test_multiple_packets(self):
+        """Data should be accumulated through multiple packets."""
+
+ # Start sending data.
+ self.protocol.dataReceived(b"12")
+ self.protocol.dataReceived(b"34")
+ # Close the connection.
+ self.protocol.connectionLost(Failure(ResponseDone()))
+
+ self.assertEqual(self.result.getvalue(), b"1234")
+ self.assertEqual(self.deferred.result, 4)
+
+ def test_additional_data(self):
+ """A connection can receive data after being closed."""
+
+ # Start sending data.
+ self.protocol.dataReceived(b"1234567890")
+ self.assertIsInstance(self.deferred.result, Failure)
+ self.assertIsInstance(self.deferred.result.value, BodyExceededMaxSize)
+ self.protocol.transport.loseConnection.assert_called_once()
+
+ # More data might have come in.
+ self.protocol.dataReceived(b"1234567890")
+ # Close the connection.
+ self.protocol.connectionLost(Failure(ResponseDone()))
+
+ self.assertEqual(self.result.getvalue(), b"1234567890")
+ self.assertIsInstance(self.deferred.result, Failure)
+ self.assertIsInstance(self.deferred.result.value, BodyExceededMaxSize)
+ self._cleanup_error()
diff --git a/tests/http/test_endpoint.py b/tests/http/test_endpoint.py
index b2e9533b07..d06ea518ce 100644
--- a/tests/http/test_endpoint.py
+++ b/tests/http/test_endpoint.py
@@ -12,7 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from synapse.http.endpoint import parse_and_validate_server_name, parse_server_name
+from synapse.util.stringutils import parse_and_validate_server_name, parse_server_name
from tests import unittest
diff --git a/tests/http/test_fedclient.py b/tests/http/test_fedclient.py
index 212484a7fe..9c52c8fdca 100644
--- a/tests/http/test_fedclient.py
+++ b/tests/http/test_fedclient.py
@@ -560,4 +560,4 @@ class FederationClientTests(HomeserverTestCase):
self.pump()
f = self.failureResultOf(test_d)
- self.assertIsInstance(f.value, ValueError)
+ self.assertIsInstance(f.value, RequestSendFailed)
diff --git a/tests/http/test_proxyagent.py b/tests/http/test_proxyagent.py
index 22abf76515..9a56e1c14a 100644
--- a/tests/http/test_proxyagent.py
+++ b/tests/http/test_proxyagent.py
@@ -15,12 +15,14 @@
import logging
import treq
+from netaddr import IPSet
from twisted.internet import interfaces # noqa: F401
from twisted.internet.protocol import Factory
from twisted.protocols.tls import TLSMemoryBIOFactory
from twisted.web.http import HTTPChannel
+from synapse.http.client import BlacklistingReactorWrapper
from synapse.http.proxyagent import ProxyAgent
from tests.http import TestServerTLSConnectionFactory, get_test_https_policy
@@ -292,6 +294,134 @@ class MatrixFederationAgentTests(TestCase):
body = self.successResultOf(treq.content(resp))
self.assertEqual(body, b"result")
+ def test_http_request_via_proxy_with_blacklist(self):
+ # The blacklist includes the configured proxy IP.
+ agent = ProxyAgent(
+ BlacklistingReactorWrapper(
+ self.reactor, ip_whitelist=None, ip_blacklist=IPSet(["1.0.0.0/8"])
+ ),
+ self.reactor,
+ http_proxy=b"proxy.com:8888",
+ )
+
+ self.reactor.lookups["proxy.com"] = "1.2.3.5"
+ d = agent.request(b"GET", b"http://test.com")
+
+ # there should be a pending TCP connection
+ clients = self.reactor.tcpClients
+ self.assertEqual(len(clients), 1)
+ (host, port, client_factory, _timeout, _bindAddress) = clients[0]
+ self.assertEqual(host, "1.2.3.5")
+ self.assertEqual(port, 8888)
+
+ # make a test server, and wire up the client
+ http_server = self._make_connection(
+ client_factory, _get_test_protocol_factory()
+ )
+
+ # the FakeTransport is async, so we need to pump the reactor
+ self.reactor.advance(0)
+
+ # now there should be a pending request
+ self.assertEqual(len(http_server.requests), 1)
+
+ request = http_server.requests[0]
+ self.assertEqual(request.method, b"GET")
+ self.assertEqual(request.path, b"http://test.com")
+ self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
+ request.write(b"result")
+ request.finish()
+
+ self.reactor.advance(0)
+
+ resp = self.successResultOf(d)
+ body = self.successResultOf(treq.content(resp))
+ self.assertEqual(body, b"result")
+
+ def test_https_request_via_proxy_with_blacklist(self):
+ # The blacklist includes the configured proxy IP.
+ agent = ProxyAgent(
+ BlacklistingReactorWrapper(
+ self.reactor, ip_whitelist=None, ip_blacklist=IPSet(["1.0.0.0/8"])
+ ),
+ self.reactor,
+ contextFactory=get_test_https_policy(),
+ https_proxy=b"proxy.com",
+ )
+
+ self.reactor.lookups["proxy.com"] = "1.2.3.5"
+ d = agent.request(b"GET", b"https://test.com/abc")
+
+ # there should be a pending TCP connection
+ clients = self.reactor.tcpClients
+ self.assertEqual(len(clients), 1)
+ (host, port, client_factory, _timeout, _bindAddress) = clients[0]
+ self.assertEqual(host, "1.2.3.5")
+ self.assertEqual(port, 1080)
+
+ # make a test HTTP server, and wire up the client
+ proxy_server = self._make_connection(
+ client_factory, _get_test_protocol_factory()
+ )
+
+ # fish the transports back out so that we can do the old switcheroo
+ s2c_transport = proxy_server.transport
+ client_protocol = s2c_transport.other
+ c2s_transport = client_protocol.transport
+
+ # the FakeTransport is async, so we need to pump the reactor
+ self.reactor.advance(0)
+
+ # now there should be a pending CONNECT request
+ self.assertEqual(len(proxy_server.requests), 1)
+
+ request = proxy_server.requests[0]
+ self.assertEqual(request.method, b"CONNECT")
+ self.assertEqual(request.path, b"test.com:443")
+
+ # tell the proxy server not to close the connection
+ proxy_server.persistent = True
+
+ # this just stops the http Request trying to do a chunked response
+ # request.setHeader(b"Content-Length", b"0")
+ request.finish()
+
+ # now we can replace the proxy channel with a new, SSL-wrapped HTTP channel
+ ssl_factory = _wrap_server_factory_for_tls(_get_test_protocol_factory())
+ ssl_protocol = ssl_factory.buildProtocol(None)
+ http_server = ssl_protocol.wrappedProtocol
+
+ ssl_protocol.makeConnection(
+ FakeTransport(client_protocol, self.reactor, ssl_protocol)
+ )
+ c2s_transport.other = ssl_protocol
+
+ self.reactor.advance(0)
+
+ server_name = ssl_protocol._tlsConnection.get_servername()
+ expected_sni = b"test.com"
+ self.assertEqual(
+ server_name,
+ expected_sni,
+ "Expected SNI %s but got %s" % (expected_sni, server_name),
+ )
+
+ # now there should be a pending request
+ self.assertEqual(len(http_server.requests), 1)
+
+ request = http_server.requests[0]
+ self.assertEqual(request.method, b"GET")
+ self.assertEqual(request.path, b"/abc")
+ self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
+ request.write(b"result")
+ request.finish()
+
+ self.reactor.advance(0)
+
+ resp = self.successResultOf(d)
+ body = self.successResultOf(treq.content(resp))
+ self.assertEqual(body, b"result")
+
def _wrap_server_factory_for_tls(factory, sanlist=None):
"""Wrap an existing Protocol Factory with a test TLSMemoryBIOFactory
diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py
index 0504cd187e..9d22c04073 100644
--- a/tests/rest/admin/test_admin.py
+++ b/tests/rest/admin/test_admin.py
@@ -58,8 +58,6 @@ class DeleteGroupTestCase(unittest.HomeserverTestCase):
]
def prepare(self, reactor, clock, hs):
- self.store = hs.get_datastore()
-
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
@@ -155,9 +153,6 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
]
def prepare(self, reactor, clock, hs):
- self.store = hs.get_datastore()
- self.hs = hs
-
# Allow for uploading and downloading to/from the media repo
self.media_repo = hs.get_media_repository_resource()
self.download_resource = self.media_repo.children[b"download"]
@@ -431,7 +426,11 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
# Mark the second item as safe from quarantine.
_, media_id_2 = server_and_media_id_2.split("/")
- self.get_success(self.store.mark_local_media_as_safe(media_id_2))
+ # Mark the media as safe from quarantine via the admin API
+ url = "/_synapse/admin/v1/media/protect/%s" % (urllib.parse.quote(media_id_2),)
+ channel = self.make_request("POST", url, access_token=admin_user_tok)
+ self.pump(1.0)
+ self.assertEqual(200, int(channel.code), msg=channel.result["body"])
# Quarantine all media by this user
url = "/_synapse/admin/v1/user/%s/media/quarantine" % urllib.parse.quote(
diff --git a/tests/rest/admin/test_event_reports.py b/tests/rest/admin/test_event_reports.py
index aa389df12f..d0090faa4f 100644
--- a/tests/rest/admin/test_event_reports.py
+++ b/tests/rest/admin/test_event_reports.py
@@ -32,8 +32,6 @@ class EventReportsTestCase(unittest.HomeserverTestCase):
]
def prepare(self, reactor, clock, hs):
- self.store = hs.get_datastore()
-
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
@@ -371,8 +369,6 @@ class EventReportDetailTestCase(unittest.HomeserverTestCase):
]
def prepare(self, reactor, clock, hs):
- self.store = hs.get_datastore()
-
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
diff --git a/tests/rest/admin/test_media.py b/tests/rest/admin/test_media.py
index c2b998cdae..51a7731693 100644
--- a/tests/rest/admin/test_media.py
+++ b/tests/rest/admin/test_media.py
@@ -35,7 +35,6 @@ class DeleteMediaByIDTestCase(unittest.HomeserverTestCase):
]
def prepare(self, reactor, clock, hs):
- self.handler = hs.get_device_handler()
self.media_repo = hs.get_media_repository_resource()
self.server_name = hs.hostname
@@ -181,7 +180,6 @@ class DeleteMediaByDateSizeTestCase(unittest.HomeserverTestCase):
]
def prepare(self, reactor, clock, hs):
- self.handler = hs.get_device_handler()
self.media_repo = hs.get_media_repository_resource()
self.server_name = hs.hostname
diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py
index fa620f97f3..a0f32c5512 100644
--- a/tests/rest/admin/test_room.py
+++ b/tests/rest/admin/test_room.py
@@ -605,8 +605,6 @@ class RoomTestCase(unittest.HomeserverTestCase):
]
def prepare(self, reactor, clock, hs):
- self.store = hs.get_datastore()
-
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
diff --git a/tests/rest/admin/test_statistics.py b/tests/rest/admin/test_statistics.py
index 73f8a8ec99..f48be3d65a 100644
--- a/tests/rest/admin/test_statistics.py
+++ b/tests/rest/admin/test_statistics.py
@@ -31,7 +31,6 @@ class UserMediaStatisticsTestCase(unittest.HomeserverTestCase):
]
def prepare(self, reactor, clock, hs):
- self.store = hs.get_datastore()
self.media_repo = hs.get_media_repository_resource()
self.admin_user = self.register_user("admin", "pass", admin=True)
diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py
index 9b2e4765f6..e48f8c1d7b 100644
--- a/tests/rest/admin/test_user.py
+++ b/tests/rest/admin/test_user.py
@@ -25,8 +25,10 @@ from mock import Mock
import synapse.rest.admin
from synapse.api.constants import UserTypes
from synapse.api.errors import Codes, HttpResponseException, ResourceLimitError
+from synapse.api.room_versions import RoomVersions
from synapse.rest.client.v1 import login, logout, profile, room
from synapse.rest.client.v2_alpha import devices, sync
+from synapse.types import JsonDict
from tests import unittest
from tests.test_utils import make_awaitable
@@ -467,13 +469,6 @@ class UsersListTestCase(unittest.HomeserverTestCase):
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
- self.user1 = self.register_user(
- "user1", "pass1", admin=False, displayname="Name 1"
- )
- self.user2 = self.register_user(
- "user2", "pass2", admin=False, displayname="Name 2"
- )
-
def test_no_auth(self):
"""
Try to list users without authentication.
@@ -487,6 +482,7 @@ class UsersListTestCase(unittest.HomeserverTestCase):
"""
If the user is not a server admin, an error is returned.
"""
+ self._create_users(1)
other_user_token = self.login("user1", "pass1")
channel = self.make_request("GET", self.url, access_token=other_user_token)
@@ -498,6 +494,8 @@ class UsersListTestCase(unittest.HomeserverTestCase):
"""
List all users, including deactivated users.
"""
+ self._create_users(2)
+
channel = self.make_request(
"GET",
self.url + "?deactivated=true",
@@ -510,14 +508,7 @@ class UsersListTestCase(unittest.HomeserverTestCase):
self.assertEqual(3, channel.json_body["total"])
# Check that all fields are available
- for u in channel.json_body["users"]:
- self.assertIn("name", u)
- self.assertIn("is_guest", u)
- self.assertIn("admin", u)
- self.assertIn("user_type", u)
- self.assertIn("deactivated", u)
- self.assertIn("displayname", u)
- self.assertIn("avatar_url", u)
+ self._check_fields(channel.json_body["users"])
def test_search_term(self):
"""Test that searching for a users works correctly"""
@@ -548,6 +539,7 @@ class UsersListTestCase(unittest.HomeserverTestCase):
# Check that users were returned
self.assertTrue("users" in channel.json_body)
+ self._check_fields(channel.json_body["users"])
users = channel.json_body["users"]
# Check that the expected number of users were returned
@@ -560,25 +552,30 @@ class UsersListTestCase(unittest.HomeserverTestCase):
u = users[0]
self.assertEqual(expected_user_id, u["name"])
+ self._create_users(2)
+
+ user1 = "@user1:test"
+ user2 = "@user2:test"
+
# Perform search tests
- _search_test(self.user1, "er1")
- _search_test(self.user1, "me 1")
+ _search_test(user1, "er1")
+ _search_test(user1, "me 1")
- _search_test(self.user2, "er2")
- _search_test(self.user2, "me 2")
+ _search_test(user2, "er2")
+ _search_test(user2, "me 2")
- _search_test(self.user1, "er1", "user_id")
- _search_test(self.user2, "er2", "user_id")
+ _search_test(user1, "er1", "user_id")
+ _search_test(user2, "er2", "user_id")
# Test case insensitive
- _search_test(self.user1, "ER1")
- _search_test(self.user1, "NAME 1")
+ _search_test(user1, "ER1")
+ _search_test(user1, "NAME 1")
- _search_test(self.user2, "ER2")
- _search_test(self.user2, "NAME 2")
+ _search_test(user2, "ER2")
+ _search_test(user2, "NAME 2")
- _search_test(self.user1, "ER1", "user_id")
- _search_test(self.user2, "ER2", "user_id")
+ _search_test(user1, "ER1", "user_id")
+ _search_test(user2, "ER2", "user_id")
_search_test(None, "foo")
_search_test(None, "bar")
@@ -586,6 +583,373 @@ class UsersListTestCase(unittest.HomeserverTestCase):
_search_test(None, "foo", "user_id")
_search_test(None, "bar", "user_id")
+ def test_invalid_parameter(self):
+ """
+ If parameters are invalid, an error is returned.
+ """
+
+ # negative limit
+ channel = self.make_request(
+ "GET", self.url + "?limit=-5", access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
+
+ # negative from
+ channel = self.make_request(
+ "GET", self.url + "?from=-5", access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
+
+ # invalid guests
+ channel = self.make_request(
+ "GET", self.url + "?guests=not_bool", access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.UNKNOWN, channel.json_body["errcode"])
+
+ # invalid deactivated
+ channel = self.make_request(
+ "GET", self.url + "?deactivated=not_bool", access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.UNKNOWN, channel.json_body["errcode"])
+
+ def test_limit(self):
+ """
+ Testing list of users with limit
+ """
+
+ number_users = 20
+ # Create one less user (since there's already an admin user).
+ self._create_users(number_users - 1)
+
+ channel = self.make_request(
+ "GET", self.url + "?limit=5", access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(channel.json_body["total"], number_users)
+ self.assertEqual(len(channel.json_body["users"]), 5)
+ self.assertEqual(channel.json_body["next_token"], "5")
+ self._check_fields(channel.json_body["users"])
+
+ def test_from(self):
+ """
+ Testing list of users with a defined starting point (from)
+ """
+
+ number_users = 20
+ # Create one less user (since there's already an admin user).
+ self._create_users(number_users - 1)
+
+ channel = self.make_request(
+ "GET", self.url + "?from=5", access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(channel.json_body["total"], number_users)
+ self.assertEqual(len(channel.json_body["users"]), 15)
+ self.assertNotIn("next_token", channel.json_body)
+ self._check_fields(channel.json_body["users"])
+
+ def test_limit_and_from(self):
+ """
+ Testing list of users with a defined starting point and limit
+ """
+
+ number_users = 20
+ # Create one less user (since there's already an admin user).
+ self._create_users(number_users - 1)
+
+ channel = self.make_request(
+ "GET", self.url + "?from=5&limit=10", access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(channel.json_body["total"], number_users)
+ self.assertEqual(channel.json_body["next_token"], "15")
+ self.assertEqual(len(channel.json_body["users"]), 10)
+ self._check_fields(channel.json_body["users"])
+
+ def test_next_token(self):
+ """
+ Testing that `next_token` appears at the right place
+ """
+
+ number_users = 20
+ # Create one less user (since there's already an admin user).
+ self._create_users(number_users - 1)
+
+ # `next_token` does not appear
+ # Number of results is the number of entries
+ channel = self.make_request(
+ "GET", self.url + "?limit=20", access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(channel.json_body["total"], number_users)
+ self.assertEqual(len(channel.json_body["users"]), number_users)
+ self.assertNotIn("next_token", channel.json_body)
+
+ # `next_token` does not appear
+ # Number of max results is larger than the number of entries
+ channel = self.make_request(
+ "GET", self.url + "?limit=21", access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(channel.json_body["total"], number_users)
+ self.assertEqual(len(channel.json_body["users"]), number_users)
+ self.assertNotIn("next_token", channel.json_body)
+
+ # `next_token` does appear
+ # Number of max results is smaller than the number of entries
+ channel = self.make_request(
+ "GET", self.url + "?limit=19", access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(channel.json_body["total"], number_users)
+ self.assertEqual(len(channel.json_body["users"]), 19)
+ self.assertEqual(channel.json_body["next_token"], "19")
+
+ # Check
+ # Set `from` to the value of `next_token` to request the remaining entries
+ # `next_token` does not appear
+ channel = self.make_request(
+ "GET", self.url + "?from=19", access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(channel.json_body["total"], number_users)
+ self.assertEqual(len(channel.json_body["users"]), 1)
+ self.assertNotIn("next_token", channel.json_body)
+
+ def _check_fields(self, content: JsonDict):
+ """Checks that the expected user attributes are present in content
+ Args:
+ content: List of users to check for the expected fields
+ """
+ for u in content:
+ self.assertIn("name", u)
+ self.assertIn("is_guest", u)
+ self.assertIn("admin", u)
+ self.assertIn("user_type", u)
+ self.assertIn("deactivated", u)
+ self.assertIn("displayname", u)
+ self.assertIn("avatar_url", u)
+
+ def _create_users(self, number_users: int):
+ """
+ Create a number of users
+ Args:
+ number_users: Number of users to be created
+ """
+ for i in range(1, number_users + 1):
+ self.register_user(
+ "user%d" % i, "pass%d" % i, admin=False, displayname="Name %d" % i,
+ )
+
+
+class DeactivateAccountTestCase(unittest.HomeserverTestCase):
+
+ servlets = [
+ synapse.rest.admin.register_servlets,
+ login.register_servlets,
+ ]
+
+ def prepare(self, reactor, clock, hs):
+ self.store = hs.get_datastore()
+
+ self.admin_user = self.register_user("admin", "pass", admin=True)
+ self.admin_user_tok = self.login("admin", "pass")
+
+ self.other_user = self.register_user("user", "pass", displayname="User1")
+ self.other_user_token = self.login("user", "pass")
+ self.url_other_user = "/_synapse/admin/v2/users/%s" % urllib.parse.quote(
+ self.other_user
+ )
+ self.url = "/_synapse/admin/v1/deactivate/%s" % urllib.parse.quote(
+ self.other_user
+ )
+
+ # set attributes for user
+ self.get_success(
+ self.store.set_profile_avatar_url("user", "mxc://servername/mediaid")
+ )
+ self.get_success(
+ self.store.user_add_threepid("@user:test", "email", "foo@bar.com", 0, 0)
+ )
+
+ def test_no_auth(self):
+ """
+ Try to deactivate users without authentication.
+ """
+ channel = self.make_request("POST", self.url, b"{}")
+
+ self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])
+
+ def test_requester_is_not_admin(self):
+ """
+ If the user is not a server admin, an error is returned.
+ """
+ url = "/_synapse/admin/v1/deactivate/@bob:test"
+
+ channel = self.make_request("POST", url, access_token=self.other_user_token)
+
+ self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual("You are not a server admin", channel.json_body["error"])
+
+ channel = self.make_request(
+ "POST", url, access_token=self.other_user_token, content=b"{}",
+ )
+
+ self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual("You are not a server admin", channel.json_body["error"])
+
+ def test_user_does_not_exist(self):
+ """
+ Tests that deactivation for a user that does not exist returns a 404
+ """
+
+ channel = self.make_request(
+ "POST",
+ "/_synapse/admin/v1/deactivate/@unknown_person:test",
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(404, channel.code, msg=channel.json_body)
+ self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])
+
+ def test_erase_is_not_bool(self):
+ """
+ If parameter `erase` is not boolean, return an error
+ """
+ body = json.dumps({"erase": "False"})
+
+ channel = self.make_request(
+ "POST",
+ self.url,
+ content=body.encode(encoding="utf_8"),
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.BAD_JSON, channel.json_body["errcode"])
+
+ def test_user_is_not_local(self):
+ """
+ Tests that deactivation for a user that is not local returns a 400
+ """
+ url = "/_synapse/admin/v1/deactivate/@unknown_person:unknown_domain"
+
+ channel = self.make_request("POST", url, access_token=self.admin_user_tok)
+
+ self.assertEqual(400, channel.code, msg=channel.json_body)
+ self.assertEqual("Can only deactivate local users", channel.json_body["error"])
+
+ def test_deactivate_user_erase_true(self):
+ """
+ Test deactivating a user and setting `erase` to `true`
+ """
+
+ # Get user
+ channel = self.make_request(
+ "GET", self.url_other_user, access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual("@user:test", channel.json_body["name"])
+ self.assertEqual(False, channel.json_body["deactivated"])
+ self.assertEqual("foo@bar.com", channel.json_body["threepids"][0]["address"])
+ self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"])
+ self.assertEqual("User1", channel.json_body["displayname"])
+
+ # Deactivate user
+ body = json.dumps({"erase": True})
+
+ channel = self.make_request(
+ "POST",
+ self.url,
+ access_token=self.admin_user_tok,
+ content=body.encode(encoding="utf_8"),
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+
+ # Get user
+ channel = self.make_request(
+ "GET", self.url_other_user, access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual("@user:test", channel.json_body["name"])
+ self.assertEqual(True, channel.json_body["deactivated"])
+ self.assertEqual(0, len(channel.json_body["threepids"]))
+ self.assertIsNone(channel.json_body["avatar_url"])
+ self.assertIsNone(channel.json_body["displayname"])
+
+ self._is_erased("@user:test", True)
+
+ def test_deactivate_user_erase_false(self):
+ """
+ Test deactivating a user and setting `erase` to `false`
+ """
+
+ # Get user
+ channel = self.make_request(
+ "GET", self.url_other_user, access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual("@user:test", channel.json_body["name"])
+ self.assertEqual(False, channel.json_body["deactivated"])
+ self.assertEqual("foo@bar.com", channel.json_body["threepids"][0]["address"])
+ self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"])
+ self.assertEqual("User1", channel.json_body["displayname"])
+
+ # Deactivate user
+ body = json.dumps({"erase": False})
+
+ channel = self.make_request(
+ "POST",
+ self.url,
+ access_token=self.admin_user_tok,
+ content=body.encode(encoding="utf_8"),
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+
+ # Get user
+ channel = self.make_request(
+ "GET", self.url_other_user, access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual("@user:test", channel.json_body["name"])
+ self.assertEqual(True, channel.json_body["deactivated"])
+ self.assertEqual(0, len(channel.json_body["threepids"]))
+ self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"])
+ self.assertEqual("User1", channel.json_body["displayname"])
+
+ self._is_erased("@user:test", False)
+
+ def _is_erased(self, user_id: str, expect: bool) -> None:
+ """Assert that the user is erased or not
+ """
+ d = self.store.is_user_erased(user_id)
+ if expect:
+ self.assertTrue(self.get_success(d))
+ else:
+ self.assertFalse(self.get_success(d))
+
class UserRestTestCase(unittest.HomeserverTestCase):
@@ -986,6 +1350,26 @@ class UserRestTestCase(unittest.HomeserverTestCase):
Test deactivating another user.
"""
+ # set attributes for user
+ self.get_success(
+ self.store.set_profile_avatar_url("user", "mxc://servername/mediaid")
+ )
+ self.get_success(
+ self.store.user_add_threepid("@user:test", "email", "foo@bar.com", 0, 0)
+ )
+
+ # Get user
+ channel = self.make_request(
+ "GET", self.url_other_user, access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual("@user:test", channel.json_body["name"])
+ self.assertEqual(False, channel.json_body["deactivated"])
+ self.assertEqual("foo@bar.com", channel.json_body["threepids"][0]["address"])
+ self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"])
+ self.assertEqual("User", channel.json_body["displayname"])
+
# Deactivate user
body = json.dumps({"deactivated": True})
@@ -999,6 +1383,9 @@ class UserRestTestCase(unittest.HomeserverTestCase):
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual("@user:test", channel.json_body["name"])
self.assertEqual(True, channel.json_body["deactivated"])
+ self.assertEqual(0, len(channel.json_body["threepids"]))
+ self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"])
+ self.assertEqual("User", channel.json_body["displayname"])
# the user is deactivated, the threepid will be deleted
# Get user
@@ -1009,6 +1396,9 @@ class UserRestTestCase(unittest.HomeserverTestCase):
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual("@user:test", channel.json_body["name"])
self.assertEqual(True, channel.json_body["deactivated"])
+ self.assertEqual(0, len(channel.json_body["threepids"]))
+ self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"])
+ self.assertEqual("User", channel.json_body["displayname"])
@override_config({"user_directory": {"enabled": True, "search_all_users": True}})
def test_change_name_deactivate_user_user_directory(self):
@@ -1204,8 +1594,6 @@ class UserMembershipRestTestCase(unittest.HomeserverTestCase):
]
def prepare(self, reactor, clock, hs):
- self.store = hs.get_datastore()
-
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
@@ -1236,24 +1624,26 @@ class UserMembershipRestTestCase(unittest.HomeserverTestCase):
def test_user_does_not_exist(self):
"""
- Tests that a lookup for a user that does not exist returns a 404
+ Tests that a lookup for a user that does not exist returns an empty list
"""
url = "/_synapse/admin/v1/users/@unknown_person:test/joined_rooms"
channel = self.make_request("GET", url, access_token=self.admin_user_tok,)
- self.assertEqual(404, channel.code, msg=channel.json_body)
- self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
+ self.assertEqual(0, channel.json_body["total"])
+ self.assertEqual(0, len(channel.json_body["joined_rooms"]))
def test_user_is_not_local(self):
"""
- Tests that a lookup for a user that is not a local returns a 400
+ Tests that a lookup for a user that is not local and has joined no rooms returns an empty list
"""
url = "/_synapse/admin/v1/users/@unknown_person:unknown_domain/joined_rooms"
channel = self.make_request("GET", url, access_token=self.admin_user_tok,)
- self.assertEqual(400, channel.code, msg=channel.json_body)
- self.assertEqual("Can only lookup local users", channel.json_body["error"])
+ self.assertEqual(200, channel.code, msg=channel.json_body)
+ self.assertEqual(0, channel.json_body["total"])
+ self.assertEqual(0, len(channel.json_body["joined_rooms"]))
def test_no_memberships(self):
"""
@@ -1284,6 +1674,49 @@ class UserMembershipRestTestCase(unittest.HomeserverTestCase):
self.assertEqual(number_rooms, channel.json_body["total"])
self.assertEqual(number_rooms, len(channel.json_body["joined_rooms"]))
+ def test_get_rooms_with_nonlocal_user(self):
+ """
+ Tests that a normal lookup for rooms is successful with a non-local user
+ """
+
+ other_user_tok = self.login("user", "pass")
+ event_builder_factory = self.hs.get_event_builder_factory()
+ event_creation_handler = self.hs.get_event_creation_handler()
+ storage = self.hs.get_storage()
+
+ # Create two rooms, one with a local user only and one with both a local
+ # and remote user.
+ self.helper.create_room_as(self.other_user, tok=other_user_tok)
+ local_and_remote_room_id = self.helper.create_room_as(
+ self.other_user, tok=other_user_tok
+ )
+
+ # Add a remote user to the room.
+ builder = event_builder_factory.for_room_version(
+ RoomVersions.V1,
+ {
+ "type": "m.room.member",
+ "sender": "@joiner:remote_hs",
+ "state_key": "@joiner:remote_hs",
+ "room_id": local_and_remote_room_id,
+ "content": {"membership": "join"},
+ },
+ )
+
+ event, context = self.get_success(
+ event_creation_handler.create_new_client_event(builder)
+ )
+
+ self.get_success(storage.persistence.persist_event(event, context))
+
+ # Now get rooms
+ url = "/_synapse/admin/v1/users/@joiner:remote_hs/joined_rooms"
+ channel = self.make_request("GET", url, access_token=self.admin_user_tok,)
+
+ self.assertEqual(200, channel.code, msg=channel.json_body)
+ self.assertEqual(1, channel.json_body["total"])
+ self.assertEqual([local_and_remote_room_id], channel.json_body["joined_rooms"])
+
class PushersRestTestCase(unittest.HomeserverTestCase):
@@ -1401,7 +1834,6 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
]
def prepare(self, reactor, clock, hs):
- self.store = hs.get_datastore()
self.media_repo = hs.get_media_repository_resource()
self.admin_user = self.register_user("admin", "pass", admin=True)
@@ -1868,8 +2300,6 @@ class WhoisRestTestCase(unittest.HomeserverTestCase):
]
def prepare(self, reactor, clock, hs):
- self.store = hs.get_datastore()
-
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
diff --git a/tests/rest/client/v1/test_login.py b/tests/rest/client/v1/test_login.py
index 1d1dc9f8a2..2672ce24c6 100644
--- a/tests/rest/client/v1/test_login.py
+++ b/tests/rest/client/v1/test_login.py
@@ -15,8 +15,8 @@
import time
import urllib.parse
-from html.parser import HTMLParser
-from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
+from typing import Any, Dict, Union
+from urllib.parse import urlencode
from mock import Mock
@@ -30,12 +30,15 @@ from synapse.rest.client.v1 import login, logout
from synapse.rest.client.v2_alpha import devices, register
from synapse.rest.client.v2_alpha.account import WhoamiRestServlet
from synapse.rest.synapse.client.pick_idp import PickIdpResource
+from synapse.rest.synapse.client.pick_username import pick_username_resource
+from synapse.types import create_requester
from tests import unittest
from tests.handlers.test_oidc import HAS_OIDC
from tests.handlers.test_saml import has_saml2
from tests.rest.client.v1.utils import TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG
-from tests.unittest import override_config, skip_unless
+from tests.test_utils.html_parsers import TestHtmlParser
+from tests.unittest import HomeserverTestCase, override_config, skip_unless
try:
import jwt
@@ -66,6 +69,12 @@ TEST_SAML_METADATA = """
LOGIN_URL = b"/_matrix/client/r0/login"
TEST_URL = b"/_matrix/client/r0/account/whoami"
+# a (valid) url with some annoying characters in. %3D is =, %26 is &, %2B is +
+TEST_CLIENT_REDIRECT_URL = 'https://x?<ab c>&q"+%3D%2B"="fรถ%26=o"'
+
+# the query params in TEST_CLIENT_REDIRECT_URL
+EXPECTED_CLIENT_REDIRECT_URL_PARAMS = [("<ab c>", ""), ('q" =+"', '"fรถ&=o"')]
+
class LoginRestServletTestCase(unittest.HomeserverTestCase):
@@ -386,23 +395,44 @@ class MultiSSOTestCase(unittest.HomeserverTestCase):
},
}
+ # default OIDC provider
config["oidc_config"] = TEST_OIDC_CONFIG
+ # additional OIDC providers
+ config["oidc_providers"] = [
+ {
+ "idp_id": "idp1",
+ "idp_name": "IDP1",
+ "discover": False,
+ "issuer": "https://issuer1",
+ "client_id": "test-client-id",
+ "client_secret": "test-client-secret",
+ "scopes": ["profile"],
+ "authorization_endpoint": "https://issuer1/auth",
+ "token_endpoint": "https://issuer1/token",
+ "userinfo_endpoint": "https://issuer1/userinfo",
+ "user_mapping_provider": {
+ "config": {"localpart_template": "{{ user.sub }}"}
+ },
+ }
+ ]
return config
def create_resource_dict(self) -> Dict[str, Resource]:
+ from synapse.rest.oidc import OIDCResource
+
d = super().create_resource_dict()
d["/_synapse/client/pick_idp"] = PickIdpResource(self.hs)
+ d["/_synapse/oidc"] = OIDCResource(self.hs)
return d
def test_multi_sso_redirect(self):
"""/login/sso/redirect should redirect to an identity picker"""
- client_redirect_url = "https://x?<abc>"
-
# first hit the redirect url, which should redirect to our idp picker
channel = self.make_request(
"GET",
- "/_matrix/client/r0/login/sso/redirect?redirectUrl=" + client_redirect_url,
+ "/_matrix/client/r0/login/sso/redirect?redirectUrl="
+ + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL),
)
self.assertEqual(channel.code, 302, channel.result)
uri = channel.headers.getRawHeaders("Location")[0]
@@ -412,46 +442,22 @@ class MultiSSOTestCase(unittest.HomeserverTestCase):
self.assertEqual(channel.code, 200, channel.result)
# parse the form to check it has fields assumed elsewhere in this class
- class FormPageParser(HTMLParser):
- def __init__(self):
- super().__init__()
-
- # the values of the hidden inputs: map from name to value
- self.hiddens = {} # type: Dict[str, Optional[str]]
-
- # the values of the radio buttons
- self.radios = [] # type: List[Optional[str]]
-
- def handle_starttag(
- self, tag: str, attrs: Iterable[Tuple[str, Optional[str]]]
- ) -> None:
- attr_dict = dict(attrs)
- if tag == "input":
- if attr_dict["type"] == "radio" and attr_dict["name"] == "idp":
- self.radios.append(attr_dict["value"])
- elif attr_dict["type"] == "hidden":
- input_name = attr_dict["name"]
- assert input_name
- self.hiddens[input_name] = attr_dict["value"]
-
- def error(_, message):
- self.fail(message)
-
- p = FormPageParser()
+ p = TestHtmlParser()
p.feed(channel.result["body"].decode("utf-8"))
p.close()
- self.assertCountEqual(p.radios, ["cas", "oidc", "saml"])
+ self.assertCountEqual(p.radios["idp"], ["cas", "oidc", "oidc-idp1", "saml"])
- self.assertEqual(p.hiddens["redirectUrl"], client_redirect_url)
+ self.assertEqual(p.hiddens["redirectUrl"], TEST_CLIENT_REDIRECT_URL)
def test_multi_sso_redirect_to_cas(self):
"""If CAS is chosen, should redirect to the CAS server"""
- client_redirect_url = "https://x?<abc>"
channel = self.make_request(
"GET",
- "/_synapse/client/pick_idp?redirectUrl=" + client_redirect_url + "&idp=cas",
+ "/_synapse/client/pick_idp?redirectUrl="
+ + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL)
+ + "&idp=cas",
shorthand=False,
)
self.assertEqual(channel.code, 302, channel.result)
@@ -467,16 +473,14 @@ class MultiSSOTestCase(unittest.HomeserverTestCase):
service_uri = cas_uri_params["service"][0]
_, service_uri_query = service_uri.split("?", 1)
service_uri_params = urllib.parse.parse_qs(service_uri_query)
- self.assertEqual(service_uri_params["redirectUrl"][0], client_redirect_url)
+ self.assertEqual(service_uri_params["redirectUrl"][0], TEST_CLIENT_REDIRECT_URL)
def test_multi_sso_redirect_to_saml(self):
"""If SAML is chosen, should redirect to the SAML server"""
- client_redirect_url = "https://x?<abc>"
-
channel = self.make_request(
"GET",
"/_synapse/client/pick_idp?redirectUrl="
- + client_redirect_url
+ + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL)
+ "&idp=saml",
)
self.assertEqual(channel.code, 302, channel.result)
@@ -489,16 +493,16 @@ class MultiSSOTestCase(unittest.HomeserverTestCase):
# the RelayState is used to carry the client redirect url
saml_uri_params = urllib.parse.parse_qs(saml_uri_query)
relay_state_param = saml_uri_params["RelayState"][0]
- self.assertEqual(relay_state_param, client_redirect_url)
+ self.assertEqual(relay_state_param, TEST_CLIENT_REDIRECT_URL)
- def test_multi_sso_redirect_to_oidc(self):
+ def test_login_via_oidc(self):
"""If OIDC is chosen, should redirect to the OIDC auth endpoint"""
- client_redirect_url = "https://x?<abc>"
+ # pick the default OIDC provider
channel = self.make_request(
"GET",
"/_synapse/client/pick_idp?redirectUrl="
- + client_redirect_url
+ + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL)
+ "&idp=oidc",
)
self.assertEqual(channel.code, 302, channel.result)
@@ -518,8 +522,40 @@ class MultiSSOTestCase(unittest.HomeserverTestCase):
macaroon = pymacaroons.Macaroon.deserialize(oidc_session_cookie)
self.assertEqual(
self._get_value_from_macaroon(macaroon, "client_redirect_url"),
- client_redirect_url,
+ TEST_CLIENT_REDIRECT_URL,
+ )
+
+ channel = self.helper.complete_oidc_auth(oidc_uri, cookies, {"sub": "user1"})
+
+ # that should serve a confirmation page
+ self.assertEqual(channel.code, 200, channel.result)
+ self.assertTrue(
+ channel.headers.getRawHeaders("Content-Type")[-1].startswith("text/html")
+ )
+ p = TestHtmlParser()
+ p.feed(channel.text_body)
+ p.close()
+
+ # ... which should contain our redirect link
+ self.assertEqual(len(p.links), 1)
+ path, query = p.links[0].split("?", 1)
+ self.assertEqual(path, "https://x")
+
+ # it will have url-encoded the params properly, so we'll have to parse them
+ params = urllib.parse.parse_qsl(
+ query, keep_blank_values=True, strict_parsing=True, errors="strict"
+ )
+ self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS)
+ self.assertEqual(params[2][0], "loginToken")
+
+ # finally, submit the matrix login token to the login API, which gives us our
+ # matrix access token, mxid, and device id.
+ login_token = params[2][1]
+ chan = self.make_request(
+ "POST", "/login", content={"type": "m.login.token", "token": login_token},
)
+ self.assertEqual(chan.code, 200, chan.result)
+ self.assertEqual(chan.json_body["user_id"], "@user1:test")
def test_multi_sso_redirect_to_unknown(self):
"""An unknown IdP should cause a 400"""
@@ -667,7 +703,9 @@ class CASTestCase(unittest.HomeserverTestCase):
# Deactivate the account.
self.get_success(
- self.deactivate_account_handler.deactivate_account(self.user_id, False)
+ self.deactivate_account_handler.deactivate_account(
+ self.user_id, False, create_requester(self.user_id)
+ )
)
# Request the CAS ticket.
@@ -1057,3 +1095,107 @@ class AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase):
channel = self.make_request(b"POST", LOGIN_URL, params)
self.assertEquals(channel.result["code"], b"401", channel.result)
+
+
+@skip_unless(HAS_OIDC, "requires OIDC")
+class UsernamePickerTestCase(HomeserverTestCase):
+ """Tests for the username picker flow of SSO login"""
+
+ servlets = [login.register_servlets]
+
+ def default_config(self):
+ config = super().default_config()
+ config["public_baseurl"] = BASE_URL
+
+ config["oidc_config"] = {}
+ config["oidc_config"].update(TEST_OIDC_CONFIG)
+ config["oidc_config"]["user_mapping_provider"] = {
+ "config": {"display_name_template": "{{ user.displayname }}"}
+ }
+
+ # whitelist this client URI so we redirect straight to it rather than
+ # serving a confirmation page
+ config["sso"] = {"client_whitelist": ["https://x"]}
+ return config
+
+ def create_resource_dict(self) -> Dict[str, Resource]:
+ from synapse.rest.oidc import OIDCResource
+
+ d = super().create_resource_dict()
+ d["/_synapse/client/pick_username"] = pick_username_resource(self.hs)
+ d["/_synapse/oidc"] = OIDCResource(self.hs)
+ return d
+
+ def test_username_picker(self):
+ """Test the happy path of a username picker flow."""
+
+ # do the start of the login flow
+ channel = self.helper.auth_via_oidc(
+ {"sub": "tester", "displayname": "Jonny"}, TEST_CLIENT_REDIRECT_URL
+ )
+
+ # that should redirect to the username picker
+ self.assertEqual(channel.code, 302, channel.result)
+ picker_url = channel.headers.getRawHeaders("Location")[0]
+ self.assertEqual(picker_url, "/_synapse/client/pick_username")
+
+ # ... with a username_mapping_session cookie
+ cookies = {} # type: Dict[str,str]
+ channel.extract_cookies(cookies)
+ self.assertIn("username_mapping_session", cookies)
+ session_id = cookies["username_mapping_session"]
+
+ # introspect the sso handler a bit to check that the username mapping session
+ # looks ok.
+ username_mapping_sessions = self.hs.get_sso_handler()._username_mapping_sessions
+ self.assertIn(
+ session_id, username_mapping_sessions, "session id not found in map",
+ )
+ session = username_mapping_sessions[session_id]
+ self.assertEqual(session.remote_user_id, "tester")
+ self.assertEqual(session.display_name, "Jonny")
+ self.assertEqual(session.client_redirect_url, TEST_CLIENT_REDIRECT_URL)
+
+ # the expiry time should be about 15 minutes away
+ expected_expiry = self.clock.time_msec() + (15 * 60 * 1000)
+ self.assertApproximates(session.expiry_time_ms, expected_expiry, tolerance=1000)
+
+ # Now, submit a username to the username picker, which should serve a redirect
+ # back to the client
+ submit_path = picker_url + "/submit"
+ content = urlencode({b"username": b"bobby"}).encode("utf8")
+ chan = self.make_request(
+ "POST",
+ path=submit_path,
+ content=content,
+ content_is_form=True,
+ custom_headers=[
+ ("Cookie", "username_mapping_session=" + session_id),
+ # old versions of twisted don't do form-parsing without a valid
+ # content-length header.
+ ("Content-Length", str(len(content))),
+ ],
+ )
+ self.assertEqual(chan.code, 302, chan.result)
+ location_headers = chan.headers.getRawHeaders("Location")
+ # ensure that the returned location matches the requested redirect URL
+ path, query = location_headers[0].split("?", 1)
+ self.assertEqual(path, "https://x")
+
+ # it will have url-encoded the params properly, so we'll have to parse them
+ params = urllib.parse.parse_qsl(
+ query, keep_blank_values=True, strict_parsing=True, errors="strict"
+ )
+ self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS)
+ self.assertEqual(params[2][0], "loginToken")
+
+ # fish the login token out of the returned redirect uri
+ login_token = params[2][1]
+
+ # finally, submit the matrix login token to the login API, which gives us our
+ # matrix access token, mxid, and device id.
+ chan = self.make_request(
+ "POST", "/login", content={"type": "m.login.token", "token": login_token},
+ )
+ self.assertEqual(chan.code, 200, chan.result)
+ self.assertEqual(chan.json_body["user_id"], "@bobby:test")
diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py
index 6105eac47c..d4e3165436 100644
--- a/tests/rest/client/v1/test_rooms.py
+++ b/tests/rest/client/v1/test_rooms.py
@@ -29,7 +29,7 @@ from synapse.handlers.pagination import PurgeStatus
from synapse.rest import admin
from synapse.rest.client.v1 import directory, login, profile, room
from synapse.rest.client.v2_alpha import account
-from synapse.types import JsonDict, RoomAlias, UserID
+from synapse.types import JsonDict, RoomAlias, UserID, create_requester
from synapse.util.stringutils import random_string
from tests import unittest
@@ -1687,7 +1687,9 @@ class ContextTestCase(unittest.HomeserverTestCase):
deactivate_account_handler = self.hs.get_deactivate_account_handler()
self.get_success(
- deactivate_account_handler.deactivate_account(self.user_id, erase_data=True)
+ deactivate_account_handler.deactivate_account(
+ self.user_id, True, create_requester(self.user_id)
+ )
)
# Invite another user in the room. This is needed because messages will be
diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py
index 81b7f84360..b1333df82d 100644
--- a/tests/rest/client/v1/utils.py
+++ b/tests/rest/client/v1/utils.py
@@ -2,7 +2,7 @@
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
-# Copyright 2019-2020 The Matrix.org Foundation C.I.C.
+# Copyright 2019-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,7 +20,7 @@ import json
import re
import time
import urllib.parse
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Mapping, MutableMapping, Optional
from mock import patch
@@ -32,8 +32,9 @@ from twisted.web.server import Site
from synapse.api.constants import Membership
from synapse.types import JsonDict
-from tests.server import FakeSite, make_request
+from tests.server import FakeChannel, FakeSite, make_request
from tests.test_utils import FakeResponse
+from tests.test_utils.html_parsers import TestHtmlParser
@attr.s
@@ -362,41 +363,128 @@ class RestHelper:
the normal places.
"""
client_redirect_url = "https://x"
+ channel = self.auth_via_oidc({"sub": remote_user_id}, client_redirect_url)
- # first hit the redirect url (which will issue a cookie and state)
+ # expect a confirmation page
+ assert channel.code == 200, channel.result
+
+ # fish the matrix login token out of the body of the confirmation page
+ m = re.search(
+ 'a href="%s.*loginToken=([^"]*)"' % (client_redirect_url,),
+ channel.text_body,
+ )
+ assert m, channel.text_body
+ login_token = m.group(1)
+
+ # finally, submit the matrix login token to the login API, which gives us our
+ # matrix access token and device id.
channel = make_request(
self.hs.get_reactor(),
self.site,
- "GET",
- "/login/sso/redirect?redirectUrl=" + client_redirect_url,
+ "POST",
+ "/login",
+ content={"type": "m.login.token", "token": login_token},
)
- # that will redirect to the OIDC IdP, but we skip that and go straight
+ assert channel.code == 200
+ return channel.json_body
+
+ def auth_via_oidc(
+ self,
+ user_info_dict: JsonDict,
+ client_redirect_url: Optional[str] = None,
+ ui_auth_session_id: Optional[str] = None,
+ ) -> FakeChannel:
+ """Perform an OIDC authentication flow via a mock OIDC provider.
+
+ This can be used for either login or user-interactive auth.
+
+ Starts by making a request to the relevant synapse redirect endpoint, which is
+ expected to serve a 302 to the OIDC provider. We then make a request to the
+ OIDC callback endpoint, intercepting the HTTP requests that will get sent back
+ to the OIDC provider.
+
+ Requires that "oidc_config" in the homeserver config be set appropriately
+ (TEST_OIDC_CONFIG is a suitable example) - and by implication, needs a
+ "public_base_url".
+
+ Also requires the login servlet and the OIDC callback resource to be mounted at
+ the normal places.
+
+ Args:
+ user_info_dict: the remote userinfo that the OIDC provider should present.
+ Typically this should be '{"sub": "<remote user id>"}'.
+ client_redirect_url: for a login flow, the client redirect URL to pass to
+ the login redirect endpoint
+ ui_auth_session_id: if set, we will perform a UI Auth flow. The session id
+ of the UI auth.
+
+ Returns:
+ A FakeChannel containing the result of calling the OIDC callback endpoint.
+ Note that the response code may be a 200, 302 or 400 depending on how things
+ went.
+ """
+
+ cookies = {}
+
+ # if we're doing a ui auth, hit the ui auth redirect endpoint
+ if ui_auth_session_id:
+ # can't set the client redirect url for UI Auth
+ assert client_redirect_url is None
+ oauth_uri = self.initiate_sso_ui_auth(ui_auth_session_id, cookies)
+ else:
+ # otherwise, hit the login redirect endpoint
+ oauth_uri = self.initiate_sso_login(client_redirect_url, cookies)
+
+ # we now have a URI for the OIDC IdP, but we skip that and go straight
# back to synapse's OIDC callback resource. However, we do need the "state"
- # param that synapse passes to the IdP via query params, and the cookie that
- # synapse passes to the client.
- assert channel.code == 302
- oauth_uri = channel.headers.getRawHeaders("Location")[0]
- params = urllib.parse.parse_qs(urllib.parse.urlparse(oauth_uri).query)
- redirect_uri = "%s?%s" % (
+ # param that synapse passes to the IdP via query params, as well as the cookie
+ # that synapse passes to the client.
+
+ oauth_uri_path, _ = oauth_uri.split("?", 1)
+ assert oauth_uri_path == TEST_OIDC_AUTH_ENDPOINT, (
+ "unexpected SSO URI " + oauth_uri_path
+ )
+ return self.complete_oidc_auth(oauth_uri, cookies, user_info_dict)
+
+ def complete_oidc_auth(
+ self, oauth_uri: str, cookies: Mapping[str, str], user_info_dict: JsonDict,
+ ) -> FakeChannel:
+ """Mock out an OIDC authentication flow
+
+ Assumes that an OIDC auth has been initiated by one of initiate_sso_login or
+ initiate_sso_ui_auth; completes the OIDC bits of the flow by making a request to
+ Synapse's OIDC callback endpoint, intercepting the HTTP requests that will get
+ sent back to the OIDC provider.
+
+ Requires the OIDC callback resource to be mounted at the normal place.
+
+ Args:
+ oauth_uri: the OIDC URI returned by synapse's redirect endpoint (ie,
+ from initiate_sso_login or initiate_sso_ui_auth).
+ cookies: the cookies set by synapse's redirect endpoint, which will be
+ sent back to the callback endpoint.
+ user_info_dict: the remote userinfo that the OIDC provider should present.
+ Typically this should be '{"sub": "<remote user id>"}'.
+
+ Returns:
+ A FakeChannel containing the result of calling the OIDC callback endpoint.
+ """
+ _, oauth_uri_qs = oauth_uri.split("?", 1)
+ params = urllib.parse.parse_qs(oauth_uri_qs)
+ callback_uri = "%s?%s" % (
urllib.parse.urlparse(params["redirect_uri"][0]).path,
urllib.parse.urlencode({"state": params["state"][0], "code": "TEST_CODE"}),
)
- cookies = {}
- for h in channel.headers.getRawHeaders("Set-Cookie"):
- parts = h.split(";")
- k, v = parts[0].split("=", maxsplit=1)
- cookies[k] = v
# before we hit the callback uri, stub out some methods in the http client so
# that we don't have to handle full HTTPS requests.
-
# (expected url, json response) pairs, in the order we expect them.
expected_requests = [
# first we get a hit to the token endpoint, which we tell to return
# a dummy OIDC access token
- ("https://issuer.test/token", {"access_token": "TEST"}),
+ (TEST_OIDC_TOKEN_ENDPOINT, {"access_token": "TEST"}),
# and then one to the user_info endpoint, which returns our remote user id.
- ("https://issuer.test/userinfo", {"sub": remote_user_id}),
+ (TEST_OIDC_USERINFO_ENDPOINT, user_info_dict),
]
async def mock_req(method: str, uri: str, data=None, headers=None):
@@ -413,38 +501,85 @@ class RestHelper:
self.hs.get_reactor(),
self.site,
"GET",
- redirect_uri,
+ callback_uri,
custom_headers=[
("Cookie", "%s=%s" % (k, v)) for (k, v) in cookies.items()
],
)
+ return channel
- # expect a confirmation page
- assert channel.code == 200
+ def initiate_sso_login(
+ self, client_redirect_url: Optional[str], cookies: MutableMapping[str, str]
+ ) -> str:
+ """Make a request to the login-via-sso redirect endpoint, and return the target
- # fish the matrix login token out of the body of the confirmation page
- m = re.search(
- 'a href="%s.*loginToken=([^"]*)"' % (client_redirect_url,),
- channel.result["body"].decode("utf-8"),
- )
- assert m
- login_token = m.group(1)
+ Assumes that exactly one SSO provider has been configured. Requires the login
+ servlet to be mounted.
- # finally, submit the matrix login token to the login API, which gives us our
- # matrix access token and device id.
+ Args:
+ client_redirect_url: the client redirect URL to pass to the login redirect
+ endpoint
+ cookies: any cookies returned will be added to this dict
+
+ Returns:
+ the URI that the client gets redirected to (ie, the SSO server)
+ """
+ params = {}
+ if client_redirect_url:
+ params["redirectUrl"] = client_redirect_url
+
+ # hit the redirect url (which will issue a cookie and state)
channel = make_request(
self.hs.get_reactor(),
self.site,
- "POST",
- "/login",
- content={"type": "m.login.token", "token": login_token},
+ "GET",
+ "/_matrix/client/r0/login/sso/redirect?" + urllib.parse.urlencode(params),
)
- assert channel.code == 200
- return channel.json_body
+
+ assert channel.code == 302
+ channel.extract_cookies(cookies)
+ return channel.headers.getRawHeaders("Location")[0]
+
+ def initiate_sso_ui_auth(
+ self, ui_auth_session_id: str, cookies: MutableMapping[str, str]
+ ) -> str:
+ """Make a request to the ui-auth-via-sso endpoint, and return the target
+
+ Assumes that exactly one SSO provider has been configured. Requires the
+ AuthRestServlet to be mounted.
+
+ Args:
+ ui_auth_session_id: the session id of the UI auth
+ cookies: any cookies returned will be added to this dict
+
+ Returns:
+ the URI that the client gets linked to (ie, the SSO server)
+ """
+ sso_redirect_endpoint = (
+ "/_matrix/client/r0/auth/m.login.sso/fallback/web?"
+ + urllib.parse.urlencode({"session": ui_auth_session_id})
+ )
+ # hit the redirect url (which will issue a cookie and state)
+ channel = make_request(
+ self.hs.get_reactor(), self.site, "GET", sso_redirect_endpoint
+ )
+ # that should serve a confirmation page
+ assert channel.code == 200, channel.text_body
+ channel.extract_cookies(cookies)
+
+ # parse the confirmation page to fish out the link.
+ p = TestHtmlParser()
+ p.feed(channel.text_body)
+ p.close()
+ assert len(p.links) == 1, "not exactly one link in confirmation page"
+ oauth_uri = p.links[0]
+ return oauth_uri
# an 'oidc_config' suitable for login_via_oidc.
TEST_OIDC_AUTH_ENDPOINT = "https://issuer.test/auth"
+TEST_OIDC_TOKEN_ENDPOINT = "https://issuer.test/token"
+TEST_OIDC_USERINFO_ENDPOINT = "https://issuer.test/userinfo"
TEST_OIDC_CONFIG = {
"enabled": True,
"discover": False,
@@ -453,7 +588,7 @@ TEST_OIDC_CONFIG = {
"client_secret": "test-client-secret",
"scopes": ["profile"],
"authorization_endpoint": TEST_OIDC_AUTH_ENDPOINT,
- "token_endpoint": "https://issuer.test/token",
- "userinfo_endpoint": "https://issuer.test/userinfo",
+ "token_endpoint": TEST_OIDC_TOKEN_ENDPOINT,
+ "userinfo_endpoint": TEST_OIDC_USERINFO_ENDPOINT,
"user_mapping_provider": {"config": {"localpart_template": "{{ user.sub }}"}},
}
diff --git a/tests/rest/client/v2_alpha/test_auth.py b/tests/rest/client/v2_alpha/test_auth.py
index bb91e0c331..a6488a3d29 100644
--- a/tests/rest/client/v2_alpha/test_auth.py
+++ b/tests/rest/client/v2_alpha/test_auth.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
# Copyright 2018 New Vector
+# Copyright 2020-2021 The Matrix.org Foundation C.I.C
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
from typing import Union
from twisted.internet.defer import succeed
@@ -386,6 +386,44 @@ class UIAuthTests(unittest.HomeserverTestCase):
@skip_unless(HAS_OIDC, "requires OIDC")
@override_config({"oidc_config": TEST_OIDC_CONFIG})
+ def test_ui_auth_via_sso(self):
+ """Test a successful UI Auth flow via SSO
+
+ This includes:
+ * hitting the UIA SSO redirect endpoint
+ * checking it serves a confirmation page which links to the OIDC provider
+ * calling back to the synapse oidc callback
+ * checking that the original operation succeeds
+ """
+
+ # log the user in
+ remote_user_id = UserID.from_string(self.user).localpart
+ login_resp = self.helper.login_via_oidc(remote_user_id)
+ self.assertEqual(login_resp["user_id"], self.user)
+
+ # initiate a UI Auth process by attempting to delete the device
+ channel = self.delete_device(self.user_tok, self.device_id, 401)
+
+ # check that SSO is offered
+ flows = channel.json_body["flows"]
+ self.assertIn({"stages": ["m.login.sso"]}, flows)
+
+ # run the UIA-via-SSO flow
+ session_id = channel.json_body["session"]
+ channel = self.helper.auth_via_oidc(
+ {"sub": remote_user_id}, ui_auth_session_id=session_id
+ )
+
+ # that should serve a confirmation page
+ self.assertEqual(channel.code, 200, channel.result)
+
+ # and now the delete request should succeed.
+ self.delete_device(
+ self.user_tok, self.device_id, 200, body={"auth": {"session": session_id}},
+ )
+
+ @skip_unless(HAS_OIDC, "requires OIDC")
+ @override_config({"oidc_config": TEST_OIDC_CONFIG})
def test_does_not_offer_password_for_sso_user(self):
login_resp = self.helper.login_via_oidc("username")
user_tok = login_resp["access_token"]
@@ -419,3 +457,32 @@ class UIAuthTests(unittest.HomeserverTestCase):
self.assertIn({"stages": ["m.login.password"]}, flows)
self.assertIn({"stages": ["m.login.sso"]}, flows)
self.assertEqual(len(flows), 2)
+
+ @skip_unless(HAS_OIDC, "requires OIDC")
+ @override_config({"oidc_config": TEST_OIDC_CONFIG})
+ def test_ui_auth_fails_for_incorrect_sso_user(self):
+ """If the user tries to authenticate with the wrong SSO user, they get an error
+ """
+ # log the user in
+ login_resp = self.helper.login_via_oidc(UserID.from_string(self.user).localpart)
+ self.assertEqual(login_resp["user_id"], self.user)
+
+ # start a UI Auth flow by attempting to delete a device
+ channel = self.delete_device(self.user_tok, self.device_id, 401)
+
+ flows = channel.json_body["flows"]
+ self.assertIn({"stages": ["m.login.sso"]}, flows)
+ session_id = channel.json_body["session"]
+
+ # do the OIDC auth, but auth as the wrong user
+ channel = self.helper.auth_via_oidc(
+ {"sub": "wrong_user"}, ui_auth_session_id=session_id
+ )
+
+ # that should return a failure message
+ self.assertSubstring("We were unable to validate", channel.text_body)
+
+ # ... and the delete op should now fail with a 403
+ self.delete_device(
+ self.user_tok, self.device_id, 403, body={"auth": {"session": session_id}}
+ )
diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py
index ae2b32b131..a6c6985173 100644
--- a/tests/rest/media/v1/test_media_storage.py
+++ b/tests/rest/media/v1/test_media_storage.py
@@ -202,7 +202,6 @@ class MediaRepoTests(unittest.HomeserverTestCase):
config = self.default_config()
config["media_store_path"] = self.media_store_path
- config["thumbnail_requirements"] = {}
config["max_image_pixels"] = 2000000
provider_config = {
@@ -313,15 +312,39 @@ class MediaRepoTests(unittest.HomeserverTestCase):
self.assertEqual(headers.getRawHeaders(b"Content-Disposition"), None)
def test_thumbnail_crop(self):
+ """Test that a cropped remote thumbnail is available."""
self._test_thumbnail(
"crop", self.test_image.expected_cropped, self.test_image.expected_found
)
def test_thumbnail_scale(self):
+ """Test that a scaled remote thumbnail is available."""
self._test_thumbnail(
"scale", self.test_image.expected_scaled, self.test_image.expected_found
)
+ def test_invalid_type(self):
+ """An invalid thumbnail type is never available."""
+ self._test_thumbnail("invalid", None, False)
+
+ @unittest.override_config(
+ {"thumbnail_sizes": [{"width": 32, "height": 32, "method": "scale"}]}
+ )
+ def test_no_thumbnail_crop(self):
+ """
+ Override the config to generate only scaled thumbnails, but request a cropped one.
+ """
+ self._test_thumbnail("crop", None, False)
+
+ @unittest.override_config(
+ {"thumbnail_sizes": [{"width": 32, "height": 32, "method": "crop"}]}
+ )
+ def test_no_thumbnail_scale(self):
+ """
+ Override the config to generate only cropped thumbnails, but request a scaled one.
+ """
+ self._test_thumbnail("scale", None, False)
+
def _test_thumbnail(self, method, expected_body, expected_found):
params = "?width=32&height=32&method=" + method
channel = make_request(
diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py
index 14de0921be..c5e44af9f7 100644
--- a/tests/rest/test_well_known.py
+++ b/tests/rest/test_well_known.py
@@ -40,12 +40,3 @@ class WellKnownTests(unittest.HomeserverTestCase):
"m.identity_server": {"base_url": "https://testis"},
},
)
-
- def test_well_known_no_public_baseurl(self):
- self.hs.config.public_baseurl = None
-
- channel = self.make_request(
- "GET", "/.well-known/matrix/client", shorthand=False
- )
-
- self.assertEqual(channel.code, 404)
diff --git a/tests/server.py b/tests/server.py
index 7d1ad362c4..5a85d5fe7f 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -2,7 +2,7 @@ import json
import logging
from collections import deque
from io import SEEK_END, BytesIO
-from typing import Callable, Iterable, Optional, Tuple, Union
+from typing import Callable, Iterable, MutableMapping, Optional, Tuple, Union
import attr
from typing_extensions import Deque
@@ -51,9 +51,21 @@ class FakeChannel:
@property
def json_body(self):
- if not self.result:
- raise Exception("No result yet.")
- return json.loads(self.result["body"].decode("utf8"))
+ return json.loads(self.text_body)
+
+ @property
+ def text_body(self) -> str:
+ """The body of the result, utf-8-decoded.
+
+ Raises an exception if the request has not yet completed.
+ """
+ if not self.is_finished:
+ raise Exception("Request not yet completed")
+ return self.result["body"].decode("utf8")
+
+ def is_finished(self) -> bool:
+ """check if the response has been completely received"""
+ return self.result.get("done", False)
@property
def code(self):
@@ -62,7 +74,7 @@ class FakeChannel:
return int(self.result["code"])
@property
- def headers(self):
+ def headers(self) -> Headers:
if not self.result:
raise Exception("No result yet.")
h = Headers()
@@ -124,7 +136,7 @@ class FakeChannel:
self._reactor.run()
x = 0
- while not self.result.get("done"):
+ while not self.is_finished():
# If there's a producer, tell it to resume producing so we get content
if self._producer:
self._producer.resumeProducing()
@@ -136,6 +148,16 @@ class FakeChannel:
self._reactor.advance(0.1)
+ def extract_cookies(self, cookies: MutableMapping[str, str]) -> None:
+ """Process the contents of any Set-Cookie headers in the response
+
+        Any cookies found are added to the given dict
+ """
+ for h in self.headers.getRawHeaders("Set-Cookie"):
+ parts = h.split(";")
+ k, v = parts[0].split("=", maxsplit=1)
+ cookies[k] = v
+
class FakeSite:
"""
diff --git a/tests/storage/test_event_chain.py b/tests/storage/test_event_chain.py
new file mode 100644
index 0000000000..0c46ad595b
--- /dev/null
+++ b/tests/storage/test_event_chain.py
@@ -0,0 +1,741 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Dict, List, Set, Tuple
+
+from twisted.trial import unittest
+
+from synapse.api.constants import EventTypes
+from synapse.api.room_versions import RoomVersions
+from synapse.events import EventBase
+from synapse.rest import admin
+from synapse.rest.client.v1 import login, room
+from synapse.storage.databases.main.events import _LinkMap
+from synapse.types import create_requester
+
+from tests.unittest import HomeserverTestCase
+
+
+class EventChainStoreTestCase(HomeserverTestCase):
+ def prepare(self, reactor, clock, hs):
+ self.store = hs.get_datastore()
+ self._next_stream_ordering = 1
+
+ def test_simple(self):
+ """Test that the example in `docs/auth_chain_difference_algorithm.md`
+ works.
+ """
+
+ event_factory = self.hs.get_event_builder_factory()
+ bob = "@creator:test"
+ alice = "@alice:test"
+ room_id = "!room:test"
+
+ # Ensure that we have a rooms entry so that we generate the chain index.
+ self.get_success(
+ self.store.store_room(
+ room_id=room_id,
+ room_creator_user_id="",
+ is_public=True,
+ room_version=RoomVersions.V6,
+ )
+ )
+
+ create = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.Create,
+ "state_key": "",
+ "sender": bob,
+ "room_id": room_id,
+ "content": {"tag": "create"},
+ },
+ ).build(prev_event_ids=[], auth_event_ids=[])
+ )
+
+ bob_join = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.Member,
+ "state_key": bob,
+ "sender": bob,
+ "room_id": room_id,
+ "content": {"tag": "bob_join"},
+ },
+ ).build(prev_event_ids=[], auth_event_ids=[create.event_id])
+ )
+
+ power = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.PowerLevels,
+ "state_key": "",
+ "sender": bob,
+ "room_id": room_id,
+ "content": {"tag": "power"},
+ },
+ ).build(
+ prev_event_ids=[], auth_event_ids=[create.event_id, bob_join.event_id],
+ )
+ )
+
+ alice_invite = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.Member,
+ "state_key": alice,
+ "sender": bob,
+ "room_id": room_id,
+ "content": {"tag": "alice_invite"},
+ },
+ ).build(
+ prev_event_ids=[],
+ auth_event_ids=[create.event_id, bob_join.event_id, power.event_id],
+ )
+ )
+
+ alice_join = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.Member,
+ "state_key": alice,
+ "sender": alice,
+ "room_id": room_id,
+ "content": {"tag": "alice_join"},
+ },
+ ).build(
+ prev_event_ids=[],
+ auth_event_ids=[create.event_id, alice_invite.event_id, power.event_id],
+ )
+ )
+
+ power_2 = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.PowerLevels,
+ "state_key": "",
+ "sender": bob,
+ "room_id": room_id,
+ "content": {"tag": "power_2"},
+ },
+ ).build(
+ prev_event_ids=[],
+ auth_event_ids=[create.event_id, bob_join.event_id, power.event_id],
+ )
+ )
+
+ bob_join_2 = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.Member,
+ "state_key": bob,
+ "sender": bob,
+ "room_id": room_id,
+ "content": {"tag": "bob_join_2"},
+ },
+ ).build(
+ prev_event_ids=[],
+ auth_event_ids=[create.event_id, bob_join.event_id, power.event_id],
+ )
+ )
+
+ alice_join2 = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.Member,
+ "state_key": alice,
+ "sender": alice,
+ "room_id": room_id,
+ "content": {"tag": "alice_join2"},
+ },
+ ).build(
+ prev_event_ids=[],
+ auth_event_ids=[
+ create.event_id,
+ alice_join.event_id,
+ power_2.event_id,
+ ],
+ )
+ )
+
+ events = [
+ create,
+ bob_join,
+ power,
+ alice_invite,
+ alice_join,
+ bob_join_2,
+ power_2,
+ alice_join2,
+ ]
+
+ expected_links = [
+ (bob_join, create),
+ (power, create),
+ (power, bob_join),
+ (alice_invite, create),
+ (alice_invite, power),
+ (alice_invite, bob_join),
+ (bob_join_2, power),
+ (alice_join2, power_2),
+ ]
+
+ self.persist(events)
+ chain_map, link_map = self.fetch_chains(events)
+
+ # Check that the expected links and only the expected links have been
+ # added.
+ self.assertEqual(len(expected_links), len(list(link_map.get_additions())))
+
+ for start, end in expected_links:
+ start_id, start_seq = chain_map[start.event_id]
+ end_id, end_seq = chain_map[end.event_id]
+
+ self.assertIn(
+ (start_seq, end_seq), list(link_map.get_links_between(start_id, end_id))
+ )
+
+ # Test that everything can reach the create event, but the create event
+ # can't reach anything.
+ for event in events[1:]:
+ self.assertTrue(
+ link_map.exists_path_from(
+ chain_map[event.event_id], chain_map[create.event_id]
+ ),
+ )
+
+ self.assertFalse(
+ link_map.exists_path_from(
+ chain_map[create.event_id], chain_map[event.event_id],
+ ),
+ )
+
+ def test_out_of_order_events(self):
+ """Test that we handle persisting events that we don't have the full
+ auth chain for yet (which should only happen for out of band memberships).
+ """
+ event_factory = self.hs.get_event_builder_factory()
+ bob = "@creator:test"
+ alice = "@alice:test"
+ room_id = "!room:test"
+
+ # Ensure that we have a rooms entry so that we generate the chain index.
+ self.get_success(
+ self.store.store_room(
+ room_id=room_id,
+ room_creator_user_id="",
+ is_public=True,
+ room_version=RoomVersions.V6,
+ )
+ )
+
+ # First persist the base room.
+ create = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.Create,
+ "state_key": "",
+ "sender": bob,
+ "room_id": room_id,
+ "content": {"tag": "create"},
+ },
+ ).build(prev_event_ids=[], auth_event_ids=[])
+ )
+
+ bob_join = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.Member,
+ "state_key": bob,
+ "sender": bob,
+ "room_id": room_id,
+ "content": {"tag": "bob_join"},
+ },
+ ).build(prev_event_ids=[], auth_event_ids=[create.event_id])
+ )
+
+ power = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.PowerLevels,
+ "state_key": "",
+ "sender": bob,
+ "room_id": room_id,
+ "content": {"tag": "power"},
+ },
+ ).build(
+ prev_event_ids=[], auth_event_ids=[create.event_id, bob_join.event_id],
+ )
+ )
+
+ self.persist([create, bob_join, power])
+
+ # Now persist an invite and a couple of memberships out of order.
+ alice_invite = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.Member,
+ "state_key": alice,
+ "sender": bob,
+ "room_id": room_id,
+ "content": {"tag": "alice_invite"},
+ },
+ ).build(
+ prev_event_ids=[],
+ auth_event_ids=[create.event_id, bob_join.event_id, power.event_id],
+ )
+ )
+
+ alice_join = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.Member,
+ "state_key": alice,
+ "sender": alice,
+ "room_id": room_id,
+ "content": {"tag": "alice_join"},
+ },
+ ).build(
+ prev_event_ids=[],
+ auth_event_ids=[create.event_id, alice_invite.event_id, power.event_id],
+ )
+ )
+
+ alice_join2 = self.get_success(
+ event_factory.for_room_version(
+ RoomVersions.V6,
+ {
+ "type": EventTypes.Member,
+ "state_key": alice,
+ "sender": alice,
+ "room_id": room_id,
+ "content": {"tag": "alice_join2"},
+ },
+ ).build(
+ prev_event_ids=[],
+ auth_event_ids=[create.event_id, alice_join.event_id, power.event_id],
+ )
+ )
+
+ self.persist([alice_join])
+ self.persist([alice_join2])
+ self.persist([alice_invite])
+
+ # The end result should be sane.
+ events = [create, bob_join, power, alice_invite, alice_join]
+
+ chain_map, link_map = self.fetch_chains(events)
+
+ expected_links = [
+ (bob_join, create),
+ (power, create),
+ (power, bob_join),
+ (alice_invite, create),
+ (alice_invite, power),
+ (alice_invite, bob_join),
+ ]
+
+ # Check that the expected links and only the expected links have been
+ # added.
+ self.assertEqual(len(expected_links), len(list(link_map.get_additions())))
+
+ for start, end in expected_links:
+ start_id, start_seq = chain_map[start.event_id]
+ end_id, end_seq = chain_map[end.event_id]
+
+ self.assertIn(
+ (start_seq, end_seq), list(link_map.get_links_between(start_id, end_id))
+ )
+
+ def persist(
+ self, events: List[EventBase],
+ ):
+        """Persist the given events, assigning each a monotonically
+        increasing stream ordering before storing them.
+        """
+
+ persist_events_store = self.hs.get_datastores().persist_events
+
+ for e in events:
+ e.internal_metadata.stream_ordering = self._next_stream_ordering
+ self._next_stream_ordering += 1
+
+ def _persist(txn):
+ # We need to persist the events to the events and state_events
+ # tables.
+ persist_events_store._store_event_txn(txn, [(e, {}) for e in events])
+
+ # Actually call the function that calculates the auth chain stuff.
+ persist_events_store._persist_event_auth_chain_txn(txn, events)
+
+ self.get_success(
+ persist_events_store.db_pool.runInteraction("_persist", _persist,)
+ )
+
+ def fetch_chains(
+ self, events: List[EventBase]
+ ) -> Tuple[Dict[str, Tuple[int, int]], _LinkMap]:
+
+ # Fetch the map from event ID -> (chain ID, sequence number)
+ rows = self.get_success(
+ self.store.db_pool.simple_select_many_batch(
+ table="event_auth_chains",
+ column="event_id",
+ iterable=[e.event_id for e in events],
+ retcols=("event_id", "chain_id", "sequence_number"),
+ keyvalues={},
+ )
+ )
+
+ chain_map = {
+ row["event_id"]: (row["chain_id"], row["sequence_number"]) for row in rows
+ }
+
+ # Fetch all the links and pass them to the _LinkMap.
+ rows = self.get_success(
+ self.store.db_pool.simple_select_many_batch(
+ table="event_auth_chain_links",
+ column="origin_chain_id",
+ iterable=[chain_id for chain_id, _ in chain_map.values()],
+ retcols=(
+ "origin_chain_id",
+ "origin_sequence_number",
+ "target_chain_id",
+ "target_sequence_number",
+ ),
+ keyvalues={},
+ )
+ )
+
+ link_map = _LinkMap()
+ for row in rows:
+ added = link_map.add_link(
+ (row["origin_chain_id"], row["origin_sequence_number"]),
+ (row["target_chain_id"], row["target_sequence_number"]),
+ )
+
+ # We shouldn't have persisted any redundant links
+ self.assertTrue(added)
+
+ return chain_map, link_map
+
+
+class LinkMapTestCase(unittest.TestCase):
+ def test_simple(self):
+ """Basic tests for the LinkMap.
+ """
+ link_map = _LinkMap()
+
+ link_map.add_link((1, 1), (2, 1), new=False)
+ self.assertCountEqual(link_map.get_links_between(1, 2), [(1, 1)])
+ self.assertCountEqual(link_map.get_links_from((1, 1)), [(2, 1)])
+ self.assertCountEqual(link_map.get_additions(), [])
+ self.assertTrue(link_map.exists_path_from((1, 5), (2, 1)))
+ self.assertFalse(link_map.exists_path_from((1, 5), (2, 2)))
+ self.assertTrue(link_map.exists_path_from((1, 5), (1, 1)))
+ self.assertFalse(link_map.exists_path_from((1, 1), (1, 5)))
+
+ # Attempting to add a redundant link is ignored.
+ self.assertFalse(link_map.add_link((1, 4), (2, 1)))
+ self.assertCountEqual(link_map.get_links_between(1, 2), [(1, 1)])
+
+ # Adding new non-redundant links works
+ self.assertTrue(link_map.add_link((1, 3), (2, 3)))
+ self.assertCountEqual(link_map.get_links_between(1, 2), [(1, 1), (3, 3)])
+
+ self.assertTrue(link_map.add_link((2, 5), (1, 3)))
+ self.assertCountEqual(link_map.get_links_between(2, 1), [(5, 3)])
+ self.assertCountEqual(link_map.get_links_between(1, 2), [(1, 1), (3, 3)])
+
+ self.assertCountEqual(link_map.get_additions(), [(1, 3, 2, 3), (2, 5, 1, 3)])
+
+
+class EventChainBackgroundUpdateTestCase(HomeserverTestCase):
+
+ servlets = [
+ admin.register_servlets,
+ room.register_servlets,
+ login.register_servlets,
+ ]
+
+ def prepare(self, reactor, clock, hs):
+ self.store = hs.get_datastore()
+ self.user_id = self.register_user("foo", "pass")
+ self.token = self.login("foo", "pass")
+ self.requester = create_requester(self.user_id)
+
+ def _generate_room(self) -> Tuple[str, List[Set[str]]]:
+ """Insert a room without a chain cover index.
+ """
+ room_id = self.helper.create_room_as(self.user_id, tok=self.token)
+
+ # Mark the room as not having a chain cover index
+ self.get_success(
+ self.store.db_pool.simple_update(
+ table="rooms",
+ keyvalues={"room_id": room_id},
+ updatevalues={"has_auth_chain_index": False},
+ desc="test",
+ )
+ )
+
+ # Create a fork in the DAG with different events.
+ event_handler = self.hs.get_event_creation_handler()
+ latest_event_ids = self.get_success(
+ self.store.get_prev_events_for_room(room_id)
+ )
+ event, context = self.get_success(
+ event_handler.create_event(
+ self.requester,
+ {
+ "type": "some_state_type",
+ "state_key": "",
+ "content": {},
+ "room_id": room_id,
+ "sender": self.user_id,
+ },
+ prev_event_ids=latest_event_ids,
+ )
+ )
+ self.get_success(
+ event_handler.handle_new_client_event(self.requester, event, context)
+ )
+ state1 = set(self.get_success(context.get_current_state_ids()).values())
+
+ event, context = self.get_success(
+ event_handler.create_event(
+ self.requester,
+ {
+ "type": "some_state_type",
+ "state_key": "",
+ "content": {},
+ "room_id": room_id,
+ "sender": self.user_id,
+ },
+ prev_event_ids=latest_event_ids,
+ )
+ )
+ self.get_success(
+ event_handler.handle_new_client_event(self.requester, event, context)
+ )
+ state2 = set(self.get_success(context.get_current_state_ids()).values())
+
+ # Delete the chain cover info.
+
+ def _delete_tables(txn):
+ txn.execute("DELETE FROM event_auth_chains")
+ txn.execute("DELETE FROM event_auth_chain_links")
+
+ self.get_success(self.store.db_pool.runInteraction("test", _delete_tables))
+
+ return room_id, [state1, state2]
+
+ def test_background_update_single_room(self):
+ """Test that the background update to calculate auth chains for historic
+ rooms works correctly.
+ """
+
+ # Create a room
+ room_id, states = self._generate_room()
+
+ # Insert and run the background update.
+ self.get_success(
+ self.store.db_pool.simple_insert(
+ "background_updates",
+ {"update_name": "chain_cover", "progress_json": "{}"},
+ )
+ )
+
+ # Ugh, have to reset this flag
+ self.store.db_pool.updates._all_done = False
+
+ while not self.get_success(
+ self.store.db_pool.updates.has_completed_background_updates()
+ ):
+ self.get_success(
+ self.store.db_pool.updates.do_next_background_update(100), by=0.1
+ )
+
+ # Test that the `has_auth_chain_index` has been set
+ self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id)))
+
+ # Test that calculating the auth chain difference using the newly
+ # calculated chain cover works.
+ self.get_success(
+ self.store.db_pool.runInteraction(
+ "test",
+ self.store._get_auth_chain_difference_using_cover_index_txn,
+ room_id,
+ states,
+ )
+ )
+
+ def test_background_update_multiple_rooms(self):
+ """Test that the background update to calculate auth chains for historic
+ rooms works correctly.
+ """
+ # Create a room
+ room_id1, states1 = self._generate_room()
+ room_id2, states2 = self._generate_room()
+        room_id3, _ = self._generate_room()
+
+ # Insert and run the background update.
+ self.get_success(
+ self.store.db_pool.simple_insert(
+ "background_updates",
+ {"update_name": "chain_cover", "progress_json": "{}"},
+ )
+ )
+
+ # Ugh, have to reset this flag
+ self.store.db_pool.updates._all_done = False
+
+ while not self.get_success(
+ self.store.db_pool.updates.has_completed_background_updates()
+ ):
+ self.get_success(
+ self.store.db_pool.updates.do_next_background_update(100), by=0.1
+ )
+
+ # Test that the `has_auth_chain_index` has been set
+ self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id1)))
+ self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id2)))
+ self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id3)))
+
+ # Test that calculating the auth chain difference using the newly
+ # calculated chain cover works.
+ self.get_success(
+ self.store.db_pool.runInteraction(
+ "test",
+ self.store._get_auth_chain_difference_using_cover_index_txn,
+ room_id1,
+ states1,
+ )
+ )
+
+ def test_background_update_single_large_room(self):
+ """Test that the background update to calculate auth chains for historic
+ rooms works correctly.
+ """
+
+ # Create a room
+ room_id, states = self._generate_room()
+
+ # Add a bunch of state so that it takes multiple iterations of the
+ # background update to process the room.
+ for i in range(0, 150):
+ self.helper.send_state(
+ room_id, event_type="m.test", body={"index": i}, tok=self.token
+ )
+
+ # Insert and run the background update.
+ self.get_success(
+ self.store.db_pool.simple_insert(
+ "background_updates",
+ {"update_name": "chain_cover", "progress_json": "{}"},
+ )
+ )
+
+ # Ugh, have to reset this flag
+ self.store.db_pool.updates._all_done = False
+
+ iterations = 0
+ while not self.get_success(
+ self.store.db_pool.updates.has_completed_background_updates()
+ ):
+ iterations += 1
+ self.get_success(
+ self.store.db_pool.updates.do_next_background_update(100), by=0.1
+ )
+
+ # Ensure that we did actually take multiple iterations to process the
+ # room.
+ self.assertGreater(iterations, 1)
+
+ # Test that the `has_auth_chain_index` has been set
+ self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id)))
+
+ # Test that calculating the auth chain difference using the newly
+ # calculated chain cover works.
+ self.get_success(
+ self.store.db_pool.runInteraction(
+ "test",
+ self.store._get_auth_chain_difference_using_cover_index_txn,
+ room_id,
+ states,
+ )
+ )
+
+ def test_background_update_multiple_large_room(self):
+ """Test that the background update to calculate auth chains for historic
+ rooms works correctly.
+ """
+
+ # Create the rooms
+ room_id1, _ = self._generate_room()
+ room_id2, _ = self._generate_room()
+
+ # Add a bunch of state so that it takes multiple iterations of the
+ # background update to process the room.
+ for i in range(0, 150):
+ self.helper.send_state(
+ room_id1, event_type="m.test", body={"index": i}, tok=self.token
+ )
+
+ for i in range(0, 150):
+ self.helper.send_state(
+ room_id2, event_type="m.test", body={"index": i}, tok=self.token
+ )
+
+ # Insert and run the background update.
+ self.get_success(
+ self.store.db_pool.simple_insert(
+ "background_updates",
+ {"update_name": "chain_cover", "progress_json": "{}"},
+ )
+ )
+
+ # Ugh, have to reset this flag
+ self.store.db_pool.updates._all_done = False
+
+ iterations = 0
+ while not self.get_success(
+ self.store.db_pool.updates.has_completed_background_updates()
+ ):
+ iterations += 1
+ self.get_success(
+ self.store.db_pool.updates.do_next_background_update(100), by=0.1
+ )
+
+ # Ensure that we did actually take multiple iterations to process the
+ # room.
+ self.assertGreater(iterations, 1)
+
+ # Test that the `has_auth_chain_index` has been set
+ self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id1)))
+ self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id2)))
diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py
index 482506d731..9d04a066d8 100644
--- a/tests/storage/test_event_federation.py
+++ b/tests/storage/test_event_federation.py
@@ -13,6 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import attr
+from parameterized import parameterized
+
+from synapse.events import _EventInternalMetadata
+
import tests.unittest
import tests.utils
@@ -113,7 +118,8 @@ class EventFederationWorkerStoreTestCase(tests.unittest.HomeserverTestCase):
r = self.get_success(self.store.get_rooms_with_many_extremities(5, 1, [room1]))
self.assertTrue(r == [room2] or r == [room3])
- def test_auth_difference(self):
+ @parameterized.expand([(True,), (False,)])
+ def test_auth_difference(self, use_chain_cover_index: bool):
room_id = "@ROOM:local"
# The silly auth graph we use to test the auth difference algorithm,
@@ -159,46 +165,223 @@ class EventFederationWorkerStoreTestCase(tests.unittest.HomeserverTestCase):
"j": 1,
}
+ # Mark the room as not having a cover index
+
+ def store_room(txn):
+ self.store.db_pool.simple_insert_txn(
+ txn,
+ "rooms",
+ {
+ "room_id": room_id,
+ "creator": "room_creator_user_id",
+ "is_public": True,
+ "room_version": "6",
+ "has_auth_chain_index": use_chain_cover_index,
+ },
+ )
+
+ self.get_success(self.store.db_pool.runInteraction("store_room", store_room))
+
# We rudely fiddle with the appropriate tables directly, as that's much
# easier than constructing events properly.
- def insert_event(txn, event_id, stream_ordering):
+ def insert_event(txn):
+ stream_ordering = 0
+
+ for event_id in auth_graph:
+ stream_ordering += 1
+ depth = depth_map[event_id]
+
+ self.store.db_pool.simple_insert_txn(
+ txn,
+ table="events",
+ values={
+ "event_id": event_id,
+ "room_id": room_id,
+ "depth": depth,
+ "topological_ordering": depth,
+ "type": "m.test",
+ "processed": True,
+ "outlier": False,
+ "stream_ordering": stream_ordering,
+ },
+ )
+
+ self.hs.datastores.persist_events._persist_event_auth_chain_txn(
+ txn,
+ [
+ FakeEvent(event_id, room_id, auth_graph[event_id])
+ for event_id in auth_graph
+ ],
+ )
+
+ self.get_success(self.store.db_pool.runInteraction("insert", insert_event,))
+
+ # Now actually test that various combinations give the right result:
+
+ difference = self.get_success(
+ self.store.get_auth_chain_difference(room_id, [{"a"}, {"b"}])
+ )
+ self.assertSetEqual(difference, {"a", "b"})
+
+ difference = self.get_success(
+ self.store.get_auth_chain_difference(room_id, [{"a"}, {"b"}, {"c"}])
+ )
+ self.assertSetEqual(difference, {"a", "b", "c", "e", "f"})
+
+ difference = self.get_success(
+ self.store.get_auth_chain_difference(room_id, [{"a", "c"}, {"b"}])
+ )
+ self.assertSetEqual(difference, {"a", "b", "c"})
+
+ difference = self.get_success(
+ self.store.get_auth_chain_difference(room_id, [{"a", "c"}, {"b", "c"}])
+ )
+ self.assertSetEqual(difference, {"a", "b"})
+
+ difference = self.get_success(
+ self.store.get_auth_chain_difference(room_id, [{"a"}, {"b"}, {"d"}])
+ )
+ self.assertSetEqual(difference, {"a", "b", "d", "e"})
+
+ difference = self.get_success(
+ self.store.get_auth_chain_difference(room_id, [{"a"}, {"b"}, {"c"}, {"d"}])
+ )
+ self.assertSetEqual(difference, {"a", "b", "c", "d", "e", "f"})
+
+ difference = self.get_success(
+ self.store.get_auth_chain_difference(room_id, [{"a"}, {"b"}, {"e"}])
+ )
+ self.assertSetEqual(difference, {"a", "b"})
+
+ difference = self.get_success(
+ self.store.get_auth_chain_difference(room_id, [{"a"}])
+ )
+ self.assertSetEqual(difference, set())
+
+ def test_auth_difference_partial_cover(self):
+ """Test that we correctly handle rooms where not all events have a chain
+ cover calculated. This can happen in some obscure edge cases, including
+ during the background update that calculates the chain cover for old
+ rooms.
+ """
+
+ room_id = "@ROOM:local"
+
+ # The silly auth graph we use to test the auth difference algorithm,
+ # where the top are the most recent events.
+ #
+ # A B
+ # \ /
+ # D E
+ # \ |
+ # ` F C
+ # | /|
+        #   G ´ |
+ # | \ |
+ # H I
+ # | |
+ # K J
+
+ auth_graph = {
+ "a": ["e"],
+ "b": ["e"],
+ "c": ["g", "i"],
+ "d": ["f"],
+ "e": ["f"],
+ "f": ["g"],
+ "g": ["h", "i"],
+ "h": ["k"],
+ "i": ["j"],
+ "k": [],
+ "j": [],
+ }
+
+ depth_map = {
+ "a": 7,
+ "b": 7,
+ "c": 4,
+ "d": 6,
+ "e": 6,
+ "f": 5,
+ "g": 3,
+ "h": 2,
+ "i": 2,
+ "k": 1,
+ "j": 1,
+ }
- depth = depth_map[event_id]
+ # We rudely fiddle with the appropriate tables directly, as that's much
+ # easier than constructing events properly.
+ def insert_event(txn):
+ # First insert the room and mark it as having a chain cover.
self.store.db_pool.simple_insert_txn(
txn,
- table="events",
- values={
- "event_id": event_id,
+ "rooms",
+ {
"room_id": room_id,
- "depth": depth,
- "topological_ordering": depth,
- "type": "m.test",
- "processed": True,
- "outlier": False,
- "stream_ordering": stream_ordering,
+ "creator": "room_creator_user_id",
+ "is_public": True,
+ "room_version": "6",
+ "has_auth_chain_index": True,
},
)
- self.store.db_pool.simple_insert_many_txn(
+ stream_ordering = 0
+
+ for event_id in auth_graph:
+ stream_ordering += 1
+ depth = depth_map[event_id]
+
+ self.store.db_pool.simple_insert_txn(
+ txn,
+ table="events",
+ values={
+ "event_id": event_id,
+ "room_id": room_id,
+ "depth": depth,
+ "topological_ordering": depth,
+ "type": "m.test",
+ "processed": True,
+ "outlier": False,
+ "stream_ordering": stream_ordering,
+ },
+ )
+
+ # Insert all events apart from 'B'
+ self.hs.datastores.persist_events._persist_event_auth_chain_txn(
txn,
- table="event_auth",
- values=[
- {"event_id": event_id, "room_id": room_id, "auth_id": a}
- for a in auth_graph[event_id]
+ [
+ FakeEvent(event_id, room_id, auth_graph[event_id])
+ for event_id in auth_graph
+ if event_id != "b"
],
)
- next_stream_ordering = 0
- for event_id in auth_graph:
- next_stream_ordering += 1
- self.get_success(
- self.store.db_pool.runInteraction(
- "insert", insert_event, event_id, next_stream_ordering
- )
+ # Now we insert the event 'B' without a chain cover, by temporarily
+ # pretending the room doesn't have a chain cover.
+
+ self.store.db_pool.simple_update_txn(
+ txn,
+ table="rooms",
+ keyvalues={"room_id": room_id},
+ updatevalues={"has_auth_chain_index": False},
+ )
+
+ self.hs.datastores.persist_events._persist_event_auth_chain_txn(
+ txn, [FakeEvent("b", room_id, auth_graph["b"])],
+ )
+
+ self.store.db_pool.simple_update_txn(
+ txn,
+ table="rooms",
+ keyvalues={"room_id": room_id},
+ updatevalues={"has_auth_chain_index": True},
)
+ self.get_success(self.store.db_pool.runInteraction("insert", insert_event,))
+
# Now actually test that various combinations give the right result:
difference = self.get_success(
@@ -240,3 +423,21 @@ class EventFederationWorkerStoreTestCase(tests.unittest.HomeserverTestCase):
self.store.get_auth_chain_difference(room_id, [{"a"}])
)
self.assertSetEqual(difference, set())
+
+
+@attr.s
+class FakeEvent:
+ event_id = attr.ib()
+ room_id = attr.ib()
+ auth_events = attr.ib()
+
+ type = "foo"
+ state_key = "foo"
+
+ internal_metadata = _EventInternalMetadata({})
+
+ def auth_event_ids(self):
+ return self.auth_events
+
+ def is_state(self):
+ return True
diff --git a/tests/storage/test_id_generators.py b/tests/storage/test_id_generators.py
index cc0612cf65..3e2fd4da01 100644
--- a/tests/storage/test_id_generators.py
+++ b/tests/storage/test_id_generators.py
@@ -51,9 +51,7 @@ class MultiWriterIdGeneratorTestCase(HomeserverTestCase):
self.db_pool,
stream_name="test_stream",
instance_name=instance_name,
- table="foobar",
- instance_column="instance_name",
- id_column="stream_id",
+ tables=[("foobar", "instance_name", "stream_id")],
sequence_name="foobar_seq",
writers=writers,
)
@@ -487,9 +485,7 @@ class BackwardsMultiWriterIdGeneratorTestCase(HomeserverTestCase):
self.db_pool,
stream_name="test_stream",
instance_name=instance_name,
- table="foobar",
- instance_column="instance_name",
- id_column="stream_id",
+ tables=[("foobar", "instance_name", "stream_id")],
sequence_name="foobar_seq",
writers=writers,
positive=False,
@@ -579,3 +575,107 @@ class BackwardsMultiWriterIdGeneratorTestCase(HomeserverTestCase):
self.assertEqual(id_gen_2.get_positions(), {"first": -1, "second": -2})
self.assertEqual(id_gen_1.get_persisted_upto_position(), -2)
self.assertEqual(id_gen_2.get_persisted_upto_position(), -2)
+
+
+class MultiTableMultiWriterIdGeneratorTestCase(HomeserverTestCase):
+ if not USE_POSTGRES_FOR_TESTS:
+ skip = "Requires Postgres"
+
+ def prepare(self, reactor, clock, hs):
+ self.store = hs.get_datastore()
+ self.db_pool = self.store.db_pool # type: DatabasePool
+
+ self.get_success(self.db_pool.runInteraction("_setup_db", self._setup_db))
+
+ def _setup_db(self, txn):
+ txn.execute("CREATE SEQUENCE foobar_seq")
+ txn.execute(
+ """
+ CREATE TABLE foobar1 (
+ stream_id BIGINT NOT NULL,
+ instance_name TEXT NOT NULL,
+ data TEXT
+ );
+ """
+ )
+
+ txn.execute(
+ """
+ CREATE TABLE foobar2 (
+ stream_id BIGINT NOT NULL,
+ instance_name TEXT NOT NULL,
+ data TEXT
+ );
+ """
+ )
+
+ def _create_id_generator(
+ self, instance_name="master", writers=["master"]
+ ) -> MultiWriterIdGenerator:
+ def _create(conn):
+ return MultiWriterIdGenerator(
+ conn,
+ self.db_pool,
+ stream_name="test_stream",
+ instance_name=instance_name,
+ tables=[
+ ("foobar1", "instance_name", "stream_id"),
+ ("foobar2", "instance_name", "stream_id"),
+ ],
+ sequence_name="foobar_seq",
+ writers=writers,
+ )
+
+ return self.get_success_or_raise(self.db_pool.runWithConnection(_create))
+
+ def _insert_rows(
+ self,
+ table: str,
+ instance_name: str,
+ number: int,
+ update_stream_table: bool = True,
+ ):
+ """Insert N rows as the given instance, inserting with stream IDs pulled
+ from the postgres sequence.
+ """
+
+ def _insert(txn):
+ for _ in range(number):
+ txn.execute(
+ "INSERT INTO %s VALUES (nextval('foobar_seq'), ?)" % (table,),
+ (instance_name,),
+ )
+ if update_stream_table:
+ txn.execute(
+ """
+ INSERT INTO stream_positions VALUES ('test_stream', ?, lastval())
+ ON CONFLICT (stream_name, instance_name) DO UPDATE SET stream_id = lastval()
+ """,
+ (instance_name,),
+ )
+
+ self.get_success(self.db_pool.runInteraction("_insert_rows", _insert))
+
+ def test_load_existing_stream(self):
+ """Test creating ID gens with multiple tables that have rows from after
+ the position in `stream_positions` table.
+ """
+ self._insert_rows("foobar1", "first", 3)
+ self._insert_rows("foobar2", "second", 3)
+ self._insert_rows("foobar2", "second", 1, update_stream_table=False)
+
+ first_id_gen = self._create_id_generator("first", writers=["first", "second"])
+ second_id_gen = self._create_id_generator("second", writers=["first", "second"])
+
+ # The first ID gen will notice that it can advance its token to 7 as it
+ # has no in progress writes...
+ self.assertEqual(first_id_gen.get_positions(), {"first": 7, "second": 6})
+ self.assertEqual(first_id_gen.get_current_token_for_writer("first"), 7)
+ self.assertEqual(first_id_gen.get_current_token_for_writer("second"), 6)
+ self.assertEqual(first_id_gen.get_persisted_upto_position(), 7)
+
+ # ... but the second ID gen doesn't know that.
+ self.assertEqual(second_id_gen.get_positions(), {"first": 3, "second": 7})
+ self.assertEqual(second_id_gen.get_current_token_for_writer("first"), 3)
+ self.assertEqual(second_id_gen.get_current_token_for_writer("second"), 7)
+        self.assertEqual(second_id_gen.get_persisted_upto_position(), 7)
diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py
index 3fd0a38cf5..ea63bd56b4 100644
--- a/tests/storage/test_profile.py
+++ b/tests/storage/test_profile.py
@@ -48,6 +48,19 @@ class ProfileStoreTestCase(unittest.TestCase):
),
)
+ # test set to None
+ yield defer.ensureDeferred(
+ self.store.set_profile_displayname(self.u_frank.localpart, None)
+ )
+
+ self.assertIsNone(
+ (
+ yield defer.ensureDeferred(
+ self.store.get_profile_displayname(self.u_frank.localpart)
+ )
+ )
+ )
+
@defer.inlineCallbacks
def test_avatar_url(self):
yield defer.ensureDeferred(self.store.create_profile(self.u_frank.localpart))
@@ -66,3 +79,16 @@ class ProfileStoreTestCase(unittest.TestCase):
)
),
)
+
+ # test set to None
+ yield defer.ensureDeferred(
+ self.store.set_profile_avatar_url(self.u_frank.localpart, None)
+ )
+
+ self.assertIsNone(
+ (
+ yield defer.ensureDeferred(
+ self.store.get_profile_avatar_url(self.u_frank.localpart)
+ )
+ )
+ )
diff --git a/tests/test_types.py b/tests/test_types.py
index 480bea1bdc..acdeea7a09 100644
--- a/tests/test_types.py
+++ b/tests/test_types.py
@@ -58,6 +58,10 @@ class RoomAliasTestCase(unittest.HomeserverTestCase):
self.assertEquals(room.to_string(), "#channel:my.domain")
+ def test_validate(self):
+ id_string = "#test:domain,test"
+ self.assertFalse(RoomAlias.is_valid(id_string))
+
class GroupIDTestCase(unittest.TestCase):
def test_parse(self):
diff --git a/tests/test_utils/html_parsers.py b/tests/test_utils/html_parsers.py
new file mode 100644
index 0000000000..ad563eb3f0
--- /dev/null
+++ b/tests/test_utils/html_parsers.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from html.parser import HTMLParser
+from typing import Dict, Iterable, List, Optional, Tuple
+
+
+class TestHtmlParser(HTMLParser):
+ """A generic HTML page parser which extracts useful things from the HTML"""
+
+ def __init__(self):
+ super().__init__()
+
+ # a list of links found in the doc
+ self.links = [] # type: List[str]
+
+ # the values of any hidden <input>s: map from name to value
+ self.hiddens = {} # type: Dict[str, Optional[str]]
+
+ # the values of any radio buttons: map from name to list of values
+ self.radios = {} # type: Dict[str, List[Optional[str]]]
+
+ def handle_starttag(
+ self, tag: str, attrs: Iterable[Tuple[str, Optional[str]]]
+ ) -> None:
+ attr_dict = dict(attrs)
+        if tag == "a":
+            href = attr_dict.get("href")
+            if href:
+                self.links.append(href)
+        elif tag == "input":
+            input_name = attr_dict.get("name")
+            if attr_dict.get("type") == "radio":
+                assert input_name
+                self.radios.setdefault(input_name, []).append(attr_dict.get("value"))
+            elif attr_dict.get("type") == "hidden":
+                assert input_name
+                self.hiddens[input_name] = attr_dict.get("value")
+
+ def error(_, message):
+ raise AssertionError(message)
diff --git a/tests/util/test_itertools.py b/tests/util/test_itertools.py
index 0ab0a91483..522c8061f9 100644
--- a/tests/util/test_itertools.py
+++ b/tests/util/test_itertools.py
@@ -12,7 +12,9 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from synapse.util.iterutils import chunk_seq
+from typing import Dict, List
+
+from synapse.util.iterutils import chunk_seq, sorted_topologically
from tests.unittest import TestCase
@@ -45,3 +47,48 @@ class ChunkSeqTests(TestCase):
self.assertEqual(
list(parts), [],
)
+
+
+class SortTopologically(TestCase):
+ def test_empty(self):
+ "Test that an empty graph works correctly"
+
+ graph = {} # type: Dict[int, List[int]]
+ self.assertEqual(list(sorted_topologically([], graph)), [])
+
+ def test_handle_empty_graph(self):
+ "Test that a graph where a node doesn't have an entry is treated as empty"
+
+ graph = {} # type: Dict[int, List[int]]
+
+ # For disconnected nodes the output is simply sorted.
+ self.assertEqual(list(sorted_topologically([1, 2], graph)), [1, 2])
+
+ def test_disconnected(self):
+ "Test that a graph with no edges work"
+
+ graph = {1: [], 2: []} # type: Dict[int, List[int]]
+
+ # For disconnected nodes the output is simply sorted.
+ self.assertEqual(list(sorted_topologically([1, 2], graph)), [1, 2])
+
+ def test_linear(self):
+ "Test that a simple `4 -> 3 -> 2 -> 1` graph works"
+
+ graph = {1: [], 2: [1], 3: [2], 4: [3]} # type: Dict[int, List[int]]
+
+ self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])
+
+ def test_subset(self):
+ "Test that only sorting a subset of the graph works"
+ graph = {1: [], 2: [1], 3: [2], 4: [3]} # type: Dict[int, List[int]]
+
+ self.assertEqual(list(sorted_topologically([4, 3], graph)), [3, 4])
+
+ def test_fork(self):
+ "Test that a forked graph works"
+ graph = {1: [], 2: [1], 3: [1], 4: [2, 3]} # type: Dict[int, List[int]]
+
+ # Valid orderings are `[1, 3, 2, 4]` or `[1, 2, 3, 4]`, but we should
+ # always get the same one.
+ self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])
diff --git a/tests/utils.py b/tests/utils.py
index 977eeaf6ee..09614093bc 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -159,7 +159,6 @@ def default_config(name, parse=False):
"remote": {"per_second": 10000, "burst_count": 10000},
},
"saml2_enabled": False,
- "public_baseurl": None,
"default_identity_server": None,
"key_refresh_interval": 24 * 60 * 60 * 1000,
"old_signing_keys": {},
|