From 2e052110ee0bca17b8e27b6b48ee8b7c64bc94ae Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 23 May 2019 11:45:39 +0100 Subject: Rewrite store_server_verify_key to store several keys at once (#5234) Storing server keys hammered the database a bit. This replaces the implementation which stored a single key, with one which can do many updates at once. --- tests/crypto/test_keyring.py | 14 ++++++++++++-- tests/storage/test_keys.py | 44 ++++++++++++++++++++++++++++++-------------- 2 files changed, 42 insertions(+), 16 deletions(-) (limited to 'tests') diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 3c79d4afe7..bcffe53a91 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -192,8 +192,18 @@ class KeyringTestCase(unittest.HomeserverTestCase): kr = keyring.Keyring(self.hs) key1 = signedjson.key.generate_signing_key(1) - r = self.hs.datastore.store_server_verify_key( - "server9", "", time.time() * 1000, signedjson.key.get_verify_key(key1) + key1_id = "%s:%s" % (key1.alg, key1.version) + + r = self.hs.datastore.store_server_verify_keys( + "server9", + time.time() * 1000, + [ + ( + "server9", + key1_id, + signedjson.key.get_verify_key(key1), + ), + ], ) self.get_success(r) json1 = {} diff --git a/tests/storage/test_keys.py b/tests/storage/test_keys.py index 6bfaa00fe9..71ad7aee32 100644 --- a/tests/storage/test_keys.py +++ b/tests/storage/test_keys.py @@ -31,23 +31,32 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase): def test_get_server_verify_keys(self): store = self.hs.get_datastore() - d = store.store_server_verify_key("server1", "from_server", 0, KEY_1) - self.get_success(d) - d = store.store_server_verify_key("server1", "from_server", 0, KEY_2) + key_id_1 = "ed25519:key1" + key_id_2 = "ed25519:KEY_ID_2" + d = store.store_server_verify_keys( + "from_server", + 10, + [ + ("server1", key_id_1, KEY_1), + ("server1", key_id_2, KEY_2), + ], + ) self.get_success(d) d = store.get_server_verify_keys( - [ - ("server1", "ed25519:key1"), - ("server1", "ed25519:key2"), - ("server1", "ed25519:key3"), - ] + [("server1", key_id_1), ("server1", key_id_2), ("server1", "ed25519:key3")] ) res = self.get_success(d) self.assertEqual(len(res.keys()), 3) - self.assertEqual(res[("server1", "ed25519:key1")].version, "key1") - self.assertEqual(res[("server1", "ed25519:key2")].version, "key2") + res1 = res[("server1", key_id_1)] + self.assertEqual(res1, KEY_1) + self.assertEqual(res1.version, "key1") + + res2 = res[("server1", key_id_2)] + self.assertEqual(res2, KEY_2) + # version comes from the ID it was stored with + self.assertEqual(res2.version, "KEY_ID_2") # non-existent result gives None self.assertIsNone(res[("server1", "ed25519:key3")]) @@ -60,9 +69,14 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase): key_id_1 = "ed25519:key1" key_id_2 = "ed25519:key2" - d = store.store_server_verify_key("srv1", "from_server", 0, KEY_1) - self.get_success(d) - d = store.store_server_verify_key("srv1", "from_server", 0, KEY_2) + d = store.store_server_verify_keys( + "from_server", + 0, + [ + ("srv1", key_id_1, KEY_1), + ("srv1", key_id_2, KEY_2), + ], + ) self.get_success(d) d = store.get_server_verify_keys([("srv1", key_id_1), ("srv1", key_id_2)]) @@ -81,7 +95,9 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase): new_key_2 = signedjson.key.get_verify_key( signedjson.key.generate_signing_key("key2") ) - d = store.store_server_verify_key("srv1", "from_server", 10, new_key_2) + d = 
store.store_server_verify_keys( + "from_server", 10, [("srv1", key_id_2, new_key_2)] + ) self.get_success(d) d = store.get_server_verify_keys([("srv1", key_id_1), ("srv1", key_id_2)]) -- cgit 1.5.1 From b75537beaf841089f9f07c9dbed04a7a420a8b1f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 3 Apr 2019 18:10:24 +0100 Subject: Store key validity time in the storage layer This is a first step to checking that the key is valid at the required moment. The idea here is that, rather than passing VerifyKey objects in and out of the storage layer, we instead pass FetchKeyResult objects, which simply wrap the VerifyKey and add a valid_until_ts field. --- changelog.d/5237.misc | 1 + synapse/crypto/keyring.py | 47 +++++++++++++++------- synapse/storage/keys.py | 31 +++++++++----- .../delta/54/add_validity_to_server_keys.sql | 23 +++++++++++ tests/crypto/test_keyring.py | 22 ++++++---- tests/storage/test_keys.py | 44 +++++++++++++------- 6 files changed, 122 insertions(+), 46 deletions(-) create mode 100644 changelog.d/5237.misc create mode 100644 synapse/storage/schema/delta/54/add_validity_to_server_keys.sql (limited to 'tests') diff --git a/changelog.d/5237.misc b/changelog.d/5237.misc new file mode 100644 index 0000000000..f4fe3b821b --- /dev/null +++ b/changelog.d/5237.misc @@ -0,0 +1 @@ +Store key validity time in the storage layer. diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 9d629b2238..14a27288fd 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -20,7 +20,6 @@ from collections import namedtuple from six import raise_from from six.moves import urllib -import nacl.signing from signedjson.key import ( decode_verify_key_bytes, encode_verify_key_base64, @@ -43,6 +42,7 @@ from synapse.api.errors import ( RequestSendFailed, SynapseError, ) +from synapse.storage.keys import FetchKeyResult from synapse.util import logcontext, unwrapFirstError from synapse.util.logcontext import ( LoggingContext, @@ -307,11 +307,15 @@ class Keyring(object): # complete this VerifyKeyRequest. 
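# results maps server_name -> key_id -> FetchKeyResult; an entry may be None if the key was not found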
result_keys = results.get(server_name, {}) for key_id in verify_request.key_ids: - key = result_keys.get(key_id) - if key: + fetch_key_result = result_keys.get(key_id) + if fetch_key_result: with PreserveLoggingContext(): verify_request.deferred.callback( - (server_name, key_id, key) + ( + server_name, + key_id, + fetch_key_result.verify_key, + ) ) break else: @@ -348,12 +352,12 @@ class Keyring(object): def get_keys_from_store(self, server_name_and_key_ids): """ Args: - server_name_and_key_ids (iterable(Tuple[str, iterable[str]]): + server_name_and_key_ids (iterable[Tuple[str, iterable[str]]]): list of (server_name, iterable[key_id]) tuples to fetch keys for Returns: - Deferred: resolves to dict[str, dict[str, VerifyKey|None]]: map from - server_name -> key_id -> VerifyKey + Deferred[dict[str, dict[str, synapse.storage.keys.FetchKeyResult|None]]]: + map from server_name -> key_id -> FetchKeyResult """ keys_to_fetch = ( (server_name, key_id) @@ -430,6 +434,18 @@ class Keyring(object): def get_server_verify_key_v2_indirect( self, server_names_and_key_ids, perspective_name, perspective_keys ): + """ + Args: + server_names_and_key_ids (iterable[Tuple[str, iterable[str]]]): + list of (server_name, iterable[key_id]) tuples to fetch keys for + perspective_name (str): name of the notary server to query for the keys + perspective_keys (dict[str, VerifyKey]): map of key_id->key for the + notary server + + Returns: + Deferred[dict[str, dict[str, synapse.storage.keys.FetchKeyResult]]]: map + from server_name -> key_id -> FetchKeyResult + """ # TODO(mark): Set the minimum_valid_until_ts to that needed by # the events being validated or the current time if validating # an incoming request. @@ -506,7 +522,7 @@ class Keyring(object): @defer.inlineCallbacks def get_server_verify_key_v2_direct(self, server_name, key_ids): - keys = {} # type: dict[str, nacl.signing.VerifyKey] + keys = {} # type: dict[str, FetchKeyResult] for requested_key_id in key_ids: if requested_key_id in keys: @@ -583,9 +599,9 @@ class Keyring(object): actually in the response Returns: - Deferred[dict[str, nacl.signing.VerifyKey]]: - map from key_id to key object + Deferred[dict[str, FetchKeyResult]]: map from key_id to result object """ + ts_valid_until_ms = response_json[u"valid_until_ts"] # start by extracting the keys from the response, since they may be required # to validate the signature on the response. 
@@ -595,7 +611,9 @@ class Keyring(object): key_base64 = key_data["key"] key_bytes = decode_base64(key_base64) verify_key = decode_verify_key_bytes(key_id, key_bytes) - verify_keys[key_id] = verify_key + verify_keys[key_id] = FetchKeyResult( + verify_key=verify_key, valid_until_ts=ts_valid_until_ms + ) # TODO: improve this signature checking server_name = response_json["server_name"] @@ -606,7 +624,7 @@ class Keyring(object): ) verify_signed_json( - response_json, server_name, verify_keys[key_id] + response_json, server_name, verify_keys[key_id].verify_key ) for key_id, key_data in response_json["old_verify_keys"].items(): @@ -614,7 +632,9 @@ class Keyring(object): key_base64 = key_data["key"] key_bytes = decode_base64(key_base64) verify_key = decode_verify_key_bytes(key_id, key_bytes) - verify_keys[key_id] = verify_key + verify_keys[key_id] = FetchKeyResult( + verify_key=verify_key, valid_until_ts=key_data["expired_ts"] + ) # re-sign the json with our own key, so that it is ready if we are asked to # give it out as a notary server @@ -623,7 +643,6 @@ class Keyring(object): ) signed_key_json_bytes = encode_canonical_json(signed_key_json) - ts_valid_until_ms = signed_key_json[u"valid_until_ts"] # for reasons I don't quite understand, we store this json for the key ids we # requested, as well as those we got. diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py index 3c5f52009b..5300720dbb 100644 --- a/synapse/storage/keys.py +++ b/synapse/storage/keys.py @@ -19,6 +19,7 @@ import logging import six +import attr from signedjson.key import decode_verify_key_bytes from synapse.util import batch_iter @@ -36,6 +37,12 @@ else: db_binary_type = memoryview +@attr.s(slots=True, frozen=True) +class FetchKeyResult(object): + verify_key = attr.ib() # VerifyKey: the key itself + valid_until_ts = attr.ib() # int: how long we can use this key for + + class KeyStore(SQLBaseStore): """Persistence for signature verification keys """ @@ -54,8 +61,8 @@ class KeyStore(SQLBaseStore): iterable of (server_name, key-id) tuples to fetch keys for Returns: - Deferred: resolves to dict[Tuple[str, str], VerifyKey|None]: - map from (server_name, key_id) -> VerifyKey, or None if the key is + Deferred: resolves to dict[Tuple[str, str], FetchKeyResult|None]: + map from (server_name, key_id) -> FetchKeyResult, or None if the key is unknown """ keys = {} @@ -65,17 +72,19 @@ class KeyStore(SQLBaseStore): # batch_iter always returns tuples so it's safe to do len(batch) sql = ( - "SELECT server_name, key_id, verify_key FROM server_signature_keys " - "WHERE 1=0" + "SELECT server_name, key_id, verify_key, ts_valid_until_ms " + "FROM server_signature_keys WHERE 1=0" ) + " OR (server_name=? AND key_id=?)" * len(batch) txn.execute(sql, tuple(itertools.chain.from_iterable(batch))) for row in txn: - server_name, key_id, key_bytes = row - keys[(server_name, key_id)] = decode_verify_key_bytes( - key_id, bytes(key_bytes) + server_name, key_id, key_bytes, ts_valid_until_ms = row + res = FetchKeyResult( + verify_key=decode_verify_key_bytes(key_id, bytes(key_bytes)), + valid_until_ts=ts_valid_until_ms, ) + keys[(server_name, key_id)] = res def _txn(txn): for batch in batch_iter(server_name_and_key_ids, 50): @@ -89,20 +98,21 @@ class KeyStore(SQLBaseStore): Args: from_server (str): Where the verification keys were looked up ts_added_ms (int): The time to record that the key was added - verify_keys (iterable[tuple[str, str, nacl.signing.VerifyKey]]): + verify_keys (iterable[tuple[str, str, FetchKeyResult]]): keys to be stored. 
Each entry is a triplet of (server_name, key_id, key). """ key_values = [] value_values = [] invalidations = [] - for server_name, key_id, verify_key in verify_keys: + for server_name, key_id, fetch_result in verify_keys: key_values.append((server_name, key_id)) value_values.append( ( from_server, ts_added_ms, - db_binary_type(verify_key.encode()), + fetch_result.valid_until_ts, + db_binary_type(fetch_result.verify_key.encode()), ) ) # invalidate takes a tuple corresponding to the params of @@ -125,6 +135,7 @@ class KeyStore(SQLBaseStore): value_names=( "from_server", "ts_added_ms", + "ts_valid_until_ms", "verify_key", ), value_values=value_values, diff --git a/synapse/storage/schema/delta/54/add_validity_to_server_keys.sql b/synapse/storage/schema/delta/54/add_validity_to_server_keys.sql new file mode 100644 index 0000000000..c01aa9d2d9 --- /dev/null +++ b/synapse/storage/schema/delta/54/add_validity_to_server_keys.sql @@ -0,0 +1,23 @@ +/* Copyright 2019 New Vector Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* When we can use this key until, before we have to refresh it. */ +ALTER TABLE server_signature_keys ADD COLUMN ts_valid_until_ms BIGINT; + +UPDATE server_signature_keys SET ts_valid_until_ms = ( + SELECT MAX(ts_valid_until_ms) FROM server_keys_json skj WHERE + skj.server_name = server_signature_keys.server_name AND + skj.key_id = server_signature_keys.key_id +); diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index bcffe53a91..83de32b05d 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -25,6 +25,7 @@ from twisted.internet import defer from synapse.api.errors import SynapseError from synapse.crypto import keyring from synapse.crypto.keyring import KeyLookupError +from synapse.storage.keys import FetchKeyResult from synapse.util import logcontext from synapse.util.logcontext import LoggingContext @@ -201,7 +202,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): ( "server9", key1_id, - signedjson.key.get_verify_key(key1), + FetchKeyResult(signedjson.key.get_verify_key(key1), 1000), ), ], ) @@ -251,9 +252,10 @@ class KeyringTestCase(unittest.HomeserverTestCase): server_name_and_key_ids = [(SERVER_NAME, ("key1",))] keys = self.get_success(kr.get_keys_from_server(server_name_and_key_ids)) k = keys[SERVER_NAME][testverifykey_id] - self.assertEqual(k, testverifykey) - self.assertEqual(k.alg, "ed25519") - self.assertEqual(k.version, "ver1") + self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS) + self.assertEqual(k.verify_key, testverifykey) + self.assertEqual(k.verify_key.alg, "ed25519") + self.assertEqual(k.verify_key.version, "ver1") # check that the perspectives store is correctly updated lookup_triplet = (SERVER_NAME, testverifykey_id, None) @@ -321,9 +323,10 @@ class KeyringTestCase(unittest.HomeserverTestCase): keys = self.get_success(kr.get_keys_from_perspectives(server_name_and_key_ids)) self.assertIn(SERVER_NAME, keys) k = keys[SERVER_NAME][testverifykey_id] - self.assertEqual(k, testverifykey) - 
self.assertEqual(k.alg, "ed25519") - self.assertEqual(k.version, "ver1") + self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS) + self.assertEqual(k.verify_key, testverifykey) + self.assertEqual(k.verify_key.alg, "ed25519") + self.assertEqual(k.verify_key.version, "ver1") # check that the perspectives store is correctly updated lookup_triplet = (SERVER_NAME, testverifykey_id, None) @@ -346,7 +349,10 @@ class KeyringTestCase(unittest.HomeserverTestCase): @defer.inlineCallbacks def run_in_context(f, *args, **kwargs): - with LoggingContext("testctx"): + with LoggingContext("testctx") as ctx: + # we set the "request" prop to make it easier to follow what's going on in the + # logs. + ctx.request = "testctx" rv = yield f(*args, **kwargs) defer.returnValue(rv) diff --git a/tests/storage/test_keys.py b/tests/storage/test_keys.py index 71ad7aee32..e07ff01201 100644 --- a/tests/storage/test_keys.py +++ b/tests/storage/test_keys.py @@ -17,6 +17,8 @@ import signedjson.key from twisted.internet.defer import Deferred +from synapse.storage.keys import FetchKeyResult + import tests.unittest KEY_1 = signedjson.key.decode_verify_key_base64( @@ -37,8 +39,8 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase): "from_server", 10, [ - ("server1", key_id_1, KEY_1), - ("server1", key_id_2, KEY_2), + ("server1", key_id_1, FetchKeyResult(KEY_1, 100)), + ("server1", key_id_2, FetchKeyResult(KEY_2, 200)), ], ) self.get_success(d) @@ -50,13 +52,15 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase): self.assertEqual(len(res.keys()), 3) res1 = res[("server1", key_id_1)] - self.assertEqual(res1, KEY_1) - self.assertEqual(res1.version, "key1") + self.assertEqual(res1.verify_key, KEY_1) + self.assertEqual(res1.verify_key.version, "key1") + self.assertEqual(res1.valid_until_ts, 100) res2 = res[("server1", key_id_2)] - self.assertEqual(res2, KEY_2) + self.assertEqual(res2.verify_key, KEY_2) # version comes from the ID it was stored with - self.assertEqual(res2.version, "KEY_ID_2") + self.assertEqual(res2.verify_key.version, "KEY_ID_2") + self.assertEqual(res2.valid_until_ts, 200) # non-existent result gives None self.assertIsNone(res[("server1", "ed25519:key3")]) @@ -73,8 +77,8 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase): "from_server", 0, [ - ("srv1", key_id_1, KEY_1), - ("srv1", key_id_2, KEY_2), + ("srv1", key_id_1, FetchKeyResult(KEY_1, 100)), + ("srv1", key_id_2, FetchKeyResult(KEY_2, 200)), ], ) self.get_success(d) @@ -82,26 +86,38 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase): d = store.get_server_verify_keys([("srv1", key_id_1), ("srv1", key_id_2)]) res = self.get_success(d) self.assertEqual(len(res.keys()), 2) - self.assertEqual(res[("srv1", key_id_1)], KEY_1) - self.assertEqual(res[("srv1", key_id_2)], KEY_2) + + res1 = res[("srv1", key_id_1)] + self.assertEqual(res1.verify_key, KEY_1) + self.assertEqual(res1.valid_until_ts, 100) + + res2 = res[("srv1", key_id_2)] + self.assertEqual(res2.verify_key, KEY_2) + self.assertEqual(res2.valid_until_ts, 200) # we should be able to look up the same thing again without a db hit res = store.get_server_verify_keys([("srv1", key_id_1)]) if isinstance(res, Deferred): res = self.successResultOf(res) self.assertEqual(len(res.keys()), 1) - self.assertEqual(res[("srv1", key_id_1)], KEY_1) + self.assertEqual(res[("srv1", key_id_1)].verify_key, KEY_1) new_key_2 = signedjson.key.get_verify_key( signedjson.key.generate_signing_key("key2") ) d = store.store_server_verify_keys( - "from_server", 10, [("srv1", key_id_2, new_key_2)] + 
"from_server", 10, [("srv1", key_id_2, FetchKeyResult(new_key_2, 300))] ) self.get_success(d) d = store.get_server_verify_keys([("srv1", key_id_1), ("srv1", key_id_2)]) res = self.get_success(d) self.assertEqual(len(res.keys()), 2) - self.assertEqual(res[("srv1", key_id_1)], KEY_1) - self.assertEqual(res[("srv1", key_id_2)], new_key_2) + + res1 = res[("srv1", key_id_1)] + self.assertEqual(res1.verify_key, KEY_1) + self.assertEqual(res1.valid_until_ts, 100) + + res2 = res[("srv1", key_id_2)] + self.assertEqual(res2.verify_key, new_key_2) + self.assertEqual(res2.valid_until_ts, 300) -- cgit 1.5.1 From 895b79ac2ece74500fb8a4ea158a6aec2adc0856 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 9 Apr 2019 18:28:17 +0100 Subject: Factor out KeyFetchers from KeyRing Rather than have three methods which have to have the same interface, factor out a separate interface which is provided by three implementations. I find it easier to grok the code this way. --- changelog.d/5244.misc | 1 + synapse/crypto/keyring.py | 315 ++++++++++++++++++++++++------------------- tests/crypto/test_keyring.py | 34 ++++- 3 files changed, 204 insertions(+), 146 deletions(-) create mode 100644 changelog.d/5244.misc (limited to 'tests') diff --git a/changelog.d/5244.misc b/changelog.d/5244.misc new file mode 100644 index 0000000000..9cc1fb869d --- /dev/null +++ b/changelog.d/5244.misc @@ -0,0 +1 @@ +Refactor synapse.crypto.keyring to use a KeyFetcher interface. diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 14a27288fd..eaf41b983c 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -80,12 +80,13 @@ class KeyLookupError(ValueError): class Keyring(object): def __init__(self, hs): - self.store = hs.get_datastore() self.clock = hs.get_clock() - self.client = hs.get_http_client() - self.config = hs.get_config() - self.perspective_servers = self.config.perspectives - self.hs = hs + + self._key_fetchers = ( + StoreKeyFetcher(hs), + PerspectivesKeyFetcher(hs), + ServerKeyFetcher(hs), + ) # map from server name to Deferred. 
Has an entry for each server with # an ongoing key download; the Deferred completes once the download @@ -271,13 +272,6 @@ class Keyring(object): verify_requests (list[VerifyKeyRequest]): list of verify requests """ - # These are functions that produce keys given a list of key ids - key_fetch_fns = ( - self.get_keys_from_store, # First try the local store - self.get_keys_from_perspectives, # Then try via perspectives - self.get_keys_from_server, # Then try directly - ) - @defer.inlineCallbacks def do_iterations(): with Measure(self.clock, "get_server_verify_keys"): @@ -288,8 +282,8 @@ class Keyring(object): verify_request.key_ids ) - for fn in key_fetch_fns: - results = yield fn(missing_keys.items()) + for f in self._key_fetchers: + results = yield f.get_keys(missing_keys.items()) # We now need to figure out which verify requests we have keys # for and which we don't @@ -348,8 +342,9 @@ class Keyring(object): run_in_background(do_iterations).addErrback(on_err) - @defer.inlineCallbacks - def get_keys_from_store(self, server_name_and_key_ids): + +class KeyFetcher(object): + def get_keys(self, server_name_and_key_ids): """ Args: server_name_and_key_ids (iterable[Tuple[str, iterable[str]]]): @@ -359,6 +354,18 @@ class Keyring(object): Deferred[dict[str, dict[str, synapse.storage.keys.FetchKeyResult|None]]]: map from server_name -> key_id -> FetchKeyResult """ + raise NotImplementedError + + +class StoreKeyFetcher(KeyFetcher): + """KeyFetcher impl which fetches keys from our data store""" + + def __init__(self, hs): + self.store = hs.get_datastore() + + @defer.inlineCallbacks + def get_keys(self, server_name_and_key_ids): + """see KeyFetcher.get_keys""" keys_to_fetch = ( (server_name, key_id) for server_name, key_ids in server_name_and_key_ids @@ -370,8 +377,127 @@ class Keyring(object): keys.setdefault(server_name, {})[key_id] = key defer.returnValue(keys) + +class BaseV2KeyFetcher(object): + def __init__(self, hs): + self.store = hs.get_datastore() + self.config = hs.get_config() + + @defer.inlineCallbacks + def process_v2_response( + self, from_server, response_json, time_added_ms, requested_ids=[] + ): + """Parse a 'Server Keys' structure from the result of a /key request + + This is used to parse either the entirety of the response from + GET /_matrix/key/v2/server, or a single entry from the list returned by + POST /_matrix/key/v2/query. + + Checks that each signature in the response that claims to come from the origin + server is valid. (Does not check that there actually is such a signature, for + some reason.) + + Stores the json in server_keys_json so that it can be used for future responses + to /_matrix/key/v2/query. + + Args: + from_server (str): the name of the server producing this result: either + the origin server for a /_matrix/key/v2/server request, or the notary + for a /_matrix/key/v2/query. + + response_json (dict): the json-decoded Server Keys response object + + time_added_ms (int): the timestamp to record in server_keys_json + + requested_ids (iterable[str]): a list of the key IDs that were requested. + We will store the json for these key ids as well as any that are + actually in the response + + Returns: + Deferred[dict[str, FetchKeyResult]]: map from key_id to result object + """ + ts_valid_until_ms = response_json[u"valid_until_ts"] + + # start by extracting the keys from the response, since they may be required + # to validate the signature on the response. 
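+        # (only key ids whose signing algorithm signedjson supports, in practice ed25519, are kept)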
+ verify_keys = {} + for key_id, key_data in response_json["verify_keys"].items(): + if is_signing_algorithm_supported(key_id): + key_base64 = key_data["key"] + key_bytes = decode_base64(key_base64) + verify_key = decode_verify_key_bytes(key_id, key_bytes) + verify_keys[key_id] = FetchKeyResult( + verify_key=verify_key, valid_until_ts=ts_valid_until_ms + ) + + # TODO: improve this signature checking + server_name = response_json["server_name"] + for key_id in response_json["signatures"].get(server_name, {}): + if key_id not in verify_keys: + raise KeyLookupError( + "Key response must include verification keys for all signatures" + ) + + verify_signed_json( + response_json, server_name, verify_keys[key_id].verify_key + ) + + for key_id, key_data in response_json["old_verify_keys"].items(): + if is_signing_algorithm_supported(key_id): + key_base64 = key_data["key"] + key_bytes = decode_base64(key_base64) + verify_key = decode_verify_key_bytes(key_id, key_bytes) + verify_keys[key_id] = FetchKeyResult( + verify_key=verify_key, valid_until_ts=key_data["expired_ts"] + ) + + # re-sign the json with our own key, so that it is ready if we are asked to + # give it out as a notary server + signed_key_json = sign_json( + response_json, self.config.server_name, self.config.signing_key[0] + ) + + signed_key_json_bytes = encode_canonical_json(signed_key_json) + + # for reasons I don't quite understand, we store this json for the key ids we + # requested, as well as those we got. + updated_key_ids = set(requested_ids) + updated_key_ids.update(verify_keys) + + yield logcontext.make_deferred_yieldable( + defer.gatherResults( + [ + run_in_background( + self.store.store_server_keys_json, + server_name=server_name, + key_id=key_id, + from_server=from_server, + ts_now_ms=time_added_ms, + ts_expires_ms=ts_valid_until_ms, + key_json_bytes=signed_key_json_bytes, + ) + for key_id in updated_key_ids + ], + consumeErrors=True, + ).addErrback(unwrapFirstError) + ) + + defer.returnValue(verify_keys) + + +class PerspectivesKeyFetcher(BaseV2KeyFetcher): + """KeyFetcher impl which fetches keys from the "perspectives" servers""" + + def __init__(self, hs): + super(PerspectivesKeyFetcher, self).__init__(hs) + self.clock = hs.get_clock() + self.client = hs.get_http_client() + self.perspective_servers = self.config.perspectives + @defer.inlineCallbacks - def get_keys_from_perspectives(self, server_name_and_key_ids): + def get_keys(self, server_name_and_key_ids): + """see KeyFetcher.get_keys""" + @defer.inlineCallbacks def get_key(perspective_name, perspective_keys): try: @@ -408,28 +534,6 @@ class Keyring(object): defer.returnValue(union_of_keys) - @defer.inlineCallbacks - def get_keys_from_server(self, server_name_and_key_ids): - results = yield logcontext.make_deferred_yieldable( - defer.gatherResults( - [ - run_in_background( - self.get_server_verify_key_v2_direct, server_name, key_ids - ) - for server_name, key_ids in server_name_and_key_ids - ], - consumeErrors=True, - ).addErrback(unwrapFirstError) - ) - - merged = {} - for result in results: - merged.update(result) - - defer.returnValue( - {server_name: keys for server_name, keys in merged.items() if keys} - ) - @defer.inlineCallbacks def get_server_verify_key_v2_indirect( self, server_names_and_key_ids, perspective_name, perspective_keys @@ -520,6 +624,38 @@ class Keyring(object): defer.returnValue(keys) + +class ServerKeyFetcher(BaseV2KeyFetcher): + """KeyFetcher impl which fetches keys from the origin servers""" + + def __init__(self, hs): + super(ServerKeyFetcher, 
self).__init__(hs) + self.clock = hs.get_clock() + self.client = hs.get_http_client() + + @defer.inlineCallbacks + def get_keys(self, server_name_and_key_ids): + """see KeyFetcher.get_keys""" + results = yield logcontext.make_deferred_yieldable( + defer.gatherResults( + [ + run_in_background( + self.get_server_verify_key_v2_direct, server_name, key_ids + ) + for server_name, key_ids in server_name_and_key_ids + ], + consumeErrors=True, + ).addErrback(unwrapFirstError) + ) + + merged = {} + for result in results: + merged.update(result) + + defer.returnValue( + {server_name: keys for server_name, keys in merged.items() if keys} + ) + @defer.inlineCallbacks def get_server_verify_key_v2_direct(self, server_name, key_ids): keys = {} # type: dict[str, FetchKeyResult] @@ -568,107 +704,6 @@ class Keyring(object): defer.returnValue({server_name: keys}) - @defer.inlineCallbacks - def process_v2_response( - self, from_server, response_json, time_added_ms, requested_ids=[] - ): - """Parse a 'Server Keys' structure from the result of a /key request - - This is used to parse either the entirety of the response from - GET /_matrix/key/v2/server, or a single entry from the list returned by - POST /_matrix/key/v2/query. - - Checks that each signature in the response that claims to come from the origin - server is valid. (Does not check that there actually is such a signature, for - some reason.) - - Stores the json in server_keys_json so that it can be used for future responses - to /_matrix/key/v2/query. - - Args: - from_server (str): the name of the server producing this result: either - the origin server for a /_matrix/key/v2/server request, or the notary - for a /_matrix/key/v2/query. - - response_json (dict): the json-decoded Server Keys response object - - time_added_ms (int): the timestamp to record in server_keys_json - - requested_ids (iterable[str]): a list of the key IDs that were requested. - We will store the json for these key ids as well as any that are - actually in the response - - Returns: - Deferred[dict[str, FetchKeyResult]]: map from key_id to result object - """ - ts_valid_until_ms = response_json[u"valid_until_ts"] - - # start by extracting the keys from the response, since they may be required - # to validate the signature on the response. 
- verify_keys = {} - for key_id, key_data in response_json["verify_keys"].items(): - if is_signing_algorithm_supported(key_id): - key_base64 = key_data["key"] - key_bytes = decode_base64(key_base64) - verify_key = decode_verify_key_bytes(key_id, key_bytes) - verify_keys[key_id] = FetchKeyResult( - verify_key=verify_key, valid_until_ts=ts_valid_until_ms - ) - - # TODO: improve this signature checking - server_name = response_json["server_name"] - for key_id in response_json["signatures"].get(server_name, {}): - if key_id not in verify_keys: - raise KeyLookupError( - "Key response must include verification keys for all signatures" - ) - - verify_signed_json( - response_json, server_name, verify_keys[key_id].verify_key - ) - - for key_id, key_data in response_json["old_verify_keys"].items(): - if is_signing_algorithm_supported(key_id): - key_base64 = key_data["key"] - key_bytes = decode_base64(key_base64) - verify_key = decode_verify_key_bytes(key_id, key_bytes) - verify_keys[key_id] = FetchKeyResult( - verify_key=verify_key, valid_until_ts=key_data["expired_ts"] - ) - - # re-sign the json with our own key, so that it is ready if we are asked to - # give it out as a notary server - signed_key_json = sign_json( - response_json, self.config.server_name, self.config.signing_key[0] - ) - - signed_key_json_bytes = encode_canonical_json(signed_key_json) - - # for reasons I don't quite understand, we store this json for the key ids we - # requested, as well as those we got. - updated_key_ids = set(requested_ids) - updated_key_ids.update(verify_keys) - - yield logcontext.make_deferred_yieldable( - defer.gatherResults( - [ - run_in_background( - self.store.store_server_keys_json, - server_name=server_name, - key_id=key_id, - from_server=from_server, - ts_now_ms=time_added_ms, - ts_expires_ms=ts_valid_until_ms, - key_json_bytes=signed_key_json_bytes, - ) - for key_id in updated_key_ids - ], - consumeErrors=True, - ).addErrback(unwrapFirstError) - ) - - defer.returnValue(verify_keys) - @defer.inlineCallbacks def _handle_key_deferred(verify_request): diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 83de32b05d..de61bad15d 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -24,7 +24,11 @@ from twisted.internet import defer from synapse.api.errors import SynapseError from synapse.crypto import keyring -from synapse.crypto.keyring import KeyLookupError +from synapse.crypto.keyring import ( + KeyLookupError, + PerspectivesKeyFetcher, + ServerKeyFetcher, +) from synapse.storage.keys import FetchKeyResult from synapse.util import logcontext from synapse.util.logcontext import LoggingContext @@ -218,12 +222,19 @@ class KeyringTestCase(unittest.HomeserverTestCase): self.assertFalse(d.called) self.get_success(d) + +class ServerKeyFetcherTestCase(unittest.HomeserverTestCase): + def make_homeserver(self, reactor, clock): + self.http_client = Mock() + hs = self.setup_test_homeserver(handlers=None, http_client=self.http_client) + return hs + def test_get_keys_from_server(self): # arbitrarily advance the clock a bit self.reactor.advance(100) SERVER_NAME = "server2" - kr = keyring.Keyring(self.hs) + fetcher = ServerKeyFetcher(self.hs) testkey = signedjson.key.generate_signing_key("ver1") testverifykey = signedjson.key.get_verify_key(testkey) testverifykey_id = "ed25519:ver1" @@ -250,7 +261,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): self.http_client.get_json.side_effect = get_json server_name_and_key_ids = [(SERVER_NAME, ("key1",))] - keys = 
self.get_success(kr.get_keys_from_server(server_name_and_key_ids)) + keys = self.get_success(fetcher.get_keys(server_name_and_key_ids)) k = keys[SERVER_NAME][testverifykey_id] self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS) self.assertEqual(k.verify_key, testverifykey) @@ -278,15 +289,26 @@ class KeyringTestCase(unittest.HomeserverTestCase): # change the server name: it should cause a rejection response["server_name"] = "OTHER_SERVER" self.get_failure( - kr.get_keys_from_server(server_name_and_key_ids), KeyLookupError + fetcher.get_keys(server_name_and_key_ids), KeyLookupError ) + +class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): + def make_homeserver(self, reactor, clock): + self.mock_perspective_server = MockPerspectiveServer() + self.http_client = Mock() + hs = self.setup_test_homeserver(handlers=None, http_client=self.http_client) + keys = self.mock_perspective_server.get_verify_keys() + hs.config.perspectives = {self.mock_perspective_server.server_name: keys} + return hs + def test_get_keys_from_perspectives(self): # arbitrarily advance the clock a bit self.reactor.advance(100) + fetcher = PerspectivesKeyFetcher(self.hs) + SERVER_NAME = "server2" - kr = keyring.Keyring(self.hs) testkey = signedjson.key.generate_signing_key("ver1") testverifykey = signedjson.key.get_verify_key(testkey) testverifykey_id = "ed25519:ver1" @@ -320,7 +342,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): self.http_client.post_json.side_effect = post_json server_name_and_key_ids = [(SERVER_NAME, ("key1",))] - keys = self.get_success(kr.get_keys_from_perspectives(server_name_and_key_ids)) + keys = self.get_success(fetcher.get_keys(server_name_and_key_ids)) self.assertIn(SERVER_NAME, keys) k = keys[SERVER_NAME][testverifykey_id] self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS) -- cgit 1.5.1 From 6368150a748e9303f34948873af360d8a62347b6 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 23 May 2019 15:00:20 +0100 Subject: Add config option for setting homeserver's default room version (#5223) Replaces DEFAULT_ROOM_VERSION constant with a method that first checks the config, then returns a hardcoded value if the option is not present. That hardcoded value is now located in the server.py config file. --- changelog.d/5223.feature | 1 + docs/sample_config.yaml | 9 +++++++ synapse/api/room_versions.py | 4 ---- synapse/config/server.py | 32 +++++++++++++++++++++++++ synapse/handlers/room.py | 9 +++++-- synapse/rest/client/v2_alpha/capabilities.py | 5 ++-- tests/rest/client/v2_alpha/test_capabilities.py | 7 ++++-- 7 files changed, 57 insertions(+), 10 deletions(-) create mode 100644 changelog.d/5223.feature (limited to 'tests') diff --git a/changelog.d/5223.feature b/changelog.d/5223.feature new file mode 100644 index 0000000000..cfdf1ad41b --- /dev/null +++ b/changelog.d/5223.feature @@ -0,0 +1 @@ +Ability to configure default room version. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 559fbcdd01..2a5a514d61 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -83,6 +83,15 @@ pid_file: DATADIR/homeserver.pid # #restrict_public_rooms_to_local_users: true +# The default room version for newly created rooms. +# +# Known room versions are listed here: +# https://matrix.org/docs/spec/#complete-list-of-room-versions +# +# For example, for room version 1, default_room_version should be set +# to "1". 
+#default_room_version: "1" + # The GC threshold parameters to pass to `gc.set_threshold`, if defined # #gc_thresholds: [700, 10, 10] diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index b2895355a8..4085bd10b9 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -85,10 +85,6 @@ class RoomVersions(object): ) -# the version we will give rooms which are created on this server -DEFAULT_ROOM_VERSION = RoomVersions.V1 - - KNOWN_ROOM_VERSIONS = { v.identifier: v for v in ( RoomVersions.V1, diff --git a/synapse/config/server.py b/synapse/config/server.py index f34aa42afa..e9120d4d75 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -20,6 +20,7 @@ import os.path from netaddr import IPSet +from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.http.endpoint import parse_and_validate_server_name from synapse.python_dependencies import DependencyException, check_requirements @@ -35,6 +36,8 @@ logger = logging.Logger(__name__) # in the list. DEFAULT_BIND_ADDRESSES = ['::', '0.0.0.0'] +DEFAULT_ROOM_VERSION = "1" + class ServerConfig(Config): @@ -88,6 +91,22 @@ class ServerConfig(Config): "restrict_public_rooms_to_local_users", False, ) + default_room_version = config.get( + "default_room_version", DEFAULT_ROOM_VERSION, + ) + + # Ensure room version is a str + default_room_version = str(default_room_version) + + if default_room_version not in KNOWN_ROOM_VERSIONS: + raise ConfigError( + "Unknown default_room_version: %s, known room versions: %s" % + (default_room_version, list(KNOWN_ROOM_VERSIONS.keys())) + ) + + # Get the actual room version object rather than just the identifier + self.default_room_version = KNOWN_ROOM_VERSIONS[default_room_version] + # whether to enable search. If disabled, new entries will not be inserted # into the search tables and they will not be indexed. Users will receive # errors when attempting to search for messages. @@ -310,6 +329,10 @@ class ServerConfig(Config): unsecure_port = 8008 pid_file = os.path.join(data_dir_path, "homeserver.pid") + + # Bring DEFAULT_ROOM_VERSION into the local-scope for use in the + # default config string + default_room_version = DEFAULT_ROOM_VERSION return """\ ## Server ## @@ -384,6 +407,15 @@ class ServerConfig(Config): # #restrict_public_rooms_to_local_users: true + # The default room version for newly created rooms. + # + # Known room versions are listed here: + # https://matrix.org/docs/spec/#complete-list-of-room-versions + # + # For example, for room version 1, default_room_version should be set + # to "1". 
+ #default_room_version: "%(default_room_version)s" + # The GC threshold parameters to pass to `gc.set_threshold`, if defined # #gc_thresholds: [700, 10, 10] diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index e37ae96899..4a17911a87 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -27,7 +27,7 @@ from twisted.internet import defer from synapse.api.constants import EventTypes, JoinRules, RoomCreationPreset from synapse.api.errors import AuthError, Codes, NotFoundError, StoreError, SynapseError -from synapse.api.room_versions import DEFAULT_ROOM_VERSION, KNOWN_ROOM_VERSIONS +from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.storage.state import StateFilter from synapse.types import RoomAlias, RoomID, RoomStreamToken, StreamToken, UserID from synapse.util import stringutils @@ -70,6 +70,7 @@ class RoomCreationHandler(BaseHandler): self.spam_checker = hs.get_spam_checker() self.event_creation_handler = hs.get_event_creation_handler() self.room_member_handler = hs.get_room_member_handler() + self.config = hs.config # linearizer to stop two upgrades happening at once self._upgrade_linearizer = Linearizer("room_upgrade_linearizer") @@ -475,7 +476,11 @@ class RoomCreationHandler(BaseHandler): if ratelimit: yield self.ratelimit(requester) - room_version = config.get("room_version", DEFAULT_ROOM_VERSION.identifier) + room_version = config.get( + "room_version", + self.config.default_room_version.identifier, + ) + if not isinstance(room_version, string_types): raise SynapseError( 400, diff --git a/synapse/rest/client/v2_alpha/capabilities.py b/synapse/rest/client/v2_alpha/capabilities.py index a868d06098..2b4892330c 100644 --- a/synapse/rest/client/v2_alpha/capabilities.py +++ b/synapse/rest/client/v2_alpha/capabilities.py @@ -16,7 +16,7 @@ import logging from twisted.internet import defer -from synapse.api.room_versions import DEFAULT_ROOM_VERSION, KNOWN_ROOM_VERSIONS +from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.http.servlet import RestServlet from ._base import client_v2_patterns @@ -36,6 +36,7 @@ class CapabilitiesRestServlet(RestServlet): """ super(CapabilitiesRestServlet, self).__init__() self.hs = hs + self.config = hs.config self.auth = hs.get_auth() self.store = hs.get_datastore() @@ -48,7 +49,7 @@ class CapabilitiesRestServlet(RestServlet): response = { "capabilities": { "m.room_versions": { - "default": DEFAULT_ROOM_VERSION.identifier, + "default": self.config.default_room_version.identifier, "available": { v.identifier: v.disposition for v in KNOWN_ROOM_VERSIONS.values() diff --git a/tests/rest/client/v2_alpha/test_capabilities.py b/tests/rest/client/v2_alpha/test_capabilities.py index f3ef977404..bce5b0cf4c 100644 --- a/tests/rest/client/v2_alpha/test_capabilities.py +++ b/tests/rest/client/v2_alpha/test_capabilities.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import synapse.rest.admin -from synapse.api.room_versions import DEFAULT_ROOM_VERSION, KNOWN_ROOM_VERSIONS +from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.rest.client.v1 import login from synapse.rest.client.v2_alpha import capabilities @@ -32,6 +32,7 @@ class CapabilitiesTestCase(unittest.HomeserverTestCase): self.url = b"/_matrix/client/r0/capabilities" hs = self.setup_test_homeserver() self.store = hs.get_datastore() + self.config = hs.config return hs def test_check_auth_required(self): @@ -51,8 +52,10 @@ class CapabilitiesTestCase(unittest.HomeserverTestCase): self.assertEqual(channel.code, 200) for room_version in capabilities['m.room_versions']['available'].keys(): self.assertTrue(room_version in KNOWN_ROOM_VERSIONS, "" + room_version) + self.assertEqual( - DEFAULT_ROOM_VERSION.identifier, capabilities['m.room_versions']['default'] + self.config.default_room_version.identifier, + capabilities['m.room_versions']['default'], ) def test_get_change_password_capabilities(self): -- cgit 1.5.1 From 753b1270da1f0449bbb960b37707556abd3eaac0 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 9 Apr 2019 13:03:56 +0100 Subject: Require sig from origin server on perspectives responses --- synapse/crypto/keyring.py | 28 ++++++++------- tests/crypto/test_keyring.py | 84 +++++++++++++++++++++++++++++++++++++++----- 2 files changed, 90 insertions(+), 22 deletions(-) (limited to 'tests') diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index eaf41b983c..a64ba0752a 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -394,8 +394,7 @@ class BaseV2KeyFetcher(object): POST /_matrix/key/v2/query. Checks that each signature in the response that claims to come from the origin - server is valid. (Does not check that there actually is such a signature, for - some reason.) + server is valid, and that there is at least one such signature. Stores the json in server_keys_json so that it can be used for future responses to /_matrix/key/v2/query. @@ -430,16 +429,25 @@ class BaseV2KeyFetcher(object): verify_key=verify_key, valid_until_ts=ts_valid_until_ms ) - # TODO: improve this signature checking server_name = response_json["server_name"] + verified = False for key_id in response_json["signatures"].get(server_name, {}): - if key_id not in verify_keys: + # each of the keys used for the signature must be present in the response + # json. 
+ key = verify_keys.get(key_id) + if not key: raise KeyLookupError( - "Key response must include verification keys for all signatures" + "Key response is signed by key id %s:%s but that key is not " + "present in the response" % (server_name, key_id) ) - verify_signed_json( - response_json, server_name, verify_keys[key_id].verify_key + verify_signed_json(response_json, server_name, key.verify_key) + verified = True + + if not verified: + raise KeyLookupError( + "Key response for %s is not signed by the origin server" + % (server_name,) ) for key_id, key_data in response_json["old_verify_keys"].items(): @@ -677,12 +685,6 @@ class ServerKeyFetcher(BaseV2KeyFetcher): except HttpResponseException as e: raise_from(KeyLookupError("Remote server returned an error"), e) - if ( - u"signatures" not in response - or server_name not in response[u"signatures"] - ): - raise KeyLookupError("Key response not signed by remote server") - if response["server_name"] != server_name: raise KeyLookupError( "Expected a response for server %r not %r" diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index de61bad15d..c4c9d29499 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -55,11 +55,11 @@ class MockPerspectiveServer(object): key_id: {"key": signedjson.key.encode_verify_key_base64(verify_key)} }, } - return self.get_signed_response(res) + self.sign_response(res) + return res - def get_signed_response(self, res): + def sign_response(self, res): signedjson.sign.sign_json(res, self.server_name, self.key) - return res class KeyringTestCase(unittest.HomeserverTestCase): @@ -238,7 +238,7 @@ class ServerKeyFetcherTestCase(unittest.HomeserverTestCase): testkey = signedjson.key.generate_signing_key("ver1") testverifykey = signedjson.key.get_verify_key(testkey) testverifykey_id = "ed25519:ver1" - VALID_UNTIL_TS = 1000 + VALID_UNTIL_TS = 200 * 1000 # valid response response = { @@ -326,9 +326,10 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): }, } - persp_resp = { - "server_keys": [self.mock_perspective_server.get_signed_response(response)] - } + # the response must be signed by both the origin server and the perspectives + # server. 
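+        # (sign_json mutates the dict in place, adding each signature under the top-level 'signatures' key)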
+ signedjson.sign.sign_json(response, SERVER_NAME, testkey) + self.mock_perspective_server.sign_response(response) def post_json(destination, path, data, **kwargs): self.assertEqual(destination, self.mock_perspective_server.server_name) @@ -337,7 +338,7 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): # check that the request is for the expected key q = data["server_keys"] self.assertEqual(list(q[SERVER_NAME].keys()), ["key1"]) - return persp_resp + return {"server_keys": [response]} self.http_client.post_json.side_effect = post_json @@ -365,9 +366,74 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): self.assertEqual( bytes(res["key_json"]), - canonicaljson.encode_canonical_json(persp_resp["server_keys"][0]), + canonicaljson.encode_canonical_json(response), ) + def test_invalid_perspectives_responses(self): + """Check that invalid responses from the perspectives server are rejected""" + # arbitrarily advance the clock a bit + self.reactor.advance(100) + + SERVER_NAME = "server2" + testkey = signedjson.key.generate_signing_key("ver1") + testverifykey = signedjson.key.get_verify_key(testkey) + testverifykey_id = "ed25519:ver1" + VALID_UNTIL_TS = 200 * 1000 + + def build_response(): + # valid response + response = { + "server_name": SERVER_NAME, + "old_verify_keys": {}, + "valid_until_ts": VALID_UNTIL_TS, + "verify_keys": { + testverifykey_id: { + "key": signedjson.key.encode_verify_key_base64(testverifykey) + } + }, + } + + # the response must be signed by both the origin server and the perspectives + # server. + signedjson.sign.sign_json(response, SERVER_NAME, testkey) + self.mock_perspective_server.sign_response(response) + return response + + def get_key_from_perspectives(response): + fetcher = PerspectivesKeyFetcher(self.hs) + server_name_and_key_ids = [(SERVER_NAME, ("key1",))] + + def post_json(destination, path, data, **kwargs): + self.assertEqual(destination, self.mock_perspective_server.server_name) + self.assertEqual(path, "/_matrix/key/v2/query") + return {"server_keys": [response]} + + self.http_client.post_json.side_effect = post_json + + return self.get_success( + fetcher.get_keys(server_name_and_key_ids) + ) + + # start with a valid response so we can check we are testing the right thing + response = build_response() + keys = get_key_from_perspectives(response) + k = keys[SERVER_NAME][testverifykey_id] + self.assertEqual(k.verify_key, testverifykey) + + # remove the perspectives server's signature + response = build_response() + del response["signatures"][self.mock_perspective_server.server_name] + self.http_client.post_json.return_value = {"server_keys": [response]} + keys = get_key_from_perspectives(response) + self.assertEqual(keys, {}, "Expected empty dict with missing persp server sig") + + # remove the origin server's signature + response = build_response() + del response["signatures"][SERVER_NAME] + self.http_client.post_json.return_value = {"server_keys": [response]} + keys = get_key_from_perspectives(response) + self.assertEqual(keys, {}, "Expected empty dict with missing origin server sig") + @defer.inlineCallbacks def run_in_context(f, *args, **kwargs): -- cgit 1.5.1 From fa1b293da2e0a5e47864ccb49e530d8a81d81790 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 24 May 2019 22:17:18 +0100 Subject: Simplification to Keyring.wait_for_previous_lookups. (#5250) The list of server names was redundant, since it was equivalent to the keys on the server_to_deferred map. 
This reduces the number of large lists being passed around, and has the benefit of deduplicating the entries in `wait_on`.
---
 changelog.d/5250.misc        |  1 +
 synapse/crypto/keyring.py    | 11 ++++-------
 tests/crypto/test_keyring.py |  4 ++--
 3 files changed, 7 insertions(+), 9 deletions(-)
 create mode 100644 changelog.d/5250.misc

(limited to 'tests')

diff --git a/changelog.d/5250.misc b/changelog.d/5250.misc
new file mode 100644
index 0000000000..575a299a82
--- /dev/null
+++ b/changelog.d/5250.misc
@@ -0,0 +1 @@
+Simplification to Keyring.wait_for_previous_lookups.
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index eaf41b983c..d6ad7f1772 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -180,9 +180,7 @@ class Keyring(object):
         # We want to wait for any previous lookups to complete before
         # proceeding.
-        yield self.wait_for_previous_lookups(
-            [rq.server_name for rq in verify_requests], server_to_deferred
-        )
+        yield self.wait_for_previous_lookups(server_to_deferred)

         # Actually start fetching keys.
         self._get_server_verify_keys(verify_requests)
@@ -215,12 +213,11 @@ class Keyring(object):
             logger.exception("Error starting key lookups")

     @defer.inlineCallbacks
-    def wait_for_previous_lookups(self, server_names, server_to_deferred):
+    def wait_for_previous_lookups(self, server_to_deferred):
         """Waits for any previous key lookups for the given servers to finish.

         Args:
-            server_names (list): list of server_names we want to lookup
-            server_to_deferred (dict): server_name to deferred which gets
+            server_to_deferred (dict[str, Deferred]): server_name to deferred which gets
                 resolved once we've finished looking up keys for that server.
                 The Deferreds should be regular twisted ones which call their
                 callbacks with no logcontext.
@@ -233,7 +230,7 @@ class Keyring(object):
         while True:
             wait_on = [
                 (server_name, self.key_downloads[server_name])
-                for server_name in server_names
+                for server_name in server_to_deferred.keys()
                 if server_name in self.key_downloads
             ]
             if not wait_on:
diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py
index de61bad15d..4fba462d44 100644
--- a/tests/crypto/test_keyring.py
+++ b/tests/crypto/test_keyring.py
@@ -85,7 +85,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         # we run the lookup in a logcontext so that the patched inlineCallbacks can check
         # it is doing the right thing with logcontexts.
         wait_1_deferred = run_in_context(
-            kr.wait_for_previous_lookups, ["server1"], {"server1": lookup_1_deferred}
+            kr.wait_for_previous_lookups, {"server1": lookup_1_deferred}
         )

         # there were no previous lookups, so the deferred should be ready
@@ -94,7 +94,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         # set off another wait. It should block because the first lookup
         # hasn't yet completed.
         wait_2_deferred = run_in_context(
-            kr.wait_for_previous_lookups, ["server1"], {"server1": lookup_2_deferred}
+            kr.wait_for_previous_lookups, {"server1": lookup_2_deferred}
         )
         self.assertFalse(wait_2_deferred.called)
--
cgit 1.5.1

From 52839886d664576831462e033b88e5aba4c019e3 Mon Sep 17 00:00:00 2001
From: Brendan Abolivier
Date: Tue, 28 May 2019 16:47:42 +0100
Subject: Allow configuring a range for the account validity startup job

When enabling the account validity feature, Synapse will, at startup, look for registered accounts without an expiration date, and will set one equal to 'now + validity_period' for them.

On large servers, this can mean that a large number of users end up with the same expiration date, and will therefore all be sent a renewal email at the same time, which isn't ideal. In order to mitigate this, this PR allows server admins to define a 'max_delta' so that the expiration date is a random value in the [now + validity_period ; now + validity_period + max_delta] range. This allows renewal emails to be progressively sent over a configured period instead of being sent all in one big batch.
---
 synapse/config/registration.py              | 11 +++++++++++
 synapse/storage/_base.py                    | 23 +++++++++++++++++++--
 tests/rest/client/v2_alpha/test_register.py | 21 +++++++++++++++++++
 3 files changed, 53 insertions(+), 2 deletions(-)

(limited to 'tests')

diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index 693288f938..b4fd4af368 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -39,6 +39,10 @@ class AccountValidityConfig(Config):
         else:
             self.renew_email_subject = "Renew your %(app)s account"

+        self.startup_job_max_delta = self.parse_duration(
+            config.get("startup_job_max_delta", 0),
+        )
+
         if self.renew_by_email_enabled and "public_baseurl" not in synapse_config:
             raise ConfigError("Can't send renewal emails without 'public_baseurl'")

@@ -131,11 +135,18 @@ class RegistrationConfig(Config):
     # after that the validity period changes and Synapse is restarted, the users'
     # expiration dates won't be updated unless their account is manually renewed.
     #
+    # If set, the ``startup_job_max_delta`` optional setting will make the startup job
+    # described above set a random expiration date between t + period and
+    # t + period + startup_job_max_delta, t being the date and time at which the job
+    # sets the expiration date for a given user. This is useful for server admins who
+    # want to avoid Synapse sending a lot of renewal emails at once.
+    #
     #account_validity:
     #  enabled: True
     #  period: 6w
     #  renew_at: 1w
     #  renew_email_subject: "Renew your %%(app)s account"
+    #  startup_job_max_delta: 2d

     # The user must provide all of the below types of 3PID when registering.
     #
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index fa6839ceca..40802fd3dc 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -16,6 +16,7 @@
 # limitations under the License.
 import itertools
 import logging
+import random
 import sys
 import threading
 import time
@@ -247,6 +248,8 @@ class SQLBaseStore(object):
             self._check_safe_to_upsert,
         )

+        self.rand = random.SystemRandom()
+
         if self._account_validity.enabled:
             self._clock.call_later(
                 0.0,
@@ -308,21 +311,37 @@ class SQLBaseStore(object):
             res = self.cursor_to_dict(txn)
             if res:
                 for user in res:
-                    self.set_expiration_date_for_user_txn(txn, user["name"])
+                    self.set_expiration_date_for_user_txn(
+                        txn,
+                        user["name"],
+                        use_delta=True,
+                    )

         yield self.runInteraction(
             "get_users_with_no_expiration_date",
             select_users_with_no_expiration_date_txn,
         )

-    def set_expiration_date_for_user_txn(self, txn, user_id):
+    def set_expiration_date_for_user_txn(self, txn, user_id, use_delta=False):
         """Sets an expiration date for the account with the given user ID.

         Args:
             user_id (str): User ID to set an expiration date for.
+            use_delta (bool): If set to False, the expiration date for the user will be
+                now + validity period. If set to True, this expiration date will be a
+                random value in the [now + period; now + period + max_delta] range,
+                max_delta being the configured value for the size of the range, unless
+                max_delta is 0, in which case it sets it to now + period.
         """
         now_ms = self._clock.time_msec()
         expiration_ts = now_ms + self._account_validity.period
+
+        if use_delta and self._account_validity.startup_job_max_delta:
+            expiration_ts = self.rand.randrange(
+                expiration_ts,
+                expiration_ts + self._account_validity.startup_job_max_delta,
+            )
+
         self._simple_insert_txn(
             txn,
             "account_validity",
diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py
index d4a1d4d50c..7603440fd8 100644
--- a/tests/rest/client/v2_alpha/test_register.py
+++ b/tests/rest/client/v2_alpha/test_register.py
@@ -436,6 +436,7 @@ class AccountValidityBackgroundJobTestCase(unittest.HomeserverTestCase):

     def make_homeserver(self, reactor, clock):
         self.validity_period = 10
+        self.max_delta = 10

         config = self.default_config()

@@ -459,8 +460,28 @@ class AccountValidityBackgroundJobTestCase(unittest.HomeserverTestCase):
         """
         user_id = self.register_user("kermit", "user")

+        self.hs.config.account_validity.startup_job_max_delta = 0
+
         now_ms = self.hs.clock.time_msec()
         self.get_success(self.store._set_expiration_date_when_missing())

         res = self.get_success(self.store.get_expiration_ts_for_user(user_id))

         self.assertEqual(res, now_ms + self.validity_period)
+
+    def test_background_job_with_max_delta(self):
+        """
+        Tests the same thing as test_background_job, except that it sets the
+        startup_job_max_delta parameter and checks that the expiration date is within the
+        allowed range.
+        """
+        user_id = self.register_user("kermit_delta", "user")
+
+        self.hs.config.account_validity.startup_job_max_delta = self.max_delta
+
+        now_ms = self.hs.clock.time_msec()
+        self.get_success(self.store._set_expiration_date_when_missing())
+
+        res = self.get_success(self.store.get_expiration_ts_for_user(user_id))
+
+        self.assertLessEqual(res, now_ms + self.validity_period + self.delta)
+        self.assertGreaterEqual(res, now_ms + self.validity_period)
--
cgit 1.5.1

From 7e1c7cc2742f5eb9d6d37205a0c457b8a7bd015f Mon Sep 17 00:00:00 2001
From: Brendan Abolivier
Date: Tue, 28 May 2019 17:13:26 +0100
Subject: Typo

---
 tests/rest/client/v2_alpha/test_register.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'tests')

diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py
index 7603440fd8..68654e25ab 100644
--- a/tests/rest/client/v2_alpha/test_register.py
+++ b/tests/rest/client/v2_alpha/test_register.py
@@ -483,5 +483,5 @@ class AccountValidityBackgroundJobTestCase(unittest.HomeserverTestCase):

         res = self.get_success(self.store.get_expiration_ts_for_user(user_id))

-        self.assertLessEqual(res, now_ms + self.validity_period + self.delta)
+        self.assertLessEqual(res, now_ms + self.validity_period + self.max_delta)
         self.assertGreaterEqual(res, now_ms + self.validity_period)
--
cgit 1.5.1

From d7add713a8351024aec9a51c1744f78ac39f552e Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 29 May 2019 14:19:11 +0100
Subject: Add test

---
 tests/storage/test_cleanup_extrems.py | 248 ++++++++++++++++++++++++++++++++++
 1 file changed, 248 insertions(+)
 create mode 100644 tests/storage/test_cleanup_extrems.py

(limited to 'tests')

diff --git a/tests/storage/test_cleanup_extrems.py b/tests/storage/test_cleanup_extrems.py
new file mode 100644
index 0000000000..6dda66ecd3
--- /dev/null
+++ b/tests/storage/test_cleanup_extrems.py
+++ b/tests/storage/test_cleanup_extrems.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os.path + +from synapse.api.constants import EventTypes +from synapse.storage import prepare_database +from synapse.types import Requester, UserID + +from tests.unittest import HomeserverTestCase + + +class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): + """Test the background update to clean forward extremities table. + """ + + def prepare(self, reactor, clock, homeserver): + self.store = homeserver.get_datastore() + self.event_creator = homeserver.get_event_creation_handler() + self.room_creator = homeserver.get_room_creation_handler() + + # Create a test user and room + self.user = UserID("alice", "test") + self.requester = Requester(self.user, None, False, None, None) + info = self.get_success(self.room_creator.create_room(self.requester, {})) + self.room_id = info["room_id"] + + def create_and_send_event(self, soft_failed=False, prev_event_ids=None): + """Create and send an event. + + Args: + soft_failed (bool): Whether to create a soft failed event or not + prev_event_ids (list[str]|None): Explicitly set the prev events, + or if None just use the default + + Returns: + str: The new event's ID. + """ + prev_events_and_hashes = None + if prev_event_ids: + prev_events_and_hashes = [[p, {}, 0] for p in prev_event_ids] + + event, context = self.get_success( + self.event_creator.create_event( + self.requester, + { + "type": EventTypes.Message, + "room_id": self.room_id, + "sender": self.user.to_string(), + "content": {"body": "", "msgtype": "m.text"}, + }, + prev_events_and_hashes=prev_events_and_hashes, + ) + ) + + if soft_failed: + event.internal_metadata.soft_failed = True + + self.get_success( + self.event_creator.send_nonmember_event(self.requester, event, context) + ) + + return event.event_id + + def add_extremity(self, event_id): + """Add the given event as an extremity to the room. + """ + self.get_success( + self.store._simple_insert( + table="event_forward_extremities", + values={"room_id": self.room_id, "event_id": event_id}, + desc="test_add_extremity", + ) + ) + + self.store.get_latest_event_ids_in_room.invalidate((self.room_id,)) + + def run_background_update(self): + """Re run the background update to clean up the extremities. + """ + # Make sure we don't clash with in progress updates. 
+ self.assertTrue(self.store._all_done, "Background updates are still ongoing") + + schema_path = os.path.join( + prepare_database.dir_path, + "schema", + "delta", + "54", + "delete_forward_extremities.sql", + ) + + def run_delta_file(txn): + prepare_database.executescript(txn, schema_path) + + self.get_success( + self.store.runInteraction("test_delete_forward_extremities", run_delta_file) + ) + + # Ugh, have to reset this flag + self.store._all_done = False + + while not self.get_success(self.store.has_completed_background_updates()): + self.get_success(self.store.do_next_background_update(100), by=0.1) + + def test_soft_failed_extremities_handled_correctly(self): + """Test that extremities are correctly calculated in the presence of + soft failed events. + + Tests a graph like: + + A <- SF1 <- SF2 <- B + + Where SF* are soft failed. + """ + + # Create the room graph + event_id_1 = self.create_and_send_event() + event_id_2 = self.create_and_send_event(True, [event_id_1]) + event_id_3 = self.create_and_send_event(True, [event_id_2]) + event_id_4 = self.create_and_send_event(False, [event_id_3]) + + # Check the latest events are as expected + latest_event_ids = self.get_success( + self.store.get_latest_event_ids_in_room(self.room_id) + ) + + self.assertEqual(latest_event_ids, [event_id_4]) + + def test_basic_cleanup(self): + """Test that extremities are correctly calculated in the presence of + soft failed events. + + Tests a graph like: + + A <- SF1 <- B + + Where SF* are soft failed, and with extremities of A and B + """ + # Create the room graph + event_id_a = self.create_and_send_event() + event_id_sf1 = self.create_and_send_event(True, [event_id_a]) + event_id_b = self.create_and_send_event(False, [event_id_sf1]) + + # Add the new extremity and check the latest events are as expected + self.add_extremity(event_id_a) + + latest_event_ids = self.get_success( + self.store.get_latest_event_ids_in_room(self.room_id) + ) + self.assertEqual(set(latest_event_ids), set((event_id_a, event_id_b))) + + # Run the background update and check it did the right thing + self.run_background_update() + + latest_event_ids = self.get_success( + self.store.get_latest_event_ids_in_room(self.room_id) + ) + self.assertEqual(latest_event_ids, [event_id_b]) + + def test_chain_of_fail_cleanup(self): + """Test that extremities are correctly calculated in the presence of + soft failed events. + + Tests a graph like: + + A <- SF1 <- SF2 <- B + + Where SF* are soft failed, and with extremities of A and B + """ + # Create the room graph + event_id_a = self.create_and_send_event() + event_id_sf1 = self.create_and_send_event(True, [event_id_a]) + event_id_sf2 = self.create_and_send_event(True, [event_id_sf1]) + event_id_b = self.create_and_send_event(False, [event_id_sf2]) + + # Add the new extremity and check the latest events are as expected + self.add_extremity(event_id_a) + + latest_event_ids = self.get_success( + self.store.get_latest_event_ids_in_room(self.room_id) + ) + self.assertEqual(set(latest_event_ids), set((event_id_a, event_id_b))) + + # Run the background update and check it did the right thing + self.run_background_update() + + latest_event_ids = self.get_success( + self.store.get_latest_event_ids_in_room(self.room_id) + ) + self.assertEqual(latest_event_ids, [event_id_b]) + + def test_forked_graph_cleanup(self): + r"""Test that extremities are correctly calculated in the presence of + soft failed events. 
+ + Tests a graph like, where time flows down the page: + + A B + / \ / + / \ / + SF1 SF2 + | | + SF3 | + / \ | + | \ | + C SF4 + + Where SF* are soft failed, and with them A, B and C marked as + extremities. This should resolve to B and C being marked as extremity. + """ + # Create the room graph + event_id_a = self.create_and_send_event() + event_id_b = self.create_and_send_event() + event_id_sf1 = self.create_and_send_event(True, [event_id_a]) + event_id_sf2 = self.create_and_send_event(True, [event_id_a, event_id_b]) + event_id_sf3 = self.create_and_send_event(True, [event_id_sf1]) + self.create_and_send_event(True, [event_id_sf2, event_id_sf3]) # SF4 + event_id_c = self.create_and_send_event(False, [event_id_sf3]) + + # Add the new extremity and check the latest events are as expected + self.add_extremity(event_id_a) + + latest_event_ids = self.get_success( + self.store.get_latest_event_ids_in_room(self.room_id) + ) + self.assertEqual( + set(latest_event_ids), set((event_id_a, event_id_b, event_id_c)) + ) + + # Run the background update and check it did the right thing + self.run_background_update() + + latest_event_ids = self.get_success( + self.store.get_latest_event_ids_in_room(self.room_id) + ) + self.assertEqual(set(latest_event_ids), set([event_id_b, event_id_c])) -- cgit 1.5.1 From 46c8f7a5170d04dfa6ad02c69667d4aa48635231 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 30 May 2019 01:47:16 +1000 Subject: Implement the SHHS complexity API (#5216) --- changelog.d/5216.misc | 1 + synapse/api/urls.py | 1 + synapse/federation/transport/server.py | 31 +++++++++++- synapse/rest/admin/__init__.py | 12 +++-- synapse/storage/events_worker.py | 50 ++++++++++++++++++- tests/federation/test_complexity.py | 90 ++++++++++++++++++++++++++++++++++ 6 files changed, 180 insertions(+), 5 deletions(-) create mode 100644 changelog.d/5216.misc create mode 100644 tests/federation/test_complexity.py (limited to 'tests') diff --git a/changelog.d/5216.misc b/changelog.d/5216.misc new file mode 100644 index 0000000000..dbfa29475f --- /dev/null +++ b/changelog.d/5216.misc @@ -0,0 +1 @@ +Synapse will now serve the experimental "room complexity" API endpoint. 
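Before the API diffs that follow, a note on what the "room complexity" figure means: the v1 score added by this commit (see get_room_complexity further down) is simply the room's current state-event count scaled so that 500 state events score 1.0. Below is a rough sketch of how a consumer might interpret the response; the threshold is an invented example, not something defined by the patch:

def complexity_v1(current_state_event_count):
    # Mirrors the v1 definition in get_room_complexity() below:
    # current state events, scaled so that 500 events == 1.0.
    return round(current_state_event_count / 500, 2)

# A remote server deciding whether to join might compare the reported value
# against its own limit (the limit below is purely illustrative).
MAX_ACCEPTABLE_COMPLEXITY = 1.0  # assumption for the example
response = {"v1": complexity_v1(750)}  # e.g. body of GET .../rooms/{room_id}/complexity
if response["v1"] > MAX_ACCEPTABLE_COMPLEXITY:
    print("room looks too resource-intensive to join")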
diff --git a/synapse/api/urls.py b/synapse/api/urls.py index 3c6bddff7a..e16c386a14 100644 --- a/synapse/api/urls.py +++ b/synapse/api/urls.py @@ -26,6 +26,7 @@ CLIENT_API_PREFIX = "/_matrix/client" FEDERATION_PREFIX = "/_matrix/federation" FEDERATION_V1_PREFIX = FEDERATION_PREFIX + "/v1" FEDERATION_V2_PREFIX = FEDERATION_PREFIX + "/v2" +FEDERATION_UNSTABLE_PREFIX = FEDERATION_PREFIX + "/unstable" STATIC_PREFIX = "/_matrix/static" WEB_CLIENT_PREFIX = "/_matrix/client" CONTENT_REPO_PREFIX = "/_matrix/content" diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 385eda2dca..d0efc4e0d3 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -23,7 +23,11 @@ from twisted.internet import defer import synapse from synapse.api.errors import Codes, FederationDeniedError, SynapseError from synapse.api.room_versions import RoomVersions -from synapse.api.urls import FEDERATION_V1_PREFIX, FEDERATION_V2_PREFIX +from synapse.api.urls import ( + FEDERATION_UNSTABLE_PREFIX, + FEDERATION_V1_PREFIX, + FEDERATION_V2_PREFIX, +) from synapse.http.endpoint import parse_and_validate_server_name from synapse.http.server import JsonResource from synapse.http.servlet import ( @@ -1304,6 +1308,30 @@ class FederationGroupsSettingJoinPolicyServlet(BaseFederationServlet): defer.returnValue((200, new_content)) +class RoomComplexityServlet(BaseFederationServlet): + """ + Indicates to other servers how complex (and therefore likely + resource-intensive) a public room this server knows about is. + """ + PATH = "/rooms/(?P[^/]*)/complexity" + PREFIX = FEDERATION_UNSTABLE_PREFIX + + @defer.inlineCallbacks + def on_GET(self, origin, content, query, room_id): + + store = self.handler.hs.get_datastore() + + is_public = yield store.is_room_world_readable_or_publicly_joinable( + room_id + ) + + if not is_public: + raise SynapseError(404, "Room not found", errcode=Codes.INVALID_PARAM) + + complexity = yield store.get_room_complexity(room_id) + defer.returnValue((200, complexity)) + + FEDERATION_SERVLET_CLASSES = ( FederationSendServlet, FederationEventServlet, @@ -1327,6 +1355,7 @@ FEDERATION_SERVLET_CLASSES = ( FederationThirdPartyInviteExchangeServlet, On3pidBindServlet, FederationVersionServlet, + RoomComplexityServlet, ) OPENID_SERVLET_CLASSES = ( diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 744d85594f..d6c4dcdb18 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -822,10 +822,16 @@ class AdminRestResource(JsonResource): def __init__(self, hs): JsonResource.__init__(self, hs, canonical_json=False) + register_servlets(hs, self) - register_servlets_for_client_rest_resource(hs, self) - SendServerNoticeServlet(hs).register(self) - VersionServlet(hs).register(self) + +def register_servlets(hs, http_server): + """ + Register all the admin servlets. + """ + register_servlets_for_client_rest_resource(hs, http_server) + SendServerNoticeServlet(hs).register(http_server) + VersionServlet(hs).register(http_server) def register_servlets_for_client_rest_resource(hs, http_server): diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 21b353cad3..b56c83e460 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import division + import itertools import logging from collections import namedtuple @@ -614,7 +616,7 @@ class EventsWorkerStore(SQLBaseStore): def _get_total_state_event_counts_txn(self, txn, room_id): """ - See get_state_event_counts. + See get_total_state_event_counts. """ sql = "SELECT COUNT(*) FROM state_events WHERE room_id=?" txn.execute(sql, (room_id,)) @@ -635,3 +637,49 @@ class EventsWorkerStore(SQLBaseStore): "get_total_state_event_counts", self._get_total_state_event_counts_txn, room_id ) + + def _get_current_state_event_counts_txn(self, txn, room_id): + """ + See get_current_state_event_counts. + """ + sql = "SELECT COUNT(*) FROM current_state_events WHERE room_id=?" + txn.execute(sql, (room_id,)) + row = txn.fetchone() + return row[0] if row else 0 + + def get_current_state_event_counts(self, room_id): + """ + Gets the current number of state events in a room. + + Args: + room_id (str) + + Returns: + Deferred[int] + """ + return self.runInteraction( + "get_current_state_event_counts", + self._get_current_state_event_counts_txn, room_id + ) + + @defer.inlineCallbacks + def get_room_complexity(self, room_id): + """ + Get a rough approximation of the complexity of the room. This is used by + remote servers to decide whether they wish to join the room or not. + Higher complexity value indicates that being in the room will consume + more resources. + + Args: + room_id (str) + + Returns: + Deferred[dict[str:int]] of complexity version to complexity. + """ + state_events = yield self.get_current_state_event_counts(room_id) + + # Call this one "v1", so we can introduce new ones as we want to develop + # it. + complexity_v1 = round(state_events / 500, 2) + + defer.returnValue({"v1": complexity_v1}) diff --git a/tests/federation/test_complexity.py b/tests/federation/test_complexity.py new file mode 100644 index 0000000000..1e3e5aec66 --- /dev/null +++ b/tests/federation/test_complexity.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 Matrix.org Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from twisted.internet import defer + +from synapse.config.ratelimiting import FederationRateLimitConfig +from synapse.federation.transport import server +from synapse.rest import admin +from synapse.rest.client.v1 import login, room +from synapse.util.ratelimitutils import FederationRateLimiter + +from tests import unittest + + +class RoomComplexityTests(unittest.HomeserverTestCase): + + servlets = [ + admin.register_servlets, + room.register_servlets, + login.register_servlets, + ] + + def default_config(self, name='test'): + config = super(RoomComplexityTests, self).default_config(name=name) + config["limit_large_remote_room_joins"] = True + config["limit_large_remote_room_complexity"] = 0.05 + return config + + def prepare(self, reactor, clock, homeserver): + class Authenticator(object): + def authenticate_request(self, request, content): + return defer.succeed("otherserver.nottld") + + ratelimiter = FederationRateLimiter( + clock, + FederationRateLimitConfig( + window_size=1, + sleep_limit=1, + sleep_msec=1, + reject_limit=1000, + concurrent_requests=1000, + ), + ) + server.register_servlets( + homeserver, self.resource, Authenticator(), ratelimiter + ) + + def test_complexity_simple(self): + + u1 = self.register_user("u1", "pass") + u1_token = self.login("u1", "pass") + + room_1 = self.helper.create_room_as(u1, tok=u1_token) + self.helper.send_state( + room_1, event_type="m.room.topic", body={"topic": "foo"}, tok=u1_token + ) + + # Get the room complexity + request, channel = self.make_request( + "GET", "/_matrix/federation/unstable/rooms/%s/complexity" % (room_1,) + ) + self.render(request) + self.assertEquals(200, channel.code) + complexity = channel.json_body["v1"] + self.assertTrue(complexity > 0, complexity) + + # Artificially raise the complexity + store = self.hs.get_datastore() + store.get_current_state_event_counts = lambda x: defer.succeed(500 * 1.23) + + # Get the room complexity again -- make sure it's our artificial value + request, channel = self.make_request( + "GET", "/_matrix/federation/unstable/rooms/%s/complexity" % (room_1,) + ) + self.render(request) + self.assertEquals(200, channel.code) + complexity = channel.json_body["v1"] + self.assertEqual(complexity, 1.23) -- cgit 1.5.1 From 847b9dcd1c9d7d7a43333e85f69dc78471095475 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 31 May 2019 09:54:46 +0100 Subject: Make max_delta equal to period * 10% --- synapse/config/registration.py | 15 ++++----------- synapse/storage/_base.py | 7 +++---- tests/rest/client/v2_alpha/test_register.py | 18 +----------------- 3 files changed, 8 insertions(+), 32 deletions(-) (limited to 'tests') diff --git a/synapse/config/registration.py b/synapse/config/registration.py index b4fd4af368..1835b4b1f3 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -39,9 +39,7 @@ class AccountValidityConfig(Config): else: self.renew_email_subject = "Renew your %(app)s account" - self.startup_job_max_delta = self.parse_duration( - config.get("startup_job_max_delta", 0), - ) + self.startup_job_max_delta = self.period * 10. / 100. 
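To make the effect of the 10% delta concrete, here is a small illustrative sketch of how the startup job spreads expiration dates (simplified names; at this point in the series the random offset is added above now + period, and a later commit below moves it to the other side of the range):

import random

rand = random.SystemRandom()

def pick_expiration_ts(now_ms, period_ms):
    # 10% of the validity period, mirroring startup_job_max_delta above
    # (kept integral here so that randrange() accepts it).
    max_delta = period_ms // 10
    base = now_ms + period_ms
    # Spread accounts across [now + period, now + period + max_delta] so the
    # renewal emails triggered by the startup job are not all sent at once.
    return rand.randrange(base, base + max_delta)

# Example: a 6-week validity period scatters expirations over a ~4.2 day window.
print(pick_expiration_ts(now_ms=0, period_ms=6 * 7 * 24 * 3600 * 1000))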
if self.renew_by_email_enabled and "public_baseurl" not in synapse_config: raise ConfigError("Can't send renewal emails without 'public_baseurl'") @@ -133,20 +131,15 @@ class RegistrationConfig(Config): # This means that, if a validity period is set, and Synapse is restarted (it will # then derive an expiration date from the current validity period), and some time # after that the validity period changes and Synapse is restarted, the users' - # expiration dates won't be updated unless their account is manually renewed. - # - # If set, the ``startup_job_max_delta`` optional setting will make the startup job - # described above set a random expiration date between t + period and - # t + period + startup_job_max_delta, t being the date and time at which the job - # sets the expiration date for a given user. This is useful for server admins that - # want to avoid Synapse sending a lot of renewal emails at once. + # expiration dates won't be updated unless their account is manually renewed. This + # date will be randomly selected within a range [now + period ; now + period + d], + # where d is equal to 10% of the validity period. # #account_validity: # enabled: True # period: 6w # renew_at: 1w # renew_email_subject: "Renew your %%(app)s account" - # startup_job_max_delta: 2d # The user must provide all of the below types of 3PID when registering. # diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 40802fd3dc..7f944ec717 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -329,14 +329,13 @@ class SQLBaseStore(object): user_id (str): User ID to set an expiration date for. use_delta (bool): If set to False, the expiration date for the user will be now + validity period. If set to True, this expiration date will be a - random value in the [now + period; now + period + max_delta] range, - max_delta being the configured value for the size of the range, unless - delta is 0, in which case it sets it to now + period. + random value in the [now + period; now + period + d] range, d being a + delta equal to 10% of the validity period. """ now_ms = self._clock.time_msec() expiration_ts = now_ms + self._account_validity.period - if use_delta and self._account_validity.startup_job_max_delta: + if use_delta: expiration_ts = self.rand.randrange( expiration_ts, expiration_ts + self._account_validity.startup_job_max_delta, diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index 68654e25ab..711628ded1 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -436,7 +436,7 @@ class AccountValidityBackgroundJobTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor, clock): self.validity_period = 10 - self.max_delta = 10 + self.max_delta = self.validity_period * 10. / 100. config = self.default_config() @@ -453,22 +453,6 @@ class AccountValidityBackgroundJobTestCase(unittest.HomeserverTestCase): return self.hs def test_background_job(self): - """ - Tests whether the account validity startup background job does the right thing, - which is sticking an expiration date to every account that doesn't already have - one. 
- """ - user_id = self.register_user("kermit", "user") - - self.hs.config.account_validity.startup_job_max_delta = 0 - - now_ms = self.hs.clock.time_msec() - self.get_success(self.store._set_expiration_date_when_missing()) - - res = self.get_success(self.store.get_expiration_ts_for_user(user_id)) - self.assertEqual(res, now_ms + self.validity_period) - - def test_background_job_with_max_delta(self): """ Tests the same thing as test_background_job, except that it sets the startup_job_max_delta parameter and checks that the expiration date is within the -- cgit 1.5.1 From 4d794dae210ce30e87d8a6b9ee2f9b481cadf539 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 31 May 2019 11:09:34 +0100 Subject: Move delta from +10% to -10% --- synapse/config/registration.py | 2 +- synapse/storage/_base.py | 4 ++-- tests/rest/client/v2_alpha/test_register.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) (limited to 'tests') diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 4af825a2ab..aad3400819 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -132,7 +132,7 @@ class RegistrationConfig(Config): # then derive an expiration date from the current validity period), and some time # after that the validity period changes and Synapse is restarted, the users' # expiration dates won't be updated unless their account is manually renewed. This - # date will be randomly selected within a range [now + period ; now + period + d], + # date will be randomly selected within a range [now + period - d ; now + period], # where d is equal to 10%% of the validity period. # #account_validity: diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 7f944ec717..086318a530 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -329,7 +329,7 @@ class SQLBaseStore(object): user_id (str): User ID to set an expiration date for. use_delta (bool): If set to False, the expiration date for the user will be now + validity period. If set to True, this expiration date will be a - random value in the [now + period; now + period + d] range, d being a + random value in the [now + period - d ; now + period] range, d being a delta equal to 10% of the validity period. 
""" now_ms = self._clock.time_msec() @@ -337,8 +337,8 @@ class SQLBaseStore(object): if use_delta: expiration_ts = self.rand.randrange( + expiration_ts - self._account_validity.startup_job_max_delta, expiration_ts, - expiration_ts + self._account_validity.startup_job_max_delta, ) self._simple_insert_txn( diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index 711628ded1..0cb6a363d6 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -467,5 +467,5 @@ class AccountValidityBackgroundJobTestCase(unittest.HomeserverTestCase): res = self.get_success(self.store.get_expiration_ts_for_user(user_id)) - self.assertLessEqual(res, now_ms + self.validity_period + self.max_delta) - self.assertGreaterEqual(res, now_ms + self.validity_period) + self.assertGreaterEqual(res, now_ms + self.validity_period - self.max_delta) + self.assertLessEqual(res, now_ms + self.validity_period) -- cgit 1.5.1 From 93003aa1720af846f238bd0c6fd2f2a0df3c20ef Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Sat, 1 Jun 2019 11:13:49 +0100 Subject: add some tests --- tests/rest/client/v1/test_profile.py | 62 ++++++++++++++++++++++++++++++++++-- 1 file changed, 60 insertions(+), 2 deletions(-) (limited to 'tests') diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py index 769c37ce52..f4d0d48dad 100644 --- a/tests/rest/client/v1/test_profile.py +++ b/tests/rest/client/v1/test_profile.py @@ -14,6 +14,8 @@ # limitations under the License. """Tests REST events for /profile paths.""" +import json + from mock import Mock from twisted.internet import defer @@ -31,8 +33,11 @@ myid = "@1234ABCD:test" PATH_PREFIX = "/_matrix/client/api/v1" -class ProfileTestCase(unittest.TestCase): - """ Tests profile management. """ +class MockHandlerProfileTestCase(unittest.TestCase): + """ Tests rest layer of profile management. 
+ + Todo: move these into ProfileTestCase + """ @defer.inlineCallbacks def setUp(self): @@ -159,6 +164,59 @@ class ProfileTestCase(unittest.TestCase): self.assertEquals(mocked_set.call_args[0][2], "http://my.server/pic.gif") +class ProfileTestCase(unittest.HomeserverTestCase): + + servlets = [ + admin.register_servlets_for_client_rest_resource, + login.register_servlets, + profile.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + self.hs = self.setup_test_homeserver() + return self.hs + + def prepare(self, reactor, clock, hs): + self.owner = self.register_user("owner", "pass") + self.owner_tok = self.login("owner", "pass") + + def test_set_displayname(self): + request, channel = self.make_request( + "PUT", + "/profile/%s/displayname" % (self.owner, ), + content=json.dumps({"displayname": "test"}), + access_token=self.owner_tok, + ) + self.render(request) + self.assertEqual(channel.code, 200, channel.result) + + res = self.get_displayname() + self.assertEqual(res, "test") + + def test_set_displayname_too_long(self): + """Attempts to set a stupid displayname should get a 400""" + request, channel = self.make_request( + "PUT", + "/profile/%s/displayname" % (self.owner, ), + content=json.dumps({"displayname": "test" * 100}), + access_token=self.owner_tok, + ) + self.render(request) + self.assertEqual(channel.code, 400, channel.result) + + res = self.get_displayname() + self.assertEqual(res, "owner") + + def get_displayname(self): + request, channel = self.make_request( + "GET", + "/profile/%s/displayname" % (self.owner, ), + ) + self.render(request) + self.assertEqual(channel.code, 200, channel.result) + return channel.json_body["displayname"] + + class ProfilesRestrictedTestCase(unittest.HomeserverTestCase): servlets = [ -- cgit 1.5.1 From 2889b055543c8db6bf93eaad7035d0eca1ec2874 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 3 Jun 2019 21:28:59 +1000 Subject: Unify v1 and v2 REST client APIs (#5226) --- changelog.d/5226.misc | 1 + scripts-dev/list_url_patterns.py | 4 +- synapse/app/frontend_proxy.py | 11 +- synapse/rest/client/v1/base.py | 65 ---------- synapse/rest/client/v1/directory.py | 28 +++-- synapse/rest/client/v1/events.py | 17 +-- synapse/rest/client/v1/initial_sync.py | 12 +- synapse/rest/client/v1/login.py | 22 ++-- synapse/rest/client/v1/logout.py | 19 +-- synapse/rest/client/v1/presence.py | 13 +- synapse/rest/client/v1/profile.py | 29 +++-- synapse/rest/client/v1/push_rule.py | 12 +- synapse/rest/client/v1/pusher.py | 21 ++-- synapse/rest/client/v1/room.py | 137 ++++++++++++--------- synapse/rest/client/v1/voip.py | 11 +- synapse/rest/client/v2_alpha/_base.py | 6 +- synapse/rest/client/v2_alpha/account.py | 20 +-- synapse/rest/client/v2_alpha/account_data.py | 6 +- synapse/rest/client/v2_alpha/account_validity.py | 6 +- synapse/rest/client/v2_alpha/auth.py | 4 +- synapse/rest/client/v2_alpha/capabilities.py | 4 +- synapse/rest/client/v2_alpha/devices.py | 8 +- synapse/rest/client/v2_alpha/filter.py | 6 +- synapse/rest/client/v2_alpha/groups.py | 50 ++++---- synapse/rest/client/v2_alpha/keys.py | 10 +- synapse/rest/client/v2_alpha/notifications.py | 4 +- synapse/rest/client/v2_alpha/openid.py | 4 +- synapse/rest/client/v2_alpha/read_marker.py | 4 +- synapse/rest/client/v2_alpha/receipts.py | 4 +- synapse/rest/client/v2_alpha/register.py | 10 +- synapse/rest/client/v2_alpha/relations.py | 12 +- synapse/rest/client/v2_alpha/report_event.py | 4 +- synapse/rest/client/v2_alpha/room_keys.py | 8 +- .../client/v2_alpha/room_upgrade_rest_servlet.py | 4 +- 
synapse/rest/client/v2_alpha/sendtodevice.py | 4 +- synapse/rest/client/v2_alpha/sync.py | 4 +- synapse/rest/client/v2_alpha/tags.py | 6 +- synapse/rest/client/v2_alpha/thirdparty.py | 10 +- synapse/rest/client/v2_alpha/tokenrefresh.py | 4 +- synapse/rest/client/v2_alpha/user_directory.py | 4 +- tests/__init__.py | 2 +- tests/rest/admin/test_admin.py | 1 - tests/rest/client/v1/test_profile.py | 2 +- 43 files changed, 296 insertions(+), 317 deletions(-) create mode 100644 changelog.d/5226.misc delete mode 100644 synapse/rest/client/v1/base.py (limited to 'tests') diff --git a/changelog.d/5226.misc b/changelog.d/5226.misc new file mode 100644 index 0000000000..e1b9dc58a3 --- /dev/null +++ b/changelog.d/5226.misc @@ -0,0 +1 @@ +The base classes for the v1 and v2_alpha REST APIs have been unified. diff --git a/scripts-dev/list_url_patterns.py b/scripts-dev/list_url_patterns.py index da027be26e..62e5a07472 100755 --- a/scripts-dev/list_url_patterns.py +++ b/scripts-dev/list_url_patterns.py @@ -20,9 +20,7 @@ class CallVisitor(ast.NodeVisitor): else: return - if name == "client_path_patterns": - PATTERNS_V1.append(node.args[0].s) - elif name == "client_v2_patterns": + if name == "client_patterns": PATTERNS_V2.append(node.args[0].s) diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index 8479fee738..6504da5278 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -37,8 +37,7 @@ from synapse.replication.slave.storage.client_ips import SlavedClientIpStore from synapse.replication.slave.storage.devices import SlavedDeviceStore from synapse.replication.slave.storage.registration import SlavedRegistrationStore from synapse.replication.tcp.client import ReplicationClientHandler -from synapse.rest.client.v1.base import ClientV1RestServlet, client_path_patterns -from synapse.rest.client.v2_alpha._base import client_v2_patterns +from synapse.rest.client.v2_alpha._base import client_patterns from synapse.server import HomeServer from synapse.storage.engines import create_engine from synapse.util.httpresourcetree import create_resource_tree @@ -49,11 +48,11 @@ from synapse.util.versionstring import get_version_string logger = logging.getLogger("synapse.app.frontend_proxy") -class PresenceStatusStubServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/presence/(?P[^/]*)/status") +class PresenceStatusStubServlet(RestServlet): + PATTERNS = client_patterns("/presence/(?P[^/]*)/status") def __init__(self, hs): - super(PresenceStatusStubServlet, self).__init__(hs) + super(PresenceStatusStubServlet, self).__init__() self.http_client = hs.get_simple_http_client() self.auth = hs.get_auth() self.main_uri = hs.config.worker_main_http_uri @@ -84,7 +83,7 @@ class PresenceStatusStubServlet(ClientV1RestServlet): class KeyUploadServlet(RestServlet): - PATTERNS = client_v2_patterns("/keys/upload(/(?P[^/]+))?$") + PATTERNS = client_patterns("/keys/upload(/(?P[^/]+))?$") def __init__(self, hs): """ diff --git a/synapse/rest/client/v1/base.py b/synapse/rest/client/v1/base.py deleted file mode 100644 index dc63b661c0..0000000000 --- a/synapse/rest/client/v1/base.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2014-2016 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This module contains base REST classes for constructing client v1 servlets. -""" - -import logging -import re - -from synapse.api.urls import CLIENT_API_PREFIX -from synapse.http.servlet import RestServlet -from synapse.rest.client.transactions import HttpTransactionCache - -logger = logging.getLogger(__name__) - - -def client_path_patterns(path_regex, releases=(0,), include_in_unstable=True): - """Creates a regex compiled client path with the correct client path - prefix. - - Args: - path_regex (str): The regex string to match. This should NOT have a ^ - as this will be prefixed. - Returns: - SRE_Pattern - """ - patterns = [re.compile("^" + CLIENT_API_PREFIX + "/api/v1" + path_regex)] - if include_in_unstable: - unstable_prefix = CLIENT_API_PREFIX + "/unstable" - patterns.append(re.compile("^" + unstable_prefix + path_regex)) - for release in releases: - new_prefix = CLIENT_API_PREFIX + "/r%d" % (release,) - patterns.append(re.compile("^" + new_prefix + path_regex)) - return patterns - - -class ClientV1RestServlet(RestServlet): - """A base Synapse REST Servlet for the client version 1 API. - """ - - # This subclass was presumably created to allow the auth for the v1 - # protocol version to be different, however this behaviour was removed. - # it may no longer be necessary - - def __init__(self, hs): - """ - Args: - hs (synapse.server.HomeServer): - """ - self.hs = hs - self.builder_factory = hs.get_event_builder_factory() - self.auth = hs.get_auth() - self.txns = HttpTransactionCache(hs) diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py index 0220acf644..0035182bb9 100644 --- a/synapse/rest/client/v1/directory.py +++ b/synapse/rest/client/v1/directory.py @@ -19,11 +19,10 @@ import logging from twisted.internet import defer from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError -from synapse.http.servlet import parse_json_object_from_request +from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.rest.client.v2_alpha._base import client_patterns from synapse.types import RoomAlias -from .base import ClientV1RestServlet, client_path_patterns - logger = logging.getLogger(__name__) @@ -33,13 +32,14 @@ def register_servlets(hs, http_server): ClientAppserviceDirectoryListServer(hs).register(http_server) -class ClientDirectoryServer(ClientV1RestServlet): - PATTERNS = client_path_patterns("/directory/room/(?P[^/]*)$") +class ClientDirectoryServer(RestServlet): + PATTERNS = client_patterns("/directory/room/(?P[^/]*)$", v1=True) def __init__(self, hs): - super(ClientDirectoryServer, self).__init__(hs) + super(ClientDirectoryServer, self).__init__() self.store = hs.get_datastore() self.handlers = hs.get_handlers() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, room_alias): @@ -120,13 +120,14 @@ class ClientDirectoryServer(ClientV1RestServlet): defer.returnValue((200, {})) -class ClientDirectoryListServer(ClientV1RestServlet): - PATTERNS = client_path_patterns("/directory/list/room/(?P[^/]*)$") +class ClientDirectoryListServer(RestServlet): + PATTERNS = 
client_patterns("/directory/list/room/(?P[^/]*)$", v1=True) def __init__(self, hs): - super(ClientDirectoryListServer, self).__init__(hs) + super(ClientDirectoryListServer, self).__init__() self.store = hs.get_datastore() self.handlers = hs.get_handlers() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, room_id): @@ -162,15 +163,16 @@ class ClientDirectoryListServer(ClientV1RestServlet): defer.returnValue((200, {})) -class ClientAppserviceDirectoryListServer(ClientV1RestServlet): - PATTERNS = client_path_patterns( - "/directory/list/appservice/(?P[^/]*)/(?P[^/]*)$" +class ClientAppserviceDirectoryListServer(RestServlet): + PATTERNS = client_patterns( + "/directory/list/appservice/(?P[^/]*)/(?P[^/]*)$", v1=True ) def __init__(self, hs): - super(ClientAppserviceDirectoryListServer, self).__init__(hs) + super(ClientAppserviceDirectoryListServer, self).__init__() self.store = hs.get_datastore() self.handlers = hs.get_handlers() + self.auth = hs.get_auth() def on_PUT(self, request, network_id, room_id): content = parse_json_object_from_request(request) diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py index c3b0a39ab7..84ca36270b 100644 --- a/synapse/rest/client/v1/events.py +++ b/synapse/rest/client/v1/events.py @@ -19,21 +19,22 @@ import logging from twisted.internet import defer from synapse.api.errors import SynapseError +from synapse.http.servlet import RestServlet +from synapse.rest.client.v2_alpha._base import client_patterns from synapse.streams.config import PaginationConfig -from .base import ClientV1RestServlet, client_path_patterns - logger = logging.getLogger(__name__) -class EventStreamRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/events$") +class EventStreamRestServlet(RestServlet): + PATTERNS = client_patterns("/events$", v1=True) DEFAULT_LONGPOLL_TIME_MS = 30000 def __init__(self, hs): - super(EventStreamRestServlet, self).__init__(hs) + super(EventStreamRestServlet, self).__init__() self.event_stream_handler = hs.get_event_stream_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request): @@ -76,11 +77,11 @@ class EventStreamRestServlet(ClientV1RestServlet): # TODO: Unit test gets, with and without auth, with different kinds of events. 
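The new client_patterns helper that replaces client_path_patterns (and the old client_v2_patterns) is not itself shown in this excerpt. Judging from the deleted v1 helper above and the v1=True call sites, it plausibly looks something like the sketch below; treat it as a reconstruction rather than the code from the commit:

import re

CLIENT_API_PREFIX = "/_matrix/client"


def client_patterns(path_regex, releases=(0,), unstable=True, v1=False):
    """Compile a client path regex against the unstable, r0 and, when
    v1=True, the legacy /api/v1 prefixes."""
    patterns = []
    if unstable:
        patterns.append(re.compile("^" + CLIENT_API_PREFIX + "/unstable" + path_regex))
    if v1:
        # Endpoints that existed in the original v1 API keep their old prefix.
        patterns.append(re.compile("^" + CLIENT_API_PREFIX + "/api/v1" + path_regex))
    for release in releases:
        patterns.append(re.compile("^" + CLIENT_API_PREFIX + "/r%d" % (release,) + path_regex))
    return patterns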
-class EventRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/events/(?P[^/]*)$") +class EventRestServlet(RestServlet): + PATTERNS = client_patterns("/events/(?P[^/]*)$", v1=True) def __init__(self, hs): - super(EventRestServlet, self).__init__(hs) + super(EventRestServlet, self).__init__() self.clock = hs.get_clock() self.event_handler = hs.get_event_handler() self._event_serializer = hs.get_event_client_serializer() diff --git a/synapse/rest/client/v1/initial_sync.py b/synapse/rest/client/v1/initial_sync.py index 3ead75cb77..0fe5f2d79b 100644 --- a/synapse/rest/client/v1/initial_sync.py +++ b/synapse/rest/client/v1/initial_sync.py @@ -15,19 +15,19 @@ from twisted.internet import defer -from synapse.http.servlet import parse_boolean +from synapse.http.servlet import RestServlet, parse_boolean +from synapse.rest.client.v2_alpha._base import client_patterns from synapse.streams.config import PaginationConfig -from .base import ClientV1RestServlet, client_path_patterns - # TODO: Needs unit testing -class InitialSyncRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/initialSync$") +class InitialSyncRestServlet(RestServlet): + PATTERNS = client_patterns("/initialSync$", v1=True) def __init__(self, hs): - super(InitialSyncRestServlet, self).__init__(hs) + super(InitialSyncRestServlet, self).__init__() self.initial_sync_handler = hs.get_initial_sync_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request): diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 029039c162..3b60728628 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -29,12 +29,11 @@ from synapse.http.servlet import ( parse_json_object_from_request, parse_string, ) +from synapse.rest.client.v2_alpha._base import client_patterns from synapse.rest.well_known import WellKnownBuilder from synapse.types import UserID, map_username_to_mxid_localpart from synapse.util.msisdn import phone_number_to_msisdn -from .base import ClientV1RestServlet, client_path_patterns - logger = logging.getLogger(__name__) @@ -81,15 +80,16 @@ def login_id_thirdparty_from_phone(identifier): } -class LoginRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/login$") +class LoginRestServlet(RestServlet): + PATTERNS = client_patterns("/login$", v1=True) CAS_TYPE = "m.login.cas" SSO_TYPE = "m.login.sso" TOKEN_TYPE = "m.login.token" JWT_TYPE = "m.login.jwt" def __init__(self, hs): - super(LoginRestServlet, self).__init__(hs) + super(LoginRestServlet, self).__init__() + self.hs = hs self.jwt_enabled = hs.config.jwt_enabled self.jwt_secret = hs.config.jwt_secret self.jwt_algorithm = hs.config.jwt_algorithm @@ -371,7 +371,7 @@ class LoginRestServlet(ClientV1RestServlet): class CasRedirectServlet(RestServlet): - PATTERNS = client_path_patterns("/login/(cas|sso)/redirect") + PATTERNS = client_patterns("/login/(cas|sso)/redirect", v1=True) def __init__(self, hs): super(CasRedirectServlet, self).__init__() @@ -394,27 +394,27 @@ class CasRedirectServlet(RestServlet): finish_request(request) -class CasTicketServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/login/cas/ticket") +class CasTicketServlet(RestServlet): + PATTERNS = client_patterns("/login/cas/ticket", v1=True) def __init__(self, hs): - super(CasTicketServlet, self).__init__(hs) + super(CasTicketServlet, self).__init__() self.cas_server_url = hs.config.cas_server_url self.cas_service_url = hs.config.cas_service_url self.cas_required_attributes = 
hs.config.cas_required_attributes self._sso_auth_handler = SSOAuthHandler(hs) + self._http_client = hs.get_simple_http_client() @defer.inlineCallbacks def on_GET(self, request): client_redirect_url = parse_string(request, "redirectUrl", required=True) - http_client = self.hs.get_simple_http_client() uri = self.cas_server_url + "/proxyValidate" args = { "ticket": parse_string(request, "ticket", required=True), "service": self.cas_service_url } try: - body = yield http_client.get_raw(uri, args) + body = yield self._http_client.get_raw(uri, args) except PartialDownloadError as pde: # Twisted raises this error if the connection is closed, # even if that's being used old-http style to signal end-of-data diff --git a/synapse/rest/client/v1/logout.py b/synapse/rest/client/v1/logout.py index ba20e75033..b8064f261e 100644 --- a/synapse/rest/client/v1/logout.py +++ b/synapse/rest/client/v1/logout.py @@ -17,17 +17,18 @@ import logging from twisted.internet import defer -from .base import ClientV1RestServlet, client_path_patterns +from synapse.http.servlet import RestServlet +from synapse.rest.client.v2_alpha._base import client_patterns logger = logging.getLogger(__name__) -class LogoutRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/logout$") +class LogoutRestServlet(RestServlet): + PATTERNS = client_patterns("/logout$", v1=True) def __init__(self, hs): - super(LogoutRestServlet, self).__init__(hs) - self._auth = hs.get_auth() + super(LogoutRestServlet, self).__init__() + self.auth = hs.get_auth() self._auth_handler = hs.get_auth_handler() self._device_handler = hs.get_device_handler() @@ -41,7 +42,7 @@ class LogoutRestServlet(ClientV1RestServlet): if requester.device_id is None: # the acccess token wasn't associated with a device. # Just delete the access token - access_token = self._auth.get_access_token_from_request(request) + access_token = self.auth.get_access_token_from_request(request) yield self._auth_handler.delete_access_token(access_token) else: yield self._device_handler.delete_device( @@ -50,11 +51,11 @@ class LogoutRestServlet(ClientV1RestServlet): defer.returnValue((200, {})) -class LogoutAllRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/logout/all$") +class LogoutAllRestServlet(RestServlet): + PATTERNS = client_patterns("/logout/all$", v1=True) def __init__(self, hs): - super(LogoutAllRestServlet, self).__init__(hs) + super(LogoutAllRestServlet, self).__init__() self.auth = hs.get_auth() self._auth_handler = hs.get_auth_handler() self._device_handler = hs.get_device_handler() diff --git a/synapse/rest/client/v1/presence.py b/synapse/rest/client/v1/presence.py index 045d5a20ac..e263da3cb7 100644 --- a/synapse/rest/client/v1/presence.py +++ b/synapse/rest/client/v1/presence.py @@ -23,21 +23,22 @@ from twisted.internet import defer from synapse.api.errors import AuthError, SynapseError from synapse.handlers.presence import format_user_presence_state -from synapse.http.servlet import parse_json_object_from_request +from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.rest.client.v2_alpha._base import client_patterns from synapse.types import UserID -from .base import ClientV1RestServlet, client_path_patterns - logger = logging.getLogger(__name__) -class PresenceStatusRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/presence/(?P[^/]*)/status") +class PresenceStatusRestServlet(RestServlet): + PATTERNS = client_patterns("/presence/(?P[^/]*)/status", v1=True) def __init__(self, hs): - 
super(PresenceStatusRestServlet, self).__init__(hs) + super(PresenceStatusRestServlet, self).__init__() + self.hs = hs self.presence_handler = hs.get_presence_handler() self.clock = hs.get_clock() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, user_id): diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py index eac1966c5e..e15d9d82a6 100644 --- a/synapse/rest/client/v1/profile.py +++ b/synapse/rest/client/v1/profile.py @@ -16,18 +16,19 @@ """ This module contains REST servlets to do with profile: /profile/ """ from twisted.internet import defer -from synapse.http.servlet import parse_json_object_from_request +from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.rest.client.v2_alpha._base import client_patterns from synapse.types import UserID -from .base import ClientV1RestServlet, client_path_patterns - -class ProfileDisplaynameRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/profile/(?P[^/]*)/displayname") +class ProfileDisplaynameRestServlet(RestServlet): + PATTERNS = client_patterns("/profile/(?P[^/]*)/displayname", v1=True) def __init__(self, hs): - super(ProfileDisplaynameRestServlet, self).__init__(hs) + super(ProfileDisplaynameRestServlet, self).__init__() + self.hs = hs self.profile_handler = hs.get_profile_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, user_id): @@ -71,12 +72,14 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet): return (200, {}) -class ProfileAvatarURLRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/profile/(?P[^/]*)/avatar_url") +class ProfileAvatarURLRestServlet(RestServlet): + PATTERNS = client_patterns("/profile/(?P[^/]*)/avatar_url", v1=True) def __init__(self, hs): - super(ProfileAvatarURLRestServlet, self).__init__(hs) + super(ProfileAvatarURLRestServlet, self).__init__() + self.hs = hs self.profile_handler = hs.get_profile_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, user_id): @@ -119,12 +122,14 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet): return (200, {}) -class ProfileRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/profile/(?P[^/]*)") +class ProfileRestServlet(RestServlet): + PATTERNS = client_patterns("/profile/(?P[^/]*)", v1=True) def __init__(self, hs): - super(ProfileRestServlet, self).__init__(hs) + super(ProfileRestServlet, self).__init__() + self.hs = hs self.profile_handler = hs.get_profile_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, user_id): diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index 506ec95ddd..3d6326fe2f 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -21,22 +21,22 @@ from synapse.api.errors import ( SynapseError, UnrecognizedRequestError, ) -from synapse.http.servlet import parse_json_value_from_request, parse_string +from synapse.http.servlet import RestServlet, parse_json_value_from_request, parse_string from synapse.push.baserules import BASE_RULE_IDS from synapse.push.clientformat import format_push_rules_for_user from synapse.push.rulekinds import PRIORITY_CLASS_MAP +from synapse.rest.client.v2_alpha._base import client_patterns from synapse.storage.push_rule import InconsistentRuleException, RuleNotFoundException -from .base import ClientV1RestServlet, client_path_patterns - -class PushRuleRestServlet(ClientV1RestServlet): - PATTERNS = 
client_path_patterns("/(?Ppushrules/.*)$") +class PushRuleRestServlet(RestServlet): + PATTERNS = client_patterns("/(?Ppushrules/.*)$", v1=True) SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR = ( "Unrecognised request: You probably wanted a trailing slash") def __init__(self, hs): - super(PushRuleRestServlet, self).__init__(hs) + super(PushRuleRestServlet, self).__init__() + self.auth = hs.get_auth() self.store = hs.get_datastore() self.notifier = hs.get_notifier() self._is_worker = hs.config.worker_app is not None diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py index 4c07ae7f45..15d860db37 100644 --- a/synapse/rest/client/v1/pusher.py +++ b/synapse/rest/client/v1/pusher.py @@ -26,17 +26,18 @@ from synapse.http.servlet import ( parse_string, ) from synapse.push import PusherConfigException - -from .base import ClientV1RestServlet, client_path_patterns +from synapse.rest.client.v2_alpha._base import client_patterns logger = logging.getLogger(__name__) -class PushersRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/pushers$") +class PushersRestServlet(RestServlet): + PATTERNS = client_patterns("/pushers$", v1=True) def __init__(self, hs): - super(PushersRestServlet, self).__init__(hs) + super(PushersRestServlet, self).__init__() + self.hs = hs + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request): @@ -69,11 +70,13 @@ class PushersRestServlet(ClientV1RestServlet): return 200, {} -class PushersSetRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/pushers/set$") +class PushersSetRestServlet(RestServlet): + PATTERNS = client_patterns("/pushers/set$", v1=True) def __init__(self, hs): - super(PushersSetRestServlet, self).__init__(hs) + super(PushersSetRestServlet, self).__init__() + self.hs = hs + self.auth = hs.get_auth() self.notifier = hs.get_notifier() self.pusher_pool = self.hs.get_pusherpool() @@ -141,7 +144,7 @@ class PushersRemoveRestServlet(RestServlet): """ To allow pusher to be delete by clicking a link (ie. 
GET request) """ - PATTERNS = client_path_patterns("/pushers/remove$") + PATTERNS = client_patterns("/pushers/remove$", v1=True) SUCCESS_HTML = b"You have been unsubscribed" def __init__(self, hs): diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index b92c6a9a9c..e8f672c4ba 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -28,37 +28,45 @@ from synapse.api.errors import AuthError, Codes, SynapseError from synapse.api.filtering import Filter from synapse.events.utils import format_event_for_client_v2 from synapse.http.servlet import ( + RestServlet, assert_params_in_dict, parse_integer, parse_json_object_from_request, parse_string, ) +from synapse.rest.client.transactions import HttpTransactionCache +from synapse.rest.client.v2_alpha._base import client_patterns from synapse.storage.state import StateFilter from synapse.streams.config import PaginationConfig from synapse.types import RoomAlias, RoomID, StreamToken, ThirdPartyInstanceID, UserID -from .base import ClientV1RestServlet, client_path_patterns - logger = logging.getLogger(__name__) -class RoomCreateRestServlet(ClientV1RestServlet): +class TransactionRestServlet(RestServlet): + def __init__(self, hs): + super(TransactionRestServlet, self).__init__() + self.txns = HttpTransactionCache(hs) + + +class RoomCreateRestServlet(TransactionRestServlet): # No PATTERN; we have custom dispatch rules here def __init__(self, hs): super(RoomCreateRestServlet, self).__init__(hs) self._room_creation_handler = hs.get_room_creation_handler() + self.auth = hs.get_auth() def register(self, http_server): PATTERNS = "/createRoom" register_txn_path(self, PATTERNS, http_server) # define CORS for all of /rooms in RoomCreateRestServlet for simplicity http_server.register_paths("OPTIONS", - client_path_patterns("/rooms(?:/.*)?$"), + client_patterns("/rooms(?:/.*)?$", v1=True), self.on_OPTIONS) # define CORS for /createRoom[/txnid] http_server.register_paths("OPTIONS", - client_path_patterns("/createRoom(?:/.*)?$"), + client_patterns("/createRoom(?:/.*)?$", v1=True), self.on_OPTIONS) def on_PUT(self, request, txn_id): @@ -85,13 +93,14 @@ class RoomCreateRestServlet(ClientV1RestServlet): # TODO: Needs unit testing for generic events -class RoomStateEventRestServlet(ClientV1RestServlet): +class RoomStateEventRestServlet(TransactionRestServlet): def __init__(self, hs): super(RoomStateEventRestServlet, self).__init__(hs) self.handlers = hs.get_handlers() self.event_creation_handler = hs.get_event_creation_handler() self.room_member_handler = hs.get_room_member_handler() self.message_handler = hs.get_message_handler() + self.auth = hs.get_auth() def register(self, http_server): # /room/$roomid/state/$eventtype @@ -102,16 +111,16 @@ class RoomStateEventRestServlet(ClientV1RestServlet): "(?P[^/]*)/(?P[^/]*)$") http_server.register_paths("GET", - client_path_patterns(state_key), + client_patterns(state_key, v1=True), self.on_GET) http_server.register_paths("PUT", - client_path_patterns(state_key), + client_patterns(state_key, v1=True), self.on_PUT) http_server.register_paths("GET", - client_path_patterns(no_state_key), + client_patterns(no_state_key, v1=True), self.on_GET_no_state_key) http_server.register_paths("PUT", - client_path_patterns(no_state_key), + client_patterns(no_state_key, v1=True), self.on_PUT_no_state_key) def on_GET_no_state_key(self, request, room_id, event_type): @@ -185,11 +194,12 @@ class RoomStateEventRestServlet(ClientV1RestServlet): # TODO: Needs unit testing for generic events + 
feedback -class RoomSendEventRestServlet(ClientV1RestServlet): +class RoomSendEventRestServlet(TransactionRestServlet): def __init__(self, hs): super(RoomSendEventRestServlet, self).__init__(hs) self.event_creation_handler = hs.get_event_creation_handler() + self.auth = hs.get_auth() def register(self, http_server): # /rooms/$roomid/send/$event_type[/$txn_id] @@ -229,10 +239,11 @@ class RoomSendEventRestServlet(ClientV1RestServlet): # TODO: Needs unit testing for room ID + alias joins -class JoinRoomAliasServlet(ClientV1RestServlet): +class JoinRoomAliasServlet(TransactionRestServlet): def __init__(self, hs): super(JoinRoomAliasServlet, self).__init__(hs) self.room_member_handler = hs.get_room_member_handler() + self.auth = hs.get_auth() def register(self, http_server): # /join/$room_identifier[/$txn_id] @@ -291,8 +302,13 @@ class JoinRoomAliasServlet(ClientV1RestServlet): # TODO: Needs unit testing -class PublicRoomListRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/publicRooms$") +class PublicRoomListRestServlet(TransactionRestServlet): + PATTERNS = client_patterns("/publicRooms$", v1=True) + + def __init__(self, hs): + super(PublicRoomListRestServlet, self).__init__(hs) + self.hs = hs + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request): @@ -382,12 +398,13 @@ class PublicRoomListRestServlet(ClientV1RestServlet): # TODO: Needs unit testing -class RoomMemberListRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/rooms/(?P[^/]*)/members$") +class RoomMemberListRestServlet(RestServlet): + PATTERNS = client_patterns("/rooms/(?P[^/]*)/members$", v1=True) def __init__(self, hs): - super(RoomMemberListRestServlet, self).__init__(hs) + super(RoomMemberListRestServlet, self).__init__() self.message_handler = hs.get_message_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, room_id): @@ -436,12 +453,13 @@ class RoomMemberListRestServlet(ClientV1RestServlet): # deprecated in favour of /members?membership=join? 
# except it does custom AS logic and has a simpler return format -class JoinedRoomMemberListRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/rooms/(?P[^/]*)/joined_members$") +class JoinedRoomMemberListRestServlet(RestServlet): + PATTERNS = client_patterns("/rooms/(?P[^/]*)/joined_members$", v1=True) def __init__(self, hs): - super(JoinedRoomMemberListRestServlet, self).__init__(hs) + super(JoinedRoomMemberListRestServlet, self).__init__() self.message_handler = hs.get_message_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, room_id): @@ -457,12 +475,13 @@ class JoinedRoomMemberListRestServlet(ClientV1RestServlet): # TODO: Needs better unit testing -class RoomMessageListRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/rooms/(?P[^/]*)/messages$") +class RoomMessageListRestServlet(RestServlet): + PATTERNS = client_patterns("/rooms/(?P[^/]*)/messages$", v1=True) def __init__(self, hs): - super(RoomMessageListRestServlet, self).__init__(hs) + super(RoomMessageListRestServlet, self).__init__() self.pagination_handler = hs.get_pagination_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, room_id): @@ -491,12 +510,13 @@ class RoomMessageListRestServlet(ClientV1RestServlet): # TODO: Needs unit testing -class RoomStateRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/rooms/(?P[^/]*)/state$") +class RoomStateRestServlet(RestServlet): + PATTERNS = client_patterns("/rooms/(?P[^/]*)/state$", v1=True) def __init__(self, hs): - super(RoomStateRestServlet, self).__init__(hs) + super(RoomStateRestServlet, self).__init__() self.message_handler = hs.get_message_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, room_id): @@ -511,12 +531,13 @@ class RoomStateRestServlet(ClientV1RestServlet): # TODO: Needs unit testing -class RoomInitialSyncRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/rooms/(?P[^/]*)/initialSync$") +class RoomInitialSyncRestServlet(RestServlet): + PATTERNS = client_patterns("/rooms/(?P[^/]*)/initialSync$", v1=True) def __init__(self, hs): - super(RoomInitialSyncRestServlet, self).__init__(hs) + super(RoomInitialSyncRestServlet, self).__init__() self.initial_sync_handler = hs.get_initial_sync_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, room_id): @@ -530,16 +551,17 @@ class RoomInitialSyncRestServlet(ClientV1RestServlet): defer.returnValue((200, content)) -class RoomEventServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns( - "/rooms/(?P[^/]*)/event/(?P[^/]*)$" +class RoomEventServlet(RestServlet): + PATTERNS = client_patterns( + "/rooms/(?P[^/]*)/event/(?P[^/]*)$", v1=True ) def __init__(self, hs): - super(RoomEventServlet, self).__init__(hs) + super(RoomEventServlet, self).__init__() self.clock = hs.get_clock() self.event_handler = hs.get_event_handler() self._event_serializer = hs.get_event_client_serializer() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, room_id, event_id): @@ -554,16 +576,17 @@ class RoomEventServlet(ClientV1RestServlet): defer.returnValue((404, "Event not found.")) -class RoomEventContextServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns( - "/rooms/(?P[^/]*)/context/(?P[^/]*)$" +class RoomEventContextServlet(RestServlet): + PATTERNS = client_patterns( + "/rooms/(?P[^/]*)/context/(?P[^/]*)$", v1=True ) def __init__(self, hs): - super(RoomEventContextServlet, self).__init__(hs) + 
super(RoomEventContextServlet, self).__init__() self.clock = hs.get_clock() self.room_context_handler = hs.get_room_context_handler() self._event_serializer = hs.get_event_client_serializer() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request, room_id, event_id): @@ -609,10 +632,11 @@ class RoomEventContextServlet(ClientV1RestServlet): defer.returnValue((200, results)) -class RoomForgetRestServlet(ClientV1RestServlet): +class RoomForgetRestServlet(TransactionRestServlet): def __init__(self, hs): super(RoomForgetRestServlet, self).__init__(hs) self.room_member_handler = hs.get_room_member_handler() + self.auth = hs.get_auth() def register(self, http_server): PATTERNS = ("/rooms/(?P[^/]*)/forget") @@ -639,11 +663,12 @@ class RoomForgetRestServlet(ClientV1RestServlet): # TODO: Needs unit testing -class RoomMembershipRestServlet(ClientV1RestServlet): +class RoomMembershipRestServlet(TransactionRestServlet): def __init__(self, hs): super(RoomMembershipRestServlet, self).__init__(hs) self.room_member_handler = hs.get_room_member_handler() + self.auth = hs.get_auth() def register(self, http_server): # /rooms/$roomid/[invite|join|leave] @@ -722,11 +747,12 @@ class RoomMembershipRestServlet(ClientV1RestServlet): ) -class RoomRedactEventRestServlet(ClientV1RestServlet): +class RoomRedactEventRestServlet(TransactionRestServlet): def __init__(self, hs): super(RoomRedactEventRestServlet, self).__init__(hs) self.handlers = hs.get_handlers() self.event_creation_handler = hs.get_event_creation_handler() + self.auth = hs.get_auth() def register(self, http_server): PATTERNS = ("/rooms/(?P[^/]*)/redact/(?P[^/]*)") @@ -757,15 +783,16 @@ class RoomRedactEventRestServlet(ClientV1RestServlet): ) -class RoomTypingRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns( - "/rooms/(?P[^/]*)/typing/(?P[^/]*)$" +class RoomTypingRestServlet(RestServlet): + PATTERNS = client_patterns( + "/rooms/(?P[^/]*)/typing/(?P[^/]*)$", v1=True ) def __init__(self, hs): - super(RoomTypingRestServlet, self).__init__(hs) + super(RoomTypingRestServlet, self).__init__() self.presence_handler = hs.get_presence_handler() self.typing_handler = hs.get_typing_handler() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_PUT(self, request, room_id, user_id): @@ -798,14 +825,13 @@ class RoomTypingRestServlet(ClientV1RestServlet): defer.returnValue((200, {})) -class SearchRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns( - "/search$" - ) +class SearchRestServlet(RestServlet): + PATTERNS = client_patterns("/search$", v1=True) def __init__(self, hs): - super(SearchRestServlet, self).__init__(hs) + super(SearchRestServlet, self).__init__() self.handlers = hs.get_handlers() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_POST(self, request): @@ -823,12 +849,13 @@ class SearchRestServlet(ClientV1RestServlet): defer.returnValue((200, results)) -class JoinedRoomsRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/joined_rooms$") +class JoinedRoomsRestServlet(RestServlet): + PATTERNS = client_patterns("/joined_rooms$", v1=True) def __init__(self, hs): - super(JoinedRoomsRestServlet, self).__init__(hs) + super(JoinedRoomsRestServlet, self).__init__() self.store = hs.get_datastore() + self.auth = hs.get_auth() @defer.inlineCallbacks def on_GET(self, request): @@ -853,18 +880,18 @@ def register_txn_path(servlet, regex_string, http_server, with_get=False): """ http_server.register_paths( "POST", - client_path_patterns(regex_string + "$"), + 
client_patterns(regex_string + "$", v1=True), servlet.on_POST ) http_server.register_paths( "PUT", - client_path_patterns(regex_string + "/(?P[^/]*)$"), + client_patterns(regex_string + "/(?P[^/]*)$", v1=True), servlet.on_PUT ) if with_get: http_server.register_paths( "GET", - client_path_patterns(regex_string + "/(?P[^/]*)$"), + client_patterns(regex_string + "/(?P[^/]*)$", v1=True), servlet.on_GET ) diff --git a/synapse/rest/client/v1/voip.py b/synapse/rest/client/v1/voip.py index 53da905eea..0975df84cf 100644 --- a/synapse/rest/client/v1/voip.py +++ b/synapse/rest/client/v1/voip.py @@ -19,11 +19,16 @@ import hmac from twisted.internet import defer -from .base import ClientV1RestServlet, client_path_patterns +from synapse.http.servlet import RestServlet +from synapse.rest.client.v2_alpha._base import client_patterns -class VoipRestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/voip/turnServer$") +class VoipRestServlet(RestServlet): + PATTERNS = client_patterns("/voip/turnServer$", v1=True) + + def __init__(self, hs): + super(VoipRestServlet, self).__init__() + self.hs = hs @defer.inlineCallbacks def on_GET(self, request): diff --git a/synapse/rest/client/v2_alpha/_base.py b/synapse/rest/client/v2_alpha/_base.py index 24ac26bf03..5236d5d566 100644 --- a/synapse/rest/client/v2_alpha/_base.py +++ b/synapse/rest/client/v2_alpha/_base.py @@ -26,8 +26,7 @@ from synapse.api.urls import CLIENT_API_PREFIX logger = logging.getLogger(__name__) -def client_v2_patterns(path_regex, releases=(0,), - unstable=True): +def client_patterns(path_regex, releases=(0,), unstable=True, v1=False): """Creates a regex compiled client path with the correct client path prefix. @@ -41,6 +40,9 @@ def client_v2_patterns(path_regex, releases=(0,), if unstable: unstable_prefix = CLIENT_API_PREFIX + "/unstable" patterns.append(re.compile("^" + unstable_prefix + path_regex)) + if v1: + v1_prefix = CLIENT_API_PREFIX + "/api/v1" + patterns.append(re.compile("^" + v1_prefix + path_regex)) for release in releases: new_prefix = CLIENT_API_PREFIX + "/r%d" % (release,) patterns.append(re.compile("^" + new_prefix + path_regex)) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index ee069179f0..ca35dc3c83 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -30,13 +30,13 @@ from synapse.http.servlet import ( from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.threepids import check_3pid_allowed -from ._base import client_v2_patterns, interactive_auth_handler +from ._base import client_patterns, interactive_auth_handler logger = logging.getLogger(__name__) class EmailPasswordRequestTokenRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/account/password/email/requestToken$") + PATTERNS = client_patterns("/account/password/email/requestToken$") def __init__(self, hs): super(EmailPasswordRequestTokenRestServlet, self).__init__() @@ -70,7 +70,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet): class MsisdnPasswordRequestTokenRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/account/password/msisdn/requestToken$") + PATTERNS = client_patterns("/account/password/msisdn/requestToken$") def __init__(self, hs): super(MsisdnPasswordRequestTokenRestServlet, self).__init__() @@ -108,7 +108,7 @@ class MsisdnPasswordRequestTokenRestServlet(RestServlet): class PasswordRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/account/password$") + PATTERNS = 
client_patterns("/account/password$") def __init__(self, hs): super(PasswordRestServlet, self).__init__() @@ -180,7 +180,7 @@ class PasswordRestServlet(RestServlet): class DeactivateAccountRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/account/deactivate$") + PATTERNS = client_patterns("/account/deactivate$") def __init__(self, hs): super(DeactivateAccountRestServlet, self).__init__() @@ -228,7 +228,7 @@ class DeactivateAccountRestServlet(RestServlet): class EmailThreepidRequestTokenRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/account/3pid/email/requestToken$") + PATTERNS = client_patterns("/account/3pid/email/requestToken$") def __init__(self, hs): self.hs = hs @@ -263,7 +263,7 @@ class EmailThreepidRequestTokenRestServlet(RestServlet): class MsisdnThreepidRequestTokenRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/account/3pid/msisdn/requestToken$") + PATTERNS = client_patterns("/account/3pid/msisdn/requestToken$") def __init__(self, hs): self.hs = hs @@ -300,7 +300,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet): class ThreepidRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/account/3pid$") + PATTERNS = client_patterns("/account/3pid$") def __init__(self, hs): super(ThreepidRestServlet, self).__init__() @@ -364,7 +364,7 @@ class ThreepidRestServlet(RestServlet): class ThreepidDeleteRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/account/3pid/delete$") + PATTERNS = client_patterns("/account/3pid/delete$") def __init__(self, hs): super(ThreepidDeleteRestServlet, self).__init__() @@ -401,7 +401,7 @@ class ThreepidDeleteRestServlet(RestServlet): class WhoamiRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/account/whoami$") + PATTERNS = client_patterns("/account/whoami$") def __init__(self, hs): super(WhoamiRestServlet, self).__init__() diff --git a/synapse/rest/client/v2_alpha/account_data.py b/synapse/rest/client/v2_alpha/account_data.py index f171b8d626..574a6298ce 100644 --- a/synapse/rest/client/v2_alpha/account_data.py +++ b/synapse/rest/client/v2_alpha/account_data.py @@ -20,7 +20,7 @@ from twisted.internet import defer from synapse.api.errors import AuthError, NotFoundError, SynapseError from synapse.http.servlet import RestServlet, parse_json_object_from_request -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) @@ -30,7 +30,7 @@ class AccountDataServlet(RestServlet): PUT /user/{user_id}/account_data/{account_dataType} HTTP/1.1 GET /user/{user_id}/account_data/{account_dataType} HTTP/1.1 """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/user/(?P[^/]*)/account_data/(?P[^/]*)" ) @@ -79,7 +79,7 @@ class RoomAccountDataServlet(RestServlet): PUT /user/{user_id}/rooms/{room_id}/account_data/{account_dataType} HTTP/1.1 GET /user/{user_id}/rooms/{room_id}/account_data/{account_dataType} HTTP/1.1 """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/user/(?P[^/]*)" "/rooms/(?P[^/]*)" "/account_data/(?P[^/]*)" diff --git a/synapse/rest/client/v2_alpha/account_validity.py b/synapse/rest/client/v2_alpha/account_validity.py index fc8dbeb617..55c4ed5660 100644 --- a/synapse/rest/client/v2_alpha/account_validity.py +++ b/synapse/rest/client/v2_alpha/account_validity.py @@ -21,13 +21,13 @@ from synapse.api.errors import AuthError, SynapseError from synapse.http.server import finish_request from synapse.http.servlet import RestServlet -from ._base import client_v2_patterns +from ._base import client_patterns logger = 
logging.getLogger(__name__) class AccountValidityRenewServlet(RestServlet): - PATTERNS = client_v2_patterns("/account_validity/renew$") + PATTERNS = client_patterns("/account_validity/renew$") SUCCESS_HTML = b"Your account has been successfully renewed." def __init__(self, hs): @@ -60,7 +60,7 @@ class AccountValidityRenewServlet(RestServlet): class AccountValiditySendMailServlet(RestServlet): - PATTERNS = client_v2_patterns("/account_validity/send_mail$") + PATTERNS = client_patterns("/account_validity/send_mail$") def __init__(self, hs): """ diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py index 4c380ab84d..8dfe5cba02 100644 --- a/synapse/rest/client/v2_alpha/auth.py +++ b/synapse/rest/client/v2_alpha/auth.py @@ -23,7 +23,7 @@ from synapse.api.urls import CLIENT_API_PREFIX from synapse.http.server import finish_request from synapse.http.servlet import RestServlet, parse_string -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) @@ -122,7 +122,7 @@ class AuthRestServlet(RestServlet): cannot be handled in the normal flow (with requests to the same endpoint). Current use is for web fallback auth. """ - PATTERNS = client_v2_patterns(r"/auth/(?P[\w\.]*)/fallback/web") + PATTERNS = client_patterns(r"/auth/(?P[\w\.]*)/fallback/web") def __init__(self, hs): super(AuthRestServlet, self).__init__() diff --git a/synapse/rest/client/v2_alpha/capabilities.py b/synapse/rest/client/v2_alpha/capabilities.py index 2b4892330c..fc7e2f4dd5 100644 --- a/synapse/rest/client/v2_alpha/capabilities.py +++ b/synapse/rest/client/v2_alpha/capabilities.py @@ -19,7 +19,7 @@ from twisted.internet import defer from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.http.servlet import RestServlet -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) @@ -27,7 +27,7 @@ logger = logging.getLogger(__name__) class CapabilitiesRestServlet(RestServlet): """End point to expose the capabilities of the server.""" - PATTERNS = client_v2_patterns("/capabilities$") + PATTERNS = client_patterns("/capabilities$") def __init__(self, hs): """ diff --git a/synapse/rest/client/v2_alpha/devices.py b/synapse/rest/client/v2_alpha/devices.py index 5a5be7c390..78665304a5 100644 --- a/synapse/rest/client/v2_alpha/devices.py +++ b/synapse/rest/client/v2_alpha/devices.py @@ -24,13 +24,13 @@ from synapse.http.servlet import ( parse_json_object_from_request, ) -from ._base import client_v2_patterns, interactive_auth_handler +from ._base import client_patterns, interactive_auth_handler logger = logging.getLogger(__name__) class DevicesRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/devices$") + PATTERNS = client_patterns("/devices$") def __init__(self, hs): """ @@ -56,7 +56,7 @@ class DeleteDevicesRestServlet(RestServlet): API for bulk deletion of devices. Accepts a JSON object with a devices key which lists the device_ids to delete. Requires user interactive auth. 
""" - PATTERNS = client_v2_patterns("/delete_devices") + PATTERNS = client_patterns("/delete_devices") def __init__(self, hs): super(DeleteDevicesRestServlet, self).__init__() @@ -95,7 +95,7 @@ class DeleteDevicesRestServlet(RestServlet): class DeviceRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/devices/(?P[^/]*)$") + PATTERNS = client_patterns("/devices/(?P[^/]*)$") def __init__(self, hs): """ diff --git a/synapse/rest/client/v2_alpha/filter.py b/synapse/rest/client/v2_alpha/filter.py index ae86728879..65db48c3cc 100644 --- a/synapse/rest/client/v2_alpha/filter.py +++ b/synapse/rest/client/v2_alpha/filter.py @@ -21,13 +21,13 @@ from synapse.api.errors import AuthError, Codes, StoreError, SynapseError from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.types import UserID -from ._base import client_v2_patterns, set_timeline_upper_limit +from ._base import client_patterns, set_timeline_upper_limit logger = logging.getLogger(__name__) class GetFilterRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/user/(?P[^/]*)/filter/(?P[^/]*)") + PATTERNS = client_patterns("/user/(?P[^/]*)/filter/(?P[^/]*)") def __init__(self, hs): super(GetFilterRestServlet, self).__init__() @@ -63,7 +63,7 @@ class GetFilterRestServlet(RestServlet): class CreateFilterRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/user/(?P[^/]*)/filter") + PATTERNS = client_patterns("/user/(?P[^/]*)/filter") def __init__(self, hs): super(CreateFilterRestServlet, self).__init__() diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 21e02c07c0..d082385ec7 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -21,7 +21,7 @@ from twisted.internet import defer from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.types import GroupID -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) @@ -29,7 +29,7 @@ logger = logging.getLogger(__name__) class GroupServlet(RestServlet): """Get the group profile """ - PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/profile$") + PATTERNS = client_patterns("/groups/(?P[^/]*)/profile$") def __init__(self, hs): super(GroupServlet, self).__init__() @@ -65,7 +65,7 @@ class GroupServlet(RestServlet): class GroupSummaryServlet(RestServlet): """Get the full group summary """ - PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/summary$") + PATTERNS = client_patterns("/groups/(?P[^/]*)/summary$") def __init__(self, hs): super(GroupSummaryServlet, self).__init__() @@ -93,7 +93,7 @@ class GroupSummaryRoomsCatServlet(RestServlet): - /groups/:group/summary/rooms/:room_id - /groups/:group/summary/categories/:category/rooms/:room_id """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/summary" "(/categories/(?P[^/]+))?" 
"/rooms/(?P[^/]*)$" @@ -137,7 +137,7 @@ class GroupSummaryRoomsCatServlet(RestServlet): class GroupCategoryServlet(RestServlet): """Get/add/update/delete a group category """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/categories/(?P[^/]+)$" ) @@ -189,7 +189,7 @@ class GroupCategoryServlet(RestServlet): class GroupCategoriesServlet(RestServlet): """Get all group categories """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/categories/$" ) @@ -214,7 +214,7 @@ class GroupCategoriesServlet(RestServlet): class GroupRoleServlet(RestServlet): """Get/add/update/delete a group role """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/roles/(?P[^/]+)$" ) @@ -266,7 +266,7 @@ class GroupRoleServlet(RestServlet): class GroupRolesServlet(RestServlet): """Get all group roles """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/roles/$" ) @@ -295,7 +295,7 @@ class GroupSummaryUsersRoleServlet(RestServlet): - /groups/:group/summary/users/:room_id - /groups/:group/summary/roles/:role/users/:user_id """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/summary" "(/roles/(?P[^/]+))?" "/users/(?P[^/]*)$" @@ -339,7 +339,7 @@ class GroupSummaryUsersRoleServlet(RestServlet): class GroupRoomServlet(RestServlet): """Get all rooms in a group """ - PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/rooms$") + PATTERNS = client_patterns("/groups/(?P[^/]*)/rooms$") def __init__(self, hs): super(GroupRoomServlet, self).__init__() @@ -360,7 +360,7 @@ class GroupRoomServlet(RestServlet): class GroupUsersServlet(RestServlet): """Get all users in a group """ - PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/users$") + PATTERNS = client_patterns("/groups/(?P[^/]*)/users$") def __init__(self, hs): super(GroupUsersServlet, self).__init__() @@ -381,7 +381,7 @@ class GroupUsersServlet(RestServlet): class GroupInvitedUsersServlet(RestServlet): """Get users invited to a group """ - PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/invited_users$") + PATTERNS = client_patterns("/groups/(?P[^/]*)/invited_users$") def __init__(self, hs): super(GroupInvitedUsersServlet, self).__init__() @@ -405,7 +405,7 @@ class GroupInvitedUsersServlet(RestServlet): class GroupSettingJoinPolicyServlet(RestServlet): """Set group join policy """ - PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/settings/m.join_policy$") + PATTERNS = client_patterns("/groups/(?P[^/]*)/settings/m.join_policy$") def __init__(self, hs): super(GroupSettingJoinPolicyServlet, self).__init__() @@ -431,7 +431,7 @@ class GroupSettingJoinPolicyServlet(RestServlet): class GroupCreateServlet(RestServlet): """Create a group """ - PATTERNS = client_v2_patterns("/create_group$") + PATTERNS = client_patterns("/create_group$") def __init__(self, hs): super(GroupCreateServlet, self).__init__() @@ -462,7 +462,7 @@ class GroupCreateServlet(RestServlet): class GroupAdminRoomsServlet(RestServlet): """Add a room to the group """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/admin/rooms/(?P[^/]*)$" ) @@ -499,7 +499,7 @@ class GroupAdminRoomsServlet(RestServlet): class GroupAdminRoomsConfigServlet(RestServlet): """Update the config of a room in a group """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/admin/rooms/(?P[^/]*)" "/config/(?P[^/]*)$" ) @@ -526,7 +526,7 @@ class GroupAdminRoomsConfigServlet(RestServlet): class 
GroupAdminUsersInviteServlet(RestServlet): """Invite a user to the group """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/admin/users/invite/(?P[^/]*)$" ) @@ -555,7 +555,7 @@ class GroupAdminUsersInviteServlet(RestServlet): class GroupAdminUsersKickServlet(RestServlet): """Kick a user from the group """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/admin/users/remove/(?P[^/]*)$" ) @@ -581,7 +581,7 @@ class GroupAdminUsersKickServlet(RestServlet): class GroupSelfLeaveServlet(RestServlet): """Leave a joined group """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/self/leave$" ) @@ -607,7 +607,7 @@ class GroupSelfLeaveServlet(RestServlet): class GroupSelfJoinServlet(RestServlet): """Attempt to join a group, or knock """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/self/join$" ) @@ -633,7 +633,7 @@ class GroupSelfJoinServlet(RestServlet): class GroupSelfAcceptInviteServlet(RestServlet): """Accept a group invite """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/self/accept_invite$" ) @@ -659,7 +659,7 @@ class GroupSelfAcceptInviteServlet(RestServlet): class GroupSelfUpdatePublicityServlet(RestServlet): """Update whether we publicise a users membership of a group """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/groups/(?P[^/]*)/self/update_publicity$" ) @@ -686,7 +686,7 @@ class GroupSelfUpdatePublicityServlet(RestServlet): class PublicisedGroupsForUserServlet(RestServlet): """Get the list of groups a user is advertising """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/publicised_groups/(?P[^/]*)$" ) @@ -711,7 +711,7 @@ class PublicisedGroupsForUserServlet(RestServlet): class PublicisedGroupsForUsersServlet(RestServlet): """Get the list of groups a user is advertising """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/publicised_groups$" ) @@ -739,7 +739,7 @@ class PublicisedGroupsForUsersServlet(RestServlet): class GroupsForUserServlet(RestServlet): """Get all groups the logged in user is joined to """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/joined_groups$" ) diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index 8486086b51..4cbfbf5631 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -26,7 +26,7 @@ from synapse.http.servlet import ( ) from synapse.types import StreamToken -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) @@ -56,7 +56,7 @@ class KeyUploadServlet(RestServlet): }, } """ - PATTERNS = client_v2_patterns("/keys/upload(/(?P[^/]+))?$") + PATTERNS = client_patterns("/keys/upload(/(?P[^/]+))?$") def __init__(self, hs): """ @@ -130,7 +130,7 @@ class KeyQueryServlet(RestServlet): } } } } } } """ - PATTERNS = client_v2_patterns("/keys/query$") + PATTERNS = client_patterns("/keys/query$") def __init__(self, hs): """ @@ -159,7 +159,7 @@ class KeyChangesServlet(RestServlet): 200 OK { "changed": ["@foo:example.com"] } """ - PATTERNS = client_v2_patterns("/keys/changes$") + PATTERNS = client_patterns("/keys/changes$") def __init__(self, hs): """ @@ -209,7 +209,7 @@ class OneTimeKeyServlet(RestServlet): } } } } """ - PATTERNS = client_v2_patterns("/keys/claim$") + PATTERNS = client_patterns("/keys/claim$") def __init__(self, hs): super(OneTimeKeyServlet, self).__init__() diff --git 
a/synapse/rest/client/v2_alpha/notifications.py b/synapse/rest/client/v2_alpha/notifications.py index 0a1eb0ae45..53e666989b 100644 --- a/synapse/rest/client/v2_alpha/notifications.py +++ b/synapse/rest/client/v2_alpha/notifications.py @@ -20,13 +20,13 @@ from twisted.internet import defer from synapse.events.utils import format_event_for_client_v2_without_room_id from synapse.http.servlet import RestServlet, parse_integer, parse_string -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) class NotificationsServlet(RestServlet): - PATTERNS = client_v2_patterns("/notifications$") + PATTERNS = client_patterns("/notifications$") def __init__(self, hs): super(NotificationsServlet, self).__init__() diff --git a/synapse/rest/client/v2_alpha/openid.py b/synapse/rest/client/v2_alpha/openid.py index 01c90aa2a3..bb927d9f9d 100644 --- a/synapse/rest/client/v2_alpha/openid.py +++ b/synapse/rest/client/v2_alpha/openid.py @@ -22,7 +22,7 @@ from synapse.api.errors import AuthError from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.util.stringutils import random_string -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) @@ -56,7 +56,7 @@ class IdTokenServlet(RestServlet): "expires_in": 3600, } """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/user/(?P[^/]*)/openid/request_token" ) diff --git a/synapse/rest/client/v2_alpha/read_marker.py b/synapse/rest/client/v2_alpha/read_marker.py index a6e582a5ae..f4bd0d077f 100644 --- a/synapse/rest/client/v2_alpha/read_marker.py +++ b/synapse/rest/client/v2_alpha/read_marker.py @@ -19,13 +19,13 @@ from twisted.internet import defer from synapse.http.servlet import RestServlet, parse_json_object_from_request -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) class ReadMarkerRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/rooms/(?P[^/]*)/read_markers$") + PATTERNS = client_patterns("/rooms/(?P[^/]*)/read_markers$") def __init__(self, hs): super(ReadMarkerRestServlet, self).__init__() diff --git a/synapse/rest/client/v2_alpha/receipts.py b/synapse/rest/client/v2_alpha/receipts.py index de370cac45..fa12ac3e4d 100644 --- a/synapse/rest/client/v2_alpha/receipts.py +++ b/synapse/rest/client/v2_alpha/receipts.py @@ -20,13 +20,13 @@ from twisted.internet import defer from synapse.api.errors import SynapseError from synapse.http.servlet import RestServlet -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) class ReceiptRestServlet(RestServlet): - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/rooms/(?P[^/]*)" "/receipt/(?P[^/]*)" "/(?P[^/]*)$" diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 042f636135..79c085408b 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -43,7 +43,7 @@ from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.ratelimitutils import FederationRateLimiter from synapse.util.threepids import check_3pid_allowed -from ._base import client_v2_patterns, interactive_auth_handler +from ._base import client_patterns, interactive_auth_handler # We ought to be using hmac.compare_digest() but on older pythons it doesn't # exist. 
It's a _really minor_ security flaw to use plain string comparison @@ -60,7 +60,7 @@ logger = logging.getLogger(__name__) class EmailRegisterRequestTokenRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/register/email/requestToken$") + PATTERNS = client_patterns("/register/email/requestToken$") def __init__(self, hs): """ @@ -98,7 +98,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet): class MsisdnRegisterRequestTokenRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/register/msisdn/requestToken$") + PATTERNS = client_patterns("/register/msisdn/requestToken$") def __init__(self, hs): """ @@ -142,7 +142,7 @@ class MsisdnRegisterRequestTokenRestServlet(RestServlet): class UsernameAvailabilityRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/register/available") + PATTERNS = client_patterns("/register/available") def __init__(self, hs): """ @@ -182,7 +182,7 @@ class UsernameAvailabilityRestServlet(RestServlet): class RegisterRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/register$") + PATTERNS = client_patterns("/register$") def __init__(self, hs): """ diff --git a/synapse/rest/client/v2_alpha/relations.py b/synapse/rest/client/v2_alpha/relations.py index 41e0a44936..f8f8742bdc 100644 --- a/synapse/rest/client/v2_alpha/relations.py +++ b/synapse/rest/client/v2_alpha/relations.py @@ -34,7 +34,7 @@ from synapse.http.servlet import ( from synapse.rest.client.transactions import HttpTransactionCache from synapse.storage.relations import AggregationPaginationToken, RelationPaginationToken -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) @@ -66,12 +66,12 @@ class RelationSendServlet(RestServlet): def register(self, http_server): http_server.register_paths( "POST", - client_v2_patterns(self.PATTERN + "$", releases=()), + client_patterns(self.PATTERN + "$", releases=()), self.on_PUT_or_POST, ) http_server.register_paths( "PUT", - client_v2_patterns(self.PATTERN + "/(?P[^/]*)$", releases=()), + client_patterns(self.PATTERN + "/(?P[^/]*)$", releases=()), self.on_PUT, ) @@ -120,7 +120,7 @@ class RelationPaginationServlet(RestServlet): filtered by relation type and event type. 
""" - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/rooms/(?P[^/]*)/relations/(?P[^/]*)" "(/(?P[^/]*)(/(?P[^/]*))?)?$", releases=(), @@ -197,7 +197,7 @@ class RelationAggregationPaginationServlet(RestServlet): } """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/rooms/(?P[^/]*)/aggregations/(?P[^/]*)" "(/(?P[^/]*)(/(?P[^/]*))?)?$", releases=(), @@ -269,7 +269,7 @@ class RelationAggregationGroupPaginationServlet(RestServlet): } """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/rooms/(?P[^/]*)/aggregations/(?P[^/]*)" "/(?P[^/]*)/(?P[^/]*)/(?P[^/]*)$", releases=(), diff --git a/synapse/rest/client/v2_alpha/report_event.py b/synapse/rest/client/v2_alpha/report_event.py index 95d2a71ec2..10198662a9 100644 --- a/synapse/rest/client/v2_alpha/report_event.py +++ b/synapse/rest/client/v2_alpha/report_event.py @@ -27,13 +27,13 @@ from synapse.http.servlet import ( parse_json_object_from_request, ) -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) class ReportEventRestServlet(RestServlet): - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/rooms/(?P[^/]*)/report/(?P[^/]*)$" ) diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index 220a0de30b..87779645f9 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -24,13 +24,13 @@ from synapse.http.servlet import ( parse_string, ) -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) class RoomKeysServlet(RestServlet): - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/room_keys/keys(/(?P[^/]+))?(/(?P[^/]+))?$" ) @@ -256,7 +256,7 @@ class RoomKeysServlet(RestServlet): class RoomKeysNewVersionServlet(RestServlet): - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/room_keys/version$" ) @@ -314,7 +314,7 @@ class RoomKeysNewVersionServlet(RestServlet): class RoomKeysVersionServlet(RestServlet): - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/room_keys/version(/(?P[^/]+))?$" ) diff --git a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py index 62b8de71fa..c621a90fba 100644 --- a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py +++ b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py @@ -25,7 +25,7 @@ from synapse.http.servlet import ( parse_json_object_from_request, ) -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) @@ -47,7 +47,7 @@ class RoomUpgradeRestServlet(RestServlet): Args: hs (synapse.server.HomeServer): """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( # /rooms/$roomid/upgrade "/rooms/(?P[^/]*)/upgrade$", ) diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py index 21e9cef2d0..120a713361 100644 --- a/synapse/rest/client/v2_alpha/sendtodevice.py +++ b/synapse/rest/client/v2_alpha/sendtodevice.py @@ -21,13 +21,13 @@ from synapse.http import servlet from synapse.http.servlet import parse_json_object_from_request from synapse.rest.client.transactions import HttpTransactionCache -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) class SendToDeviceRestServlet(servlet.RestServlet): - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( 
"/sendToDevice/(?P[^/]*)/(?P[^/]*)$", ) diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index d3025025e3..148fc6c985 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -32,7 +32,7 @@ from synapse.handlers.sync import SyncConfig from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string from synapse.types import StreamToken -from ._base import client_v2_patterns, set_timeline_upper_limit +from ._base import client_patterns, set_timeline_upper_limit logger = logging.getLogger(__name__) @@ -73,7 +73,7 @@ class SyncRestServlet(RestServlet): } """ - PATTERNS = client_v2_patterns("/sync$") + PATTERNS = client_patterns("/sync$") ALLOWED_PRESENCE = set(["online", "offline", "unavailable"]) def __init__(self, hs): diff --git a/synapse/rest/client/v2_alpha/tags.py b/synapse/rest/client/v2_alpha/tags.py index 4fea614e95..ebff7cff45 100644 --- a/synapse/rest/client/v2_alpha/tags.py +++ b/synapse/rest/client/v2_alpha/tags.py @@ -20,7 +20,7 @@ from twisted.internet import defer from synapse.api.errors import AuthError from synapse.http.servlet import RestServlet, parse_json_object_from_request -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) @@ -29,7 +29,7 @@ class TagListServlet(RestServlet): """ GET /user/{user_id}/rooms/{room_id}/tags HTTP/1.1 """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/user/(?P[^/]*)/rooms/(?P[^/]*)/tags" ) @@ -54,7 +54,7 @@ class TagServlet(RestServlet): PUT /user/{user_id}/rooms/{room_id}/tags/{tag} HTTP/1.1 DELETE /user/{user_id}/rooms/{room_id}/tags/{tag} HTTP/1.1 """ - PATTERNS = client_v2_patterns( + PATTERNS = client_patterns( "/user/(?P[^/]*)/rooms/(?P[^/]*)/tags/(?P[^/]*)" ) diff --git a/synapse/rest/client/v2_alpha/thirdparty.py b/synapse/rest/client/v2_alpha/thirdparty.py index b9b5d07677..e7a987466a 100644 --- a/synapse/rest/client/v2_alpha/thirdparty.py +++ b/synapse/rest/client/v2_alpha/thirdparty.py @@ -21,13 +21,13 @@ from twisted.internet import defer from synapse.api.constants import ThirdPartyEntityKind from synapse.http.servlet import RestServlet -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) class ThirdPartyProtocolsServlet(RestServlet): - PATTERNS = client_v2_patterns("/thirdparty/protocols") + PATTERNS = client_patterns("/thirdparty/protocols") def __init__(self, hs): super(ThirdPartyProtocolsServlet, self).__init__() @@ -44,7 +44,7 @@ class ThirdPartyProtocolsServlet(RestServlet): class ThirdPartyProtocolServlet(RestServlet): - PATTERNS = client_v2_patterns("/thirdparty/protocol/(?P[^/]+)$") + PATTERNS = client_patterns("/thirdparty/protocol/(?P[^/]+)$") def __init__(self, hs): super(ThirdPartyProtocolServlet, self).__init__() @@ -66,7 +66,7 @@ class ThirdPartyProtocolServlet(RestServlet): class ThirdPartyUserServlet(RestServlet): - PATTERNS = client_v2_patterns("/thirdparty/user(/(?P[^/]+))?$") + PATTERNS = client_patterns("/thirdparty/user(/(?P[^/]+))?$") def __init__(self, hs): super(ThirdPartyUserServlet, self).__init__() @@ -89,7 +89,7 @@ class ThirdPartyUserServlet(RestServlet): class ThirdPartyLocationServlet(RestServlet): - PATTERNS = client_v2_patterns("/thirdparty/location(/(?P[^/]+))?$") + PATTERNS = client_patterns("/thirdparty/location(/(?P[^/]+))?$") def __init__(self, hs): super(ThirdPartyLocationServlet, self).__init__() diff --git a/synapse/rest/client/v2_alpha/tokenrefresh.py 
b/synapse/rest/client/v2_alpha/tokenrefresh.py index 6e76b9e9c2..6c366142e1 100644 --- a/synapse/rest/client/v2_alpha/tokenrefresh.py +++ b/synapse/rest/client/v2_alpha/tokenrefresh.py @@ -18,7 +18,7 @@ from twisted.internet import defer from synapse.api.errors import AuthError from synapse.http.servlet import RestServlet -from ._base import client_v2_patterns +from ._base import client_patterns class TokenRefreshRestServlet(RestServlet): @@ -26,7 +26,7 @@ class TokenRefreshRestServlet(RestServlet): Exchanges refresh tokens for a pair of an access token and a new refresh token. """ - PATTERNS = client_v2_patterns("/tokenrefresh") + PATTERNS = client_patterns("/tokenrefresh") def __init__(self, hs): super(TokenRefreshRestServlet, self).__init__() diff --git a/synapse/rest/client/v2_alpha/user_directory.py b/synapse/rest/client/v2_alpha/user_directory.py index 36b02de37f..69e4efc47a 100644 --- a/synapse/rest/client/v2_alpha/user_directory.py +++ b/synapse/rest/client/v2_alpha/user_directory.py @@ -20,13 +20,13 @@ from twisted.internet import defer from synapse.api.errors import SynapseError from synapse.http.servlet import RestServlet, parse_json_object_from_request -from ._base import client_v2_patterns +from ._base import client_patterns logger = logging.getLogger(__name__) class UserDirectorySearchRestServlet(RestServlet): - PATTERNS = client_v2_patterns("/user_directory/search$") + PATTERNS = client_patterns("/user_directory/search$") def __init__(self, hs): """ diff --git a/tests/__init__.py b/tests/__init__.py index d3181f9403..f7fc502f01 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -21,4 +21,4 @@ import tests.patch_inline_callbacks # attempt to do the patch before we load any synapse code tests.patch_inline_callbacks.do_patch() -util.DEFAULT_TIMEOUT_DURATION = 10 +util.DEFAULT_TIMEOUT_DURATION = 20 diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py index ee5f09041f..e5fc2fcd15 100644 --- a/tests/rest/admin/test_admin.py +++ b/tests/rest/admin/test_admin.py @@ -408,7 +408,6 @@ class ShutdownRoomTestCase(unittest.HomeserverTestCase): users_in_room = self.get_success(self.store.get_users_in_room(room_id)) self.assertEqual([], users_in_room) - @unittest.DEBUG def test_shutdown_room_block_peek(self): """Test that a world_readable room can no longer be peeked into after it has been shut down. diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py index f4d0d48dad..72c7ed93cb 100644 --- a/tests/rest/client/v1/test_profile.py +++ b/tests/rest/client/v1/test_profile.py @@ -30,7 +30,7 @@ from tests import unittest from ....utils import MockHttpResource, setup_test_homeserver myid = "@1234ABCD:test" -PATH_PREFIX = "/_matrix/client/api/v1" +PATH_PREFIX = "/_matrix/client/r0" class MockHandlerProfileTestCase(unittest.TestCase): -- cgit 1.5.1 From fec2dcb1a538ab8ab447f724af1a94d5b3517197 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 3 Jun 2019 22:59:51 +0100 Subject: Enforce validity period on server_keys for fed requests. (#5321) When handling incoming federation requests, make sure that we have an up-to-date copy of the signing key. We do not yet enforce the validity period for event signatures. 
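The change below batches key lookups and enforces the validity window when satisfying them. As a rough, self-contained illustration of that logic (plain-Python stand-ins, not Synapse's actual classes), the batching keeps the largest minimum_valid_until_ts seen for each key, and a fetched key only satisfies a request if its valid_until_ts reaches that threshold:

    from collections import defaultdict, namedtuple

    # Illustrative stand-ins for Synapse's VerifyKeyRequest / FetchKeyResult.
    VerifyKeyRequest = namedtuple(
        "VerifyKeyRequest", ["server_name", "key_ids", "minimum_valid_until_ts"]
    )
    FetchKeyResult = namedtuple("FetchKeyResult", ["verify_key", "valid_until_ts"])

    def build_fetch_plan(requests):
        # server_name -> key_id -> largest minimum_valid_until_ts of any request,
        # so a single fetch can satisfy every request for that key.
        plan = defaultdict(dict)
        for req in requests:
            keys_for_server = plan[req.server_name]
            for key_id in req.key_ids:
                keys_for_server[key_id] = max(
                    keys_for_server.get(key_id, -1), req.minimum_valid_until_ts
                )
        return plan

    def key_satisfies(result, req):
        # A fetched key is only usable if it was still valid at the required time.
        return result is not None and result.valid_until_ts >= req.minimum_valid_until_ts

    reqs = [
        VerifyKeyRequest("server1", ["ed25519:key1"], 500),
        VerifyKeyRequest("server1", ["ed25519:key1"], 1500),
    ]
    print(dict(build_fetch_plan(reqs)))   # {'server1': {'ed25519:key1': 1500}}
    fetched = FetchKeyResult("<verify key>", valid_until_ts=1200)
    print([key_satisfies(fetched, r) for r in reqs])  # [True, False]

This mirrors the behaviour exercised by the deduplication test added to tests/crypto/test_keyring.py further down.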
--- changelog.d/5321.bugfix | 1 + synapse/crypto/keyring.py | 167 ++++++++++++++++++++++----------- synapse/federation/federation_base.py | 4 +- synapse/federation/transport/server.py | 4 +- synapse/groups/attestations.py | 5 +- tests/crypto/test_keyring.py | 135 ++++++++++++++++++++------ 6 files changed, 228 insertions(+), 88 deletions(-) create mode 100644 changelog.d/5321.bugfix (limited to 'tests') diff --git a/changelog.d/5321.bugfix b/changelog.d/5321.bugfix new file mode 100644 index 0000000000..943a61956d --- /dev/null +++ b/changelog.d/5321.bugfix @@ -0,0 +1 @@ +Ensure that we have an up-to-date copy of the signing key when validating incoming federation requests. diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index b2f4cea536..cdec06c88e 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -15,6 +15,7 @@ # limitations under the License. import logging +from collections import defaultdict import six from six import raise_from @@ -70,6 +71,9 @@ class VerifyKeyRequest(object): json_object(dict): The JSON object to verify. + minimum_valid_until_ts (int): time at which we require the signing key to + be valid. (0 implies we don't care) + deferred(Deferred[str, str, nacl.signing.VerifyKey]): A deferred (server_name, key_id, verify_key) tuple that resolves when a verify key has been fetched. The deferreds' callbacks are run with no @@ -82,7 +86,8 @@ class VerifyKeyRequest(object): server_name = attr.ib() key_ids = attr.ib() json_object = attr.ib() - deferred = attr.ib() + minimum_valid_until_ts = attr.ib() + deferred = attr.ib(default=attr.Factory(defer.Deferred)) class KeyLookupError(ValueError): @@ -90,14 +95,16 @@ class KeyLookupError(ValueError): class Keyring(object): - def __init__(self, hs): + def __init__(self, hs, key_fetchers=None): self.clock = hs.get_clock() - self._key_fetchers = ( - StoreKeyFetcher(hs), - PerspectivesKeyFetcher(hs), - ServerKeyFetcher(hs), - ) + if key_fetchers is None: + key_fetchers = ( + StoreKeyFetcher(hs), + PerspectivesKeyFetcher(hs), + ServerKeyFetcher(hs), + ) + self._key_fetchers = key_fetchers # map from server name to Deferred. Has an entry for each server with # an ongoing key download; the Deferred completes once the download @@ -106,9 +113,25 @@ class Keyring(object): # These are regular, logcontext-agnostic Deferreds. self.key_downloads = {} - def verify_json_for_server(self, server_name, json_object): + def verify_json_for_server(self, server_name, json_object, validity_time): + """Verify that a JSON object has been signed by a given server + + Args: + server_name (str): name of the server which must have signed this object + + json_object (dict): object to be checked + + validity_time (int): timestamp at which we require the signing key to + be valid. (0 implies we don't care) + + Returns: + Deferred[None]: completes if the the object was correctly signed, otherwise + errbacks with an error + """ + req = server_name, json_object, validity_time + return logcontext.make_deferred_yieldable( - self.verify_json_objects_for_server([(server_name, json_object)])[0] + self.verify_json_objects_for_server((req,))[0] ) def verify_json_objects_for_server(self, server_and_json): @@ -116,10 +139,12 @@ class Keyring(object): necessary. Args: - server_and_json (list): List of pairs of (server_name, json_object) + server_and_json (iterable[Tuple[str, dict, int]): + Iterable of triplets of (server_name, json_object, validity_time) + validity_time is a timestamp at which the signing key must be valid. 
Returns: - List: for each input pair, a deferred indicating success + List: for each input triplet, a deferred indicating success or failure to verify each json object's signature for the given server_name. The deferreds run their callbacks in the sentinel logcontext. @@ -128,12 +153,12 @@ class Keyring(object): verify_requests = [] handle = preserve_fn(_handle_key_deferred) - def process(server_name, json_object): + def process(server_name, json_object, validity_time): """Process an entry in the request list - Given a (server_name, json_object) pair from the request list, - adds a key request to verify_requests, and returns a deferred which will - complete or fail (in the sentinel context) when verification completes. + Given a (server_name, json_object, validity_time) triplet from the request + list, adds a key request to verify_requests, and returns a deferred which + will complete or fail (in the sentinel context) when verification completes. """ key_ids = signature_ids(json_object, server_name) @@ -148,7 +173,7 @@ class Keyring(object): # add the key request to the queue, but don't start it off yet. verify_request = VerifyKeyRequest( - server_name, key_ids, json_object, defer.Deferred() + server_name, key_ids, json_object, validity_time ) verify_requests.append(verify_request) @@ -160,8 +185,8 @@ class Keyring(object): return handle(verify_request) results = [ - process(server_name, json_object) - for server_name, json_object in server_and_json + process(server_name, json_object, validity_time) + for server_name, json_object, validity_time in server_and_json ] if verify_requests: @@ -298,8 +323,12 @@ class Keyring(object): verify_request.deferred.errback( SynapseError( 401, - "No key for %s with id %s" - % (verify_request.server_name, verify_request.key_ids), + "No key for %s with ids in %s (min_validity %i)" + % ( + verify_request.server_name, + verify_request.key_ids, + verify_request.minimum_valid_until_ts, + ), Codes.UNAUTHORIZED, ) ) @@ -323,18 +352,28 @@ class Keyring(object): Args: fetcher (KeyFetcher): fetcher to use to fetch the keys remaining_requests (set[VerifyKeyRequest]): outstanding key requests. - Any successfully-completed requests will be reomved from the list. + Any successfully-completed requests will be removed from the list. """ - # dict[str, set(str)]: keys to fetch for each server - missing_keys = {} + # dict[str, dict[str, int]]: keys to fetch. + # server_name -> key_id -> min_valid_ts + missing_keys = defaultdict(dict) + for verify_request in remaining_requests: # any completed requests should already have been removed assert not verify_request.deferred.called - missing_keys.setdefault(verify_request.server_name, set()).update( - verify_request.key_ids - ) + keys_for_server = missing_keys[verify_request.server_name] - results = yield fetcher.get_keys(missing_keys.items()) + for key_id in verify_request.key_ids: + # If we have several requests for the same key, then we only need to + # request that key once, but we should do so with the greatest + # min_valid_until_ts of the requests, so that we can satisfy all of + # the requests. + keys_for_server[key_id] = max( + keys_for_server.get(key_id, -1), + verify_request.minimum_valid_until_ts + ) + + results = yield fetcher.get_keys(missing_keys) completed = list() for verify_request in remaining_requests: @@ -344,25 +383,34 @@ class Keyring(object): # complete this VerifyKeyRequest. 
result_keys = results.get(server_name, {}) for key_id in verify_request.key_ids: - key = result_keys.get(key_id) - if key: - with PreserveLoggingContext(): - verify_request.deferred.callback( - (server_name, key_id, key.verify_key) - ) - completed.append(verify_request) - break + fetch_key_result = result_keys.get(key_id) + if not fetch_key_result: + # we didn't get a result for this key + continue + + if ( + fetch_key_result.valid_until_ts + < verify_request.minimum_valid_until_ts + ): + # key was not valid at this point + continue + + with PreserveLoggingContext(): + verify_request.deferred.callback( + (server_name, key_id, fetch_key_result.verify_key) + ) + completed.append(verify_request) + break remaining_requests.difference_update(completed) class KeyFetcher(object): - def get_keys(self, server_name_and_key_ids): + def get_keys(self, keys_to_fetch): """ Args: - server_name_and_key_ids (iterable[Tuple[str, iterable[str]]]): - list of (server_name, iterable[key_id]) tuples to fetch keys for - Note that the iterables may be iterated more than once. + keys_to_fetch (dict[str, dict[str, int]]): + the keys to be fetched. server_name -> key_id -> min_valid_ts Returns: Deferred[dict[str, dict[str, synapse.storage.keys.FetchKeyResult|None]]]: @@ -378,13 +426,15 @@ class StoreKeyFetcher(KeyFetcher): self.store = hs.get_datastore() @defer.inlineCallbacks - def get_keys(self, server_name_and_key_ids): + def get_keys(self, keys_to_fetch): """see KeyFetcher.get_keys""" + keys_to_fetch = ( (server_name, key_id) - for server_name, key_ids in server_name_and_key_ids - for key_id in key_ids + for server_name, keys_for_server in keys_to_fetch.items() + for key_id in keys_for_server.keys() ) + res = yield self.store.get_server_verify_keys(keys_to_fetch) keys = {} for (server_name, key_id), key in res.items(): @@ -508,14 +558,14 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): self.perspective_servers = self.config.perspectives @defer.inlineCallbacks - def get_keys(self, server_name_and_key_ids): + def get_keys(self, keys_to_fetch): """see KeyFetcher.get_keys""" @defer.inlineCallbacks def get_key(perspective_name, perspective_keys): try: result = yield self.get_server_verify_key_v2_indirect( - server_name_and_key_ids, perspective_name, perspective_keys + keys_to_fetch, perspective_name, perspective_keys ) defer.returnValue(result) except KeyLookupError as e: @@ -549,13 +599,15 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): @defer.inlineCallbacks def get_server_verify_key_v2_indirect( - self, server_names_and_key_ids, perspective_name, perspective_keys + self, keys_to_fetch, perspective_name, perspective_keys ): """ Args: - server_names_and_key_ids (iterable[Tuple[str, iterable[str]]]): - list of (server_name, iterable[key_id]) tuples to fetch keys for + keys_to_fetch (dict[str, dict[str, int]]): + the keys to be fetched. server_name -> key_id -> min_valid_ts + perspective_name (str): name of the notary server to query for the keys + perspective_keys (dict[str, VerifyKey]): map of key_id->key for the notary server @@ -569,12 +621,10 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): """ logger.info( "Requesting keys %s from notary server %s", - server_names_and_key_ids, + keys_to_fetch.items(), perspective_name, ) - # TODO(mark): Set the minimum_valid_until_ts to that needed by - # the events being validated or the current time if validating - # an incoming request. 
+ try: query_response = yield self.client.post_json( destination=perspective_name, @@ -582,9 +632,10 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): data={ u"server_keys": { server_name: { - key_id: {u"minimum_valid_until_ts": 0} for key_id in key_ids + key_id: {u"minimum_valid_until_ts": min_valid_ts} + for key_id, min_valid_ts in server_keys.items() } - for server_name, key_ids in server_names_and_key_ids + for server_name, server_keys in keys_to_fetch.items() } }, long_retries=True, @@ -694,15 +745,18 @@ class ServerKeyFetcher(BaseV2KeyFetcher): self.client = hs.get_http_client() @defer.inlineCallbacks - def get_keys(self, server_name_and_key_ids): + def get_keys(self, keys_to_fetch): """see KeyFetcher.get_keys""" + # TODO make this more resilient results = yield logcontext.make_deferred_yieldable( defer.gatherResults( [ run_in_background( - self.get_server_verify_key_v2_direct, server_name, key_ids + self.get_server_verify_key_v2_direct, + server_name, + server_keys.keys(), ) - for server_name, key_ids in server_name_and_key_ids + for server_name, server_keys in keys_to_fetch.items() ], consumeErrors=True, ).addErrback(unwrapFirstError) @@ -721,6 +775,7 @@ class ServerKeyFetcher(BaseV2KeyFetcher): keys = {} # type: dict[str, FetchKeyResult] for requested_key_id in key_ids: + # we may have found this key as a side-effect of asking for another. if requested_key_id in keys: continue diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index cffa831d80..4b38f7c759 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -265,7 +265,7 @@ def _check_sigs_on_pdus(keyring, room_version, pdus): ] more_deferreds = keyring.verify_json_objects_for_server([ - (p.sender_domain, p.redacted_pdu_json) + (p.sender_domain, p.redacted_pdu_json, 0) for p in pdus_to_check_sender ]) @@ -298,7 +298,7 @@ def _check_sigs_on_pdus(keyring, room_version, pdus): ] more_deferreds = keyring.verify_json_objects_for_server([ - (get_domain_from_id(p.pdu.event_id), p.redacted_pdu_json) + (get_domain_from_id(p.pdu.event_id), p.redacted_pdu_json, 0) for p in pdus_to_check_event_id ]) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index d0efc4e0d3..0db8858cf1 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -94,6 +94,7 @@ class NoAuthenticationError(AuthenticationError): class Authenticator(object): def __init__(self, hs): + self._clock = hs.get_clock() self.keyring = hs.get_keyring() self.server_name = hs.hostname self.store = hs.get_datastore() @@ -102,6 +103,7 @@ class Authenticator(object): # A method just so we can pass 'self' as the authenticator to the Servlets @defer.inlineCallbacks def authenticate_request(self, request, content): + now = self._clock.time_msec() json_request = { "method": request.method.decode('ascii'), "uri": request.uri.decode('ascii'), @@ -138,7 +140,7 @@ class Authenticator(object): 401, "Missing Authorization headers", Codes.UNAUTHORIZED, ) - yield self.keyring.verify_json_for_server(origin, json_request) + yield self.keyring.verify_json_for_server(origin, json_request, now) logger.info("Request from %s", origin) request.authenticated_entity = origin diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 786149be65..fa6b641ee1 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -97,10 +97,11 @@ class GroupAttestationSigning(object): # TODO: We also want to 
check that *new* attestations that people give # us to store are valid for at least a little while. - if valid_until_ms < self.clock.time_msec(): + now = self.clock.time_msec() + if valid_until_ms < now: raise SynapseError(400, "Attestation expired") - yield self.keyring.verify_json_for_server(server_name, attestation) + yield self.keyring.verify_json_for_server(server_name, attestation, now) def create_attestation(self, group_id, user_id): """Create an attestation for the group_id and user_id with default diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 3933ad4347..096401938d 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -19,6 +19,7 @@ from mock import Mock import canonicaljson import signedjson.key import signedjson.sign +from signedjson.key import get_verify_key from twisted.internet import defer @@ -137,7 +138,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): context_11.request = "11" res_deferreds = kr.verify_json_objects_for_server( - [("server10", json1), ("server11", {})] + [("server10", json1, 0), ("server11", {}, 0)] ) # the unsigned json should be rejected pretty quickly @@ -174,7 +175,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): self.http_client.post_json.return_value = defer.Deferred() res_deferreds_2 = kr.verify_json_objects_for_server( - [("server10", json1)] + [("server10", json1, 0)] ) res_deferreds_2[0].addBoth(self.check_context, None) yield logcontext.make_deferred_yieldable(res_deferreds_2[0]) @@ -197,31 +198,108 @@ class KeyringTestCase(unittest.HomeserverTestCase): kr = keyring.Keyring(self.hs) key1 = signedjson.key.generate_signing_key(1) - key1_id = "%s:%s" % (key1.alg, key1.version) - r = self.hs.datastore.store_server_verify_keys( "server9", time.time() * 1000, - [ - ( - "server9", - key1_id, - FetchKeyResult(signedjson.key.get_verify_key(key1), 1000), - ), - ], + [("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), 1000))], ) self.get_success(r) + json1 = {} signedjson.sign.sign_json(json1, "server9", key1) # should fail immediately on an unsigned object - d = _verify_json_for_server(kr, "server9", {}) + d = _verify_json_for_server(kr, "server9", {}, 0) self.failureResultOf(d, SynapseError) - d = _verify_json_for_server(kr, "server9", json1) - self.assertFalse(d.called) + # should suceed on a signed object + d = _verify_json_for_server(kr, "server9", json1, 500) + # self.assertFalse(d.called) self.get_success(d) + def test_verify_json_dedupes_key_requests(self): + """Two requests for the same key should be deduped.""" + key1 = signedjson.key.generate_signing_key(1) + + def get_keys(keys_to_fetch): + # there should only be one request object (with the max validity) + self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}}) + + return defer.succeed( + { + "server1": { + get_key_id(key1): FetchKeyResult(get_verify_key(key1), 1200) + } + } + ) + + mock_fetcher = keyring.KeyFetcher() + mock_fetcher.get_keys = Mock(side_effect=get_keys) + kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,)) + + json1 = {} + signedjson.sign.sign_json(json1, "server1", key1) + + # the first request should succeed; the second should fail because the key + # has expired + results = kr.verify_json_objects_for_server( + [("server1", json1, 500), ("server1", json1, 1500)] + ) + self.assertEqual(len(results), 2) + self.get_success(results[0]) + e = self.get_failure(results[1], SynapseError).value + self.assertEqual(e.errcode, "M_UNAUTHORIZED") + self.assertEqual(e.code, 401) 
+ + # there should have been a single call to the fetcher + mock_fetcher.get_keys.assert_called_once() + + def test_verify_json_falls_back_to_other_fetchers(self): + """If the first fetcher cannot provide a recent enough key, we fall back""" + key1 = signedjson.key.generate_signing_key(1) + + def get_keys1(keys_to_fetch): + self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}}) + return defer.succeed( + { + "server1": { + get_key_id(key1): FetchKeyResult(get_verify_key(key1), 800) + } + } + ) + + def get_keys2(keys_to_fetch): + self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}}) + return defer.succeed( + { + "server1": { + get_key_id(key1): FetchKeyResult(get_verify_key(key1), 1200) + } + } + ) + + mock_fetcher1 = keyring.KeyFetcher() + mock_fetcher1.get_keys = Mock(side_effect=get_keys1) + mock_fetcher2 = keyring.KeyFetcher() + mock_fetcher2.get_keys = Mock(side_effect=get_keys2) + kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher1, mock_fetcher2)) + + json1 = {} + signedjson.sign.sign_json(json1, "server1", key1) + + results = kr.verify_json_objects_for_server( + [("server1", json1, 1200), ("server1", json1, 1500)] + ) + self.assertEqual(len(results), 2) + self.get_success(results[0]) + e = self.get_failure(results[1], SynapseError).value + self.assertEqual(e.errcode, "M_UNAUTHORIZED") + self.assertEqual(e.code, 401) + + # there should have been a single call to each fetcher + mock_fetcher1.get_keys.assert_called_once() + mock_fetcher2.get_keys.assert_called_once() + class ServerKeyFetcherTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor, clock): @@ -260,8 +338,8 @@ class ServerKeyFetcherTestCase(unittest.HomeserverTestCase): self.http_client.get_json.side_effect = get_json - server_name_and_key_ids = [(SERVER_NAME, ("key1",))] - keys = self.get_success(fetcher.get_keys(server_name_and_key_ids)) + keys_to_fetch = {SERVER_NAME: {"key1": 0}} + keys = self.get_success(fetcher.get_keys(keys_to_fetch)) k = keys[SERVER_NAME][testverifykey_id] self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS) self.assertEqual(k.verify_key, testverifykey) @@ -288,9 +366,7 @@ class ServerKeyFetcherTestCase(unittest.HomeserverTestCase): # change the server name: it should cause a rejection response["server_name"] = "OTHER_SERVER" - self.get_failure( - fetcher.get_keys(server_name_and_key_ids), KeyLookupError - ) + self.get_failure(fetcher.get_keys(keys_to_fetch), KeyLookupError) class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): @@ -342,8 +418,8 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): self.http_client.post_json.side_effect = post_json - server_name_and_key_ids = [(SERVER_NAME, ("key1",))] - keys = self.get_success(fetcher.get_keys(server_name_and_key_ids)) + keys_to_fetch = {SERVER_NAME: {"key1": 0}} + keys = self.get_success(fetcher.get_keys(keys_to_fetch)) self.assertIn(SERVER_NAME, keys) k = keys[SERVER_NAME][testverifykey_id] self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS) @@ -401,7 +477,7 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): def get_key_from_perspectives(response): fetcher = PerspectivesKeyFetcher(self.hs) - server_name_and_key_ids = [(SERVER_NAME, ("key1",))] + keys_to_fetch = {SERVER_NAME: {"key1": 0}} def post_json(destination, path, data, **kwargs): self.assertEqual(destination, self.mock_perspective_server.server_name) @@ -410,9 +486,7 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): self.http_client.post_json.side_effect = post_json - 
return self.get_success( - fetcher.get_keys(server_name_and_key_ids) - ) + return self.get_success(fetcher.get_keys(keys_to_fetch)) # start with a valid response so we can check we are testing the right thing response = build_response() @@ -435,6 +509,11 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): self.assertEqual(keys, {}, "Expected empty dict with missing origin server sig") +def get_key_id(key): + """Get the matrix ID tag for a given SigningKey or VerifyKey""" + return "%s:%s" % (key.alg, key.version) + + @defer.inlineCallbacks def run_in_context(f, *args, **kwargs): with LoggingContext("testctx") as ctx: @@ -445,14 +524,16 @@ def run_in_context(f, *args, **kwargs): defer.returnValue(rv) -def _verify_json_for_server(keyring, server_name, json_object): +def _verify_json_for_server(keyring, server_name, json_object, validity_time): """thin wrapper around verify_json_for_server which makes sure it is wrapped with the patched defer.inlineCallbacks. """ @defer.inlineCallbacks def v(): - rv1 = yield keyring.verify_json_for_server(server_name, json_object) + rv1 = yield keyring.verify_json_for_server( + server_name, json_object, validity_time + ) defer.returnValue(rv1) return run_in_context(v) -- cgit 1.5.1 From def5ea4062295759d7c28d9c2302187871a1bc72 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 3 Jun 2019 15:36:41 +0100 Subject: Don't bomb out on direct key fetches as soon as one fails --- synapse/crypto/keyring.py | 58 +++++++++++++++++++++++++++----------------- tests/crypto/test_keyring.py | 12 ++++----- 2 files changed, 41 insertions(+), 29 deletions(-) (limited to 'tests') diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index bef6498f4b..5660c96023 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -46,6 +46,7 @@ from synapse.api.errors import ( ) from synapse.storage.keys import FetchKeyResult from synapse.util import logcontext, unwrapFirstError +from synapse.util.async_helpers import yieldable_gather_results from synapse.util.logcontext import ( LoggingContext, PreserveLoggingContext, @@ -169,7 +170,12 @@ class Keyring(object): ) ) - logger.debug("Verifying for %s with key_ids %s", server_name, key_ids) + logger.debug( + "Verifying for %s with key_ids %s, min_validity %i", + server_name, + key_ids, + validity_time, + ) # add the key request to the queue, but don't start it off yet. 
verify_request = VerifyKeyRequest( @@ -744,34 +750,42 @@ class ServerKeyFetcher(BaseV2KeyFetcher): self.clock = hs.get_clock() self.client = hs.get_http_client() - @defer.inlineCallbacks def get_keys(self, keys_to_fetch): """see KeyFetcher.get_keys""" - # TODO make this more resilient - results = yield logcontext.make_deferred_yieldable( - defer.gatherResults( - [ - run_in_background( - self.get_server_verify_key_v2_direct, - server_name, - server_keys.keys(), - ) - for server_name, server_keys in keys_to_fetch.items() - ], - consumeErrors=True, - ).addErrback(unwrapFirstError) - ) - merged = {} - for result in results: - merged.update(result) + results = {} + + @defer.inlineCallbacks + def get_key(key_to_fetch_item): + server_name, key_ids = key_to_fetch_item + try: + keys = yield self.get_server_verify_key_v2_direct(server_name, key_ids) + results[server_name] = keys + except KeyLookupError as e: + logger.warning( + "Error looking up keys %s from %s: %s", key_ids, server_name, e + ) + except Exception: + logger.exception("Error getting keys %s from %s", key_ids, server_name) - defer.returnValue( - {server_name: keys for server_name, keys in merged.items() if keys} + return yieldable_gather_results(get_key, keys_to_fetch.items()).addCallback( + lambda _: results ) @defer.inlineCallbacks def get_server_verify_key_v2_direct(self, server_name, key_ids): + """ + + Args: + server_name (str): + key_ids (iterable[str]): + + Returns: + Deferred[dict[str, FetchKeyResult]]: map from key ID to lookup result + + Raises: + KeyLookupError if there was a problem making the lookup + """ keys = {} # type: dict[str, FetchKeyResult] for requested_key_id in key_ids: @@ -823,7 +837,7 @@ class ServerKeyFetcher(BaseV2KeyFetcher): ) keys.update(response_keys) - defer.returnValue({server_name: keys}) + defer.returnValue(keys) @defer.inlineCallbacks diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 096401938d..4cff7e36c8 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -25,11 +25,7 @@ from twisted.internet import defer from synapse.api.errors import SynapseError from synapse.crypto import keyring -from synapse.crypto.keyring import ( - KeyLookupError, - PerspectivesKeyFetcher, - ServerKeyFetcher, -) +from synapse.crypto.keyring import PerspectivesKeyFetcher, ServerKeyFetcher from synapse.storage.keys import FetchKeyResult from synapse.util import logcontext from synapse.util.logcontext import LoggingContext @@ -364,9 +360,11 @@ class ServerKeyFetcherTestCase(unittest.HomeserverTestCase): bytes(res["key_json"]), canonicaljson.encode_canonical_json(response) ) - # change the server name: it should cause a rejection + # change the server name: the result should be ignored response["server_name"] = "OTHER_SERVER" - self.get_failure(fetcher.get_keys(keys_to_fetch), KeyLookupError) + + keys = self.get_success(fetcher.get_keys(keys_to_fetch)) + self.assertEqual(keys, {}) class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): -- cgit 1.5.1 From cea9750d112cf74790fb8c16482a0068717954d5 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 4 Jun 2019 16:12:57 +0100 Subject: Associate a request_name with each verify request, for logging Also: * rename VerifyKeyRequest->VerifyJsonRequest * calculate key_ids on VerifyJsonRequest construction * refactor things to pass around VerifyJsonRequests instead of 4-tuples --- changelog.d/5353.misc | 2 + synapse/crypto/keyring.py | 112 ++++++++++++++++++++------------- synapse/federation/federation_base.py | 
2 + synapse/federation/transport/server.py | 4 +- synapse/groups/attestations.py | 4 +- tests/crypto/test_keyring.py | 18 +++--- 6 files changed, 86 insertions(+), 56 deletions(-) create mode 100644 changelog.d/5353.misc (limited to 'tests') diff --git a/changelog.d/5353.misc b/changelog.d/5353.misc new file mode 100644 index 0000000000..436245fb11 --- /dev/null +++ b/changelog.d/5353.misc @@ -0,0 +1,2 @@ +Various improvements to debug logging. + diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index e94e71bdad..2b6b5913bc 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -60,9 +60,9 @@ logger = logging.getLogger(__name__) @attr.s(slots=True, cmp=False) -class VerifyKeyRequest(object): +class VerifyJsonRequest(object): """ - A request for a verify key to verify a JSON object. + A request to verify a JSON object. Attributes: server_name(str): The name of the server to verify against. @@ -85,11 +85,15 @@ class VerifyKeyRequest(object): """ server_name = attr.ib() - key_ids = attr.ib() json_object = attr.ib() minimum_valid_until_ts = attr.ib() + request_name = attr.ib() + key_ids = attr.ib(init=False) key_ready = attr.ib(default=attr.Factory(defer.Deferred)) + def __attrs_post_init__(self): + self.key_ids = signature_ids(self.json_object, self.server_name) + class KeyLookupError(ValueError): pass @@ -114,7 +118,9 @@ class Keyring(object): # These are regular, logcontext-agnostic Deferreds. self.key_downloads = {} - def verify_json_for_server(self, server_name, json_object, validity_time): + def verify_json_for_server( + self, server_name, json_object, validity_time, request_name + ): """Verify that a JSON object has been signed by a given server Args: @@ -125,24 +131,31 @@ class Keyring(object): validity_time (int): timestamp at which we require the signing key to be valid. (0 implies we don't care) + request_name (str): an identifier for this json object (eg, an event id) + for logging. + Returns: Deferred[None]: completes if the the object was correctly signed, otherwise errbacks with an error """ - req = server_name, json_object, validity_time - - return logcontext.make_deferred_yieldable( - self.verify_json_objects_for_server((req,))[0] - ) + req = VerifyJsonRequest(server_name, json_object, validity_time, request_name) + requests = (req,) + return logcontext.make_deferred_yieldable(self._verify_objects(requests)[0]) def verify_json_objects_for_server(self, server_and_json): """Bulk verifies signatures of json objects, bulk fetching keys as necessary. Args: - server_and_json (iterable[Tuple[str, dict, int]): - Iterable of triplets of (server_name, json_object, validity_time) - validity_time is a timestamp at which the signing key must be valid. + server_and_json (iterable[Tuple[str, dict, int, str]): + Iterable of (server_name, json_object, validity_time, request_name) + tuples. + + validity_time is a timestamp at which the signing key must be + valid. + + request_name is an identifier for this json object (eg, an event id) + for logging. Returns: List: for each input triplet, a deferred indicating success @@ -150,38 +163,54 @@ class Keyring(object): server_name. The deferreds run their callbacks in the sentinel logcontext. 
""" - # a list of VerifyKeyRequests - verify_requests = [] + return self._verify_objects( + VerifyJsonRequest(server_name, json_object, validity_time, request_name) + for server_name, json_object, validity_time, request_name in server_and_json + ) + + def _verify_objects(self, verify_requests): + """Does the work of verify_json_[objects_]for_server + + + Args: + verify_requests (iterable[VerifyJsonRequest]): + Iterable of verification requests. + + Returns: + List: for each input item, a deferred indicating success + or failure to verify each json object's signature for the given + server_name. The deferreds run their callbacks in the sentinel + logcontext. + """ + # a list of VerifyJsonRequests which are awaiting a key lookup + key_lookups = [] handle = preserve_fn(_handle_key_deferred) - def process(server_name, json_object, validity_time): + def process(verify_request): """Process an entry in the request list - Given a (server_name, json_object, validity_time) triplet from the request - list, adds a key request to verify_requests, and returns a deferred which + Adds a key request to key_lookups, and returns a deferred which will complete or fail (in the sentinel context) when verification completes. """ - key_ids = signature_ids(json_object, server_name) - - if not key_ids: + if not verify_request.key_ids: return defer.fail( SynapseError( - 400, "Not signed by %s" % (server_name,), Codes.UNAUTHORIZED + 400, + "Not signed by %s" % (verify_request.server_name,), + Codes.UNAUTHORIZED, ) ) logger.debug( - "Verifying for %s with key_ids %s, min_validity %i", - server_name, - key_ids, - validity_time, + "Verifying %s for %s with key_ids %s, min_validity %i", + verify_request.request_name, + verify_request.server_name, + verify_request.key_ids, + verify_request.minimum_valid_until_ts, ) # add the key request to the queue, but don't start it off yet. - verify_request = VerifyKeyRequest( - server_name, key_ids, json_object, validity_time - ) - verify_requests.append(verify_request) + key_lookups.append(verify_request) # now run _handle_key_deferred, which will wait for the key request # to complete and then do the verification. @@ -190,13 +219,10 @@ class Keyring(object): # wrap it with preserve_fn (aka run_in_background) return handle(verify_request) - results = [ - process(server_name, json_object, validity_time) - for server_name, json_object, validity_time in server_and_json - ] + results = [process(r) for r in verify_requests] - if verify_requests: - run_in_background(self._start_key_lookups, verify_requests) + if key_lookups: + run_in_background(self._start_key_lookups, key_lookups) return results @@ -207,7 +233,7 @@ class Keyring(object): Once each fetch completes, verify_request.key_ready will be resolved. Args: - verify_requests (List[VerifyKeyRequest]): + verify_requests (List[VerifyJsonRequest]): """ try: @@ -308,7 +334,7 @@ class Keyring(object): with a SynapseError if none of the keys are found. Args: - verify_requests (list[VerifyKeyRequest]): list of verify requests + verify_requests (list[VerifyJsonRequest]): list of verify requests """ remaining_requests = set( @@ -357,7 +383,7 @@ class Keyring(object): Args: fetcher (KeyFetcher): fetcher to use to fetch the keys - remaining_requests (set[VerifyKeyRequest]): outstanding key requests. + remaining_requests (set[VerifyJsonRequest]): outstanding key requests. Any successfully-completed requests will be removed from the list. """ # dict[str, dict[str, int]]: keys to fetch. @@ -376,7 +402,7 @@ class Keyring(object): # the requests. 
keys_for_server[key_id] = max( keys_for_server.get(key_id, -1), - verify_request.minimum_valid_until_ts + verify_request.minimum_valid_until_ts, ) results = yield fetcher.get_keys(missing_keys) @@ -386,7 +412,7 @@ class Keyring(object): server_name = verify_request.server_name # see if any of the keys we got this time are sufficient to - # complete this VerifyKeyRequest. + # complete this VerifyJsonRequest. result_keys = results.get(server_name, {}) for key_id in verify_request.key_ids: fetch_key_result = result_keys.get(key_id) @@ -454,9 +480,7 @@ class BaseV2KeyFetcher(object): self.config = hs.get_config() @defer.inlineCallbacks - def process_v2_response( - self, from_server, response_json, time_added_ms - ): + def process_v2_response(self, from_server, response_json, time_added_ms): """Parse a 'Server Keys' structure from the result of a /key request This is used to parse either the entirety of the response from @@ -852,7 +876,7 @@ def _handle_key_deferred(verify_request): """Waits for the key to become available, and then performs a verification Args: - verify_request (VerifyKeyRequest): + verify_request (VerifyJsonRequest): Returns: Deferred[None] diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index b541913d82..fc5cfb7d83 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -271,6 +271,7 @@ def _check_sigs_on_pdus(keyring, room_version, pdus): p.sender_domain, p.redacted_pdu_json, p.pdu.origin_server_ts if v.enforce_key_validity else 0, + p.pdu.event_id, ) for p in pdus_to_check_sender ] @@ -306,6 +307,7 @@ def _check_sigs_on_pdus(keyring, room_version, pdus): get_domain_from_id(p.pdu.event_id), p.redacted_pdu_json, p.pdu.origin_server_ts if v.enforce_key_validity else 0, + p.pdu.event_id, ) for p in pdus_to_check_event_id ] diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 0db8858cf1..949a5fb2aa 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -140,7 +140,9 @@ class Authenticator(object): 401, "Missing Authorization headers", Codes.UNAUTHORIZED, ) - yield self.keyring.verify_json_for_server(origin, json_request, now) + yield self.keyring.verify_json_for_server( + origin, json_request, now, "Incoming request" + ) logger.info("Request from %s", origin) request.authenticated_entity = origin diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index fa6b641ee1..e5dda1975f 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -101,7 +101,9 @@ class GroupAttestationSigning(object): if valid_until_ms < now: raise SynapseError(400, "Attestation expired") - yield self.keyring.verify_json_for_server(server_name, attestation, now) + yield self.keyring.verify_json_for_server( + server_name, attestation, now, "Group attestation" + ) def create_attestation(self, group_id, user_id): """Create an attestation for the group_id and user_id with default diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 4cff7e36c8..18121f4f6c 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -134,7 +134,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): context_11.request = "11" res_deferreds = kr.verify_json_objects_for_server( - [("server10", json1, 0), ("server11", {}, 0)] + [("server10", json1, 0, "test10"), ("server11", {}, 0, "test11")] ) # the unsigned json should be rejected pretty quickly @@ -171,7 +171,7 @@ 
class KeyringTestCase(unittest.HomeserverTestCase): self.http_client.post_json.return_value = defer.Deferred() res_deferreds_2 = kr.verify_json_objects_for_server( - [("server10", json1, 0)] + [("server10", json1, 0, "test")] ) res_deferreds_2[0].addBoth(self.check_context, None) yield logcontext.make_deferred_yieldable(res_deferreds_2[0]) @@ -205,11 +205,11 @@ class KeyringTestCase(unittest.HomeserverTestCase): signedjson.sign.sign_json(json1, "server9", key1) # should fail immediately on an unsigned object - d = _verify_json_for_server(kr, "server9", {}, 0) + d = _verify_json_for_server(kr, "server9", {}, 0, "test unsigned") self.failureResultOf(d, SynapseError) # should suceed on a signed object - d = _verify_json_for_server(kr, "server9", json1, 500) + d = _verify_json_for_server(kr, "server9", json1, 500, "test signed") # self.assertFalse(d.called) self.get_success(d) @@ -239,7 +239,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): # the first request should succeed; the second should fail because the key # has expired results = kr.verify_json_objects_for_server( - [("server1", json1, 500), ("server1", json1, 1500)] + [("server1", json1, 500, "test1"), ("server1", json1, 1500, "test2")] ) self.assertEqual(len(results), 2) self.get_success(results[0]) @@ -284,7 +284,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): signedjson.sign.sign_json(json1, "server1", key1) results = kr.verify_json_objects_for_server( - [("server1", json1, 1200), ("server1", json1, 1500)] + [("server1", json1, 1200, "test1"), ("server1", json1, 1500, "test2")] ) self.assertEqual(len(results), 2) self.get_success(results[0]) @@ -522,16 +522,14 @@ def run_in_context(f, *args, **kwargs): defer.returnValue(rv) -def _verify_json_for_server(keyring, server_name, json_object, validity_time): +def _verify_json_for_server(kr, *args): """thin wrapper around verify_json_for_server which makes sure it is wrapped with the patched defer.inlineCallbacks. """ @defer.inlineCallbacks def v(): - rv1 = yield keyring.verify_json_for_server( - server_name, json_object, validity_time - ) + rv1 = yield kr.verify_json_for_server(*args) defer.returnValue(rv1) return run_in_context(v) -- cgit 1.5.1 From e2dfb922e1334e4a506a9d678d0f1bf573cc95e6 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 5 Jun 2019 14:16:07 +0100 Subject: Validate federation server TLS certificates by default. --- changelog.d/5359.feature | 1 + synapse/config/tls.py | 10 +++++----- tests/http/federation/test_matrix_federation_agent.py | 12 +++++++++--- 3 files changed, 15 insertions(+), 8 deletions(-) create mode 100644 changelog.d/5359.feature (limited to 'tests') diff --git a/changelog.d/5359.feature b/changelog.d/5359.feature new file mode 100644 index 0000000000..2a03939834 --- /dev/null +++ b/changelog.d/5359.feature @@ -0,0 +1 @@ +Validate federation server TLS certificates by default (implements [MSC1711](https://github.com/matrix-org/matrix-doc/blob/master/proposals/1711-x509-for-federation.md)). 
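The tls.py change that follows flips the default for `federation_verify_certificates`. As a quick editor's sketch (not code from the patch), this is the effect when the option is read from a parsed config dict: an absent key now means verification is on, and it must be disabled explicitly. The helper name is invented for illustration.

    def read_verify_certificates(config):
        # An absent key now means "verify certificates" (the secure default).
        return bool(config.get("federation_verify_certificates", True))

    assert read_verify_certificates({}) is True
    assert read_verify_certificates({"federation_verify_certificates": False}) is False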
diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 72dd5926f9..43712b8213 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -74,7 +74,7 @@ class TlsConfig(Config): # Whether to verify certificates on outbound federation traffic self.federation_verify_certificates = config.get( - "federation_verify_certificates", False, + "federation_verify_certificates", True, ) # Whitelist of domains to not verify certificates for @@ -241,12 +241,12 @@ class TlsConfig(Config): # #tls_private_key_path: "%(tls_private_key_path)s" - # Whether to verify TLS certificates when sending federation traffic. + # Whether to verify TLS server certificates for outbound federation requests. # - # This currently defaults to `false`, however this will change in - # Synapse 1.0 when valid federation certificates will be required. + # Defaults to `true`. To disable certificate verification, uncomment the + # following line. # - #federation_verify_certificates: true + #federation_verify_certificates: false # Skip federation certificate verification on the following whitelist # of domains. diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index ed0ca079d9..4153da4da7 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -27,6 +27,7 @@ from twisted.web.http import HTTPChannel from twisted.web.http_headers import Headers from twisted.web.iweb import IPolicyForHTTPS +from synapse.config.homeserver import HomeServerConfig from synapse.crypto.context_factory import ClientTLSOptionsFactory from synapse.http.federation.matrix_federation_agent import ( MatrixFederationAgent, @@ -52,11 +53,16 @@ class MatrixFederationAgentTests(TestCase): self.well_known_cache = TTLCache("test_cache", timer=self.reactor.seconds) + # for now, we disable cert verification for the test, since the cert we + # present will not be trusted. We should do better here, though. + config_dict = default_config("test", parse=False) + config_dict["federation_verify_certificates"] = False + config = HomeServerConfig() + config.parse_config_dict(config_dict) + self.agent = MatrixFederationAgent( reactor=self.reactor, - tls_client_options_factory=ClientTLSOptionsFactory( - default_config("test", parse=True) - ), + tls_client_options_factory=ClientTLSOptionsFactory(config), _well_known_tls_policy=TrustingTLSPolicyForHTTPS(), _srv_resolver=self.mock_resolver, _well_known_cache=self.well_known_cache, -- cgit 1.5.1 From 75538813fcd0403ec8915484a813b99e6eb256c6 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 5 Jun 2019 15:45:46 +0100 Subject: Fix background updates to handle redactions/rejections (#5352) * Fix background updates to handle redactions/rejections In background updates based on current state delta stream we need to handle that we may not have all the events (or at least that `get_events` may raise an exception). 
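To make the fix concrete, here is a minimal editor's sketch of the pattern the presence and stats handlers adopt in the diff below. It is illustrative rather than code from the patch, it is synchronous where the real handlers use Deferreds, and it assumes a store-like object whose `get_event(event_id, allow_none=True)` returns `None` for events we no longer have:

    def get_membership(store, event_id, default="leave"):
        """Membership from an event's content, or `default` when the event
        is missing (redacted, rejected, or never stored)."""
        if event_id is None:
            return default
        event = store.get_event(event_id, allow_none=True)
        if not event:
            # A missing event must not abort the whole background update.
            return default
        return event.content.get("membership", default)

The real handlers follow the same principle: fetch with allow_none=True and fall back to a sensible default (or skip the delta entirely) when the event cannot be loaded, rather than letting get_event raise.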
--- changelog.d/5352.bugfix | 1 + synapse/handlers/presence.py | 11 ++++--- synapse/handlers/stats.py | 18 ++++++++---- synapse/storage/events_worker.py | 37 ++++++++++++++++++++++++ tests/handlers/test_stats.py | 62 ++++++++++++++++++++++++++++++++++++++-- 5 files changed, 117 insertions(+), 12 deletions(-) create mode 100644 changelog.d/5352.bugfix (limited to 'tests') diff --git a/changelog.d/5352.bugfix b/changelog.d/5352.bugfix new file mode 100644 index 0000000000..2ffefe5a68 --- /dev/null +++ b/changelog.d/5352.bugfix @@ -0,0 +1 @@ +Fix room stats and presence background updates to correctly handle missing events. diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 6209858bbb..e49c8203ef 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -828,14 +828,17 @@ class PresenceHandler(object): # joins. continue - event = yield self.store.get_event(event_id) - if event.content.get("membership") != Membership.JOIN: + event = yield self.store.get_event(event_id, allow_none=True) + if not event or event.content.get("membership") != Membership.JOIN: # We only care about joins continue if prev_event_id: - prev_event = yield self.store.get_event(prev_event_id) - if prev_event.content.get("membership") == Membership.JOIN: + prev_event = yield self.store.get_event(prev_event_id, allow_none=True) + if ( + prev_event + and prev_event.content.get("membership") == Membership.JOIN + ): # Ignore changes to join events. continue diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py index 0e92b405ba..7ad16c8566 100644 --- a/synapse/handlers/stats.py +++ b/synapse/handlers/stats.py @@ -115,6 +115,7 @@ class StatsHandler(StateDeltasHandler): event_id = delta["event_id"] stream_id = delta["stream_id"] prev_event_id = delta["prev_event_id"] + stream_pos = delta["stream_id"] logger.debug("Handling: %r %r, %s", typ, state_key, event_id) @@ -136,10 +137,15 @@ class StatsHandler(StateDeltasHandler): event_content = {} if event_id is not None: - event_content = (yield self.store.get_event(event_id)).content or {} + event = yield self.store.get_event(event_id, allow_none=True) + if event: + event_content = event.content or {} + + # We use stream_pos here rather than fetch by event_id as event_id + # may be None + now = yield self.store.get_received_ts_by_stream_pos(stream_pos) # quantise time to the nearest bucket - now = yield self.store.get_received_ts(event_id) now = (now // 1000 // self.stats_bucket_size) * self.stats_bucket_size if typ == EventTypes.Member: @@ -149,9 +155,11 @@ class StatsHandler(StateDeltasHandler): # compare them. prev_event_content = {} if prev_event_id is not None: - prev_event_content = ( - yield self.store.get_event(prev_event_id) - ).content + prev_event = yield self.store.get_event( + prev_event_id, allow_none=True, + ) + if prev_event: + prev_event_content = prev_event.content membership = event_content.get("membership", Membership.LEAVE) prev_membership = prev_event_content.get("membership", Membership.LEAVE) diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 1782428048..cc7df5cf14 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -78,6 +78,43 @@ class EventsWorkerStore(SQLBaseStore): desc="get_received_ts", ) + def get_received_ts_by_stream_pos(self, stream_ordering): + """Given a stream ordering get an approximate timestamp of when it + happened. 
+ + This is done by simply taking the received ts of the first event that + has a stream ordering greater than or equal to the given stream pos. + If none exists returns the current time, on the assumption that it must + have happened recently. + + Args: + stream_ordering (int) + + Returns: + Deferred[int] + """ + + def _get_approximate_received_ts_txn(txn): + sql = """ + SELECT received_ts FROM events + WHERE stream_ordering >= ? + LIMIT 1 + """ + + txn.execute(sql, (stream_ordering,)) + row = txn.fetchone() + if row and row[0]: + ts = row[0] + else: + ts = self.clock.time_msec() + + return ts + + return self.runInteraction( + "get_approximate_received_ts", + _get_approximate_received_ts_txn, + ) + @defer.inlineCallbacks def get_event( self, diff --git a/tests/handlers/test_stats.py b/tests/handlers/test_stats.py index 249aba3d59..2710c991cf 100644 --- a/tests/handlers/test_stats.py +++ b/tests/handlers/test_stats.py @@ -204,7 +204,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): "a2": {"membership": "not a real thing"}, } - def get_event(event_id): + def get_event(event_id, allow_none=True): m = Mock() m.content = events[event_id] d = defer.Deferred() @@ -224,7 +224,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): "room_id": "room", "event_id": "a1", "prev_event_id": "a2", - "stream_id": "bleb", + "stream_id": 60, } ] @@ -241,7 +241,7 @@ class StatsRoomTests(unittest.HomeserverTestCase): "room_id": "room", "event_id": "a2", "prev_event_id": "a1", - "stream_id": "bleb", + "stream_id": 100, } ] @@ -249,3 +249,59 @@ class StatsRoomTests(unittest.HomeserverTestCase): self.assertEqual( f.value.args[0], "'not a real thing' is not a valid membership" ) + + def test_redacted_prev_event(self): + """ + If the prev_event does not exist, then it is assumed to be a LEAVE. + """ + u1 = self.register_user("u1", "pass") + u1_token = self.login("u1", "pass") + + room_1 = self.helper.create_room_as(u1, tok=u1_token) + + # Do the initial population of the user directory via the background update + self._add_background_updates() + + while not self.get_success(self.store.has_completed_background_updates()): + self.get_success(self.store.do_next_background_update(100), by=0.1) + + events = { + "a1": None, + "a2": {"membership": Membership.JOIN}, + } + + def get_event(event_id, allow_none=True): + if events.get(event_id): + m = Mock() + m.content = events[event_id] + else: + m = None + d = defer.Deferred() + self.reactor.callLater(0.0, d.callback, m) + return d + + def get_received_ts(event_id): + return defer.succeed(1) + + self.store.get_received_ts = get_received_ts + self.store.get_event = get_event + + deltas = [ + { + "type": EventTypes.Member, + "state_key": "some_user:test", + "room_id": room_1, + "event_id": "a2", + "prev_event_id": "a1", + "stream_id": 100, + } + ] + + # Handle our fake deltas, which has a user going from LEAVE -> JOIN. + self.get_success(self.handler._handle_deltas(deltas)) + + # One delta, with two joined members -- the room creator, and our fake + # user. 
+ r = self.get_success(self.store.get_deltas_for_room(room_1, 0)) + self.assertEqual(len(r), 1) + self.assertEqual(r[0]["joined_members"], 2) -- cgit 1.5.1 From fe13bd52acb67de56fb5e1866d0ec64fff10ed94 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 5 Jun 2019 16:35:05 +0100 Subject: Don't check whether the user's account is expired on /send_mail requests --- synapse/api/auth.py | 10 +++++-- synapse/rest/client/v2_alpha/account_validity.py | 2 +- tests/rest/client/v2_alpha/test_register.py | 35 ++++++++++++++++++++++++ 3 files changed, 44 insertions(+), 3 deletions(-) (limited to 'tests') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 0c6c93a87b..e24d942553 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -184,7 +184,13 @@ class Auth(object): return event_auth.get_public_keys(invite_event) @defer.inlineCallbacks - def get_user_by_req(self, request, allow_guest=False, rights="access"): + def get_user_by_req( + self, + request, + allow_guest=False, + rights="access", + allow_expired=False, + ): """ Get a registered user's ID. Args: @@ -229,7 +235,7 @@ class Auth(object): is_guest = user_info["is_guest"] # Deny the request if the user account has expired. - if self._account_validity.enabled: + if self._account_validity.enabled and not allow_expired: user_id = user.to_string() expiration_ts = yield self.store.get_expiration_ts_for_user(user_id) if expiration_ts is not None and self.clock.time_msec() >= expiration_ts: diff --git a/synapse/rest/client/v2_alpha/account_validity.py b/synapse/rest/client/v2_alpha/account_validity.py index fc8dbeb617..9bc1e208ca 100644 --- a/synapse/rest/client/v2_alpha/account_validity.py +++ b/synapse/rest/client/v2_alpha/account_validity.py @@ -79,7 +79,7 @@ class AccountValiditySendMailServlet(RestServlet): if not self.account_validity.renew_by_email_enabled: raise AuthError(403, "Account renewal via email is disabled on this server.") - requester = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request, allow_expired=True) user_id = requester.user.to_string() yield self.account_activity_handler.send_renewal_email_to_user(user_id) diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index d4a1d4d50c..77a2923af6 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -427,6 +427,41 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): self.assertEqual(len(self.email_attempts), 1) + def test_manual_email_send_expired_account(self): + user_id = self.register_user("kermit", "monkey") + tok = self.login("kermit", "monkey") + + # We need to manually add an email address otherwise the handler will do + # nothing. + now = self.hs.clock.time_msec() + self.get_success( + self.store.user_add_threepid( + user_id=user_id, + medium="email", + address="kermit@example.com", + validated_at=now, + added_at=now, + ) + ) + + # Make the account expire. + self.reactor.advance(datetime.timedelta(days=8).total_seconds()) + + # Ignore all emails sent by the automatic background task and only focus on the + # ones sent manually. + self.email_attempts = [] + + # Test that we're still able to manually trigger a mail to be sent. 
+ request, channel = self.make_request( + b"POST", + "/_matrix/client/unstable/account_validity/send_mail", + access_token=tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"200", channel.result) + + self.assertEqual(len(self.email_attempts), 1) + class AccountValidityBackgroundJobTestCase(unittest.HomeserverTestCase): -- cgit 1.5.1 From 9fbb20a531161652143028cde333429fe03b0343 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 6 Jun 2019 17:33:11 +0100 Subject: Stop hardcoding trust of old matrix.org key (#5374) There are a few changes going on here: * We make checking the signature on a key server response optional: if no verify_keys are specified, we trust to TLS to validate the connection. * We change the default config so that it does not require responses to be signed by the old key. * We replace the old 'perspectives' config with 'trusted_key_servers', which is also formatted slightly differently. * We emit a warning to the logs every time we trust a key server response signed by the old key. --- changelog.d/5374.feature | 1 + docs/sample_config.yaml | 43 +++- synapse/config/key.py | 228 +++++++++++++++++---- synapse/crypto/keyring.py | 72 +++---- tests/crypto/test_keyring.py | 43 ++-- .../federation/test_matrix_federation_agent.py | 1 + 6 files changed, 293 insertions(+), 95 deletions(-) create mode 100644 changelog.d/5374.feature (limited to 'tests') diff --git a/changelog.d/5374.feature b/changelog.d/5374.feature new file mode 100644 index 0000000000..17937637ab --- /dev/null +++ b/changelog.d/5374.feature @@ -0,0 +1 @@ +Replace the `perspectives` configuration section with `trusted_key_servers`, and make validating the signatures on responses optional (since TLS will do this job for us). diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 2f37e71601..a2e815ea52 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -952,12 +952,43 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key" # The trusted servers to download signing keys from. # -#perspectives: -# servers: -# "matrix.org": -# verify_keys: -# "ed25519:auto": -# key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw" +# When we need to fetch a signing key, each server is tried in parallel. +# +# Normally, the connection to the key server is validated via TLS certificates. +# Additional security can be provided by configuring a `verify key`, which +# will make synapse check that the response is signed by that key. +# +# This setting supercedes an older setting named `perspectives`. The old format +# is still supported for backwards-compatibility, but it is deprecated. +# +# Options for each entry in the list include: +# +# server_name: the name of the server. required. +# +# verify_keys: an optional map from key id to base64-encoded public key. +# If specified, we will check that the response is signed by at least +# one of the given keys. +# +# accept_keys_insecurely: a boolean. Normally, if `verify_keys` is unset, +# and federation_verify_certificates is not `true`, synapse will refuse +# to start, because this would allow anyone who can spoof DNS responses +# to masquerade as the trusted key server. If you know what you are doing +# and are sure that your network environment provides a secure connection +# to the key server, you can set this to `true` to override this +# behaviour. 
+# +# An example configuration might look like: +# +#trusted_key_servers: +# - server_name: "my_trusted_server.example.com" +# verify_keys: +# "ed25519:auto": "abcdefghijklmnopqrstuvwxyzabcdefghijklmopqr" +# - server_name: "my_other_trusted_server.example.com" +# +# The default configuration is: +# +#trusted_key_servers: +# - server_name: "matrix.org" # Enable SAML2 for registration and login. Uses pysaml2. diff --git a/synapse/config/key.py b/synapse/config/key.py index eb10259818..aba7092ccd 100644 --- a/synapse/config/key.py +++ b/synapse/config/key.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd +# Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,6 +18,8 @@ import hashlib import logging import os +import attr +import jsonschema from signedjson.key import ( NACL_ED25519, decode_signing_key_base64, @@ -32,11 +35,27 @@ from synapse.util.stringutils import random_string, random_string_with_symbols from ._base import Config, ConfigError +INSECURE_NOTARY_ERROR = """\ +Your server is configured to accept key server responses without signature +validation or TLS certificate validation. This is likely to be very insecure. If +you are *sure* you want to do this, set 'accept_keys_insecurely' on the +keyserver configuration.""" + + logger = logging.getLogger(__name__) -class KeyConfig(Config): +@attr.s +class TrustedKeyServer(object): + # string: name of the server. + server_name = attr.ib() + # dict[str,VerifyKey]|None: map from key id to key object, or None to disable + # signature verification. + verify_keys = attr.ib(default=None) + + +class KeyConfig(Config): def read_config(self, config): # the signing key can be specified inline or in a separate file if "signing_key" in config: @@ -49,16 +68,27 @@ class KeyConfig(Config): config.get("old_signing_keys", {}) ) self.key_refresh_interval = self.parse_duration( - config.get("key_refresh_interval", "1d"), + config.get("key_refresh_interval", "1d") ) - self.perspectives = self.read_perspectives( - config.get("perspectives", {}).get("servers", { - "matrix.org": {"verify_keys": { - "ed25519:auto": { - "key": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw", - } - }} - }) + + # if neither trusted_key_servers nor perspectives are given, use the default. + if "perspectives" not in config and "trusted_key_servers" not in config: + key_servers = [{"server_name": "matrix.org"}] + else: + key_servers = config.get("trusted_key_servers", []) + + if not isinstance(key_servers, list): + raise ConfigError( + "trusted_key_servers, if given, must be a list, not a %s" + % (type(key_servers).__name__,) + ) + + # merge the 'perspectives' config into the 'trusted_key_servers' config. 
+ key_servers.extend(_perspectives_to_key_servers(config)) + + # list of TrustedKeyServer objects + self.key_servers = list( + _parse_key_servers(key_servers, self.federation_verify_certificates) ) self.macaroon_secret_key = config.get( @@ -78,8 +108,9 @@ class KeyConfig(Config): # falsification of values self.form_secret = config.get("form_secret", None) - def default_config(self, config_dir_path, server_name, generate_secrets=False, - **kwargs): + def default_config( + self, config_dir_path, server_name, generate_secrets=False, **kwargs + ): base_key_name = os.path.join(config_dir_path, server_name) if generate_secrets: @@ -91,7 +122,8 @@ class KeyConfig(Config): macaroon_secret_key = "# macaroon_secret_key: " form_secret = "# form_secret: " - return """\ + return ( + """\ # a secret which is used to sign access tokens. If none is specified, # the registration_shared_secret is used, if one is given; otherwise, # a secret key is derived from the signing key. @@ -133,33 +165,53 @@ class KeyConfig(Config): # The trusted servers to download signing keys from. # - #perspectives: - # servers: - # "matrix.org": - # verify_keys: - # "ed25519:auto": - # key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw" - """ % locals() - - def read_perspectives(self, perspectives_servers): - servers = {} - for server_name, server_config in perspectives_servers.items(): - for key_id, key_data in server_config["verify_keys"].items(): - if is_signing_algorithm_supported(key_id): - key_base64 = key_data["key"] - key_bytes = decode_base64(key_base64) - verify_key = decode_verify_key_bytes(key_id, key_bytes) - servers.setdefault(server_name, {})[key_id] = verify_key - return servers + # When we need to fetch a signing key, each server is tried in parallel. + # + # Normally, the connection to the key server is validated via TLS certificates. + # Additional security can be provided by configuring a `verify key`, which + # will make synapse check that the response is signed by that key. + # + # This setting supercedes an older setting named `perspectives`. The old format + # is still supported for backwards-compatibility, but it is deprecated. + # + # Options for each entry in the list include: + # + # server_name: the name of the server. required. + # + # verify_keys: an optional map from key id to base64-encoded public key. + # If specified, we will check that the response is signed by at least + # one of the given keys. + # + # accept_keys_insecurely: a boolean. Normally, if `verify_keys` is unset, + # and federation_verify_certificates is not `true`, synapse will refuse + # to start, because this would allow anyone who can spoof DNS responses + # to masquerade as the trusted key server. If you know what you are doing + # and are sure that your network environment provides a secure connection + # to the key server, you can set this to `true` to override this + # behaviour. 
+ # + # An example configuration might look like: + # + #trusted_key_servers: + # - server_name: "my_trusted_server.example.com" + # verify_keys: + # "ed25519:auto": "abcdefghijklmnopqrstuvwxyzabcdefghijklmopqr" + # - server_name: "my_other_trusted_server.example.com" + # + # The default configuration is: + # + #trusted_key_servers: + # - server_name: "matrix.org" + """ + % locals() + ) def read_signing_key(self, signing_key_path): signing_keys = self.read_file(signing_key_path, "signing_key") try: return read_signing_keys(signing_keys.splitlines(True)) except Exception as e: - raise ConfigError( - "Error reading signing_key: %s" % (str(e)) - ) + raise ConfigError("Error reading signing_key: %s" % (str(e))) def read_old_signing_keys(self, old_signing_keys): keys = {} @@ -182,9 +234,7 @@ class KeyConfig(Config): if not self.path_exists(signing_key_path): with open(signing_key_path, "w") as signing_key_file: key_id = "a_" + random_string(4) - write_signing_keys( - signing_key_file, (generate_signing_key(key_id),), - ) + write_signing_keys(signing_key_file, (generate_signing_key(key_id),)) else: signing_keys = self.read_file(signing_key_path, "signing_key") if len(signing_keys.split("\n")[0].split()) == 1: @@ -194,6 +244,106 @@ class KeyConfig(Config): NACL_ED25519, key_id, signing_keys.split("\n")[0] ) with open(signing_key_path, "w") as signing_key_file: - write_signing_keys( - signing_key_file, (key,), + write_signing_keys(signing_key_file, (key,)) + + +def _perspectives_to_key_servers(config): + """Convert old-style 'perspectives' configs into new-style 'trusted_key_servers' + + Returns an iterable of entries to add to trusted_key_servers. + """ + + # 'perspectives' looks like: + # + # { + # "servers": { + # "matrix.org": { + # "verify_keys": { + # "ed25519:auto": { + # "key": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw" + # } + # } + # } + # } + # } + # + # 'trusted_keys' looks like: + # + # [ + # { + # "server_name": "matrix.org", + # "verify_keys": { + # "ed25519:auto": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw", + # } + # } + # ] + + perspectives_servers = config.get("perspectives", {}).get("servers", {}) + + for server_name, server_opts in perspectives_servers.items(): + trusted_key_server_entry = {"server_name": server_name} + verify_keys = server_opts.get("verify_keys") + if verify_keys is not None: + trusted_key_server_entry["verify_keys"] = { + key_id: key_data["key"] for key_id, key_data in verify_keys.items() + } + yield trusted_key_server_entry + + +TRUSTED_KEY_SERVERS_SCHEMA = { + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "schema for the trusted_key_servers setting", + "type": "array", + "items": { + "type": "object", + "properties": { + "server_name": {"type": "string"}, + "verify_keys": { + "type": "object", + # each key must be a base64 string + "additionalProperties": {"type": "string"}, + }, + }, + "required": ["server_name"], + }, +} + + +def _parse_key_servers(key_servers, federation_verify_certificates): + try: + jsonschema.validate(key_servers, TRUSTED_KEY_SERVERS_SCHEMA) + except jsonschema.ValidationError as e: + raise ConfigError("Unable to parse 'trusted_key_servers': " + e.message) + + for server in key_servers: + server_name = server["server_name"] + result = TrustedKeyServer(server_name=server_name) + + verify_keys = server.get("verify_keys") + if verify_keys is not None: + result.verify_keys = {} + for key_id, key_base64 in verify_keys.items(): + if not is_signing_algorithm_supported(key_id): + raise ConfigError( + 
"Unsupported signing algorithm on key %s for server %s in " + "trusted_key_servers" % (key_id, server_name) ) + try: + key_bytes = decode_base64(key_base64) + verify_key = decode_verify_key_bytes(key_id, key_bytes) + except Exception as e: + raise ConfigError( + "Unable to parse key %s for server %s in " + "trusted_key_servers: %s" % (key_id, server_name, e) + ) + + result.verify_keys[key_id] = verify_key + + if ( + not verify_keys + and not server.get("accept_keys_insecurely") + and not federation_verify_certificates + ): + raise ConfigError(INSECURE_NOTARY_ERROR) + + yield result diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 2b6b5913bc..96964b0d50 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -585,25 +585,27 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): super(PerspectivesKeyFetcher, self).__init__(hs) self.clock = hs.get_clock() self.client = hs.get_http_client() - self.perspective_servers = self.config.perspectives + self.key_servers = self.config.key_servers @defer.inlineCallbacks def get_keys(self, keys_to_fetch): """see KeyFetcher.get_keys""" @defer.inlineCallbacks - def get_key(perspective_name, perspective_keys): + def get_key(key_server): try: result = yield self.get_server_verify_key_v2_indirect( - keys_to_fetch, perspective_name, perspective_keys + keys_to_fetch, key_server ) defer.returnValue(result) except KeyLookupError as e: - logger.warning("Key lookup failed from %r: %s", perspective_name, e) + logger.warning( + "Key lookup failed from %r: %s", key_server.server_name, e + ) except Exception as e: logger.exception( "Unable to get key from %r: %s %s", - perspective_name, + key_server.server_name, type(e).__name__, str(e), ) @@ -613,8 +615,8 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): results = yield logcontext.make_deferred_yieldable( defer.gatherResults( [ - run_in_background(get_key, p_name, p_keys) - for p_name, p_keys in self.perspective_servers.items() + run_in_background(get_key, server) + for server in self.key_servers ], consumeErrors=True, ).addErrback(unwrapFirstError) @@ -629,17 +631,15 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): @defer.inlineCallbacks def get_server_verify_key_v2_indirect( - self, keys_to_fetch, perspective_name, perspective_keys + self, keys_to_fetch, key_server ): """ Args: keys_to_fetch (dict[str, dict[str, int]]): the keys to be fetched. 
server_name -> key_id -> min_valid_ts - perspective_name (str): name of the notary server to query for the keys - - perspective_keys (dict[str, VerifyKey]): map of key_id->key for the - notary server + key_server (synapse.config.key.TrustedKeyServer): notary server to query for + the keys Returns: Deferred[dict[str, dict[str, synapse.storage.keys.FetchKeyResult]]]: map @@ -649,6 +649,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): KeyLookupError if there was an error processing the entire response from the server """ + perspective_name = key_server.server_name logger.info( "Requesting keys %s from notary server %s", keys_to_fetch.items(), @@ -689,11 +690,13 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): ) try: - processed_response = yield self._process_perspectives_response( - perspective_name, - perspective_keys, + self._validate_perspectives_response( + key_server, response, - time_added_ms=time_now_ms, + ) + + processed_response = yield self.process_v2_response( + perspective_name, response, time_added_ms=time_now_ms ) except KeyLookupError as e: logger.warning( @@ -717,28 +720,24 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): defer.returnValue(keys) - def _process_perspectives_response( - self, perspective_name, perspective_keys, response, time_added_ms + def _validate_perspectives_response( + self, key_server, response, ): - """Parse a 'Server Keys' structure from the result of a /key/query request - - Checks that the entry is correctly signed by the perspectives server, and then - passes over to process_v2_response + """Optionally check the signature on the result of a /key/query request Args: - perspective_name (str): the name of the notary server that produced this - result - - perspective_keys (dict[str, VerifyKey]): map of key_id->key for the - notary server + key_server (synapse.config.key.TrustedKeyServer): the notary server that + produced this result response (dict): the json-decoded Server Keys response object + """ + perspective_name = key_server.server_name + perspective_keys = key_server.verify_keys - time_added_ms (int): the timestamp to record in server_keys_json + if perspective_keys is None: + # signature checking is disabled on this server + return - Returns: - Deferred[dict[str, FetchKeyResult]]: map from key_id to result object - """ if ( u"signatures" not in response or perspective_name not in response[u"signatures"] @@ -751,6 +750,13 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): verify_signed_json(response, perspective_name, perspective_keys[key_id]) verified = True + if perspective_name == "matrix.org" and key_id == "ed25519:auto": + logger.warning( + "Trusting trusted_key_server responses signed by the " + "compromised matrix.org signing key 'ed25519:auto'. " + "This is a placebo." 
+ ) + if not verified: raise KeyLookupError( "Response not signed with a known key: signed with: %r, known keys: %r" @@ -760,10 +766,6 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): ) ) - return self.process_v2_response( - perspective_name, response, time_added_ms=time_added_ms - ) - class ServerKeyFetcher(BaseV2KeyFetcher): """KeyFetcher impl which fetches keys from the origin servers""" diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 18121f4f6c..4b1901ce31 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -19,7 +19,7 @@ from mock import Mock import canonicaljson import signedjson.key import signedjson.sign -from signedjson.key import get_verify_key +from signedjson.key import encode_verify_key_base64, get_verify_key from twisted.internet import defer @@ -40,7 +40,7 @@ class MockPerspectiveServer(object): def get_verify_keys(self): vk = signedjson.key.get_verify_key(self.key) - return {"%s:%s" % (vk.alg, vk.version): vk} + return {"%s:%s" % (vk.alg, vk.version): encode_verify_key_base64(vk)} def get_signed_key(self, server_name, verify_key): key_id = "%s:%s" % (verify_key.alg, verify_key.version) @@ -48,9 +48,7 @@ class MockPerspectiveServer(object): "server_name": server_name, "old_verify_keys": {}, "valid_until_ts": time.time() * 1000 + 3600, - "verify_keys": { - key_id: {"key": signedjson.key.encode_verify_key_base64(verify_key)} - }, + "verify_keys": {key_id: {"key": encode_verify_key_base64(verify_key)}}, } self.sign_response(res) return res @@ -63,10 +61,18 @@ class KeyringTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor, clock): self.mock_perspective_server = MockPerspectiveServer() self.http_client = Mock() - hs = self.setup_test_homeserver(handlers=None, http_client=self.http_client) - keys = self.mock_perspective_server.get_verify_keys() - hs.config.perspectives = {self.mock_perspective_server.server_name: keys} - return hs + + config = self.default_config() + config["trusted_key_servers"] = [ + { + "server_name": self.mock_perspective_server.server_name, + "verify_keys": self.mock_perspective_server.get_verify_keys(), + } + ] + + return self.setup_test_homeserver( + handlers=None, http_client=self.http_client, config=config + ) def check_context(self, _, expected): self.assertEquals( @@ -371,10 +377,18 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor, clock): self.mock_perspective_server = MockPerspectiveServer() self.http_client = Mock() - hs = self.setup_test_homeserver(handlers=None, http_client=self.http_client) - keys = self.mock_perspective_server.get_verify_keys() - hs.config.perspectives = {self.mock_perspective_server.server_name: keys} - return hs + + config = self.default_config() + config["trusted_key_servers"] = [ + { + "server_name": self.mock_perspective_server.server_name, + "verify_keys": self.mock_perspective_server.get_verify_keys(), + } + ] + + return self.setup_test_homeserver( + handlers=None, http_client=self.http_client, config=config + ) def test_get_keys_from_perspectives(self): # arbitrarily advance the clock a bit @@ -439,8 +453,7 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS) self.assertEqual( - bytes(res["key_json"]), - canonicaljson.encode_canonical_json(response), + bytes(res["key_json"]), canonicaljson.encode_canonical_json(response) ) def test_invalid_perspectives_responses(self): diff --git 
a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index 4153da4da7..05880a1048 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -57,6 +57,7 @@ class MatrixFederationAgentTests(TestCase): # present will not be trusted. We should do better here, though. config_dict = default_config("test", parse=False) config_dict["federation_verify_certificates"] = False + config_dict["trusted_key_servers"] = [] config = HomeServerConfig() config.parse_config_dict(config_dict) -- cgit 1.5.1 From 3719680ee42b72b8480fa76a1455576897b65ef0 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 6 Jun 2019 17:34:07 +0100 Subject: Add ability to perform password reset via email without trusting the identity server (#5377) Sends password reset emails from the homeserver instead of proxying to the identity server. This is now the default behaviour for security reasons. If you wish to continue proxying password reset requests to the identity server you must now enable the email.trust_identity_server_for_password_resets option. This PR is a culmination of 3 smaller PRs which have each been separately reviewed: * #5308 * #5345 * #5368 --- changelog.d/5377.feature | 1 + docs/sample_config.yaml | 60 ++++- synapse/api/errors.py | 9 + synapse/app/homeserver.py | 1 + synapse/config/emailconfig.py | 153 +++++++++-- synapse/handlers/auth.py | 64 ++++- synapse/handlers/identity.py | 13 +- synapse/push/mailer.py | 85 ++++-- synapse/push/pusher.py | 4 +- synapse/python_dependencies.py | 2 +- synapse/res/templates/password_reset.html | 9 + synapse/res/templates/password_reset.txt | 7 + synapse/res/templates/password_reset_failure.html | 6 + synapse/res/templates/password_reset_success.html | 6 + synapse/rest/client/v2_alpha/account.py | 243 ++++++++++++++++- synapse/storage/_base.py | 6 +- synapse/storage/prepare_database.py | 2 +- synapse/storage/registration.py | 290 ++++++++++++++++++++- .../schema/delta/55/track_threepid_validations.sql | 31 +++ tests/utils.py | 1 - 20 files changed, 922 insertions(+), 71 deletions(-) create mode 100644 changelog.d/5377.feature create mode 100644 synapse/res/templates/password_reset.html create mode 100644 synapse/res/templates/password_reset.txt create mode 100644 synapse/res/templates/password_reset_failure.html create mode 100644 synapse/res/templates/password_reset_success.html create mode 100644 synapse/storage/schema/delta/55/track_threepid_validations.sql (limited to 'tests') diff --git a/changelog.d/5377.feature b/changelog.d/5377.feature new file mode 100644 index 0000000000..6aae41847a --- /dev/null +++ b/changelog.d/5377.feature @@ -0,0 +1 @@ +Add ability to perform password reset via email without trusting the identity server. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index a2e815ea52..ea73306fb9 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -1065,10 +1065,8 @@ password_config: -# Enable sending emails for notification events or expiry notices -# Defining a custom URL for Riot is only needed if email notifications -# should contain links to a self-hosted installation of Riot; when set -# the "app_name" setting is ignored. 
+# Enable sending emails for password resets, notification events or +# account expiry notices # # If your SMTP server requires authentication, the optional smtp_user & # smtp_pass variables should be used @@ -1076,22 +1074,64 @@ password_config: #email: # enable_notifs: false # smtp_host: "localhost" -# smtp_port: 25 +# smtp_port: 25 # SSL: 465, STARTTLS: 587 # smtp_user: "exampleusername" # smtp_pass: "examplepassword" # require_transport_security: False # notif_from: "Your Friendly %(app)s Home Server " # app_name: Matrix -# # if template_dir is unset, uses the example templates that are part of -# # the Synapse distribution. +# +# # Enable email notifications by default +# notif_for_new_users: True +# +# # Defining a custom URL for Riot is only needed if email notifications +# # should contain links to a self-hosted installation of Riot; when set +# # the "app_name" setting is ignored +# riot_base_url: "http://localhost/riot" +# +# # Enable sending password reset emails via the configured, trusted +# # identity servers +# # +# # IMPORTANT! This will give a malicious or overtaken identity server +# # the ability to reset passwords for your users! Make absolutely sure +# # that you want to do this! It is strongly recommended that password +# # reset emails be sent by the homeserver instead +# # +# # If this option is set to false and SMTP options have not been +# # configured, resetting user passwords via email will be disabled +# #trust_identity_server_for_password_resets: false +# +# # Configure the time that a validation email or text message code +# # will expire after sending +# # +# # This is currently used for password resets +# #validation_token_lifetime: 1h +# +# # Template directory. All template files should be stored within this +# # directory +# # # #template_dir: res/templates +# +# # Templates for email notifications +# # # notif_template_html: notif_mail.html # notif_template_text: notif_mail.txt -# # Templates for account expiry notices. +# +# # Templates for account expiry notices +# # # expiry_template_html: notice_expiry.html # expiry_template_text: notice_expiry.txt -# notif_for_new_users: True -# riot_base_url: "http://localhost/riot" +# +# # Templates for password reset emails sent by the homeserver +# # +# #password_reset_template_html: password_reset.html +# #password_reset_template_text: password_reset.txt +# +# # Templates for password reset success and failure pages that a user +# # will see after attempting to reset their password +# # +# #password_reset_template_success_html: password_reset_success.html +# #password_reset_template_failure_html: password_reset_failure.html #password_providers: diff --git a/synapse/api/errors.py b/synapse/api/errors.py index e91697049c..66201d6efe 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -339,6 +339,15 @@ class UnsupportedRoomVersionError(SynapseError): ) +class ThreepidValidationError(SynapseError): + """An error raised when there was a problem authorising an event.""" + + def __init__(self, *args, **kwargs): + if "errcode" not in kwargs: + kwargs["errcode"] = Codes.FORBIDDEN + super(ThreepidValidationError, self).__init__(*args, **kwargs) + + class IncompatibleRoomVersionError(SynapseError): """A server is trying to join a room whose version it does not support. 
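(Aside on the new ThreepidValidationError above: since __init__ only injects an errcode when the caller omits one, a bare raise gets the FORBIDDEN default. A minimal sketch, illustrative only and not part of the patch; it assumes nothing beyond the names defined in synapse/api/errors.py:)

    from synapse.api.errors import Codes, ThreepidValidationError

    try:
        # the storage layer raises this when, e.g., a session_id is unknown
        raise ThreepidValidationError(400, "Unknown session_id")
    except ThreepidValidationError as e:
        assert e.code == 400                 # HTTP status passed through
        assert e.msg == "Unknown session_id"
        assert e.errcode == Codes.FORBIDDEN  # defaulted by __init__
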
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 1045d28949..df524a23dd 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -176,6 +176,7 @@ class SynapseHomeServer(HomeServer): resources.update({ "/_matrix/client/api/v1": client_resource, + "/_synapse/password_reset": client_resource, "/_matrix/client/r0": client_resource, "/_matrix/client/unstable": client_resource, "/_matrix/client/v2_alpha": client_resource, diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index 8400471f40..ae04252906 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -50,6 +50,11 @@ class EmailConfig(Config): else: self.email_app_name = "Matrix" + # TODO: Rename notif_from to something more generic, or have a separate + # from for password resets, message notifications, etc? + # Currently the email section is a bit bogged down with settings for + # multiple functions. Would be good to split it out into separate + # sections and only put the common ones under email: self.email_notif_from = email_config.get("notif_from", None) if self.email_notif_from is not None: # make sure it's valid @@ -74,7 +79,28 @@ class EmailConfig(Config): "account_validity", {}, ).get("renew_at") - if self.email_enable_notifs or account_validity_renewal_enabled: + email_trust_identity_server_for_password_resets = email_config.get( + "trust_identity_server_for_password_resets", False, + ) + self.email_password_reset_behaviour = ( + "remote" if email_trust_identity_server_for_password_resets else "local" + ) + if self.email_password_reset_behaviour == "local" and email_config == {}: + logger.warn( + "User password resets have been disabled due to lack of email config" + ) + self.email_password_reset_behaviour = "off" + + # Get lifetime of a validation token in milliseconds + self.email_validation_token_lifetime = self.parse_duration( + email_config.get("validation_token_lifetime", "1h") + ) + + if ( + self.email_enable_notifs + or account_validity_renewal_enabled + or self.email_password_reset_behaviour == "local" + ): # make sure we can import the required deps import jinja2 import bleach @@ -82,6 +108,67 @@ class EmailConfig(Config): jinja2 bleach + if self.email_password_reset_behaviour == "local": + required = [ + "smtp_host", + "smtp_port", + "notif_from", + ] + + missing = [] + for k in required: + if k not in email_config: + missing.append(k) + + if (len(missing) > 0): + raise RuntimeError( + "email.password_reset_behaviour is set to 'local' " + "but required keys are missing: %s" % + (", ".join(["email." 
+ k for k in missing]),) + ) + + # Templates for password reset emails + self.email_password_reset_template_html = email_config.get( + "password_reset_template_html", "password_reset.html", + ) + self.email_password_reset_template_text = email_config.get( + "password_reset_template_text", "password_reset.txt", + ) + self.email_password_reset_failure_template = email_config.get( + "password_reset_failure_template", "password_reset_failure.html", + ) + # This template does not support any replaceable variables, so we will + # read it from the disk once during setup + email_password_reset_success_template = email_config.get( + "password_reset_success_template", "password_reset_success.html", + ) + + # Check templates exist + for f in [self.email_password_reset_template_html, + self.email_password_reset_template_text, + self.email_password_reset_failure_template, + email_password_reset_success_template]: + p = os.path.join(self.email_template_dir, f) + if not os.path.isfile(p): + raise ConfigError("Unable to find template file %s" % (p, )) + + # Retrieve content of web templates + filepath = os.path.join( + self.email_template_dir, + email_password_reset_success_template, + ) + self.email_password_reset_success_html_content = self.read_file( + filepath, + "email.password_reset_template_success_html", + ) + + if config.get("public_baseurl") is None: + raise RuntimeError( + "email.password_reset_behaviour is set to 'local' but no " + "public_baseurl is set. This is necessary to generate password " + "reset links" + ) + if self.email_enable_notifs: required = [ "smtp_host", @@ -121,10 +208,6 @@ class EmailConfig(Config): self.email_riot_base_url = email_config.get( "riot_base_url", None ) - else: - self.email_enable_notifs = False - # Not much point setting defaults for the rest: it would be an - # error for them to be used. if account_validity_renewal_enabled: self.email_expiry_template_html = email_config.get( @@ -141,10 +224,8 @@ class EmailConfig(Config): def default_config(self, config_dir_path, server_name, **kwargs): return """ - # Enable sending emails for notification events or expiry notices - # Defining a custom URL for Riot is only needed if email notifications - # should contain links to a self-hosted installation of Riot; when set - # the "app_name" setting is ignored. + # Enable sending emails for password resets, notification events or + # account expiry notices # # If your SMTP server requires authentication, the optional smtp_user & # smtp_pass variables should be used @@ -152,20 +233,62 @@ class EmailConfig(Config): #email: # enable_notifs: false # smtp_host: "localhost" - # smtp_port: 25 + # smtp_port: 25 # SSL: 465, STARTTLS: 587 # smtp_user: "exampleusername" # smtp_pass: "examplepassword" # require_transport_security: False # notif_from: "Your Friendly %(app)s Home Server " # app_name: Matrix - # # if template_dir is unset, uses the example templates that are part of - # # the Synapse distribution. + # + # # Enable email notifications by default + # notif_for_new_users: True + # + # # Defining a custom URL for Riot is only needed if email notifications + # # should contain links to a self-hosted installation of Riot; when set + # # the "app_name" setting is ignored + # riot_base_url: "http://localhost/riot" + # + # # Enable sending password reset emails via the configured, trusted + # # identity servers + # # + # # IMPORTANT! This will give a malicious or overtaken identity server + # # the ability to reset passwords for your users! 
Make absolutely sure + # # that you want to do this! It is strongly recommended that password + # # reset emails be sent by the homeserver instead + # # + # # If this option is set to false and SMTP options have not been + # # configured, resetting user passwords via email will be disabled + # #trust_identity_server_for_password_resets: false + # + # # Configure the time that a validation email or text message code + # # will expire after sending + # # + # # This is currently used for password resets + # #validation_token_lifetime: 1h + # + # # Template directory. All template files should be stored within this + # # directory + # # # #template_dir: res/templates + # + # # Templates for email notifications + # # # notif_template_html: notif_mail.html # notif_template_text: notif_mail.txt - # # Templates for account expiry notices. + # + # # Templates for account expiry notices + # # # expiry_template_html: notice_expiry.html # expiry_template_text: notice_expiry.txt - # notif_for_new_users: True - # riot_base_url: "http://localhost/riot" + # + # # Templates for password reset emails sent by the homeserver + # # + # #password_reset_template_html: password_reset.html + # #password_reset_template_text: password_reset.txt + # + # # Templates for password reset success and failure pages that a user + # # will see after attempting to reset their password + # # + # #password_reset_template_success_html: password_reset_success.html + # #password_reset_template_failure_html: password_reset_failure.html """ diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index aa5d89a9ac..7f8ddc99c6 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -162,7 +162,7 @@ class AuthHandler(BaseHandler): defer.returnValue(params) @defer.inlineCallbacks - def check_auth(self, flows, clientdict, clientip): + def check_auth(self, flows, clientdict, clientip, password_servlet=False): """ Takes a dictionary sent by the client in the login / registration protocol and handles the User-Interactive Auth flow. @@ -186,6 +186,16 @@ class AuthHandler(BaseHandler): clientip (str): The IP address of the client. + password_servlet (bool): Whether the request originated from + PasswordRestServlet. + XXX: This is a temporary hack to distinguish between checking + for threepid validations locally (in the case of password + resets) and using the identity server (in the case of binding + a 3PID during registration). Once we start using the + homeserver for both tasks, this distinction will no longer be + necessary. + + Returns: defer.Deferred[dict, dict, str]: a deferred tuple of (creds, params, session_id). 
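(Aside: in practice the new password_servlet flag selects where the 3PID validation session is looked up. A condensed, illustrative sketch of the branch this patch adds to _check_threepid — the helper name _lookup_threepid is invented for illustration, everything else follows the diff further down:)

    from twisted.internet import defer

    @defer.inlineCallbacks
    def _lookup_threepid(self, medium, threepid_creds, password_servlet):
        if (
            not password_servlet
            or self.hs.config.email_password_reset_behaviour == "remote"
        ):
            # registration, or password resets proxied to a trusted
            # identity server
            threepid = yield self.identity_handler.threepid_from_creds(
                threepid_creds
            )
        else:
            # "local": validate against our own threepid_validation_session
            # table, then delete the session once it has been consumed
            row = yield self.store.get_threepid_validation_session(
                medium,
                threepid_creds["client_secret"],
                sid=threepid_creds["sid"],
            )
            threepid = None
            if row:
                threepid = {
                    "medium": row["medium"],
                    "address": row["address"],
                    "validated_at": row["validated_at"],
                }
                yield self.store.delete_threepid_session(threepid_creds["sid"])
        defer.returnValue(threepid)
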
@@ -241,7 +251,9 @@ class AuthHandler(BaseHandler): if 'type' in authdict: login_type = authdict['type'] try: - result = yield self._check_auth_dict(authdict, clientip) + result = yield self._check_auth_dict( + authdict, clientip, password_servlet=password_servlet, + ) if result: creds[login_type] = result self._save_session(session) @@ -351,7 +363,7 @@ class AuthHandler(BaseHandler): return sess.setdefault('serverdict', {}).get(key, default) @defer.inlineCallbacks - def _check_auth_dict(self, authdict, clientip): + def _check_auth_dict(self, authdict, clientip, password_servlet=False): """Attempt to validate the auth dict provided by a client Args: @@ -369,7 +381,13 @@ class AuthHandler(BaseHandler): login_type = authdict['type'] checker = self.checkers.get(login_type) if checker is not None: - res = yield checker(authdict, clientip) + # XXX: Temporary workaround for having Synapse handle password resets + # See AuthHandler.check_auth for further details + res = yield checker( + authdict, + clientip=clientip, + password_servlet=password_servlet, + ) defer.returnValue(res) # build a v1-login-style dict out of the authdict and fall back to the @@ -383,7 +401,7 @@ class AuthHandler(BaseHandler): defer.returnValue(canonical_id) @defer.inlineCallbacks - def _check_recaptcha(self, authdict, clientip): + def _check_recaptcha(self, authdict, clientip, **kwargs): try: user_response = authdict["response"] except KeyError: @@ -429,20 +447,20 @@ class AuthHandler(BaseHandler): defer.returnValue(True) raise LoginError(401, "", errcode=Codes.UNAUTHORIZED) - def _check_email_identity(self, authdict, _): - return self._check_threepid('email', authdict) + def _check_email_identity(self, authdict, **kwargs): + return self._check_threepid('email', authdict, **kwargs) - def _check_msisdn(self, authdict, _): + def _check_msisdn(self, authdict, **kwargs): return self._check_threepid('msisdn', authdict) - def _check_dummy_auth(self, authdict, _): + def _check_dummy_auth(self, authdict, **kwargs): return defer.succeed(True) - def _check_terms_auth(self, authdict, _): + def _check_terms_auth(self, authdict, **kwargs): return defer.succeed(True) @defer.inlineCallbacks - def _check_threepid(self, medium, authdict): + def _check_threepid(self, medium, authdict, password_servlet=False, **kwargs): if 'threepid_creds' not in authdict: raise LoginError(400, "Missing threepid_creds", Codes.MISSING_PARAM) @@ -451,7 +469,29 @@ class AuthHandler(BaseHandler): identity_handler = self.hs.get_handlers().identity_handler logger.info("Getting validated threepid. 
threepidcreds: %r", (threepid_creds,)) - threepid = yield identity_handler.threepid_from_creds(threepid_creds) + if ( + not password_servlet + or self.hs.config.email_password_reset_behaviour == "remote" + ): + threepid = yield identity_handler.threepid_from_creds(threepid_creds) + elif self.hs.config.email_password_reset_behaviour == "local": + row = yield self.store.get_threepid_validation_session( + medium, + threepid_creds["client_secret"], + sid=threepid_creds["sid"], + ) + + threepid = { + "medium": row["medium"], + "address": row["address"], + "validated_at": row["validated_at"], + } if row else None + + if row: + # Valid threepid returned, delete from the db + yield self.store.delete_threepid_session(threepid_creds["sid"]) + else: + raise SynapseError(400, "Password resets are not enabled on this homeserver") if not threepid: raise LoginError(401, "", errcode=Codes.UNAUTHORIZED) diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 22469486d7..04caf65793 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -247,7 +247,14 @@ class IdentityHandler(BaseHandler): defer.returnValue(changed) @defer.inlineCallbacks - def requestEmailToken(self, id_server, email, client_secret, send_attempt, **kwargs): + def requestEmailToken( + self, + id_server, + email, + client_secret, + send_attempt, + next_link=None, + ): if not self._should_trust_id_server(id_server): raise SynapseError( 400, "Untrusted ID server '%s'" % id_server, @@ -259,7 +266,9 @@ class IdentityHandler(BaseHandler): 'client_secret': client_secret, 'send_attempt': send_attempt, } - params.update(kwargs) + + if next_link: + params.update({'next_link': next_link}) try: data = yield self.http_client.post_json_get_json( diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index c269bcf4a4..4bc9eb7313 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -80,10 +80,10 @@ ALLOWED_ATTRS = { class Mailer(object): - def __init__(self, hs, app_name, notif_template_html, notif_template_text): + def __init__(self, hs, app_name, template_html, template_text): self.hs = hs - self.notif_template_html = notif_template_html - self.notif_template_text = notif_template_text + self.template_html = template_html + self.template_text = template_text self.sendmail = self.hs.get_sendmail() self.store = self.hs.get_datastore() @@ -94,21 +94,48 @@ class Mailer(object): logger.info("Created Mailer for app_name %s" % app_name) @defer.inlineCallbacks - def send_notification_mail(self, app_id, user_id, email_address, - push_actions, reason): - try: - from_string = self.hs.config.email_notif_from % { - "app": self.app_name - } - except TypeError: - from_string = self.hs.config.email_notif_from + def send_password_reset_mail( + self, + email_address, + token, + client_secret, + sid, + ): + """Send an email with a password reset link to a user + + Args: + email_address (str): Email address we're sending the password + reset to + token (str): Unique token generated by the server to verify + password reset email was received + client_secret (str): Unique token generated by the client to + group together multiple email sending attempts + sid (str): The generated session ID + """ + if email.utils.parseaddr(email_address)[1] == '': + raise RuntimeError("Invalid 'to' email address") + + link = ( + self.hs.config.public_baseurl + + "_synapse/password_reset/email/submit_token" + "?token=%s&client_secret=%s&sid=%s" % + (token, client_secret, sid) + ) - raw_from = 
email.utils.parseaddr(from_string)[1] - raw_to = email.utils.parseaddr(email_address)[1] + template_vars = { + "link": link, + } - if raw_to == '': - raise RuntimeError("Invalid 'to' address") + yield self.send_email( + email_address, + "[%s] Password Reset Email" % self.hs.config.server_name, + template_vars, + ) + @defer.inlineCallbacks + def send_notification_mail(self, app_id, user_id, email_address, + push_actions, reason): + """Send email regarding a user's room notifications""" rooms_in_order = deduped_ordered_list( [pa['room_id'] for pa in push_actions] ) @@ -176,14 +203,36 @@ class Mailer(object): "reason": reason, } - html_text = self.notif_template_html.render(**template_vars) + yield self.send_email( + email_address, + "[%s] %s" % (self.app_name, summary_text), + template_vars, + ) + + @defer.inlineCallbacks + def send_email(self, email_address, subject, template_vars): + """Send an email with the given information and template text""" + try: + from_string = self.hs.config.email_notif_from % { + "app": self.app_name + } + except TypeError: + from_string = self.hs.config.email_notif_from + + raw_from = email.utils.parseaddr(from_string)[1] + raw_to = email.utils.parseaddr(email_address)[1] + + if raw_to == '': + raise RuntimeError("Invalid 'to' address") + + html_text = self.template_html.render(**template_vars) html_part = MIMEText(html_text, "html", "utf8") - plain_text = self.notif_template_text.render(**template_vars) + plain_text = self.template_text.render(**template_vars) text_part = MIMEText(plain_text, "plain", "utf8") multipart_msg = MIMEMultipart('alternative') - multipart_msg['Subject'] = "[%s] %s" % (self.app_name, summary_text) + multipart_msg['Subject'] = subject multipart_msg['From'] = from_string multipart_msg['To'] = email_address multipart_msg['Date'] = email.utils.formatdate() diff --git a/synapse/push/pusher.py b/synapse/push/pusher.py index 14bc7823cf..aff85daeb5 100644 --- a/synapse/push/pusher.py +++ b/synapse/push/pusher.py @@ -70,8 +70,8 @@ class PusherFactory(object): mailer = Mailer( hs=self.hs, app_name=app_name, - notif_template_html=self.notif_template_html, - notif_template_text=self.notif_template_text, + template_html=self.notif_template_html, + template_text=self.notif_template_text, ) self.mailers[app_name] = mailer return EmailPusher(self.hs, pusherdict, mailer) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index f64baa4d58..c78f2cb15e 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -77,7 +77,7 @@ REQUIREMENTS = [ ] CONDITIONAL_REQUIREMENTS = { - "email.enable_notifs": ["Jinja2>=2.9", "bleach>=1.4.2"], + "email": ["Jinja2>=2.9", "bleach>=1.4.2"], "matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"], # we use execute_batch, which arrived in psycopg 2.7. diff --git a/synapse/res/templates/password_reset.html b/synapse/res/templates/password_reset.html new file mode 100644 index 0000000000..4fa7b36734 --- /dev/null +++ b/synapse/res/templates/password_reset.html @@ -0,0 +1,9 @@ + + +

+    <p>A password reset request has been received for your Matrix account. If this was you, please click the link below to confirm resetting your password:</p>
+
+    <a href="{{ link }}">{{ link }}</a>
+
+    <p>If this was not you, please disregard this email and contact your server administrator. Thank you.</p>
+</body>
+</html>
diff --git a/synapse/res/templates/password_reset.txt b/synapse/res/templates/password_reset.txt
new file mode 100644
index 0000000000..f0deff59a7
--- /dev/null
+++ b/synapse/res/templates/password_reset.txt
@@ -0,0 +1,7 @@
+A password reset request has been received for your Matrix account. If this
+was you, please click the link below to confirm resetting your password:
+
+{{ link }}
+
+If this was not you, please disregard this email and contact your server
+administrator. Thank you.
diff --git a/synapse/res/templates/password_reset_failure.html b/synapse/res/templates/password_reset_failure.html
new file mode 100644
index 0000000000..0b132cf8db
--- /dev/null
+++ b/synapse/res/templates/password_reset_failure.html
@@ -0,0 +1,6 @@
+<html>
+<head></head>
+<body>
+<p>{{ failure_reason }}. Your password has not been reset.</p>
+</body>
+</html>
diff --git a/synapse/res/templates/password_reset_success.html b/synapse/res/templates/password_reset_success.html
new file mode 100644
index 0000000000..7b6fa5e6f0
--- /dev/null
+++ b/synapse/res/templates/password_reset_success.html
@@ -0,0 +1,6 @@
+<html>
+<head></head>
+<body>
+<p>Your password was successfully reset. You may now close this window.</p>

+ + diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index ca35dc3c83..e4c63b69b9 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -15,19 +15,25 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging +import re from six.moves import http_client +import jinja2 + from twisted.internet import defer from synapse.api.constants import LoginType -from synapse.api.errors import Codes, SynapseError +from synapse.api.errors import Codes, SynapseError, ThreepidValidationError +from synapse.http.server import finish_request from synapse.http.servlet import ( RestServlet, assert_params_in_dict, parse_json_object_from_request, + parse_string, ) from synapse.util.msisdn import phone_number_to_msisdn +from synapse.util.stringutils import random_string from synapse.util.threepids import check_3pid_allowed from ._base import client_patterns, interactive_auth_handler @@ -41,17 +47,42 @@ class EmailPasswordRequestTokenRestServlet(RestServlet): def __init__(self, hs): super(EmailPasswordRequestTokenRestServlet, self).__init__() self.hs = hs + self.datastore = hs.get_datastore() + self.config = hs.config self.identity_handler = hs.get_handlers().identity_handler + if self.config.email_password_reset_behaviour == "local": + from synapse.push.mailer import Mailer, load_jinja2_templates + templates = load_jinja2_templates( + config=hs.config, + template_html_name=hs.config.email_password_reset_template_html, + template_text_name=hs.config.email_password_reset_template_text, + ) + self.mailer = Mailer( + hs=self.hs, + app_name=self.config.email_app_name, + template_html=templates[0], + template_text=templates[1], + ) + @defer.inlineCallbacks def on_POST(self, request): + if self.config.email_password_reset_behaviour == "off": + raise SynapseError(400, "Password resets have been disabled on this server") + body = parse_json_object_from_request(request) assert_params_in_dict(body, [ - 'id_server', 'client_secret', 'email', 'send_attempt' + 'client_secret', 'email', 'send_attempt' ]) - if not check_3pid_allowed(self.hs, "email", body['email']): + # Extract params from body + client_secret = body["client_secret"] + email = body["email"] + send_attempt = body["send_attempt"] + next_link = body.get("next_link") # Optional param + + if not check_3pid_allowed(self.hs, "email", email): raise SynapseError( 403, "Your email domain is not authorized on this server", @@ -59,15 +90,100 @@ class EmailPasswordRequestTokenRestServlet(RestServlet): ) existingUid = yield self.hs.get_datastore().get_user_id_by_threepid( - 'email', body['email'] + 'email', email, ) if existingUid is None: raise SynapseError(400, "Email not found", Codes.THREEPID_NOT_FOUND) - ret = yield self.identity_handler.requestEmailToken(**body) + if self.config.email_password_reset_behaviour == "remote": + if 'id_server' not in body: + raise SynapseError(400, "Missing 'id_server' param in body") + + # Have the identity server handle the password reset flow + ret = yield self.identity_handler.requestEmailToken( + body["id_server"], email, client_secret, send_attempt, next_link, + ) + else: + # Send password reset emails from Synapse + sid = yield self.send_password_reset( + email, client_secret, send_attempt, next_link, + ) + + # Wrap the session id in a JSON object + ret = {"sid": sid} + defer.returnValue((200, ret)) + @defer.inlineCallbacks + def send_password_reset( + self, + email, + client_secret, 
+ send_attempt, + next_link=None, + ): + """Send a password reset email + + Args: + email (str): The user's email address + client_secret (str): The provided client secret + send_attempt (int): Which send attempt this is + + Returns: + The new session_id upon success + + Raises: + SynapseError is an error occurred when sending the email + """ + # Check that this email/client_secret/send_attempt combo is new or + # greater than what we've seen previously + session = yield self.datastore.get_threepid_validation_session( + "email", client_secret, address=email, validated=False, + ) + + # Check to see if a session already exists and that it is not yet + # marked as validated + if session and session.get("validated_at") is None: + session_id = session['session_id'] + last_send_attempt = session['last_send_attempt'] + + # Check that the send_attempt is higher than previous attempts + if send_attempt <= last_send_attempt: + # If not, just return a success without sending an email + defer.returnValue(session_id) + else: + # An non-validated session does not exist yet. + # Generate a session id + session_id = random_string(16) + + # Generate a new validation token + token = random_string(32) + + # Send the mail with the link containing the token, client_secret + # and session_id + try: + yield self.mailer.send_password_reset_mail( + email, token, client_secret, session_id, + ) + except Exception: + logger.exception( + "Error sending a password reset email to %s", email, + ) + raise SynapseError( + 500, "An error was encountered when sending the password reset email" + ) + + token_expires = (self.hs.clock.time_msec() + + self.config.email_validation_token_lifetime) + + yield self.datastore.start_or_continue_validation_session( + "email", email, session_id, client_secret, send_attempt, + next_link, token, token_expires, + ) + + defer.returnValue(session_id) + class MsisdnPasswordRequestTokenRestServlet(RestServlet): PATTERNS = client_patterns("/account/password/msisdn/requestToken$") @@ -80,6 +196,9 @@ class MsisdnPasswordRequestTokenRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request): + if not self.config.email_password_reset_behaviour == "off": + raise SynapseError(400, "Password resets have been disabled on this server") + body = parse_json_object_from_request(request) assert_params_in_dict(body, [ @@ -107,6 +226,118 @@ class MsisdnPasswordRequestTokenRestServlet(RestServlet): defer.returnValue((200, ret)) +class PasswordResetSubmitTokenServlet(RestServlet): + """Handles 3PID validation token submission""" + PATTERNS = [ + re.compile("^/_synapse/password_reset/(?P[^/]*)/submit_token/*$"), + ] + + def __init__(self, hs): + """ + Args: + hs (synapse.server.HomeServer): server + """ + super(PasswordResetSubmitTokenServlet, self).__init__() + self.hs = hs + self.auth = hs.get_auth() + self.config = hs.config + self.clock = hs.get_clock() + self.datastore = hs.get_datastore() + + @defer.inlineCallbacks + def on_GET(self, request, medium): + if medium != "email": + raise SynapseError( + 400, + "This medium is currently not supported for password resets", + ) + + sid = parse_string(request, "sid") + client_secret = parse_string(request, "client_secret") + token = parse_string(request, "token") + + # Attempt to validate a 3PID sesssion + try: + # Mark the session as valid + next_link = yield self.datastore.validate_threepid_session( + sid, + client_secret, + token, + self.clock.time_msec(), + ) + + # Perform a 302 redirect if next_link is set + if next_link: + if 
next_link.startswith("file:///"): + logger.warn( + "Not redirecting to next_link as it is a local file: address" + ) + else: + request.setResponseCode(302) + request.setHeader("Location", next_link) + finish_request(request) + defer.returnValue(None) + + # Otherwise show the success template + html = self.config.email_password_reset_success_html_content + request.setResponseCode(200) + except ThreepidValidationError as e: + # Show a failure page with a reason + html = self.load_jinja2_template( + self.config.email_template_dir, + self.config.email_password_reset_failure_template, + template_vars={ + "failure_reason": e.msg, + } + ) + request.setResponseCode(e.code) + + request.write(html.encode('utf-8')) + finish_request(request) + defer.returnValue(None) + + def load_jinja2_template(self, template_dir, template_filename, template_vars): + """Loads a jinja2 template with variables to insert + + Args: + template_dir (str): The directory where templates are stored + template_filename (str): The name of the template in the template_dir + template_vars (Dict): Dictionary of keys in the template + alongside their values to insert + + Returns: + str containing the contents of the rendered template + """ + loader = jinja2.FileSystemLoader(template_dir) + env = jinja2.Environment(loader=loader) + + template = env.get_template(template_filename) + return template.render(**template_vars) + + @defer.inlineCallbacks + def on_POST(self, request, medium): + if medium != "email": + raise SynapseError( + 400, + "This medium is currently not supported for password resets", + ) + + body = parse_json_object_from_request(request) + assert_params_in_dict(body, [ + 'sid', 'client_secret', 'token', + ]) + + valid, _ = yield self.datastore.validate_threepid_validation_token( + body['sid'], + body['client_secret'], + body['token'], + self.clock.time_msec(), + ) + response_code = 200 if valid else 400 + + defer.returnValue((response_code, {"success": valid})) + + class PasswordRestServlet(RestServlet): PATTERNS = client_patterns("/account/password$") @@ -144,6 +375,7 @@ class PasswordRestServlet(RestServlet): result, params, _ = yield self.auth_handler.check_auth( [[LoginType.EMAIL_IDENTITY], [LoginType.MSISDN]], body, self.hs.get_ip_from_request(request), + password_servlet=True, ) if LoginType.EMAIL_IDENTITY in result: @@ -417,6 +649,7 @@ class WhoamiRestServlet(RestServlet): def register_servlets(hs, http_server): EmailPasswordRequestTokenRestServlet(hs).register(http_server) MsisdnPasswordRequestTokenRestServlet(hs).register(http_server) + PasswordResetSubmitTokenServlet(hs).register(http_server) PasswordRestServlet(hs).register(http_server) DeactivateAccountRestServlet(hs).register(http_server) EmailThreepidRequestTokenRestServlet(hs).register(http_server) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 52891bb9eb..ae891aa332 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -588,6 +588,10 @@ class SQLBaseStore(object): Args: table : string giving the table name values : dict of new column names and values for them + or_ignore : bool stating whether an exception should be raised + when a conflicting row already exists. If True, False will be + returned by the function instead + desc : string giving a description of the transaction Returns: bool: Whether the row was inserted or not. 
Only useful when @@ -1228,8 +1232,8 @@ class SQLBaseStore(object): ) txn.execute(select_sql, list(keyvalues.values())) - row = txn.fetchone() + if not row: if allow_none: return None diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index c1711bc8bd..23a4baa484 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -25,7 +25,7 @@ logger = logging.getLogger(__name__) # Remember to update this number every time a change is made to database # schema files, so the users will be informed on server restarts. -SCHEMA_VERSION = 54 +SCHEMA_VERSION = 55 dir_path = os.path.abspath(os.path.dirname(__file__)) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 4cf159ba81..9b41cbd757 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -17,17 +17,20 @@ import re +from six import iterkeys from six.moves import range from twisted.internet import defer from synapse.api.constants import UserTypes -from synapse.api.errors import Codes, StoreError +from synapse.api.errors import Codes, StoreError, ThreepidValidationError from synapse.storage import background_updates from synapse.storage._base import SQLBaseStore from synapse.types import UserID from synapse.util.caches.descriptors import cached, cachedInlineCallbacks +THIRTY_MINUTES_IN_MS = 30 * 60 * 1000 + class RegistrationWorkerStore(SQLBaseStore): def __init__(self, db_conn, hs): @@ -422,7 +425,7 @@ class RegistrationWorkerStore(SQLBaseStore): defer.returnValue(None) @defer.inlineCallbacks - def get_user_id_by_threepid(self, medium, address): + def get_user_id_by_threepid(self, medium, address, require_verified=False): """Returns user id from threepid Args: @@ -595,6 +598,11 @@ class RegistrationStore( "user_threepids_grandfather", self._bg_user_threepids_grandfather, ) + # Create a background job for culling expired 3PID validity tokens + hs.get_clock().looping_call( + self.cull_expired_threepid_validation_tokens, THIRTY_MINUTES_IN_MS, + ) + @defer.inlineCallbacks def add_access_token_to_user(self, user_id, token, device_id=None): """Adds an access token for the given user. @@ -963,7 +971,6 @@ class RegistrationStore( We do this by grandfathering in existing user threepids assuming that they used one of the server configured trusted identity servers. """ - id_servers = set(self.config.trusted_third_party_id_servers) def _bg_user_threepids_grandfather_txn(txn): @@ -984,3 +991,280 @@ class RegistrationStore( yield self._end_background_update("user_threepids_grandfather") defer.returnValue(1) + + def get_threepid_validation_session( + self, + medium, + client_secret, + address=None, + sid=None, + validated=None, + ): + """Gets a session_id and last_send_attempt (if available) for a + client_secret/medium/(address|session_id) combo + + Args: + medium (str|None): The medium of the 3PID + address (str|None): The address of the 3PID + sid (str|None): The ID of the validation session + client_secret (str|None): A unique string provided by the client to + help identify this validation attempt + validated (bool|None): Whether sessions should be filtered by + whether they have been validated already or not. None to + perform no filtering + + Returns: + deferred {str, int}|None: A dict containing the + latest session_id and send_attempt count for this 3PID. 
+ Otherwise None if there hasn't been a previous attempt + """ + keyvalues = { + "medium": medium, + "client_secret": client_secret, + } + if address: + keyvalues["address"] = address + if sid: + keyvalues["session_id"] = sid + + assert(address or sid) + + def get_threepid_validation_session_txn(txn): + sql = """ + SELECT address, session_id, medium, client_secret, + last_send_attempt, validated_at + FROM threepid_validation_session WHERE %s + """ % (" AND ".join("%s = ?" % k for k in iterkeys(keyvalues)),) + + if validated is not None: + sql += " AND validated_at IS " + ("NOT NULL" if validated else "NULL") + + sql += " LIMIT 1" + + txn.execute(sql, list(keyvalues.values())) + rows = self.cursor_to_dict(txn) + if not rows: + return None + + return rows[0] + + return self.runInteraction( + "get_threepid_validation_session", + get_threepid_validation_session_txn, + ) + + def validate_threepid_session( + self, + session_id, + client_secret, + token, + current_ts, + ): + """Attempt to validate a threepid session using a token + + Args: + session_id (str): The id of a validation session + client_secret (str): A unique string provided by the client to + help identify this validation attempt + token (str): A validation token + current_ts (int): The current unix time in milliseconds. Used for + checking token expiry status + + Returns: + deferred str|None: A str representing a link to redirect the user + to if there is one. + """ + # Insert everything into a transaction in order to run atomically + def validate_threepid_session_txn(txn): + row = self._simple_select_one_txn( + txn, + table="threepid_validation_session", + keyvalues={"session_id": session_id}, + retcols=["client_secret", "validated_at"], + allow_none=True, + ) + + if not row: + raise ThreepidValidationError(400, "Unknown session_id") + retrieved_client_secret = row["client_secret"] + validated_at = row["validated_at"] + + if retrieved_client_secret != client_secret: + raise ThreepidValidationError( + 400, "This client_secret does not match the provided session_id", + ) + + row = self._simple_select_one_txn( + txn, + table="threepid_validation_token", + keyvalues={"session_id": session_id, "token": token}, + retcols=["expires", "next_link"], + allow_none=True, + ) + + if not row: + raise ThreepidValidationError( + 400, "Validation token not found or has expired", + ) + expires = row["expires"] + next_link = row["next_link"] + + # If the session is already validated, no need to revalidate + if validated_at: + return next_link + + if expires <= current_ts: + raise ThreepidValidationError( + 400, "This token has expired. Please request a new one", + ) + + # Looks good. 
Validate the session + self._simple_update_txn( + txn, + table="threepid_validation_session", + keyvalues={"session_id": session_id}, + updatevalues={"validated_at": self.clock.time_msec()}, + ) + + return next_link + + # Return next_link if it exists + return self.runInteraction( + "validate_threepid_session_txn", + validate_threepid_session_txn, + ) + + def upsert_threepid_validation_session( + self, + medium, + address, + client_secret, + send_attempt, + session_id, + validated_at=None, + ): + """Upsert a threepid validation session + Args: + medium (str): The medium of the 3PID + address (str): The address of the 3PID + client_secret (str): A unique string provided by the client to + help identify this validation attempt + send_attempt (int): The latest send_attempt on this session + session_id (str): The id of this validation session + validated_at (int|None): The unix timestamp in milliseconds of + when the session was marked as valid + """ + insertion_values = { + "medium": medium, + "address": address, + "client_secret": client_secret, + } + + if validated_at: + insertion_values["validated_at"] = validated_at + + return self._simple_upsert( + table="threepid_validation_session", + keyvalues={"session_id": session_id}, + values={"last_send_attempt": send_attempt}, + insertion_values=insertion_values, + desc="upsert_threepid_validation_session", + ) + + def start_or_continue_validation_session( + self, + medium, + address, + session_id, + client_secret, + send_attempt, + next_link, + token, + token_expires, + ): + """Creates a new threepid validation session if it does not already + exist and associates a new validation token with it + + Args: + medium (str): The medium of the 3PID + address (str): The address of the 3PID + session_id (str): The id of this validation session + client_secret (str): A unique string provided by the client to + help identify this validation attempt + send_attempt (int): The latest send_attempt on this session + next_link (str|None): The link to redirect the user to upon + successful validation + token (str): The validation token + token_expires (int): The timestamp for which after the token + will no longer be valid + """ + def start_or_continue_validation_session_txn(txn): + # Create or update a validation session + self._simple_upsert_txn( + txn, + table="threepid_validation_session", + keyvalues={"session_id": session_id}, + values={"last_send_attempt": send_attempt}, + insertion_values={ + "medium": medium, + "address": address, + "client_secret": client_secret, + }, + ) + + # Create a new validation token with this session ID + self._simple_insert_txn( + txn, + table="threepid_validation_token", + values={ + "session_id": session_id, + "token": token, + "next_link": next_link, + "expires": token_expires, + }, + ) + + return self.runInteraction( + "start_or_continue_validation_session", + start_or_continue_validation_session_txn, + ) + + def cull_expired_threepid_validation_tokens(self): + """Remove threepid validation tokens with expiry dates that have passed""" + def cull_expired_threepid_validation_tokens_txn(txn, ts): + sql = """ + DELETE FROM threepid_validation_token WHERE + expires < ? + """ + return txn.execute(sql, (ts,)) + + return self.runInteraction( + "cull_expired_threepid_validation_tokens", + cull_expired_threepid_validation_tokens_txn, + self.clock.time_msec(), + ) + + def delete_threepid_session(self, session_id): + """Removes a threepid validation session from the database. 
This can + be done after validation has been performed and whatever action was + waiting on it has been carried out + + Args: + session_id (str): The ID of the session to delete + """ + def delete_threepid_session_txn(txn): + self._simple_delete_txn( + txn, + table="threepid_validation_token", + keyvalues={"session_id": session_id}, + ) + self._simple_delete_txn( + txn, + table="threepid_validation_session", + keyvalues={"session_id": session_id}, + ) + + return self.runInteraction( + "delete_threepid_session", + delete_threepid_session_txn, + ) diff --git a/synapse/storage/schema/delta/55/track_threepid_validations.sql b/synapse/storage/schema/delta/55/track_threepid_validations.sql new file mode 100644 index 0000000000..a8eced2e0a --- /dev/null +++ b/synapse/storage/schema/delta/55/track_threepid_validations.sql @@ -0,0 +1,31 @@ +/* Copyright 2019 The Matrix.org Foundation C.I.C. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +CREATE TABLE IF NOT EXISTS threepid_validation_session ( + session_id TEXT PRIMARY KEY, + medium TEXT NOT NULL, + address TEXT NOT NULL, + client_secret TEXT NOT NULL, + last_send_attempt BIGINT NOT NULL, + validated_at BIGINT +); + +CREATE TABLE IF NOT EXISTS threepid_validation_token ( + token TEXT PRIMARY KEY, + session_id TEXT NOT NULL, + next_link TEXT, + expires BIGINT NOT NULL +); + +CREATE INDEX threepid_validation_token_session_id ON threepid_validation_token(session_id); diff --git a/tests/utils.py b/tests/utils.py index 200c1ceabe..b2817cf22c 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -131,7 +131,6 @@ def default_config(name, parse=False): "password_providers": [], "worker_replication_url": "", "worker_app": None, - "email_enable_notifs": False, "block_non_admin_invites": False, "federation_domain_whitelist": None, "filter_timeline_limit": 5000, -- cgit 1.5.1 From a11865016e4fc8f691ce94ec25e8f40290df8329 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 6 Jun 2019 20:13:47 +0100 Subject: Set default room version to v4. (#5379) Set default room version to v4. --- changelog.d/5379.feature | 1 + docs/sample_config.yaml | 2 +- synapse/config/server.py | 2 +- tests/storage/test_cleanup_extrems.py | 6 ++++++ tests/utils.py | 3 ++- 5 files changed, 11 insertions(+), 3 deletions(-) create mode 100644 changelog.d/5379.feature (limited to 'tests') diff --git a/changelog.d/5379.feature b/changelog.d/5379.feature new file mode 100644 index 0000000000..7b64786fe6 --- /dev/null +++ b/changelog.d/5379.feature @@ -0,0 +1 @@ +Set default room version to v4. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index ea73306fb9..4d7e6f3eb5 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -91,7 +91,7 @@ pid_file: DATADIR/homeserver.pid # For example, for room version 1, default_room_version should be set # to "1". 
# -#default_room_version: "1" +#default_room_version: "4" # The GC threshold parameters to pass to `gc.set_threshold`, if defined # diff --git a/synapse/config/server.py b/synapse/config/server.py index 334921d421..7d56e2d141 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -36,7 +36,7 @@ logger = logging.Logger(__name__) # in the list. DEFAULT_BIND_ADDRESSES = ['::', '0.0.0.0'] -DEFAULT_ROOM_VERSION = "1" +DEFAULT_ROOM_VERSION = "4" class ServerConfig(Config): diff --git a/tests/storage/test_cleanup_extrems.py b/tests/storage/test_cleanup_extrems.py index 6dda66ecd3..6aa8b8b3c6 100644 --- a/tests/storage/test_cleanup_extrems.py +++ b/tests/storage/test_cleanup_extrems.py @@ -25,6 +25,11 @@ from tests.unittest import HomeserverTestCase class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): """Test the background update to clean forward extremities table. """ + def make_homeserver(self, reactor, clock): + # Hack until we understand why test_forked_graph_cleanup fails with v4 + config = self.default_config() + config['default_room_version'] = '1' + return self.setup_test_homeserver(config=config) def prepare(self, reactor, clock, homeserver): self.store = homeserver.get_datastore() @@ -220,6 +225,7 @@ class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): Where SF* are soft failed, and with them A, B and C marked as extremities. This should resolve to B and C being marked as extremity. """ + # Create the room graph event_id_a = self.create_and_send_event() event_id_b = self.create_and_send_event() diff --git a/tests/utils.py b/tests/utils.py index b2817cf22c..f8c7ad2604 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -31,6 +31,7 @@ from synapse.api.constants import EventTypes from synapse.api.errors import CodeMessageException, cs_error from synapse.api.room_versions import RoomVersions from synapse.config.homeserver import HomeServerConfig +from synapse.config.server import DEFAULT_ROOM_VERSION from synapse.federation.transport import server as federation_server from synapse.http.server import HttpServer from synapse.server import HomeServer @@ -173,7 +174,7 @@ def default_config(name, parse=False): "use_frozen_dicts": False, # We need a sane default_room_version, otherwise attempts to create # rooms will fail. - "default_room_version": "1", + "default_room_version": DEFAULT_ROOM_VERSION, # disable user directory updates, because they get done in the # background, which upsets the test runner. "update_user_directory": False, -- cgit 1.5.1 From 2d1d7b7e6f2bec3b96b0d23993369ce46aad4f32 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 6 Jun 2019 23:54:00 +0100 Subject: Prevent multiple device list updates from breaking a batch send (#5156) fixes #5153 --- changelog.d/5156.bugfix | 1 + synapse/federation/sender/per_destination_queue.py | 5 +- synapse/storage/devices.py | 152 +++++++++++++++++---- tests/storage/test_devices.py | 69 ++++++++++ 4 files changed, 196 insertions(+), 31 deletions(-) create mode 100644 changelog.d/5156.bugfix (limited to 'tests') diff --git a/changelog.d/5156.bugfix b/changelog.d/5156.bugfix new file mode 100644 index 0000000000..e8aa7d8241 --- /dev/null +++ b/changelog.d/5156.bugfix @@ -0,0 +1 @@ +Prevent federation device list updates breaking when processing multiple updates at once. 
\ No newline at end of file diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py index fae8bea392..564c57203d 100644 --- a/synapse/federation/sender/per_destination_queue.py +++ b/synapse/federation/sender/per_destination_queue.py @@ -349,9 +349,10 @@ class PerDestinationQueue(object): @defer.inlineCallbacks def _get_new_device_messages(self, limit): last_device_list = self._last_device_list_stream_id - # Will return at most 20 entries + + # Retrieve list of new device updates to send to the destination now_stream_id, results = yield self._store.get_devices_by_remote( - self._destination, last_device_list + self._destination, last_device_list, limit=limit, ) edus = [ Edu( diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index fd869b934c..d102e07372 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -14,7 +14,7 @@ # limitations under the License. import logging -from six import iteritems, itervalues +from six import iteritems from canonicaljson import json @@ -72,11 +72,14 @@ class DeviceWorkerStore(SQLBaseStore): defer.returnValue({d["device_id"]: d for d in devices}) - def get_devices_by_remote(self, destination, from_stream_id): + @defer.inlineCallbacks + def get_devices_by_remote(self, destination, from_stream_id, limit): """Get stream of updates to send to remote servers Returns: - (int, list[dict]): current stream id and list of updates + Deferred[tuple[int, list[dict]]]: + current stream id (ie, the stream id of the last update included in the + response), and the list of updates """ now_stream_id = self._device_list_id_gen.get_current_token() @@ -84,55 +87,131 @@ class DeviceWorkerStore(SQLBaseStore): destination, int(from_stream_id) ) if not has_changed: - return (now_stream_id, []) - - return self.runInteraction( + defer.returnValue((now_stream_id, [])) + + # We retrieve n+1 devices from the list of outbound pokes where n is + # our outbound device update limit. We then check if the very last + # device has the same stream_id as the second-to-last device. If so, + # then we ignore all devices with that stream_id and only send the + # devices with a lower stream_id. + # + # If when culling the list we end up with no devices afterwards, we + # consider the device update to be too large, and simply skip the + # stream_id; the rationale being that such a large device list update + # is likely an error. + updates = yield self.runInteraction( "get_devices_by_remote", self._get_devices_by_remote_txn, destination, from_stream_id, now_stream_id, + limit + 1, ) + # Return an empty list if there are no updates + if not updates: + defer.returnValue((now_stream_id, [])) + + # if we have exceeded the limit, we need to exclude any results with the + # same stream_id as the last row. 
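    # --- Illustrative aside, not part of the diff: the culling rule that the
    # --- lines below implement, written as a standalone function. The helper
    # --- name is an assumption for illustration only.
    def _cull_device_updates(updates, limit):
        """updates: [(user_id, device_id, stream_id)] ordered by stream_id,
        fetched with limit + 1 rows so an overflow is detectable."""
        if len(updates) <= limit:
            # everything fits in one transaction
            return updates
        # Drop every row that shares the final row's stream_id, so a single
        # multi-device update is never split across two transactions; the
        # dropped rows are re-fetched whole on the next pass.
        cutoff = updates[-1][2]
        return [row for row in updates if row[2] < cutoff]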
+ if len(updates) > limit: + stream_id_cutoff = updates[-1][2] + now_stream_id = stream_id_cutoff - 1 + else: + stream_id_cutoff = None + + # Perform the equivalent of a GROUP BY + # + # Iterate through the updates list and copy non-duplicate + # (user_id, device_id) entries into a map, with the value being + # the max stream_id across each set of duplicate entries + # + # maps (user_id, device_id) -> stream_id + # as long as their stream_id does not match that of the last row + query_map = {} + for update in updates: + if stream_id_cutoff is not None and update[2] >= stream_id_cutoff: + # Stop processing updates + break + + key = (update[0], update[1]) + query_map[key] = max(query_map.get(key, 0), update[2]) + + # If we didn't find any updates with a stream_id lower than the cutoff, it + # means that there are more than limit updates all of which have the same + # steam_id. + + # That should only happen if a client is spamming the server with new + # devices, in which case E2E isn't going to work well anyway. We'll just + # skip that stream_id and return an empty list, and continue with the next + # stream_id next time. + if not query_map: + defer.returnValue((stream_id_cutoff, [])) + + results = yield self._get_device_update_edus_by_remote( + destination, + from_stream_id, + query_map, + ) + + defer.returnValue((now_stream_id, results)) + def _get_devices_by_remote_txn( - self, txn, destination, from_stream_id, now_stream_id + self, txn, destination, from_stream_id, now_stream_id, limit ): + """Return device update information for a given remote destination + + Args: + txn (LoggingTransaction): The transaction to execute + destination (str): The host the device updates are intended for + from_stream_id (int): The minimum stream_id to filter updates by, exclusive + now_stream_id (int): The maximum stream_id to filter updates by, inclusive + limit (int): Maximum number of device updates to return + + Returns: + List: List of device updates + """ sql = """ - SELECT user_id, device_id, max(stream_id) FROM device_lists_outbound_pokes + SELECT user_id, device_id, stream_id FROM device_lists_outbound_pokes WHERE destination = ? AND ? < stream_id AND stream_id <= ? AND sent = ? - GROUP BY user_id, device_id - LIMIT 20 + ORDER BY stream_id + LIMIT ? 
""" - txn.execute(sql, (destination, from_stream_id, now_stream_id, False)) + txn.execute(sql, (destination, from_stream_id, now_stream_id, False, limit)) - # maps (user_id, device_id) -> stream_id - query_map = {(r[0], r[1]): r[2] for r in txn} - if not query_map: - return (now_stream_id, []) + return list(txn) - if len(query_map) >= 20: - now_stream_id = max(stream_id for stream_id in itervalues(query_map)) + @defer.inlineCallbacks + def _get_device_update_edus_by_remote( + self, destination, from_stream_id, query_map, + ): + """Returns a list of device update EDUs as well as E2EE keys - devices = self._get_e2e_device_keys_txn( - txn, + Args: + destination (str): The host the device updates are intended for + from_stream_id (int): The minimum stream_id to filter updates by, exclusive + query_map (Dict[(str, str): int]): Dictionary mapping + user_id/device_id to update stream_id + + Returns: + List[Dict]: List of objects representing an device update EDU + + """ + devices = yield self.runInteraction( + "_get_e2e_device_keys_txn", + self._get_e2e_device_keys_txn, query_map.keys(), include_all_devices=True, include_deleted_devices=True, ) - prev_sent_id_sql = """ - SELECT coalesce(max(stream_id), 0) as stream_id - FROM device_lists_outbound_last_success - WHERE destination = ? AND user_id = ? AND stream_id <= ? - """ - results = [] for user_id, user_devices in iteritems(devices): # The prev_id for the first row is always the last row before # `from_stream_id` - txn.execute(prev_sent_id_sql, (destination, user_id, from_stream_id)) - rows = txn.fetchall() - prev_id = rows[0][0] + prev_id = yield self._get_last_device_update_for_remote_user( + destination, user_id, from_stream_id, + ) for device_id, device in iteritems(user_devices): stream_id = query_map[(user_id, device_id)] result = { @@ -156,7 +235,22 @@ class DeviceWorkerStore(SQLBaseStore): results.append(result) - return (now_stream_id, results) + defer.returnValue(results) + + def _get_last_device_update_for_remote_user( + self, destination, user_id, from_stream_id, + ): + def f(txn): + prev_sent_id_sql = """ + SELECT coalesce(max(stream_id), 0) as stream_id + FROM device_lists_outbound_last_success + WHERE destination = ? AND user_id = ? AND stream_id <= ? + """ + txn.execute(prev_sent_id_sql, (destination, user_id, from_stream_id)) + rows = txn.fetchall() + return rows[0][0] + + return self.runInteraction("get_last_device_update_for_remote_user", f) def mark_as_sent_devices_by_remote(self, destination, stream_id): """Mark that updates have successfully been sent to the destination. 
diff --git a/tests/storage/test_devices.py b/tests/storage/test_devices.py index aef4dfaf57..6396ccddb5 100644 --- a/tests/storage/test_devices.py +++ b/tests/storage/test_devices.py @@ -71,6 +71,75 @@ class DeviceStoreTestCase(tests.unittest.TestCase): res["device2"], ) + @defer.inlineCallbacks + def test_get_devices_by_remote(self): + device_ids = ["device_id1", "device_id2"] + + # Add two device updates with a single stream_id + yield self.store.add_device_change_to_streams( + "user_id", device_ids, ["somehost"], + ) + + # Get all device updates ever meant for this remote + now_stream_id, device_updates = yield self.store.get_devices_by_remote( + "somehost", -1, limit=100, + ) + + # Check original device_ids are contained within these updates + self._check_devices_in_updates(device_ids, device_updates) + + @defer.inlineCallbacks + def test_get_devices_by_remote_limited(self): + # Test breaking the update limit in 1, 101, and 1 device_id segments + + # first add one device + device_ids1 = ["device_id0"] + yield self.store.add_device_change_to_streams( + "user_id", device_ids1, ["someotherhost"], + ) + + # then add 101 + device_ids2 = ["device_id" + str(i + 1) for i in range(101)] + yield self.store.add_device_change_to_streams( + "user_id", device_ids2, ["someotherhost"], + ) + + # then one more + device_ids3 = ["newdevice"] + yield self.store.add_device_change_to_streams( + "user_id", device_ids3, ["someotherhost"], + ) + + # + # now read them back. + # + + # first we should get a single update + now_stream_id, device_updates = yield self.store.get_devices_by_remote( + "someotherhost", -1, limit=100, + ) + self._check_devices_in_updates(device_ids1, device_updates) + + # Then we should get an empty list back as the 101 devices broke the limit + now_stream_id, device_updates = yield self.store.get_devices_by_remote( + "someotherhost", now_stream_id, limit=100, + ) + self.assertEqual(len(device_updates), 0) + + # The 101 devices should've been cleared, so we should now just get one device + # update + now_stream_id, device_updates = yield self.store.get_devices_by_remote( + "someotherhost", now_stream_id, limit=100, + ) + self._check_devices_in_updates(device_ids3, device_updates) + + def _check_devices_in_updates(self, expected_device_ids, device_updates): + """Check that an specific device ids exist in a list of device update EDUs""" + self.assertEqual(len(device_updates), len(expected_device_ids)) + + received_device_ids = {update["device_id"] for update in device_updates} + self.assertEqual(received_device_ids, set(expected_device_ids)) + @defer.inlineCallbacks def test_update_device(self): yield self.store.store_device("user_id", "device_id", "display_name 1") -- cgit 1.5.1 From 8182a1cfb523fb1e8d328716111e98be3a1c5c35 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 7 Jun 2019 11:09:08 +0100 Subject: Refactor email tests --- tests/push/test_email.py | 64 +++++++++++++++++++++++++++++++----------------- 1 file changed, 42 insertions(+), 22 deletions(-) (limited to 'tests') diff --git a/tests/push/test_email.py b/tests/push/test_email.py index 9cdde1a9bd..62b3c2a99d 100644 --- a/tests/push/test_email.py +++ b/tests/push/test_email.py @@ -15,6 +15,7 @@ import os +import attr import pkg_resources from twisted.internet.defer import Deferred @@ -30,6 +31,13 @@ except Exception: load_jinja2_templates = None +@attr.s +class _User(object): + "Helper wrapper for user ID and access token" + id = attr.ib() + token = attr.ib() + + class EmailPusherTests(HomeserverTestCase): skip = "No 
Jinja installed" if not load_jinja2_templates else None @@ -77,25 +85,32 @@ class EmailPusherTests(HomeserverTestCase): return hs - def test_sends_email(self): - + def prepare(self, reactor, clock, hs): # Register the user who gets notified - user_id = self.register_user("user", "pass") - access_token = self.login("user", "pass") - - # Register the user who sends the message - other_user_id = self.register_user("otheruser", "pass") - other_access_token = self.login("otheruser", "pass") + self.user_id = self.register_user("user", "pass") + self.access_token = self.login("user", "pass") + + # Register other users + self.others = [ + _User( + id=self.register_user("otheruser1", "pass"), + token=self.login("otheruser1", "pass"), + ), + _User( + id=self.register_user("otheruser2", "pass"), + token=self.login("otheruser2", "pass"), + ), + ] # Register the pusher user_tuple = self.get_success( - self.hs.get_datastore().get_user_by_access_token(access_token) + self.hs.get_datastore().get_user_by_access_token(self.access_token) ) token_id = user_tuple["token_id"] self.get_success( self.hs.get_pusherpool().add_pusher( - user_id=user_id, + user_id=self.user_id, access_token=token_id, kind="email", app_id="m.email", @@ -107,22 +122,27 @@ class EmailPusherTests(HomeserverTestCase): ) ) - # Create a room - room = self.helper.create_room_as(user_id, tok=access_token) + def test_simple_sends_email(self): + # Create a simple room with two users + room = self.helper.create_room_as(self.user_id, tok=self.access_token) + self.helper.invite( + room=room, src=self.user_id, tok=self.access_token, targ=self.others[0].id, + ) + self.helper.join(room=room, user=self.others[0].id, tok=self.others[0].token) - # Invite the other person - self.helper.invite(room=room, src=user_id, tok=access_token, targ=other_user_id) + # The other user sends some messages + self.helper.send(room, body="Hi!", tok=self.others[0].token) + self.helper.send(room, body="There!", tok=self.others[0].token) - # The other user joins - self.helper.join(room=room, user=other_user_id, tok=other_access_token) + # We should get emailed about that message + self._check_for_mail() - # The other user sends some messages - self.helper.send(room, body="Hi!", tok=other_access_token) - self.helper.send(room, body="There!", tok=other_access_token) + def _check_for_mail(self): + "Check that the user receives an email notification" # Get the stream ordering before it gets sent pushers = self.get_success( - self.hs.get_datastore().get_pushers_by(dict(user_name=user_id)) + self.hs.get_datastore().get_pushers_by(dict(user_name=self.user_id)) ) self.assertEqual(len(pushers), 1) last_stream_ordering = pushers[0]["last_stream_ordering"] @@ -132,7 +152,7 @@ class EmailPusherTests(HomeserverTestCase): # It hasn't succeeded yet, so the stream ordering shouldn't have moved pushers = self.get_success( - self.hs.get_datastore().get_pushers_by(dict(user_name=user_id)) + self.hs.get_datastore().get_pushers_by(dict(user_name=self.user_id)) ) self.assertEqual(len(pushers), 1) self.assertEqual(last_stream_ordering, pushers[0]["last_stream_ordering"]) @@ -149,7 +169,7 @@ class EmailPusherTests(HomeserverTestCase): # The stream ordering has increased pushers = self.get_success( - self.hs.get_datastore().get_pushers_by(dict(user_name=user_id)) + self.hs.get_datastore().get_pushers_by(dict(user_name=self.user_id)) ) self.assertEqual(len(pushers), 1) self.assertTrue(pushers[0]["last_stream_ordering"] > last_stream_ordering) -- cgit 1.5.1 From 
2ebeda48b2e6ba522fe049ee7ef13450f6839e1b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 7 Jun 2019 12:10:23 +0100 Subject: Add test --- synapse/push/emailpusher.py | 19 +++++++++++++++++++ synapse/push/pusherpool.py | 30 +++++++++++++++++++++++------- tests/push/test_email.py | 29 ++++++++++++++++++++++++++++- 3 files changed, 70 insertions(+), 8 deletions(-) (limited to 'tests') diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py index e8ee67401f..c89a8438a9 100644 --- a/synapse/push/emailpusher.py +++ b/synapse/push/emailpusher.py @@ -114,6 +114,21 @@ class EmailPusher(object): run_as_background_process("emailpush.process", self._process) + def _pause_processing(self): + """Used by tests to temporarily pause processing of events. + + Asserts that its not currently processing. + """ + assert not self._is_processing + self._is_processing = True + + def _resume_processing(self): + """Used by tests to resume processing of events after pausing. + """ + assert self._is_processing + self._is_processing = False + self._start_processing() + @defer.inlineCallbacks def _process(self): # we should never get here if we are already processing @@ -215,6 +230,10 @@ class EmailPusher(object): @defer.inlineCallbacks def save_last_stream_ordering_and_success(self, last_stream_ordering): + if last_stream_ordering is None: + # This happens if we haven't yet processed anything + return + self.last_stream_ordering = last_stream_ordering yield self.store.update_pusher_last_stream_ordering_and_success( self.app_id, self.email, self.user_id, diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 40a7709c09..63c583565f 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -60,6 +60,11 @@ class PusherPool: def add_pusher(self, user_id, access_token, kind, app_id, app_display_name, device_display_name, pushkey, lang, data, profile_tag=""): + """Creates a new pusher and adds it to the pool + + Returns: + Deferred[EmailPusher|HttpPusher] + """ time_now_msec = self.clock.time_msec() # we try to create the pusher just to validate the config: it @@ -103,7 +108,9 @@ class PusherPool: last_stream_ordering=last_stream_ordering, profile_tag=profile_tag, ) - yield self.start_pusher_by_id(app_id, pushkey, user_id) + pusher = yield self.start_pusher_by_id(app_id, pushkey, user_id) + + defer.returnValue(pusher) @defer.inlineCallbacks def remove_pushers_by_app_id_and_pushkey_not_user(self, app_id, pushkey, @@ -184,7 +191,11 @@ class PusherPool: @defer.inlineCallbacks def start_pusher_by_id(self, app_id, pushkey, user_id): - """Look up the details for the given pusher, and start it""" + """Look up the details for the given pusher, and start it + + Returns: + Deferred[EmailPusher|HttpPusher|None]: The pusher started, if any + """ if not self._should_start_pushers: return @@ -192,13 +203,16 @@ class PusherPool: app_id, pushkey ) - p = None + pusher_dict = None for r in resultlist: if r['user_name'] == user_id: - p = r + pusher_dict = r - if p: - yield self._start_pusher(p) + pusher = None + if pusher_dict: + pusher = yield self._start_pusher(pusher_dict) + + defer.returnValue(pusher) @defer.inlineCallbacks def _start_pushers(self): @@ -224,7 +238,7 @@ class PusherPool: pusherdict (dict): Returns: - None + Deferred[EmailPusher|HttpPusher] """ try: p = self.pusher_factory.create_pusher(pusherdict) @@ -270,6 +284,8 @@ class PusherPool: p.on_started(have_notifs) + defer.returnValue(p) + @defer.inlineCallbacks def remove_pusher(self, app_id, pushkey, user_id): 
appid_pushkey = "%s:%s" % (app_id, pushkey) diff --git a/tests/push/test_email.py b/tests/push/test_email.py index 62b3c2a99d..c10b65d4b8 100644 --- a/tests/push/test_email.py +++ b/tests/push/test_email.py @@ -108,7 +108,7 @@ class EmailPusherTests(HomeserverTestCase): ) token_id = user_tuple["token_id"] - self.get_success( + self.pusher = self.get_success( self.hs.get_pusherpool().add_pusher( user_id=self.user_id, access_token=token_id, @@ -137,6 +137,33 @@ class EmailPusherTests(HomeserverTestCase): # We should get emailed about that message self._check_for_mail() + def test_multiple_members_email(self): + # We want to test multiple notifications, so we pause processing of push + # while we send messages. + self.pusher._pause_processing() + + # Create a simple room with multiple other users + room = self.helper.create_room_as(self.user_id, tok=self.access_token) + + for other in self.others: + self.helper.invite( + room=room, src=self.user_id, tok=self.access_token, targ=other.id, + ) + self.helper.join(room=room, user=other.id, tok=other.token) + + # The other users send some messages + self.helper.send(room, body="Hi!", tok=self.others[0].token) + self.helper.send(room, body="There!", tok=self.others[1].token) + self.helper.send(room, body="There!", tok=self.others[1].token) + + # Nothing should have happened yet, as we're paused. + assert not self.email_attempts + + self.pusher._resume_processing() + + # We should get emailed about those messages + self._check_for_mail() + def _check_for_mail(self): "Check that the user receives an email notification" -- cgit 1.5.1 From 43badd2cd4315c3f3ed45b0092c4479a43a3eb52 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 10 Jun 2019 14:31:05 +0100 Subject: Fix key verification when key stored with null valid_until_ms Some keys are stored in the synapse database with a null valid_until_ms which caused an exception to be thrown when using that key. We fix this by treating nulls as zeroes, i.e. they keys will match verification requests with a minimum_valid_until_ms of zero (i.e. don't validate ts) but will not match requests with a non-zero minimum_valid_until_ms. Fixes #5391. --- synapse/storage/keys.py | 8 +++++++ tests/crypto/test_keyring.py | 50 +++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 57 insertions(+), 1 deletion(-) (limited to 'tests') diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py index 5300720dbb..e3655ad8d7 100644 --- a/synapse/storage/keys.py +++ b/synapse/storage/keys.py @@ -80,6 +80,14 @@ class KeyStore(SQLBaseStore): for row in txn: server_name, key_id, key_bytes, ts_valid_until_ms = row + + if ts_valid_until_ms is None: + # Old keys may be stored with a ts_valid_until_ms of null, + # in which case we treat this as if it was set to `0`, i.e. + # it won't match key requests that define a minimum + # `ts_valid_until_ms`. 
+ ts_valid_until_ms = 0 + res = FetchKeyResult( verify_key=decode_verify_key_bytes(key_id, bytes(key_bytes)), valid_until_ts=ts_valid_until_ms, diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 4b1901ce31..5a355f00cc 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -25,7 +25,11 @@ from twisted.internet import defer from synapse.api.errors import SynapseError from synapse.crypto import keyring -from synapse.crypto.keyring import PerspectivesKeyFetcher, ServerKeyFetcher +from synapse.crypto.keyring import ( + PerspectivesKeyFetcher, + ServerKeyFetcher, + StoreKeyFetcher, +) from synapse.storage.keys import FetchKeyResult from synapse.util import logcontext from synapse.util.logcontext import LoggingContext @@ -219,6 +223,50 @@ class KeyringTestCase(unittest.HomeserverTestCase): # self.assertFalse(d.called) self.get_success(d) + def test_verify_json_for_server_with_null_valid_until_ms(self): + """Tests that we correctly handle key requests for keys we've stored + with a null `ts_valid_until_ms` + """ + mock_fetcher = keyring.KeyFetcher() + mock_fetcher.get_keys = Mock(return_value=defer.succeed({})) + + kr = keyring.Keyring( + self.hs, key_fetchers=(StoreKeyFetcher(self.hs), mock_fetcher) + ) + + key1 = signedjson.key.generate_signing_key(1) + r = self.hs.datastore.store_server_verify_keys( + "server9", + time.time() * 1000, + [("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), None))], + ) + self.get_success(r) + + json1 = {} + signedjson.sign.sign_json(json1, "server9", key1) + + # should fail immediately on an unsigned object + d = _verify_json_for_server(kr, "server9", {}, 0, "test unsigned") + self.failureResultOf(d, SynapseError) + + # should fail on a signed object with a non-zero minimum_valid_until_ms, + # as it tries to refetch the keys and fails. + d = _verify_json_for_server( + kr, "server9", json1, 500, "test signed non-zero min" + ) + self.get_failure(d, SynapseError) + + # We expect the keyring tried to refetch the key once. + mock_fetcher.get_keys.assert_called_once_with( + {"server9": {get_key_id(key1): 500}} + ) + + # should succeed on a signed object with a 0 minimum_valid_until_ms + d = _verify_json_for_server( + kr, "server9", json1, 0, "test signed with zero min" + ) + self.get_success(d) + def test_verify_json_dedupes_key_requests(self): """Two requests for the same key should be deduped.""" key1 = signedjson.key.generate_signing_key(1) -- cgit 1.5.1 From e01668122126a4b6b7d45e2e24f591bb8546623b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 10 Jun 2019 16:06:25 +0100 Subject: Tests for SSL certs for federation connections Add some tests for bad certificates for federation and .well-known connections --- tests/http/__init__.py | 126 +++++++++++++-- tests/http/ca.crt | 19 +++ tests/http/ca.key | 27 ++++ .../federation/test_matrix_federation_agent.py | 169 +++++++++++++++++++-- tests/http/server.key | 27 ++++ tests/http/server.pem | 81 ---------- 6 files changed, 343 insertions(+), 106 deletions(-) create mode 100644 tests/http/ca.crt create mode 100644 tests/http/ca.key create mode 100644 tests/http/server.key delete mode 100644 tests/http/server.pem (limited to 'tests') diff --git a/tests/http/__init__.py b/tests/http/__init__.py index 851fc0eb33..b03fff0945 100644 --- a/tests/http/__init__.py +++ b/tests/http/__init__.py @@ -13,28 +13,124 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import os.path +import subprocess + +from zope.interface import implementer from OpenSSL import SSL +from OpenSSL.SSL import Connection +from twisted.internet.interfaces import IOpenSSLServerConnectionCreator + + +def get_test_ca_cert_file(): + """Get the path to the test CA cert + + The keypair is generated with: + + openssl genrsa -out ca.key 2048 + openssl req -new -x509 -key ca.key -days 3650 -out ca.crt \ + -subj '/CN=synapse test CA' + """ + return os.path.join(os.path.dirname(__file__), "ca.crt") + + +def get_test_key_file(): + """get the path to the test key + + The key file is made with: + + openssl genrsa -out server.key 2048 + """ + return os.path.join(os.path.dirname(__file__), "server.key") + + +cert_file_count = 0 + +CONFIG_TEMPLATE = b"""\ +[default] +basicConstraints = CA:FALSE +keyUsage=nonRepudiation, digitalSignature, keyEncipherment +subjectAltName = %(sanentries)b +""" + + +def create_test_cert_file(sanlist): + """build an x509 certificate file + + Args: + sanlist: list[bytes]: a list of subjectAltName values for the cert + + Returns: + str: the path to the file + """ + global cert_file_count + csr_filename = "server.csr" + cnf_filename = "server.%i.cnf" % (cert_file_count,) + cert_filename = "server.%i.crt" % (cert_file_count,) + cert_file_count += 1 + + # first build a CSR + subprocess.run( + [ + "openssl", + "req", + "-new", + "-key", + get_test_key_file(), + "-subj", + "/", + "-out", + csr_filename, + ], + check=True, + ) + # now a config file describing the right SAN entries + sanentries = b",".join(sanlist) + with open(cnf_filename, "wb") as f: + f.write(CONFIG_TEMPLATE % {b"sanentries": sanentries}) -def get_test_cert_file(): - """get the path to the test cert""" + # finally the cert + ca_key_filename = os.path.join(os.path.dirname(__file__), "ca.key") + ca_cert_filename = get_test_ca_cert_file() + subprocess.run( + [ + "openssl", + "x509", + "-req", + "-in", + csr_filename, + "-CA", + ca_cert_filename, + "-CAkey", + ca_key_filename, + "-set_serial", + "1", + "-extfile", + cnf_filename, + "-out", + cert_filename, + ], + check=True, + ) - # the cert file itself is made with: - # - # openssl req -x509 -newkey rsa:4096 -keyout server.pem -out server.pem -days 36500 \ - # -nodes -subj '/CN=testserv' - return os.path.join(os.path.dirname(__file__), 'server.pem') + return cert_filename -class ServerTLSContext(object): - """A TLS Context which presents our test cert.""" +@implementer(IOpenSSLServerConnectionCreator) +class TestServerTLSConnectionFactory(object): + """An SSL connection creator which returns connections which present a certificate + signed by our test CA.""" - def __init__(self): - self.filename = get_test_cert_file() + def __init__(self, sanlist): + """ + Args: + sanlist: list[bytes]: a list of subjectAltName values for the cert + """ + self._cert_file = create_test_cert_file(sanlist) - def getContext(self): + def serverConnectionForTLS(self, tlsProtocol): ctx = SSL.Context(SSL.TLSv1_METHOD) - ctx.use_certificate_file(self.filename) - ctx.use_privatekey_file(self.filename) - return ctx + ctx.use_certificate_file(self._cert_file) + ctx.use_privatekey_file(get_test_key_file()) + return Connection(ctx, None) diff --git a/tests/http/ca.crt b/tests/http/ca.crt new file mode 100644 index 0000000000..730f81e99c --- /dev/null +++ b/tests/http/ca.crt @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDCjCCAfKgAwIBAgIJAPwHIHgH/jtjMA0GCSqGSIb3DQEBCwUAMBoxGDAWBgNV +BAMMD3N5bmFwc2UgdGVzdCBDQTAeFw0xOTA2MTAxMTI2NDdaFw0yOTA2MDcxMTI2 
+NDdaMBoxGDAWBgNVBAMMD3N5bmFwc2UgdGVzdCBDQTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAOZOXCKuylf9jHzJXpU2nS+XEKrnGPgs2SAhQKrzBxg3 +/d8KT2Zsfsj1i3G7oGu7B0ZKO6qG5AxOPCmSMf9/aiSHFilfSh+r8rCpJyWMev2c +/w/xmhoFHgn+H90NnqlXvWb5y1YZCE3gWaituQSaa93GPKacRqXCgIrzjPUuhfeT +uwFQt4iyUhMNBYEy3aw4IuIHdyBqi4noUhR2ZeuflLJ6PswdJ8mEiAvxCbBGPerq +idhWcZwlo0fKu4u1uu5B8TnTsMg2fJgL6c5olBG90Urt22gA6anfP5W/U1ZdVhmB +T3Rv5SJMkGyMGE6sEUetLFyb2GJpgGD7ePkUCZr+IMMCAwEAAaNTMFEwHQYDVR0O +BBYEFLg7nTCYsvQXWTyS6upLc0YTlIwRMB8GA1UdIwQYMBaAFLg7nTCYsvQXWTyS +6upLc0YTlIwRMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBADqx +GX4Ul5OGQlcG+xTt4u3vMCeqGo8mh1AnJ7zQbyRmwjJiNxJVX+/EcqFSTsmkBNoe +xdYITI7Z6dyoiKw99yCZDE7gALcyACEU7r0XY7VY/hebAaX6uLaw1sZKKAIC04lD +KgCu82tG85n60Qyud5SiZZF0q1XVq7lbvOYVdzVZ7k8Vssy5p9XnaLJLMggYeOiX +psHIQjvYGnTTEBZZHzWOrc0WGThd69wxTOOkAbCsoTPEwZL8BGUsdtLWtvhp452O +npvaUBzKg39R5X3KTdhB68XptiQfzbQkd3FtrwNuYPUywlsg55Bxkv85n57+xDO3 +D9YkgUqEp0RGUXQgCsQ= +-----END CERTIFICATE----- diff --git a/tests/http/ca.key b/tests/http/ca.key new file mode 100644 index 0000000000..5c99cae186 --- /dev/null +++ b/tests/http/ca.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpgIBAAKCAQEA5k5cIq7KV/2MfMlelTadL5cQqucY+CzZICFAqvMHGDf93wpP +Zmx+yPWLcbuga7sHRko7qobkDE48KZIx/39qJIcWKV9KH6vysKknJYx6/Zz/D/Ga +GgUeCf4f3Q2eqVe9ZvnLVhkITeBZqK25BJpr3cY8ppxGpcKAivOM9S6F95O7AVC3 +iLJSEw0FgTLdrDgi4gd3IGqLiehSFHZl65+Usno+zB0nyYSIC/EJsEY96uqJ2FZx +nCWjR8q7i7W67kHxOdOwyDZ8mAvpzmiUEb3RSu3baADpqd8/lb9TVl1WGYFPdG/l +IkyQbIwYTqwRR60sXJvYYmmAYPt4+RQJmv4gwwIDAQABAoIBAQCFuFG+wYYy+MCt +Y65LLN6vVyMSWAQjdMbM5QHLQDiKU1hQPIhFjBFBVXCVpL9MTde3dDqYlKGsk3BT +ItNs6eoTM2wmsXE0Wn4bHNvh7WMsBhACjeFP4lDCtI6DpvjMkmkidT8eyoIL1Yu5 +aMTYa2Dd79AfXPWYIQrJowfhBBY83KuW5fmYnKKDVLqkT9nf2dgmmQz85RgtNiZC +zFkIsNmPqH1zRbcw0wORfOBrLFvsMc4Tt8EY5Wz3NnH8Zfgf8Q3MgARH1yspz3Vp +B+EYHbsK17xZ+P59KPiX3yefvyYWEUjFF7ymVsVnDxLugYl4pXwWUpm19GxeDvFk +cgBUD5OBAoGBAP7lBdCp6lx6fYtxdxUm3n4MMQmYcac4qZdeBIrvpFMnvOBBuixl +eavcfFmFdwgAr8HyVYiu9ynac504IYvmtYlcpUmiRBbmMHbvLQEYHl7FYFKNz9ej +2ue4oJE3RsPdLsD3xIlc+xN8oT1j0knyorwsHdj0Sv77eZzZS9XZZfJzAoGBAOdO +CibYmoNqK/mqDHkp6PgsnbQGD5/CvPF/BLUWV1QpHxLzUQQeoBOQW5FatHe1H5zi +mbq3emBefVmsCLrRIJ4GQu4vsTMfjcpGLwviWmaK6pHbGPt8IYeEQ2MNyv59EtA2 +pQy4dX7/Oe6NLAR1UEQjXmCuXf+rxnxF3VJd1nRxAoGBANb9eusl9fusgSnVOTjJ +AQ7V36KVRv9hZoG6liBNwo80zDVmms4JhRd1MBkd3mkMkzIF4SkZUnWlwLBSANGM +dX/3eZ5i1AVwgF5Am/f5TNxopDbdT/o1RVT/P8dcFT7s1xuBn+6wU0F7dFBgWqVu +lt4aY85zNrJcj5XBHhqwdDGLAoGBAIksPNUAy9F3m5C6ih8o/aKAQx5KIeXrBUZq +v43tK+kbYfRJHBjHWMOBbuxq0G/VmGPf9q9GtGqGXuxZG+w+rYtJx1OeMQZShjIZ +ITl5CYeahrXtK4mo+fF2PMh3m5UE861LWuKKWhPwpJiWXC5grDNcjlHj1pcTdeip +PjHkuJPhAoGBAIh35DptqqdicOd3dr/+/m2YQywY8aSpMrR0bC06aAkscD7oq4tt +s/jwl0UlHIrEm/aMN7OnGIbpfkVdExfGKYaa5NRlgOwQpShwLufIo/c8fErd2zb8 +K3ptlwBxMrayMXpS3DP78r83Z0B8/FSK2guelzdRJ3ftipZ9io1Gss1C +-----END RSA PRIVATE KEY----- diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index 05880a1048..ecce473b01 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -17,12 +17,14 @@ import logging from mock import Mock import treq +from service_identity import VerificationError from zope.interface import implementer from twisted.internet import defer from twisted.internet._sslverify import ClientTLSOptions, OpenSSLCertificateOptions from twisted.internet.protocol import Factory from twisted.protocols.tls import TLSMemoryBIOFactory +from twisted.web._newclient import ResponseNeverReceived from 
twisted.web.http import HTTPChannel from twisted.web.http_headers import Headers from twisted.web.iweb import IPolicyForHTTPS @@ -37,13 +39,29 @@ from synapse.http.federation.srv_resolver import Server from synapse.util.caches.ttlcache import TTLCache from synapse.util.logcontext import LoggingContext -from tests.http import ServerTLSContext +from tests.http import TestServerTLSConnectionFactory, get_test_ca_cert_file from tests.server import FakeTransport, ThreadedMemoryReactorClock from tests.unittest import TestCase from tests.utils import default_config logger = logging.getLogger(__name__) +test_server_connection_factory = None + + +def get_connection_factory(): + # this needs to happen once, but not until we are ready to run the first test + global test_server_connection_factory + if test_server_connection_factory is None: + test_server_connection_factory = TestServerTLSConnectionFactory(sanlist=[ + b'DNS:testserv', + b'DNS:target-server', + b'DNS:xn--bcher-kva.com', + b'IP:1.2.3.4', + b'IP:::1', + ]) + return test_server_connection_factory + class MatrixFederationAgentTests(TestCase): def setUp(self): @@ -53,12 +71,11 @@ class MatrixFederationAgentTests(TestCase): self.well_known_cache = TTLCache("test_cache", timer=self.reactor.seconds) - # for now, we disable cert verification for the test, since the cert we - # present will not be trusted. We should do better here, though. config_dict = default_config("test", parse=False) - config_dict["federation_verify_certificates"] = False - config_dict["trusted_key_servers"] = [] - config = HomeServerConfig() + config_dict["federation_custom_ca_list"] = [get_test_ca_cert_file()] + # config_dict["trusted_key_servers"] = [] + + self._config = config = HomeServerConfig() config.parse_config_dict(config_dict) self.agent = MatrixFederationAgent( @@ -77,7 +94,7 @@ class MatrixFederationAgentTests(TestCase): """ # build the test server - server_tls_protocol = _build_test_server() + server_tls_protocol = _build_test_server(get_connection_factory()) # now, tell the client protocol factory to build the client protocol (it will be a # _WrappingProtocol, around a TLSMemoryBIOProtocol, around an @@ -328,6 +345,88 @@ class MatrixFederationAgentTests(TestCase): self.reactor.pump((0.1,)) self.successResultOf(test_d) + def test_get_hostname_bad_cert(self): + """ + Test the behaviour when the certificate on the server doesn't match the hostname + """ + self.mock_resolver.resolve_service.side_effect = lambda _: [] + self.reactor.lookups["testserv1"] = "1.2.3.4" + + test_d = self._make_get_request(b"matrix://testserv1/foo/bar") + + # Nothing happened yet + self.assertNoResult(test_d) + + # No SRV record lookup yet + self.mock_resolver.resolve_service.assert_not_called() + + # there should be an attempt to connect on port 443 for the .well-known + clients = self.reactor.tcpClients + self.assertEqual(len(clients), 1) + (host, port, client_factory, _timeout, _bindAddress) = clients[0] + self.assertEqual(host, '1.2.3.4') + self.assertEqual(port, 443) + + # fonx the connection + client_factory.clientConnectionFailed(None, Exception("nope")) + + # attemptdelay on the hostnameendpoint is 0.3, so takes that long before the + # .well-known request fails. 
+ self.reactor.pump((0.4,)) + + # now there should be a SRV lookup + self.mock_resolver.resolve_service.assert_called_once_with( + b"_matrix._tcp.testserv1" + ) + + # we should fall back to a direct connection + self.assertEqual(len(clients), 2) + (host, port, client_factory, _timeout, _bindAddress) = clients[1] + self.assertEqual(host, '1.2.3.4') + self.assertEqual(port, 8448) + + # make a test server, and wire up the client + http_server = self._make_connection(client_factory, expected_sni=b'testserv1') + + # there should be no requests + self.assertEqual(len(http_server.requests), 0) + + # ... and the request should have failed + e = self.failureResultOf(test_d, ResponseNeverReceived) + failure_reason = e.value.reasons[0] + self.assertIsInstance(failure_reason.value, VerificationError) + + def test_get_ip_address_bad_cert(self): + """ + Test the behaviour when the server name contains an explicit IP, but + the server cert doesn't cover it + """ + # there will be a getaddrinfo on the IP + self.reactor.lookups["1.2.3.5"] = "1.2.3.5" + + test_d = self._make_get_request(b"matrix://1.2.3.5/foo/bar") + + # Nothing happened yet + self.assertNoResult(test_d) + + # Make sure treq is trying to connect + clients = self.reactor.tcpClients + self.assertEqual(len(clients), 1) + (host, port, client_factory, _timeout, _bindAddress) = clients[0] + self.assertEqual(host, '1.2.3.5') + self.assertEqual(port, 8448) + + # make a test server, and wire up the client + http_server = self._make_connection(client_factory, expected_sni=None) + + # there should be no requests + self.assertEqual(len(http_server.requests), 0) + + # ... and the request should have failed + e = self.failureResultOf(test_d, ResponseNeverReceived) + failure_reason = e.value.reasons[0] + self.assertIsInstance(failure_reason.value, VerificationError) + def test_get_no_srv_no_well_known(self): """ Test the behaviour when the server name has no port, no SRV, and no well-known @@ -585,6 +684,49 @@ class MatrixFederationAgentTests(TestCase): self.reactor.pump((0.1,)) self.successResultOf(test_d) + def test_get_well_known_unsigned_cert(self): + """Test the behaviour when the .well-known server presents a cert + not signed by a CA + """ + + # we use the same test server as the other tests, but use an agent + # with _well_known_tls_policy left to the default, which will not + # trust it (since the presented cert is signed by a test CA) + + self.mock_resolver.resolve_service.side_effect = lambda _: [] + self.reactor.lookups["testserv"] = "1.2.3.4" + + agent = MatrixFederationAgent( + reactor=self.reactor, + tls_client_options_factory=ClientTLSOptionsFactory(self._config), + _srv_resolver=self.mock_resolver, + _well_known_cache=self.well_known_cache, + ) + + test_d = agent.request(b"GET", b"matrix://testserv/foo/bar") + + # Nothing happened yet + self.assertNoResult(test_d) + + # there should be an attempt to connect on port 443 for the .well-known + clients = self.reactor.tcpClients + self.assertEqual(len(clients), 1) + (host, port, client_factory, _timeout, _bindAddress) = clients[0] + self.assertEqual(host, '1.2.3.4') + self.assertEqual(port, 443) + + http_proto = self._make_connection( + client_factory, expected_sni=b"testserv", + ) + + # there should be no requests + self.assertEqual(len(http_proto.requests), 0) + + # and there should be a SRV lookup instead + self.mock_resolver.resolve_service.assert_called_once_with( + b"_matrix._tcp.testserv" + ) + def test_get_hostname_srv(self): """ Test the behaviour when there is a single SRV record @@ 
-918,11 +1060,17 @@ def _check_logcontext(context): raise AssertionError("Expected logcontext %s but was %s" % (context, current)) -def _build_test_server(): +def _build_test_server(connection_creator): """Construct a test server This builds an HTTP channel, wrapped with a TLSMemoryBIOProtocol + Args: + connection_creator (IOpenSSLServerConnectionCreator): thing to build + SSL connections + sanlist (list[bytes]): list of the SAN entries for the cert returned + by the server + Returns: TLSMemoryBIOProtocol """ @@ -931,7 +1079,7 @@ def _build_test_server(): server_factory.log = _log_request server_tls_factory = TLSMemoryBIOFactory( - ServerTLSContext(), isClient=False, wrappedFactory=server_factory + connection_creator, isClient=False, wrappedFactory=server_factory ) return server_tls_factory.buildProtocol(None) @@ -944,7 +1092,8 @@ def _log_request(request): @implementer(IPolicyForHTTPS) class TrustingTLSPolicyForHTTPS(object): - """An IPolicyForHTTPS which doesn't do any certificate verification""" + """An IPolicyForHTTPS which checks that the certificate belongs to the + right server, but doesn't check the certificate chain.""" def creatorForNetloc(self, hostname, port): certificateOptions = OpenSSLCertificateOptions() diff --git a/tests/http/server.key b/tests/http/server.key new file mode 100644 index 0000000000..c53ee02b21 --- /dev/null +++ b/tests/http/server.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAvUAWLOE6TEp3FYSfEnJMwYtJg3KIW5BjiAOOvFVOVQfJ5eEa +vzyJ1Z+8DUgLznFnUkAeD9GjPvP7awl3NPJKLQSMkV5Tp+ea4YyV+Aa4R7flROEa +zCGvmleydZw0VqN1atVZ0ikEoglM/APJQd70ec7KSR3QoxaV2/VNCHmyAPdP+0WI +llV54VXX1CZrWSHaCSn1gzo3WjnGbxTOCQE5Z4k5hqJAwLWWhxDv+FX/jD38Sq3H +gMFNpXJv6FYwwaKU8awghHdSY/qlBPE/1rU83vIBFJ3jW6I1WnQDfCQ69of5vshK +N4v4hok56ScwdUnk8lw6xvJx1Uav/XQB9qGh4QIDAQABAoIBAQCHLO5p8hotAgdb +JFZm26N9nxrMPBOvq0ucjEX4ucnwrFaGzynGrNwa7TRqHCrqs0/EjS2ryOacgbL0 +eldeRy26SASLlN+WD7UuI7e+6DXabDzj3RHB+tGuIbPDk+ZCeBDXVTsKBOhdQN1v +KNkpJrJjCtSsMxKiWvCBow353srJKqCDZcF5NIBYBeDBPMoMbfYn5dJ9JhEf+2h4 +0iwpnWDX1Vqf46pCRa0hwEyMXycGeV2CnfJSyV7z52ZHQrvkz8QspSnPpnlCnbOE +UAvc8kZ5e8oZE7W+JfkK38vHbEGM1FCrBmrC/46uUGMRpZfDferGs91RwQVq/F0n +JN9hLzsBAoGBAPh2pm9Xt7a4fWSkX0cDgjI7PT2BvLUjbRwKLV+459uDa7+qRoGE +sSwb2QBqmQ1kbr9JyTS+Ld8dyUTsGHZK+YbTieAxI3FBdKsuFtcYJO/REN0vik+6 +fMaBHPvDHSU2ioq7spZ4JBFskzqs38FvZ0lX7aa3fguMk8GMLnofQ8QxAoGBAML9 +o5sJLN9Tk9bv2aFgnERgfRfNjjV4Wd99TsktnCD04D1GrP2eDSLfpwFlCnguck6b +jxikqcolsNhZH4dgYHqRNj+IljSdl+sYZiygO6Ld0XU+dEFO86N3E9NzZhKcQ1at +85VdwNPCS7JM2fIxEvS9xfbVnsmK6/37ZZ5iI7yxAoGBALw2vRtJGmy60pojfd1A +hibhAyINnlKlFGkSOI7zdgeuRTf6l9BTIRclvTt4hJpFgzM6hMWEbyE94hJoupsZ +bm443o/LCWsox2VI05p6urhD6f9znNWKkiyY78izY+elqksvpjgfqEresaTYAeP5 +LQe9KNSK2VuMUP1j4G04M9BxAoGAWe8ITZJuytZOgrz/YIohqPvj1l2tcIYA1a6C +7xEFSMIIxtpZIWSLZIFJEsCakpHBkPX4iwIveZfmt/JrM1JFTWK6ZZVGyh/BmOIZ +Bg4lU1oBqJTUo+aZQtTCJS29b2n5OPpkNYkXTdP4e9UsVKNDvfPlYZJneUeEzxDr +bqCPIRECgYA544KMwrWxDQZg1dsKWgdVVKx80wEFZAiQr9+0KF6ch6Iu7lwGJHFY +iI6O85paX41qeC/Fo+feIWJVJU2GvG6eBsbO4bmq+KSg4NkABJSYxodgBp9ftNeD +jo1tfw+gudlNe5jXHu7oSX93tqGjR4Cnlgan/KtfkB96yHOumGmOhQ== +-----END RSA PRIVATE KEY----- diff --git a/tests/http/server.pem b/tests/http/server.pem deleted file mode 100644 index 0584cf1a80..0000000000 --- a/tests/http/server.pem +++ /dev/null @@ -1,81 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCgF43/3lAgJ+p0 -x7Rn8UcL8a4fctvdkikvZrCngw96LkB34Evfq8YGWlOVjU+f9naUJLAKMatmAfEN -r+rMX4VOXmpTwuu6iLtqwreUrRFMESyrmvQxa15p+y85gkY0CFmXMblv6ORbxHTG 
-ncBGwST4WK4Poewcgt6jcISFCESTUKu1zc3cw1ANIDRyDLB5K44KwIe36dcKckyN -Kdtv4BJ+3fcIZIkPJH62zqCypgFF1oiFt40uJzClxgHdJZlKYpgkfnDTckw4Y/Mx -9k8BbE310KAzUNMV9H7I1eEolzrNr66FQj1eN64X/dqO8lTbwCqAd4diCT4sIUk0 -0SVsAUjNd3g8j651hx+Qb1t8fuOjrny8dmeMxtUgIBHoQcpcj76R55Fs7KZ9uar0 -8OFTyGIze51W1jG2K/7/5M1zxIqrA+7lsXu5OR81s7I+Ng/UUAhiHA/z+42/aiNa -qEuk6tqj3rHfLctnCbtZ+JrRNqSSwEi8F0lMA021ivEd2eJV+284OyJjhXOmKHrX -QADHrmS7Sh4syTZvRNm9n+qWID0KdDr2Sji/KnS3Enp44HDQ4xriT6/xhwEGsyuX -oH5aAkdLznulbWkHBbyx1SUQSTLpOqzaioF9m1vRrLsFvrkrY3D253mPJ5eU9HM/ -dilduFcUgj4rz+6cdXUAh+KK/v95zwIDAQABAoICAFG5tJPaOa0ws0/KYx5s3YgL -aIhFalhCNSQtmCDrlwsYcXDA3/rfBchYdDL0YKGYgBBAal3J3WXFt/j0xThvyu2m -5UC9UPl4s7RckrsjXqEmY1d3UxGnbhtMT19cUdpeKN42VCP9EBaIw9Rg07dLAkSF -gNYaIx6q8F0fI4eGIPvTQtUcqur4CfWpaxyNvckdovV6M85/YXfDwbCOnacPDGIX -jfSK3i0MxGMuOHr6o8uzKR6aBUh6WStHWcw7VXXTvzdiFNbckmx3Gb93rf1b/LBw -QFfx+tBKcC62gKroCOzXso/0sL9YTVeSD/DJZOiJwSiz3Dj/3u1IUMbVvfTU8wSi -CYS7Z+jHxwSOCSSNTXm1wO/MtDsNKbI1+R0cohr/J9pOMQvrVh1+2zSDOFvXAQ1S -yvjn+uqdmijRoV2VEGVHd+34C+ci7eJGAhL/f92PohuuFR2shUETgGWzpACZSJwg -j1d90Hs81hj07vWRb+xCeDh00vimQngz9AD8vYvv/S4mqRGQ6TZdfjLoUwSTg0JD -6sQgRXX026gQhLhn687vLKZfHwzQPZkpQdxOR0dTZ/ho/RyGGRJXH4kN4cA2tPr+ -AKYQ29YXGlEzGG7OqikaZcprNWG6UFgEpuXyBxCgp9r4ladZo3J+1Rhgus8ZYatd -uO98q3WEBmP6CZ2n32mBAoIBAQDS/c/ybFTos0YpGHakwdmSfj5OOQJto2y8ywfG -qDHwO0ebcpNnS1+MA+7XbKUQb/3Iq7iJljkkzJG2DIJ6rpKynYts1ViYpM7M/t0T -W3V1gvUcUL62iqkgws4pnpWmubFkqV31cPSHcfIIclnzeQ1aOEGsGHNAvhty0ciC -DnkJACbqApvopFLOR5f6UFTtKExE+hDH0WqgpsCAKJ1L4g6pBzZatI32/CN9JEVU -tDbxLV75hHlFFjUrG7nT1rPyr/gI8Ceh9/2xeXPfjJUR0PrG3U1nwLqUCZkvFzO6 -XpN2+A+/v4v5xqMjKDKDFy1oq6SCMomwv/viw6wl/84TMbolAoIBAQDCPiMecnR8 -REik6tqVzQO/uSe9ZHjz6J15t5xdwaI6HpSwLlIkQPkLTjyXtFpemK5DOYRxrJvQ -remfrZrN2qtLlb/DKpuGPWRsPOvWCrSuNEp48ivUehtclljrzxAFfy0sM+fWeJ48 -nTnR+td9KNhjNtZixzWdAy/mE+jdaMsXVnk66L73Uz+2WsnvVMW2R6cpCR0F2eP/ -B4zDWRqlT2w47sePAB81mFYSQLvPC6Xcgg1OqMubfiizJI49c8DO6Jt+FFYdsxhd -kG52Eqa/Net6rN3ueiS6yXL5TU3Y6g96bPA2KyNCypucGcddcBfqaiVx/o4AH6yT -NrdsrYtyvk/jAoIBAQDHUwKVeeRJJbvdbQAArCV4MI155n+1xhMe1AuXkCQFWGtQ -nlBE4D72jmyf1UKnIbW2Uwv15xY6/ouVWYIWlj9+QDmMaozVP7Uiko+WDuwLRNl8 -k4dn+dzHV2HejbPBG2JLv3lFOx23q1zEwArcaXrExaq9Ayg2fKJ/uVHcFAIiD6Oz -pR1XDY4w1A/uaN+iYFSVQUyDCQLbnEz1hej73CaPZoHh9Pq83vxD5/UbjVjuRTeZ -L55FNzKpc/r89rNvTPBcuUwnxplDhYKDKVNWzn9rSXwrzTY2Tk8J3rh+k4RqevSd -6D47jH1n5Dy7/TRn0ueKHGZZtTUnyEUkbOJo3ayFAoIBAHKDyZaQqaX9Z8p6fwWj -yVsFoK0ih8BcWkLBAdmwZ6DWGJjJpjmjaG/G3ygc9s4gO1R8m12dAnuDnGE8KzDD -gwtbrKM2Alyg4wyA2hTlWOH/CAzH0RlCJ9Fs/d1/xJVJBeuyajLiB3/6vXTS6qnq -I7BSSxAPG8eGcn21LSsjNeB7ZZtaTgNnu/8ZBUYo9yrgkWc67TZe3/ChldYxOOlO -qqHh/BqNWtjxB4VZTp/g4RbgQVInZ2ozdXEv0v/dt0UEk29ANAjsZif7F3RayJ2f -/0TilzCaJ/9K9pKNhaClVRy7Dt8QjYg6BIWCGSw4ApF7pLnQ9gySn95mersCkVzD -YDsCggEAb0E/TORjQhKfNQvahyLfQFm151e+HIoqBqa4WFyfFxe/IJUaLH/JSSFw -VohbQqPdCmaAeuQ8ERL564DdkcY5BgKcax79fLLCOYP5bT11aQx6uFpfl2Dcm6Z9 -QdCRI4jzPftsd5fxLNH1XtGyC4t6vTic4Pji2O71WgWzx0j5v4aeDY4sZQeFxqCV -/q7Ee8hem1Rn5RFHu14FV45RS4LAWl6wvf5pQtneSKzx8YL0GZIRRytOzdEfnGKr -FeUlAj5uL+5/p0ZEgM7gPsEBwdm8scF79qSUn8UWSoXNeIauF9D4BDg8RZcFFxka -KILVFsq3cQC+bEnoM4eVbjEQkGs1RQ== ------END PRIVATE KEY----- ------BEGIN CERTIFICATE----- -MIIE/jCCAuagAwIBAgIJANFtVaGvJWZlMA0GCSqGSIb3DQEBCwUAMBMxETAPBgNV -BAMMCHRlc3RzZXJ2MCAXDTE5MDEyNzIyMDIzNloYDzIxMTkwMTAzMjIwMjM2WjAT -MREwDwYDVQQDDAh0ZXN0c2VydjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoC -ggIBAKAXjf/eUCAn6nTHtGfxRwvxrh9y292SKS9msKeDD3ouQHfgS9+rxgZaU5WN -T5/2dpQksAoxq2YB8Q2v6sxfhU5ealPC67qIu2rCt5StEUwRLKua9DFrXmn7LzmC -RjQIWZcxuW/o5FvEdMadwEbBJPhYrg+h7ByC3qNwhIUIRJNQq7XNzdzDUA0gNHIM 
-sHkrjgrAh7fp1wpyTI0p22/gEn7d9whkiQ8kfrbOoLKmAUXWiIW3jS4nMKXGAd0l -mUpimCR+cNNyTDhj8zH2TwFsTfXQoDNQ0xX0fsjV4SiXOs2vroVCPV43rhf92o7y -VNvAKoB3h2IJPiwhSTTRJWwBSM13eDyPrnWHH5BvW3x+46OufLx2Z4zG1SAgEehB -ylyPvpHnkWzspn25qvTw4VPIYjN7nVbWMbYr/v/kzXPEiqsD7uWxe7k5HzWzsj42 -D9RQCGIcD/P7jb9qI1qoS6Tq2qPesd8ty2cJu1n4mtE2pJLASLwXSUwDTbWK8R3Z -4lX7bzg7ImOFc6YoetdAAMeuZLtKHizJNm9E2b2f6pYgPQp0OvZKOL8qdLcSenjg -cNDjGuJPr/GHAQazK5egfloCR0vOe6VtaQcFvLHVJRBJMuk6rNqKgX2bW9GsuwW+ -uStjcPbneY8nl5T0cz92KV24VxSCPivP7px1dQCH4or+/3nPAgMBAAGjUzBRMB0G -A1UdDgQWBBQcQZpzLzTk5KdS/Iz7sGCV7gTd/zAfBgNVHSMEGDAWgBQcQZpzLzTk -5KdS/Iz7sGCV7gTd/zAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IC -AQAr/Pgha57jqYsDDX1LyRrVdqoVBpLBeB7x/p9dKYm7S6tBTDFNMZ0SZyQP8VEG -7UoC9/OQ9nCdEMoR7ZKpQsmipwcIqpXHS6l4YOkf5EEq5jpMgvlEesHmBJJeJew/ -FEPDl1bl8d0tSrmWaL3qepmwzA+2lwAAouWk2n+rLiP8CZ3jZeoTXFqYYrUlEqO9 -fHMvuWqTV4KCSyNY+GWCrnHetulgKHlg+W2J1mZnrCKcBhWf9C2DesTJO+JldIeM -ornTFquSt21hZi+k3aySuMn2N3MWiNL8XsZVsAnPSs0zA+2fxjJkShls8Gc7cCvd -a6XrNC+PY6pONguo7rEU4HiwbvnawSTngFFglmH/ImdA/HkaAekW6o82aI8/UxFx -V9fFMO3iKDQdOrg77hI1bx9RlzKNZZinE2/Pu26fWd5d2zqDWCjl8ykGQRAfXgYN -H3BjgyXLl+ao5/pOUYYtzm3ruTXTgRcy5hhL6hVTYhSrf9vYh4LNIeXNKnZ78tyG -TX77/kU2qXhBGCFEUUMqUNV/+ITir2lmoxVjknt19M07aGr8C7SgYt6Rs+qDpMiy -JurgvRh8LpVq4pHx1efxzxCFmo58DMrG40I0+CF3y/niNpOb1gp2wAqByRiORkds -f0ytW6qZ0TpHbD6gOtQLYDnhx3ISuX+QYSekVwQUpffeWQ== ------END CERTIFICATE----- -- cgit 1.5.1 From 19780a521ec1d200bbc1d25bf5041f8fc5691b40 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 10 Jun 2019 17:41:10 +0100 Subject: fix CI on python 2.7 --- tests/http/__init__.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) (limited to 'tests') diff --git a/tests/http/__init__.py b/tests/http/__init__.py index b03fff0945..126826fd3f 100644 --- a/tests/http/__init__.py +++ b/tests/http/__init__.py @@ -70,7 +70,7 @@ def create_test_cert_file(sanlist): cert_file_count += 1 # first build a CSR - subprocess.run( + subprocess.check_call( [ "openssl", "req", @@ -81,8 +81,7 @@ def create_test_cert_file(sanlist): "/", "-out", csr_filename, - ], - check=True, + ] ) # now a config file describing the right SAN entries @@ -93,7 +92,7 @@ def create_test_cert_file(sanlist): # finally the cert ca_key_filename = os.path.join(os.path.dirname(__file__), "ca.key") ca_cert_filename = get_test_ca_cert_file() - subprocess.run( + subprocess.check_call( [ "openssl", "x509", @@ -110,8 +109,7 @@ def create_test_cert_file(sanlist): cnf_filename, "-out", cert_filename, - ], - check=True, + ] ) return cert_filename -- cgit 1.5.1 From db74c4fc6ce2982a4e563c98b3affca3169b3f18 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 10 Jun 2019 17:55:01 +0100 Subject: fix ci on py2, again --- tests/http/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'tests') diff --git a/tests/http/__init__.py b/tests/http/__init__.py index 126826fd3f..2d5dba6464 100644 --- a/tests/http/__init__.py +++ b/tests/http/__init__.py @@ -50,7 +50,7 @@ CONFIG_TEMPLATE = b"""\ [default] basicConstraints = CA:FALSE keyUsage=nonRepudiation, digitalSignature, keyEncipherment -subjectAltName = %(sanentries)b +subjectAltName = %(sanentries)s """ -- cgit 1.5.1 From 10383e6e6fefe29b007d11220841c17ad9cfc3e1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 11 Jun 2019 11:31:12 +0100 Subject: Change password reset links to /_matrix. 
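After this change the submit-token endpoint lives under the standard client API prefix rather than the bespoke /_synapse/password_reset path. As a rough illustration of the link the mailer now produces (the helper below and its use of urlencode are illustrative, not the actual Mailer code; the real string is built in the synapse/push/mailer.py hunk further down):

    from urllib.parse import urlencode

    def password_reset_link(public_baseurl, token, client_secret, sid):
        # Illustrative sketch of the link format after this change.
        query = urlencode({"token": token, "client_secret": client_secret, "sid": sid})
        return (
            public_baseurl
            + "_matrix/client/unstable/password_reset/email/submit_token?"
            + query
        )

    # password_reset_link("https://example.com/", "tok", "secret", "sid1") gives
    # https://example.com/_matrix/client/unstable/password_reset/email/submit_token?token=tok&client_secret=secret&sid=sid1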
--- synapse/app/homeserver.py | 1 - synapse/push/mailer.py | 2 +- synapse/res/templates/password_reset_success.html | 2 +- synapse/rest/client/v2_alpha/account.py | 9 +- tests/rest/client/v2_alpha/test_account.py | 241 ++++++++++++++++++++++ tests/unittest.py | 12 ++ 6 files changed, 260 insertions(+), 7 deletions(-) create mode 100644 tests/rest/client/v2_alpha/test_account.py (limited to 'tests') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index df524a23dd..1045d28949 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -176,7 +176,6 @@ class SynapseHomeServer(HomeServer): resources.update({ "/_matrix/client/api/v1": client_resource, - "/_synapse/password_reset": client_resource, "/_matrix/client/r0": client_resource, "/_matrix/client/unstable": client_resource, "/_matrix/client/v2_alpha": client_resource, diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index 4bc9eb7313..099f9545ab 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -117,7 +117,7 @@ class Mailer(object): link = ( self.hs.config.public_baseurl + - "_synapse/password_reset/email/submit_token" + "_matrix/client/unstable/password_reset/email/submit_token" "?token=%s&client_secret=%s&sid=%s" % (token, client_secret, sid) ) diff --git a/synapse/res/templates/password_reset_success.html b/synapse/res/templates/password_reset_success.html index 7b6fa5e6f0..7324d66d1e 100644 --- a/synapse/res/templates/password_reset_success.html +++ b/synapse/res/templates/password_reset_success.html @@ -1,6 +1,6 @@ -
 <html>
 <head></head>
 <body>
-<p>Your password was successfully reset. You may now close this window.</p>
+<p>Your email has now been validated, please return to your client to reset your password. You may now close this window.</p>
 </body>
 </html>
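The new test case added below exercises the whole reset flow end to end. Schematically, and assuming a plain HTTP client such as requests purely for illustration (the test itself drives Synapse's in-memory request channel rather than a real socket), the flow looks like:

    import requests  # illustrative only; not used by the test below

    def reset_password(base_url, email, client_secret, new_password, get_link_from_inbox):
        # 1. Ask the homeserver to send a validation email and remember the sid.
        resp = requests.post(
            base_url + "/_matrix/client/r0/account/password/email/requestToken",
            json={"client_secret": client_secret, "email": email, "send_attempt": 1},
        )
        sid = resp.json()["sid"]

        # 2. Follow the validation link from the email, which is now served
        #    under /_matrix/client/unstable/password_reset/email/submit_token.
        requests.get(get_link_from_inbox())

        # 3. Submit the new password, quoting the validated session.
        requests.post(
            base_url + "/_matrix/client/r0/account/password",
            json={
                "new_password": new_password,
                "auth": {
                    "type": "m.login.email.identity",
                    "threepid_creds": {"client_secret": client_secret, "sid": sid},
                },
            },
        )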
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index e4c63b69b9..7025f486e1 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -15,7 +15,6 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -import re from six.moves import http_client @@ -228,9 +227,11 @@ class MsisdnPasswordRequestTokenRestServlet(RestServlet): class PasswordResetSubmitTokenServlet(RestServlet): """Handles 3PID validation token submission""" - PATTERNS = [ - re.compile("^/_synapse/password_reset/(?P[^/]*)/submit_token/*$"), - ] + PATTERNS = client_patterns( + "/password_reset/(?P[^/]*)/submit_token/*$", + releases=(), + unstable=True, + ) def __init__(self, hs): """ diff --git a/tests/rest/client/v2_alpha/test_account.py b/tests/rest/client/v2_alpha/test_account.py new file mode 100644 index 0000000000..0d1c0868ce --- /dev/null +++ b/tests/rest/client/v2_alpha/test_account.py @@ -0,0 +1,241 @@ +# -*- coding: utf-8 -*- +# Copyright 2015-2016 OpenMarket Ltd +# Copyright 2017-2018 New Vector Ltd +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import re +from email.parser import Parser + +import pkg_resources + +import synapse.rest.admin +from synapse.api.constants import LoginType +from synapse.rest.client.v1 import login +from synapse.rest.client.v2_alpha import account, register + +from tests import unittest + + +class PasswordResetTestCase(unittest.HomeserverTestCase): + + servlets = [ + account.register_servlets, + synapse.rest.admin.register_servlets_for_client_rest_resource, + register.register_servlets, + login.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + config = self.default_config() + + # Email config. 
+ self.email_attempts = [] + + def sendmail(smtphost, from_addr, to_addrs, msg, **kwargs): + self.email_attempts.append(msg) + return + + config["email"] = { + "enable_notifs": False, + "template_dir": os.path.abspath( + pkg_resources.resource_filename("synapse", "res/templates") + ), + "smtp_host": "127.0.0.1", + "smtp_port": 20, + "require_transport_security": False, + "smtp_user": None, + "smtp_pass": None, + "notif_from": "test@example.com", + } + config["public_baseurl"] = "https://example.com" + + hs = self.setup_test_homeserver(config=config, sendmail=sendmail) + return hs + + def prepare(self, reactor, clock, hs): + self.store = hs.get_datastore() + + def test_basic_password_reset(self): + """Test basic password reset flow + """ + old_password = "monkey" + new_password = "kangeroo" + + user_id = self.register_user("kermit", old_password) + self.login("kermit", old_password) + + email = "test@example.com" + + # Add a threepid + self.get_success( + self.store.user_add_threepid( + user_id=user_id, + medium="email", + address=email, + validated_at=0, + added_at=0, + ) + ) + + client_secret = "foobar" + session_id = self._request_token(email, client_secret) + + self.assertEquals(len(self.email_attempts), 1) + link = self._get_link_from_email() + + self._validate_token(link) + + self._reset_password(new_password, session_id, client_secret) + + # Assert we can log in with the new password + self.login("kermit", new_password) + + # Assert we can't log in with the old password + self.attempt_wrong_password_login("kermit", old_password) + + def test_cant_reset_password_without_clicking_link(self): + """Test that we do actually need to click the link in the email + """ + old_password = "monkey" + new_password = "kangeroo" + + user_id = self.register_user("kermit", old_password) + self.login("kermit", old_password) + + email = "test@example.com" + + # Add a threepid + self.get_success( + self.store.user_add_threepid( + user_id=user_id, + medium="email", + address=email, + validated_at=0, + added_at=0, + ) + ) + + client_secret = "foobar" + session_id = self._request_token(email, client_secret) + + self.assertEquals(len(self.email_attempts), 1) + + # Attempt to reset password without clicking the link + self._reset_password( + new_password, session_id, client_secret, expected_code=401, + ) + + # Assert we can log in with the old password + self.login("kermit", old_password) + + # Assert we can't log in with the new password + self.attempt_wrong_password_login("kermit", new_password) + + def test_no_valid_token(self): + """Test that we do actually need to request a token and can't just + make a session up. 
+ """ + old_password = "monkey" + new_password = "kangeroo" + + user_id = self.register_user("kermit", old_password) + self.login("kermit", old_password) + + email = "test@example.com" + + # Add a threepid + self.get_success( + self.store.user_add_threepid( + user_id=user_id, + medium="email", + address=email, + validated_at=0, + added_at=0, + ) + ) + + client_secret = "foobar" + session_id = "weasle" + + # Attempt to reset password without even requesting an email + self._reset_password( + new_password, session_id, client_secret, expected_code=401, + ) + + # Assert we can log in with the old password + self.login("kermit", old_password) + + # Assert we can't log in with the new password + self.attempt_wrong_password_login("kermit", new_password) + + def _request_token(self, email, client_secret): + request, channel = self.make_request( + "POST", + b"account/password/email/requestToken", + {"client_secret": client_secret, "email": email, "send_attempt": 1}, + ) + self.render(request) + self.assertEquals(200, channel.code, channel.result) + + return channel.json_body["sid"] + + def _validate_token(self, link): + # Remove the host + path = link.replace("https://example.com", "") + + request, channel = self.make_request("GET", path, shorthand=False) + self.render(request) + self.assertEquals(200, channel.code, channel.result) + + def _get_link_from_email(self): + assert self.email_attempts, "No emails have been sent" + + raw_msg = self.email_attempts[-1].decode("UTF-8") + mail = Parser().parsestr(raw_msg) + + text = None + for part in mail.walk(): + if part.get_content_type() == "text/plain": + text = part.get_payload(decode=True).decode("UTF-8") + break + + if not text: + self.fail("Could not find text portion of email to parse") + + match = re.search(r"https://example.com\S+", text) + assert match, "Could not find link in email" + + return match.group(0) + + def _reset_password( + self, new_password, session_id, client_secret, expected_code=200 + ): + request, channel = self.make_request( + "POST", + b"account/password", + { + "new_password": new_password, + "auth": { + "type": LoginType.EMAIL_IDENTITY, + "threepid_creds": { + "client_secret": client_secret, + "sid": session_id, + }, + }, + }, + ) + self.render(request) + self.assertEquals(expected_code, channel.code, channel.result) diff --git a/tests/unittest.py b/tests/unittest.py index 26204470b1..7dbb64af59 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -441,3 +441,15 @@ class HomeserverTestCase(TestCase): access_token = channel.json_body["access_token"] return access_token + + def attempt_wrong_password_login(self, username, password): + """Attempts to login as the user with the given password, asserting + that the attempt *fails*. + """ + body = {"type": "m.login.password", "user": username, "password": password} + + request, channel = self.make_request( + "POST", "/_matrix/client/r0/login", json.dumps(body).encode('utf8') + ) + self.render(request) + self.assertEqual(channel.code, 403, channel.result) -- cgit 1.5.1 From a766c41d258b48ca1690723c1aa51684baa05e6a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 11 Jun 2019 11:46:32 +0100 Subject: Bump bleach version so that tests can run on old deps. 
--- synapse/python_dependencies.py | 2 +- tests/push/test_email.py | 6 ------ tests/push/test_http.py | 6 ------ tests/rest/client/test_consent.py | 6 ------ tests/rest/client/v2_alpha/test_register.py | 6 ------ 5 files changed, 1 insertion(+), 25 deletions(-) (limited to 'tests') diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 6efd81f204..7dfa78dadb 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -80,7 +80,7 @@ REQUIREMENTS = [ ] CONDITIONAL_REQUIREMENTS = { - "email": ["Jinja2>=2.9", "bleach>=1.4.2"], + "email": ["Jinja2>=2.9", "bleach>=1.4.3"], "matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"], # we use execute_batch, which arrived in psycopg 2.7. diff --git a/tests/push/test_email.py b/tests/push/test_email.py index 9cdde1a9bd..9bc5f07de1 100644 --- a/tests/push/test_email.py +++ b/tests/push/test_email.py @@ -24,15 +24,9 @@ from synapse.rest.client.v1 import login, room from tests.unittest import HomeserverTestCase -try: - from synapse.push.mailer import load_jinja2_templates -except Exception: - load_jinja2_templates = None - class EmailPusherTests(HomeserverTestCase): - skip = "No Jinja installed" if not load_jinja2_templates else None servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, diff --git a/tests/push/test_http.py b/tests/push/test_http.py index aba618b2be..22c3f73ef3 100644 --- a/tests/push/test_http.py +++ b/tests/push/test_http.py @@ -23,15 +23,9 @@ from synapse.util.logcontext import make_deferred_yieldable from tests.unittest import HomeserverTestCase -try: - from synapse.push.mailer import load_jinja2_templates -except Exception: - load_jinja2_templates = None - class HTTPPusherTests(HomeserverTestCase): - skip = "No Jinja installed" if not load_jinja2_templates else None servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, diff --git a/tests/rest/client/test_consent.py b/tests/rest/client/test_consent.py index 88f8f1abdc..efc5a99db3 100644 --- a/tests/rest/client/test_consent.py +++ b/tests/rest/client/test_consent.py @@ -23,14 +23,8 @@ from synapse.rest.consent import consent_resource from tests import unittest from tests.server import render -try: - from synapse.push.mailer import load_jinja2_templates -except Exception: - load_jinja2_templates = None - class ConsentResourceTestCase(unittest.HomeserverTestCase): - skip = "No Jinja installed" if not load_jinja2_templates else None servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index 0cb6a363d6..e9d8f3c734 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -30,11 +30,6 @@ from synapse.rest.client.v2_alpha import account_validity, register, sync from tests import unittest -try: - from synapse.push.mailer import load_jinja2_templates -except ImportError: - load_jinja2_templates = None - class RegisterRestServletTestCase(unittest.HomeserverTestCase): @@ -307,7 +302,6 @@ class AccountValidityTestCase(unittest.HomeserverTestCase): class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): - skip = "No Jinja installed" if not load_jinja2_templates else None servlets = [ register.register_servlets, synapse.rest.admin.register_servlets_for_client_rest_resource, -- cgit 1.5.1 From 6312d6cc7c5bc80984758a70e2c368d8b4fb3bfd Mon Sep 17 
00:00:00 2001 From: Amber Brown Date: Thu, 13 Jun 2019 22:40:52 +1000 Subject: Expose statistics on extrems to prometheus (#5384) --- changelog.d/5384.feature | 1 + scripts/generate_signing_key.py | 2 +- synapse/metrics/__init__.py | 112 +++++++++++++++++++++++------ synapse/storage/events.py | 44 ++++++++---- tests/storage/test_cleanup_extrems.py | 128 +++++++++++++--------------------- tests/storage/test_event_metrics.py | 97 ++++++++++++++++++++++++++ tests/unittest.py | 61 +++++++++++++++- 7 files changed, 331 insertions(+), 114 deletions(-) create mode 100644 changelog.d/5384.feature create mode 100644 tests/storage/test_event_metrics.py (limited to 'tests') diff --git a/changelog.d/5384.feature b/changelog.d/5384.feature new file mode 100644 index 0000000000..9497f521c8 --- /dev/null +++ b/changelog.d/5384.feature @@ -0,0 +1 @@ +Statistics on forward extremities per room are now exposed via Prometheus. diff --git a/scripts/generate_signing_key.py b/scripts/generate_signing_key.py index ba3ba97395..36e9140b50 100755 --- a/scripts/generate_signing_key.py +++ b/scripts/generate_signing_key.py @@ -16,7 +16,7 @@ import argparse import sys -from signedjson.key import write_signing_keys, generate_signing_key +from signedjson.key import generate_signing_key, write_signing_keys from synapse.util.stringutils import random_string diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index ef48984fdd..539c353528 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -25,7 +25,7 @@ import six import attr from prometheus_client import Counter, Gauge, Histogram -from prometheus_client.core import REGISTRY, GaugeMetricFamily +from prometheus_client.core import REGISTRY, GaugeMetricFamily, HistogramMetricFamily from twisted.internet import reactor @@ -40,7 +40,6 @@ HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat") class RegistryProxy(object): - @staticmethod def collect(): for metric in REGISTRY.collect(): @@ -63,10 +62,7 @@ class LaterGauge(object): try: calls = self.caller() except Exception: - logger.exception( - "Exception running callback for LaterGauge(%s)", - self.name, - ) + logger.exception("Exception running callback for LaterGauge(%s)", self.name) yield g return @@ -116,9 +112,7 @@ class InFlightGauge(object): # Create a class which have the sub_metrics values as attributes, which # default to 0 on initialization. Used to pass to registered callbacks. self._metrics_class = attr.make_class( - "_MetricsEntry", - attrs={x: attr.ib(0) for x in sub_metrics}, - slots=True, + "_MetricsEntry", attrs={x: attr.ib(0) for x in sub_metrics}, slots=True ) # Counts number of in flight blocks for a given set of label values @@ -157,7 +151,9 @@ class InFlightGauge(object): Note: may be called by a separate thread. 
""" - in_flight = GaugeMetricFamily(self.name + "_total", self.desc, labels=self.labels) + in_flight = GaugeMetricFamily( + self.name + "_total", self.desc, labels=self.labels + ) metrics_by_key = {} @@ -179,7 +175,9 @@ class InFlightGauge(object): yield in_flight for name in self.sub_metrics: - gauge = GaugeMetricFamily("_".join([self.name, name]), "", labels=self.labels) + gauge = GaugeMetricFamily( + "_".join([self.name, name]), "", labels=self.labels + ) for key, metrics in six.iteritems(metrics_by_key): gauge.add_metric(key, getattr(metrics, name)) yield gauge @@ -193,12 +191,75 @@ class InFlightGauge(object): all_gauges[self.name] = self +@attr.s(hash=True) +class BucketCollector(object): + """ + Like a Histogram, but allows buckets to be point-in-time instead of + incrementally added to. + + Args: + name (str): Base name of metric to be exported to Prometheus. + data_collector (callable -> dict): A synchronous callable that + returns a dict mapping bucket to number of items in the + bucket. If these buckets are not the same as the buckets + given to this class, they will be remapped into them. + buckets (list[float]): List of floats/ints of the buckets to + give to Prometheus. +Inf is ignored, if given. + + """ + + name = attr.ib() + data_collector = attr.ib() + buckets = attr.ib() + + def collect(self): + + # Fetch the data -- this must be synchronous! + data = self.data_collector() + + buckets = {} + + res = [] + for x in data.keys(): + for i, bound in enumerate(self.buckets): + if x <= bound: + buckets[bound] = buckets.get(bound, 0) + data[x] + break + + for i in self.buckets: + res.append([i, buckets.get(i, 0)]) + + res.append(["+Inf", sum(data.values())]) + + metric = HistogramMetricFamily( + self.name, + "", + buckets=res, + sum_value=sum([x * y for x, y in data.items()]), + ) + yield metric + + def __attrs_post_init__(self): + self.buckets = [float(x) for x in self.buckets if x != "+Inf"] + if self.buckets != sorted(self.buckets): + raise ValueError("Buckets not sorted") + + self.buckets = tuple(self.buckets) + + if self.name in all_gauges.keys(): + logger.warning("%s already registered, reregistering" % (self.name,)) + REGISTRY.unregister(all_gauges.pop(self.name)) + + REGISTRY.register(self) + all_gauges[self.name] = self + + # # Detailed CPU metrics # -class CPUMetrics(object): +class CPUMetrics(object): def __init__(self): ticks_per_sec = 100 try: @@ -237,13 +298,28 @@ gc_time = Histogram( "python_gc_time", "Time taken to GC (sec)", ["gen"], - buckets=[0.0025, 0.005, 0.01, 0.025, 0.05, 0.10, 0.25, 0.50, 1.00, 2.50, - 5.00, 7.50, 15.00, 30.00, 45.00, 60.00], + buckets=[ + 0.0025, + 0.005, + 0.01, + 0.025, + 0.05, + 0.10, + 0.25, + 0.50, + 1.00, + 2.50, + 5.00, + 7.50, + 15.00, + 30.00, + 45.00, + 60.00, + ], ) class GCCounts(object): - def collect(self): cm = GaugeMetricFamily("python_gc_counts", "GC object counts", labels=["gen"]) for n, m in enumerate(gc.get_count()): @@ -279,9 +355,7 @@ sent_transactions_counter = Counter("synapse_federation_client_sent_transactions events_processed_counter = Counter("synapse_federation_client_events_processed", "") event_processing_loop_counter = Counter( - "synapse_event_processing_loop_count", - "Event processing loop iterations", - ["name"], + "synapse_event_processing_loop_count", "Event processing loop iterations", ["name"] ) event_processing_loop_room_count = Counter( @@ -311,7 +385,6 @@ last_ticked = time.time() class ReactorLastSeenMetric(object): - def collect(self): cm = GaugeMetricFamily( "python_twisted_reactor_last_seen", 
@@ -325,7 +398,6 @@ REGISTRY.register(ReactorLastSeenMetric()) def runUntilCurrentTimer(func): - @functools.wraps(func) def f(*args, **kwargs): now = reactor.seconds() diff --git a/synapse/storage/events.py b/synapse/storage/events.py index f9162be9b9..1578403f79 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -17,7 +17,7 @@ import itertools import logging -from collections import OrderedDict, deque, namedtuple +from collections import Counter as c_counter, OrderedDict, deque, namedtuple from functools import wraps from six import iteritems, text_type @@ -33,6 +33,7 @@ from synapse.api.constants import EventTypes from synapse.api.errors import SynapseError from synapse.events import EventBase # noqa: F401 from synapse.events.snapshot import EventContext # noqa: F401 +from synapse.metrics import BucketCollector from synapse.metrics.background_process_metrics import run_as_background_process from synapse.state import StateResolutionStore from synapse.storage.background_updates import BackgroundUpdateStore @@ -220,13 +221,38 @@ class EventsStore( EventsWorkerStore, BackgroundUpdateStore, ): - def __init__(self, db_conn, hs): super(EventsStore, self).__init__(db_conn, hs) self._event_persist_queue = _EventPeristenceQueue() self._state_resolution_handler = hs.get_state_resolution_handler() + # Collect metrics on the number of forward extremities that exist. + self._current_forward_extremities_amount = {} + + BucketCollector( + "synapse_forward_extremities", + lambda: self._current_forward_extremities_amount, + buckets=[1, 2, 3, 5, 7, 10, 15, 20, 50, 100, 200, 500, "+Inf"] + ) + + # Read the extrems every 60 minutes + hs.get_clock().looping_call(self._read_forward_extremities, 60 * 60 * 1000) + + @defer.inlineCallbacks + def _read_forward_extremities(self): + def fetch(txn): + txn.execute( + """ + select count(*) c from event_forward_extremities + group by room_id + """ + ) + return txn.fetchall() + + res = yield self.runInteraction("read_forward_extremities", fetch) + self._current_forward_extremities_amount = c_counter(list(x[0] for x in res)) + @defer.inlineCallbacks def persist_events(self, events_and_contexts, backfilled=False): """ @@ -568,17 +594,11 @@ class EventsStore( ) txn.execute(sql, batch) - results.extend( - r[0] - for r in txn - if not json.loads(r[1]).get("soft_failed") - ) + results.extend(r[0] for r in txn if not json.loads(r[1]).get("soft_failed")) for chunk in batch_iter(event_ids, 100): yield self.runInteraction( - "_get_events_which_are_prevs", - _get_events_which_are_prevs_txn, - chunk, + "_get_events_which_are_prevs", _get_events_which_are_prevs_txn, chunk ) defer.returnValue(results) @@ -640,9 +660,7 @@ class EventsStore( for chunk in batch_iter(event_ids, 100): yield self.runInteraction( - "_get_prevs_before_rejected", - _get_prevs_before_rejected_txn, - chunk, + "_get_prevs_before_rejected", _get_prevs_before_rejected_txn, chunk ) defer.returnValue(existing_prevs) diff --git a/tests/storage/test_cleanup_extrems.py b/tests/storage/test_cleanup_extrems.py index 6aa8b8b3c6..f4c81ef77d 100644 --- a/tests/storage/test_cleanup_extrems.py +++ b/tests/storage/test_cleanup_extrems.py @@ -15,7 +15,6 @@ import os.path -from synapse.api.constants import EventTypes from synapse.storage import prepare_database from synapse.types import Requester, UserID @@ -23,17 +22,12 @@ from tests.unittest import HomeserverTestCase class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): - """Test the background update to clean forward extremities table. 
""" - def make_homeserver(self, reactor, clock): - # Hack until we understand why test_forked_graph_cleanup fails with v4 - config = self.default_config() - config['default_room_version'] = '1' - return self.setup_test_homeserver(config=config) + Test the background update to clean forward extremities table. + """ def prepare(self, reactor, clock, homeserver): self.store = homeserver.get_datastore() - self.event_creator = homeserver.get_event_creation_handler() self.room_creator = homeserver.get_room_creation_handler() # Create a test user and room @@ -42,56 +36,6 @@ class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): info = self.get_success(self.room_creator.create_room(self.requester, {})) self.room_id = info["room_id"] - def create_and_send_event(self, soft_failed=False, prev_event_ids=None): - """Create and send an event. - - Args: - soft_failed (bool): Whether to create a soft failed event or not - prev_event_ids (list[str]|None): Explicitly set the prev events, - or if None just use the default - - Returns: - str: The new event's ID. - """ - prev_events_and_hashes = None - if prev_event_ids: - prev_events_and_hashes = [[p, {}, 0] for p in prev_event_ids] - - event, context = self.get_success( - self.event_creator.create_event( - self.requester, - { - "type": EventTypes.Message, - "room_id": self.room_id, - "sender": self.user.to_string(), - "content": {"body": "", "msgtype": "m.text"}, - }, - prev_events_and_hashes=prev_events_and_hashes, - ) - ) - - if soft_failed: - event.internal_metadata.soft_failed = True - - self.get_success( - self.event_creator.send_nonmember_event(self.requester, event, context) - ) - - return event.event_id - - def add_extremity(self, event_id): - """Add the given event as an extremity to the room. - """ - self.get_success( - self.store._simple_insert( - table="event_forward_extremities", - values={"room_id": self.room_id, "event_id": event_id}, - desc="test_add_extremity", - ) - ) - - self.store.get_latest_event_ids_in_room.invalidate((self.room_id,)) - def run_background_update(self): """Re run the background update to clean up the extremities. 
""" @@ -131,10 +75,16 @@ class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): """ # Create the room graph - event_id_1 = self.create_and_send_event() - event_id_2 = self.create_and_send_event(True, [event_id_1]) - event_id_3 = self.create_and_send_event(True, [event_id_2]) - event_id_4 = self.create_and_send_event(False, [event_id_3]) + event_id_1 = self.create_and_send_event(self.room_id, self.user) + event_id_2 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_1] + ) + event_id_3 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_2] + ) + event_id_4 = self.create_and_send_event( + self.room_id, self.user, False, [event_id_3] + ) # Check the latest events are as expected latest_event_ids = self.get_success( @@ -154,12 +104,16 @@ class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): Where SF* are soft failed, and with extremities of A and B """ # Create the room graph - event_id_a = self.create_and_send_event() - event_id_sf1 = self.create_and_send_event(True, [event_id_a]) - event_id_b = self.create_and_send_event(False, [event_id_sf1]) + event_id_a = self.create_and_send_event(self.room_id, self.user) + event_id_sf1 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_a] + ) + event_id_b = self.create_and_send_event( + self.room_id, self.user, False, [event_id_sf1] + ) # Add the new extremity and check the latest events are as expected - self.add_extremity(event_id_a) + self.add_extremity(self.room_id, event_id_a) latest_event_ids = self.get_success( self.store.get_latest_event_ids_in_room(self.room_id) @@ -185,13 +139,19 @@ class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): Where SF* are soft failed, and with extremities of A and B """ # Create the room graph - event_id_a = self.create_and_send_event() - event_id_sf1 = self.create_and_send_event(True, [event_id_a]) - event_id_sf2 = self.create_and_send_event(True, [event_id_sf1]) - event_id_b = self.create_and_send_event(False, [event_id_sf2]) + event_id_a = self.create_and_send_event(self.room_id, self.user) + event_id_sf1 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_a] + ) + event_id_sf2 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_sf1] + ) + event_id_b = self.create_and_send_event( + self.room_id, self.user, False, [event_id_sf2] + ) # Add the new extremity and check the latest events are as expected - self.add_extremity(event_id_a) + self.add_extremity(self.room_id, event_id_a) latest_event_ids = self.get_success( self.store.get_latest_event_ids_in_room(self.room_id) @@ -227,16 +187,26 @@ class CleanupExtremBackgroundUpdateStoreTestCase(HomeserverTestCase): """ # Create the room graph - event_id_a = self.create_and_send_event() - event_id_b = self.create_and_send_event() - event_id_sf1 = self.create_and_send_event(True, [event_id_a]) - event_id_sf2 = self.create_and_send_event(True, [event_id_a, event_id_b]) - event_id_sf3 = self.create_and_send_event(True, [event_id_sf1]) - self.create_and_send_event(True, [event_id_sf2, event_id_sf3]) # SF4 - event_id_c = self.create_and_send_event(False, [event_id_sf3]) + event_id_a = self.create_and_send_event(self.room_id, self.user) + event_id_b = self.create_and_send_event(self.room_id, self.user) + event_id_sf1 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_a] + ) + event_id_sf2 = self.create_and_send_event( + self.room_id, self.user, True, [event_id_a, event_id_b] + ) + event_id_sf3 = 
self.create_and_send_event( + self.room_id, self.user, True, [event_id_sf1] + ) + self.create_and_send_event( + self.room_id, self.user, True, [event_id_sf2, event_id_sf3] + ) # SF4 + event_id_c = self.create_and_send_event( + self.room_id, self.user, False, [event_id_sf3] + ) # Add the new extremity and check the latest events are as expected - self.add_extremity(event_id_a) + self.add_extremity(self.room_id, event_id_a) latest_event_ids = self.get_success( self.store.get_latest_event_ids_in_room(self.room_id) diff --git a/tests/storage/test_event_metrics.py b/tests/storage/test_event_metrics.py new file mode 100644 index 0000000000..20a068f1fc --- /dev/null +++ b/tests/storage/test_event_metrics.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.metrics import REGISTRY +from synapse.types import Requester, UserID + +from tests.unittest import HomeserverTestCase + + +class ExtremStatisticsTestCase(HomeserverTestCase): + def test_exposed_to_prometheus(self): + """ + Forward extremity counts are exposed via Prometheus. + """ + room_creator = self.hs.get_room_creation_handler() + + user = UserID("alice", "test") + requester = Requester(user, None, False, None, None) + + # Real events, forward extremities + events = [(3, 2), (6, 2), (4, 6)] + + for event_count, extrems in events: + info = self.get_success(room_creator.create_room(requester, {})) + room_id = info["room_id"] + + last_event = None + + # Make a real event chain + for i in range(event_count): + ev = self.create_and_send_event(room_id, user, False, last_event) + last_event = [ev] + + # Sprinkle in some extremities + for i in range(extrems): + ev = self.create_and_send_event(room_id, user, False, last_event) + + # Let it run for a while, then pull out the statistics from the + # Prometheus client registry + self.reactor.advance(60 * 60 * 1000) + self.pump(1) + + items = list( + filter( + lambda x: x.name == "synapse_forward_extremities", + list(REGISTRY.collect()), + ) + ) + + # Check the values are what we want + buckets = {} + _count = 0 + _sum = 0 + + for i in items[0].samples: + if i[0].endswith("_bucket"): + buckets[i[1]['le']] = i[2] + elif i[0].endswith("_count"): + _count = i[2] + elif i[0].endswith("_sum"): + _sum = i[2] + + # 3 buckets, 2 with 2 extrems, 1 with 6 extrems (bucketed as 7), and + # +Inf which is all + self.assertEqual( + buckets, + { + 1.0: 0, + 2.0: 2, + 3.0: 0, + 5.0: 0, + 7.0: 1, + 10.0: 0, + 15.0: 0, + 20.0: 0, + 50.0: 0, + 100.0: 0, + 200.0: 0, + 500.0: 0, + "+Inf": 3, + }, + ) + # 3 rooms, with 10 total events + self.assertEqual(_count, 3) + self.assertEqual(_sum, 10) diff --git a/tests/unittest.py b/tests/unittest.py index 7dbb64af59..b6dc7932ce 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -27,11 +27,12 @@ import twisted.logger from twisted.internet.defer import Deferred from twisted.trial import unittest +from synapse.api.constants import EventTypes from synapse.config.homeserver 
import HomeServerConfig from synapse.http.server import JsonResource from synapse.http.site import SynapseRequest from synapse.server import HomeServer -from synapse.types import UserID, create_requester +from synapse.types import Requester, UserID, create_requester from synapse.util.logcontext import LoggingContext from tests.server import get_clock, make_request, render, setup_test_homeserver @@ -442,6 +443,64 @@ class HomeserverTestCase(TestCase): access_token = channel.json_body["access_token"] return access_token + def create_and_send_event( + self, room_id, user, soft_failed=False, prev_event_ids=None + ): + """ + Create and send an event. + + Args: + soft_failed (bool): Whether to create a soft failed event or not + prev_event_ids (list[str]|None): Explicitly set the prev events, + or if None just use the default + + Returns: + str: The new event's ID. + """ + event_creator = self.hs.get_event_creation_handler() + secrets = self.hs.get_secrets() + requester = Requester(user, None, False, None, None) + + prev_events_and_hashes = None + if prev_event_ids: + prev_events_and_hashes = [[p, {}, 0] for p in prev_event_ids] + + event, context = self.get_success( + event_creator.create_event( + requester, + { + "type": EventTypes.Message, + "room_id": room_id, + "sender": user.to_string(), + "content": {"body": secrets.token_hex(), "msgtype": "m.text"}, + }, + prev_events_and_hashes=prev_events_and_hashes, + ) + ) + + if soft_failed: + event.internal_metadata.soft_failed = True + + self.get_success( + event_creator.send_nonmember_event(requester, event, context) + ) + + return event.event_id + + def add_extremity(self, room_id, event_id): + """ + Add the given event as an extremity to the room. + """ + self.get_success( + self.hs.get_datastore()._simple_insert( + table="event_forward_extremities", + values={"room_id": room_id, "event_id": event_id}, + desc="test_add_extremity", + ) + ) + + self.hs.get_datastore().get_latest_event_ids_in_room.invalidate((room_id,)) + def attempt_wrong_password_login(self, username, password): """Attempts to login as the user with the given password, asserting that the attempt *fails*. -- cgit 1.5.1 From a10c8dae85d3706afbab588e1004350aa5b49539 Mon Sep 17 00:00:00 2001 From: "Amber H. Brown" Date: Fri, 14 Jun 2019 21:09:33 +1000 Subject: fix prometheus rendering error --- synapse/metrics/__init__.py | 2 +- tests/storage/test_event_metrics.py | 61 ++++++++++++++----------------------- 2 files changed, 24 insertions(+), 39 deletions(-) (limited to 'tests') diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 539c353528..0d3ae1a43d 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -227,7 +227,7 @@ class BucketCollector(object): break for i in self.buckets: - res.append([i, buckets.get(i, 0)]) + res.append([str(i), buckets.get(i, 0)]) res.append(["+Inf", sum(data.values())]) diff --git a/tests/storage/test_event_metrics.py b/tests/storage/test_event_metrics.py index 20a068f1fc..1655fcdafc 100644 --- a/tests/storage/test_event_metrics.py +++ b/tests/storage/test_event_metrics.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from prometheus_client.exposition import generate_latest + from synapse.metrics import REGISTRY from synapse.types import Requester, UserID @@ -52,46 +54,29 @@ class ExtremStatisticsTestCase(HomeserverTestCase): self.reactor.advance(60 * 60 * 1000) self.pump(1) - items = list( + items = set( filter( - lambda x: x.name == "synapse_forward_extremities", - list(REGISTRY.collect()), + lambda x: b"synapse_forward_extremities_" in x, + generate_latest(REGISTRY).split(b"\n"), ) ) - # Check the values are what we want - buckets = {} - _count = 0 - _sum = 0 - - for i in items[0].samples: - if i[0].endswith("_bucket"): - buckets[i[1]['le']] = i[2] - elif i[0].endswith("_count"): - _count = i[2] - elif i[0].endswith("_sum"): - _sum = i[2] + expected = set([ + b'synapse_forward_extremities_bucket{le="1.0"} 0.0', + b'synapse_forward_extremities_bucket{le="2.0"} 2.0', + b'synapse_forward_extremities_bucket{le="3.0"} 0.0', + b'synapse_forward_extremities_bucket{le="5.0"} 0.0', + b'synapse_forward_extremities_bucket{le="7.0"} 1.0', + b'synapse_forward_extremities_bucket{le="10.0"} 0.0', + b'synapse_forward_extremities_bucket{le="15.0"} 0.0', + b'synapse_forward_extremities_bucket{le="20.0"} 0.0', + b'synapse_forward_extremities_bucket{le="50.0"} 0.0', + b'synapse_forward_extremities_bucket{le="100.0"} 0.0', + b'synapse_forward_extremities_bucket{le="200.0"} 0.0', + b'synapse_forward_extremities_bucket{le="500.0"} 0.0', + b'synapse_forward_extremities_bucket{le="+Inf"} 3.0', + b'synapse_forward_extremities_count 3.0', + b'synapse_forward_extremities_sum 10.0', + ]) - # 3 buckets, 2 with 2 extrems, 1 with 6 extrems (bucketed as 7), and - # +Inf which is all - self.assertEqual( - buckets, - { - 1.0: 0, - 2.0: 2, - 3.0: 0, - 5.0: 0, - 7.0: 1, - 10.0: 0, - 15.0: 0, - 20.0: 0, - 50.0: 0, - 100.0: 0, - 200.0: 0, - 500.0: 0, - "+Inf": 3, - }, - ) - # 3 rooms, with 10 total events - self.assertEqual(_count, 3) - self.assertEqual(_sum, 10) + self.assertEqual(items, expected) -- cgit 1.5.1 From d0530382eeff053547304532167c0e4654af172c Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 14 Jun 2019 13:18:24 +0100 Subject: Track deactivated accounts in the database (#5378) --- changelog.d/5378.misc | 1 + synapse/handlers/deactivate_account.py | 4 + synapse/storage/registration.py | 114 +++++++++++++++++++++ .../schema/delta/55/users_alter_deactivated.sql | 19 ++++ tests/rest/client/v2_alpha/test_account.py | 45 ++++++++ 5 files changed, 183 insertions(+) create mode 100644 changelog.d/5378.misc create mode 100644 synapse/storage/schema/delta/55/users_alter_deactivated.sql (limited to 'tests') diff --git a/changelog.d/5378.misc b/changelog.d/5378.misc new file mode 100644 index 0000000000..365e49d634 --- /dev/null +++ b/changelog.d/5378.misc @@ -0,0 +1 @@ +Track deactivated accounts in the database. diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index 6a91f7698e..b29089d82c 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2017, 2018 New Vector Ltd +# Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -114,6 +115,9 @@ class DeactivateAccountHandler(BaseHandler): # parts users from rooms (if it isn't already running) self._start_user_parting() + # Mark the user as deactivated. 
+ yield self.store.set_user_deactivated_status(user_id, True) + defer.returnValue(identity_server_supports_unbinding) def _start_user_parting(self): diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 1dd1182e82..4c5751b57f 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -15,6 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging import re from six import iterkeys @@ -31,6 +32,8 @@ from synapse.util.caches.descriptors import cached, cachedInlineCallbacks THIRTY_MINUTES_IN_MS = 30 * 60 * 1000 +logger = logging.getLogger(__name__) + class RegistrationWorkerStore(SQLBaseStore): def __init__(self, db_conn, hs): @@ -598,11 +601,75 @@ class RegistrationStore( "user_threepids_grandfather", self._bg_user_threepids_grandfather, ) + self.register_background_update_handler( + "users_set_deactivated_flag", self._backgroud_update_set_deactivated_flag, + ) + # Create a background job for culling expired 3PID validity tokens hs.get_clock().looping_call( self.cull_expired_threepid_validation_tokens, THIRTY_MINUTES_IN_MS, ) + @defer.inlineCallbacks + def _backgroud_update_set_deactivated_flag(self, progress, batch_size): + """Retrieves a list of all deactivated users and sets the 'deactivated' flag to 1 + for each of them. + """ + + last_user = progress.get("user_id", "") + + def _backgroud_update_set_deactivated_flag_txn(txn): + txn.execute( + """ + SELECT + users.name, + COUNT(access_tokens.token) AS count_tokens, + COUNT(user_threepids.address) AS count_threepids + FROM users + LEFT JOIN access_tokens ON (access_tokens.user_id = users.name) + LEFT JOIN user_threepids ON (user_threepids.user_id = users.name) + WHERE password_hash IS NULL OR password_hash = '' + AND users.name > ? + GROUP BY users.name + ORDER BY users.name ASC + LIMIT ?; + """, + (last_user, batch_size), + ) + + rows = self.cursor_to_dict(txn) + + if not rows: + return True + + rows_processed_nb = 0 + + for user in rows: + if not user["count_tokens"] and not user["count_threepids"]: + self.set_user_deactivated_status_txn(txn, user["user_id"], True) + rows_processed_nb += 1 + + logger.info("Marked %d rows as deactivated", rows_processed_nb) + + self._background_update_progress_txn( + txn, "users_set_deactivated_flag", {"user_id": rows[-1]["user_id"]} + ) + + if batch_size > len(rows): + return True + else: + return False + + end = yield self.runInteraction( + "users_set_deactivated_flag", + _backgroud_update_set_deactivated_flag_txn, + ) + + if end: + yield self._end_background_update("users_set_deactivated_flag") + + defer.returnValue(batch_size) + @defer.inlineCallbacks def add_access_token_to_user(self, user_id, token, device_id=None): """Adds an access token for the given user. @@ -1268,3 +1335,50 @@ class RegistrationStore( "delete_threepid_session", delete_threepid_session_txn, ) + + def set_user_deactivated_status_txn(self, txn, user_id, deactivated): + self._simple_update_one_txn( + txn=txn, + table="users", + keyvalues={"name": user_id}, + updatevalues={"deactivated": 1 if deactivated else 0}, + ) + self._invalidate_cache_and_stream( + txn, self.get_user_deactivated_status, (user_id,), + ) + + @defer.inlineCallbacks + def set_user_deactivated_status(self, user_id, deactivated): + """Set the `deactivated` property for the provided user to the provided value. + + Args: + user_id (str): The ID of the user to set the status for. + deactivated (bool): The value to set for `deactivated`. 
+ """ + + yield self.runInteraction( + "set_user_deactivated_status", + self.set_user_deactivated_status_txn, + user_id, deactivated, + ) + + @cachedInlineCallbacks() + def get_user_deactivated_status(self, user_id): + """Retrieve the value for the `deactivated` property for the provided user. + + Args: + user_id (str): The ID of the user to retrieve the status for. + + Returns: + defer.Deferred(bool): The requested value. + """ + + res = yield self._simple_select_one_onecol( + table="users", + keyvalues={"name": user_id}, + retcol="deactivated", + desc="get_user_deactivated_status", + ) + + # Convert the integer into a boolean. + defer.returnValue(res == 1) diff --git a/synapse/storage/schema/delta/55/users_alter_deactivated.sql b/synapse/storage/schema/delta/55/users_alter_deactivated.sql new file mode 100644 index 0000000000..dabdde489b --- /dev/null +++ b/synapse/storage/schema/delta/55/users_alter_deactivated.sql @@ -0,0 +1,19 @@ +/* Copyright 2019 The Matrix.org Foundation C.I.C. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +ALTER TABLE users ADD deactivated SMALLINT DEFAULT 0 NOT NULL; + +INSERT INTO background_updates (update_name, progress_json) VALUES + ('users_set_deactivated_flag', '{}'); diff --git a/tests/rest/client/v2_alpha/test_account.py b/tests/rest/client/v2_alpha/test_account.py index 0d1c0868ce..a60a4a3b87 100644 --- a/tests/rest/client/v2_alpha/test_account.py +++ b/tests/rest/client/v2_alpha/test_account.py @@ -15,6 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import json import os import re from email.parser import Parser @@ -239,3 +240,47 @@ class PasswordResetTestCase(unittest.HomeserverTestCase): ) self.render(request) self.assertEquals(expected_code, channel.code, channel.result) + + +class DeactivateTestCase(unittest.HomeserverTestCase): + + servlets = [ + synapse.rest.admin.register_servlets_for_client_rest_resource, + login.register_servlets, + account.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + hs = self.setup_test_homeserver() + return hs + + def test_deactivate_account(self): + user_id = self.register_user("kermit", "test") + tok = self.login("kermit", "test") + + request_data = json.dumps({ + "auth": { + "type": "m.login.password", + "user": user_id, + "password": "test", + }, + "erase": False, + }) + request, channel = self.make_request( + "POST", + "account/deactivate", + request_data, + access_token=tok, + ) + self.render(request) + self.assertEqual(request.code, 200) + + store = self.hs.get_datastore() + + # Check that the user has been marked as deactivated. + self.assertTrue(self.get_success(store.get_user_deactivated_status(user_id))) + + # Check that this access token has been invalidated. 
+ request, channel = self.make_request("GET", "account/whoami") + self.render(request) + self.assertEqual(request.code, 401) -- cgit 1.5.1 From 3ed595e327aee6d45ed0371c98e828d724c26b2d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 14 Jun 2019 14:07:32 +0100 Subject: Prometheus histograms are cumalative --- synapse/metrics/__init__.py | 1 - synapse/storage/events.py | 3 ++- tests/storage/test_event_metrics.py | 20 ++++++++++---------- 3 files changed, 12 insertions(+), 12 deletions(-) (limited to 'tests') diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 0d3ae1a43d..8aee14a8a8 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -224,7 +224,6 @@ class BucketCollector(object): for i, bound in enumerate(self.buckets): if x <= bound: buckets[bound] = buckets.get(bound, 0) + data[x] - break for i in self.buckets: res.append([str(i), buckets.get(i, 0)]) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 1578403f79..f631fb1733 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -228,7 +228,8 @@ class EventsStore( self._state_resolution_handler = hs.get_state_resolution_handler() # Collect metrics on the number of forward extremities that exist. - self._current_forward_extremities_amount = {} + # Counter of number of extremities to count + self._current_forward_extremities_amount = c_counter() BucketCollector( "synapse_forward_extremities", diff --git a/tests/storage/test_event_metrics.py b/tests/storage/test_event_metrics.py index 1655fcdafc..19f9ccf5e0 100644 --- a/tests/storage/test_event_metrics.py +++ b/tests/storage/test_event_metrics.py @@ -64,16 +64,16 @@ class ExtremStatisticsTestCase(HomeserverTestCase): expected = set([ b'synapse_forward_extremities_bucket{le="1.0"} 0.0', b'synapse_forward_extremities_bucket{le="2.0"} 2.0', - b'synapse_forward_extremities_bucket{le="3.0"} 0.0', - b'synapse_forward_extremities_bucket{le="5.0"} 0.0', - b'synapse_forward_extremities_bucket{le="7.0"} 1.0', - b'synapse_forward_extremities_bucket{le="10.0"} 0.0', - b'synapse_forward_extremities_bucket{le="15.0"} 0.0', - b'synapse_forward_extremities_bucket{le="20.0"} 0.0', - b'synapse_forward_extremities_bucket{le="50.0"} 0.0', - b'synapse_forward_extremities_bucket{le="100.0"} 0.0', - b'synapse_forward_extremities_bucket{le="200.0"} 0.0', - b'synapse_forward_extremities_bucket{le="500.0"} 0.0', + b'synapse_forward_extremities_bucket{le="3.0"} 2.0', + b'synapse_forward_extremities_bucket{le="5.0"} 2.0', + b'synapse_forward_extremities_bucket{le="7.0"} 3.0', + b'synapse_forward_extremities_bucket{le="10.0"} 3.0', + b'synapse_forward_extremities_bucket{le="15.0"} 3.0', + b'synapse_forward_extremities_bucket{le="20.0"} 3.0', + b'synapse_forward_extremities_bucket{le="50.0"} 3.0', + b'synapse_forward_extremities_bucket{le="100.0"} 3.0', + b'synapse_forward_extremities_bucket{le="200.0"} 3.0', + b'synapse_forward_extremities_bucket{le="500.0"} 3.0', b'synapse_forward_extremities_bucket{le="+Inf"} 3.0', b'synapse_forward_extremities_count 3.0', b'synapse_forward_extremities_sum 10.0', -- cgit 1.5.1 From 6d56a694f4cbfaf9c57a56837d4170e6c6783f3c Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 7 Jun 2019 15:30:54 +0100 Subject: Don't send renewal emails to deactivated users --- changelog.d/5394.bugfix | 1 + synapse/handlers/account_validity.py | 3 ++ synapse/handlers/deactivate_account.py | 6 +++ synapse/storage/_base.py | 4 +- synapse/storage/registration.py | 14 ++++++ 
tests/rest/client/v2_alpha/test_register.py | 67 ++++++++++++++++++----------- 6 files changed, 68 insertions(+), 27 deletions(-) create mode 100644 changelog.d/5394.bugfix (limited to 'tests') diff --git a/changelog.d/5394.bugfix b/changelog.d/5394.bugfix new file mode 100644 index 0000000000..2ad9fbe82c --- /dev/null +++ b/changelog.d/5394.bugfix @@ -0,0 +1 @@ +Fix a bug where deactivated users could receive renewal emails if the account validity feature is on. diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py index 261446517d..5e0b92eb1c 100644 --- a/synapse/handlers/account_validity.py +++ b/synapse/handlers/account_validity.py @@ -110,6 +110,9 @@ class AccountValidityHandler(object): # Stop right here if the user doesn't have at least one email address. # In this case, they will have to ask their server admin to renew their # account manually. + # We don't need to do a specific check to make sure the account isn't + # deactivated, as a deactivated account isn't supposed to have any + # email address attached to it. if not addresses: return diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index b29089d82c..7378b56c1d 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -43,6 +43,8 @@ class DeactivateAccountHandler(BaseHandler): # it left off (if it has work left to do). hs.get_reactor().callWhenRunning(self._start_user_parting) + self._account_validity_enabled = hs.config.account_validity.enabled + @defer.inlineCallbacks def deactivate_account(self, user_id, erase_data, id_server=None): """Deactivate a user's account @@ -115,6 +117,10 @@ class DeactivateAccountHandler(BaseHandler): # parts users from rooms (if it isn't already running) self._start_user_parting() + # Remove all information on the user from the account_validity table. + if self._account_validity_enabled: + yield self.store.delete_account_validity_for_user(user_id) + # Mark the user as deactivated. yield self.store.set_user_deactivated_status(user_id, True) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index ae891aa332..941c07fce5 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -299,12 +299,12 @@ class SQLBaseStore(object): def select_users_with_no_expiration_date_txn(txn): """Retrieves the list of registered users with no expiration date from the - database. + database, filtering out deactivated users. """ sql = ( "SELECT users.name FROM users" " LEFT JOIN account_validity ON (users.name = account_validity.user_id)" - " WHERE account_validity.user_id is NULL;" + " WHERE account_validity.user_id is NULL AND users.deactivated = 0;" ) txn.execute(sql, []) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 4c5751b57f..9f910eac9c 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -251,6 +251,20 @@ class RegistrationWorkerStore(SQLBaseStore): desc="set_renewal_mail_status", ) + @defer.inlineCallbacks + def delete_account_validity_for_user(self, user_id): + """Deletes the entry for the given user in the account validity table, removing + their expiration date and renewal token. + + Args: + user_id (str): ID of the user to remove from the account validity table. 
+ """ + yield self._simple_delete_one( + table="account_validity", + keyvalues={"user_id": user_id}, + desc="delete_account_validity_for_user", + ) + @defer.inlineCallbacks def is_server_admin(self, user): res = yield self._simple_select_one_onecol( diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index 8536e6777a..b35b215446 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -26,7 +26,7 @@ from synapse.api.constants import LoginType from synapse.api.errors import Codes from synapse.appservice import ApplicationService from synapse.rest.client.v1 import login -from synapse.rest.client.v2_alpha import account_validity, register, sync +from synapse.rest.client.v2_alpha import account, account_validity, register, sync from tests import unittest @@ -308,6 +308,7 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): login.register_servlets, sync.register_servlets, account_validity.register_servlets, + account.register_servlets, ] def make_homeserver(self, reactor, clock): @@ -358,20 +359,7 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): def test_renewal_email(self): self.email_attempts = [] - user_id = self.register_user("kermit", "monkey") - tok = self.login("kermit", "monkey") - # We need to manually add an email address otherwise the handler will do - # nothing. - now = self.hs.clock.time_msec() - self.get_success( - self.store.user_add_threepid( - user_id=user_id, - medium="email", - address="kermit@example.com", - validated_at=now, - added_at=now, - ) - ) + (user_id, tok) = self.create_user() # Move 6 days forward. This should trigger a renewal email to be sent. self.reactor.advance(datetime.timedelta(days=6).total_seconds()) @@ -396,6 +384,44 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): def test_manual_email_send(self): self.email_attempts = [] + (user_id, tok) = self.create_user() + request, channel = self.make_request( + b"POST", + "/_matrix/client/unstable/account_validity/send_mail", + access_token=tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"200", channel.result) + + self.assertEqual(len(self.email_attempts), 1) + + def test_deactivated_user(self): + self.email_attempts = [] + + (user_id, tok) = self.create_user() + + request_data = json.dumps({ + "auth": { + "type": "m.login.password", + "user": user_id, + "password": "monkey", + }, + "erase": False, + }) + request, channel = self.make_request( + "POST", + "account/deactivate", + request_data, + access_token=tok, + ) + self.render(request) + self.assertEqual(request.code, 200) + + self.reactor.advance(datetime.timedelta(days=8).total_seconds()) + + self.assertEqual(len(self.email_attempts), 0) + + def create_user(self): user_id = self.register_user("kermit", "monkey") tok = self.login("kermit", "monkey") # We need to manually add an email address otherwise the handler will do @@ -410,16 +436,7 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): added_at=now, ) ) - - request, channel = self.make_request( - b"POST", - "/_matrix/client/unstable/account_validity/send_mail", - access_token=tok, - ) - self.render(request) - self.assertEquals(channel.result["code"], b"200", channel.result) - - self.assertEqual(len(self.email_attempts), 1) + return (user_id, tok) def test_manual_email_send_expired_account(self): user_id = self.register_user("kermit", "monkey") -- cgit 1.5.1 From 
f874b16b2e7208d3a202283c085340196d065560 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 12 Jun 2019 10:31:37 +0100 Subject: Add plugin APIs for implementations of custom event rules. --- changelog.d/5440.feature | 1 + docs/sample_config.yaml | 13 +++++ synapse/config/homeserver.py | 2 + synapse/config/third_party_event_rules.py | 42 ++++++++++++++++ synapse/events/third_party_rules.py | 62 ++++++++++++++++++++++++ synapse/handlers/federation.py | 68 +++++++++++++++++++++++++- synapse/handlers/message.py | 14 +++++- synapse/server.py | 7 +++ tests/rest/client/third_party_rules.py | 79 +++++++++++++++++++++++++++++++ 9 files changed, 284 insertions(+), 4 deletions(-) create mode 100644 changelog.d/5440.feature create mode 100644 synapse/config/third_party_event_rules.py create mode 100644 synapse/events/third_party_rules.py create mode 100644 tests/rest/client/third_party_rules.py (limited to 'tests') diff --git a/changelog.d/5440.feature b/changelog.d/5440.feature new file mode 100644 index 0000000000..63d9b58734 --- /dev/null +++ b/changelog.d/5440.feature @@ -0,0 +1 @@ +Allow server admins to define implementations of extra rules for allowing or denying incoming events. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 4d7e6f3eb5..bd80d97a93 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -1351,3 +1351,16 @@ password_config: # alias: "*" # room_id: "*" # action: allow + + +# Server admins can define a Python module that implements extra rules for +# allowing or denying incoming events. In order to work, this module needs to +# override the methods defined in synapse/events/third_party_rules.py. +# +# This feature is designed to be used in closed federations only, where each +# participating server enforces the same rules. +# +#third_party_event_rules: +# module: "my_custom_project.SuperRulesSet" +# config: +# example_option: 'things' diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py index 5c4fc8ff21..acadef4fd3 100644 --- a/synapse/config/homeserver.py +++ b/synapse/config/homeserver.py @@ -38,6 +38,7 @@ from .server import ServerConfig from .server_notices_config import ServerNoticesConfig from .spam_checker import SpamCheckerConfig from .stats import StatsConfig +from .third_party_event_rules import ThirdPartyRulesConfig from .tls import TlsConfig from .user_directory import UserDirectoryConfig from .voip import VoipConfig @@ -73,5 +74,6 @@ class HomeServerConfig( StatsConfig, ServerNoticesConfig, RoomDirectoryConfig, + ThirdPartyRulesConfig, ): pass diff --git a/synapse/config/third_party_event_rules.py b/synapse/config/third_party_event_rules.py new file mode 100644 index 0000000000..a89dd5f98a --- /dev/null +++ b/synapse/config/third_party_event_rules.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from synapse.util.module_loader import load_module + +from ._base import Config + + +class ThirdPartyRulesConfig(Config): + def read_config(self, config): + self.third_party_event_rules = None + + provider = config.get("third_party_event_rules", None) + if provider is not None: + self.third_party_event_rules = load_module(provider) + + def default_config(self, **kwargs): + return """\ + # Server admins can define a Python module that implements extra rules for + # allowing or denying incoming events. In order to work, this module needs to + # override the methods defined in synapse/events/third_party_rules.py. + # + # This feature is designed to be used in closed federations only, where each + # participating server enforces the same rules. + # + #third_party_event_rules: + # module: "my_custom_project.SuperRulesSet" + # config: + # example_option: 'things' + """ diff --git a/synapse/events/third_party_rules.py b/synapse/events/third_party_rules.py new file mode 100644 index 0000000000..9f98d51523 --- /dev/null +++ b/synapse/events/third_party_rules.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from twisted.internet import defer + + +class ThirdPartyEventRules(object): + """Allows server admins to provide a Python module implementing an extra set of rules + to apply when processing events. + + This is designed to help admins of closed federations with enforcing custom + behaviours. + """ + + def __init__(self, hs): + self.third_party_rules = None + + self.store = hs.get_datastore() + + module = None + config = None + if hs.config.third_party_event_rules: + module, config = hs.config.third_party_event_rules + + if module is not None: + self.third_party_rules = module(config=config) + + @defer.inlineCallbacks + def check_event_allowed(self, event, context): + """Check if a provided event should be allowed in the given context. + + Args: + event (synapse.events.EventBase): The event to be checked. + context (synapse.events.snapshot.EventContext): The context of the event. + + Returns: + defer.Deferred(bool), True if the event should be allowed, False if not. + """ + if self.third_party_rules is None: + defer.returnValue(True) + + prev_state_ids = yield context.get_prev_state_ids(self.store) + + # Retrieve the state events from the database. + state_events = {} + for key, event_id in prev_state_ids.items(): + state_events[key] = yield self.store.get_event(event_id, allow_none=True) + + ret = yield self.third_party_rules.check_event_allowed(event, state_events) + defer.returnValue(ret) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index ac5ca79143..983ac9f915 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd -# Copyright 2018 New Vector Ltd +# Copyright 2017-2018 New Vector Ltd +# Copyright 2019 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -33,6 +34,7 @@ from synapse.api.constants import EventTypes, Membership, RejectedReason from synapse.api.errors import ( AuthError, CodeMessageException, + Codes, FederationDeniedError, FederationError, RequestSendFailed, @@ -127,6 +129,8 @@ class FederationHandler(BaseHandler): self.room_queues = {} self._room_pdu_linearizer = Linearizer("fed_room_pdu") + self.third_party_event_rules = hs.get_third_party_event_rules() + @defer.inlineCallbacks def on_receive_pdu( self, origin, pdu, sent_to_us_directly=False, @@ -1258,6 +1262,15 @@ class FederationHandler(BaseHandler): logger.warn("Failed to create join %r because %s", event, e) raise e + event_allowed = yield self.third_party_event_rules.check_event_allowed( + event, context, + ) + if not event_allowed: + logger.info("Creation of join %s forbidden by third-party rules", event) + raise SynapseError( + 403, "This event is not allowed in this context", Codes.FORBIDDEN, + ) + # The remote hasn't signed it yet, obviously. We'll do the full checks # when we get the event back in `on_send_join_request` yield self.auth.check_from_context( @@ -1300,6 +1313,15 @@ class FederationHandler(BaseHandler): origin, event ) + event_allowed = yield self.third_party_event_rules.check_event_allowed( + event, context, + ) + if not event_allowed: + logger.info("Sending of join %s forbidden by third-party rules", event) + raise SynapseError( + 403, "This event is not allowed in this context", Codes.FORBIDDEN, + ) + logger.debug( "on_send_join_request: After _handle_new_event: %s, sigs: %s", event.event_id, @@ -1458,6 +1480,15 @@ class FederationHandler(BaseHandler): builder=builder, ) + event_allowed = yield self.third_party_event_rules.check_event_allowed( + event, context, + ) + if not event_allowed: + logger.warning("Creation of leave %s forbidden by third-party rules", event) + raise SynapseError( + 403, "This event is not allowed in this context", Codes.FORBIDDEN, + ) + try: # The remote hasn't signed it yet, obviously. 
We'll do the full checks # when we get the event back in `on_send_leave_request` @@ -1484,10 +1515,19 @@ class FederationHandler(BaseHandler): event.internal_metadata.outlier = False - yield self._handle_new_event( + context = yield self._handle_new_event( origin, event ) + event_allowed = yield self.third_party_event_rules.check_event_allowed( + event, context, + ) + if not event_allowed: + logger.info("Sending of leave %s forbidden by third-party rules", event) + raise SynapseError( + 403, "This event is not allowed in this context", Codes.FORBIDDEN, + ) + logger.debug( "on_send_leave_request: After _handle_new_event: %s, sigs: %s", event.event_id, @@ -2550,6 +2590,18 @@ class FederationHandler(BaseHandler): builder=builder ) + event_allowed = yield self.third_party_event_rules.check_event_allowed( + event, context, + ) + if not event_allowed: + logger.info( + "Creation of threepid invite %s forbidden by third-party rules", + event, + ) + raise SynapseError( + 403, "This event is not allowed in this context", Codes.FORBIDDEN, + ) + event, context = yield self.add_display_name_to_third_party_invite( room_version, event_dict, event, context ) @@ -2598,6 +2650,18 @@ class FederationHandler(BaseHandler): builder=builder, ) + event_allowed = yield self.third_party_event_rules.check_event_allowed( + event, context, + ) + if not event_allowed: + logger.warning( + "Exchange of threepid invite %s forbidden by third-party rules", + event, + ) + raise SynapseError( + 403, "This event is not allowed in this context", Codes.FORBIDDEN, + ) + event, context = yield self.add_display_name_to_third_party_invite( room_version, event_dict, event, context ) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 0b02469ceb..11650dc80c 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- -# Copyright 2014 - 2016 OpenMarket Ltd -# Copyright 2017 - 2018 New Vector Ltd +# Copyright 2014-2016 OpenMarket Ltd +# Copyright 2017-2018 New Vector Ltd +# Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -248,6 +249,7 @@ class EventCreationHandler(object): self.action_generator = hs.get_action_generator() self.spam_checker = hs.get_spam_checker() + self.third_party_event_rules = hs.get_third_party_event_rules() self._block_events_without_consent_error = ( self.config.block_events_without_consent_error @@ -658,6 +660,14 @@ class EventCreationHandler(object): else: room_version = yield self.store.get_room_version(event.room_id) + event_allowed = yield self.third_party_event_rules.check_event_allowed( + event, context, + ) + if not event_allowed: + raise SynapseError( + 403, "This event is not allowed in this context", Codes.FORBIDDEN, + ) + try: yield self.auth.check_from_context(room_version, event, context) except AuthError as err: diff --git a/synapse/server.py b/synapse/server.py index 9229a68a8d..a54e023cc9 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2017-2018 New Vector Ltd +# Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -35,6 +37,7 @@ from synapse.crypto import context_factory from synapse.crypto.keyring import Keyring from synapse.events.builder import EventBuilderFactory from synapse.events.spamcheck import SpamChecker +from synapse.events.third_party_rules import ThirdPartyEventRules from synapse.events.utils import EventClientSerializer from synapse.federation.federation_client import FederationClient from synapse.federation.federation_server import ( @@ -178,6 +181,7 @@ class HomeServer(object): 'groups_attestation_renewer', 'secrets', 'spam_checker', + 'third_party_event_rules', 'room_member_handler', 'federation_registry', 'server_notices_manager', @@ -483,6 +487,9 @@ class HomeServer(object): def build_spam_checker(self): return SpamChecker(self) + def build_third_party_event_rules(self): + return ThirdPartyEventRules(self) + def build_room_member_handler(self): if self.config.worker_app: return RoomMemberWorkerHandler(self) diff --git a/tests/rest/client/third_party_rules.py b/tests/rest/client/third_party_rules.py new file mode 100644 index 0000000000..7167fc56b6 --- /dev/null +++ b/tests/rest/client/third_party_rules.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.rest import admin +from synapse.rest.client.v1 import login, room + +from tests import unittest + + +class ThirdPartyRulesTestModule(object): + def __init__(self, config): + pass + + def check_event_allowed(self, event, context): + if event.type == "foo.bar.forbidden": + return False + else: + return True + + @staticmethod + def parse_config(config): + return config + + +class ThirdPartyRulesTestCase(unittest.HomeserverTestCase): + servlets = [ + admin.register_servlets, + login.register_servlets, + room.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + config = self.default_config() + config["third_party_event_rules"] = { + "module": "tests.rest.client.third_party_rules.ThirdPartyRulesTestModule", + "config": {}, + } + + self.hs = self.setup_test_homeserver(config=config) + return self.hs + + def test_third_party_rules(self): + """Tests that a forbidden event is forbidden from being sent, but an allowed one + can be sent. + """ + user_id = self.register_user("kermit", "monkey") + tok = self.login("kermit", "monkey") + + room_id = self.helper.create_room_as(user_id, tok=tok) + + request, channel = self.make_request( + "PUT", + "/_matrix/client/r0/rooms/%s/send/foo.bar.allowed/1" % room_id, + {}, + access_token=tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"200", channel.result) + + request, channel = self.make_request( + "PUT", + "/_matrix/client/r0/rooms/%s/send/foo.bar.forbidden/1" % room_id, + {}, + access_token=tok, + ) + self.render(request) + self.assertEquals(channel.result["code"], b"403", channel.result) -- cgit 1.5.1
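Following on from the third-party event rules patch above, here is a hedged sketch (not part of any of these commits) of the sort of module a server admin might point the new third_party_event_rules option at. The module path, class name and the rule itself are invented for illustration; the hook signature and parse_config helper follow the ThirdPartyRulesTestModule in the test above, and state_events is the {(event type, state key): event} mapping assembled by ThirdPartyEventRules.check_event_allowed.

class OnlyCreatorRenamesRooms(object):
    """Illustrative rule set: reject m.room.name changes from anyone but
    the room's creator; allow everything else."""

    def __init__(self, config):
        self.config = config

    def check_event_allowed(self, event, state_events):
        if event.type != "m.room.name":
            return True

        # Current room state, keyed by (event type, state key).
        create_event = state_events.get(("m.room.create", ""))
        if create_event is None:
            # No create event in the supplied state; don't block anything.
            return True

        return event.sender == create_event.content.get("creator")

    @staticmethod
    def parse_config(config):
        return config

Enabled in homeserver.yaml with something along the lines of:

third_party_event_rules:
  module: "my_custom_project.OnlyCreatorRenamesRooms"
  config: {}

where my_custom_project stands in for whatever importable module the class actually lives in, as in the sample configuration added by the patch.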