author     Mark Haines <mark.haines@matrix.org>    2015-04-23 16:39:13 +0100
committer  Mark Haines <mark.haines@matrix.org>    2015-04-23 16:39:13 +0100
commit     4bbf7156efdc493fc1c9e3177bef90c45f25f0d3 (patch)
tree       a90839618a5b02ae4516c32db500c0a539efd272 /synapse/rest
parent     Implement remote key lookup api (diff)
download   synapse-4bbf7156efdc493fc1c9e3177bef90c45f25f0d3.tar.xz
Update to match the specification for key/v2
Diffstat (limited to 'synapse/rest')
-rw-r--r--  synapse/rest/key/v2/local_key_resource.py   18
-rw-r--r--  synapse/rest/key/v2/remote_key_resource.py  25
2 files changed, 24 insertions, 19 deletions
diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py
index 982a460962..33cbd7cf8e 100644
--- a/synapse/rest/key/v2/local_key_resource.py
+++ b/synapse/rest/key/v2/local_key_resource.py
@@ -36,14 +36,16 @@ class LocalKey(Resource):
         HTTP/1.1 200 OK
         Content-Type: application/json
     {
-        "expires": # integer posix timestamp when this result expires.
+        "valid_until_ts": # integer posix timestamp when this result expires.
         "server_name": "this.server.example.com"
         "verify_keys": {
-            "algorithm:version": # base64 encoded NACL verification key.
+            "algorithm:version": {
+                "key": # base64 encoded NACL verification key.
+            }
         },
         "old_verify_keys": {
             "algorithm:version": {
-                "expired": # integer posix timestamp when the key expired.
+                "expired_ts": # integer posix timestamp when the key expired.
                 "key": # base64 encoded NACL verification key.
             }
         }
@@ -67,7 +69,7 @@ class LocalKey(Resource):
 
     def update_response_body(self, time_now_msec):
         refresh_interval = self.config.key_refresh_interval
-        self.expires = int(time_now_msec + refresh_interval)
+        self.valid_until_ts = int(time_now_msec + refresh_interval)
         self.response_body = encode_canonical_json(self.response_json_object())
 
     def response_json_object(self):
@@ -85,7 +87,7 @@ class LocalKey(Resource):
             verify_key_bytes = key.encode()
             old_verify_keys[key_id] = {
                 u"key": encode_base64(verify_key_bytes),
-                u"expired": key.expired,
+                u"expired_ts": key.expired,
             }
 
         x509_certificate_bytes = crypto.dump_certificate(
@@ -96,7 +98,7 @@ class LocalKey(Resource):
         sha256_fingerprint = sha256(x509_certificate_bytes).digest()
 
         json_object = {
-            u"valid_until": self.expires,
+            u"valid_until_ts": self.valid_until_ts,
             u"server_name": self.config.server_name,
             u"verify_keys": verify_keys,
             u"old_verify_keys": old_verify_keys,
@@ -115,8 +117,8 @@ class LocalKey(Resource):
     def render_GET(self, request):
         time_now = self.clock.time_msec()
         # Update the expiry time if less than half the interval remains.
-        if time_now + self.config.key_refresh_interval / 2 > self.expires:
-            self.update_response_body()
+        if time_now + self.config.key_refresh_interval / 2 > self.valid_until_ts:
+            self.update_response_body(time_now)
         return respond_with_json_bytes(
             request, 200, self.response_body,
             version_string=self.version_string
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index cf6f2c2e73..724ca00397 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -41,7 +41,7 @@ class RemoteKey(Resource):
     "server_keys": [
         {
             "server_name": "remote.server.example.com"
-            "valid_until": # posix timestamp
+            "valid_until_ts": # posix timestamp
             "verify_keys": {
                 "a.key.id": { # The identifier for a key.
                     key: "" # base64 encoded verification key.
@@ -50,7 +50,7 @@ class RemoteKey(Resource):
             "old_verify_keys": {
                 "an.old.key.id": { # The identifier for an old key.
                     key: "", # base64 encoded key
-                    expired: 0, # when the key stop being used.
+                    "expired_ts": 0, # when the key stop being used.
} } "tls_fingerprints": [ @@ -121,7 +121,7 @@ class RemoteKey(Resource): cached = yield self.store.get_server_keys_json(store_queries) - json_results = [] + json_results = set() time_now_ms = self.clock.time_msec() @@ -129,20 +129,23 @@ class RemoteKey(Resource): for (server_name, key_id, from_server), results in cached.items(): results = [ (result["ts_added_ms"], result) for result in results - if result["ts_valid_until_ms"] > time_now_ms ] - if not results: - if key_id is not None: - cache_misses.setdefault(server_name, set()).add(key_id) + if not results and key_id is not None: + cache_misses.setdefault(server_name, set()).add(key_id) continue if key_id is not None: - most_recent_result = max(results) - json_results.append(most_recent_result[-1]["key_json"]) + ts_added_ms, most_recent_result = max(results) + ts_valid_until_ms = most_recent_result["ts_valid_until_ms"] + if (ts_added_ms + ts_valid_until_ms) / 2 < time_now_ms: + # We more than half way through the lifetime of the + # response. We should fetch a fresh copy. + cache_misses.setdefault(server_name, set()).add(key_id) + json_results.add(bytes(most_recent_result["key_json"])) else: - for result in results: - json_results.append(result[-1]["key_json"]) + for ts_added, result in results: + json_results.add(bytes(result["key_json"])) if cache_misses and query_remote_on_cache_miss: for server_name, key_ids in cache_misses.items(): |