From 823999716eb506740b34d87fcdaaf3ebbe4f760c Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Fri, 30 Jan 2015 11:07:57 +0000
Subject: Fix bug in timeout handling in keyclient

---
 synapse/crypto/keyclient.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

(limited to 'synapse/crypto')

diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py
index 9c910fa3fc..cdb6279764 100644
--- a/synapse/crypto/keyclient.py
+++ b/synapse/crypto/keyclient.py
@@ -61,9 +61,11 @@ class SynapseKeyClientProtocol(HTTPClient):
     def __init__(self):
         self.remote_key = defer.Deferred()
+        self.host = None
 
     def connectionMade(self):
-        logger.debug("Connected to %s", self.transport.getHost())
+        self.host = self.transport.getHost()
+        logger.debug("Connected to %s", self.host)
         self.sendCommand(b"GET", b"/_matrix/key/v1/")
         self.endHeaders()
         self.timer = reactor.callLater(
@@ -92,8 +94,7 @@ class SynapseKeyClientProtocol(HTTPClient):
         self.timer.cancel()
 
     def on_timeout(self):
-        logger.debug("Timeout waiting for response from %s",
-                     self.transport.getHost())
+        logger.debug("Timeout waiting for response from %s", self.host)
         self.remote_key.errback(IOError("Timeout waiting for response"))
         self.transport.abortConnection()
-- cgit 1.5.1

From 84a769cdb7321481954c386a040d7e7ff504d02b Mon Sep 17 00:00:00 2001
From: Mark Haines
Date: Tue, 10 Feb 2015 17:58:36 +0000
Subject: Fix code-style

---
 synapse/app/homeserver.py                | 2 +-
 synapse/crypto/keyclient.py              | 4 ++--
 synapse/handlers/presence.py             | 8 ++++----
 synapse/handlers/sync.py                 | 2 +-
 synapse/rest/client/v1/directory.py      | 2 +-
 synapse/rest/client/v2_alpha/sync.py     | 2 +-
 synapse/rest/media/v1/upload_resource.py | 4 ++--
 7 files changed, 12 insertions(+), 12 deletions(-)

(limited to 'synapse/crypto')

diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 8976ff2e82..ff6680e7da 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -134,7 +134,7 @@ class SynapseHomeServer(HomeServer):
         logger.info("Attaching %s to path %s", resource, full_path)
         last_resource = self.root_resource
         for path_seg in full_path.split('/')[1:-1]:
-            if not path_seg in last_resource.listNames():
+            if path_seg not in last_resource.listNames():
                 # resource doesn't exist, so make a "dummy resource"
                 child_resource = Resource()
                 last_resource.putChild(path_seg, child_resource)
diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py
index cdb6279764..cd12349f67 100644
--- a/synapse/crypto/keyclient.py
+++ b/synapse/crypto/keyclient.py
@@ -75,7 +75,7 @@ class SynapseKeyClientProtocol(HTTPClient):
     def handleStatus(self, version, status, message):
         if status != b"200":
-            #logger.info("Non-200 response from %s: %s %s",
+            # logger.info("Non-200 response from %s: %s %s",
             #    self.transport.getHost(), status, message)
             self.transport.abortConnection()
@@ -83,7 +83,7 @@ class SynapseKeyClientProtocol(HTTPClient):
         try:
             json_response = json.loads(response_body_bytes)
         except ValueError:
-            #logger.info("Invalid JSON response from %s",
+            # logger.info("Invalid JSON response from %s",
             #    self.transport.getHost())
             self.transport.abortConnection()
             return
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 6a266ee0fe..59287010ed 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -457,9 +457,9 @@ class PresenceHandler(BaseHandler):
         if state is None:
             state = yield self.store.get_presence_state(user.localpart)
         else:
-#            statuscache = self._get_or_make_usercache(user)
-#            self._user_cachemap_latest_serial += 1
-#            statuscache.update(state, self._user_cachemap_latest_serial)
+            # statuscache = self._get_or_make_usercache(user)
+            # self._user_cachemap_latest_serial += 1
+            # statuscache.update(state, self._user_cachemap_latest_serial)
             pass
 
         yield self.push_update_to_local_and_remote(
@@ -709,7 +709,7 @@ class PresenceHandler(BaseHandler):
 
         # TODO(paul) permissions checks
 
-        if not user in self._remote_sendmap:
+        if user not in self._remote_sendmap:
             self._remote_sendmap[user] = set()
 
         self._remote_sendmap[user].add(origin)
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 5af90cc5d1..7883bbd834 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -114,7 +114,7 @@ class SyncHandler(BaseHandler):
         if sync_config.gap:
             return self.incremental_sync_with_gap(sync_config, since_token)
         else:
-            #TODO(mjark): Handle gapless sync
+            # TODO(mjark): Handle gapless sync
             raise NotImplementedError()
 
     @defer.inlineCallbacks
diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py
index 8f65efec5f..8ed7e2d669 100644
--- a/synapse/rest/client/v1/directory.py
+++ b/synapse/rest/client/v1/directory.py
@@ -48,7 +48,7 @@ class ClientDirectoryServer(ClientV1RestServlet):
         user, client = yield self.auth.get_user_by_req(request)
 
         content = _parse_json(request)
-        if not "room_id" in content:
+        if "room_id" not in content:
             raise SynapseError(400, "Missing room_id key",
                                errcode=Codes.BAD_JSON)
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 81d5cf8ead..3056ec45cf 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -118,7 +118,7 @@ class SyncRestServlet(RestServlet):
         except:
             filter = Filter({})
         # filter = filter.apply_overrides(http_request)
-        #if filter.matches(event):
+        # if filter.matches(event):
         #   # stuff
 
         sync_config = SyncConfig(
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index 6df52ca434..e5aba3af4c 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -96,8 +96,8 @@ class UploadResource(BaseMediaResource):
                 code=400,
             )
 
-        #if headers.hasHeader("Content-Disposition"):
-        #    disposition = headers.getRawHeaders("Content-Disposition")[0]
+        # if headers.hasHeader("Content-Disposition"):
+        #     disposition = headers.getRawHeaders("Content-Disposition")[0]
         # TODO(markjh): parse content-dispostion
 
         content_uri = yield self.create_content(
-- cgit 1.5.1

From 4ebbaf0d4382813ba896f3e8101de12e112cbed5 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 11 Feb 2015 14:23:10 +0000
Subject: Blunty replace json with simplejson

---
 synapse/crypto/keyclient.py                 | 2 +-
 synapse/federation/persistence.py           | 2 +-
 synapse/federation/transport/server.py      | 2 +-
 synapse/http/client.py                      | 2 +-
 synapse/http/matrixfederationclient.py      | 2 +-
 synapse/push/__init__.py                    | 2 +-
 synapse/push/pusherpool.py                  | 2 +-
 synapse/rest/client/v1/directory.py         | 2 +-
 synapse/rest/client/v1/login.py             | 2 +-
 synapse/rest/client/v1/presence.py          | 2 +-
 synapse/rest/client/v1/profile.py           | 2 +-
 synapse/rest/client/v1/push_rule.py         | 2 +-
 synapse/rest/client/v1/pusher.py            | 2 +-
 synapse/rest/client/v1/register.py          | 2 +-
 synapse/rest/client/v1/room.py              | 2 +-
 synapse/rest/client/v2_alpha/filter.py      | 2 +-
 synapse/rest/media/v0/content_repository.py | 2 +-
 synapse/storage/__init__.py                 | 2 +-
 synapse/storage/_base.py                    | 2 +-
 synapse/storage/filtering.py                | 2 +-
 synapse/storage/push_rule.py                | 2 +-
 21 files changed, 21 insertions(+), 21 deletions(-)
(limited to 'synapse/crypto')

diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py
index cd12349f67..74008347c3 100644
--- a/synapse/crypto/keyclient.py
+++ b/synapse/crypto/keyclient.py
@@ -19,7 +19,7 @@ from twisted.internet.protocol import Factory
 from twisted.internet import defer, reactor
 from synapse.http.endpoint import matrix_federation_endpoint
 from synapse.util.logcontext import PreserveLoggingContext
-import json
+import simplejson as json
 import logging
diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py
index 85c82a4623..8a1afc0ca5 100644
--- a/synapse/federation/persistence.py
+++ b/synapse/federation/persistence.py
@@ -23,7 +23,7 @@ from twisted.internet import defer
 from synapse.util.logutils import log_function
-import json
+import simplejson as json
 import logging
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 9c9f8d525b..2ffb37aa18 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -20,7 +20,7 @@ from synapse.api.errors import Codes, SynapseError
 from synapse.util.logutils import log_function
 import logging
-import json
+import simplejson as json
 import re
diff --git a/synapse/http/client.py b/synapse/http/client.py
index 198f575cfa..d500e19c81 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -23,7 +23,7 @@ from twisted.web.http_headers import Headers
 from StringIO import StringIO
-import json
+import simplejson as json
 import logging
 import urllib
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 056d446e42..406203acf2 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -33,7 +33,7 @@ from synapse.api.errors import (
 from syutil.crypto.jsonsign import sign_json
-import json
+import simplejson as json
 import logging
 import urllib
 import urlparse
diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py
index 418a348a58..0659a1cb9b 100644
--- a/synapse/push/__init__.py
+++ b/synapse/push/__init__.py
@@ -22,7 +22,7 @@ import synapse.util.async
 import baserules
 import logging
-import json
+import simplejson as json
 import re
 logger = logging.getLogger(__name__)
diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py
index 5a525befd7..7483d257bf 100644
--- a/synapse/push/pusherpool.py
+++ b/synapse/push/pusherpool.py
@@ -20,7 +20,7 @@ from httppusher import HttpPusher
 from synapse.push import PusherConfigException
 import logging
-import json
+import simplejson as json
 logger = logging.getLogger(__name__)
diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py
index 8ed7e2d669..420aa89f38 100644
--- a/synapse/rest/client/v1/directory.py
+++ b/synapse/rest/client/v1/directory.py
@@ -20,7 +20,7 @@ from synapse.api.errors import AuthError, SynapseError, Codes
 from synapse.types import RoomAlias
 from .base import ClientV1RestServlet, client_path_pattern
-import json
+import simplejson as json
 import logging
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index 7116ac98e8..b2257b749d 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -19,7 +19,7 @@ from synapse.api.errors import SynapseError
 from synapse.types import UserID
 from base import ClientV1RestServlet, client_path_pattern
-import json
+import simplejson as json
 class LoginRestServlet(ClientV1RestServlet):
diff --git a/synapse/rest/client/v1/presence.py b/synapse/rest/client/v1/presence.py
index 7feb4aadb1..78d4f2b128 100644
--- a/synapse/rest/client/v1/presence.py
+++ b/synapse/rest/client/v1/presence.py
@@ -21,7 +21,7 @@ from synapse.api.errors import SynapseError
 from synapse.types import UserID
 from .base import ClientV1RestServlet, client_path_pattern
-import json
+import simplejson as json
 import logging
 logger = logging.getLogger(__name__)
diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py
index 15d6f3fc6c..1e77eb49cf 100644
--- a/synapse/rest/client/v1/profile.py
+++ b/synapse/rest/client/v1/profile.py
@@ -19,7 +19,7 @@ from twisted.internet import defer
 from .base import ClientV1RestServlet, client_path_pattern
 from synapse.types import UserID
-import json
+import simplejson as json
 class ProfileDisplaynameRestServlet(ClientV1RestServlet):
diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py
index c4e7dfcf0e..b012f31084 100644
--- a/synapse/rest/client/v1/push_rule.py
+++ b/synapse/rest/client/v1/push_rule.py
@@ -27,7 +27,7 @@ from synapse.push.rulekinds import (
     PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP
 )
-import json
+import simplejson as json
 class PushRuleRestServlet(ClientV1RestServlet):
diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py
index 80e9939b79..6045e86f34 100644
--- a/synapse/rest/client/v1/pusher.py
+++ b/synapse/rest/client/v1/pusher.py
@@ -19,7 +19,7 @@ from synapse.api.errors import SynapseError, Codes
 from synapse.push import PusherConfigException
 from .base import ClientV1RestServlet, client_path_pattern
-import json
+import simplejson as json
 class PusherRestServlet(ClientV1RestServlet):
diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py
index c0423c2d45..d3399c446b 100644
--- a/synapse/rest/client/v1/register.py
+++ b/synapse/rest/client/v1/register.py
@@ -25,7 +25,7 @@ from synapse.util.async import run_on_reactor
 from hashlib import sha1
 import hmac
-import json
+import simplejson as json
 import logging
 import urllib
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index 410f19ccf6..0346afb1b4 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -23,7 +23,7 @@ from synapse.api.constants import EventTypes, Membership
 from synapse.types import UserID, RoomID, RoomAlias
 from synapse.events.utils import serialize_event
-import json
+import simplejson as json
 import logging
 import urllib
diff --git a/synapse/rest/client/v2_alpha/filter.py b/synapse/rest/client/v2_alpha/filter.py
index 6ddc495d23..703250cea8 100644
--- a/synapse/rest/client/v2_alpha/filter.py
+++ b/synapse/rest/client/v2_alpha/filter.py
@@ -21,7 +21,7 @@ from synapse.types import UserID
 from ._base import client_v2_pattern
-import json
+import simplejson as json
 import logging
diff --git a/synapse/rest/media/v0/content_repository.py b/synapse/rest/media/v0/content_repository.py
index 22e26e3cd5..e77a20fb2e 100644
--- a/synapse/rest/media/v0/content_repository.py
+++ b/synapse/rest/media/v0/content_repository.py
@@ -25,7 +25,7 @@ from twisted.web import server, resource
 from twisted.internet import defer
 import base64
-import json
+import simplejson as json
 import logging
 import os
 import re
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index a63c59a8a2..924ea89035 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -44,7 +44,7 @@ from syutil.jsonutil import
encode_canonical_json from synapse.crypto.event_signing import compute_event_reference_hash -import json +import simplejson as json import logging import os diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 3e1ab0a159..b74e74ac91 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -23,7 +23,7 @@ from synapse.util.logcontext import PreserveLoggingContext, LoggingContext from twisted.internet import defer import collections -import json +import simplejson as json import sys import time diff --git a/synapse/storage/filtering.py b/synapse/storage/filtering.py index e86eeced45..457a11fd02 100644 --- a/synapse/storage/filtering.py +++ b/synapse/storage/filtering.py @@ -17,7 +17,7 @@ from twisted.internet import defer from ._base import SQLBaseStore -import json +import simplejson as json class FilteringStore(SQLBaseStore): diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 620de71398..ae46b39cc1 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -20,7 +20,7 @@ from twisted.internet import defer import logging import copy -import json +import simplejson as json logger = logging.getLogger(__name__) -- cgit 1.5.1 From 5025305fb250c358236584cae06bf0855f02b0a4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 15:57:42 +0000 Subject: Rate limit retries when fetching server keys. --- synapse/crypto/keyring.py | 126 ++++++++++++++++++++++++++-------------------- 1 file changed, 71 insertions(+), 55 deletions(-) (limited to 'synapse/crypto') diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 3fb99f7125..3250cff595 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -87,69 +87,85 @@ class Keyring(object): return # Try to fetch the key from the remote server. - # TODO(markjh): Ratelimit requests to a given server. - (response, tls_certificate) = yield fetch_server_key( - server_name, self.hs.tls_context_factory + retry_last_ts, retry_interval = (0, 0) + retry_timings = yield self.store.get_destination_retry_timings( + server_name ) + if retry_timings: + retry_last_ts, retry_interval = ( + retry_timings.retry_last_ts, retry_timings.retry_interval + ) + if retry_last_ts + retry_interval > int(self.clock.time_msec()): + logger.info("%s not ready for retry", server_name) + raise ValueError("No verification key found for given key ids") - # Check the response. - - x509_certificate_bytes = crypto.dump_certificate( - crypto.FILETYPE_ASN1, tls_certificate - ) - - if ("signatures" not in response - or server_name not in response["signatures"]): - raise ValueError("Key response not signed by remote server") - - if "tls_certificate" not in response: - raise ValueError("Key response missing TLS certificate") + try: + (response, tls_certificate) = yield fetch_server_key( + server_name, self.hs.tls_context_factory + ) - tls_certificate_b64 = response["tls_certificate"] + # Check the response. 
- if encode_base64(x509_certificate_bytes) != tls_certificate_b64: - raise ValueError("TLS certificate doesn't match") + x509_certificate_bytes = crypto.dump_certificate( + crypto.FILETYPE_ASN1, tls_certificate + ) - verify_keys = {} - for key_id, key_base64 in response["verify_keys"].items(): - if is_signing_algorithm_supported(key_id): - key_bytes = decode_base64(key_base64) - verify_key = decode_verify_key_bytes(key_id, key_bytes) - verify_keys[key_id] = verify_key + if ("signatures" not in response + or server_name not in response["signatures"]): + raise ValueError("Key response not signed by remote server") + + if "tls_certificate" not in response: + raise ValueError("Key response missing TLS certificate") + + tls_certificate_b64 = response["tls_certificate"] + + if encode_base64(x509_certificate_bytes) != tls_certificate_b64: + raise ValueError("TLS certificate doesn't match") + + verify_keys = {} + for key_id, key_base64 in response["verify_keys"].items(): + if is_signing_algorithm_supported(key_id): + key_bytes = decode_base64(key_base64) + verify_key = decode_verify_key_bytes(key_id, key_bytes) + verify_keys[key_id] = verify_key + + for key_id in response["signatures"][server_name]: + if key_id not in response["verify_keys"]: + raise ValueError( + "Key response must include verification keys for all" + " signatures" + ) + if key_id in verify_keys: + verify_signed_json( + response, + server_name, + verify_keys[key_id] + ) + + # Cache the result in the datastore. + + time_now_ms = self.clock.time_msec() + + yield self.store.store_server_certificate( + server_name, + server_name, + time_now_ms, + tls_certificate, + ) - for key_id in response["signatures"][server_name]: - if key_id not in response["verify_keys"]: - raise ValueError( - "Key response must include verification keys for all" - " signatures" - ) - if key_id in verify_keys: - verify_signed_json( - response, - server_name, - verify_keys[key_id] + for key_id, key in verify_keys.items(): + yield self.store.store_server_verify_key( + server_name, server_name, time_now_ms, key ) - # Cache the result in the datastore. - - time_now_ms = self.clock.time_msec() - - yield self.store.store_server_certificate( - server_name, - server_name, - time_now_ms, - tls_certificate, - ) - - for key_id, key in verify_keys.items(): - yield self.store.store_server_verify_key( - server_name, server_name, time_now_ms, key - ) + for key_id in key_ids: + if key_id in verify_keys: + defer.returnValue(verify_keys[key_id]) + return - for key_id in key_ids: - if key_id in verify_keys: - defer.returnValue(verify_keys[key_id]) - return + raise ValueError("No verification key found for given key ids") - raise ValueError("No verification key found for given key ids") + except: + self.set_retrying(server_name, retry_interval) + raise -- cgit 1.5.1 From 2b8f1a956c6a1d767a3b60e84e7d0afe5857fb0d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 17:20:56 +0000 Subject: Add per server retry limiting. 
Factor out the pre destination retry logic from TransactionQueue so it can be reused in both get_pdu and crypto.keyring --- synapse/crypto/keyring.py | 22 ++--- synapse/federation/federation_client.py | 36 ++++--- synapse/federation/transaction_queue.py | 161 +++++++++++++------------------- synapse/util/retryutils.py | 108 +++++++++++++++++++++ 4 files changed, 205 insertions(+), 122 deletions(-) create mode 100644 synapse/util/retryutils.py (limited to 'synapse/crypto') diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 3250cff595..ea00c830c0 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -22,6 +22,8 @@ from syutil.crypto.signing_key import ( from syutil.base64util import decode_base64, encode_base64 from synapse.api.errors import SynapseError, Codes +from synapse.util.retryutils import get_retry_limiter + from OpenSSL import crypto import logging @@ -88,19 +90,13 @@ class Keyring(object): # Try to fetch the key from the remote server. - retry_last_ts, retry_interval = (0, 0) - retry_timings = yield self.store.get_destination_retry_timings( - server_name + limiter = yield get_retry_limiter( + server_name, + self.clock, + self.store, ) - if retry_timings: - retry_last_ts, retry_interval = ( - retry_timings.retry_last_ts, retry_timings.retry_interval - ) - if retry_last_ts + retry_interval > int(self.clock.time_msec()): - logger.info("%s not ready for retry", server_name) - raise ValueError("No verification key found for given key ids") - try: + with limiter: (response, tls_certificate) = yield fetch_server_key( server_name, self.hs.tls_context_factory ) @@ -165,7 +161,3 @@ class Keyring(object): return raise ValueError("No verification key found for given key ids") - - except: - self.set_retrying(server_name, retry_interval) - raise diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 70c9a6f46b..c5b0274c2f 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -23,6 +23,8 @@ from synapse.api.errors import CodeMessageException from synapse.util.logutils import log_function from synapse.events import FrozenEvent +from synapse.util.retryutils import get_retry_limiter, NotRetryingDestination + import logging @@ -163,24 +165,34 @@ class FederationClient(FederationBase): pdu = None for destination in destinations: try: - transaction_data = yield self.transport_layer.get_event( - destination, event_id + limiter = yield get_retry_limiter( + destination, + self._clock, + self.store, ) - logger.debug("transaction_data %r", transaction_data) + with limiter: + transaction_data = yield self.transport_layer.get_event( + destination, event_id + ) - pdu_list = [ - self.event_from_pdu_json(p, outlier=outlier) - for p in transaction_data["pdus"] - ] + logger.debug("transaction_data %r", transaction_data) - if pdu_list: - pdu = pdu_list[0] + pdu_list = [ + self.event_from_pdu_json(p, outlier=outlier) + for p in transaction_data["pdus"] + ] - # Check signatures are correct. - pdu = yield self._check_sigs_and_hash(pdu) + if pdu_list: + pdu = pdu_list[0] - break + # Check signatures are correct. 
+ pdu = yield self._check_sigs_and_hash(pdu) + + break + except NotRetryingDestination as e: + logger.info(e.message) + continue except CodeMessageException: raise except Exception as e: diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index bb20f2ebab..7d02afe163 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -22,6 +22,9 @@ from .units import Transaction from synapse.api.errors import HttpResponseException from synapse.util.logutils import log_function from synapse.util.logcontext import PreserveLoggingContext +from synapse.util.retryutils import ( + get_retry_limiter, NotRetryingDestination, +) import logging @@ -138,25 +141,6 @@ class TransactionQueue(object): @defer.inlineCallbacks @log_function def _attempt_new_transaction(self, destination): - - (retry_last_ts, retry_interval) = (0, 0) - retry_timings = yield self.store.get_destination_retry_timings( - destination - ) - if retry_timings: - (retry_last_ts, retry_interval) = ( - retry_timings.retry_last_ts, retry_timings.retry_interval - ) - if retry_last_ts + retry_interval > int(self._clock.time_msec()): - logger.info( - "TX [%s] not ready for retry yet - " - "dropping transaction for now", - destination, - ) - return - else: - logger.info("TX [%s] is ready for retry", destination) - if destination in self.pending_transactions: # XXX: pending_transactions can get stuck on by a never-ending # request at which point pending_pdus_by_dest just keeps growing. @@ -204,77 +188,79 @@ class TransactionQueue(object): ] try: - self.pending_transactions[destination] = 1 - - logger.debug("TX [%s] Persisting transaction...", destination) - - transaction = Transaction.create_new( - origin_server_ts=int(self._clock.time_msec()), - transaction_id=str(self._next_txn_id), - origin=self.server_name, - destination=destination, - pdus=pdus, - edus=edus, - pdu_failures=failures, + limiter = yield get_retry_limiter( + destination, + self._clock, + self.store, ) - self._next_txn_id += 1 + with limiter: + self.pending_transactions[destination] = 1 - yield self.transaction_actions.prepare_to_send(transaction) + logger.debug("TX [%s] Persisting transaction...", destination) - logger.debug("TX [%s] Persisted transaction", destination) - logger.info( - "TX [%s] Sending transaction [%s]", - destination, - transaction.transaction_id, - ) + transaction = Transaction.create_new( + origin_server_ts=int(self._clock.time_msec()), + transaction_id=str(self._next_txn_id), + origin=self.server_name, + destination=destination, + pdus=pdus, + edus=edus, + pdu_failures=failures, + ) + + self._next_txn_id += 1 - # Actually send the transaction - - # FIXME (erikj): This is a bit of a hack to make the Pdu age - # keys work - def json_data_cb(): - data = transaction.get_dict() - now = int(self._clock.time_msec()) - if "pdus" in data: - for p in data["pdus"]: - if "age_ts" in p: - unsigned = p.setdefault("unsigned", {}) - unsigned["age"] = now - int(p["age_ts"]) - del p["age_ts"] - return data - - try: - response = yield self.transport_layer.send_transaction( - transaction, json_data_cb + yield self.transaction_actions.prepare_to_send(transaction) + + logger.debug("TX [%s] Persisted transaction", destination) + logger.info( + "TX [%s] Sending transaction [%s]", + destination, + transaction.transaction_id, ) - code = 200 - except HttpResponseException as e: - code = e.code - response = e.response - logger.info("TX [%s] got %d response", destination, code) + # Actually send the 
transaction + + # FIXME (erikj): This is a bit of a hack to make the Pdu age + # keys work + def json_data_cb(): + data = transaction.get_dict() + now = int(self._clock.time_msec()) + if "pdus" in data: + for p in data["pdus"]: + if "age_ts" in p: + unsigned = p.setdefault("unsigned", {}) + unsigned["age"] = now - int(p["age_ts"]) + del p["age_ts"] + return data + + try: + response = yield self.transport_layer.send_transaction( + transaction, json_data_cb + ) + code = 200 + except HttpResponseException as e: + code = e.code + response = e.response - logger.debug("TX [%s] Sent transaction", destination) - logger.debug("TX [%s] Marking as delivered...", destination) + logger.info("TX [%s] got %d response", destination, code) - yield self.transaction_actions.delivered( - transaction, code, response - ) + logger.debug("TX [%s] Sent transaction", destination) + logger.debug("TX [%s] Marking as delivered...", destination) + + yield self.transaction_actions.delivered( + transaction, code, response + ) + + logger.debug("TX [%s] Marked as delivered", destination) - logger.debug("TX [%s] Marked as delivered", destination) logger.debug("TX [%s] Yielding to callbacks...", destination) for deferred in deferreds: if code == 200: - if retry_last_ts: - # this host is alive! reset retry schedule - yield self.store.set_destination_retry_timings( - destination, 0, 0 - ) deferred.callback(None) else: - self.set_retrying(destination, retry_interval) deferred.errback(RuntimeError("Got status %d" % code)) # Ensures we don't continue until all callbacks on that @@ -285,6 +271,12 @@ class TransactionQueue(object): pass logger.debug("TX [%s] Yielded to callbacks", destination) + except NotRetryingDestination: + logger.info( + "TX [%s] not ready for retry yet - " + "dropping transaction for now", + destination, + ) except RuntimeError as e: # We capture this here as there as nothing actually listens # for this finishing functions deferred. @@ -302,8 +294,6 @@ class TransactionQueue(object): e, ) - self.set_retrying(destination, retry_interval) - for deferred in deferreds: if not deferred.called: deferred.errback(e) @@ -314,22 +304,3 @@ class TransactionQueue(object): # Check to see if there is anything else to send. self._attempt_new_transaction(destination) - - @defer.inlineCallbacks - def set_retrying(self, destination, retry_interval): - # track that this destination is having problems and we should - # give it a chance to recover before trying it again - - if retry_interval: - retry_interval *= 2 - # plateau at hourly retries for now - if retry_interval >= 60 * 60 * 1000: - retry_interval = 60 * 60 * 1000 - else: - retry_interval = 2000 # try again at first after 2 seconds - - yield self.store.set_destination_retry_timings( - destination, - int(self._clock.time_msec()), - retry_interval - ) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py new file mode 100644 index 0000000000..bba524ebd8 --- /dev/null +++ b/synapse/util/retryutils.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from twisted.internet import defer + +import logging + + +logger = logging.getLogger(__name__) + + +class NotRetryingDestination(Exception): + def __init__(self, retry_last_ts, retry_interval, destination): + msg = "Not retrying server %s." % (destination,) + super(NotRetryingDestination, self).__init__(msg) + + self.retry_last_ts = retry_last_ts + self.retry_interval = retry_interval + self.destination = destination + + +@defer.inlineCallbacks +def get_retry_limiter(destination, clock, store, **kwargs): + retry_last_ts, retry_interval = (0, 0) + + retry_timings = yield store.get_destination_retry_timings( + destination + ) + + if retry_timings: + retry_last_ts, retry_interval = ( + retry_timings.retry_last_ts, retry_timings.retry_interval + ) + + now = int(clock.time_msec()) + + if retry_last_ts + retry_interval > now: + raise NotRetryingDestination( + retry_last_ts=retry_last_ts, + retry_interval=retry_interval, + destination=destination, + ) + + defer.returnValue( + RetryDestinationLimiter( + destination, + clock, + store, + retry_interval, + **kwargs + ) + ) + + +class RetryDestinationLimiter(object): + def __init__(self, destination, clock, store, retry_interval, + min_retry_interval=20000, max_retry_interval=60 * 60 * 1000, + multiplier_retry_interval=2): + self.clock = clock + self.store = store + self.destination = destination + + self.retry_interval = retry_interval + self.min_retry_interval = min_retry_interval + self.max_retry_interval = max_retry_interval + self.multiplier_retry_interval = multiplier_retry_interval + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_val, exc_tb): + def err(self, failure): + logger.exception( + "Failed to store set_destination_retry_timings", + failure.value + ) + + if exc_type is None and exc_val is None and exc_tb is None: + # We connected successfully. + retry_last_ts = 0 + self.retry_interval = 0 + else: + # We couldn't connect. + if self.retry_interval: + self.retry_interval *= self.multiplier_retry_interval + + if self.retry_interval >= self.max_retry_interval: + self.retry_interval = self.max_retry_interval + else: + self.retry_interval = self.min_retry_interval + + retry_last_ts = int(self._clock.time_msec()), + + self.store.set_destination_retry_timings( + self.destination, retry_last_ts, self.retry_interval + ).addErrback(err) -- cgit 1.5.1 From 9371019133bf16cec163d58fe69aa701c8ca5305 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 18:13:34 +0000 Subject: Try to only back off if we think we failed to connect to the remote --- synapse/crypto/keyring.py | 108 ++++++++++++++++---------------- synapse/federation/transaction_queue.py | 66 +++++++++---------- synapse/util/retryutils.py | 10 ++- 3 files changed, 95 insertions(+), 89 deletions(-) (limited to 'synapse/crypto') diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index ea00c830c0..828aced44a 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -101,63 +101,63 @@ class Keyring(object): server_name, self.hs.tls_context_factory ) - # Check the response. + # Check the response. 
- x509_certificate_bytes = crypto.dump_certificate( - crypto.FILETYPE_ASN1, tls_certificate - ) + x509_certificate_bytes = crypto.dump_certificate( + crypto.FILETYPE_ASN1, tls_certificate + ) - if ("signatures" not in response - or server_name not in response["signatures"]): - raise ValueError("Key response not signed by remote server") - - if "tls_certificate" not in response: - raise ValueError("Key response missing TLS certificate") - - tls_certificate_b64 = response["tls_certificate"] - - if encode_base64(x509_certificate_bytes) != tls_certificate_b64: - raise ValueError("TLS certificate doesn't match") - - verify_keys = {} - for key_id, key_base64 in response["verify_keys"].items(): - if is_signing_algorithm_supported(key_id): - key_bytes = decode_base64(key_base64) - verify_key = decode_verify_key_bytes(key_id, key_bytes) - verify_keys[key_id] = verify_key - - for key_id in response["signatures"][server_name]: - if key_id not in response["verify_keys"]: - raise ValueError( - "Key response must include verification keys for all" - " signatures" - ) - if key_id in verify_keys: - verify_signed_json( - response, - server_name, - verify_keys[key_id] - ) - - # Cache the result in the datastore. - - time_now_ms = self.clock.time_msec() - - yield self.store.store_server_certificate( - server_name, - server_name, - time_now_ms, - tls_certificate, - ) + if ("signatures" not in response + or server_name not in response["signatures"]): + raise ValueError("Key response not signed by remote server") + + if "tls_certificate" not in response: + raise ValueError("Key response missing TLS certificate") - for key_id, key in verify_keys.items(): - yield self.store.store_server_verify_key( - server_name, server_name, time_now_ms, key + tls_certificate_b64 = response["tls_certificate"] + + if encode_base64(x509_certificate_bytes) != tls_certificate_b64: + raise ValueError("TLS certificate doesn't match") + + verify_keys = {} + for key_id, key_base64 in response["verify_keys"].items(): + if is_signing_algorithm_supported(key_id): + key_bytes = decode_base64(key_base64) + verify_key = decode_verify_key_bytes(key_id, key_bytes) + verify_keys[key_id] = verify_key + + for key_id in response["signatures"][server_name]: + if key_id not in response["verify_keys"]: + raise ValueError( + "Key response must include verification keys for all" + " signatures" + ) + if key_id in verify_keys: + verify_signed_json( + response, + server_name, + verify_keys[key_id] ) - for key_id in key_ids: - if key_id in verify_keys: - defer.returnValue(verify_keys[key_id]) - return + # Cache the result in the datastore. 
+ + time_now_ms = self.clock.time_msec() + + yield self.store.store_server_certificate( + server_name, + server_name, + time_now_ms, + tls_certificate, + ) + + for key_id, key in verify_keys.items(): + yield self.store.store_server_verify_key( + server_name, server_name, time_now_ms, key + ) + + for key_id in key_ids: + if key_id in verify_keys: + defer.returnValue(verify_keys[key_id]) + return - raise ValueError("No verification key found for given key ids") + raise ValueError("No verification key found for given key ids") diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 7d02afe163..dc0f30cb64 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -167,15 +167,6 @@ class TransactionQueue(object): logger.info("TX [%s] Nothing to send", destination) return - logger.debug( - "TX [%s] Attempting new transaction" - " (pdus: %d, edus: %d, failures: %d)", - destination, - len(pending_pdus), - len(pending_edus), - len(pending_failures) - ) - # Sort based on the order field pending_pdus.sort(key=lambda t: t[2]) @@ -194,32 +185,41 @@ class TransactionQueue(object): self.store, ) - with limiter: - self.pending_transactions[destination] = 1 + logger.debug( + "TX [%s] Attempting new transaction" + " (pdus: %d, edus: %d, failures: %d)", + destination, + len(pending_pdus), + len(pending_edus), + len(pending_failures) + ) + + self.pending_transactions[destination] = 1 - logger.debug("TX [%s] Persisting transaction...", destination) + logger.debug("TX [%s] Persisting transaction...", destination) - transaction = Transaction.create_new( - origin_server_ts=int(self._clock.time_msec()), - transaction_id=str(self._next_txn_id), - origin=self.server_name, - destination=destination, - pdus=pdus, - edus=edus, - pdu_failures=failures, - ) + transaction = Transaction.create_new( + origin_server_ts=int(self._clock.time_msec()), + transaction_id=str(self._next_txn_id), + origin=self.server_name, + destination=destination, + pdus=pdus, + edus=edus, + pdu_failures=failures, + ) - self._next_txn_id += 1 + self._next_txn_id += 1 - yield self.transaction_actions.prepare_to_send(transaction) + yield self.transaction_actions.prepare_to_send(transaction) - logger.debug("TX [%s] Persisted transaction", destination) - logger.info( - "TX [%s] Sending transaction [%s]", - destination, - transaction.transaction_id, - ) + logger.debug("TX [%s] Persisted transaction", destination) + logger.info( + "TX [%s] Sending transaction [%s]", + destination, + transaction.transaction_id, + ) + with limiter: # Actually send the transaction # FIXME (erikj): This is a bit of a hack to make the Pdu age @@ -249,11 +249,11 @@ class TransactionQueue(object): logger.debug("TX [%s] Sent transaction", destination) logger.debug("TX [%s] Marking as delivered...", destination) - yield self.transaction_actions.delivered( - transaction, code, response - ) + yield self.transaction_actions.delivered( + transaction, code, response + ) - logger.debug("TX [%s] Marked as delivered", destination) + logger.debug("TX [%s] Marked as delivered", destination) logger.debug("TX [%s] Yielding to callbacks...", destination) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index 888b7ef2e9..d190100c8c 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -15,6 +15,8 @@ from twisted.internet import defer +from synapse.api.errors import CodeMessageException + import logging @@ -67,7 +69,7 @@ def get_retry_limiter(destination, clock, 
store, **kwargs): class RetryDestinationLimiter(object): def __init__(self, destination, clock, store, retry_interval, min_retry_interval=20000, max_retry_interval=60 * 60 * 1000, - multiplier_retry_interval=2): + multiplier_retry_interval=2,): self.clock = clock self.store = store self.destination = destination @@ -87,7 +89,11 @@ class RetryDestinationLimiter(object): failure.value ) - if exc_type is None and exc_val is None and exc_tb is None: + valid_err_code = False + if exc_type is CodeMessageException: + valid_err_code = 0 <= exc_val.code < 500 + + if exc_type is None or valid_err_code: # We connected successfully. if not self.retry_interval: return -- cgit 1.5.1 From 5b5c7a28d675f2c9a055153089aab96cac28c523 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 5 Mar 2015 17:09:13 +0000 Subject: Log error message when we fail to fetch remote server keys --- synapse/crypto/keyring.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) (limited to 'synapse/crypto') diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 828aced44a..f4db7b8a05 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -50,18 +50,27 @@ class Keyring(object): ) try: verify_key = yield self.get_server_verify_key(server_name, key_ids) - except IOError: + except IOError as e: + logger.warn( + "Got IOError when downloading keys for %s: %s %s", + server_name, type(e).__name__, str(e.message), + ) raise SynapseError( 502, "Error downloading keys for %s" % (server_name,), Codes.UNAUTHORIZED, ) - except: + except Exception as e: + logger.warn( + "Got Exception when downloading keys for %s: %s %s", + server_name, type(e).__name__, str(e.message), + ) raise SynapseError( 401, "No key for %s with id %s" % (server_name, key_ids), Codes.UNAUTHORIZED, ) + try: verify_signed_json(json_object, server_name, verify_key) except: -- cgit 1.5.1 From 3ce8540484a3cc29ce2970ebf6608b6fd3359931 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 6 Mar 2015 11:34:06 +0000 Subject: Don't look for an TLS private key if we have set --no-tls --- synapse/config/server.py | 3 --- synapse/config/tls.py | 17 +++++++++++++---- synapse/crypto/context_factory.py | 5 ++++- 3 files changed, 17 insertions(+), 8 deletions(-) (limited to 'synapse/crypto') diff --git a/synapse/config/server.py b/synapse/config/server.py index 4e4892d40b..b042d4eed9 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -30,7 +30,6 @@ class ServerConfig(Config): self.pid_file = self.abspath(args.pid_file) self.webclient = True self.manhole = args.manhole - self.no_tls = args.no_tls self.soft_file_limit = args.soft_file_limit if not args.content_addr: @@ -76,8 +75,6 @@ class ServerConfig(Config): server_group.add_argument("--content-addr", default=None, help="The host and scheme to use for the " "content repository") - server_group.add_argument("--no-tls", action='store_true', - help="Don't bind to the https port.") server_group.add_argument("--soft-file-limit", type=int, default=0, help="Set the soft limit on the number of " "file descriptors synapse can use. " diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 384b29e7ba..a45bf6d521 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ._base import Config
+from ._base import Config, ConfigError
 from OpenSSL import crypto
 import subprocess
@@ -28,9 +28,16 @@ class TlsConfig(Config):
         self.tls_certificate = self.read_tls_certificate(
             args.tls_certificate_path
         )
-        self.tls_private_key = self.read_tls_private_key(
-            args.tls_private_key_path
-        )
+
+        self.no_tls = args.no_tls
+
+        if self.no_tls:
+            self.tls_private_key = None
+        else:
+            self.tls_private_key = self.read_tls_private_key(
+                args.tls_private_key_path
+            )
+
         self.tls_dh_params_path = self.check_file(
             args.tls_dh_params_path, "tls_dh_params"
         )
@@ -45,6 +52,8 @@ class TlsConfig(Config):
                                help="PEM encoded private key for TLS")
         tls_group.add_argument("--tls-dh-params-path",
                                help="PEM dh parameters for ephemeral keys")
+        tls_group.add_argument("--no-tls", action='store_true',
+                               help="Don't bind to the https port.")
 
     def read_tls_certificate(self, cert_path):
         cert_pem = self.read_file(cert_path, "tls_certificate")
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index 24d4abf3e9..2f8618a0df 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -38,7 +38,10 @@ class ServerContextFactory(ssl.ContextFactory):
             logger.exception("Failed to enable eliptic curve for TLS")
         context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
         context.use_certificate(config.tls_certificate)
-        context.use_privatekey(config.tls_private_key)
+
+        if not config.no_tls:
+            context.use_privatekey(config.tls_private_key)
+
         context.load_tmp_dh(config.tls_dh_params_path)
         context.set_cipher_list("!ADH:HIGH+kEDH:!AECDH:HIGH+kEECDH")
-- cgit 1.5.1
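
The thread running through the middle of this series is the destination back-off logic that ends up in synapse/util/retryutils.py: look up a destination's retry timings, refuse to contact a server that is still inside its back-off window, and widen or reset that window depending on how the attempt went. The snippet below is an illustrative sketch of that calling pattern, not part of the patches themselves; the names get_retry_limiter and NotRetryingDestination come from the diffs above, while fetch_with_backoff and do_request are hypothetical stand-ins for a real caller such as get_server_verify_key or get_pdu.

from twisted.internet import defer

from synapse.util.retryutils import get_retry_limiter, NotRetryingDestination


@defer.inlineCallbacks
def fetch_with_backoff(destination, clock, store, do_request):
    # Hypothetical caller sketch. get_retry_limiter raises
    # NotRetryingDestination while the destination is still inside its
    # back-off window, so callers typically catch it and skip the server.
    limiter = yield get_retry_limiter(destination, clock, store)

    with limiter:
        # If this raises, the limiter's __exit__ grows the destination's
        # retry_interval (up to the hourly cap stored in the database);
        # a clean exit resets the interval to zero.
        result = yield do_request(destination)

    defer.returnValue(result)

Callers that want to move on rather than fail outright, as get_pdu does in the federation_client diff above, wrap the whole attempt in try/except NotRetryingDestination and continue with the next destination.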