Diffstat (limited to 'synapse')
-rw-r--r--  synapse/api/auth.py                       | 21
-rwxr-xr-x  synapse/app/homeserver.py                 |  2
-rw-r--r--  synapse/config/_base.py                   |  5
-rw-r--r--  synapse/config/logger.py                  |  4
-rw-r--r--  synapse/crypto/keyclient.py               |  4
-rw-r--r--  synapse/federation/federation_server.py  | 11
-rw-r--r--  synapse/federation/transaction_queue.py   |  9
-rw-r--r--  synapse/handlers/federation.py            |  7
-rw-r--r--  synapse/handlers/message.py               | 32
-rw-r--r--  synapse/handlers/presence.py              | 12
-rw-r--r--  synapse/handlers/register.py              | 13
-rw-r--r--  synapse/handlers/sync.py                  | 11
-rw-r--r--  synapse/push/__init__.py                  | 15
-rw-r--r--  synapse/push/baserules.py                 | 11
-rw-r--r--  synapse/push/rulekinds.py                 | 12
-rw-r--r--  synapse/python_dependencies.py            | 12
-rw-r--r--  synapse/rest/client/v1/directory.py       |  2
-rw-r--r--  synapse/rest/client/v1/push_rule.py       | 18
-rw-r--r--  synapse/rest/client/v1/pusher.py          |  4
-rw-r--r--  synapse/rest/client/v2_alpha/sync.py      |  2
-rw-r--r--  synapse/rest/media/v1/upload_resource.py  | 11
-rw-r--r--  synapse/state.py                          |  2
-rw-r--r--  synapse/storage/_base.py                  | 69
-rw-r--r--  synapse/storage/push_rule.py              |  4
24 files changed, 214 insertions, 79 deletions
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 310a428066..b176db8ce1 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -89,12 +89,19 @@ class Auth(object):
             raise

     @defer.inlineCallbacks
-    def check_joined_room(self, room_id, user_id):
-        member = yield self.state.get_current_state(
-            room_id=room_id,
-            event_type=EventTypes.Member,
-            state_key=user_id
-        )
+    def check_joined_room(self, room_id, user_id, current_state=None):
+        if current_state:
+            member = current_state.get(
+                (EventTypes.Member, user_id),
+                None
+            )
+        else:
+            member = yield self.state.get_current_state(
+                room_id=room_id,
+                event_type=EventTypes.Member,
+                state_key=user_id
+            )
+
         self._check_joined_room(member, user_id, room_id)
         defer.returnValue(member)

@@ -102,7 +109,7 @@ class Auth(object):
     def check_host_in_room(self, room_id, host):
         curr_state = yield self.state.get_current_state(room_id)

-        for event in curr_state:
+        for event in curr_state.values():
             if event.type == EventTypes.Member:
                 try:
                     if UserID.from_string(event.state_key).domain != host:
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 343ecea0fd..ff29d785db 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -139,7 +139,7 @@ class SynapseHomeServer(HomeServer):
         logger.info("Attaching %s to path %s", resource, full_path)
         last_resource = self.root_resource
         for path_seg in full_path.split('/')[1:-1]:
-            if not path_seg in last_resource.listNames():
+            if path_seg not in last_resource.listNames():
                 # resource doesn't exist, so make a "dummy resource"
                 child_resource = Resource()
                 last_resource.putChild(path_seg, child_resource)
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index dfc115d8e8..9b0f8c3c32 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -50,8 +50,9 @@ class Config(object):
         )
         return cls.abspath(file_path)

-    @staticmethod
-    def ensure_directory(dir_path):
+    @classmethod
+    def ensure_directory(cls, dir_path):
+        dir_path = cls.abspath(dir_path)
         if not os.path.exists(dir_path):
             os.makedirs(dir_path)
         if not os.path.isdir(dir_path):
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index f9568ebd21..63c8e36930 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -18,6 +18,7 @@ from synapse.util.logcontext import LoggingContextFilter
 from twisted.python.log import PythonLoggingObserver
 import logging
 import logging.config
+import yaml


 class LoggingConfig(Config):
@@ -79,7 +80,8 @@ class LoggingConfig(Config):
             logger.addHandler(handler)
             logger.info("Test")
         else:
-            logging.config.fileConfig(self.log_config)
+            with open(self.log_config, 'r') as f:
+                logging.config.dictConfig(yaml.load(f))

         observer = PythonLoggingObserver()
         observer.start()
diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py
index cdb6279764..cd12349f67 100644
--- a/synapse/crypto/keyclient.py
+++ b/synapse/crypto/keyclient.py
@@ -75,7 +75,7 @@ class SynapseKeyClientProtocol(HTTPClient):

     def handleStatus(self, version, status, message):
         if status != b"200":
-            #logger.info("Non-200 response from %s: %s %s",
+            # logger.info("Non-200 response from %s: %s %s",
             #    self.transport.getHost(), status, message)
             self.transport.abortConnection()

@@ -83,7 +83,7 @@ class SynapseKeyClientProtocol(HTTPClient):
         try:
             json_response = json.loads(response_body_bytes)
         except ValueError:
-            #logger.info("Invalid JSON response from %s",
+            # logger.info("Invalid JSON response from %s",
             #    self.transport.getHost())
             self.transport.abortConnection()
             return
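
The auth.py hunk above lets a caller that already holds a room's current state hand it straight to check_joined_room() instead of triggering a second state lookup; the handler changes later in this commit rely on get_current_state() now resolving to a dict keyed by (event type, state key) tuples. A minimal sketch of the calling pattern, assuming only what the diff shows (the snapshot_room helper and its variable names are illustrative, not part of the codebase):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def snapshot_room(auth, state_handler, room_id, user_id):
        # get_current_state() now resolves to {(event_type, state_key): event}
        current_state = yield state_handler.get_current_state(room_id=room_id)

        # Reuse the state we already have rather than letting the auth check
        # fetch it again; this raises if the user is not joined.
        member = yield auth.check_joined_room(
            room_id, user_id, current_state=current_state
        )

        # Per the hunk, the same membership event is reachable directly by
        # its key ("m.room.member" is the value of EventTypes.Member).
        assert member is current_state.get(("m.room.member", user_id))

        defer.returnValue(current_state)
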
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 4742ca9390..b23f72c7fa 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -25,6 +25,8 @@ from synapse.events import FrozenEvent

 from synapse.api.errors import FederationError, SynapseError

+from synapse.crypto.event_signing import compute_event_signature
+
 import logging
@@ -156,6 +158,15 @@ class FederationServer(FederationBase):
             auth_chain = yield self.store.get_auth_chain(
                 [pdu.event_id for pdu in pdus]
             )
+
+            for event in auth_chain:
+                event.signatures.update(
+                    compute_event_signature(
+                        event,
+                        self.hs.hostname,
+                        self.hs.config.signing_key[0]
+                    )
+                )
         else:
             raise NotImplementedError("Specify an event")
diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py
index f38aeba7cc..731019ad9f 100644
--- a/synapse/federation/transaction_queue.py
+++ b/synapse/federation/transaction_queue.py
@@ -157,15 +157,19 @@ class TransactionQueue(object):
         else:
             logger.info("TX [%s] is ready for retry", destination)

-        logger.info("TX [%s] _attempt_new_transaction", destination)
-
         if destination in self.pending_transactions:
             # XXX: pending_transactions can get stuck on by a never-ending
             # request at which point pending_pdus_by_dest just keeps growing.
             # we need application-layer timeouts of some flavour of these
             # requests
+            logger.info(
+                "TX [%s] Transaction already in progress",
+                destination
+            )
             return

+        logger.info("TX [%s] _attempt_new_transaction", destination)
+
         # list of (pending_pdu, deferred, order)
         pending_pdus = self.pending_pdus_by_dest.pop(destination, [])
         pending_edus = self.pending_edus_by_dest.pop(destination, [])
@@ -176,6 +180,7 @@ class TransactionQueue(object):
             destination, len(pending_pdus))

         if not pending_pdus and not pending_edus and not pending_failures:
+            logger.info("TX [%s] Nothing to send", destination)
             return

         logger.debug(
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index b13b7c7701..0f9c82fd06 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -860,9 +860,14 @@ class FederationHandler(BaseHandler):
             # Only do auth resolution if we have something new to say.
             # We can't rove an auth failure.
             do_resolution = False
+
+            provable = [
+                RejectedReason.NOT_ANCESTOR, RejectedReason.NOT_ANCESTOR,
+            ]
+
             for e_id in different_auth:
                 if e_id in have_events:
-                    if have_events[e_id] != RejectedReason.AUTH_ERROR:
+                    if have_events[e_id] in provable:
                         do_resolution = True
                         break
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 3f51f38f18..3355adefcf 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -35,6 +35,7 @@ class MessageHandler(BaseHandler):
     def __init__(self, hs):
         super(MessageHandler, self).__init__(hs)
         self.hs = hs
+        self.state = hs.get_state_handler()
         self.clock = hs.get_clock()
         self.validator = EventValidator()

@@ -225,7 +226,9 @@ class MessageHandler(BaseHandler):
         # TODO: This is duplicating logic from snapshot_all_rooms
         current_state = yield self.state_handler.get_current_state(room_id)
         now = self.clock.time_msec()
-        defer.returnValue([serialize_event(c, now) for c in current_state])
+        defer.returnValue(
+            [serialize_event(c, now) for c in current_state.values()]
+        )

     @defer.inlineCallbacks
     def snapshot_all_rooms(self, user_id=None, pagin_config=None,
@@ -313,7 +316,7 @@ class MessageHandler(BaseHandler):
                 )
                 d["state"] = [
                     serialize_event(c, time_now, as_client_event)
-                    for c in current_state
+                    for c in current_state.values()
                 ]
             except:
                 logger.exception("Failed to get snapshot")
@@ -329,7 +332,14 @@ class MessageHandler(BaseHandler):
     @defer.inlineCallbacks
     def room_initial_sync(self, user_id, room_id, pagin_config=None,
                           feedback=False):
-        yield self.auth.check_joined_room(room_id, user_id)
+        current_state = yield self.state.get_current_state(
+            room_id=room_id,
+        )
+
+        yield self.auth.check_joined_room(
+            room_id, user_id,
+            current_state=current_state
+        )

         # TODO(paul): I wish I was called with user objects not user_id
         #       strings...
@@ -337,13 +347,12 @@ class MessageHandler(BaseHandler):

         # TODO: These concurrently
         time_now = self.clock.time_msec()
-        state_tuples = yield self.state_handler.get_current_state(room_id)
-        state = [serialize_event(x, time_now) for x in state_tuples]
+        state = [
+            serialize_event(x, time_now)
+            for x in current_state.values()
+        ]

-        member_event = (yield self.store.get_room_member(
-            user_id=user_id,
-            room_id=room_id
-        ))
+        member_event = current_state.get((EventTypes.Member, user_id,))

         now_token = yield self.hs.get_event_sources().get_current_token()
@@ -360,7 +369,10 @@ class MessageHandler(BaseHandler):
         start_token = now_token.copy_and_replace("room_key", token[0])
         end_token = now_token.copy_and_replace("room_key", token[1])

-        room_members = yield self.store.get_room_members(room_id)
+        room_members = [
+            m for m in current_state.values()
+            if m.type == EventTypes.Member
+        ]

         presence_handler = self.hs.get_handlers().presence_handler
         presence = []
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index cd0798c2b0..59287010ed 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -457,9 +457,9 @@ class PresenceHandler(BaseHandler):
         if state is None:
             state = yield self.store.get_presence_state(user.localpart)
         else:
-#            statuscache = self._get_or_make_usercache(user)
-#            self._user_cachemap_latest_serial += 1
-#            statuscache.update(state, self._user_cachemap_latest_serial)
+            # statuscache = self._get_or_make_usercache(user)
+            # self._user_cachemap_latest_serial += 1
+            # statuscache.update(state, self._user_cachemap_latest_serial)
             pass

         yield self.push_update_to_local_and_remote(
@@ -658,7 +658,9 @@ class PresenceHandler(BaseHandler):
         observers = set(self._remote_recvmap.get(user, set()))
         if observers:
-            logger.debug(" | %d interested local observers %r", len(observers), observers)
+            logger.debug(
+                " | %d interested local observers %r", len(observers), observers
+            )

         rm_handler = self.homeserver.get_handlers().room_member_handler
         room_ids = yield rm_handler.get_rooms_for_user(user)
@@ -707,7 +709,7 @@ class PresenceHandler(BaseHandler):

         # TODO(paul) permissions checks

-        if not user in self._remote_sendmap:
+        if user not in self._remote_sendmap:
             self._remote_sendmap[user] = set()

         self._remote_sendmap[user].add(origin)
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 7ed0ce6299..516a936cee 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -110,17 +110,20 @@ class RegistrationHandler(BaseHandler):
         # do it here.
         try:
             auth_user = UserID.from_string(user_id)
-            identicon_resource = self.hs.get_resource_for_media_repository().getChildWithDefault("identicon", None)
-            upload_resource = self.hs.get_resource_for_media_repository().getChildWithDefault("upload", None)
+            media_repository = self.hs.get_resource_for_media_repository()
+            identicon_resource = media_repository.getChildWithDefault("identicon", None)
+            upload_resource = media_repository.getChildWithDefault("upload", None)
             identicon_bytes = identicon_resource.generate_identicon(user_id, 320, 320)
             content_uri = yield upload_resource.create_content(
                 "image/png", None, identicon_bytes, len(identicon_bytes), auth_user
             )
             profile_handler = self.hs.get_handlers().profile_handler
-            profile_handler.set_avatar_url(auth_user, auth_user, ("%s#auto" % content_uri))
+            profile_handler.set_avatar_url(
+                auth_user, auth_user, ("%s#auto" % (content_uri,))
+            )
         except NotImplementedError:
-            pass # make tests pass without messing around creating default avatars
-
+            pass  # make tests pass without messing around creating default avatars
+
         defer.returnValue((user_id, token))

     @defer.inlineCallbacks
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 439164ae39..7883bbd834 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -114,7 +114,7 @@ class SyncHandler(BaseHandler):
         if sync_config.gap:
             return self.incremental_sync_with_gap(sync_config, since_token)
         else:
-            #TODO(mjark): Handle gapless sync
+            # TODO(mjark): Handle gapless sync
             raise NotImplementedError()

     @defer.inlineCallbacks
@@ -175,9 +175,10 @@ class SyncHandler(BaseHandler):
             room_id, sync_config, now_token,
         )

-        current_state_events = yield self.state_handler.get_current_state(
+        current_state = yield self.state_handler.get_current_state(
             room_id
         )
+        current_state_events = current_state.values()

         defer.returnValue(RoomSyncResult(
             room_id=room_id,
@@ -347,9 +348,10 @@ class SyncHandler(BaseHandler):

         # TODO(mjark): This seems racy since this isn't being passed a
         # token to indicate what point in the stream this is
-        current_state_events = yield self.state_handler.get_current_state(
+        current_state = yield self.state_handler.get_current_state(
             room_id
         )
+        current_state_events = current_state.values()

         state_at_previous_sync = yield self.get_state_at_previous_sync(
             room_id, since_token=since_token
@@ -431,6 +433,7 @@ class SyncHandler(BaseHandler):
                 joined = True

         if joined:
-            state_delta = yield self.state_handler.get_current_state(room_id)
+            res = yield self.state_handler.get_current_state(room_id)
+            state_delta = res.values()

         defer.returnValue(state_delta)
diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py
index 07b5f0187c..418a348a58 100644
--- a/synapse/push/__init__.py
+++ b/synapse/push/__init__.py
@@ -140,18 +140,21 @@ class Pusher(object):
             lambda x: ('[%s%s]' % (x.group(1) and '^' or '',
                        re.sub(r'\\\-', '-', x.group(2)))), r)
         return r
-
+
     def _event_fulfills_condition(self, ev, condition, display_name, room_member_count):
         if condition['kind'] == 'event_match':
             if 'pattern' not in condition:
                 logger.warn("event_match condition with no pattern")
                 return False
             # XXX: optimisation: cache our pattern regexps
-            r = r'\b%s\b' % self._glob_to_regexp(condition['pattern'])
+            if condition['key'] == 'content.body':
+                r = r'\b%s\b' % self._glob_to_regexp(condition['pattern'])
+            else:
+                r = r'^%s$' % self._glob_to_regexp(condition['pattern'])
             val = _value_for_dotted_key(condition['key'], ev)
             if val is None:
                 return False
-            return re.match(r, val, flags=re.IGNORECASE) != None
+            return re.search(r, val, flags=re.IGNORECASE) is not None

         elif condition['kind'] == 'device':
             if 'profile_tag' not in condition:
@@ -167,8 +170,10 @@ class Pusher(object):
                 return False
             if not display_name:
                 return False
-            return re.match("\b%s\b" % re.escape(display_name),
-                            ev['content']['body'], flags=re.IGNORECASE) != None
+            return re.search(
+                "\b%s\b" % re.escape(display_name), ev['content']['body'],
+                flags=re.IGNORECASE
+            ) is not None

         elif condition['kind'] == 'room_member_count':
             if 'is' not in condition:
diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py
index 37878f1e0b..162d265f66 100644
--- a/synapse/push/baserules.py
+++ b/synapse/push/baserules.py
@@ -1,5 +1,6 @@
 from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP

+
 def list_with_base_rules(rawrules, user_name):
     ruleslist = []

@@ -9,9 +10,9 @@ def list_with_base_rules(rawrules, user_name):
         if r['priority_class'] < current_prio_class:
             while r['priority_class'] < current_prio_class:
                 ruleslist.extend(make_base_rules(
-                        user_name,
-                        PRIORITY_CLASS_INVERSE_MAP[current_prio_class])
-                    )
+                    user_name,
+                    PRIORITY_CLASS_INVERSE_MAP[current_prio_class]
+                ))
                 current_prio_class -= 1

         ruleslist.append(r)
@@ -19,8 +20,8 @@ def list_with_base_rules(rawrules, user_name):
     while current_prio_class > 0:
         ruleslist.extend(make_base_rules(
             user_name,
-            PRIORITY_CLASS_INVERSE_MAP[current_prio_class])
-        )
+            PRIORITY_CLASS_INVERSE_MAP[current_prio_class]
+        ))
         current_prio_class -= 1

     return ruleslist
diff --git a/synapse/push/rulekinds.py b/synapse/push/rulekinds.py
index 763bdee58e..660aa4e10e 100644
--- a/synapse/push/rulekinds.py
+++ b/synapse/push/rulekinds.py
@@ -1,8 +1,8 @@
 PRIORITY_CLASS_MAP = {
-        'underride': 1,
-        'sender': 2,
-        'room': 3,
-        'content': 4,
-        'override': 5,
-    }
+    'underride': 1,
+    'sender': 2,
+    'room': 3,
+    'content': 4,
+    'override': 5,
+}
 PRIORITY_CLASS_INVERSE_MAP = {v: k for k, v in PRIORITY_CLASS_MAP.items()}
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index a89d618606..e2a9d1f6a7 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -5,7 +5,7 @@ logger = logging.getLogger(__name__)

 REQUIREMENTS = {
     "syutil==0.0.2": ["syutil"],
-    "matrix_angular_sdk==0.6.0": ["syweb>=0.6.0"],
+    "matrix_angular_sdk>=0.6.1": ["syweb>=0.6.1"],
     "Twisted==14.0.2": ["twisted==14.0.2"],
     "service_identity>=1.0.0": ["service_identity>=1.0.0"],
     "pyopenssl>=0.14": ["OpenSSL>=0.14"],
@@ -19,10 +19,11 @@ REQUIREMENTS = {
     "pydenticon": ["pydenticon"],
 }

+
 def github_link(project, version, egg):
     return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg)

-DEPENDENCY_LINKS=[
+DEPENDENCY_LINKS = [
     github_link(
         project="matrix-org/syutil",
         version="v0.0.2",
@@ -30,8 +31,8 @@ DEPENDENCY_LINKS=[
     ),
     github_link(
         project="matrix-org/matrix-angular-sdk",
-        version="v0.6.0",
-        egg="matrix_angular_sdk-0.6.0",
+        version="v0.6.1",
+        egg="matrix_angular_sdk-0.6.1",
     ),
     github_link(
         project="pyca/pynacl",
@@ -101,6 +102,7 @@ def check_requirements():
                 % (dependency, file_path, version, required_version)
             )

+
 def list_requirements():
     result = []
     linked = []
@@ -111,7 +113,7 @@ def list_requirements():
     for requirement in REQUIREMENTS:
         is_linked = False
         for link in linked:
-            if requirement.replace('-','_').startswith(link):
+            if requirement.replace('-', '_').startswith(link):
                 is_linked = True
         if not is_linked:
             result.append(requirement)
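
Illustrating the push/__init__.py change further up: event_match conditions on content.body keep their word-boundary behaviour but are now evaluated with re.search, while every other key must match the whole value. The sketch below is a standalone approximation; the toy glob_to_regexp helper stands in for the pusher's own _glob_to_regexp, so the generated regexps are not identical to synapse's:

    import re

    def glob_to_regexp(glob):
        # Toy stand-in: only handles '*' and '?', unlike the real helper.
        return re.escape(glob).replace(r'\*', '.*').replace(r'\?', '.')

    def event_matches(condition_key, pattern, value):
        if condition_key == 'content.body':
            # body patterns fire on a word boundary anywhere in the body
            r = r'\b%s\b' % glob_to_regexp(pattern)
        else:
            # any other key has to match the value in full
            r = r'^%s$' % glob_to_regexp(pattern)
        return re.search(r, value, flags=re.IGNORECASE) is not None

    assert event_matches('content.body', 'lunch*', 'going to LUNCHTIME talks')
    assert not event_matches('content.topic', 'lunch*', 'no food here')
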
diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py
index 8e83548bbf..f126798597 100644
--- a/synapse/rest/client/v1/directory.py
+++ b/synapse/rest/client/v1/directory.py
@@ -46,7 +46,7 @@ class ClientDirectoryServer(ClientV1RestServlet):
     @defer.inlineCallbacks
     def on_PUT(self, request, room_alias):
         content = _parse_json(request)
-        if not "room_id" in content:
+        if "room_id" not in content:
             raise SynapseError(400, "Missing room_id key",
                                errcode=Codes.BAD_JSON)
diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py
index d43ade39dd..c4e7dfcf0e 100644
--- a/synapse/rest/client/v1/push_rule.py
+++ b/synapse/rest/client/v1/push_rule.py
@@ -15,12 +15,17 @@

 from twisted.internet import defer

-from synapse.api.errors import SynapseError, Codes, UnrecognizedRequestError, NotFoundError, \
-    StoreError
+from synapse.api.errors import (
+    SynapseError, Codes, UnrecognizedRequestError, NotFoundError, StoreError
+)
 from .base import ClientV1RestServlet, client_path_pattern
-from synapse.storage.push_rule import InconsistentRuleException, RuleNotFoundException
+from synapse.storage.push_rule import (
+    InconsistentRuleException, RuleNotFoundException
+)
 import synapse.push.baserules as baserules
-from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP
+from synapse.push.rulekinds import (
+    PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP
+)

 import json
@@ -105,7 +110,9 @@ class PushRuleRestServlet(ClientV1RestServlet):
         # we build up the full structure and then decide which bits of it
         # to send which means doing unnecessary work sometimes but is
         # is probably not going to make a whole lot of difference
-        rawrules = yield self.hs.get_datastore().get_push_rules_for_user_name(user.to_string())
+        rawrules = yield self.hs.get_datastore().get_push_rules_for_user_name(
+            user.to_string()
+        )

         for r in rawrules:
             r["conditions"] = json.loads(r["conditions"])
@@ -383,6 +390,7 @@ def _namespaced_rule_id_from_spec(spec):

 def _rule_id_from_namespaced(in_rule_id):
     return in_rule_id.split('/')[-1]

+
 class InvalidRuleException(Exception):
     pass
diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py
index e10d2576d2..80e9939b79 100644
--- a/synapse/rest/client/v1/pusher.py
+++ b/synapse/rest/client/v1/pusher.py
@@ -34,8 +34,8 @@ class PusherRestServlet(ClientV1RestServlet):
         pusher_pool = self.hs.get_pusherpool()

         if ('pushkey' in content and 'app_id' in content
-            and 'kind' in content and
-            content['kind'] is None):
+                and 'kind' in content and
+                content['kind'] is None):
             yield pusher_pool.remove_pusher(
                 content['app_id'], content['pushkey']
             )
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 81d5cf8ead..3056ec45cf 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -118,7 +118,7 @@ class SyncRestServlet(RestServlet):
         except:
             filter = Filter({})
         # filter = filter.apply_overrides(http_request)
-        #if filter.matches(event):
+        # if filter.matches(event):
         #   # stuff

         sync_config = SyncConfig(
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index 5b42782331..e5aba3af4c 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -38,9 +38,10 @@ class UploadResource(BaseMediaResource):
     def render_OPTIONS(self, request):
         respond_with_json(request, 200, {}, send_cors=True)
         return NOT_DONE_YET
-
+
     @defer.inlineCallbacks
-    def create_content(self, media_type, upload_name, content, content_length, auth_user):
+    def create_content(self, media_type, upload_name, content, content_length,
+                       auth_user):
         media_id = random_string(24)

         fname = self.filepaths.local_media_filepath(media_id)
@@ -65,7 +66,7 @@ class UploadResource(BaseMediaResource):
         }

         yield self._generate_local_thumbnails(media_id, media_info)
-
+
         defer.returnValue("mxc://%s/%s" % (self.server_name, media_id))

     @defer.inlineCallbacks
@@ -95,8 +96,8 @@ class UploadResource(BaseMediaResource):
                 code=400,
             )

-        #if headers.hasHeader("Content-Disposition"):
-        #    disposition = headers.getRawHeaders("Content-Disposition")[0]
+        # if headers.hasHeader("Content-Disposition"):
+        #     disposition = headers.getRawHeaders("Content-Disposition")[0]
         # TODO(markjh): parse content-dispostion

         content_uri = yield self.create_content(
diff --git a/synapse/state.py b/synapse/state.py
index 695a5e7ac4..54380b9e5c 100644
--- a/synapse/state.py
+++ b/synapse/state.py
@@ -76,7 +76,7 @@ class StateHandler(object):
                 defer.returnValue(res[1].get((event_type, state_key)))
                 return

-        defer.returnValue(res[1].values())
+        defer.returnValue(res[1])

     @defer.inlineCallbacks
     def compute_event_context(self, event, old_state=None):
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 310ee0104c..3e1ab0a159 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -77,6 +77,43 @@ class LoggingTransaction(object):
                 sql_logger.debug("[SQL time] {%s} %f", self.name, end - start)


+class PerformanceCounters(object):
+    def __init__(self):
+        self.current_counters = {}
+        self.previous_counters = {}
+
+    def update(self, key, start_time, end_time=None):
+        if end_time is None:
+            end_time = time.time() * 1000
+        duration = end_time - start_time
+        count, cum_time = self.current_counters.get(key, (0, 0))
+        count += 1
+        cum_time += duration
+        self.current_counters[key] = (count, cum_time)
+        return end_time
+
+    def interval(self, interval_duration, limit=3):
+        counters = []
+        for name, (count, cum_time) in self.current_counters.items():
+            prev_count, prev_time = self.previous_counters.get(name, (0, 0))
+            counters.append((
+                (cum_time - prev_time) / interval_duration,
+                count - prev_count,
+                name
+            ))
+
+        self.previous_counters = dict(self.current_counters)
+
+        counters.sort(reverse=True)
+
+        top_n_counters = ", ".join(
+            "%s(%d): %.3f%%" % (name, count, 100 * ratio)
+            for ratio, count, name in counters[:limit]
+        )
+
+        return top_n_counters
+
+
 class SQLBaseStore(object):
     _TXN_ID = 0

@@ -88,6 +125,8 @@ class SQLBaseStore(object):
         self._previous_txn_total_time = 0
         self._current_txn_total_time = 0
         self._previous_loop_ts = 0
+        self._txn_perf_counters = PerformanceCounters()
+        self._get_event_counters = PerformanceCounters()

     def start_profiling(self):
         self._previous_loop_ts = self._clock.time_msec()
@@ -103,7 +142,18 @@ class SQLBaseStore(object):

             ratio = (curr - prev)/(time_now - time_then)

-            logger.info("Total database time: %.3f%%", ratio * 100)
+            top_three_counters = self._txn_perf_counters.interval(
+                time_now - time_then, limit=3
+            )
+
+            top_3_event_counters = self._get_event_counters.interval(
+                time_now - time_then, limit=3
+            )
+
+            logger.info(
+                "Total database time: %.3f%% {%s} {%s}",
+                ratio * 100, top_three_counters, top_3_event_counters
+            )

         self._clock.looping_call(loop, 10000)

@@ -116,7 +166,7 @@ class SQLBaseStore(object):
         with LoggingContext("runInteraction") as context:
             current_context.copy_to(context)
             start = time.time() * 1000
-            txn_id = SQLBaseStore._TXN_ID
+            txn_id = self._TXN_ID

             # We don't really need these to be unique, so lets stop it from
             # growing really large.
@@ -138,6 +188,7 @@ class SQLBaseStore(object):
                 )

                 self._current_txn_total_time += end - start
+                self._txn_perf_counters.update(desc, start, end)

             with PreserveLoggingContext():
                 result = yield self._db_pool.runInteraction(
@@ -537,6 +588,8 @@ class SQLBaseStore(object):
             "LIMIT 1 "
         )

+        start_time = time.time() * 1000
+
         txn.execute(sql, (event_id,))

         res = txn.fetchone()
@@ -546,6 +599,8 @@ class SQLBaseStore(object):

         internal_metadata, js, redacted, rejected_reason = res

+        self._get_event_counters.update("select_event", start_time)
+
         if allow_rejected or not rejected_reason:
             return self._get_event_from_row_txn(
                 txn, internal_metadata, js, redacted,
@@ -557,10 +612,18 @@ class SQLBaseStore(object):

     def _get_event_from_row_txn(self, txn, internal_metadata, js, redacted,
                                 check_redacted=True, get_prev_content=False):
+
+        start_time = time.time() * 1000
+        update_counter = self._get_event_counters.update
+
         d = json.loads(js)
+        start_time = update_counter("decode_json", start_time)
+
         internal_metadata = json.loads(internal_metadata)
+        start_time = update_counter("decode_internal", start_time)

         ev = FrozenEvent(d, internal_metadata_dict=internal_metadata)
+        start_time = update_counter("build_frozen_event", start_time)

         if check_redacted and redacted:
             ev = prune_event(ev)
@@ -576,6 +639,7 @@ class SQLBaseStore(object):

             if because:
                 ev.unsigned["redacted_because"] = because
+            start_time = update_counter("redact_event", start_time)

         if get_prev_content and "replaces_state" in ev.unsigned:
             prev = self._get_event_txn(
@@ -585,6 +649,7 @@ class SQLBaseStore(object):
             )
             if prev:
                 ev.unsigned["prev_content"] = prev.get_dict()["content"]
+            start_time = update_counter("get_prev_content", start_time)

         return ev

diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py
index 30e23445d9..620de71398 100644
--- a/synapse/storage/push_rule.py
+++ b/synapse/storage/push_rule.py
@@ -91,7 +91,9 @@ class PushRuleStore(SQLBaseStore):
         txn.execute(sql, (user_name, relative_to_rule))
         res = txn.fetchall()
         if not res:
-            raise RuleNotFoundException("before/after rule not found: %s" % (relative_to_rule))
+            raise RuleNotFoundException(
+                "before/after rule not found: %s" % (relative_to_rule,)
+            )
         priority_class, base_rule_priority = res[0]

         if 'priority_class' in kwargs and kwargs['priority_class'] != priority_class:
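
Finally, the PerformanceCounters class introduced in synapse/storage/_base.py is self-contained enough to exercise on its own. A hypothetical driver follows; the counter names and timings are invented for illustration, and in the commit itself update() is fed from the transaction and event-decoding paths while interval() is reported by the profiling loop roughly every ten seconds:

    import time

    # Assumes a post-commit tree where the class lives in synapse/storage/_base.py.
    from synapse.storage._base import PerformanceCounters

    counters = PerformanceCounters()

    # Time two phases of some work; update() returns its end time so the next
    # measurement can start exactly where the previous one stopped.
    start = time.time() * 1000  # milliseconds, matching the class
    start = counters.update("decode_json", start)
    start = counters.update("build_frozen_event", start)

    # Summarise the top consumers over a 10 second interval, each expressed as
    # a percentage of that interval, e.g.
    # "decode_json(1): 0.002%, build_frozen_event(1): 0.001%"
    print(counters.interval(interval_duration=10000, limit=3))
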