diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 15caf1950a..fc7e87de8f 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -29,6 +29,7 @@ from synapse.util import unwrapFirstError
from synapse.util.async import concurrently_execute, run_on_reactor
from synapse.util.caches.snapshot_cache import SnapshotCache
from synapse.util.logcontext import preserve_fn
+from synapse.util.logutils import log_duration
from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
@@ -734,31 +735,33 @@ class MessageHandler(BaseHandler):
@defer.inlineCallbacks
def _create_new_client_event(self, builder, prev_event_ids=None):
- if prev_event_ids:
- prev_events = yield self.store.add_event_hashes(prev_event_ids)
- prev_max_depth = yield self.store.get_max_depth_of_events(prev_event_ids)
- depth = prev_max_depth + 1
- else:
- latest_ret = yield self.store.get_latest_event_ids_and_hashes_in_room(
- builder.room_id,
- )
-
- if latest_ret:
- depth = max([d for _, _, d in latest_ret]) + 1
+ with log_duration("prev_events"):
+ if prev_event_ids:
+ prev_events = yield self.store.add_event_hashes(prev_event_ids)
+ prev_max_depth = yield self.store.get_max_depth_of_events(prev_event_ids)
+ depth = prev_max_depth + 1
else:
- depth = 1
+ latest_ret = yield self.store.get_latest_event_ids_and_hashes_in_room(
+ builder.room_id,
+ )
+
+ if latest_ret:
+ depth = max([d for _, _, d in latest_ret]) + 1
+ else:
+ depth = 1
- prev_events = [
- (event_id, prev_hashes)
- for event_id, prev_hashes, _ in latest_ret
- ]
+ prev_events = [
+ (event_id, prev_hashes)
+ for event_id, prev_hashes, _ in latest_ret
+ ]
builder.prev_events = prev_events
builder.depth = depth
state_handler = self.state_handler
- context = yield state_handler.compute_event_context(builder)
+ with log_duration("context"):
+ context = yield state_handler.compute_event_context(builder)
if builder.is_state():
builder.prev_state = yield self.store.add_event_hashes(
@@ -882,14 +885,16 @@ class MessageHandler(BaseHandler):
"Changing the room create event is forbidden",
)
- action_generator = ActionGenerator(self.hs)
- yield action_generator.handle_push_actions_for_event(
- event, context
- )
+ with log_duration("action_generator"):
+ action_generator = ActionGenerator(self.hs)
+ yield action_generator.handle_push_actions_for_event(
+ event, context
+ )
- (event_stream_id, max_stream_id) = yield self.store.persist_event(
- event, context=context
- )
+ with log_duration("persist_event"):
+ (event_stream_id, max_stream_id) = yield self.store.persist_event(
+ event, context=context
+ )
# this intentionally does not yield: we don't care about the result
# and don't need to wait for it.
@@ -916,7 +921,8 @@ class MessageHandler(BaseHandler):
extra_users=extra_users
)
- preserve_fn(_notify)()
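+    # This only times the synchronous part of _notify; per the comment
+    # above, we deliberately don't wait for the deferred it returns.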
+ with log_duration("on_new_room_event"):
+ preserve_fn(_notify)()
# If invite, remove room_state from unsigned before sending.
event.unsigned.pop("invite_room_state", None)
diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py
index 341043952a..1721c3705b 100644
--- a/synapse/metrics/metric.py
+++ b/synapse/metrics/metric.py
@@ -15,6 +15,7 @@
from itertools import chain
+from collections import Counter
# TODO(paul): I can't believe Python doesn't have one of these
@@ -52,30 +53,29 @@ class CounterMetric(BaseMetric):
"""The simplest kind of metric; one that stores a monotonically-increasing
integer that counts events."""
+ __slots__ = ("counts")
+
def __init__(self, *args, **kwargs):
super(CounterMetric, self).__init__(*args, **kwargs)
- self.counts = {}
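+        # Counter treats absent keys as 0, so inc_by no longer needs an
+        # explicit membership check.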
+ self.counts = Counter()
# Scalar metrics are never empty
if self.is_scalar():
self.counts[()] = 0
def inc_by(self, incr, *values):
- if len(values) != self.dimension():
- raise ValueError(
- "Expected as many values to inc() as labels (%d)" % (self.dimension())
- )
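+        # Sanity check disabled to keep this hot-path increment cheap: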
+ # if len(values) != self.dimension():
+ # raise ValueError(
+ # "Expected as many values to inc() as labels (%d)" % (self.dimension())
+ # )
# TODO: should assert that the tag values are all strings
- if values not in self.counts:
- self.counts[values] = incr
- else:
- self.counts[values] += incr
+ self.counts[values] += incr
def inc(self, *values):
- self.inc_by(1, *values)
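+        # Inlined rather than calling inc_by(), saving a function call.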
+ self.counts[values] += 1
def render_item(self, k):
return ["%s%s %d" % (self.name, self._render_key(k), self.counts[k])]
diff --git a/synapse/push/action_generator.py b/synapse/push/action_generator.py
index 46e768e35c..dd8e06f4af 100644
--- a/synapse/push/action_generator.py
+++ b/synapse/push/action_generator.py
@@ -17,6 +17,7 @@ from twisted.internet import defer
from .bulk_push_rule_evaluator import evaluator_for_event
+from synapse.util.logutils import log_duration
from synapse.util.metrics import Measure
import logging
@@ -39,13 +40,15 @@ class ActionGenerator:
@defer.inlineCallbacks
def handle_push_actions_for_event(self, event, context):
with Measure(self.clock, "handle_push_actions_for_event"):
- bulk_evaluator = yield evaluator_for_event(
- event, self.hs, self.store, context.current_state
- )
-
- actions_by_user = yield bulk_evaluator.action_for_event_by_user(
- event, context.current_state
- )
+ with log_duration("evaluator_for_event"):
+ bulk_evaluator = yield evaluator_for_event(
+ event, self.hs, self.store, context.current_state
+ )
+
+ with log_duration("action_for_event_by_user"):
+ actions_by_user = yield bulk_evaluator.action_for_event_by_user(
+ event, context.current_state
+ )
context.push_actions = [
(uid, actions) for uid, actions in actions_by_user.items()
diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py
index 6e42121b1d..6599abf1ab 100644
--- a/synapse/push/bulk_push_rule_evaluator.py
+++ b/synapse/push/bulk_push_rule_evaluator.py
@@ -22,6 +22,7 @@ from .push_rule_evaluator import PushRuleEvaluatorForEvent
from synapse.api.constants import EventTypes, Membership
from synapse.visibility import filter_events_for_clients
+from synapse.util.logutils import log_duration
logger = logging.getLogger(__name__)
@@ -36,7 +37,8 @@ def decode_rule_json(rule):
@defer.inlineCallbacks
def _get_rules(room_id, user_ids, store):
- rules_by_user = yield store.bulk_get_push_rules(user_ids)
+ with log_duration("bulk_get_push_rules"):
+ rules_by_user = yield store.bulk_get_push_rules(user_ids)
rules_by_user = {k: v for k, v in rules_by_user.items() if v is not None}
@@ -46,25 +48,28 @@ def _get_rules(room_id, user_ids, store):
@defer.inlineCallbacks
def evaluator_for_event(event, hs, store, current_state):
room_id = event.room_id
+
# We also will want to generate notifs for other people in the room so
# their unread counts are correct in the event stream, but to avoid
# generating them for bot / AS users etc, we only do so for people who've
# sent a read receipt into the room.
- local_users_in_room = set(
- e.state_key for e in current_state.values()
- if e.type == EventTypes.Member and e.membership == Membership.JOIN
- and hs.is_mine_id(e.state_key)
- )
+ with log_duration("get_users_in_room"):
+ local_users_in_room = set(
+ e.state_key for e in current_state.values()
+ if e.type == EventTypes.Member and e.membership == Membership.JOIN
+ and hs.is_mine_id(e.state_key)
+ )
# users in the room who have pushers need to get push rules run because
# that's how their pushers work
- if_users_with_pushers = yield store.get_if_users_have_pushers(
- local_users_in_room
- )
- user_ids = set(
- uid for uid, have_pusher in if_users_with_pushers.items() if have_pusher
- )
+ with log_duration("get_users_with_pushers_in_room"):
+ if_users_with_pushers = yield store.get_if_users_have_pushers(
+ local_users_in_room
+ )
+ user_ids = set(
+ uid for uid, have_pusher in if_users_with_pushers.items() if have_pusher
+ )
users_with_receipts = yield store.get_users_with_read_receipts_in_room(room_id)
diff --git a/synapse/state.py b/synapse/state.py
index d0f76dc4f5..b6557ab98a 100644
--- a/synapse/state.py
+++ b/synapse/state.py
@@ -231,6 +231,7 @@ class StateHandler(object):
)
logger.info("Resolving state for %s with %d groups", room_id, len(state_groups))
+ logger.info("State groups for %s with %r", room_id, group_names)
new_state, prev_states = self._resolve_events(
state_groups.values(), event_type, state_key
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py
index f31dfb22b7..bf271ce5b5 100644
--- a/synapse/util/caches/descriptors.py
+++ b/synapse/util/caches/descriptors.py
@@ -94,6 +94,9 @@ class Cache(object):
else:
return default
+ def has(self, key):
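+        """Return True if the key is present in the cache."""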
+ return key in self.cache
+
def update(self, sequence, key, value):
self.check_thread()
if self.sequence == sequence:
@@ -134,6 +137,12 @@ class Cache(object):
self.sequence += 1
self.cache.clear()
+ def __contains__(self, key):
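+        """Support ``key in cache``."""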
+ return self.has(key)
+
+ def __getitem__(self, key):
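+        """Support ``cache[key]``, delegating to get()."""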
+ return self.get(key)
+
class CacheDescriptor(object):
""" A method decorator that applies a memoizing cache around the function.
diff --git a/synapse/util/logutils.py b/synapse/util/logutils.py
index 3a83828d25..5324c2815b 100644
--- a/synapse/util/logutils.py
+++ b/synapse/util/logutils.py
@@ -21,10 +21,23 @@ import logging
import inspect
import time
+from contextlib import contextmanager
+
_TIME_FUNC_ID = 0
+logger = logging.getLogger(__name__)
+
+
+@contextmanager
+def log_duration(name):
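+    """Log at INFO how many wall-clock milliseconds the wrapped block took.
+
+    Emits lines like "Timings: context took 12ms"; crude instrumentation
+    for ad-hoc profiling.
+    """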
+ start = time.time() * 1000
+ yield
+ end = time.time() * 1000
+ logger.info("Timings: %s tooke %dms", name, int(end - start))
+
+
def _log_debug_as_f(f, msg, msg_args):
name = f.__module__
logger = logging.getLogger(name)
|