From 42ad49f5b75c2c645c4060026c21c5572f5b1063 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 16 Dec 2015 18:42:09 +0000 Subject: still very WIP, but now sends unread_notifications_count in the room object on sync (only actually corrrect in a full sync: hardcoded to 0 in incremental syncs). --- synapse/rest/client/v2_alpha/sync.py | 1 + 1 file changed, 1 insertion(+) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index f0a637a6da..4ca10732c1 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -304,6 +304,7 @@ class SyncRestServlet(RestServlet): }, "state": {"events": serialized_state}, "account_data": {"events": account_data}, + "unread_notification_count": room.unread_notification_count } if joined: -- cgit 1.4.1 From c77e7e60fc5d29e8a57bb82dd5aa8e72ae570d84 Mon Sep 17 00:00:00 2001 From: David Baker Date: Mon, 4 Jan 2016 15:49:06 +0000 Subject: Only joined rooms have unread_notif_count --- synapse/rest/client/v2_alpha/sync.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index cd3aef9e07..095f96e218 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -311,12 +311,12 @@ class SyncRestServlet(RestServlet): }, "state": {"events": serialized_state}, "account_data": {"events": account_data}, - "unread_notification_count": room.unread_notification_count } if joined: ephemeral_events = filter.filter_room_ephemeral(room.ephemeral) result["ephemeral"] = {"events": ephemeral_events} + result["unread_notification_count"] = room.unread_notification_count return result -- cgit 1.4.1 From cfd07aafff71b452a01265f304172f56b2c49759 Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Tue, 5 Jan 2016 18:01:18 +0000 Subject: Allow guests to upgrade their accounts --- synapse/api/auth.py | 6 ++-- synapse/handlers/auth.py | 6 ++-- synapse/handlers/register.py | 37 +++++++++++++++++------ synapse/handlers/room.py | 2 +- synapse/handlers/sync.py | 2 +- synapse/rest/client/v2_alpha/register.py | 12 ++++++-- synapse/rest/media/v1/thumbnail_resource.py | 2 +- synapse/storage/prepare_database.py | 4 +-- synapse/storage/registration.py | 23 +++++++++----- synapse/storage/schema/delta/28/upgrade_times.sql | 21 +++++++++++++ tests/api/test_auth.py | 18 +++++------ 11 files changed, 93 insertions(+), 40 deletions(-) create mode 100644 synapse/storage/schema/delta/28/upgrade_times.sql (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index adb7d64482..b86c6c8399 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014, 2015 OpenMarket Ltd +# Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -583,7 +583,7 @@ class Auth(object): AuthError if no user by that token exists or the token is invalid. """ try: - ret = yield self._get_user_from_macaroon(token) + ret = yield self.get_user_from_macaroon(token) except AuthError: # TODO(daniel): Remove this fallback when all existing access tokens # have been re-issued as macaroons. 
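
# A minimal illustrative sketch (assumptions: made-up location/identifier/key
# values; caveat names follow tests/api/test_auth.py further down this log).
# The rename above makes get_user_from_macaroon() part of Auth's public
# interface so the registration handler later in this patch can validate a
# guest's access token itself.
import pymacaroons

def build_access_macaroon(user_id, key="not-the-real-secret", guest=False):
    macaroon = pymacaroons.Macaroon(
        location="example.com", identifier="key", key=key
    )
    macaroon.add_first_party_caveat("gen = 1")
    macaroon.add_first_party_caveat("type = access")
    macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
    if guest:
        macaroon.add_first_party_caveat("guest = true")
    return macaroon.serialize()

# Auth.get_user_from_macaroon(serialized) then resolves such a token to a
# dict carrying at least "user" (a UserID) and "is_guest", which is what the
# register handler below relies on.
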
@@ -591,7 +591,7 @@ class Auth(object): defer.returnValue(ret) @defer.inlineCallbacks - def _get_user_from_macaroon(self, macaroon_str): + def get_user_from_macaroon(self, macaroon_str): try: macaroon = pymacaroons.Macaroon.deserialize(macaroon_str) self.validate_macaroon(macaroon, "access", False) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index e64b67cdfd..62e82a2570 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014, 2015 OpenMarket Ltd +# Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -408,7 +408,7 @@ class AuthHandler(BaseHandler): macaroon = pymacaroons.Macaroon.deserialize(login_token) auth_api = self.hs.get_auth() auth_api.validate_macaroon(macaroon, "login", True) - return self._get_user_from_macaroon(macaroon) + return self.get_user_from_macaroon(macaroon) except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError): raise AuthError(401, "Invalid token", errcode=Codes.UNKNOWN_TOKEN) @@ -421,7 +421,7 @@ class AuthHandler(BaseHandler): macaroon.add_first_party_caveat("user_id = %s" % (user_id,)) return macaroon - def _get_user_from_macaroon(self, macaroon): + def get_user_from_macaroon(self, macaroon): user_prefix = "user_id = " for caveat in macaroon.caveats: if caveat.caveat_id.startswith(user_prefix): diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index baf7c14e40..6f111ff63e 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014, 2015 OpenMarket Ltd +# Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -40,12 +40,13 @@ class RegistrationHandler(BaseHandler): def __init__(self, hs): super(RegistrationHandler, self).__init__(hs) + self.auth = hs.get_auth() self.distributor = hs.get_distributor() self.distributor.declare("registered_user") self.captcha_client = CaptchaServerHttpClient(hs) @defer.inlineCallbacks - def check_username(self, localpart): + def check_username(self, localpart, guest_access_token=None): yield run_on_reactor() if urllib.quote(localpart) != localpart: @@ -62,14 +63,29 @@ class RegistrationHandler(BaseHandler): users = yield self.store.get_users_by_id_case_insensitive(user_id) if users: - raise SynapseError( - 400, - "User ID already taken.", - errcode=Codes.USER_IN_USE, - ) + if not guest_access_token: + raise SynapseError( + 400, + "User ID already taken.", + errcode=Codes.USER_IN_USE, + ) + user_data = yield self.auth.get_user_from_macaroon(guest_access_token) + if not user_data["is_guest"] or user_data["user"].localpart != localpart: + raise AuthError( + 403, + "Cannot register taken user ID without valid guest " + "credentials for that user.", + errcode=Codes.FORBIDDEN, + ) @defer.inlineCallbacks - def register(self, localpart=None, password=None, generate_token=True): + def register( + self, + localpart=None, + password=None, + generate_token=True, + guest_access_token=None + ): """Registers a new client on the server. 
Args: @@ -89,7 +105,7 @@ class RegistrationHandler(BaseHandler): password_hash = self.auth_handler().hash(password) if localpart: - yield self.check_username(localpart) + yield self.check_username(localpart, guest_access_token=guest_access_token) user = UserID(localpart, self.hs.hostname) user_id = user.to_string() @@ -100,7 +116,8 @@ class RegistrationHandler(BaseHandler): yield self.store.register( user_id=user_id, token=token, - password_hash=password_hash + password_hash=password_hash, + was_guest=guest_access_token is not None, ) yield registered_user(self.distributor, user) diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 0cfeda10d8..6186c37c7c 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014, 2015 OpenMarket Ltd +# Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 9796f2a57f..41a42418a9 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2015 OpenMarket Ltd +# Copyright 2015 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index b2b89652c6..25389ceded 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2015 OpenMarket Ltd +# Copyright 2015 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -119,8 +119,13 @@ class RegisterRestServlet(RestServlet): if self.hs.config.disable_registration: raise SynapseError(403, "Registration has been disabled") + guest_access_token = body.get("guest_access_token", None) + if desired_username is not None: - yield self.registration_handler.check_username(desired_username) + yield self.registration_handler.check_username( + desired_username, + guest_access_token=guest_access_token + ) if self.hs.config.enable_registration_captcha: flows = [ @@ -150,7 +155,8 @@ class RegisterRestServlet(RestServlet): (user_id, token) = yield self.registration_handler.register( localpart=desired_username, - password=new_password + password=new_password, + guest_access_token=guest_access_token, ) if result and LoginType.EMAIL_IDENTITY in result: diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py index 8b8fba3dc7..c18160534e 100644 --- a/synapse/rest/media/v1/thumbnail_resource.py +++ b/synapse/rest/media/v1/thumbnail_resource.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014, 2015 OpenMarket Ltd +# Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
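
# A client-side sketch of the guest-upgrade flow enabled above. Assumptions:
# the v2_alpha /register path and the "username"/"password"/"auth" body keys;
# only "guest_access_token" is taken directly from the servlet change above.
import json
import urllib2

def upgrade_guest_account(hs_base_url, guest_access_token, username, password):
    # The guest re-registers its own localpart and proves ownership by
    # supplying its guest access token; the server checks that the token's
    # user_id matches the requested localpart (see check_username above).
    body = {
        "username": username,
        "password": password,
        "guest_access_token": guest_access_token,
        "auth": {"type": "m.login.dummy"},  # assumed flow for this sketch
    }
    request = urllib2.Request(
        hs_base_url + "/_matrix/client/v2_alpha/register",
        data=json.dumps(body),
        headers={"Content-Type": "application/json"},
    )
    return json.loads(urllib2.urlopen(request).read())
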
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index 16eff62544..c1f5f99789 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014, 2015 OpenMarket Ltd +# Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,7 +25,7 @@ logger = logging.getLogger(__name__) # Remember to update this number every time a change is made to database # schema files, so the users will be informed on server restarts. -SCHEMA_VERSION = 27 +SCHEMA_VERSION = 28 dir_path = os.path.abspath(os.path.dirname(__file__)) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 09a05b08ef..f0fa0bd33c 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014, 2015 OpenMarket Ltd +# Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -73,30 +73,39 @@ class RegistrationStore(SQLBaseStore): ) @defer.inlineCallbacks - def register(self, user_id, token, password_hash): + def register(self, user_id, token, password_hash, was_guest=False): """Attempts to register an account. Args: user_id (str): The desired user ID to register. token (str): The desired access token to use for this user. password_hash (str): Optional. The password hash for this user. + was_guest (bool): Optional. Whether this is a guest account being + upgraded to a non-guest account. Raises: StoreError if the user_id could not be registered. """ yield self.runInteraction( "register", - self._register, user_id, token, password_hash + self._register, user_id, token, password_hash, was_guest ) - def _register(self, txn, user_id, token, password_hash): + def _register(self, txn, user_id, token, password_hash, was_guest): now = int(self.clock.time()) next_id = self._access_tokens_id_gen.get_next_txn(txn) try: - txn.execute("INSERT INTO users(name, password_hash, creation_ts) " - "VALUES (?,?,?)", - [user_id, password_hash, now]) + if was_guest: + txn.execute("UPDATE users SET" + " password_hash = ?," + " upgrade_ts = ?" + " WHERE name = ?", + [password_hash, now, user_id]) + else: + txn.execute("INSERT INTO users(name, password_hash, creation_ts) " + "VALUES (?,?,?)", + [user_id, password_hash, now]) except self.database_engine.module.IntegrityError: raise StoreError( 400, "User ID already taken.", errcode=Codes.USER_IN_USE diff --git a/synapse/storage/schema/delta/28/upgrade_times.sql b/synapse/storage/schema/delta/28/upgrade_times.sql new file mode 100644 index 0000000000..3e4a9ab455 --- /dev/null +++ b/synapse/storage/schema/delta/28/upgrade_times.sql @@ -0,0 +1,21 @@ +/* Copyright 2016 OpenMarket Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* + * Stores the timestamp when a user upgraded from a guest to a full user, if + * that happened. + */ + +ALTER TABLE users ADD COLUMN upgrade_ts BIGINT; diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 70d928defe..5ff4c8a873 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2015 OpenMarket Ltd +# Copyright 2015 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -154,7 +154,7 @@ class AuthTestCase(unittest.TestCase): macaroon.add_first_party_caveat("gen = 1") macaroon.add_first_party_caveat("type = access") macaroon.add_first_party_caveat("user_id = %s" % (user_id,)) - user_info = yield self.auth._get_user_from_macaroon(macaroon.serialize()) + user_info = yield self.auth.get_user_from_macaroon(macaroon.serialize()) user = user_info["user"] self.assertEqual(UserID.from_string(user_id), user) @@ -171,7 +171,7 @@ class AuthTestCase(unittest.TestCase): macaroon.add_first_party_caveat("guest = true") serialized = macaroon.serialize() - user_info = yield self.auth._get_user_from_macaroon(serialized) + user_info = yield self.auth.get_user_from_macaroon(serialized) user = user_info["user"] is_guest = user_info["is_guest"] self.assertEqual(UserID.from_string(user_id), user) @@ -192,7 +192,7 @@ class AuthTestCase(unittest.TestCase): macaroon.add_first_party_caveat("type = access") macaroon.add_first_party_caveat("user_id = %s" % (user,)) with self.assertRaises(AuthError) as cm: - yield self.auth._get_user_from_macaroon(macaroon.serialize()) + yield self.auth.get_user_from_macaroon(macaroon.serialize()) self.assertEqual(401, cm.exception.code) self.assertIn("User mismatch", cm.exception.msg) @@ -212,7 +212,7 @@ class AuthTestCase(unittest.TestCase): macaroon.add_first_party_caveat("type = access") with self.assertRaises(AuthError) as cm: - yield self.auth._get_user_from_macaroon(macaroon.serialize()) + yield self.auth.get_user_from_macaroon(macaroon.serialize()) self.assertEqual(401, cm.exception.code) self.assertIn("No user caveat", cm.exception.msg) @@ -234,7 +234,7 @@ class AuthTestCase(unittest.TestCase): macaroon.add_first_party_caveat("user_id = %s" % (user,)) with self.assertRaises(AuthError) as cm: - yield self.auth._get_user_from_macaroon(macaroon.serialize()) + yield self.auth.get_user_from_macaroon(macaroon.serialize()) self.assertEqual(401, cm.exception.code) self.assertIn("Invalid macaroon", cm.exception.msg) @@ -257,7 +257,7 @@ class AuthTestCase(unittest.TestCase): macaroon.add_first_party_caveat("cunning > fox") with self.assertRaises(AuthError) as cm: - yield self.auth._get_user_from_macaroon(macaroon.serialize()) + yield self.auth.get_user_from_macaroon(macaroon.serialize()) self.assertEqual(401, cm.exception.code) self.assertIn("Invalid macaroon", cm.exception.msg) @@ -285,11 +285,11 @@ class AuthTestCase(unittest.TestCase): self.hs.clock.now = 5000 # seconds - yield self.auth._get_user_from_macaroon(macaroon.serialize()) + yield self.auth.get_user_from_macaroon(macaroon.serialize()) # TODO(daniel): Turn on the check that we validate expiration, when we # validate expiration (and remove the above line, which will start # throwing). 
# with self.assertRaises(AuthError) as cm: - # yield self.auth._get_user_from_macaroon(macaroon.serialize()) + # yield self.auth.get_user_from_macaroon(macaroon.serialize()) # self.assertEqual(401, cm.exception.code) # self.assertIn("Invalid macaroon", cm.exception.msg) -- cgit 1.4.1 From c79f221192044203b9d32cfbd416a7fefeb34cd5 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 6 Jan 2016 11:38:09 +0000 Subject: Add is_guest flag to users db to track whether a user is a guest user or not. Use this so we can run _filter_events_for_client when calculating event_push_actions. --- synapse/handlers/_base.py | 8 ++--- synapse/handlers/federation.py | 6 ++-- synapse/handlers/register.py | 4 ++- synapse/push/action_generator.py | 6 ++-- synapse/push/bulk_push_rule_evaluator.py | 27 ++++++++++++--- synapse/rest/client/v2_alpha/register.py | 5 ++- synapse/storage/event_push_actions.py | 4 +-- synapse/storage/registration.py | 40 ++++++++++++++++------ .../storage/schema/delta/27/event_push_actions.sql | 26 -------------- .../storage/schema/delta/28/event_push_actions.sql | 26 ++++++++++++++ 10 files changed, 95 insertions(+), 57 deletions(-) delete mode 100644 synapse/storage/schema/delta/27/event_push_actions.sql create mode 100644 synapse/storage/schema/delta/28/event_push_actions.sql (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 3115a5065d..66e35de6e4 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -23,8 +23,6 @@ from synapse.push.action_generator import ActionGenerator from synapse.util.logcontext import PreserveLoggingContext -from synapse.events.utils import serialize_event - import logging @@ -256,9 +254,9 @@ class BaseHandler(object): ) action_generator = ActionGenerator(self.store) - yield action_generator.handle_push_actions_for_event(serialize_event( - event, self.clock.time_msec() - )) + yield action_generator.handle_push_actions_for_event( + event, self + ) destinations = set(extra_destinations) for k, s in context.current_state.items(): diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 764709b424..075b9e21c3 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -32,7 +32,7 @@ from synapse.crypto.event_signing import ( ) from synapse.types import UserID -from synapse.events.utils import prune_event, serialize_event +from synapse.events.utils import prune_event from synapse.util.retryutils import NotRetryingDestination @@ -246,8 +246,8 @@ class FederationHandler(BaseHandler): if not backfilled and not event.internal_metadata.is_outlier(): action_generator = ActionGenerator(self.store) - yield action_generator.handle_push_actions_for_event(serialize_event( - event, self.clock.time_msec()) + yield action_generator.handle_push_actions_for_event( + event, self ) @defer.inlineCallbacks diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 6f111ff63e..1799a668c6 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -84,7 +84,8 @@ class RegistrationHandler(BaseHandler): localpart=None, password=None, generate_token=True, - guest_access_token=None + guest_access_token=None, + make_guest=False ): """Registers a new client on the server. 
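
# A sketch of how the two new register() parameters are used, mirroring the
# v2_alpha register servlet changes elsewhere in this log; the
# registration_handler argument is assumed to be the RegistrationHandler
# patched above.
from twisted.internet import defer

@defer.inlineCallbacks
def register_guest(registration_handler):
    # Guest accounts are minted with no password and no long-lived token.
    user_id, _ = yield registration_handler.register(
        generate_token=False,
        make_guest=True,
    )
    defer.returnValue(user_id)

@defer.inlineCallbacks
def upgrade_guest(registration_handler, localpart, password, guest_access_token):
    # Re-registering the guest's own localpart with its access token sets
    # was_guest=True, so the store UPDATEs the existing users row instead of
    # INSERTing a new one.
    user_id, token = yield registration_handler.register(
        localpart=localpart,
        password=password,
        guest_access_token=guest_access_token,
    )
    defer.returnValue((user_id, token))
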
@@ -118,6 +119,7 @@ class RegistrationHandler(BaseHandler): token=token, password_hash=password_hash, was_guest=guest_access_token is not None, + make_guest=make_guest ) yield registered_user(self.distributor, user) diff --git a/synapse/push/action_generator.py b/synapse/push/action_generator.py index 5526324a6d..bcd40798f9 100644 --- a/synapse/push/action_generator.py +++ b/synapse/push/action_generator.py @@ -33,12 +33,12 @@ class ActionGenerator: # tag (ie. we just need all the users). @defer.inlineCallbacks - def handle_push_actions_for_event(self, event): + def handle_push_actions_for_event(self, event, handler): bulk_evaluator = yield bulk_push_rule_evaluator.evaluator_for_room_id( - event['room_id'], self.store + event.room_id, self.store ) - actions_by_user = bulk_evaluator.action_for_event_by_user(event) + actions_by_user = yield bulk_evaluator.action_for_event_by_user(event, handler) yield self.store.set_push_actions_for_event_and_users( event, diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index c00acfd87e..63d65b4465 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -23,6 +23,8 @@ from synapse.types import UserID import baserules from push_rule_evaluator import PushRuleEvaluator +from synapse.events.utils import serialize_event + logger = logging.getLogger(__name__) @@ -54,7 +56,7 @@ def evaluator_for_room_id(room_id, store): display_names[ev.state_key] = ev.content.get("displayname") defer.returnValue(BulkPushRuleEvaluator( - room_id, rules_by_user, display_names, users + room_id, rules_by_user, display_names, users, store )) @@ -67,13 +69,15 @@ class BulkPushRuleEvaluator: the same logic to run the actual rules, but could be optimised further (see https://matrix.org/jira/browse/SYN-562) """ - def __init__(self, room_id, rules_by_user, display_names, users_in_room): + def __init__(self, room_id, rules_by_user, display_names, users_in_room, store): self.room_id = room_id self.rules_by_user = rules_by_user self.display_names = display_names self.users_in_room = users_in_room + self.store = store - def action_for_event_by_user(self, event): + @defer.inlineCallbacks + def action_for_event_by_user(self, event, handler): actions_by_user = {} for uid, rules in self.rules_by_user.items(): @@ -81,6 +85,13 @@ class BulkPushRuleEvaluator: if uid in self.display_names: display_name = self.display_names[uid] + is_guest = yield self.store.is_guest(UserID.from_string(uid)) + filtered = yield handler._filter_events_for_client( + uid, [event], is_guest=is_guest + ) + if len(filtered) == 0: + continue + for rule in rules: if 'enabled' in rule and not rule['enabled']: continue @@ -94,14 +105,20 @@ class BulkPushRuleEvaluator: if len(actions) > 0: actions_by_user[uid] = actions break - return actions_by_user + defer.returnValue(actions_by_user) @staticmethod def event_matches_rule(event, rule, display_name, room_member_count, profile_tag): matches = True + + # passing the clock all the way into here is extremely awkward and push + # rules do not care about any of the relative timestamps, so we just + # pass 0 for the current time. 
+ client_event = serialize_event(event, 0) + for cond in rule['conditions']: matches &= PushRuleEvaluator._event_fulfills_condition( - event, cond, display_name, room_member_count, profile_tag + client_event, cond, display_name, room_member_count, profile_tag ) return matches diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 25389ceded..c4d025b465 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -259,7 +259,10 @@ class RegisterRestServlet(RestServlet): def _do_guest_registration(self): if not self.hs.config.allow_guest_access: defer.returnValue((403, "Guest access is disabled")) - user_id, _ = yield self.registration_handler.register(generate_token=False) + user_id, _ = yield self.registration_handler.register( + generate_token=False, + make_guest=True + ) access_token = self.auth_handler.generate_access_token(user_id, ["guest = true"]) defer.returnValue((200, { "user_id": user_id, diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py index 3075d02257..0634af6b62 100644 --- a/synapse/storage/event_push_actions.py +++ b/synapse/storage/event_push_actions.py @@ -32,8 +32,8 @@ class EventPushActionsStore(SQLBaseStore): values = [] for uid, profile_tag, actions in tuples: values.append({ - 'room_id': event['room_id'], - 'event_id': event['event_id'], + 'room_id': event.room_id, + 'event_id': event.event_id, 'user_id': uid, 'profile_tag': profile_tag, 'actions': json.dumps(actions) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index f0fa0bd33c..c79066f774 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -18,7 +18,7 @@ from twisted.internet import defer from synapse.api.errors import StoreError, Codes from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cached +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks class RegistrationStore(SQLBaseStore): @@ -73,7 +73,8 @@ class RegistrationStore(SQLBaseStore): ) @defer.inlineCallbacks - def register(self, user_id, token, password_hash, was_guest=False): + def register(self, user_id, token, password_hash, + was_guest=False, make_guest=False): """Attempts to register an account. Args: @@ -82,15 +83,18 @@ class RegistrationStore(SQLBaseStore): password_hash (str): Optional. The password hash for this user. was_guest (bool): Optional. Whether this is a guest account being upgraded to a non-guest account. + make_guest (boolean): True if the the new user should be guest, + false to add a regular user account. Raises: StoreError if the user_id could not be registered. """ yield self.runInteraction( "register", - self._register, user_id, token, password_hash, was_guest + self._register, user_id, token, password_hash, was_guest, make_guest ) + self.is_guest.invalidate((user_id,)) - def _register(self, txn, user_id, token, password_hash, was_guest): + def _register(self, txn, user_id, token, password_hash, was_guest, make_guest): now = int(self.clock.time()) next_id = self._access_tokens_id_gen.get_next_txn(txn) @@ -100,12 +104,14 @@ class RegistrationStore(SQLBaseStore): txn.execute("UPDATE users SET" " password_hash = ?," " upgrade_ts = ?" + " is_guest = ?" 
" WHERE name = ?", - [password_hash, now, user_id]) + [password_hash, now, make_guest, user_id]) else: - txn.execute("INSERT INTO users(name, password_hash, creation_ts) " - "VALUES (?,?,?)", - [user_id, password_hash, now]) + txn.execute("INSERT INTO users " + "(name, password_hash, creation_ts, is_guest) " + "VALUES (?,?,?,?)", + [user_id, password_hash, now, make_guest]) except self.database_engine.module.IntegrityError: raise StoreError( 400, "User ID already taken.", errcode=Codes.USER_IN_USE @@ -126,7 +132,7 @@ class RegistrationStore(SQLBaseStore): keyvalues={ "name": user_id, }, - retcols=["name", "password_hash"], + retcols=["name", "password_hash", "is_guest"], allow_none=True, ) @@ -136,7 +142,7 @@ class RegistrationStore(SQLBaseStore): """ def f(txn): sql = ( - "SELECT name, password_hash FROM users" + "SELECT name, password_hash, is_guest FROM users" " WHERE lower(name) = lower(?)" ) txn.execute(sql, (user_id,)) @@ -249,9 +255,21 @@ class RegistrationStore(SQLBaseStore): defer.returnValue(res if res else False) + @cachedInlineCallbacks() + def is_guest(self, user): + res = yield self._simple_select_one_onecol( + table="users", + keyvalues={"name": user.to_string()}, + retcol="is_guest", + allow_none=True, + desc="is_guest", + ) + + defer.returnValue(res if res else False) + def _query_for_auth(self, txn, token): sql = ( - "SELECT users.name, access_tokens.id as token_id" + "SELECT users.name, users.is_guest, access_tokens.id as token_id" " FROM users" " INNER JOIN access_tokens on users.name = access_tokens.user_id" " WHERE token = ?" diff --git a/synapse/storage/schema/delta/27/event_push_actions.sql b/synapse/storage/schema/delta/27/event_push_actions.sql deleted file mode 100644 index bdf6ae3f24..0000000000 --- a/synapse/storage/schema/delta/27/event_push_actions.sql +++ /dev/null @@ -1,26 +0,0 @@ -/* Copyright 2015 OpenMarket Ltd - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -CREATE TABLE IF NOT EXISTS event_push_actions( - room_id TEXT NOT NULL, - event_id TEXT NOT NULL, - user_id TEXT NOT NULL, - profile_tag VARCHAR(32), - actions TEXT NOT NULL, - CONSTRAINT event_id_user_id_profile_tag_uniqueness UNIQUE (room_id, event_id, user_id, profile_tag) -); - - -CREATE INDEX event_push_actions_room_id_event_id_user_id_profile_tag on event_push_actions(room_id, event_id, user_id, profile_tag); diff --git a/synapse/storage/schema/delta/28/event_push_actions.sql b/synapse/storage/schema/delta/28/event_push_actions.sql new file mode 100644 index 0000000000..bdf6ae3f24 --- /dev/null +++ b/synapse/storage/schema/delta/28/event_push_actions.sql @@ -0,0 +1,26 @@ +/* Copyright 2015 OpenMarket Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +CREATE TABLE IF NOT EXISTS event_push_actions( + room_id TEXT NOT NULL, + event_id TEXT NOT NULL, + user_id TEXT NOT NULL, + profile_tag VARCHAR(32), + actions TEXT NOT NULL, + CONSTRAINT event_id_user_id_profile_tag_uniqueness UNIQUE (room_id, event_id, user_id, profile_tag) +); + + +CREATE INDEX event_push_actions_room_id_event_id_user_id_profile_tag on event_push_actions(room_id, event_id, user_id, profile_tag); -- cgit 1.4.1 From 392773ccb271c8e456c497eec3d3b714f3d0d674 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Wed, 6 Jan 2016 16:44:13 +0000 Subject: Guest users must be joined to a room to see it in /sync --- synapse/api/errors.py | 16 ---- synapse/api/filtering.py | 12 --- synapse/handlers/sync.py | 152 +++++++++++------------------------ synapse/rest/client/v2_alpha/sync.py | 7 +- 4 files changed, 50 insertions(+), 137 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 8bc7b9e6db..d4037b3d55 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -120,22 +120,6 @@ class AuthError(SynapseError): super(AuthError, self).__init__(*args, **kwargs) -class GuestAccessError(AuthError): - """An error raised when a there is a problem with a guest user accessing - a room""" - - def __init__(self, rooms, *args, **kwargs): - self.rooms = rooms - super(GuestAccessError, self).__init__(*args, **kwargs) - - def error_dict(self): - return cs_error( - self.msg, - self.errcode, - rooms=self.rooms, - ) - - class EventSizeError(SynapseError): """An error raised when an event is too big.""" diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index 5287aaa757..4390d01e38 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -149,9 +149,6 @@ class FilterCollection(object): "include_leave", False ) - def list_rooms(self): - return self.room_filter.list_rooms() - def timeline_limit(self): return self.room_timeline_filter.limit() @@ -184,15 +181,6 @@ class Filter(object): def __init__(self, filter_json): self.filter_json = filter_json - def list_rooms(self): - """The list of room_id strings this filter restricts the output to - or None if the this filter doesn't list the room ids. - """ - if "rooms" in self.filter_json: - return list(set(self.filter_json["rooms"])) - else: - return None - def check(self, event): """Checks whether the filter matches the given event. 
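
# Before this commit a guest had to enumerate the rooms it wanted in its sync
# filter; with list_rooms() removed above, that requirement goes away. The
# filter shape below is an assumption based on the removed
# filter_json["rooms"] lookup and the removed guest check in the /sync
# servlet later in this commit.
old_guest_sync_filter = {
    "room": {
        "rooms": ["!someworldreadableroom:example.com"],
    },
}
# After this commit the guest's /sync is built from
# store.get_rooms_for_user(...), i.e. only rooms the guest has actually
# joined, so guests no longer need to supply an explicit room list.
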
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index b63a27b380..fdccb52231 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -15,8 +15,8 @@ from ._base import BaseHandler +from synapse.streams.config import PaginationConfig from synapse.api.constants import Membership, EventTypes -from synapse.api.errors import GuestAccessError from synapse.util import unwrapFirstError from twisted.internet import defer @@ -29,8 +29,8 @@ logger = logging.getLogger(__name__) SyncConfig = collections.namedtuple("SyncConfig", [ "user", - "is_guest", "filter", + "is_guest", ]) @@ -118,8 +118,6 @@ class SyncResult(collections.namedtuple("SyncResult", [ self.presence or self.joined or self.invited or self.archived ) -GuestRoom = collections.namedtuple("GuestRoom", ("room_id", "membership")) - class SyncHandler(BaseHandler): @@ -138,18 +136,6 @@ class SyncHandler(BaseHandler): A Deferred SyncResult. """ - if sync_config.is_guest: - bad_rooms = [] - for room_id in sync_config.filter.list_rooms(): - world_readable = yield self._is_world_readable(room_id) - if not world_readable: - bad_rooms.append(room_id) - - if bad_rooms: - raise GuestAccessError( - bad_rooms, 403, "Guest access not allowed" - ) - if timeout == 0 or since_token is None or full_state: # we are going to return immediately, so don't bother calling # notifier.wait_for_events. @@ -166,17 +152,6 @@ class SyncHandler(BaseHandler): ) defer.returnValue(result) - @defer.inlineCallbacks - def _is_world_readable(self, room_id): - state = yield self.hs.get_state_handler().get_current_state( - room_id, - EventTypes.RoomHistoryVisibility - ) - if state and "history_visibility" in state.content: - defer.returnValue(state.content["history_visibility"] == "world_readable") - else: - defer.returnValue(False) - def current_sync_for_user(self, sync_config, since_token=None, full_state=False): """Get the sync for client needed to match what the server has now. @@ -200,52 +175,37 @@ class SyncHandler(BaseHandler): """ now_token = yield self.event_sources.get_current_token() - if sync_config.is_guest: - room_list = [ - GuestRoom(room_id, Membership.JOIN) - for room_id in sync_config.filter.list_rooms() - ] - - account_data = {} - account_data_by_room = {} - tags_by_room = {} + now_token, ephemeral_by_room = yield self.ephemeral_by_room( + sync_config, now_token + ) - else: - membership_list = (Membership.INVITE, Membership.JOIN) - if sync_config.filter.include_leave: - membership_list += (Membership.LEAVE, Membership.BAN) + presence_stream = self.event_sources.sources["presence"] + # TODO (mjark): This looks wrong, shouldn't we be getting the presence + # UP to the present rather than after the present? 
+ pagination_config = PaginationConfig(from_token=now_token) + presence, _ = yield presence_stream.get_pagination_rows( + user=sync_config.user, + pagination_config=pagination_config.get_source_config("presence"), + key=None + ) - room_list = yield self.store.get_rooms_for_user_where_membership_is( - user_id=sync_config.user.to_string(), - membership_list=membership_list - ) + membership_list = (Membership.INVITE, Membership.JOIN) + if sync_config.filter.include_leave: + membership_list += (Membership.LEAVE, Membership.BAN) - account_data, account_data_by_room = ( - yield self.store.get_account_data_for_user( - sync_config.user.to_string() - ) - ) + room_list = yield self.store.get_rooms_for_user_where_membership_is( + user_id=sync_config.user.to_string(), + membership_list=membership_list + ) - tags_by_room = yield self.store.get_tags_for_user( + account_data, account_data_by_room = ( + yield self.store.get_account_data_for_user( sync_config.user.to_string() ) - - presence_stream = self.event_sources.sources["presence"] - - joined_room_ids = [ - room.room_id for room in room_list - if room.membership == Membership.JOIN - ] - - presence, _ = yield presence_stream.get_new_events( - from_key=0, - user=sync_config.user, - room_ids=joined_room_ids, - is_guest=sync_config.is_guest, ) - now_token, ephemeral_by_room = yield self.ephemeral_by_room( - sync_config, now_token, joined_room_ids + tags_by_room = yield self.store.get_tags_for_user( + sync_config.user.to_string() ) joined = [] @@ -356,13 +316,11 @@ class SyncHandler(BaseHandler): return account_data_events @defer.inlineCallbacks - def ephemeral_by_room(self, sync_config, now_token, room_ids, - since_token=None): + def ephemeral_by_room(self, sync_config, now_token, since_token=None): """Get the ephemeral events for each room the user is in Args: sync_config (SyncConfig): The flags, filters and user for the sync. now_token (StreamToken): Where the server is currently up to. - room_ids (list): List of room id strings to get data for. since_token (StreamToken): Where the server was when the client last synced. 
Returns: @@ -373,6 +331,9 @@ class SyncHandler(BaseHandler): typing_key = since_token.typing_key if since_token else "0" + rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string()) + room_ids = [room.room_id for room in rooms] + typing_source = self.event_sources.sources["typing"] typing, typing_key = yield typing_source.get_new_events( user=sync_config.user, @@ -450,38 +411,8 @@ class SyncHandler(BaseHandler): """ now_token = yield self.event_sources.get_current_token() - if sync_config.is_guest: - room_ids = sync_config.filter.list_rooms() - - tags_by_room = {} - account_data = {} - account_data_by_room = {} - - else: - rooms = yield self.store.get_rooms_for_user( - sync_config.user.to_string() - ) - room_ids = [room.room_id for room in rooms] - - now_token, ephemeral_by_room = yield self.ephemeral_by_room( - sync_config, now_token, since_token - ) - - tags_by_room = yield self.store.get_updated_tags( - sync_config.user.to_string(), - since_token.account_data_key, - ) - - account_data, account_data_by_room = ( - yield self.store.get_updated_account_data_for_user( - sync_config.user.to_string(), - since_token.account_data_key, - ) - ) - - now_token, ephemeral_by_room = yield self.ephemeral_by_room( - sync_config, now_token, room_ids, since_token - ) + rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string()) + room_ids = [room.room_id for room in rooms] presence_source = self.event_sources.sources["presence"] presence, presence_key = yield presence_source.get_new_events( @@ -489,10 +420,15 @@ class SyncHandler(BaseHandler): from_key=since_token.presence_key, limit=sync_config.filter.presence_limit(), room_ids=room_ids, - is_guest=sync_config.is_guest, + # /sync doesn't support guest access, they can't get to this point in code + is_guest=False, ) now_token = now_token.copy_and_replace("presence_key", presence_key) + now_token, ephemeral_by_room = yield self.ephemeral_by_room( + sync_config, now_token, since_token + ) + rm_handler = self.hs.get_handlers().room_member_handler app_service = yield self.store.get_app_service_by_user_id( sync_config.user.to_string() @@ -512,8 +448,18 @@ class SyncHandler(BaseHandler): from_key=since_token.room_key, to_key=now_token.room_key, limit=timeline_limit + 1, - room_ids=room_ids if sync_config.is_guest else (), - is_guest=sync_config.is_guest, + ) + + tags_by_room = yield self.store.get_updated_tags( + sync_config.user.to_string(), + since_token.account_data_key, + ) + + account_data, account_data_by_room = ( + yield self.store.get_updated_account_data_for_user( + sync_config.user.to_string(), + since_token.account_data_key, + ) ) joined = [] diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index 35a70ffad1..dc3e6f70b7 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -120,15 +120,10 @@ class SyncRestServlet(RestServlet): except: filter = FilterCollection({}) - if is_guest and filter.list_rooms() is None: - raise SynapseError( - 400, "Guest users must provide a list of rooms in the filter" - ) - sync_config = SyncConfig( user=user, - is_guest=is_guest, filter=filter, + is_guest=is_guest, ) if since is not None: -- cgit 1.4.1 From 6c28ac260c2ce4bf93737e53ea3297bff08924c7 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Thu, 7 Jan 2016 04:26:29 +0000 Subject: copyrights --- contrib/cmdclient/console.py | 2 +- contrib/cmdclient/http.py | 2 +- contrib/experiments/cursesio.py | 2 +- contrib/experiments/test_messaging.py | 2 +- 
contrib/graph/graph.py | 2 +- contrib/graph/graph2.py | 2 +- scripts-dev/copyrighter-sql.pl | 4 ++-- scripts-dev/copyrighter.pl | 4 ++-- scripts/register_new_matrix_user | 2 +- scripts/synapse_port_db | 2 +- setup.py | 2 +- synapse/__init__.py | 2 +- synapse/api/__init__.py | 2 +- synapse/api/constants.py | 2 +- synapse/api/errors.py | 2 +- synapse/api/filtering.py | 2 +- synapse/api/ratelimiting.py | 2 +- synapse/api/urls.py | 2 +- synapse/app/__init__.py | 2 +- synapse/app/homeserver.py | 2 +- synapse/app/synctl.py | 2 +- synapse/appservice/__init__.py | 2 +- synapse/appservice/api.py | 2 +- synapse/appservice/scheduler.py | 2 +- synapse/config/__init__.py | 2 +- synapse/config/__main__.py | 2 +- synapse/config/_base.py | 2 +- synapse/config/appservice.py | 2 +- synapse/config/captcha.py | 2 +- synapse/config/cas.py | 2 +- synapse/config/database.py | 2 +- synapse/config/homeserver.py | 2 +- synapse/config/key.py | 2 +- synapse/config/logger.py | 2 +- synapse/config/metrics.py | 2 +- synapse/config/password.py | 2 +- synapse/config/ratelimiting.py | 2 +- synapse/config/registration.py | 2 +- synapse/config/server.py | 2 +- synapse/config/tls.py | 2 +- synapse/config/voip.py | 2 +- synapse/crypto/__init__.py | 2 +- synapse/crypto/context_factory.py | 2 +- synapse/crypto/event_signing.py | 2 +- synapse/crypto/keyclient.py | 2 +- synapse/crypto/keyring.py | 2 +- synapse/events/__init__.py | 2 +- synapse/events/builder.py | 2 +- synapse/events/snapshot.py | 2 +- synapse/events/utils.py | 2 +- synapse/events/validator.py | 2 +- synapse/federation/__init__.py | 2 +- synapse/federation/federation_base.py | 2 +- synapse/federation/federation_client.py | 2 +- synapse/federation/federation_server.py | 2 +- synapse/federation/persistence.py | 2 +- synapse/federation/replication.py | 2 +- synapse/federation/transaction_queue.py | 2 +- synapse/federation/transport/__init__.py | 2 +- synapse/federation/transport/client.py | 2 +- synapse/federation/transport/server.py | 2 +- synapse/federation/units.py | 2 +- synapse/handlers/__init__.py | 2 +- synapse/handlers/account_data.py | 2 +- synapse/handlers/admin.py | 2 +- synapse/handlers/appservice.py | 2 +- synapse/handlers/directory.py | 2 +- synapse/handlers/events.py | 2 +- synapse/handlers/federation.py | 2 +- synapse/handlers/identity.py | 2 +- synapse/handlers/presence.py | 2 +- synapse/handlers/profile.py | 2 +- synapse/handlers/receipts.py | 2 +- synapse/handlers/search.py | 2 +- synapse/handlers/typing.py | 2 +- synapse/http/__init__.py | 2 +- synapse/http/client.py | 2 +- synapse/http/endpoint.py | 2 +- synapse/http/matrixfederationclient.py | 2 +- synapse/http/server.py | 2 +- synapse/http/servlet.py | 2 +- synapse/metrics/__init__.py | 2 +- synapse/metrics/metric.py | 2 +- synapse/metrics/resource.py | 2 +- synapse/push/__init__.py | 2 +- synapse/push/baserules.py | 2 +- synapse/push/httppusher.py | 2 +- synapse/push/push_rule_evaluator.py | 2 +- synapse/push/pusherpool.py | 2 +- synapse/push/rulekinds.py | 2 +- synapse/python_dependencies.py | 2 +- synapse/rest/__init__.py | 2 +- synapse/rest/client/__init__.py | 2 +- synapse/rest/client/v1/__init__.py | 2 +- synapse/rest/client/v1/admin.py | 2 +- synapse/rest/client/v1/base.py | 2 +- synapse/rest/client/v1/directory.py | 2 +- synapse/rest/client/v1/events.py | 2 +- synapse/rest/client/v1/initial_sync.py | 2 +- synapse/rest/client/v1/login.py | 2 +- synapse/rest/client/v1/presence.py | 2 +- synapse/rest/client/v1/profile.py | 2 +- synapse/rest/client/v1/push_rule.py | 2 +- 
synapse/rest/client/v1/pusher.py | 2 +- synapse/rest/client/v1/register.py | 2 +- synapse/rest/client/v1/room.py | 2 +- synapse/rest/client/v1/transactions.py | 2 +- synapse/rest/client/v1/voip.py | 2 +- synapse/rest/client/v2_alpha/__init__.py | 2 +- synapse/rest/client/v2_alpha/_base.py | 2 +- synapse/rest/client/v2_alpha/account.py | 2 +- synapse/rest/client/v2_alpha/account_data.py | 2 +- synapse/rest/client/v2_alpha/auth.py | 2 +- synapse/rest/client/v2_alpha/filter.py | 2 +- synapse/rest/client/v2_alpha/keys.py | 2 +- synapse/rest/client/v2_alpha/receipts.py | 2 +- synapse/rest/client/v2_alpha/sync.py | 2 +- synapse/rest/client/v2_alpha/tags.py | 2 +- synapse/rest/client/v2_alpha/tokenrefresh.py | 2 +- synapse/rest/key/__init__.py | 2 +- synapse/rest/key/v1/__init__.py | 2 +- synapse/rest/key/v1/server_key_resource.py | 2 +- synapse/rest/key/v2/__init__.py | 2 +- synapse/rest/key/v2/local_key_resource.py | 2 +- synapse/rest/key/v2/remote_key_resource.py | 2 +- synapse/rest/media/v0/content_repository.py | 2 +- synapse/rest/media/v1/__init__.py | 2 +- synapse/rest/media/v1/base_resource.py | 2 +- synapse/rest/media/v1/download_resource.py | 2 +- synapse/rest/media/v1/filepath.py | 2 +- synapse/rest/media/v1/identicon_resource.py | 2 +- synapse/rest/media/v1/media_repository.py | 2 +- synapse/rest/media/v1/thumbnailer.py | 2 +- synapse/rest/media/v1/upload_resource.py | 2 +- synapse/server.py | 2 +- synapse/state.py | 2 +- synapse/storage/__init__.py | 2 +- synapse/storage/_base.py | 2 +- synapse/storage/account_data.py | 2 +- synapse/storage/appservice.py | 2 +- synapse/storage/background_updates.py | 2 +- synapse/storage/directory.py | 2 +- synapse/storage/end_to_end_keys.py | 2 +- synapse/storage/engines/__init__.py | 2 +- synapse/storage/engines/_base.py | 2 +- synapse/storage/engines/postgres.py | 2 +- synapse/storage/engines/sqlite3.py | 2 +- synapse/storage/event_federation.py | 2 +- synapse/storage/events.py | 2 +- synapse/storage/filtering.py | 2 +- synapse/storage/keys.py | 2 +- synapse/storage/media_repository.py | 2 +- synapse/storage/presence.py | 2 +- synapse/storage/profile.py | 2 +- synapse/storage/push_rule.py | 2 +- synapse/storage/pusher.py | 2 +- synapse/storage/receipts.py | 2 +- synapse/storage/rejections.py | 2 +- synapse/storage/room.py | 2 +- synapse/storage/roommember.py | 2 +- synapse/storage/schema/delta/11/v11.sql | 2 +- synapse/storage/schema/delta/12/v12.sql | 2 +- synapse/storage/schema/delta/13/v13.sql | 2 +- synapse/storage/schema/delta/14/upgrade_appservice_db.py | 2 +- synapse/storage/schema/delta/14/v14.sql | 2 +- synapse/storage/schema/delta/15/appservice_txns.sql | 2 +- synapse/storage/schema/delta/17/drop_indexes.sql | 2 +- synapse/storage/schema/delta/17/server_keys.sql | 2 +- synapse/storage/schema/delta/18/server_keys_bigger_ints.sql | 2 +- synapse/storage/schema/delta/19/event_index.sql | 2 +- synapse/storage/schema/delta/20/pushers.py | 2 +- synapse/storage/schema/delta/21/end_to_end_keys.sql | 2 +- synapse/storage/schema/delta/21/receipts.sql | 2 +- synapse/storage/schema/delta/22/receipts_index.sql | 2 +- synapse/storage/schema/delta/23/drop_state_index.sql | 2 +- synapse/storage/schema/delta/23/refresh_tokens.sql | 2 +- synapse/storage/schema/delta/24/stats_reporting.sql | 2 +- synapse/storage/schema/delta/25/00background_updates.sql | 2 +- synapse/storage/schema/delta/25/fts.py | 2 +- synapse/storage/schema/delta/25/guest_access.sql | 2 +- synapse/storage/schema/delta/25/history_visibility.sql | 2 +- 
synapse/storage/schema/delta/25/tags.sql | 2 +- synapse/storage/schema/delta/26/account_data.sql | 2 +- synapse/storage/schema/delta/27/account_data.sql | 2 +- synapse/storage/schema/delta/27/forgotten_memberships.sql | 2 +- synapse/storage/schema/delta/27/ts.py | 2 +- synapse/storage/schema/full_schemas/11/event_edges.sql | 2 +- synapse/storage/schema/full_schemas/11/event_signatures.sql | 2 +- synapse/storage/schema/full_schemas/11/im.sql | 2 +- synapse/storage/schema/full_schemas/11/keys.sql | 2 +- synapse/storage/schema/full_schemas/11/media_repository.sql | 2 +- synapse/storage/schema/full_schemas/11/presence.sql | 2 +- synapse/storage/schema/full_schemas/11/profiles.sql | 2 +- synapse/storage/schema/full_schemas/11/redactions.sql | 2 +- synapse/storage/schema/full_schemas/11/room_aliases.sql | 2 +- synapse/storage/schema/full_schemas/11/state.sql | 2 +- synapse/storage/schema/full_schemas/11/transactions.sql | 2 +- synapse/storage/schema/full_schemas/11/users.sql | 2 +- synapse/storage/schema/full_schemas/16/application_services.sql | 2 +- synapse/storage/schema/full_schemas/16/event_edges.sql | 2 +- synapse/storage/schema/full_schemas/16/event_signatures.sql | 2 +- synapse/storage/schema/full_schemas/16/im.sql | 2 +- synapse/storage/schema/full_schemas/16/keys.sql | 2 +- synapse/storage/schema/full_schemas/16/media_repository.sql | 2 +- synapse/storage/schema/full_schemas/16/presence.sql | 2 +- synapse/storage/schema/full_schemas/16/profiles.sql | 2 +- synapse/storage/schema/full_schemas/16/push.sql | 2 +- synapse/storage/schema/full_schemas/16/redactions.sql | 2 +- synapse/storage/schema/full_schemas/16/room_aliases.sql | 2 +- synapse/storage/schema/full_schemas/16/state.sql | 2 +- synapse/storage/schema/full_schemas/16/transactions.sql | 2 +- synapse/storage/schema/full_schemas/16/users.sql | 2 +- synapse/storage/schema/schema_version.sql | 2 +- synapse/storage/search.py | 2 +- synapse/storage/signatures.py | 2 +- synapse/storage/state.py | 2 +- synapse/storage/stream.py | 2 +- synapse/storage/tags.py | 2 +- synapse/storage/transactions.py | 2 +- synapse/storage/util/__init__.py | 2 +- synapse/storage/util/id_generators.py | 2 +- synapse/streams/__init__.py | 2 +- synapse/streams/config.py | 2 +- synapse/streams/events.py | 2 +- synapse/types.py | 2 +- synapse/util/__init__.py | 2 +- synapse/util/async.py | 2 +- synapse/util/caches/__init__.py | 2 +- synapse/util/caches/descriptors.py | 2 +- synapse/util/caches/dictionary_cache.py | 2 +- synapse/util/caches/expiringcache.py | 2 +- synapse/util/caches/lrucache.py | 2 +- synapse/util/caches/snapshot_cache.py | 2 +- synapse/util/debug.py | 2 +- synapse/util/distributor.py | 2 +- synapse/util/frozenutils.py | 2 +- synapse/util/jsonobject.py | 2 +- synapse/util/logcontext.py | 2 +- synapse/util/logutils.py | 2 +- synapse/util/ratelimitutils.py | 2 +- synapse/util/retryutils.py | 2 +- synapse/util/stringutils.py | 2 +- tests/__init__.py | 2 +- tests/api/test_filtering.py | 2 +- tests/appservice/__init__.py | 2 +- tests/appservice/test_appservice.py | 2 +- tests/appservice/test_scheduler.py | 2 +- tests/crypto/__init__.py | 2 +- tests/crypto/test_event_signing.py | 2 +- tests/events/test_utils.py | 2 +- tests/federation/test_federation.py | 2 +- tests/handlers/test_appservice.py | 2 +- tests/handlers/test_auth.py | 2 +- tests/handlers/test_directory.py | 2 +- tests/handlers/test_federation.py | 2 +- tests/handlers/test_presence.py | 2 +- tests/handlers/test_presencelike.py | 2 +- tests/handlers/test_profile.py | 2 +- 
tests/handlers/test_room.py | 2 +- tests/handlers/test_typing.py | 2 +- tests/metrics/test_metric.py | 2 +- tests/rest/__init__.py | 2 +- tests/rest/client/__init__.py | 2 +- tests/rest/client/v1/__init__.py | 2 +- tests/rest/client/v1/test_events.py | 2 +- tests/rest/client/v1/test_presence.py | 2 +- tests/rest/client/v1/test_profile.py | 2 +- tests/rest/client/v1/test_rooms.py | 2 +- tests/rest/client/v1/test_typing.py | 2 +- tests/rest/client/v1/utils.py | 2 +- tests/rest/client/v2_alpha/__init__.py | 2 +- tests/rest/client/v2_alpha/test_filter.py | 2 +- tests/storage/event_injector.py | 2 +- tests/storage/test__base.py | 2 +- tests/storage/test_appservice.py | 2 +- tests/storage/test_base.py | 2 +- tests/storage/test_directory.py | 2 +- tests/storage/test_events.py | 2 +- tests/storage/test_presence.py | 2 +- tests/storage/test_profile.py | 2 +- tests/storage/test_redaction.py | 2 +- tests/storage/test_registration.py | 2 +- tests/storage/test_room.py | 2 +- tests/storage/test_roommember.py | 2 +- tests/storage/test_stream.py | 2 +- tests/test_distributor.py | 2 +- tests/test_state.py | 2 +- tests/test_test_utils.py | 2 +- tests/test_types.py | 2 +- tests/unittest.py | 2 +- tests/util/__init__.py | 2 +- tests/util/test_dict_cache.py | 2 +- tests/util/test_lrucache.py | 2 +- tests/util/test_snapshot_cache.py | 2 +- tests/utils.py | 2 +- 295 files changed, 297 insertions(+), 297 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/contrib/cmdclient/console.py b/contrib/cmdclient/console.py index d9c6ec6a70..8bb03ce66a 100755 --- a/contrib/cmdclient/console.py +++ b/contrib/cmdclient/console.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2014 OpenMarket Ltd +# Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/contrib/cmdclient/http.py b/contrib/cmdclient/http.py index 869f782ec1..4186897316 100644 --- a/contrib/cmdclient/http.py +++ b/contrib/cmdclient/http.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014 OpenMarket Ltd +# Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/contrib/experiments/cursesio.py b/contrib/experiments/cursesio.py index 95d87a1fda..44afe81008 100644 --- a/contrib/experiments/cursesio.py +++ b/contrib/experiments/cursesio.py @@ -1,4 +1,4 @@ -# Copyright 2014 OpenMarket Ltd +# Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/contrib/experiments/test_messaging.py b/contrib/experiments/test_messaging.py index fedf786cec..85c9c11984 100644 --- a/contrib/experiments/test_messaging.py +++ b/contrib/experiments/test_messaging.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014 OpenMarket Ltd +# Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/contrib/graph/graph.py b/contrib/graph/graph.py index b2acadcf5e..afd1d446b4 100644 --- a/contrib/graph/graph.py +++ b/contrib/graph/graph.py @@ -1,4 +1,4 @@ -# Copyright 2014 OpenMarket Ltd +# Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/contrib/graph/graph2.py b/contrib/graph/graph2.py index d0d2cfe7c0..1ccad65728 100644 --- a/contrib/graph/graph2.py +++ b/contrib/graph/graph2.py @@ -1,4 +1,4 @@ -# Copyright 2014 OpenMarket Ltd +# Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts-dev/copyrighter-sql.pl b/scripts-dev/copyrighter-sql.pl index 890e51e587..13e630fc11 100755 --- a/scripts-dev/copyrighter-sql.pl +++ b/scripts-dev/copyrighter-sql.pl @@ -1,5 +1,5 @@ #!/usr/bin/perl -pi -# Copyright 2015 OpenMarket Ltd +# Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ # limitations under the License. $copyright = < Date: Mon, 11 Jan 2016 15:29:57 +0000 Subject: Introduce a Requester object This tracks data about the entity which made the request. This is instead of passing around a tuple, which requires call-site modifications every time a new piece of optional context is passed around. I tried to introduce a User object. I gave up. --- synapse/api/auth.py | 8 +- synapse/rest/client/v1/admin.py | 5 +- synapse/rest/client/v1/directory.py | 8 +- synapse/rest/client/v1/events.py | 19 +++-- synapse/rest/client/v1/initial_sync.py | 4 +- synapse/rest/client/v1/presence.py | 16 ++-- synapse/rest/client/v1/profile.py | 8 +- synapse/rest/client/v1/push_rule.py | 13 +-- synapse/rest/client/v1/pusher.py | 5 +- synapse/rest/client/v1/room.py | 116 ++++++++++++++++----------- synapse/rest/client/v1/voip.py | 4 +- synapse/rest/client/v2_alpha/account.py | 20 ++--- synapse/rest/client/v2_alpha/account_data.py | 8 +- synapse/rest/client/v2_alpha/filter.py | 8 +- synapse/rest/client/v2_alpha/keys.py | 16 ++-- synapse/rest/client/v2_alpha/receipts.py | 4 +- synapse/rest/client/v2_alpha/sync.py | 11 +-- synapse/rest/client/v2_alpha/tags.py | 12 +-- synapse/rest/media/v0/content_repository.py | 6 +- synapse/rest/media/v1/upload_resource.py | 4 +- synapse/types.py | 3 + tests/api/test_auth.py | 12 +-- tests/rest/client/v1/test_presence.py | 5 +- tests/rest/client/v1/test_profile.py | 7 +- 24 files changed, 178 insertions(+), 144 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index b86c6c8399..876869bb74 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -22,7 +22,7 @@ from twisted.internet import defer from synapse.api.constants import EventTypes, Membership, JoinRules from synapse.api.errors import AuthError, Codes, SynapseError, EventSizeError -from synapse.types import RoomID, UserID, EventID +from synapse.types import Requester, RoomID, UserID, EventID from synapse.util.logutils import log_function from unpaddedbase64 import decode_base64 @@ -534,7 +534,9 @@ class Auth(object): request.authenticated_entity = user_id - defer.returnValue((UserID.from_string(user_id), "", False)) + defer.returnValue( + Requester(UserID.from_string(user_id), "", False) + ) return except KeyError: pass # normal users won't have the user_id query parameter set. 
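
# The synapse/types.py hunk (+3 in the diffstat) is not shown in this view;
# judging from the call sites in this patch, Requester is a small namedtuple
# along these lines -- the "access_token_id" field name is an assumption,
# while "user" and "is_guest" are used directly as requester.user and
# requester.is_guest below.
import collections

Requester = collections.namedtuple(
    "Requester", ["user", "access_token_id", "is_guest"]
)

# Old call sites unpacked a tuple:
#     user, token_id, is_guest = yield self.auth.get_user_by_req(request)
# New call sites keep the whole object:
#     requester = yield self.auth.get_user_by_req(request)
#     ... requester.user ... requester.is_guest ...
# so extra per-request context can be added later without touching every
# caller again.
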
@@ -564,7 +566,7 @@ class Auth(object): request.authenticated_entity = user.to_string() - defer.returnValue((user, token_id, is_guest,)) + defer.returnValue(Requester(user, token_id, is_guest)) except KeyError: raise AuthError( self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.", diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index 4d724dce72..e2f5eb7b29 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -31,8 +31,9 @@ class WhoisRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, user_id): target_user = UserID.from_string(user_id) - auth_user, _, _ = yield self.auth.get_user_by_req(request) - is_admin = yield self.auth.is_server_admin(auth_user) + requester = yield self.auth.get_user_by_req(request) + auth_user = requester.user + is_admin = yield self.auth.is_server_admin(requester.user) if not is_admin and target_user != auth_user: raise AuthError(403, "You are not a server admin") diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py index 7eef6bf5dc..74ec1e50e0 100644 --- a/synapse/rest/client/v1/directory.py +++ b/synapse/rest/client/v1/directory.py @@ -69,9 +69,9 @@ class ClientDirectoryServer(ClientV1RestServlet): try: # try to auth as a user - user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) try: - user_id = user.to_string() + user_id = requester.user.to_string() yield dir_handler.create_association( user_id, room_alias, room_id, servers ) @@ -116,8 +116,8 @@ class ClientDirectoryServer(ClientV1RestServlet): # fallback to default user behaviour if they aren't an AS pass - user, _, _ = yield self.auth.get_user_by_req(request) - + requester = yield self.auth.get_user_by_req(request) + user = requester.user is_admin = yield self.auth.is_server_admin(user) if not is_admin: raise AuthError(403, "You need to be a server admin") diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py index 631f2ca052..e89118b37d 100644 --- a/synapse/rest/client/v1/events.py +++ b/synapse/rest/client/v1/events.py @@ -34,10 +34,11 @@ class EventStreamRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request): - auth_user, _, is_guest = yield self.auth.get_user_by_req( + requester = yield self.auth.get_user_by_req( request, - allow_guest=True + allow_guest=True, ) + is_guest = requester.is_guest room_id = None if is_guest: if "room_id" not in request.args: @@ -56,9 +57,13 @@ class EventStreamRestServlet(ClientV1RestServlet): as_client_event = "raw" not in request.args chunk = yield handler.get_stream( - auth_user.to_string(), pagin_config, timeout=timeout, - as_client_event=as_client_event, affect_presence=(not is_guest), - room_id=room_id, is_guest=is_guest + requester.user.to_string(), + pagin_config, + timeout=timeout, + as_client_event=as_client_event, + affect_presence=(not is_guest), + room_id=room_id, + is_guest=is_guest, ) except: logger.exception("Event stream failed") @@ -80,9 +85,9 @@ class EventRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, event_id): - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) handler = self.handlers.event_handler - event = yield handler.get_event(auth_user, event_id) + event = yield handler.get_event(requester.user, event_id) time_now = self.clock.time_msec() if event: diff --git a/synapse/rest/client/v1/initial_sync.py 
b/synapse/rest/client/v1/initial_sync.py index 541319c351..ad161bdbab 100644 --- a/synapse/rest/client/v1/initial_sync.py +++ b/synapse/rest/client/v1/initial_sync.py @@ -25,13 +25,13 @@ class InitialSyncRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request): - user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) as_client_event = "raw" not in request.args pagination_config = PaginationConfig.from_request(request) handler = self.handlers.message_handler include_archived = request.args.get("archived", None) == ["true"] content = yield handler.snapshot_all_rooms( - user_id=user.to_string(), + user_id=requester.user.to_string(), pagin_config=pagination_config, as_client_event=as_client_event, include_archived=include_archived, diff --git a/synapse/rest/client/v1/presence.py b/synapse/rest/client/v1/presence.py index 855385ec16..a6f8754e32 100644 --- a/synapse/rest/client/v1/presence.py +++ b/synapse/rest/client/v1/presence.py @@ -32,17 +32,17 @@ class PresenceStatusRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, user_id): - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) user = UserID.from_string(user_id) state = yield self.handlers.presence_handler.get_state( - target_user=user, auth_user=auth_user) + target_user=user, auth_user=requester.user) defer.returnValue((200, state)) @defer.inlineCallbacks def on_PUT(self, request, user_id): - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) user = UserID.from_string(user_id) state = {} @@ -64,7 +64,7 @@ class PresenceStatusRestServlet(ClientV1RestServlet): raise SynapseError(400, "Unable to parse state") yield self.handlers.presence_handler.set_state( - target_user=user, auth_user=auth_user, state=state) + target_user=user, auth_user=requester.user, state=state) defer.returnValue((200, {})) @@ -77,13 +77,13 @@ class PresenceListRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, user_id): - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) user = UserID.from_string(user_id) if not self.hs.is_mine(user): raise SynapseError(400, "User not hosted on this Home Server") - if auth_user != user: + if requester.user != user: raise SynapseError(400, "Cannot get another user's presence list") presence = yield self.handlers.presence_handler.get_presence_list( @@ -97,13 +97,13 @@ class PresenceListRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_POST(self, request, user_id): - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) user = UserID.from_string(user_id) if not self.hs.is_mine(user): raise SynapseError(400, "User not hosted on this Home Server") - if auth_user != user: + if requester.user != user: raise SynapseError( 400, "Cannot modify another user's presence list") diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py index d4bc9e076c..b15defdd07 100644 --- a/synapse/rest/client/v1/profile.py +++ b/synapse/rest/client/v1/profile.py @@ -37,7 +37,7 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_PUT(self, request, user_id): - auth_user, _, _ = yield self.auth.get_user_by_req(request, allow_guest=True) + requester = yield self.auth.get_user_by_req(request, 
allow_guest=True) user = UserID.from_string(user_id) try: @@ -47,7 +47,7 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet): defer.returnValue((400, "Unable to parse name")) yield self.handlers.profile_handler.set_displayname( - user, auth_user, new_name) + user, requester.user, new_name) defer.returnValue((200, {})) @@ -70,7 +70,7 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_PUT(self, request, user_id): - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) user = UserID.from_string(user_id) try: @@ -80,7 +80,7 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet): defer.returnValue((400, "Unable to parse name")) yield self.handlers.profile_handler.set_avatar_url( - user, auth_user, new_name) + user, requester.user, new_name) defer.returnValue((200, {})) diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index 2aab28ae7b..c0a21c0c12 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -43,7 +43,7 @@ class PushRuleRestServlet(ClientV1RestServlet): except InvalidRuleException as e: raise SynapseError(400, e.message) - user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) if '/' in spec['rule_id'] or '\\' in spec['rule_id']: raise SynapseError(400, "rule_id may not contain slashes") @@ -51,7 +51,7 @@ class PushRuleRestServlet(ClientV1RestServlet): content = _parse_json(request) if 'attr' in spec: - self.set_rule_attr(user.to_string(), spec, content) + self.set_rule_attr(requester.user, spec, content) defer.returnValue((200, {})) try: @@ -73,7 +73,7 @@ class PushRuleRestServlet(ClientV1RestServlet): try: yield self.hs.get_datastore().add_push_rule( - user_name=user.to_string(), + user_name=requester.user.to_string(), rule_id=_namespaced_rule_id_from_spec(spec), priority_class=priority_class, conditions=conditions, @@ -92,13 +92,13 @@ class PushRuleRestServlet(ClientV1RestServlet): def on_DELETE(self, request): spec = _rule_spec_from_path(request.postpath) - user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) namespaced_rule_id = _namespaced_rule_id_from_spec(spec) try: yield self.hs.get_datastore().delete_push_rule( - user.to_string(), namespaced_rule_id + requester.user.to_string(), namespaced_rule_id ) defer.returnValue((200, {})) except StoreError as e: @@ -109,7 +109,8 @@ class PushRuleRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request): - user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) + user = requester.user # we build up the full structure and then decide which bits of it # to send which means doing unnecessary work sometimes but is diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py index 81a8786aeb..b162b210bc 100644 --- a/synapse/rest/client/v1/pusher.py +++ b/synapse/rest/client/v1/pusher.py @@ -30,7 +30,8 @@ class PusherRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_POST(self, request): - user, token_id, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) + user = requester.user content = _parse_json(request) @@ -71,7 +72,7 @@ class PusherRestServlet(ClientV1RestServlet): try: yield pusher_pool.add_pusher( user_name=user.to_string(), - access_token=token_id, + 
access_token=requester.access_token_id, profile_tag=content['profile_tag'], kind=content['kind'], app_id=content['app_id'], diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index 926f77d1c3..7496b26735 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -61,10 +61,14 @@ class RoomCreateRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_POST(self, request): - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) room_config = self.get_room_config(request) - info = yield self.make_room(room_config, auth_user, None) + info = yield self.make_room( + room_config, + requester.user, + None, + ) room_config.update(info) defer.returnValue((200, info)) @@ -124,15 +128,15 @@ class RoomStateEventRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, room_id, event_type, state_key): - user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True) + requester = yield self.auth.get_user_by_req(request, allow_guest=True) msg_handler = self.handlers.message_handler data = yield msg_handler.get_room_data( - user_id=user.to_string(), + user_id=requester.user.to_string(), room_id=room_id, event_type=event_type, state_key=state_key, - is_guest=is_guest, + is_guest=requester.is_guest, ) if not data: @@ -143,7 +147,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_PUT(self, request, room_id, event_type, state_key, txn_id=None): - user, token_id, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) content = _parse_json(request) @@ -151,7 +155,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet): "type": event_type, "content": content, "room_id": room_id, - "sender": user.to_string(), + "sender": requester.user.to_string(), } if state_key is not None: @@ -159,7 +163,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet): msg_handler = self.handlers.message_handler yield msg_handler.create_and_send_event( - event_dict, token_id=token_id, txn_id=txn_id, + event_dict, token_id=requester.access_token_id, txn_id=txn_id, ) defer.returnValue((200, {})) @@ -175,7 +179,7 @@ class RoomSendEventRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_POST(self, request, room_id, event_type, txn_id=None): - user, token_id, _ = yield self.auth.get_user_by_req(request, allow_guest=True) + requester = yield self.auth.get_user_by_req(request, allow_guest=True) content = _parse_json(request) msg_handler = self.handlers.message_handler @@ -184,9 +188,9 @@ class RoomSendEventRestServlet(ClientV1RestServlet): "type": event_type, "content": content, "room_id": room_id, - "sender": user.to_string(), + "sender": requester.user.to_string(), }, - token_id=token_id, + token_id=requester.access_token_id, txn_id=txn_id, ) @@ -220,9 +224,9 @@ class JoinRoomAliasServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_POST(self, request, room_identifier, txn_id=None): - user, token_id, is_guest = yield self.auth.get_user_by_req( + requester = yield self.auth.get_user_by_req( request, - allow_guest=True + allow_guest=True, ) # the identifier could be a room alias or a room id. 
Try one then the @@ -241,24 +245,27 @@ class JoinRoomAliasServlet(ClientV1RestServlet): if is_room_alias: handler = self.handlers.room_member_handler - ret_dict = yield handler.join_room_alias(user, identifier) + ret_dict = yield handler.join_room_alias( + requester.user, + identifier, + ) defer.returnValue((200, ret_dict)) else: # room id msg_handler = self.handlers.message_handler content = {"membership": Membership.JOIN} - if is_guest: + if requester.is_guest: content["kind"] = "guest" yield msg_handler.create_and_send_event( { "type": EventTypes.Member, "content": content, "room_id": identifier.to_string(), - "sender": user.to_string(), - "state_key": user.to_string(), + "sender": requester.user.to_string(), + "state_key": requester.user.to_string(), }, - token_id=token_id, + token_id=requester.access_token_id, txn_id=txn_id, - is_guest=is_guest, + is_guest=requester.is_guest, ) defer.returnValue((200, {"room_id": identifier.to_string()})) @@ -296,11 +303,11 @@ class RoomMemberListRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, room_id): # TODO support Pagination stream API (limit/tokens) - user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) handler = self.handlers.message_handler events = yield handler.get_state_events( room_id=room_id, - user_id=user.to_string(), + user_id=requester.user.to_string(), ) chunk = [] @@ -315,7 +322,8 @@ class RoomMemberListRestServlet(ClientV1RestServlet): try: presence_handler = self.handlers.presence_handler presence_state = yield presence_handler.get_state( - target_user=target_user, auth_user=user + target_user=target_user, + auth_user=requester.user, ) event["content"].update(presence_state) except: @@ -332,7 +340,7 @@ class RoomMessageListRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, room_id): - user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True) + requester = yield self.auth.get_user_by_req(request, allow_guest=True) pagination_config = PaginationConfig.from_request( request, default_limit=10, ) @@ -340,8 +348,8 @@ class RoomMessageListRestServlet(ClientV1RestServlet): handler = self.handlers.message_handler msgs = yield handler.get_messages( room_id=room_id, - user_id=user.to_string(), - is_guest=is_guest, + user_id=requester.user.to_string(), + is_guest=requester.is_guest, pagin_config=pagination_config, as_client_event=as_client_event ) @@ -355,13 +363,13 @@ class RoomStateRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, room_id): - user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True) + requester = yield self.auth.get_user_by_req(request, allow_guest=True) handler = self.handlers.message_handler # Get all the current state for this room events = yield handler.get_state_events( room_id=room_id, - user_id=user.to_string(), - is_guest=is_guest, + user_id=requester.user.to_string(), + is_guest=requester.is_guest, ) defer.returnValue((200, events)) @@ -372,13 +380,13 @@ class RoomInitialSyncRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, room_id): - user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True) + requester = yield self.auth.get_user_by_req(request, allow_guest=True) pagination_config = PaginationConfig.from_request(request) content = yield self.handlers.message_handler.room_initial_sync( room_id=room_id, - user_id=user.to_string(), + user_id=requester.user.to_string(), 
pagin_config=pagination_config, - is_guest=is_guest, + is_guest=requester.is_guest, ) defer.returnValue((200, content)) @@ -394,12 +402,16 @@ class RoomEventContext(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, room_id, event_id): - user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True) + requester = yield self.auth.get_user_by_req(request, allow_guest=True) limit = int(request.args.get("limit", [10])[0]) results = yield self.handlers.room_context_handler.get_event_context( - user, room_id, event_id, limit, is_guest + requester.user, + room_id, + event_id, + limit, + requester.is_guest, ) time_now = self.clock.time_msec() @@ -429,14 +441,18 @@ class RoomMembershipRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_POST(self, request, room_id, membership_action, txn_id=None): - user, token_id, is_guest = yield self.auth.get_user_by_req( + requester = yield self.auth.get_user_by_req( request, - allow_guest=True + allow_guest=True, ) + user = requester.user effective_membership_action = membership_action - if is_guest and membership_action not in {Membership.JOIN, Membership.LEAVE}: + if requester.is_guest and membership_action not in { + Membership.JOIN, + Membership.LEAVE + }: raise AuthError(403, "Guest access not allowed") content = _parse_json(request) @@ -451,7 +467,7 @@ class RoomMembershipRestServlet(ClientV1RestServlet): content["medium"], content["address"], content["id_server"], - token_id, + requester.access_token_id, txn_id ) defer.returnValue((200, {})) @@ -473,7 +489,7 @@ class RoomMembershipRestServlet(ClientV1RestServlet): msg_handler = self.handlers.message_handler content = {"membership": unicode(effective_membership_action)} - if is_guest: + if requester.is_guest: content["kind"] = "guest" yield msg_handler.create_and_send_event( @@ -484,9 +500,9 @@ class RoomMembershipRestServlet(ClientV1RestServlet): "sender": user.to_string(), "state_key": state_key, }, - token_id=token_id, + token_id=requester.access_token_id, txn_id=txn_id, - is_guest=is_guest, + is_guest=requester.is_guest, ) if membership_action == "forget": @@ -524,7 +540,7 @@ class RoomRedactEventRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_POST(self, request, room_id, event_id, txn_id=None): - user, token_id, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) content = _parse_json(request) msg_handler = self.handlers.message_handler @@ -533,10 +549,10 @@ class RoomRedactEventRestServlet(ClientV1RestServlet): "type": EventTypes.Redaction, "content": content, "room_id": room_id, - "sender": user.to_string(), + "sender": requester.user.to_string(), "redacts": event_id, }, - token_id=token_id, + token_id=requester.access_token_id, txn_id=txn_id, ) @@ -564,7 +580,7 @@ class RoomTypingRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_PUT(self, request, room_id, user_id): - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) room_id = urllib.unquote(room_id) target_user = UserID.from_string(urllib.unquote(user_id)) @@ -576,14 +592,14 @@ class RoomTypingRestServlet(ClientV1RestServlet): if content["typing"]: yield typing_handler.started_typing( target_user=target_user, - auth_user=auth_user, + auth_user=requester.user, room_id=room_id, timeout=content.get("timeout", 30000), ) else: yield typing_handler.stopped_typing( target_user=target_user, - auth_user=auth_user, + auth_user=requester.user, room_id=room_id, ) @@ 
-597,12 +613,16 @@ class SearchRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_POST(self, request): - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) content = _parse_json(request) batch = request.args.get("next_batch", [None])[0] - results = yield self.handlers.search_handler.search(auth_user, content, batch) + results = yield self.handlers.search_handler.search( + requester.user, + content, + batch, + ) defer.returnValue((200, results)) diff --git a/synapse/rest/client/v1/voip.py b/synapse/rest/client/v1/voip.py index 860cb0a642..ec4cf8db79 100644 --- a/synapse/rest/client/v1/voip.py +++ b/synapse/rest/client/v1/voip.py @@ -28,7 +28,7 @@ class VoipRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request): - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) turnUris = self.hs.config.turn_uris turnSecret = self.hs.config.turn_shared_secret @@ -37,7 +37,7 @@ class VoipRestServlet(ClientV1RestServlet): defer.returnValue((200, {})) expiry = (self.hs.get_clock().time_msec() + userLifetime) / 1000 - username = "%d:%s" % (expiry, auth_user.to_string()) + username = "%d:%s" % (expiry, requester.user.to_string()) mac = hmac.new(turnSecret, msg=username, digestmod=hashlib.sha1) # We need to use standard padded base64 encoding here diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index ddb6f041cd..fa56249a69 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -55,10 +55,11 @@ class PasswordRestServlet(RestServlet): if LoginType.PASSWORD in result: # if using password, they should also be logged in - auth_user, _, _ = yield self.auth.get_user_by_req(request) - if auth_user.to_string() != result[LoginType.PASSWORD]: + requester = yield self.auth.get_user_by_req(request) + requester_user_id = requester.user.to_string() + if requester_user_id.to_string() != result[LoginType.PASSWORD]: raise LoginError(400, "", Codes.UNKNOWN) - user_id = auth_user.to_string() + user_id = requester_user_id elif LoginType.EMAIL_IDENTITY in result: threepid = result[LoginType.EMAIL_IDENTITY] if 'medium' not in threepid or 'address' not in threepid: @@ -102,10 +103,10 @@ class ThreepidRestServlet(RestServlet): def on_GET(self, request): yield run_on_reactor() - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) threepids = yield self.hs.get_datastore().user_get_threepids( - auth_user.to_string() + requester.user.to_string() ) defer.returnValue((200, {'threepids': threepids})) @@ -120,7 +121,8 @@ class ThreepidRestServlet(RestServlet): raise SynapseError(400, "Missing param", Codes.MISSING_PARAM) threePidCreds = body['threePidCreds'] - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() threepid = yield self.identity_handler.threepid_from_creds(threePidCreds) @@ -135,7 +137,7 @@ class ThreepidRestServlet(RestServlet): raise SynapseError(500, "Invalid response from ID Server") yield self.auth_handler.add_threepid( - auth_user.to_string(), + user_id, threepid['medium'], threepid['address'], threepid['validated_at'], @@ -144,10 +146,10 @@ class ThreepidRestServlet(RestServlet): if 'bind' in body and body['bind']: logger.debug( "Binding emails %s to %s", - threepid, auth_user.to_string() + threepid, user_id 
) yield self.identity_handler.bind_threepid( - threePidCreds, auth_user.to_string() + threePidCreds, user_id ) defer.returnValue((200, {})) diff --git a/synapse/rest/client/v2_alpha/account_data.py b/synapse/rest/client/v2_alpha/account_data.py index 629b04fe7a..985efe2a62 100644 --- a/synapse/rest/client/v2_alpha/account_data.py +++ b/synapse/rest/client/v2_alpha/account_data.py @@ -43,8 +43,8 @@ class AccountDataServlet(RestServlet): @defer.inlineCallbacks def on_PUT(self, request, user_id, account_data_type): - auth_user, _, _ = yield self.auth.get_user_by_req(request) - if user_id != auth_user.to_string(): + requester = yield self.auth.get_user_by_req(request) + if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add account data for other users.") try: @@ -82,8 +82,8 @@ class RoomAccountDataServlet(RestServlet): @defer.inlineCallbacks def on_PUT(self, request, user_id, room_id, account_data_type): - auth_user, _, _ = yield self.auth.get_user_by_req(request) - if user_id != auth_user.to_string(): + requester = yield self.auth.get_user_by_req(request) + if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add account data for other users.") try: diff --git a/synapse/rest/client/v2_alpha/filter.py b/synapse/rest/client/v2_alpha/filter.py index 2af7bfaf99..7695bebc28 100644 --- a/synapse/rest/client/v2_alpha/filter.py +++ b/synapse/rest/client/v2_alpha/filter.py @@ -40,9 +40,9 @@ class GetFilterRestServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, user_id, filter_id): target_user = UserID.from_string(user_id) - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) - if target_user != auth_user: + if target_user != requester.user: raise AuthError(403, "Cannot get filters for other users") if not self.hs.is_mine(target_user): @@ -76,9 +76,9 @@ class CreateFilterRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request, user_id): target_user = UserID.from_string(user_id) - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) - if target_user != auth_user: + if target_user != requester.user: raise AuthError(403, "Cannot create filters for other users") if not self.hs.is_mine(target_user): diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index 24c3554831..f989b08614 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -64,8 +64,8 @@ class KeyUploadServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request, device_id): - auth_user, _, _ = yield self.auth.get_user_by_req(request) - user_id = auth_user.to_string() + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() # TODO: Check that the device_id matches that in the authentication # or derive the device_id from the authentication instead. 
try: @@ -78,8 +78,8 @@ class KeyUploadServlet(RestServlet): device_keys = body.get("device_keys", None) if device_keys: logger.info( - "Updating device_keys for device %r for user %r at %d", - device_id, auth_user, time_now + "Updating device_keys for device %r for user %s at %d", + device_id, user_id, time_now ) # TODO: Sign the JSON with the server key yield self.store.set_e2e_device_keys( @@ -109,8 +109,8 @@ class KeyUploadServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, device_id): - auth_user, _, _ = yield self.auth.get_user_by_req(request) - user_id = auth_user.to_string() + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() result = yield self.store.count_e2e_one_time_keys(user_id, device_id) defer.returnValue((200, {"one_time_key_counts": result})) @@ -182,8 +182,8 @@ class KeyQueryServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, user_id, device_id): - auth_user, _, _ = yield self.auth.get_user_by_req(request) - auth_user_id = auth_user.to_string() + requester = yield self.auth.get_user_by_req(request) + auth_user_id = requester.user.to_string() user_id = user_id if user_id else auth_user_id device_ids = [device_id] if device_id else [] result = yield self.handle_request( diff --git a/synapse/rest/client/v2_alpha/receipts.py b/synapse/rest/client/v2_alpha/receipts.py index 43c23d6090..eb4b369a3d 100644 --- a/synapse/rest/client/v2_alpha/receipts.py +++ b/synapse/rest/client/v2_alpha/receipts.py @@ -40,7 +40,7 @@ class ReceiptRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request, room_id, receipt_type, event_id): - user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) if receipt_type != "m.read": raise SynapseError(400, "Receipt type must be 'm.read'") @@ -48,7 +48,7 @@ class ReceiptRestServlet(RestServlet): yield self.receipts_handler.received_client_receipt( room_id, receipt_type, - user_id=user.to_string(), + user_id=requester.user.to_string(), event_id=event_id ) diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index c05e7d50c8..3867547ade 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -85,9 +85,10 @@ class SyncRestServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request): - user, token_id, is_guest = yield self.auth.get_user_by_req( + requester = yield self.auth.get_user_by_req( request, allow_guest=True ) + user = requester.user timeout = parse_integer(request, "timeout", default=0) since = parse_string(request, "since") @@ -123,7 +124,7 @@ class SyncRestServlet(RestServlet): sync_config = SyncConfig( user=user, filter=filter, - is_guest=is_guest, + is_guest=requester.is_guest, ) if since is not None: @@ -146,15 +147,15 @@ class SyncRestServlet(RestServlet): time_now = self.clock.time_msec() joined = self.encode_joined( - sync_result.joined, filter, time_now, token_id + sync_result.joined, filter, time_now, requester.access_token_id ) invited = self.encode_invited( - sync_result.invited, filter, time_now, token_id + sync_result.invited, filter, time_now, requester.access_token_id ) archived = self.encode_archived( - sync_result.archived, filter, time_now, token_id + sync_result.archived, filter, time_now, requester.access_token_id ) response_content = { diff --git a/synapse/rest/client/v2_alpha/tags.py b/synapse/rest/client/v2_alpha/tags.py index 1bfc36ab2b..42f2203f3d 100644 --- a/synapse/rest/client/v2_alpha/tags.py 
+++ b/synapse/rest/client/v2_alpha/tags.py @@ -42,8 +42,8 @@ class TagListServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, user_id, room_id): - auth_user, _, _ = yield self.auth.get_user_by_req(request) - if user_id != auth_user.to_string(): + requester = yield self.auth.get_user_by_req(request) + if user_id != requester.user.to_string(): raise AuthError(403, "Cannot get tags for other users.") tags = yield self.store.get_tags_for_room(user_id, room_id) @@ -68,8 +68,8 @@ class TagServlet(RestServlet): @defer.inlineCallbacks def on_PUT(self, request, user_id, room_id, tag): - auth_user, _, _ = yield self.auth.get_user_by_req(request) - if user_id != auth_user.to_string(): + requester = yield self.auth.get_user_by_req(request) + if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add tags for other users.") try: @@ -88,8 +88,8 @@ class TagServlet(RestServlet): @defer.inlineCallbacks def on_DELETE(self, request, user_id, room_id, tag): - auth_user, _, _ = yield self.auth.get_user_by_req(request) - if user_id != auth_user.to_string(): + requester = yield self.auth.get_user_by_req(request) + if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add tags for other users.") max_id = yield self.store.remove_tag_from_room(user_id, room_id, tag) diff --git a/synapse/rest/media/v0/content_repository.py b/synapse/rest/media/v0/content_repository.py index dd7a1b2b31..dcf3eaee1f 100644 --- a/synapse/rest/media/v0/content_repository.py +++ b/synapse/rest/media/v0/content_repository.py @@ -66,11 +66,11 @@ class ContentRepoResource(resource.Resource): @defer.inlineCallbacks def map_request_to_name(self, request): # auth the user - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) # namespace all file uploads on the user prefix = base64.urlsafe_b64encode( - auth_user.to_string() + requester.user.to_string() ).replace('=', '') # use a random string for the main portion @@ -94,7 +94,7 @@ class ContentRepoResource(resource.Resource): file_name = prefix + main_part + suffix file_path = os.path.join(self.directory, file_name) logger.info("User %s is uploading a file to path %s", - auth_user.to_string(), + request.user.user_id.to_string(), file_path) # keep trying to make a non-clashing file, with a sensible max attempts diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py index c1e895ee81..9c7ad4ae85 100644 --- a/synapse/rest/media/v1/upload_resource.py +++ b/synapse/rest/media/v1/upload_resource.py @@ -70,7 +70,7 @@ class UploadResource(BaseMediaResource): @request_handler @defer.inlineCallbacks def _async_render_POST(self, request): - auth_user, _, _ = yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) # TODO: The checks here are a bit late. 
The content will have # already been uploaded to a tmp file at this point content_length = request.getHeader("Content-Length") @@ -110,7 +110,7 @@ class UploadResource(BaseMediaResource): content_uri = yield self.create_content( media_type, upload_name, request.content.read(), - content_length, auth_user + content_length, requester.user ) respond_with_json( diff --git a/synapse/types.py b/synapse/types.py index 1ec7b3e103..2095837ba6 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -18,6 +18,9 @@ from synapse.api.errors import SynapseError from collections import namedtuple +Requester = namedtuple("Requester", ["user", "access_token_id", "is_guest"]) + + class DomainSpecificString( namedtuple("DomainSpecificString", ("localpart", "domain")) ): diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 5ff4c8a873..474c5c418f 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -51,8 +51,8 @@ class AuthTestCase(unittest.TestCase): request = Mock(args={}) request.args["access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = Mock(return_value=[""]) - (user, _, _) = yield self.auth.get_user_by_req(request) - self.assertEquals(user.to_string(), self.test_user) + requester = yield self.auth.get_user_by_req(request) + self.assertEquals(requester.user.to_string(), self.test_user) def test_get_user_by_req_user_bad_token(self): self.store.get_app_service_by_token = Mock(return_value=None) @@ -86,8 +86,8 @@ class AuthTestCase(unittest.TestCase): request = Mock(args={}) request.args["access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = Mock(return_value=[""]) - (user, _, _) = yield self.auth.get_user_by_req(request) - self.assertEquals(user.to_string(), self.test_user) + requester = yield self.auth.get_user_by_req(request) + self.assertEquals(requester.user.to_string(), self.test_user) def test_get_user_by_req_appservice_bad_token(self): self.store.get_app_service_by_token = Mock(return_value=None) @@ -121,8 +121,8 @@ class AuthTestCase(unittest.TestCase): request.args["access_token"] = [self.test_token] request.args["user_id"] = [masquerading_user_id] request.requestHeaders.getRawHeaders = Mock(return_value=[""]) - (user, _, _) = yield self.auth.get_user_by_req(request) - self.assertEquals(user.to_string(), masquerading_user_id) + requester = yield self.auth.get_user_by_req(request) + self.assertEquals(requester.user.to_string(), masquerading_user_id) def test_get_user_by_req_appservice_valid_token_bad_user_id(self): masquerading_user_id = "@doppelganger:matrix.org" diff --git a/tests/rest/client/v1/test_presence.py b/tests/rest/client/v1/test_presence.py index d782eadb6a..90b911f879 100644 --- a/tests/rest/client/v1/test_presence.py +++ b/tests/rest/client/v1/test_presence.py @@ -14,7 +14,6 @@ # limitations under the License. 
"""Tests REST events for /presence paths.""" - from tests import unittest from twisted.internet import defer @@ -26,7 +25,7 @@ from synapse.api.constants import PresenceState from synapse.handlers.presence import PresenceHandler from synapse.rest.client.v1 import presence from synapse.rest.client.v1 import events -from synapse.types import UserID +from synapse.types import Requester, UserID from synapse.util.async import run_on_reactor from collections import namedtuple @@ -301,7 +300,7 @@ class PresenceEventStreamTestCase(unittest.TestCase): hs.get_clock().time_msec.return_value = 1000000 def _get_user_by_req(req=None, allow_guest=False): - return (UserID.from_string(myid), "", False) + return Requester(UserID.from_string(myid), "", False) hs.get_v1auth().get_user_by_req = _get_user_by_req diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py index 77b7b06c10..c1a3f52043 100644 --- a/tests/rest/client/v1/test_profile.py +++ b/tests/rest/client/v1/test_profile.py @@ -14,16 +14,15 @@ # limitations under the License. """Tests REST events for /profile paths.""" - from tests import unittest from twisted.internet import defer -from mock import Mock, NonCallableMock +from mock import Mock from ....utils import MockHttpResource, setup_test_homeserver from synapse.api.errors import SynapseError, AuthError -from synapse.types import UserID +from synapse.types import Requester, UserID from synapse.rest.client.v1 import profile @@ -53,7 +52,7 @@ class ProfileTestCase(unittest.TestCase): ) def _get_user_by_req(request=None, allow_guest=False): - return (UserID.from_string(myid), "", False) + return Requester(UserID.from_string(myid), "", False) hs.get_v1auth().get_user_by_req = _get_user_by_req -- cgit 1.4.1 From 3adcc4c86aade29f502b7245acc2353326a62256 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 19 Jan 2016 11:35:50 +0000 Subject: Return highlight_count in /sync --- synapse/handlers/sync.py | 42 ++++++++++++++++++++++++++++++----- synapse/rest/client/v2_alpha/sync.py | 1 + synapse/storage/event_push_actions.py | 5 +++-- 3 files changed, 40 insertions(+), 8 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 52202d8e63..66e57bd4d6 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -55,6 +55,7 @@ class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [ "ephemeral", "account_data", "unread_notification_count", + "unread_highlight_count", ])): __slots__ = [] @@ -292,9 +293,14 @@ class SyncHandler(BaseHandler): notifs = yield self.unread_notifs_for_room_id( room_id, sync_config, ephemeral_by_room ) + notif_count = None + highlight_count = None if notifs is not None: notif_count = len(notifs) + highlight_count = len([ + 1 for notif in notifs if _action_has_highlight(notif["actions"]) + ]) current_state = yield self.get_state_at(room_id, now_token) @@ -307,6 +313,7 @@ class SyncHandler(BaseHandler): room_id, tags_by_room, account_data_by_room ), unread_notification_count=notif_count, + unread_highlight_count=highlight_count, )) def account_data_for_user(self, account_data): @@ -529,9 +536,14 @@ class SyncHandler(BaseHandler): notifs = yield self.unread_notifs_for_room_id( room_id, sync_config, all_ephemeral_by_room ) + notif_count = None + highlight_count = None if notifs is not None: notif_count = len(notifs) + highlight_count = len([ + 1 for notif in notifs if _action_has_highlight(notif["actions"]) + ]) just_joined = yield 
self.check_joined_room(sync_config, state) if just_joined: @@ -553,7 +565,8 @@ class SyncHandler(BaseHandler): account_data=self.account_data_for_room( room_id, tags_by_room, account_data_by_room ), - unread_notification_count=notif_count + unread_notification_count=notif_count, + unread_highlight_count=highlight_count, ) logger.debug("Result for room %s: %r", room_id, room_sync) @@ -692,9 +705,14 @@ class SyncHandler(BaseHandler): notifs = yield self.unread_notifs_for_room_id( room_id, sync_config, ephemeral_by_room ) + notif_count = None + highlight_count = None if notifs is not None: notif_count = len(notifs) + highlight_count = len([ + 1 for notif in notifs if _action_has_highlight(notif["actions"]) + ]) room_sync = JoinedSyncResult( room_id=room_id, @@ -705,6 +723,7 @@ class SyncHandler(BaseHandler): room_id, tags_by_room, account_data_by_room ), unread_notification_count=notif_count, + unread_highlight_count=highlight_count, ) logger.debug("Room sync: %r", room_sync) @@ -850,8 +869,19 @@ class SyncHandler(BaseHandler): notifs = yield self.store.get_unread_event_push_actions_by_room_for_user( room_id, sync_config.user.to_string(), last_unread_event_id ) - else: - # There is no new information in this period, so your notification - # count is whatever it was last time. - defer.returnValue(None) - defer.returnValue(notifs) + defer.returnValue(notifs) + + # There is no new information in this period, so your notification + # count is whatever it was last time. + defer.returnValue(None) + + +def _action_has_highlight(actions): + for action in actions: + try: + if action.get("set_tweak", None) == "highlight": + return action.get("value", True) + except AttributeError: + pass + + return False diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index 826f9db189..e300ced214 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -313,6 +313,7 @@ class SyncRestServlet(RestServlet): ephemeral_events = filter.filter_room_ephemeral(room.ephemeral) result["ephemeral"] = {"events": ephemeral_events} result["unread_notification_count"] = room.unread_notification_count + result["unread_highlight_count"] = room.unread_highlight_count return result diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py index d99171ee87..6b7cebc9ce 100644 --- a/synapse/storage/event_push_actions.py +++ b/synapse/storage/event_push_actions.py @@ -17,7 +17,7 @@ from ._base import SQLBaseStore from twisted.internet import defer import logging -import simplejson as json +import ujson as json logger = logging.getLogger(__name__) @@ -84,7 +84,8 @@ class EventPushActionsStore(SQLBaseStore): ) ) return [ - {"event_id": row[0], "actions": row[1]} for row in txn.fetchall() + {"event_id": row[0], "actions": json.loads(row[1])} + for row in txn.fetchall() ] ret = yield self.runInteraction( -- cgit 1.4.1 From ac2842ff1e1c53da80cc0743e31647c5a0152124 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 19 Jan 2016 17:19:53 +0000 Subject: Only compute unread notifications for rooms we send down stream --- synapse/handlers/sync.py | 50 ++++++++++++++++-------------------- synapse/rest/client/v2_alpha/sync.py | 3 +-- 2 files changed, 23 insertions(+), 30 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index d2864977b0..aca200c1e7 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -54,8 +54,7 @@ class 
JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [ "state", # dict[(str, str), FrozenEvent] "ephemeral", "account_data", - "unread_notification_count", - "unread_highlight_count", + "unread_notifications", ])): __slots__ = [] @@ -294,11 +293,10 @@ class SyncHandler(BaseHandler): room_id, sync_config, ephemeral_by_room ) - notif_count = None - highlight_count = None + unread_notifications = {} if notifs is not None: - notif_count = len(notifs) - highlight_count = len([ + unread_notifications["notification_count"] = len(notifs) + unread_notifications["highlight_count"] = len([ 1 for notif in notifs if _action_has_highlight(notif["actions"]) ]) @@ -312,8 +310,7 @@ class SyncHandler(BaseHandler): account_data=self.account_data_for_room( room_id, tags_by_room, account_data_by_room ), - unread_notification_count=notif_count, - unread_highlight_count=highlight_count, + unread_notifications=unread_notifications, )) def account_data_for_user(self, account_data): @@ -533,18 +530,6 @@ class SyncHandler(BaseHandler): else: prev_batch = now_token - notifs = yield self.unread_notifs_for_room_id( - room_id, sync_config, all_ephemeral_by_room - ) - - notif_count = None - highlight_count = None - if notifs is not None: - notif_count = len(notifs) - highlight_count = len([ - 1 for notif in notifs if _action_has_highlight(notif["actions"]) - ]) - just_joined = yield self.check_joined_room(sync_config, state) if just_joined: logger.debug("User has just joined %s: needs full state", @@ -565,12 +550,23 @@ class SyncHandler(BaseHandler): account_data=self.account_data_for_room( room_id, tags_by_room, account_data_by_room ), - unread_notification_count=notif_count, - unread_highlight_count=highlight_count, + unread_notifications={}, ) logger.debug("Result for room %s: %r", room_id, room_sync) if room_sync: + notifs = yield self.unread_notifs_for_room_id( + room_id, sync_config, all_ephemeral_by_room + ) + + if notifs is not None: + notif_dict = room_sync.unread_notifications + notif_dict["notification_count"] = len(notifs) + notif_dict["highlight_count"] = len([ + 1 for notif in notifs + if _action_has_highlight(notif["actions"]) + ]) + joined.append(room_sync) else: @@ -708,11 +704,10 @@ class SyncHandler(BaseHandler): room_id, sync_config, all_ephemeral_by_room ) - notif_count = None - highlight_count = None + unread_notifications = {} if notifs is not None: - notif_count = len(notifs) - highlight_count = len([ + unread_notifications["notification_count"] = len(notifs) + unread_notifications["highlight_count"] = len([ 1 for notif in notifs if _action_has_highlight(notif["actions"]) ]) @@ -724,8 +719,7 @@ class SyncHandler(BaseHandler): account_data=self.account_data_for_room( room_id, tags_by_room, account_data_by_room ), - unread_notification_count=notif_count, - unread_highlight_count=highlight_count, + unread_notifications=unread_notifications, ) logger.debug("Room sync: %r", room_sync) diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index e300ced214..df4b305b49 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -312,8 +312,7 @@ class SyncRestServlet(RestServlet): if joined: ephemeral_events = filter.filter_room_ephemeral(room.ephemeral) result["ephemeral"] = {"events": ephemeral_events} - result["unread_notification_count"] = room.unread_notification_count - result["unread_highlight_count"] = room.unread_highlight_count + result["unread_notifications"] = room.unread_notifications return result -- cgit 1.4.1 From 
73ca8e58344524610a2d512977f5a8cc0cc1b4a6 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 20 Jan 2016 15:42:57 +0000 Subject: Whine if we give a from param to /sync --- synapse/rest/client/v2_alpha/sync.py | 7 +++++++ 1 file changed, 7 insertions(+) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index df4b305b49..4114a7e430 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -85,6 +85,13 @@ class SyncRestServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request): + if "from" in request.args: + # /events used to use 'from', but /sync uses 'since'. + # Lets be helpful and whine if we see a 'from'. + raise SynapseError( + 400, "'from' is not a valid query parameter. Did you mean 'since'?" + ) + requester = yield self.auth.get_user_by_req( request, allow_guest=True ) -- cgit 1.4.1 From c43b6dcc756c259e479c13d4f0802850d657fed6 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 20 Jan 2016 16:14:48 +0000 Subject: Fix change_password --- synapse/rest/client/v2_alpha/account.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index fa56249a69..d507172704 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -56,10 +56,9 @@ class PasswordRestServlet(RestServlet): if LoginType.PASSWORD in result: # if using password, they should also be logged in requester = yield self.auth.get_user_by_req(request) - requester_user_id = requester.user.to_string() - if requester_user_id.to_string() != result[LoginType.PASSWORD]: + user_id = requester.user.to_string() + if user_id != result[LoginType.PASSWORD]: raise LoginError(400, "", Codes.UNKNOWN) - user_id = requester_user_id elif LoginType.EMAIL_IDENTITY in result: threepid = result[LoginType.EMAIL_IDENTITY] if 'medium' not in threepid or 'address' not in threepid: -- cgit 1.4.1 From 975903ae1735f690a387c10b1ffd1a9384353213 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 22 Jan 2016 10:41:30 +0000 Subject: Sanitize filters --- synapse/api/filtering.py | 48 +++++++++++++++++++--------------- synapse/rest/client/v2_alpha/filter.py | 2 +- synapse/rest/client/v2_alpha/sync.py | 24 ++++++++--------- 3 files changed, 40 insertions(+), 34 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index c7f021d1ff..5530b8c48f 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -28,14 +28,14 @@ class Filtering(object): return result def add_user_filter(self, user_localpart, user_filter): - self._check_valid_filter(user_filter) + self.check_valid_filter(user_filter) return self.store.add_user_filter(user_localpart, user_filter) # TODO(paul): surely we should probably add a delete_user_filter or # replace_user_filter at some point? There's no REST API specified for # them however - def _check_valid_filter(self, user_filter_json): + def check_valid_filter(self, user_filter_json): """Check if the provided filter is valid. This inspects all definitions contained within the filter. 
@@ -129,52 +129,55 @@ class Filtering(object): class FilterCollection(object): def __init__(self, filter_json): - self.filter_json = filter_json + self._filter_json = filter_json - room_filter_json = self.filter_json.get("room", {}) + room_filter_json = self._filter_json.get("room", {}) - self.room_filter = Filter({ + self._room_filter = Filter({ k: v for k, v in room_filter_json.items() if k in ("rooms", "not_rooms") }) - self.room_timeline_filter = Filter(room_filter_json.get("timeline", {})) - self.room_state_filter = Filter(room_filter_json.get("state", {})) - self.room_ephemeral_filter = Filter(room_filter_json.get("ephemeral", {})) - self.room_account_data = Filter(room_filter_json.get("account_data", {})) - self.presence_filter = Filter(self.filter_json.get("presence", {})) - self.account_data = Filter(self.filter_json.get("account_data", {})) + self._room_timeline_filter = Filter(room_filter_json.get("timeline", {})) + self._room_state_filter = Filter(room_filter_json.get("state", {})) + self._room_ephemeral_filter = Filter(room_filter_json.get("ephemeral", {})) + self._room_account_data = Filter(room_filter_json.get("account_data", {})) + self._presence_filter = Filter(filter_json.get("presence", {})) + self._account_data = Filter(filter_json.get("account_data", {})) - self.include_leave = self.filter_json.get("room", {}).get( + self.include_leave = filter_json.get("room", {}).get( "include_leave", False ) + def get_filter_json(self): + return self._filter_json + def timeline_limit(self): - return self.room_timeline_filter.limit() + return self._room_timeline_filter.limit() def presence_limit(self): - return self.presence_filter.limit() + return self._presence_filter.limit() def ephemeral_limit(self): - return self.room_ephemeral_filter.limit() + return self._room_ephemeral_filter.limit() def filter_presence(self, events): - return self.presence_filter.filter(events) + return self._presence_filter.filter(events) def filter_account_data(self, events): - return self.account_data.filter(events) + return self._account_data.filter(events) def filter_room_state(self, events): - return self.room_state_filter.filter(self.room_filter.filter(events)) + return self._room_state_filter.filter(self._room_filter.filter(events)) def filter_room_timeline(self, events): - return self.room_timeline_filter.filter(self.room_filter.filter(events)) + return self._room_timeline_filter.filter(self._room_filter.filter(events)) def filter_room_ephemeral(self, events): - return self.room_ephemeral_filter.filter(self.room_filter.filter(events)) + return self._room_ephemeral_filter.filter(self._room_filter.filter(events)) def filter_room_account_data(self, events): - return self.room_account_data.filter(self.room_filter.filter(events)) + return self._room_account_data.filter(self._room_filter.filter(events)) class Filter(object): @@ -258,3 +261,6 @@ def _matches_wildcard(actual_value, filter_value): return actual_value.startswith(type_prefix) else: return actual_value == filter_value + + +DEFAULT_FILTER_COLLECTION = FilterCollection({}) diff --git a/synapse/rest/client/v2_alpha/filter.py b/synapse/rest/client/v2_alpha/filter.py index 7695bebc28..7c94f6ec41 100644 --- a/synapse/rest/client/v2_alpha/filter.py +++ b/synapse/rest/client/v2_alpha/filter.py @@ -59,7 +59,7 @@ class GetFilterRestServlet(RestServlet): filter_id=filter_id, ) - defer.returnValue((200, filter.filter_json)) + defer.returnValue((200, filter.get_filter_json())) except KeyError: raise SynapseError(400, "No such filter") diff --git 
a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index 4114a7e430..ab924ad9e0 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -24,7 +24,7 @@ from synapse.events import FrozenEvent from synapse.events.utils import ( serialize_event, format_event_for_client_v2_without_room_id, ) -from synapse.api.filtering import FilterCollection +from synapse.api.filtering import FilterCollection, DEFAULT_FILTER_COLLECTION from synapse.api.errors import SynapseError from ._base import client_v2_patterns @@ -113,20 +113,20 @@ class SyncRestServlet(RestServlet): ) ) - if filter_id and filter_id.startswith('{'): - try: - filter_object = json.loads(filter_id) - except: - raise SynapseError(400, "Invalid filter JSON") - self.filtering._check_valid_filter(filter_object) - filter = FilterCollection(filter_object) - else: - try: + if filter_id: + if filter_id.startswith('{'): + try: + filter_object = json.loads(filter_id) + except: + raise SynapseError(400, "Invalid filter JSON") + self.filtering.check_valid_filter(filter_object) + filter = FilterCollection(filter_object) + else: filter = yield self.filtering.get_user_filter( user.localpart, filter_id ) - except: - filter = FilterCollection({}) + else: + filter = DEFAULT_FILTER_COLLECTION sync_config = SyncConfig( user=user, -- cgit 1.4.1 From 4021f95261ebdcca0ec2c3c91e8dd442a85c5ed4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Jan 2016 10:10:44 +0000 Subject: Move logic from rest/ to handlers/ --- synapse/api/filtering.py | 22 ++-- synapse/handlers/sync.py | 189 +++++++++++++++++++++++++++-------- synapse/rest/client/v2_alpha/sync.py | 57 +++++------ 3 files changed, 181 insertions(+), 87 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index 5530b8c48f..116060ee7f 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -190,18 +190,16 @@ class Filter(object): Returns: bool: True if the event matches """ - if isinstance(event, dict): - return self.check_fields( - event.get("room_id", None), - event.get("sender", None), - event.get("type", None), - ) - else: - return self.check_fields( - getattr(event, "room_id", None), - getattr(event, "sender", None), - event.type, - ) + sender = event.get("sender", None) + if not sender: + # Presence events have their 'sender' in content.user_id + sender = event.get("conntent", {}).get("user_id", None) + + return self.check_fields( + event.get("room_id", None), + sender, + event.get("type", None), + ) def check_fields(self, room_id, sender, event_type): """Checks whether the filter matches the given event fields. 
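The Filter/FilterCollection hunks above boil down to one matching rule: an event passes a filter if its room, sender and type each survive the corresponding allow/deny lists, trailing-"*" wildcards are honoured for types, and presence events take their sender from content.user_id. The sketch below restates that rule as standalone code for orientation only; MiniFilter and its definition dict are simplified stand-ins, not the real synapse.api.filtering classes (only _matches_wildcard mirrors a helper actually shown in the diff).

    # Illustrative sketch of the matching rule, not the synapse.api.filtering code.

    def _matches_wildcard(actual_value, filter_value):
        # Mirrors the helper shown in the hunk: "m.room.*" matches anything
        # starting with "m.room.".
        if filter_value.endswith("*"):
            return (actual_value or "").startswith(filter_value[:-1])
        return actual_value == filter_value


    class MiniFilter(object):
        def __init__(self, definition):
            # e.g. {"types": ["m.room.*"], "not_senders": ["@spam:example.com"]}
            self.definition = definition

        def check(self, event):
            # Presence events carry their sender in content.user_id rather than
            # at the top level, which is what the Filter.check change handles.
            sender = event.get("sender") or event.get("content", {}).get("user_id")
            return (
                self._check_field("rooms", event.get("room_id")) and
                self._check_field("senders", sender) and
                self._check_field("types", event.get("type"), wildcard=True)
            )

        def _check_field(self, name, value, wildcard=False):
            def matches(patterns):
                if wildcard:
                    return any(_matches_wildcard(value, p) for p in patterns)
                return value in patterns

            if matches(self.definition.get("not_" + name, [])):
                return False
            allowed = self.definition.get(name)
            if allowed is not None and not matches(allowed):
                return False
            return True


    # Example: only m.room.* events, from anyone except a blocked sender.
    f = MiniFilter({"types": ["m.room.*"], "not_senders": ["@spam:example.com"]})
    f.check({"type": "m.room.message", "sender": "@alice:example.com",
             "room_id": "!a:example.com"})   # True
    f.check({"type": "m.room.message", "sender": "@spam:example.com",
             "room_id": "!a:example.com"})   # False: sender is denied
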
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 53e1eb0508..9b5b4d2c9f 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -17,6 +17,7 @@ from ._base import BaseHandler from synapse.streams.config import PaginationConfig from synapse.api.constants import Membership, EventTypes +from synapse.api.filtering import DEFAULT_FILTER_COLLECTION from synapse.util import unwrapFirstError from twisted.internet import defer @@ -29,7 +30,7 @@ logger = logging.getLogger(__name__) SyncConfig = collections.namedtuple("SyncConfig", [ "user", - "filter", + "filter_collection", "is_guest", ]) @@ -129,6 +130,11 @@ class SyncHandler(BaseHandler): self.event_sources = hs.get_event_sources() self.clock = hs.get_clock() + @defer.inlineCallbacks + def get_sync_for_user(self, sync_config, since_token=None, timeout=0, + filter_collection=DEFAULT_FILTER_COLLECTION): + pass + @defer.inlineCallbacks def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0, full_state=False): @@ -142,8 +148,9 @@ class SyncHandler(BaseHandler): if timeout == 0 or since_token is None or full_state: # we are going to return immediately, so don't bother calling # notifier.wait_for_events. - result = yield self.current_sync_for_user(sync_config, since_token, - full_state=full_state) + result = yield self.current_sync_for_user( + sync_config, since_token, full_state=full_state, + ) defer.returnValue(result) else: def current_sync_callback(before_token, after_token): @@ -151,7 +158,7 @@ class SyncHandler(BaseHandler): result = yield self.notifier.wait_for_events( sync_config.user.to_string(), timeout, current_sync_callback, - from_token=since_token + from_token=since_token, ) defer.returnValue(result) @@ -205,7 +212,7 @@ class SyncHandler(BaseHandler): ) membership_list = (Membership.INVITE, Membership.JOIN) - if sync_config.filter.include_leave: + if sync_config.filter_collection.include_leave: membership_list += (Membership.LEAVE, Membership.BAN) room_list = yield self.store.get_rooms_for_user_where_membership_is( @@ -266,9 +273,17 @@ class SyncHandler(BaseHandler): deferreds, consumeErrors=True ).addErrback(unwrapFirstError) + account_data_for_user = sync_config.filter_collection.filter_account_data( + self.account_data_for_user(account_data) + ) + + presence = sync_config.filter_collection.filter_presence( + presence + ) + defer.returnValue(SyncResult( presence=presence, - account_data=self.account_data_for_user(account_data), + account_data=account_data_for_user, joined=joined, invited=invited, archived=archived, @@ -302,14 +317,31 @@ class SyncHandler(BaseHandler): current_state = yield self.get_state_at(room_id, now_token) + current_state = { + (e.type, e.state_key): e + for e in sync_config.filter_collection.filter_room_state( + current_state.values() + ) + } + + account_data = self.account_data_for_room( + room_id, tags_by_room, account_data_by_room + ) + + account_data = sync_config.filter_collection.filter_room_account_data( + account_data + ) + + ephemeral = sync_config.filter_collection.filter_room_ephemeral( + ephemeral_by_room.get(room_id, []) + ) + defer.returnValue(JoinedSyncResult( room_id=room_id, timeline=batch, state=current_state, - ephemeral=ephemeral_by_room.get(room_id, []), - account_data=self.account_data_for_room( - room_id, tags_by_room, account_data_by_room - ), + ephemeral=ephemeral, + account_data=account_data, unread_notifications=unread_notifications, )) @@ -365,7 +397,7 @@ class SyncHandler(BaseHandler): typing, typing_key = yield 
typing_source.get_new_events( user=sync_config.user, from_key=typing_key, - limit=sync_config.filter.ephemeral_limit(), + limit=sync_config.filter_collection.ephemeral_limit(), room_ids=room_ids, is_guest=sync_config.is_guest, ) @@ -388,7 +420,7 @@ class SyncHandler(BaseHandler): receipts, receipt_key = yield receipt_source.get_new_events( user=sync_config.user, from_key=receipt_key, - limit=sync_config.filter.ephemeral_limit(), + limit=sync_config.filter_collection.ephemeral_limit(), room_ids=room_ids, is_guest=sync_config.is_guest, ) @@ -419,13 +451,26 @@ class SyncHandler(BaseHandler): leave_state = yield self.store.get_state_for_event(leave_event_id) + leave_state = { + (e.type, e.state_key): e + for e in sync_config.filter_collection.filter_room_state( + leave_state.values() + ) + } + + account_data = self.account_data_for_room( + room_id, tags_by_room, account_data_by_room + ) + + account_data = sync_config.filter_collection.filter_room_account_data( + account_data + ) + defer.returnValue(ArchivedSyncResult( room_id=room_id, timeline=batch, state=leave_state, - account_data=self.account_data_for_room( - room_id, tags_by_room, account_data_by_room - ), + account_data=account_data, )) @defer.inlineCallbacks @@ -444,7 +489,7 @@ class SyncHandler(BaseHandler): presence, presence_key = yield presence_source.get_new_events( user=sync_config.user, from_key=since_token.presence_key, - limit=sync_config.filter.presence_limit(), + limit=sync_config.filter_collection.presence_limit(), room_ids=room_ids, is_guest=sync_config.is_guest, ) @@ -473,7 +518,7 @@ class SyncHandler(BaseHandler): sync_config.user ) - timeline_limit = sync_config.filter.timeline_limit() + timeline_limit = sync_config.filter_collection.timeline_limit() room_events, _ = yield self.store.get_room_events_stream( sync_config.user.to_string(), @@ -538,6 +583,27 @@ class SyncHandler(BaseHandler): # the timeline is inherently limited if we've just joined limited = True + recents = sync_config.filter_collection.filter_room_timeline(recents) + + state = { + (e.type, e.state_key): e + for e in sync_config.filter_collection.filter_room_state( + state.values() + ) + } + + acc_data = self.account_data_for_room( + room_id, tags_by_room, account_data_by_room + ) + + acc_data = sync_config.filter_collection.filter_room_account_data( + acc_data + ) + + ephemeral = sync_config.filter_collection.filter_room_ephemeral( + ephemeral_by_room.get(room_id, []) + ) + room_sync = JoinedSyncResult( room_id=room_id, timeline=TimelineBatch( @@ -546,10 +612,8 @@ class SyncHandler(BaseHandler): limited=limited, ), state=state, - ephemeral=ephemeral_by_room.get(room_id, []), - account_data=self.account_data_for_room( - room_id, tags_by_room, account_data_by_room - ), + ephemeral=ephemeral, + account_data=acc_data, unread_notifications={}, ) logger.debug("Result for room %s: %r", room_id, room_sync) @@ -603,9 +667,17 @@ class SyncHandler(BaseHandler): for event in invite_events ] + account_data_for_user = sync_config.filter_collection.filter_account_data( + self.account_data_for_user(account_data) + ) + + presence = sync_config.filter_collection.filter_presence( + presence + ) + defer.returnValue(SyncResult( presence=presence, - account_data=self.account_data_for_user(account_data), + account_data=account_data_for_user, joined=joined, invited=invited, archived=archived, @@ -621,7 +693,7 @@ class SyncHandler(BaseHandler): limited = True recents = [] filtering_factor = 2 - timeline_limit = sync_config.filter.timeline_limit() + timeline_limit = 
sync_config.filter_collection.timeline_limit() load_limit = max(timeline_limit * filtering_factor, 100) max_repeat = 3 # Only try a few times per room, otherwise room_key = now_token.room_key @@ -634,9 +706,9 @@ class SyncHandler(BaseHandler): from_token=since_token.room_key if since_token else None, end_token=end_key, ) - (room_key, _) = keys + room_key, _ = keys end_key = "s" + room_key.split('-')[-1] - loaded_recents = sync_config.filter.filter_room_timeline(events) + loaded_recents = sync_config.filter_collection.filter_room_timeline(events) loaded_recents = yield self._filter_events_for_client( sync_config.user.to_string(), loaded_recents, @@ -684,21 +756,28 @@ class SyncHandler(BaseHandler): logger.debug("Recents %r", batch) - current_state = yield self.get_state_at(room_id, now_token) + if batch.limited: + current_state = yield self.get_state_at(room_id, now_token) - state_at_previous_sync = yield self.get_state_at( - room_id, stream_position=since_token - ) + state_at_previous_sync = yield self.get_state_at( + room_id, stream_position=since_token + ) - state = yield self.compute_state_delta( - since_token=since_token, - previous_state=state_at_previous_sync, - current_state=current_state, - ) + state = yield self.compute_state_delta( + since_token=since_token, + previous_state=state_at_previous_sync, + current_state=current_state, + ) + else: + state = { + (event.type, event.state_key): event + for event in batch.events if event.is_state() + } just_joined = yield self.check_joined_room(sync_config, state) if just_joined: state = yield self.get_state_at(room_id, now_token) + # batch.limited = True notifs = yield self.unread_notifs_for_room_id( room_id, sync_config, all_ephemeral_by_room @@ -711,14 +790,29 @@ class SyncHandler(BaseHandler): 1 for notif in notifs if _action_has_highlight(notif["actions"]) ]) + state = { + (e.type, e.state_key): e + for e in sync_config.filter_collection.filter_room_state(state.values()) + } + + account_data = self.account_data_for_room( + room_id, tags_by_room, account_data_by_room + ) + + account_data = sync_config.filter_collection.filter_room_account_data( + account_data + ) + + ephemeral = sync_config.filter_collection.filter_room_ephemeral( + ephemeral_by_room.get(room_id, []) + ) + room_sync = JoinedSyncResult( room_id=room_id, timeline=batch, state=state, - ephemeral=ephemeral_by_room.get(room_id, []), - account_data=self.account_data_for_room( - room_id, tags_by_room, account_data_by_room - ), + ephemeral=ephemeral, + account_data=account_data, unread_notifications=unread_notifications, ) @@ -765,13 +859,26 @@ class SyncHandler(BaseHandler): current_state=state_events_at_leave, ) + state_events_delta = { + (e.type, e.state_key): e + for e in sync_config.filter_collection.filter_room_state( + state_events_delta.values() + ) + } + + account_data = self.account_data_for_room( + leave_event.room_id, tags_by_room, account_data_by_room + ) + + account_data = sync_config.filter_collection.filter_room_account_data( + account_data + ) + room_sync = ArchivedSyncResult( room_id=leave_event.room_id, timeline=batch, state=state_events_delta, - account_data=self.account_data_for_room( - leave_event.room_id, tags_by_room, account_data_by_room - ), + account_data=account_data, ) logger.debug("Room sync: %r", room_sync) diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index ab924ad9e0..07b5b5dfd5 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -130,7 +130,7 @@ class 
SyncRestServlet(RestServlet): sync_config = SyncConfig( user=user, - filter=filter, + filter_collection=filter, is_guest=requester.is_guest, ) @@ -154,23 +154,21 @@ class SyncRestServlet(RestServlet): time_now = self.clock.time_msec() joined = self.encode_joined( - sync_result.joined, filter, time_now, requester.access_token_id + sync_result.joined, time_now, requester.access_token_id ) invited = self.encode_invited( - sync_result.invited, filter, time_now, requester.access_token_id + sync_result.invited, time_now, requester.access_token_id ) archived = self.encode_archived( - sync_result.archived, filter, time_now, requester.access_token_id + sync_result.archived, time_now, requester.access_token_id ) response_content = { - "account_data": self.encode_account_data( - sync_result.account_data, filter, time_now - ), + "account_data": {"events": sync_result.account_data}, "presence": self.encode_presence( - sync_result.presence, filter, time_now + sync_result.presence, time_now ), "rooms": { "join": joined, @@ -182,24 +180,20 @@ class SyncRestServlet(RestServlet): defer.returnValue((200, response_content)) - def encode_presence(self, events, filter, time_now): + def encode_presence(self, events, time_now): formatted = [] for event in events: event = copy.deepcopy(event) event['sender'] = event['content'].pop('user_id') formatted.append(event) - return {"events": filter.filter_presence(formatted)} - - def encode_account_data(self, events, filter, time_now): - return {"events": filter.filter_account_data(events)} + return {"events": formatted} - def encode_joined(self, rooms, filter, time_now, token_id): + def encode_joined(self, rooms, time_now, token_id): """ Encode the joined rooms in a sync result :param list[synapse.handlers.sync.JoinedSyncResult] rooms: list of sync results for rooms this user is joined to - :param FilterCollection filter: filters to apply to the results :param int time_now: current time - used as a baseline for age calculations :param int token_id: ID of the user's auth token - used for namespacing @@ -211,18 +205,17 @@ class SyncRestServlet(RestServlet): joined = {} for room in rooms: joined[room.room_id] = self.encode_room( - room, filter, time_now, token_id + room, time_now, token_id ) return joined - def encode_invited(self, rooms, filter, time_now, token_id): + def encode_invited(self, rooms, time_now, token_id): """ Encode the invited rooms in a sync result :param list[synapse.handlers.sync.InvitedSyncResult] rooms: list of sync results for rooms this user is joined to - :param FilterCollection filter: filters to apply to the results :param int time_now: current time - used as a baseline for age calculations :param int token_id: ID of the user's auth token - used for namespacing @@ -237,7 +230,9 @@ class SyncRestServlet(RestServlet): room.invite, time_now, token_id=token_id, event_format=format_event_for_client_v2_without_room_id, ) - invited_state = invite.get("unsigned", {}).pop("invite_room_state", []) + unsigned = dict(invite.get("unsigned", {})) + invite["unsigned"] = unsigned + invited_state = list(unsigned.pop("invite_room_state", [])) invited_state.append(invite) invited[room.room_id] = { "invite_state": {"events": invited_state} @@ -245,13 +240,12 @@ class SyncRestServlet(RestServlet): return invited - def encode_archived(self, rooms, filter, time_now, token_id): + def encode_archived(self, rooms, time_now, token_id): """ Encode the archived rooms in a sync result :param list[synapse.handlers.sync.ArchivedSyncResult] rooms: list of sync results for rooms 
this user is joined to - :param FilterCollection filter: filters to apply to the results :param int time_now: current time - used as a baseline for age calculations :param int token_id: ID of the user's auth token - used for namespacing @@ -263,17 +257,16 @@ class SyncRestServlet(RestServlet): joined = {} for room in rooms: joined[room.room_id] = self.encode_room( - room, filter, time_now, token_id, joined=False + room, time_now, token_id, joined=False ) return joined @staticmethod - def encode_room(room, filter, time_now, token_id, joined=True): + def encode_room(room, time_now, token_id, joined=True): """ :param JoinedSyncResult|ArchivedSyncResult room: sync result for a single room - :param FilterCollection filter: filters to apply to the results :param int time_now: current time - used as a baseline for age calculations :param int token_id: ID of the user's auth token - used for namespacing @@ -292,19 +285,17 @@ class SyncRestServlet(RestServlet): ) state_dict = room.state - timeline_events = filter.filter_room_timeline(room.timeline.events) + timeline_events = room.timeline.events state_dict = SyncRestServlet._rollback_state_for_timeline( state_dict, timeline_events) - state_events = filter.filter_room_state(state_dict.values()) + state_events = state_dict.values() serialized_state = [serialize(e) for e in state_events] serialized_timeline = [serialize(e) for e in timeline_events] - account_data = filter.filter_room_account_data( - room.account_data - ) + account_data = room.account_data result = { "timeline": { @@ -317,7 +308,7 @@ class SyncRestServlet(RestServlet): } if joined: - ephemeral_events = filter.filter_room_ephemeral(room.ephemeral) + ephemeral_events = room.ephemeral result["ephemeral"] = {"events": ephemeral_events} result["unread_notifications"] = room.unread_notifications @@ -334,8 +325,6 @@ class SyncRestServlet(RestServlet): :param list[synapse.events.EventBase] timeline: the event timeline :return: updated state dictionary """ - logger.debug("Processing state dict %r; timeline %r", state, - [e.get_dict() for e in timeline]) result = state.copy() @@ -357,8 +346,8 @@ class SyncRestServlet(RestServlet): # the event graph, and the state is no longer valid. Really, # the event shouldn't be in the timeline. We're going to ignore # it for now, however. 
- logger.warn("Found state event %r in timeline which doesn't " - "match state dictionary", timeline_event) + logger.debug("Found state event %r in timeline which doesn't " + "match state dictionary", timeline_event) continue prev_event_id = timeline_event.unsigned.get("replaces_state", None) -- cgit 1.4.1 From 5687a00e4ec24b78e41b0377bfe524d7f0b13a43 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Fri, 29 Jan 2016 13:26:15 +0000 Subject: Allow three_pid_creds as well as threePidCreds in /account/3pid --- synapse/rest/client/v2_alpha/account.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index d507172704..a614b79d45 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -116,9 +116,10 @@ class ThreepidRestServlet(RestServlet): body = parse_json_dict_from_request(request) - if 'threePidCreds' not in body: + threePidCreds = body.get('threePidCreds') + threePidCreds = body.get('three_pid_creds', threePidCreds) + if threePidCreds is None: raise SynapseError(400, "Missing param", Codes.MISSING_PARAM) - threePidCreds = body['threePidCreds'] requester = yield self.auth.get_user_by_req(request) user_id = requester.user.to_string() -- cgit 1.4.1 From fa48020a52dfb924be4f191d54631b4d05b08ba0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 1 Feb 2016 15:59:40 +0000 Subject: Move state calculations from rest to handler --- synapse/handlers/sync.py | 164 +++++++++++++++++++++-------------- synapse/rest/client/v2_alpha/sync.py | 75 ---------------- 2 files changed, 98 insertions(+), 141 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 075566417f..3109e30414 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -23,6 +23,7 @@ from twisted.internet import defer import collections import logging +import itertools logger = logging.getLogger(__name__) @@ -672,35 +673,10 @@ class SyncHandler(BaseHandler): account_data_by_room, all_ephemeral_by_room, batch, full_state=False): - if full_state: - state = yield self.get_state_at(room_id, now_token) - - elif batch.limited: - current_state = yield self.get_state_at(room_id, now_token) - - state_at_previous_sync = yield self.get_state_at( - room_id, stream_position=since_token - ) - - state = yield self.compute_state_delta( - since_token=since_token, - previous_state=state_at_previous_sync, - current_state=current_state, - ) - else: - state = { - (event.type, event.state_key): event - for event in batch.events if event.is_state() - } - - just_joined = yield self.check_joined_room(sync_config, state) - if just_joined: - state = yield self.get_state_at(room_id, now_token) - - state = { - (e.type, e.state_key): e - for e in sync_config.filter_collection.filter_room_state(state.values()) - } + state = yield self.compute_state_delta( + room_id, batch, sync_config, since_token, now_token, + full_state=full_state + ) account_data = self.account_data_for_room( room_id, tags_by_room, account_data_by_room @@ -766,30 +742,11 @@ class SyncHandler(BaseHandler): logger.debug("Recents %r", batch) - state_events_at_leave = yield self.store.get_state_for_event( - leave_event_id + state_events_delta = yield self.compute_state_delta( + room_id, batch, sync_config, since_token, leave_token, + full_state=full_state ) - if not full_state: - state_at_previous_sync = yield 
self.get_state_at( - room_id, stream_position=since_token - ) - - state_events_delta = yield self.compute_state_delta( - since_token=since_token, - previous_state=state_at_previous_sync, - current_state=state_events_at_leave, - ) - else: - state_events_delta = state_events_at_leave - - state_events_delta = { - (e.type, e.state_key): e - for e in sync_config.filter_collection.filter_room_state( - state_events_delta.values() - ) - } - account_data = self.account_data_for_room( room_id, tags_by_room, account_data_by_room ) @@ -843,15 +800,18 @@ class SyncHandler(BaseHandler): state = {} defer.returnValue(state) - def compute_state_delta(self, since_token, previous_state, current_state): - """ Works out the differnce in state between the current state and the - state the client got when it last performed a sync. - - :param str since_token: the point we are comparing against - :param dict[(str,str), synapse.events.FrozenEvent] previous_state: the - state to compare to - :param dict[(str,str), synapse.events.FrozenEvent] current_state: the - new state + @defer.inlineCallbacks + def compute_state_delta(self, room_id, batch, sync_config, since_token, now_token, + full_state): + """ Works out the differnce in state between the start of the timeline + and the previous sync. + + :param str room_id + :param TimelineBatch batch + :param sync_config + :param str since_token + :param str now_token + :param bool full_state :returns A new event dictionary """ @@ -860,12 +820,50 @@ class SyncHandler(BaseHandler): # updates even if they occured logically before the previous event. # TODO(mjark) Check for new redactions in the state events. - state_delta = {} - for key, event in current_state.iteritems(): - if (key not in previous_state or - previous_state[key].event_id != event.event_id): - state_delta[key] = event - return state_delta + if full_state: + if batch: + state = yield self.store.get_state_for_event(batch.events[0].event_id) + else: + state = yield self.get_state_at( + room_id, stream_position=now_token + ) + + timeline_state = { + (event.type, event.state_key): event + for event in batch.events if event.is_state() + } + + state = _calculate_state( + timeline_contains=timeline_state, + timeline_start=state, + previous={}, + ) + elif batch.limited: + state_at_previous_sync = yield self.get_state_at( + room_id, stream_position=since_token + ) + + state_at_timeline_start = yield self.store.get_state_for_event( + batch.events[0].event_id + ) + + timeline_state = { + (event.type, event.state_key): event + for event in batch.events if event.is_state() + } + + state = _calculate_state( + timeline_contains=timeline_state, + timeline_start=state_at_timeline_start, + previous=state_at_previous_sync, + ) + else: + state = {} + + defer.returnValue({ + (e.type, e.state_key): e + for e in sync_config.filter_collection.filter_room_state(state.values()) + }) def check_joined_room(self, sync_config, state_delta): """ @@ -912,3 +910,37 @@ def _action_has_highlight(actions): pass return False + + +def _calculate_state(timeline_contains, timeline_start, previous): + """Works out what state to include in a sync response. 
+ + Args: + timeline_contains (dict): state in the timeline + timeline_start (dict): state at the start of the timeline + previous (dict): state at the end of the previous sync (or empty dict + if there is an initial sync) + + Returns: + dict + """ + event_id_to_state = { + e.event_id: e + for e in itertools.chain( + timeline_contains.values(), + previous.values(), + timeline_start.values(), + ) + } + + tc_ids = set(e.event_id for e in timeline_contains.values()) + p_ids = set(e.event_id for e in previous.values()) + ts_ids = set(e.event_id for e in timeline_start.values()) + + state_ids = (ts_ids - p_ids) - tc_ids + + evs = (event_id_to_state[e] for e in state_ids) + return { + (e.type, e.state_key): e + for e in evs + } diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index 07b5b5dfd5..140ce2704b 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -20,7 +20,6 @@ from synapse.http.servlet import ( ) from synapse.handlers.sync import SyncConfig from synapse.types import StreamToken -from synapse.events import FrozenEvent from synapse.events.utils import ( serialize_event, format_event_for_client_v2_without_room_id, ) @@ -287,9 +286,6 @@ class SyncRestServlet(RestServlet): state_dict = room.state timeline_events = room.timeline.events - state_dict = SyncRestServlet._rollback_state_for_timeline( - state_dict, timeline_events) - state_events = state_dict.values() serialized_state = [serialize(e) for e in state_events] @@ -314,77 +310,6 @@ class SyncRestServlet(RestServlet): return result - @staticmethod - def _rollback_state_for_timeline(state, timeline): - """ - Wind the state dictionary backwards, so that it represents the - state at the start of the timeline, rather than at the end. - - :param dict[(str, str), synapse.events.EventBase] state: the - state dictionary. Will be updated to the state before the timeline. - :param list[synapse.events.EventBase] timeline: the event timeline - :return: updated state dictionary - """ - - result = state.copy() - - for timeline_event in reversed(timeline): - if not timeline_event.is_state(): - continue - - event_key = (timeline_event.type, timeline_event.state_key) - - logger.debug("Considering %s for removal", event_key) - - state_event = result.get(event_key) - if (state_event is None or - state_event.event_id != timeline_event.event_id): - # the event in the timeline isn't present in the state - # dictionary. - # - # the most likely cause for this is that there was a fork in - # the event graph, and the state is no longer valid. Really, - # the event shouldn't be in the timeline. We're going to ignore - # it for now, however. - logger.debug("Found state event %r in timeline which doesn't " - "match state dictionary", timeline_event) - continue - - prev_event_id = timeline_event.unsigned.get("replaces_state", None) - - prev_content = timeline_event.unsigned.get('prev_content') - prev_sender = timeline_event.unsigned.get('prev_sender') - # Empircally it seems possible for the event to have a - # "replaces_state" key but not a prev_content or prev_sender - # markjh conjectures that it could be due to the server not - # having a copy of that event. - # If this is the case the we ignore the previous event. 
This will - # cause the displayname calculations on the client to be incorrect - if prev_event_id is None or not prev_content or not prev_sender: - logger.debug( - "Removing %r from the state dict, as it is missing" - " prev_content (prev_event_id=%r)", - timeline_event.event_id, prev_event_id - ) - del result[event_key] - else: - logger.debug( - "Replacing %r with %r in state dict", - timeline_event.event_id, prev_event_id - ) - result[event_key] = FrozenEvent({ - "type": timeline_event.type, - "state_key": timeline_event.state_key, - "content": prev_content, - "sender": prev_sender, - "event_id": prev_event_id, - "room_id": timeline_event.room_id, - }) - - logger.debug("New value: %r", result.get(event_key)) - - return result - def register_servlets(hs, http_server): SyncRestServlet(hs).register(http_server) -- cgit 1.4.1 From d7ac861d3b34bbc6d0d0b66370903831ef6ee9bf Mon Sep 17 00:00:00 2001 From: David Baker Date: Mon, 1 Feb 2016 16:33:19 +0000 Subject: Pull guest access token out of the auth session params, otherwise it will break if you open the email on a different device. --- synapse/rest/client/v2_alpha/register.py | 1 + 1 file changed, 1 insertion(+) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index c4d025b465..5d50dd9e3d 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -152,6 +152,7 @@ class RegisterRestServlet(RestServlet): desired_username = params.get("username", None) new_password = params.get("password", None) + guest_access_token = params.get("guest_access_token", None) (user_id, token) = yield self.registration_handler.register( localpart=desired_username, -- cgit 1.4.1 From d83d004ccdb7ace1dcb51b8acf7645bc176b10a5 Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Tue, 2 Feb 2016 17:18:50 +0000 Subject: Fix flake8 warnings for new flake8 --- setup.cfg | 1 + synapse/api/auth.py | 2 +- synapse/app/__init__.py | 19 ++++++++++++++++ synapse/app/homeserver.py | 38 +++++++++----------------------- synapse/appservice/api.py | 2 +- synapse/federation/federation_client.py | 2 +- synapse/handlers/_base.py | 2 +- synapse/handlers/directory.py | 4 ++-- synapse/handlers/events.py | 2 +- synapse/handlers/presence.py | 2 +- synapse/handlers/register.py | 2 +- synapse/handlers/room.py | 2 +- synapse/http/matrixfederationclient.py | 2 +- synapse/notifier.py | 2 +- synapse/push/push_rule_evaluator.py | 2 +- synapse/rest/client/v1/login.py | 2 +- synapse/rest/client/v1/pusher.py | 4 ++-- synapse/rest/client/v1/register.py | 3 ++- synapse/rest/client/v2_alpha/register.py | 3 ++- synapse/rest/client/versions.py | 4 +--- synapse/server.py | 2 +- synapse/state.py | 2 +- synapse/storage/__init__.py | 2 +- synapse/storage/_base.py | 7 ++++-- synapse/storage/engines/sqlite3.py | 2 +- synapse/storage/event_federation.py | 2 +- synapse/storage/events.py | 6 ++--- synapse/storage/stream.py | 2 +- synapse/util/__init__.py | 2 +- synapse/util/caches/descriptors.py | 4 ++-- synapse/util/caches/expiringcache.py | 2 +- synapse/util/caches/treecache.py | 2 +- synapse/util/logutils.py | 2 +- synapse/util/ratelimitutils.py | 2 +- 34 files changed, 73 insertions(+), 66 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/setup.cfg b/setup.cfg index ba027c7d13..e7fc5ffe78 100644 --- a/setup.cfg +++ b/setup.cfg @@ -16,3 +16,4 @@ ignore = [flake8] max-line-length = 90 +ignore = W503 diff --git a/synapse/api/auth.py b/synapse/api/auth.py 
index b5536e8565..c5a2865e26 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -574,7 +574,7 @@ class Auth(object): raise AuthError( 403, "Application service has not registered this user" - ) + ) defer.returnValue(user_id) @defer.inlineCallbacks diff --git a/synapse/app/__init__.py b/synapse/app/__init__.py index bfebb0f644..1bc4279807 100644 --- a/synapse/app/__init__.py +++ b/synapse/app/__init__.py @@ -12,3 +12,22 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + +import sys +sys.dont_write_bytecode = True + +from synapse.python_dependencies import ( + check_requirements, MissingRequirementError +) # NOQA + +try: + check_requirements() +except MissingRequirementError as e: + message = "\n".join([ + "Missing Requirement: %s" % (e.message,), + "To install run:", + " pip install --upgrade --force \"%s\"" % (e.dependency,), + "", + ]) + sys.stderr.writelines(message) + sys.exit(1) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index e5066c48ef..c3066d6a0d 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -14,27 +14,22 @@ # See the License for the specific language governing permissions and # limitations under the License. +import synapse + +import contextlib +import logging +import os +import re +import resource +import subprocess import sys -from synapse.rest import ClientRestResource +import time -sys.dont_write_bytecode = True from synapse.python_dependencies import ( - check_requirements, DEPENDENCY_LINKS, MissingRequirementError + check_requirements, DEPENDENCY_LINKS ) -if __name__ == '__main__': - try: - check_requirements() - except MissingRequirementError as e: - message = "\n".join([ - "Missing Requirement: %s" % (e.message,), - "To install run:", - " pip install --upgrade --force \"%s\"" % (e.dependency,), - "", - ]) - sys.stderr.writelines(message) - sys.exit(1) - +from synapse.rest import ClientRestResource from synapse.storage.engines import create_engine, IncorrectDatabaseSetup from synapse.storage import are_all_users_on_domain from synapse.storage.prepare_database import UpgradeDatabaseException @@ -73,17 +68,6 @@ from synapse import events from daemonize import Daemonize -import synapse - -import contextlib -import logging -import os -import re -import resource -import subprocess -import time - - logger = logging.getLogger("synapse.app.homeserver") diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index e1c07028e8..bc90605324 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -29,7 +29,7 @@ class ApplicationServiceApi(SimpleHttpClient): pushing. 
""" - def __init__(self, hs): + def __init__(self, hs): super(ApplicationServiceApi, self).__init__(hs) self.clock = hs.get_clock() diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index c6259f9dc8..e30e2da58d 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -57,7 +57,7 @@ class FederationClient(FederationBase): cache_name="get_pdu_cache", clock=self._clock, max_len=1000, - expiry_ms=120*1000, + expiry_ms=120 * 1000, reset_expiry_on_get=False, ) diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 744a9ee507..1423df6cf3 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -147,7 +147,7 @@ class BaseHandler(object): ) if not allowed: raise LimitExceededError( - retry_after_ms=int(1000*(time_allowed - time_now)), + retry_after_ms=int(1000 * (time_allowed - time_now)), ) @defer.inlineCallbacks diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 691564c651..4efecb1ffd 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -175,8 +175,8 @@ class DirectoryHandler(BaseHandler): # If this server is in the list of servers, return it first. if self.server_name in servers: servers = ( - [self.server_name] - + [s for s in servers if s != self.server_name] + [self.server_name] + + [s for s in servers if s != self.server_name] ) else: servers = list(servers) diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py index 254b483da6..5ad8f3779a 100644 --- a/synapse/handlers/events.py +++ b/synapse/handlers/events.py @@ -130,7 +130,7 @@ class EventStreamHandler(BaseHandler): # Add some randomness to this value to try and mitigate against # thundering herds on restart. - timeout = random.randint(int(timeout*0.9), int(timeout*1.1)) + timeout = random.randint(int(timeout * 0.9), int(timeout * 1.1)) events, tokens = yield self.notifier.get_events_for( auth_user, pagin_config, timeout, diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index d36eb3b8d7..d0c21ff5c9 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -34,7 +34,7 @@ metrics = synapse.metrics.get_metrics_for(__name__) # Don't bother bumping "last active" time if it differs by less than 60 seconds -LAST_ACTIVE_GRANULARITY = 60*1000 +LAST_ACTIVE_GRANULARITY = 60 * 1000 # Keep no more than this number of offline serial revisions MAX_OFFLINE_SERIALS = 1000 diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index abd1a16a41..b8fbcf9233 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -213,7 +213,7 @@ class RegistrationHandler(BaseHandler): 400, "User ID must only contain characters which do not" " require URL encoding." - ) + ) user = UserID(localpart, self.hs.hostname) user_id = user.to_string() diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 799221c198..088b76d237 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -927,7 +927,7 @@ class RoomContextHandler(BaseHandler): Returns: dict, or None if the event isn't found """ - before_limit = math.floor(limit/2.) + before_limit = math.floor(limit / 2.) 
after_limit = limit - before_limit now_token = yield self.hs.get_event_sources().get_current_token() diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index da13e32e78..c3589534f8 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -152,7 +152,7 @@ class MatrixFederationHttpClient(object): return self.clock.time_bound_deferred( request_deferred, - time_out=timeout/1000. if timeout else 60, + time_out=timeout / 1000. if timeout else 60, ) response = yield preserve_context_over_fn( diff --git a/synapse/notifier.py b/synapse/notifier.py index 29965a9ab5..1a90bd55cd 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -308,7 +308,7 @@ class Notifier(object): def timed_out(): if listener: listener.deferred.cancel() - timer = self.clock.call_later(timeout/1000., timed_out) + timer = self.clock.call_later(timeout / 1000., timed_out) prev_token = from_token while not result: diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index dca018af95..2a2b4437dc 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -304,7 +304,7 @@ def _flatten_dict(d, prefix=[], result={}): if isinstance(value, basestring): result[".".join(prefix + [key])] = value.lower() elif hasattr(value, "items"): - _flatten_dict(value, prefix=(prefix+[key]), result=result) + _flatten_dict(value, prefix=(prefix + [key]), result=result) return result diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 07836709fb..7199113dac 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -89,7 +89,7 @@ class LoginRestServlet(ClientV1RestServlet): LoginRestServlet.SAML2_TYPE): relay_state = "" if "relay_state" in login_submission: - relay_state = "&RelayState="+urllib.quote( + relay_state = "&RelayState=" + urllib.quote( login_submission["relay_state"]) result = { "uri": "%s%s" % (self.idp_redirect_url, relay_state) diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py index e218ed215c..5547f1b112 100644 --- a/synapse/rest/client/v1/pusher.py +++ b/synapse/rest/client/v1/pusher.py @@ -52,7 +52,7 @@ class PusherRestServlet(ClientV1RestServlet): if i not in content: missing.append(i) if len(missing): - raise SynapseError(400, "Missing parameters: "+','.join(missing), + raise SynapseError(400, "Missing parameters: " + ','.join(missing), errcode=Codes.MISSING_PARAM) logger.debug("set pushkey %s to kind %s", content['pushkey'], content['kind']) @@ -83,7 +83,7 @@ class PusherRestServlet(ClientV1RestServlet): data=content['data'] ) except PusherConfigException as pce: - raise SynapseError(400, "Config Error: "+pce.message, + raise SynapseError(400, "Config Error: " + pce.message, errcode=Codes.MISSING_PARAM) defer.returnValue((200, {})) diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py index 5378a9a938..2bfd4d96bf 100644 --- a/synapse/rest/client/v1/register.py +++ b/synapse/rest/client/v1/register.py @@ -38,7 +38,8 @@ logger = logging.getLogger(__name__) if hasattr(hmac, "compare_digest"): compare_digest = hmac.compare_digest else: - compare_digest = lambda a, b: a == b + def compare_digest(a, b): + return a == b class RegisterRestServlet(ClientV1RestServlet): diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 5d50dd9e3d..56a5bbec30 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ 
b/synapse/rest/client/v2_alpha/register.py @@ -34,7 +34,8 @@ from synapse.util.async import run_on_reactor if hasattr(hmac, "compare_digest"): compare_digest = hmac.compare_digest else: - compare_digest = lambda a, b: a == b + def compare_digest(a, b): + return a == b logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 349ef6b396..ca5468c402 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -26,9 +26,7 @@ class VersionsRestServlet(RestServlet): def on_GET(self, request): return (200, { - "versions": [ - "r0.0.1", - ] + "versions": ["r0.0.1"] }) diff --git a/synapse/server.py b/synapse/server.py index 5fee7fe130..368d615576 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -23,7 +23,7 @@ from twisted.web.client import BrowserLikePolicyForHTTPS from twisted.enterprise import adbapi from synapse.federation import initialize_http_replication -from synapse.http.client import SimpleHttpClient, InsecureInterceptableContextFactory +from synapse.http.client import SimpleHttpClient, InsecureInterceptableContextFactory from synapse.notifier import Notifier from synapse.api.auth import Auth from synapse.handlers import Handlers diff --git a/synapse/state.py b/synapse/state.py index 0acf309fe0..b9a1387520 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -63,7 +63,7 @@ class StateHandler(object): cache_name="state_cache", clock=self.clock, max_len=SIZE_OF_CACHE, - expiry_ms=EVICTION_TIMEOUT_SECONDS*1000, + expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000, reset_expiry_on_get=True, ) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index c91c7a3729..5a9e7720d9 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -59,7 +59,7 @@ logger = logging.getLogger(__name__) # Number of msec of granularity to store the user IP 'last seen' time. 
Smaller # times give more inserts into the database even for readonly API hits # 120 seconds == 2 minutes -LAST_SEEN_GRANULARITY = 120*1000 +LAST_SEEN_GRANULARITY = 120 * 1000 class DataStore(RoomMemberStore, RoomStore, diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 5e77320540..cfb87d9328 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -185,7 +185,7 @@ class SQLBaseStore(object): time_then = self._previous_loop_ts self._previous_loop_ts = time_now - ratio = (curr - prev)/(time_now - time_then) + ratio = (curr - prev) / (time_now - time_then) top_three_counters = self._txn_perf_counters.interval( time_now - time_then, limit=3 @@ -643,7 +643,10 @@ class SQLBaseStore(object): if not iterable: defer.returnValue(results) - chunks = [iterable[i:i+batch_size] for i in xrange(0, len(iterable), batch_size)] + chunks = [ + iterable[i:i + batch_size] + for i in xrange(0, len(iterable), batch_size) + ] for chunk in chunks: rows = yield self.runInteraction( desc, diff --git a/synapse/storage/engines/sqlite3.py b/synapse/storage/engines/sqlite3.py index 400c10103c..91fac33b8b 100644 --- a/synapse/storage/engines/sqlite3.py +++ b/synapse/storage/engines/sqlite3.py @@ -54,7 +54,7 @@ class Sqlite3Engine(object): def _parse_match_info(buf): bufsize = len(buf) - return [struct.unpack('@I', buf[i:i+4])[0] for i in range(0, bufsize, 4)] + return [struct.unpack('@I', buf[i:i + 4])[0] for i in range(0, bufsize, 4)] def _rank(raw_match_info): diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 5f32eec6f8..ce2c794025 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -58,7 +58,7 @@ class EventFederationStore(SQLBaseStore): new_front = set() front_list = list(front) chunks = [ - front_list[x:x+100] + front_list[x:x + 100] for x in xrange(0, len(front), 100) ] for chunk in chunks: diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 5e85552029..4d7cdd00d0 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -84,7 +84,7 @@ class EventsStore(SQLBaseStore): event.internal_metadata.stream_ordering = stream chunks = [ - events_and_contexts[x:x+100] + events_and_contexts[x:x + 100] for x in xrange(0, len(events_and_contexts), 100) ] @@ -740,7 +740,7 @@ class EventsStore(SQLBaseStore): rows = [] N = 200 for i in range(1 + len(events) / N): - evs = events[i*N:(i + 1)*N] + evs = events[i * N:(i + 1) * N] if not evs: break @@ -755,7 +755,7 @@ class EventsStore(SQLBaseStore): " LEFT JOIN rejections as rej USING (event_id)" " LEFT JOIN redactions as r ON e.event_id = r.redacts" " WHERE e.event_id IN (%s)" - ) % (",".join(["?"]*len(evs)),) + ) % (",".join(["?"] * len(evs)),) txn.execute(sql, evs) rows.extend(self.cursor_to_dict(txn)) diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py index 338a9d40d5..2c49a5e499 100644 --- a/synapse/storage/stream.py +++ b/synapse/storage/stream.py @@ -168,7 +168,7 @@ class StreamStore(SQLBaseStore): results = {} room_ids = list(room_ids) - for rm_ids in (room_ids[i:i+20] for i in xrange(0, len(room_ids), 20)): + for rm_ids in (room_ids[i:i + 20] for i in xrange(0, len(room_ids), 20)): res = yield defer.gatherResults([ self.get_room_events_stream_for_room( room_id, from_key, to_key, limit diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index f1fe963adf..7566d9eb33 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -46,7 +46,7 @@ class Clock(object): def looping_call(self, 
f, msec): l = task.LoopingCall(f) - l.start(msec/1000.0, now=False) + l.start(msec / 1000.0, now=False) return l def stop_looping_call(self, loop): diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 88e56e3302..e27917c63a 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -149,7 +149,7 @@ class CacheDescriptor(object): self.lru = lru self.tree = tree - self.arg_names = inspect.getargspec(orig).args[1:num_args+1] + self.arg_names = inspect.getargspec(orig).args[1:num_args + 1] if len(self.arg_names) < self.num_args: raise Exception( @@ -250,7 +250,7 @@ class CacheListDescriptor(object): self.num_args = num_args self.list_name = list_name - self.arg_names = inspect.getargspec(orig).args[1:num_args+1] + self.arg_names = inspect.getargspec(orig).args[1:num_args + 1] self.list_pos = self.arg_names.index(self.list_name) self.cache = cache diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index 494226f5ea..62cae99649 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -55,7 +55,7 @@ class ExpiringCache(object): def f(): self._prune_cache() - self._clock.looping_call(f, self._expiry_ms/2) + self._clock.looping_call(f, self._expiry_ms / 2) def __setitem__(self, key, value): now = self._clock.time_msec() diff --git a/synapse/util/caches/treecache.py b/synapse/util/caches/treecache.py index 29d02f7e95..03bc1401b7 100644 --- a/synapse/util/caches/treecache.py +++ b/synapse/util/caches/treecache.py @@ -58,7 +58,7 @@ class TreeCache(object): if n: break - node_and_keys[i+1][0].pop(k) + node_and_keys[i + 1][0].pop(k) popped, cnt = _strip_and_count_entires(popped) self.size -= cnt diff --git a/synapse/util/logutils.py b/synapse/util/logutils.py index d5b1a37eff..c37a157787 100644 --- a/synapse/util/logutils.py +++ b/synapse/util/logutils.py @@ -111,7 +111,7 @@ def time_function(f): _log_debug_as_f( f, "[FUNC END] {%s-%d} %f", - (func_name, id, end-start,), + (func_name, id, end - start,), ) return r diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index c37d6f12e3..ea321bc6a9 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -163,7 +163,7 @@ class _PerHostRatelimiter(object): "Ratelimit [%s]: sleeping req", id(request_id), ) - ret_defer = sleep(self.sleep_msec/1000.0) + ret_defer = sleep(self.sleep_msec / 1000.0) self.sleeping_requests.add(request_id) -- cgit 1.4.1 From 5054806ec1f64fd784d9e74d73a678643d539c3f Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Wed, 3 Feb 2016 14:42:01 +0000 Subject: Rename config field to reflect yaml name --- synapse/config/registration.py | 6 +++--- synapse/rest/client/v1/register.py | 4 ++-- synapse/rest/client/v2_alpha/register.py | 2 +- tests/rest/client/v1/test_events.py | 2 +- tests/rest/client/v2_alpha/test_register.py | 4 ++-- tests/utils.py | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 76d2d2d640..90ea19bd4b 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -23,11 +23,11 @@ from distutils.util import strtobool class RegistrationConfig(Config): def read_config(self, config): - self.disable_registration = not bool( + self.enable_registration = bool( strtobool(str(config["enable_registration"])) ) if "disable_registration" in config: - self.disable_registration = bool( 
+ self.enable_registration = not bool( strtobool(str(config["disable_registration"])) ) @@ -78,6 +78,6 @@ class RegistrationConfig(Config): def read_arguments(self, args): if args.enable_registration is not None: - self.disable_registration = not bool( + self.enable_registration = bool( strtobool(str(args.enable_registration)) ) diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py index 2bfd4d96bf..6d6d03c34c 100644 --- a/synapse/rest/client/v1/register.py +++ b/synapse/rest/client/v1/register.py @@ -59,7 +59,7 @@ class RegisterRestServlet(ClientV1RestServlet): # } # TODO: persistent storage self.sessions = {} - self.disable_registration = hs.config.disable_registration + self.enable_registration = hs.config.enable_registration def on_GET(self, request): if self.hs.config.enable_registration_captcha: @@ -113,7 +113,7 @@ class RegisterRestServlet(ClientV1RestServlet): is_using_shared_secret = login_type == LoginType.SHARED_SECRET can_register = ( - not self.disable_registration + self.enable_registration or is_application_server or is_using_shared_secret ) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 56a5bbec30..ec5c21fa1f 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -117,7 +117,7 @@ class RegisterRestServlet(RestServlet): return # == Normal User Registration == (everyone else) - if self.hs.config.disable_registration: + if not self.hs.config.enable_registration: raise SynapseError(403, "Registration has been disabled") guest_access_token = body.get("guest_access_token", None) diff --git a/tests/rest/client/v1/test_events.py b/tests/rest/client/v1/test_events.py index b260e269ac..e9698bfdc9 100644 --- a/tests/rest/client/v1/test_events.py +++ b/tests/rest/client/v1/test_events.py @@ -122,7 +122,7 @@ class EventStreamPermissionsTestCase(RestTestCase): self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) hs.config.enable_registration_captcha = False - hs.config.disable_registration = False + hs.config.enable_registration = True hs.get_handlers().federation_handler = Mock() diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index f9a2b22485..df0841b0b1 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -41,7 +41,7 @@ class RegisterRestServletTestCase(unittest.TestCase): self.hs.hostname = "superbig~testing~thing.com" self.hs.get_auth = Mock(return_value=self.auth) self.hs.get_handlers = Mock(return_value=self.handlers) - self.hs.config.disable_registration = False + self.hs.config.enable_registration = True # init the thing we're testing self.servlet = RegisterRestServlet(self.hs) @@ -120,7 +120,7 @@ class RegisterRestServletTestCase(unittest.TestCase): })) def test_POST_disabled_registration(self): - self.hs.config.disable_registration = True + self.hs.config.enable_registration = False self.request_data = json.dumps({ "username": "kermit", "password": "monkey" diff --git a/tests/utils.py b/tests/utils.py index 431252a6f1..3b1eb50d8d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -46,7 +46,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs): config = Mock() config.signing_key = [MockKey()] config.event_cache_size = 1 - config.disable_registration = False + config.enable_registration = True config.macaroon_secret_key = "not even a little secret" 
config.server_name = "server.under.test" config.trusted_third_party_id_servers = [] -- cgit 1.4.1 From 2c1fbea5319db2c64fa486adb32b5e66680b6daf Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 4 Feb 2016 10:22:44 +0000 Subject: Fix up logcontexts --- synapse/api/auth.py | 4 +- synapse/app/homeserver.py | 2 + synapse/crypto/keyring.py | 83 ++++++++++++----------- synapse/federation/federation_server.py | 4 +- synapse/federation/transaction_queue.py | 3 - synapse/handlers/_base.py | 10 +-- synapse/handlers/events.py | 11 +++- synapse/handlers/federation.py | 50 ++------------ synapse/handlers/presence.py | 20 +++--- synapse/handlers/register.py | 2 +- synapse/handlers/room.py | 11 +++- synapse/handlers/sync.py | 40 ++++++------ synapse/http/server.py | 5 +- synapse/notifier.py | 58 ++++++++-------- synapse/push/__init__.py | 2 +- synapse/push/pusherpool.py | 9 +-- synapse/rest/client/v2_alpha/account_data.py | 4 +- synapse/rest/client/v2_alpha/tags.py | 4 +- synapse/storage/_base.py | 18 ++--- synapse/storage/events.py | 34 ++++++---- synapse/storage/presence.py | 5 +- synapse/storage/stream.py | 9 +-- synapse/util/__init__.py | 6 +- synapse/util/async.py | 11 +++- synapse/util/caches/descriptors.py | 16 +++-- synapse/util/caches/snapshot_cache.py | 3 +- synapse/util/distributor.py | 15 +++-- synapse/util/logcontext.py | 98 ++++++++++++++++++++++++++-- synapse/util/logutils.py | 35 ++++++++++ synapse/util/metrics.py | 10 +-- synapse/util/ratelimitutils.py | 3 +- 31 files changed, 356 insertions(+), 229 deletions(-) (limited to 'synapse/rest/client/v2_alpha') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 5bba9343f6..e2f84c4d57 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -24,6 +24,7 @@ from synapse.api.constants import EventTypes, Membership, JoinRules from synapse.api.errors import AuthError, Codes, SynapseError, EventSizeError from synapse.types import Requester, RoomID, UserID, EventID from synapse.util.logutils import log_function +from synapse.util.logcontext import preserve_context_over_fn from unpaddedbase64 import decode_base64 import logging @@ -529,7 +530,8 @@ class Auth(object): default=[""] )[0] if user and access_token and ip_addr: - self.store.insert_client_ip( + preserve_context_over_fn( + self.store.insert_client_ip, user=user, access_token=access_token, ip=ip_addr, diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index e5c7e39cf9..2b4be7bdd0 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -709,6 +709,8 @@ def run(hs): phone_home_task.start(60 * 60 * 24, now=False) def in_thread(): + # Uncomment to enable tracing of log context changes. + # sys.settrace(logcontext_tracer) with LoggingContext("run"): change_resource_limit(hs.config.soft_file_limit) reactor.run() diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index cddec0b2bc..d08ee0aa91 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -18,6 +18,10 @@ from synapse.api.errors import SynapseError, Codes from synapse.util.retryutils import get_retry_limiter from synapse.util import unwrapFirstError from synapse.util.async import ObservableDeferred +from synapse.util.logcontext import ( + preserve_context_over_deferred, preserve_context_over_fn, PreserveLoggingContext, + preserve_fn +) from twisted.internet import defer @@ -142,40 +146,43 @@ class Keyring(object): for server_name, _ in server_and_json } - # We want to wait for any previous lookups to complete before - # proceeding. 
- wait_on_deferred = self.wait_for_previous_lookups( - [server_name for server_name, _ in server_and_json], - server_to_deferred, - ) + with PreserveLoggingContext(): - # Actually start fetching keys. - wait_on_deferred.addBoth( - lambda _: self.get_server_verify_keys(group_id_to_group, deferreds) - ) + # We want to wait for any previous lookups to complete before + # proceeding. + wait_on_deferred = self.wait_for_previous_lookups( + [server_name for server_name, _ in server_and_json], + server_to_deferred, + ) - # When we've finished fetching all the keys for a given server_name, - # resolve the deferred passed to `wait_for_previous_lookups` so that - # any lookups waiting will proceed. - server_to_gids = {} + # Actually start fetching keys. + wait_on_deferred.addBoth( + lambda _: self.get_server_verify_keys(group_id_to_group, deferreds) + ) + + # When we've finished fetching all the keys for a given server_name, + # resolve the deferred passed to `wait_for_previous_lookups` so that + # any lookups waiting will proceed. + server_to_gids = {} - def remove_deferreds(res, server_name, group_id): - server_to_gids[server_name].discard(group_id) - if not server_to_gids[server_name]: - d = server_to_deferred.pop(server_name, None) - if d: - d.callback(None) - return res + def remove_deferreds(res, server_name, group_id): + server_to_gids[server_name].discard(group_id) + if not server_to_gids[server_name]: + d = server_to_deferred.pop(server_name, None) + if d: + d.callback(None) + return res - for g_id, deferred in deferreds.items(): - server_name = group_id_to_group[g_id].server_name - server_to_gids.setdefault(server_name, set()).add(g_id) - deferred.addBoth(remove_deferreds, server_name, g_id) + for g_id, deferred in deferreds.items(): + server_name = group_id_to_group[g_id].server_name + server_to_gids.setdefault(server_name, set()).add(g_id) + deferred.addBoth(remove_deferreds, server_name, g_id) # Pass those keys to handle_key_deferred so that the json object # signatures can be verified return [ - handle_key_deferred( + preserve_context_over_fn( + handle_key_deferred, group_id_to_group[g_id], deferreds[g_id], ) @@ -198,12 +205,13 @@ class Keyring(object): if server_name in self.key_downloads ] if wait_on: - yield defer.DeferredList(wait_on) + with PreserveLoggingContext(): + yield defer.DeferredList(wait_on) else: break for server_name, deferred in server_to_deferred.items(): - d = ObservableDeferred(deferred) + d = ObservableDeferred(preserve_context_over_deferred(deferred)) self.key_downloads[server_name] = d def rm(r, server_name): @@ -244,12 +252,13 @@ class Keyring(object): for group in group_id_to_group.values(): for key_id in group.key_ids: if key_id in merged_results[group.server_name]: - group_id_to_deferred[group.group_id].callback(( - group.group_id, - group.server_name, - key_id, - merged_results[group.server_name][key_id], - )) + with PreserveLoggingContext(): + group_id_to_deferred[group.group_id].callback(( + group.group_id, + group.server_name, + key_id, + merged_results[group.server_name][key_id], + )) break else: missing_groups.setdefault( @@ -504,7 +513,7 @@ class Keyring(object): yield defer.gatherResults( [ - self.store_keys( + preserve_fn(self.store_keys)( server_name=key_server_name, from_server=server_name, verify_keys=verify_keys, @@ -573,7 +582,7 @@ class Keyring(object): yield defer.gatherResults( [ - self.store.store_server_keys_json( + preserve_fn(self.store.store_server_keys_json)( server_name=server_name, key_id=key_id, from_server=server_name, @@ -675,7 
+684,7 @@ class Keyring(object): # TODO(markjh): Store whether the keys have expired. yield defer.gatherResults( [ - self.store.store_server_verify_key( + preserve_fn(self.store.store_server_verify_key)( server_name, server_name, key.time_added, key ) for key_id, key in verify_keys.items() diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index a97aa0c94a..90718192dd 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -126,10 +126,8 @@ class FederationServer(FederationBase): results = [] for pdu in pdu_list: - d = self._handle_new_pdu(transaction.origin, pdu) - try: - yield d + yield self._handle_new_pdu(transaction.origin, pdu) results.append({}) except FederationError as e: self.send_failure(e, transaction.origin) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 622adad3ae..1928da03b3 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -103,7 +103,6 @@ class TransactionQueue(object): else: return not destination.startswith("localhost") - @defer.inlineCallbacks def enqueue_pdu(self, pdu, destinations, order): # We loop through all destinations to see whether we already have # a transaction in progress. If we do, stick it in the pending_pdus @@ -141,8 +140,6 @@ class TransactionQueue(object): deferreds.append(deferred) - yield defer.DeferredList(deferreds, consumeErrors=True) - # NO inlineCallbacks def enqueue_edu(self, edu): destination = edu.destination diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 1423df6cf3..fa83d3e464 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -293,19 +293,11 @@ class BaseHandler(object): with PreserveLoggingContext(): # Don't block waiting on waking up all the listeners. - notify_d = self.notifier.on_new_room_event( + self.notifier.on_new_room_event( event, event_stream_id, max_stream_id, extra_users=extra_users ) - def log_failure(f): - logger.warn( - "Failed to notify about %s: %s", - event.event_id, f.value - ) - - notify_d.addErrback(log_failure) - # If invite, remove room_state from unsigned before sending. 
event.unsigned.pop("invite_room_state", None) diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py index 5ad8f3779a..4933c31c19 100644 --- a/synapse/handlers/events.py +++ b/synapse/handlers/events.py @@ -18,6 +18,7 @@ from twisted.internet import defer from synapse.util.logutils import log_function from synapse.types import UserID from synapse.events.utils import serialize_event +from synapse.util.logcontext import preserve_context_over_fn from ._base import BaseHandler @@ -29,11 +30,17 @@ logger = logging.getLogger(__name__) def started_user_eventstream(distributor, user): - return distributor.fire("started_user_eventstream", user) + return preserve_context_over_fn( + distributor.fire, + "started_user_eventstream", user + ) def stopped_user_eventstream(distributor, user): - return distributor.fire("stopped_user_eventstream", user) + return preserve_context_over_fn( + distributor.fire, + "stopped_user_eventstream", user + ) class EventStreamHandler(BaseHandler): diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 2ce1e9d6c7..b78b0502d9 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -221,19 +221,11 @@ class FederationHandler(BaseHandler): extra_users.append(target_user) with PreserveLoggingContext(): - d = self.notifier.on_new_room_event( + self.notifier.on_new_room_event( event, event_stream_id, max_stream_id, extra_users=extra_users ) - def log_failure(f): - logger.warn( - "Failed to notify about %s: %s", - event.event_id, f.value - ) - - d.addErrback(log_failure) - if event.type == EventTypes.Member: if event.membership == Membership.JOIN: prev_state = context.current_state.get((event.type, event.state_key)) @@ -643,19 +635,11 @@ class FederationHandler(BaseHandler): ) with PreserveLoggingContext(): - d = self.notifier.on_new_room_event( + self.notifier.on_new_room_event( event, event_stream_id, max_stream_id, extra_users=[joinee] ) - def log_failure(f): - logger.warn( - "Failed to notify about %s: %s", - event.event_id, f.value - ) - - d.addErrback(log_failure) - logger.debug("Finished joining %s to %s", joinee, room_id) finally: room_queue = self.room_queues[room_id] @@ -730,18 +714,10 @@ class FederationHandler(BaseHandler): extra_users.append(target_user) with PreserveLoggingContext(): - d = self.notifier.on_new_room_event( + self.notifier.on_new_room_event( event, event_stream_id, max_stream_id, extra_users=extra_users ) - def log_failure(f): - logger.warn( - "Failed to notify about %s: %s", - event.event_id, f.value - ) - - d.addErrback(log_failure) - if event.type == EventTypes.Member: if event.content["membership"] == Membership.JOIN: user = UserID.from_string(event.state_key) @@ -811,19 +787,11 @@ class FederationHandler(BaseHandler): target_user = UserID.from_string(event.state_key) with PreserveLoggingContext(): - d = self.notifier.on_new_room_event( + self.notifier.on_new_room_event( event, event_stream_id, max_stream_id, extra_users=[target_user], ) - def log_failure(f): - logger.warn( - "Failed to notify about %s: %s", - event.event_id, f.value - ) - - d.addErrback(log_failure) - defer.returnValue(event) @defer.inlineCallbacks @@ -948,18 +916,10 @@ class FederationHandler(BaseHandler): extra_users.append(target_user) with PreserveLoggingContext(): - d = self.notifier.on_new_room_event( + self.notifier.on_new_room_event( event, event_stream_id, max_stream_id, extra_users=extra_users ) - def log_failure(f): - logger.warn( - "Failed to notify about %s: %s", - event.event_id, f.value - ) - - 
d.addErrback(log_failure) - new_pdu = event destinations = set() diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index d0c21ff5c9..b61394f2b5 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -378,9 +378,9 @@ class PresenceHandler(BaseHandler): was_polling = target_user in self._user_cachemap if now_online and not was_polling: - self.start_polling_presence(target_user, state=state) + yield self.start_polling_presence(target_user, state=state) elif not now_online and was_polling: - self.stop_polling_presence(target_user) + yield self.stop_polling_presence(target_user) # TODO(paul): perform a presence push as part of start/stop poll so # we don't have to do this all the time @@ -394,7 +394,8 @@ class PresenceHandler(BaseHandler): if now - prev_state.state.get("last_active", 0) < LAST_ACTIVE_GRANULARITY: return - self.changed_presencelike_data(user, {"last_active": now}) + with PreserveLoggingContext(): + self.changed_presencelike_data(user, {"last_active": now}) def get_joined_rooms_for_user(self, user): """Get the list of rooms a user is joined to. @@ -466,11 +467,12 @@ class PresenceHandler(BaseHandler): local_user, room_ids=[room_id], add_to_cache=False ) - self.push_update_to_local_and_remote( - observed_user=local_user, - users_to_push=[user], - statuscache=statuscache, - ) + with PreserveLoggingContext(): + self.push_update_to_local_and_remote( + observed_user=local_user, + users_to_push=[user], + statuscache=statuscache, + ) @defer.inlineCallbacks def send_presence_invite(self, observer_user, observed_user): @@ -556,7 +558,7 @@ class PresenceHandler(BaseHandler): observer_user.localpart, observed_user.to_string() ) - self.start_polling_presence( + yield self.start_polling_presence( observer_user, target_user=observed_user ) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 2660fd21a2..24c850ae9b 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -186,7 +186,7 @@ class RegistrationHandler(BaseHandler): token=token, password_hash="" ) - registered_user(self.distributor, user) + yield registered_user(self.distributor, user) defer.returnValue((user_id, token)) @defer.inlineCallbacks diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index bfd7e44e9f..a8e3a9029c 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -25,6 +25,7 @@ from synapse.api.constants import ( from synapse.api.errors import AuthError, StoreError, SynapseError, Codes from synapse.util import stringutils, unwrapFirstError from synapse.util.async import run_on_reactor +from synapse.util.logcontext import preserve_context_over_fn from signedjson.sign import verify_signed_json from signedjson.key import decode_verify_key_bytes @@ -46,11 +47,17 @@ def collect_presencelike_data(distributor, user, content): def user_left_room(distributor, user, room_id): - return distributor.fire("user_left_room", user=user, room_id=room_id) + return preserve_context_over_fn( + distributor.fire, + "user_left_room", user=user, room_id=room_id + ) def user_joined_room(distributor, user, room_id): - return distributor.fire("user_joined_room", user=user, room_id=room_id) + return preserve_context_over_fn( + distributor.fire, + "user_joined_room", user=user, room_id=room_id + ) class RoomCreationHandler(BaseHandler): diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 72271f2626..3f1cda5b0b 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -18,7 +18,7 @@ from 
._base import BaseHandler from synapse.streams.config import PaginationConfig from synapse.api.constants import Membership, EventTypes from synapse.util import unwrapFirstError -from synapse.util.logcontext import LoggingContext +from synapse.util.logcontext import LoggingContext, PreserveLoggingContext from twisted.internet import defer @@ -241,15 +241,16 @@ class SyncHandler(BaseHandler): deferreds = [] for event in room_list: if event.membership == Membership.JOIN: - room_sync_deferred = self.full_state_sync_for_joined_room( - room_id=event.room_id, - sync_config=sync_config, - now_token=now_token, - timeline_since_token=timeline_since_token, - ephemeral_by_room=ephemeral_by_room, - tags_by_room=tags_by_room, - account_data_by_room=account_data_by_room, - ) + with PreserveLoggingContext(LoggingContext.current_context()): + room_sync_deferred = self.full_state_sync_for_joined_room( + room_id=event.room_id, + sync_config=sync_config, + now_token=now_token, + timeline_since_token=timeline_since_token, + ephemeral_by_room=ephemeral_by_room, + tags_by_room=tags_by_room, + account_data_by_room=account_data_by_room, + ) room_sync_deferred.addCallback(joined.append) deferreds.append(room_sync_deferred) elif event.membership == Membership.INVITE: @@ -262,15 +263,16 @@ class SyncHandler(BaseHandler): leave_token = now_token.copy_and_replace( "room_key", "s%d" % (event.stream_ordering,) ) - room_sync_deferred = self.full_state_sync_for_archived_room( - sync_config=sync_config, - room_id=event.room_id, - leave_event_id=event.event_id, - leave_token=leave_token, - timeline_since_token=timeline_since_token, - tags_by_room=tags_by_room, - account_data_by_room=account_data_by_room, - ) + with PreserveLoggingContext(LoggingContext.current_context()): + room_sync_deferred = self.full_state_sync_for_archived_room( + sync_config=sync_config, + room_id=event.room_id, + leave_event_id=event.event_id, + leave_token=leave_token, + timeline_since_token=timeline_since_token, + tags_by_room=tags_by_room, + account_data_by_room=account_data_by_room, + ) room_sync_deferred.addCallback(archived.append) deferreds.append(room_sync_deferred) diff --git a/synapse/http/server.py b/synapse/http/server.py index 06935783ca..a90e2e1125 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -99,9 +99,8 @@ def request_handler(request_handler): request_context.request = request_id with request.processing(): try: - d = request_handler(self, request) - with PreserveLoggingContext(): - yield d + with PreserveLoggingContext(request_context): + yield request_handler(self, request) except CodeMessageException as e: code = e.code if isinstance(e, SynapseError): diff --git a/synapse/notifier.py b/synapse/notifier.py index 1a90bd55cd..560866b26e 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -18,7 +18,8 @@ from synapse.api.constants import EventTypes from synapse.api.errors import AuthError from synapse.util.logutils import log_function -from synapse.util.async import run_on_reactor, ObservableDeferred +from synapse.util.async import ObservableDeferred +from synapse.util.logcontext import PreserveLoggingContext from synapse.types import StreamToken import synapse.metrics @@ -73,7 +74,8 @@ class _NotifierUserStream(object): self.current_token = current_token self.last_notified_ms = time_now_ms - self.notify_deferred = ObservableDeferred(defer.Deferred()) + with PreserveLoggingContext(): + self.notify_deferred = ObservableDeferred(defer.Deferred()) def notify(self, stream_key, stream_id, time_now_ms): """Notify 
any listeners for this user of a new event from an @@ -88,8 +90,10 @@ class _NotifierUserStream(object): ) self.last_notified_ms = time_now_ms noify_deferred = self.notify_deferred - self.notify_deferred = ObservableDeferred(defer.Deferred()) - noify_deferred.callback(self.current_token) + + with PreserveLoggingContext(): + self.notify_deferred = ObservableDeferred(defer.Deferred()) + noify_deferred.callback(self.current_token) def remove(self, notifier): """ Remove this listener from all the indexes in the Notifier @@ -184,8 +188,6 @@ class Notifier(object): lambda: count(bool, self.appservice_to_user_streams.values()), ) - @log_function - @defer.inlineCallbacks def on_new_room_event(self, event, room_stream_id, max_room_stream_id, extra_users=[]): """ Used by handlers to inform the notifier something has happened @@ -199,12 +201,11 @@ class Notifier(object): until all previous events have been persisted before notifying the client streams. """ - yield run_on_reactor() - - self.pending_new_room_events.append(( - room_stream_id, event, extra_users - )) - self._notify_pending_new_room_events(max_room_stream_id) + with PreserveLoggingContext(): + self.pending_new_room_events.append(( + room_stream_id, event, extra_users + )) + self._notify_pending_new_room_events(max_room_stream_id) def _notify_pending_new_room_events(self, max_room_stream_id): """Notify for the room events that were queued waiting for a previous @@ -251,31 +252,29 @@ class Notifier(object): extra_streams=app_streams, ) - @defer.inlineCallbacks - @log_function def on_new_event(self, stream_key, new_token, users=[], rooms=[], extra_streams=set()): """ Used to inform listeners that something has happend event wise. Will wake up all listeners for the given users and rooms. """ - yield run_on_reactor() - user_streams = set() + with PreserveLoggingContext(): + user_streams = set() - for user in users: - user_stream = self.user_to_user_stream.get(str(user)) - if user_stream is not None: - user_streams.add(user_stream) + for user in users: + user_stream = self.user_to_user_stream.get(str(user)) + if user_stream is not None: + user_streams.add(user_stream) - for room in rooms: - user_streams |= self.room_to_user_streams.get(room, set()) + for room in rooms: + user_streams |= self.room_to_user_streams.get(room, set()) - time_now_ms = self.clock.time_msec() - for user_stream in user_streams: - try: - user_stream.notify(stream_key, new_token, time_now_ms) - except: - logger.exception("Failed to notify listener") + time_now_ms = self.clock.time_msec() + for user_stream in user_streams: + try: + user_stream.notify(stream_key, new_token, time_now_ms) + except: + logger.exception("Failed to notify listener") @defer.inlineCallbacks def wait_for_events(self, user_id, timeout, callback, room_ids=None, @@ -325,7 +324,8 @@ class Notifier(object): # that we don't miss any current_token updates. 
prev_token = current_token listener = user_stream.new_listener(prev_token) - yield listener.deferred + with PreserveLoggingContext(): + yield listener.deferred except defer.CancelledError: break diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index 64e581b8ba..8da2d8716c 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -111,7 +111,7 @@ class Pusher(object): self.user_id, config, timeout=0, affect_presence=False ) self.last_token = chunk['end'] - self.store.update_pusher_last_token( + yield self.store.update_pusher_last_token( self.app_id, self.pushkey, self.user_id, self.last_token ) logger.info("New pusher %s for user %s starting from token %s", diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index d1b7c0802f..d7dcb2de4b 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -18,6 +18,7 @@ from twisted.internet import defer from httppusher import HttpPusher from synapse.push import PusherConfigException +from synapse.util.logcontext import preserve_fn import logging @@ -76,7 +77,7 @@ class PusherPool: "Removing pusher for app id %s, pushkey %s, user %s", app_id, pushkey, p['user_name'] ) - self.remove_pusher(p['app_id'], p['pushkey'], p['user_name']) + yield self.remove_pusher(p['app_id'], p['pushkey'], p['user_name']) @defer.inlineCallbacks def remove_pushers_by_user(self, user_id): @@ -91,7 +92,7 @@ class PusherPool: "Removing pusher for app id %s, pushkey %s, user %s", p['app_id'], p['pushkey'], p['user_name'] ) - self.remove_pusher(p['app_id'], p['pushkey'], p['user_name']) + yield self.remove_pusher(p['app_id'], p['pushkey'], p['user_name']) @defer.inlineCallbacks def _add_pusher_to_store(self, user_id, access_token, profile_tag, kind, @@ -110,7 +111,7 @@ class PusherPool: lang=lang, data=data, ) - self._refresh_pusher(app_id, pushkey, user_id) + yield self._refresh_pusher(app_id, pushkey, user_id) def _create_pusher(self, pusherdict): if pusherdict['kind'] == 'http': @@ -166,7 +167,7 @@ class PusherPool: if fullid in self.pushers: self.pushers[fullid].stop() self.pushers[fullid] = p - p.start() + preserve_fn(p.start)() logger.info("Started pushers") diff --git a/synapse/rest/client/v2_alpha/account_data.py b/synapse/rest/client/v2_alpha/account_data.py index 985efe2a62..1456881c1a 100644 --- a/synapse/rest/client/v2_alpha/account_data.py +++ b/synapse/rest/client/v2_alpha/account_data.py @@ -57,7 +57,7 @@ class AccountDataServlet(RestServlet): user_id, account_data_type, body ) - yield self.notifier.on_new_event( + self.notifier.on_new_event( "account_data_key", max_id, users=[user_id] ) @@ -99,7 +99,7 @@ class RoomAccountDataServlet(RestServlet): user_id, room_id, account_data_type, body ) - yield self.notifier.on_new_event( + self.notifier.on_new_event( "account_data_key", max_id, users=[user_id] ) diff --git a/synapse/rest/client/v2_alpha/tags.py b/synapse/rest/client/v2_alpha/tags.py index 42f2203f3d..79c436a8cf 100644 --- a/synapse/rest/client/v2_alpha/tags.py +++ b/synapse/rest/client/v2_alpha/tags.py @@ -80,7 +80,7 @@ class TagServlet(RestServlet): max_id = yield self.store.add_tag_to_room(user_id, room_id, tag, body) - yield self.notifier.on_new_event( + self.notifier.on_new_event( "account_data_key", max_id, users=[user_id] ) @@ -94,7 +94,7 @@ class TagServlet(RestServlet): max_id = yield self.store.remove_tag_from_room(user_id, room_id, tag) - yield self.notifier.on_new_event( + self.notifier.on_new_event( "account_data_key", max_id, users=[user_id] ) diff --git a/synapse/storage/_base.py 
b/synapse/storage/_base.py index cfb87d9328..2e97ac84a8 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -15,7 +15,7 @@ import logging from synapse.api.errors import StoreError -from synapse.util.logcontext import preserve_context_over_fn, LoggingContext +from synapse.util.logcontext import LoggingContext, PreserveLoggingContext from synapse.util.caches.dictionary_cache import DictionaryCache from synapse.util.caches.descriptors import Cache import synapse.metrics @@ -298,10 +298,10 @@ class SQLBaseStore(object): func, *args, **kwargs ) - result = yield preserve_context_over_fn( - self._db_pool.runWithConnection, - inner_func, *args, **kwargs - ) + with PreserveLoggingContext(): + result = yield self._db_pool.runWithConnection( + inner_func, *args, **kwargs + ) for after_callback, after_args in after_callbacks: after_callback(*after_args) @@ -326,10 +326,10 @@ class SQLBaseStore(object): return func(conn, *args, **kwargs) - result = yield preserve_context_over_fn( - self._db_pool.runWithConnection, - inner_func, *args, **kwargs - ) + with PreserveLoggingContext(): + result = yield self._db_pool.runWithConnection( + inner_func, *args, **kwargs + ) defer.returnValue(result) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 4d7cdd00d0..c6ed54721c 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -19,7 +19,7 @@ from twisted.internet import defer, reactor from synapse.events import FrozenEvent, USE_FROZEN_DICTS from synapse.events.utils import prune_event -from synapse.util.logcontext import preserve_context_over_deferred +from synapse.util.logcontext import preserve_fn, PreserveLoggingContext from synapse.util.logutils import log_function from synapse.api.constants import EventTypes @@ -664,14 +664,16 @@ class EventsStore(SQLBaseStore): for ids, d in lst: if not d.called: try: - d.callback([ - res[i] - for i in ids - if i in res - ]) + with PreserveLoggingContext(): + d.callback([ + res[i] + for i in ids + if i in res + ]) except: logger.exception("Failed to callback") - reactor.callFromThread(fire, event_list, row_dict) + with PreserveLoggingContext(): + reactor.callFromThread(fire, event_list, row_dict) except Exception as e: logger.exception("do_fetch") @@ -679,10 +681,12 @@ class EventsStore(SQLBaseStore): def fire(evs): for _, d in evs: if not d.called: - d.errback(e) + with PreserveLoggingContext(): + d.errback(e) if event_list: - reactor.callFromThread(fire, event_list) + with PreserveLoggingContext(): + reactor.callFromThread(fire, event_list) @defer.inlineCallbacks def _enqueue_events(self, events, check_redacted=True, @@ -709,18 +713,20 @@ class EventsStore(SQLBaseStore): should_start = False if should_start: - self.runWithConnection( - self._do_fetch - ) + with PreserveLoggingContext(): + self.runWithConnection( + self._do_fetch + ) - rows = yield preserve_context_over_deferred(events_d) + with PreserveLoggingContext(): + rows = yield events_d if not allow_rejected: rows[:] = [r for r in rows if not r["rejects"]] res = yield defer.gatherResults( [ - self._get_event_from_row( + preserve_fn(self._get_event_from_row)( row["internal_metadata"], row["json"], row["redacts"], check_redacted=check_redacted, get_prev_content=get_prev_content, diff --git a/synapse/storage/presence.py b/synapse/storage/presence.py index 9b3aecaf8c..ef525f34c5 100644 --- a/synapse/storage/presence.py +++ b/synapse/storage/presence.py @@ -68,8 +68,9 @@ class PresenceStore(SQLBaseStore): for row in rows }) + @defer.inlineCallbacks def 
set_presence_state(self, user_localpart, new_state): - res = self._simple_update_one( + res = yield self._simple_update_one( table="presence", keyvalues={"user_id": user_localpart}, updatevalues={"state": new_state["state"], @@ -79,7 +80,7 @@ class PresenceStore(SQLBaseStore): ) self.get_presence_state.invalidate((user_localpart,)) - return res + defer.returnValue(res) def allow_presence_visible(self, observed_localpart, observer_userid): return self._simple_insert( diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py index 50436cb2d2..367ffc9543 100644 --- a/synapse/storage/stream.py +++ b/synapse/storage/stream.py @@ -39,6 +39,7 @@ from ._base import SQLBaseStore from synapse.util.caches.descriptors import cachedInlineCallbacks from synapse.api.constants import EventTypes from synapse.types import RoomStreamToken +from synapse.util.logcontext import preserve_fn import logging @@ -170,12 +171,12 @@ class StreamStore(SQLBaseStore): room_ids = list(room_ids) for rm_ids in (room_ids[i:i + 20] for i in xrange(0, len(room_ids), 20)): res = yield defer.gatherResults([ - self.get_room_events_stream_for_room( - room_id, from_key, to_key, limit - ).addCallback(lambda r, rm: (rm, r), room_id) + preserve_fn(self.get_room_events_stream_for_room)( + room_id, from_key, to_key, limit, + ) for room_id in room_ids ]) - results.update(dict(res)) + results.update(dict(zip(rm_ids, res))) defer.returnValue(results) diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 7566d9eb33..133671e238 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.util.logcontext import LoggingContext, PreserveLoggingContext +from synapse.util.logcontext import PreserveLoggingContext from twisted.internet import defer, reactor, task @@ -61,10 +61,8 @@ class Clock(object): *args: Postional arguments to pass to function. **kwargs: Key arguments to pass to function. """ - current_context = LoggingContext.current_context() - def wrapped_callback(*args, **kwargs): - with PreserveLoggingContext(current_context): + with PreserveLoggingContext(): callback(*args, **kwargs) with PreserveLoggingContext(): diff --git a/synapse/util/async.py b/synapse/util/async.py index 200edd404c..640fae3890 100644 --- a/synapse/util/async.py +++ b/synapse/util/async.py @@ -16,13 +16,16 @@ from twisted.internet import defer, reactor -from .logcontext import preserve_context_over_deferred +from .logcontext import PreserveLoggingContext +@defer.inlineCallbacks def sleep(seconds): d = defer.Deferred() - reactor.callLater(seconds, d.callback, seconds) - return preserve_context_over_deferred(d) + with PreserveLoggingContext(): + reactor.callLater(seconds, d.callback, seconds) + res = yield d + defer.returnValue(res) def run_on_reactor(): @@ -54,6 +57,7 @@ class ObservableDeferred(object): object.__setattr__(self, "_result", (True, r)) while self._observers: try: + # TODO: Handle errors here. self._observers.pop().callback(r) except: pass @@ -63,6 +67,7 @@ class ObservableDeferred(object): object.__setattr__(self, "_result", (False, f)) while self._observers: try: + # TODO: Handle errors here. 
self._observers.pop().errback(f) except: pass diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index e27917c63a..277854ccbc 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -18,6 +18,9 @@ from synapse.util.async import ObservableDeferred from synapse.util import unwrapFirstError from synapse.util.caches.lrucache import LruCache from synapse.util.caches.treecache import TreeCache +from synapse.util.logcontext import ( + PreserveLoggingContext, preserve_context_over_deferred, preserve_context_over_fn +) from . import caches_by_name, DEBUG_CACHES, cache_counter @@ -190,7 +193,7 @@ class CacheDescriptor(object): defer.returnValue(cached_result) observer.addCallback(check_result) - return observer + return preserve_context_over_deferred(observer) except KeyError: # Get the sequence number of the cache before reading from the # database so that we can tell if the cache is invalidated @@ -198,6 +201,7 @@ class CacheDescriptor(object): sequence = self.cache.sequence ret = defer.maybeDeferred( + preserve_context_over_fn, self.function_to_call, obj, *args, **kwargs ) @@ -211,7 +215,7 @@ class CacheDescriptor(object): ret = ObservableDeferred(ret, consumeErrors=True) self.cache.update(sequence, cache_key, ret) - return ret.observe() + return preserve_context_over_deferred(ret.observe()) wrapped.invalidate = self.cache.invalidate wrapped.invalidate_all = self.cache.invalidate_all @@ -299,6 +303,7 @@ class CacheListDescriptor(object): args_to_call[self.list_name] = missing ret_d = defer.maybeDeferred( + preserve_context_over_fn, self.function_to_call, **args_to_call ) @@ -308,7 +313,8 @@ class CacheListDescriptor(object): # We need to create deferreds for each arg in the list so that # we can insert the new deferred into the cache. for arg in missing: - observer = ret_d.observe() + with PreserveLoggingContext(): + observer = ret_d.observe() observer.addCallback(lambda r, arg: r.get(arg, None), arg) observer = ObservableDeferred(observer) @@ -327,10 +333,10 @@ class CacheListDescriptor(object): cached[arg] = res - return defer.gatherResults( + return preserve_context_over_deferred(defer.gatherResults( cached.values(), consumeErrors=True, - ).addErrback(unwrapFirstError).addCallback(lambda res: dict(res)) + ).addErrback(unwrapFirstError).addCallback(lambda res: dict(res))) obj.__dict__[self.orig.__name__] = wrapped diff --git a/synapse/util/caches/snapshot_cache.py b/synapse/util/caches/snapshot_cache.py index b1e40417fd..d03678b8c8 100644 --- a/synapse/util/caches/snapshot_cache.py +++ b/synapse/util/caches/snapshot_cache.py @@ -87,7 +87,8 @@ class SnapshotCache(object): # expire from the rotation of that cache. 
self.next_result_cache[key] = result self.pending_result_cache.pop(key, None) + return r - result.observe().addBoth(shuffle_along) + result.addBoth(shuffle_along) return result.observe() diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py index 4ebfebf701..8875813de4 100644 --- a/synapse/util/distributor.py +++ b/synapse/util/distributor.py @@ -15,9 +15,7 @@ from twisted.internet import defer -from synapse.util.logcontext import ( - PreserveLoggingContext, preserve_context_over_deferred, -) +from synapse.util.logcontext import PreserveLoggingContext from synapse.util import unwrapFirstError @@ -97,6 +95,7 @@ class Signal(object): Each observer callable may return a Deferred.""" self.observers.append(observer) + @defer.inlineCallbacks def fire(self, *args, **kwargs): """Invokes every callable in the observer list, passing in the args and kwargs. Exceptions thrown by observers are logged but ignored. It is @@ -116,6 +115,7 @@ class Signal(object): failure.getTracebackObject())) if not self.suppress_failures: return failure + return defer.maybeDeferred(observer, *args, **kwargs).addErrback(eb) with PreserveLoggingContext(): @@ -124,8 +124,11 @@ class Signal(object): for observer in self.observers ] - d = defer.gatherResults(deferreds, consumeErrors=True) + res = yield defer.gatherResults( + deferreds, consumeErrors=True + ).addErrback(unwrapFirstError) - d.addErrback(unwrapFirstError) + defer.returnValue(res) - return preserve_context_over_deferred(d) + def __repr__(self): + return "" % (self.name,) diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py index e701092cd8..9134e67908 100644 --- a/synapse/util/logcontext.py +++ b/synapse/util/logcontext.py @@ -48,7 +48,7 @@ class LoggingContext(object): __slots__ = [ "parent_context", "name", "usage_start", "usage_end", "main_thread", - "__dict__", "tag", + "__dict__", "tag", "alive", ] thread_local = threading.local() @@ -88,6 +88,7 @@ class LoggingContext(object): self.usage_start = None self.main_thread = threading.current_thread() self.tag = "" + self.alive = True def __str__(self): return "%s@%x" % (self.name, id(self)) @@ -106,6 +107,7 @@ class LoggingContext(object): The context that was previously active """ current = cls.current_context() + if current is not context: current.stop() cls.thread_local.current_context = context @@ -117,6 +119,7 @@ class LoggingContext(object): if self.parent_context is not None: raise Exception("Attempt to enter logging context multiple times") self.parent_context = self.set_current_context(self) + self.alive = True return self def __exit__(self, type, value, traceback): @@ -136,6 +139,7 @@ class LoggingContext(object): self ) self.parent_context = None + self.alive = False def __getattr__(self, name): """Delegate member lookup to parent context""" @@ -213,7 +217,7 @@ class PreserveLoggingContext(object): exited. 
Used to restore the context after a function using @defer.inlineCallbacks is resumed by a callback from the reactor.""" - __slots__ = ["current_context", "new_context"] + __slots__ = ["current_context", "new_context", "has_parent"] def __init__(self, new_context=LoggingContext.sentinel): self.new_context = new_context @@ -224,11 +228,26 @@ class PreserveLoggingContext(object): self.new_context ) + if self.current_context: + self.has_parent = self.current_context.parent_context is not None + if not self.current_context.alive: + logger.warn( + "Entering dead context: %s", + self.current_context, + ) + def __exit__(self, type, value, traceback): """Restores the current logging context""" - LoggingContext.set_current_context(self.current_context) + context = LoggingContext.set_current_context(self.current_context) + + if context != self.new_context: + logger.warn( + "Unexpected logging context: %s is not %s", + context, self.new_context, + ) + if self.current_context is not LoggingContext.sentinel: - if self.current_context.parent_context is None: + if not self.current_context.alive: logger.warn( "Restoring dead context: %s", self.current_context, @@ -289,3 +308,74 @@ def preserve_context_over_deferred(deferred): d = _PreservingContextDeferred(current_context) deferred.chainDeferred(d) return d + + +def preserve_fn(f): + """Ensures that function is called with correct context and that context is + restored after return. Useful for wrapping functions that return a deferred + which you don't yield on. + """ + current = LoggingContext.current_context() + + def g(*args, **kwargs): + with PreserveLoggingContext(current): + return f(*args, **kwargs) + + return g + + +# modules to ignore in `logcontext_tracer` +_to_ignore = [ + "synapse.util.logcontext", + "synapse.http.server", + "synapse.storage._base", + "synapse.util.async", +] + + +def logcontext_tracer(frame, event, arg): + """A tracer that logs whenever a logcontext "unexpectedly" changes within + a function. Probably inaccurate. + + Use by calling `sys.settrace(logcontext_tracer)` in the main thread. + """ + if event == 'call': + name = frame.f_globals["__name__"] + if name.startswith("synapse"): + if name == "synapse.util.logcontext": + if frame.f_code.co_name in ["__enter__", "__exit__"]: + tracer = frame.f_back.f_trace + if tracer: + tracer.just_changed = True + + tracer = frame.f_trace + if tracer: + return tracer + + if not any(name.startswith(ig) for ig in _to_ignore): + return LineTracer() + + +class LineTracer(object): + __slots__ = ["context", "just_changed"] + + def __init__(self): + self.context = LoggingContext.current_context() + self.just_changed = False + + def __call__(self, frame, event, arg): + if event in 'line': + if self.just_changed: + self.context = LoggingContext.current_context() + self.just_changed = False + else: + c = LoggingContext.current_context() + if c != self.context: + logger.info( + "Context changed! 
%s -> %s, %s, %s", + self.context, c, + frame.f_code.co_filename, frame.f_lineno + ) + self.context = c + + return self diff --git a/synapse/util/logutils.py b/synapse/util/logutils.py index c37a157787..3a83828d25 100644 --- a/synapse/util/logutils.py +++ b/synapse/util/logutils.py @@ -168,3 +168,38 @@ def trace_function(f): wrapped.__name__ = func_name return wrapped + + +def get_previous_frames(): + s = inspect.currentframe().f_back.f_back + to_return = [] + while s: + if s.f_globals["__name__"].startswith("synapse"): + filename, lineno, function, _, _ = inspect.getframeinfo(s) + args_string = inspect.formatargvalues(*inspect.getargvalues(s)) + + to_return.append("{{ %s:%d %s - Args: %s }}" % ( + filename, lineno, function, args_string + )) + + s = s.f_back + + return ", ". join(to_return) + + +def get_previous_frame(ignore=[]): + s = inspect.currentframe().f_back.f_back + + while s: + if s.f_globals["__name__"].startswith("synapse"): + if not any(s.f_globals["__name__"].startswith(ig) for ig in ignore): + filename, lineno, function, _, _ = inspect.getframeinfo(s) + args_string = inspect.formatargvalues(*inspect.getargvalues(s)) + + return "{{ %s:%d %s - Args: %s }}" % ( + filename, lineno, function, args_string + ) + + s = s.f_back + + return None diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py index daf6087fe0..ca48007218 100644 --- a/synapse/util/metrics.py +++ b/synapse/util/metrics.py @@ -68,16 +68,18 @@ class Measure(object): block_timer.inc_by(duration, self.name) context = LoggingContext.current_context() - if not context: - return if context != self.start_context: logger.warn( - "Context have unexpectedly changed %r, %r", - context, self.start_context + "Context have unexpectedly changed from '%s' to '%s'. (%r)", + context, self.start_context, self.name ) return + if not context: + logger.warn("Expected context. (%r)", self.name) + return + ru_utime, ru_stime = context.get_resource_usage() block_ru_utime.inc_by(ru_utime, self.name) diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index ea321bc6a9..4076eed269 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -18,6 +18,7 @@ from twisted.internet import defer from synapse.api.errors import LimitExceededError from synapse.util.async import sleep +from synapse.util.logcontext import preserve_fn import collections import contextlib @@ -163,7 +164,7 @@ class _PerHostRatelimiter(object): "Ratelimit [%s]: sleeping req", id(request_id), ) - ret_defer = sleep(self.sleep_msec / 1000.0) + ret_defer = preserve_fn(sleep)(self.sleep_msec / 1000.0) self.sleeping_requests.add(request_id) -- cgit 1.4.1