From 3db1377b261eaf3fcff486547d6302ccb24553e5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 23 Jul 2019 13:31:03 +0100 Subject: Log when we receive receipt from a different origin --- synapse/handlers/receipts.py | 35 ++++++++++++++++++++++------------- 1 file changed, 22 insertions(+), 13 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py index a85dd8cdee..e58bf7e360 100644 --- a/synapse/handlers/receipts.py +++ b/synapse/handlers/receipts.py @@ -17,7 +17,7 @@ import logging from twisted.internet import defer from synapse.handlers._base import BaseHandler -from synapse.types import ReadReceipt +from synapse.types import ReadReceipt, get_domain_from_id logger = logging.getLogger(__name__) @@ -40,18 +40,27 @@ class ReceiptsHandler(BaseHandler): def _received_remote_receipt(self, origin, content): """Called when we receive an EDU of type m.receipt from a remote HS. """ - receipts = [ - ReadReceipt( - room_id=room_id, - receipt_type=receipt_type, - user_id=user_id, - event_ids=user_values["event_ids"], - data=user_values.get("data", {}), - ) - for room_id, room_values in content.items() - for receipt_type, users in room_values.items() - for user_id, user_values in users.items() - ] + receipts = [] + for room_id, room_values in content.items(): + for receipt_type, users in room_values.items(): + for user_id, user_values in users.items(): + if get_domain_from_id(user_id) != origin: + logger.info( + "Received receipt for user %r from server %s, ignoring", + user_id, + origin, + ) + continue + + receipts.append( + ReadReceipt( + room_id=room_id, + receipt_type=receipt_type, + user_id=user_id, + event_ids=user_values["event_ids"], + data=user_values.get("data", {}), + ) + ) yield self._handle_new_receipts(receipts) -- cgit 1.4.1 From 4806651744616bf48abf408034ab9560e33f60ce Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 23 Jul 2019 23:00:55 +1000 Subject: Replace returnValue with return (#5736) --- changelog.d/5736.misc | 1 + docs/log_contexts.rst | 2 +- synapse/api/auth.py | 44 ++++---- synapse/api/filtering.py | 2 +- synapse/app/frontend_proxy.py | 8 +- synapse/app/homeserver.py | 2 +- synapse/appservice/__init__.py | 28 +++--- synapse/appservice/api.py | 38 +++---- synapse/appservice/scheduler.py | 4 +- synapse/crypto/keyring.py | 14 +-- synapse/events/builder.py | 16 ++- synapse/events/snapshot.py | 28 +++--- synapse/events/third_party_rules.py | 8 +- synapse/events/utils.py | 4 +- synapse/federation/federation_base.py | 6 +- synapse/federation/federation_client.py | 46 ++++----- synapse/federation/federation_server.py | 77 ++++++-------- synapse/federation/sender/per_destination_queue.py | 4 +- synapse/federation/sender/transaction_manager.py | 2 +- synapse/federation/transport/client.py | 30 +++--- synapse/groups/attestations.py | 2 +- synapse/groups/groups_server.py | 92 ++++++++--------- synapse/handlers/account_data.py | 4 +- synapse/handlers/account_validity.py | 6 +- synapse/handlers/acme.py | 2 +- synapse/handlers/admin.py | 10 +- synapse/handlers/appservice.py | 22 ++-- synapse/handlers/auth.py | 44 ++++---- synapse/handlers/deactivate_account.py | 2 +- synapse/handlers/device.py | 22 ++-- synapse/handlers/directory.py | 14 +-- synapse/handlers/e2e_keys.py | 10 +- synapse/handlers/e2e_room_keys.py | 8 +- synapse/handlers/events.py | 6 +- synapse/handlers/federation.py | 82 ++++++++------- synapse/handlers/groups_local.py | 26 ++--- synapse/handlers/identity.py | 18 ++-- synapse/handlers/initial_sync.py 
| 54 +++++----- synapse/handlers/message.py | 32 +++--- synapse/handlers/pagination.py | 14 ++- synapse/handlers/presence.py | 54 +++++----- synapse/handlers/profile.py | 18 ++-- synapse/handlers/receipts.py | 14 +-- synapse/handlers/register.py | 16 +-- synapse/handlers/room.py | 16 +-- synapse/handlers/room_list.py | 10 +- synapse/handlers/room_member.py | 42 ++++---- synapse/handlers/room_member_worker.py | 2 +- synapse/handlers/search.py | 14 ++- synapse/handlers/state_deltas.py | 8 +- synapse/handlers/stats.py | 6 +- synapse/handlers/sync.py | 112 ++++++++++----------- synapse/handlers/typing.py | 4 +- synapse/handlers/user_directory.py | 2 +- synapse/http/client.py | 28 +++--- synapse/http/federation/matrix_federation_agent.py | 46 ++++----- synapse/http/federation/srv_resolver.py | 8 +- synapse/http/matrixfederationclient.py | 16 +-- synapse/logging/opentracing.py | 6 +- synapse/module_api/__init__.py | 2 +- synapse/notifier.py | 18 ++-- synapse/push/bulk_push_rule_evaluator.py | 10 +- synapse/push/httppusher.py | 18 ++-- synapse/push/mailer.py | 84 +++++++--------- synapse/push/presentable_names.py | 25 +++-- synapse/push/push_tools.py | 4 +- synapse/push/pusherpool.py | 6 +- synapse/replication/http/_base.py | 2 +- synapse/replication/http/federation.py | 10 +- synapse/replication/http/login.py | 2 +- synapse/replication/http/membership.py | 4 +- synapse/replication/http/register.py | 4 +- synapse/replication/http/send_event.py | 4 +- synapse/replication/tcp/streams/_base.py | 12 +-- synapse/replication/tcp/streams/events.py | 2 +- synapse/rest/admin/__init__.py | 48 +++++---- synapse/rest/admin/server_notice_servlet.py | 2 +- synapse/rest/client/v1/directory.py | 18 ++-- synapse/rest/client/v1/events.py | 6 +- synapse/rest/client/v1/initial_sync.py | 2 +- synapse/rest/client/v1/login.py | 14 +-- synapse/rest/client/v1/logout.py | 4 +- synapse/rest/client/v1/presence.py | 4 +- synapse/rest/client/v1/profile.py | 14 +-- synapse/rest/client/v1/push_rule.py | 10 +- synapse/rest/client/v1/pusher.py | 8 +- synapse/rest/client/v1/room.py | 46 ++++----- synapse/rest/client/v1/voip.py | 22 ++-- synapse/rest/client/v2_alpha/account.py | 32 +++--- synapse/rest/client/v2_alpha/account_data.py | 8 +- synapse/rest/client/v2_alpha/account_validity.py | 4 +- synapse/rest/client/v2_alpha/auth.py | 4 +- synapse/rest/client/v2_alpha/capabilities.py | 2 +- synapse/rest/client/v2_alpha/devices.py | 10 +- synapse/rest/client/v2_alpha/filter.py | 4 +- synapse/rest/client/v2_alpha/groups.py | 64 ++++++------ synapse/rest/client/v2_alpha/keys.py | 8 +- synapse/rest/client/v2_alpha/notifications.py | 4 +- synapse/rest/client/v2_alpha/openid.py | 18 ++-- synapse/rest/client/v2_alpha/read_marker.py | 2 +- synapse/rest/client/v2_alpha/receipts.py | 2 +- synapse/rest/client/v2_alpha/register.py | 38 ++++--- synapse/rest/client/v2_alpha/relations.py | 8 +- synapse/rest/client/v2_alpha/report_event.py | 2 +- synapse/rest/client/v2_alpha/room_keys.py | 14 +-- .../client/v2_alpha/room_upgrade_rest_servlet.py | 2 +- synapse/rest/client/v2_alpha/sendtodevice.py | 2 +- synapse/rest/client/v2_alpha/sync.py | 48 ++++----- synapse/rest/client/v2_alpha/tags.py | 6 +- synapse/rest/client/v2_alpha/thirdparty.py | 10 +- synapse/rest/client/v2_alpha/user_directory.py | 4 +- synapse/rest/media/v1/media_repository.py | 18 ++-- synapse/rest/media/v1/media_storage.py | 12 +-- synapse/rest/media/v1/preview_url_resource.py | 34 +++---- .../resource_limits_server_notices.py | 2 +- synapse/server_notices/server_notices_manager.py | 
6 +- synapse/state/__init__.py | 38 ++++--- synapse/state/v1.py | 8 +- synapse/state/v2.py | 26 ++--- synapse/storage/__init__.py | 2 +- synapse/storage/_base.py | 14 +-- synapse/storage/account_data.py | 14 ++- synapse/storage/appservice.py | 14 ++- synapse/storage/background_updates.py | 20 ++-- synapse/storage/client_ips.py | 26 +++-- synapse/storage/deviceinbox.py | 10 +- synapse/storage/devices.py | 32 +++--- synapse/storage/directory.py | 10 +- synapse/storage/e2e_room_keys.py | 6 +- synapse/storage/end_to_end_keys.py | 8 +- synapse/storage/event_federation.py | 12 +-- synapse/storage/event_push_actions.py | 16 +-- synapse/storage/events.py | 34 +++---- synapse/storage/events_bg_updates.py | 6 +- synapse/storage/events_worker.py | 20 ++-- synapse/storage/filtering.py | 4 +- synapse/storage/group_server.py | 38 ++++--- synapse/storage/monthly_active_users.py | 2 +- synapse/storage/presence.py | 6 +- synapse/storage/profile.py | 12 +-- synapse/storage/push_rule.py | 18 ++-- synapse/storage/pusher.py | 10 +- synapse/storage/receipts.py | 36 +++---- synapse/storage/registration.py | 34 +++---- synapse/storage/relations.py | 4 +- synapse/storage/room.py | 10 +- synapse/storage/roommember.py | 42 ++++---- synapse/storage/search.py | 56 +++++------ synapse/storage/signatures.py | 2 +- synapse/storage/state.py | 54 +++++----- synapse/storage/stats.py | 16 +-- synapse/storage/stream.py | 44 ++++---- synapse/storage/tags.py | 12 +-- synapse/storage/transactions.py | 4 +- synapse/storage/user_directory.py | 30 +++--- synapse/storage/user_erasure_store.py | 5 +- synapse/streams/events.py | 4 +- synapse/util/__init__.py | 2 +- synapse/util/async_helpers.py | 4 +- synapse/util/caches/descriptors.py | 2 +- synapse/util/caches/response_cache.py | 2 +- synapse/util/metrics.py | 2 +- synapse/util/retryutils.py | 16 ++- synapse/visibility.py | 8 +- tests/crypto/test_keyring.py | 6 +- tests/handlers/test_register.py | 2 +- .../federation/test_matrix_federation_agent.py | 4 +- tests/http/federation/test_srv_resolver.py | 2 +- tests/http/test_fedclient.py | 2 +- tests/rest/client/test_transactions.py | 2 +- tests/storage/test_background_update.py | 4 +- tests/storage/test_redaction.py | 4 +- tests/storage/test_roommember.py | 2 +- tests/storage/test_state.py | 2 +- tests/test_visibility.py | 6 +- tests/util/caches/test_descriptors.py | 8 +- tests/utils.py | 6 +- 177 files changed, 1360 insertions(+), 1514 deletions(-) create mode 100644 changelog.d/5736.misc mode change 100755 => 100644 synapse/app/homeserver.py (limited to 'synapse/handlers') diff --git a/changelog.d/5736.misc b/changelog.d/5736.misc new file mode 100644 index 0000000000..5713b8b32d --- /dev/null +++ b/changelog.d/5736.misc @@ -0,0 +1 @@ +Replace uses of returnValue with plain return, as returnValue is not needed on Python 3. diff --git a/docs/log_contexts.rst b/docs/log_contexts.rst index f5cd5de8ab..4502cd9454 100644 --- a/docs/log_contexts.rst +++ b/docs/log_contexts.rst @@ -148,7 +148,7 @@ call any other functions. 
d = more_stuff() result = yield d # also fine, of course - defer.returnValue(result) + return result def nonInlineCallbacksFun(): logger.debug("just a wrapper really") diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 7ce6540bdd..351790cca4 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -128,7 +128,7 @@ class Auth(object): ) self._check_joined_room(member, user_id, room_id) - defer.returnValue(member) + return member @defer.inlineCallbacks def check_user_was_in_room(self, room_id, user_id): @@ -156,13 +156,13 @@ class Auth(object): if forgot: raise AuthError(403, "User %s not in room %s" % (user_id, room_id)) - defer.returnValue(member) + return member @defer.inlineCallbacks def check_host_in_room(self, room_id, host): with Measure(self.clock, "check_host_in_room"): latest_event_ids = yield self.store.is_host_joined(room_id, host) - defer.returnValue(latest_event_ids) + return latest_event_ids def _check_joined_room(self, member, user_id, room_id): if not member or member.membership != Membership.JOIN: @@ -219,9 +219,7 @@ class Auth(object): device_id="dummy-device", # stubbed ) - defer.returnValue( - synapse.types.create_requester(user_id, app_service=app_service) - ) + return synapse.types.create_requester(user_id, app_service=app_service) user_info = yield self.get_user_by_access_token(access_token, rights) user = user_info["user"] @@ -262,10 +260,8 @@ class Auth(object): request.authenticated_entity = user.to_string() - defer.returnValue( - synapse.types.create_requester( - user, token_id, is_guest, device_id, app_service=app_service - ) + return synapse.types.create_requester( + user, token_id, is_guest, device_id, app_service=app_service ) except KeyError: raise MissingClientTokenError() @@ -276,25 +272,25 @@ class Auth(object): self.get_access_token_from_request(request) ) if app_service is None: - defer.returnValue((None, None)) + return (None, None) if app_service.ip_range_whitelist: ip_address = IPAddress(self.hs.get_ip_from_request(request)) if ip_address not in app_service.ip_range_whitelist: - defer.returnValue((None, None)) + return (None, None) if b"user_id" not in request.args: - defer.returnValue((app_service.sender, app_service)) + return (app_service.sender, app_service) user_id = request.args[b"user_id"][0].decode("utf8") if app_service.sender == user_id: - defer.returnValue((app_service.sender, app_service)) + return (app_service.sender, app_service) if not app_service.is_interested_in_user(user_id): raise AuthError(403, "Application service cannot masquerade as this user.") if not (yield self.store.get_user_by_id(user_id)): raise AuthError(403, "Application service has not registered this user") - defer.returnValue((user_id, app_service)) + return (user_id, app_service) @defer.inlineCallbacks def get_user_by_access_token(self, token, rights="access"): @@ -330,7 +326,7 @@ class Auth(object): msg="Access token has expired", soft_logout=True ) - defer.returnValue(r) + return r # otherwise it needs to be a valid macaroon try: @@ -378,7 +374,7 @@ class Auth(object): } else: raise RuntimeError("Unknown rights setting %s", rights) - defer.returnValue(ret) + return ret except ( _InvalidMacaroonException, pymacaroons.exceptions.MacaroonException, @@ -506,7 +502,7 @@ class Auth(object): def _look_up_user_by_access_token(self, token): ret = yield self.store.get_user_by_access_token(token) if not ret: - defer.returnValue(None) + return None # we use ret.get() below because *lots* of unit tests stub out # get_user_by_access_token in a way where it only 
returns a couple of @@ -518,7 +514,7 @@ class Auth(object): "device_id": ret.get("device_id"), "valid_until_ms": ret.get("valid_until_ms"), } - defer.returnValue(user_info) + return user_info def get_appservice_by_req(self, request): token = self.get_access_token_from_request(request) @@ -543,7 +539,7 @@ class Auth(object): @defer.inlineCallbacks def compute_auth_events(self, event, current_state_ids, for_verification=False): if event.type == EventTypes.Create: - defer.returnValue([]) + return [] auth_ids = [] @@ -604,7 +600,7 @@ class Auth(object): if member_event.content["membership"] == Membership.JOIN: auth_ids.append(member_event.event_id) - defer.returnValue(auth_ids) + return auth_ids @defer.inlineCallbacks def check_can_change_room_list(self, room_id, user): @@ -618,7 +614,7 @@ class Auth(object): is_admin = yield self.is_server_admin(user) if is_admin: - defer.returnValue(True) + return True user_id = user.to_string() yield self.check_joined_room(room_id, user_id) @@ -712,7 +708,7 @@ class Auth(object): # * The user is a guest user, and has joined the room # else it will throw. member_event = yield self.check_user_was_in_room(room_id, user_id) - defer.returnValue((member_event.membership, member_event.event_id)) + return (member_event.membership, member_event.event_id) except AuthError: visibility = yield self.state.get_current_state( room_id, EventTypes.RoomHistoryVisibility, "" @@ -721,7 +717,7 @@ class Auth(object): visibility and visibility.content["history_visibility"] == "world_readable" ): - defer.returnValue((Membership.JOIN, None)) + return (Membership.JOIN, None) return raise AuthError( 403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index 9b3daca29b..9f06556bd2 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -132,7 +132,7 @@ class Filtering(object): @defer.inlineCallbacks def get_user_filter(self, user_localpart, filter_id): result = yield self.store.get_user_filter(user_localpart, filter_id) - defer.returnValue(FilterCollection(result)) + return FilterCollection(result) def add_user_filter(self, user_localpart, user_filter): self.check_valid_filter(user_filter) diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index 5b563c2778..e2822ca848 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -70,12 +70,12 @@ class PresenceStatusStubServlet(RestServlet): except HttpResponseException as e: raise e.to_synapse_error() - defer.returnValue((200, result)) + return (200, result) @defer.inlineCallbacks def on_PUT(self, request, user_id): yield self.auth.get_user_by_req(request) - defer.returnValue((200, {})) + return (200, {}) class KeyUploadServlet(RestServlet): @@ -126,11 +126,11 @@ class KeyUploadServlet(RestServlet): self.main_uri + request.uri.decode("ascii"), body, headers=headers ) - defer.returnValue((200, result)) + return (200, result) else: # Just interested in counts. 
result = yield self.store.count_e2e_one_time_keys(user_id, device_id) - defer.returnValue((200, {"one_time_key_counts": result})) + return (200, {"one_time_key_counts": result}) class FrontendProxySlavedStore( diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py old mode 100755 new mode 100644 index 34c3f5ee99..7d6b51b5bc --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -406,7 +406,7 @@ def setup(config_options): if provision: yield acme.provision_certificate() - defer.returnValue(provision) + return provision @defer.inlineCallbacks def reprovision_acme(): diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index b26a31dd54..33b3579425 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -175,21 +175,21 @@ class ApplicationService(object): @defer.inlineCallbacks def _matches_user(self, event, store): if not event: - defer.returnValue(False) + return False if self.is_interested_in_user(event.sender): - defer.returnValue(True) + return True # also check m.room.member state key if event.type == EventTypes.Member and self.is_interested_in_user( event.state_key ): - defer.returnValue(True) + return True if not store: - defer.returnValue(False) + return False does_match = yield self._matches_user_in_member_list(event.room_id, store) - defer.returnValue(does_match) + return does_match @cachedInlineCallbacks(num_args=1, cache_context=True) def _matches_user_in_member_list(self, room_id, store, cache_context): @@ -200,8 +200,8 @@ class ApplicationService(object): # check joined member events for user_id in member_list: if self.is_interested_in_user(user_id): - defer.returnValue(True) - defer.returnValue(False) + return True + return False def _matches_room_id(self, event): if hasattr(event, "room_id"): @@ -211,13 +211,13 @@ class ApplicationService(object): @defer.inlineCallbacks def _matches_aliases(self, event, store): if not store or not event: - defer.returnValue(False) + return False alias_list = yield store.get_aliases_for_room(event.room_id) for alias in alias_list: if self.is_interested_in_alias(alias): - defer.returnValue(True) - defer.returnValue(False) + return True + return False @defer.inlineCallbacks def is_interested(self, event, store=None): @@ -231,15 +231,15 @@ class ApplicationService(object): """ # Do cheap checks first if self._matches_room_id(event): - defer.returnValue(True) + return True if (yield self._matches_aliases(event, store)): - defer.returnValue(True) + return True if (yield self._matches_user(event, store)): - defer.returnValue(True) + return True - defer.returnValue(False) + return False def is_interested_in_user(self, user_id): return ( diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 571881775b..007ca75a94 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -97,40 +97,40 @@ class ApplicationServiceApi(SimpleHttpClient): @defer.inlineCallbacks def query_user(self, service, user_id): if service.url is None: - defer.returnValue(False) + return False uri = service.url + ("/users/%s" % urllib.parse.quote(user_id)) response = None try: response = yield self.get_json(uri, {"access_token": service.hs_token}) if response is not None: # just an empty json object - defer.returnValue(True) + return True except CodeMessageException as e: if e.code == 404: - defer.returnValue(False) + return False return logger.warning("query_user to %s received %s", uri, e.code) except Exception as ex: logger.warning("query_user to %s threw exception %s", uri, 
ex) - defer.returnValue(False) + return False @defer.inlineCallbacks def query_alias(self, service, alias): if service.url is None: - defer.returnValue(False) + return False uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias)) response = None try: response = yield self.get_json(uri, {"access_token": service.hs_token}) if response is not None: # just an empty json object - defer.returnValue(True) + return True except CodeMessageException as e: logger.warning("query_alias to %s received %s", uri, e.code) if e.code == 404: - defer.returnValue(False) + return False return except Exception as ex: logger.warning("query_alias to %s threw exception %s", uri, ex) - defer.returnValue(False) + return False @defer.inlineCallbacks def query_3pe(self, service, kind, protocol, fields): @@ -141,7 +141,7 @@ class ApplicationServiceApi(SimpleHttpClient): else: raise ValueError("Unrecognised 'kind' argument %r to query_3pe()", kind) if service.url is None: - defer.returnValue([]) + return [] uri = "%s%s/thirdparty/%s/%s" % ( service.url, @@ -155,7 +155,7 @@ class ApplicationServiceApi(SimpleHttpClient): logger.warning( "query_3pe to %s returned an invalid response %r", uri, response ) - defer.returnValue([]) + return [] ret = [] for r in response: @@ -166,14 +166,14 @@ class ApplicationServiceApi(SimpleHttpClient): "query_3pe to %s returned an invalid result %r", uri, r ) - defer.returnValue(ret) + return ret except Exception as ex: logger.warning("query_3pe to %s threw exception %s", uri, ex) - defer.returnValue([]) + return [] def get_3pe_protocol(self, service, protocol): if service.url is None: - defer.returnValue({}) + return {} @defer.inlineCallbacks def _get(): @@ -189,7 +189,7 @@ class ApplicationServiceApi(SimpleHttpClient): logger.warning( "query_3pe_protocol to %s did not return a" " valid result", uri ) - defer.returnValue(None) + return None for instance in info.get("instances", []): network_id = instance.get("network_id", None) @@ -198,10 +198,10 @@ class ApplicationServiceApi(SimpleHttpClient): service.id, network_id ).to_string() - defer.returnValue(info) + return info except Exception as ex: logger.warning("query_3pe_protocol to %s threw exception %s", uri, ex) - defer.returnValue(None) + return None key = (service.id, protocol) return self.protocol_meta_cache.wrap(key, _get) @@ -209,7 +209,7 @@ class ApplicationServiceApi(SimpleHttpClient): @defer.inlineCallbacks def push_bulk(self, service, events, txn_id=None): if service.url is None: - defer.returnValue(True) + return True events = self._serialize(events) @@ -229,14 +229,14 @@ class ApplicationServiceApi(SimpleHttpClient): ) sent_transactions_counter.labels(service.id).inc() sent_events_counter.labels(service.id).inc(len(events)) - defer.returnValue(True) + return True return except CodeMessageException as e: logger.warning("push_bulk to %s received %s", uri, e.code) except Exception as ex: logger.warning("push_bulk to %s threw exception %s", uri, ex) failed_transactions_counter.labels(service.id).inc() - defer.returnValue(False) + return False def _serialize(self, events): time_now = self.clock.time_msec() diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py index e5b36494f5..42a350bff8 100644 --- a/synapse/appservice/scheduler.py +++ b/synapse/appservice/scheduler.py @@ -193,7 +193,7 @@ class _TransactionController(object): @defer.inlineCallbacks def _is_service_up(self, service): state = yield self.store.get_appservice_state(service) - defer.returnValue(state == ApplicationServiceState.UP or state is 
None) + return state == ApplicationServiceState.UP or state is None class _Recoverer(object): @@ -208,7 +208,7 @@ class _Recoverer(object): r.service.id, ) r.recover() - defer.returnValue(recoverers) + return recoverers def __init__(self, clock, store, as_api, service, callback): self.clock = clock diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index e8bb420ad1..6c3e885e72 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -462,7 +462,7 @@ class StoreKeyFetcher(KeyFetcher): keys = {} for (server_name, key_id), key in res.items(): keys.setdefault(server_name, {})[key_id] = key - defer.returnValue(keys) + return keys class BaseV2KeyFetcher(object): @@ -566,7 +566,7 @@ class BaseV2KeyFetcher(object): ).addErrback(unwrapFirstError) ) - defer.returnValue(verify_keys) + return verify_keys class PerspectivesKeyFetcher(BaseV2KeyFetcher): @@ -588,7 +588,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): result = yield self.get_server_verify_key_v2_indirect( keys_to_fetch, key_server ) - defer.returnValue(result) + return result except KeyLookupError as e: logger.warning( "Key lookup failed from %r: %s", key_server.server_name, e @@ -601,7 +601,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): str(e), ) - defer.returnValue({}) + return {} results = yield make_deferred_yieldable( defer.gatherResults( @@ -615,7 +615,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): for server_name, keys in result.items(): union_of_keys.setdefault(server_name, {}).update(keys) - defer.returnValue(union_of_keys) + return union_of_keys @defer.inlineCallbacks def get_server_verify_key_v2_indirect(self, keys_to_fetch, key_server): @@ -701,7 +701,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): perspective_name, time_now_ms, added_keys ) - defer.returnValue(keys) + return keys def _validate_perspectives_response(self, key_server, response): """Optionally check the signature on the result of a /key/query request @@ -843,7 +843,7 @@ class ServerKeyFetcher(BaseV2KeyFetcher): ) keys.update(response_keys) - defer.returnValue(keys) + return keys @defer.inlineCallbacks diff --git a/synapse/events/builder.py b/synapse/events/builder.py index db011e0407..3997751337 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -144,15 +144,13 @@ class EventBuilder(object): if self._origin_server_ts is not None: event_dict["origin_server_ts"] = self._origin_server_ts - defer.returnValue( - create_local_event_from_event_dict( - clock=self._clock, - hostname=self._hostname, - signing_key=self._signing_key, - format_version=self.format_version, - event_dict=event_dict, - internal_metadata_dict=self.internal_metadata.get_dict(), - ) + return create_local_event_from_event_dict( + clock=self._clock, + hostname=self._hostname, + signing_key=self._signing_key, + format_version=self.format_version, + event_dict=event_dict, + internal_metadata_dict=self.internal_metadata.get_dict(), ) diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py index a9545e6c1b..acbcbeeced 100644 --- a/synapse/events/snapshot.py +++ b/synapse/events/snapshot.py @@ -133,19 +133,17 @@ class EventContext(object): else: prev_state_id = None - defer.returnValue( - { - "prev_state_id": prev_state_id, - "event_type": event.type, - "event_state_key": event.state_key if event.is_state() else None, - "state_group": self.state_group, - "rejected": self.rejected, - "prev_group": self.prev_group, - "delta_ids": _encode_state_dict(self.delta_ids), - "prev_state_events": self.prev_state_events, - 
"app_service_id": self.app_service.id if self.app_service else None, - } - ) + return { + "prev_state_id": prev_state_id, + "event_type": event.type, + "event_state_key": event.state_key if event.is_state() else None, + "state_group": self.state_group, + "rejected": self.rejected, + "prev_group": self.prev_group, + "delta_ids": _encode_state_dict(self.delta_ids), + "prev_state_events": self.prev_state_events, + "app_service_id": self.app_service.id if self.app_service else None, + } @staticmethod def deserialize(store, input): @@ -202,7 +200,7 @@ class EventContext(object): yield make_deferred_yieldable(self._fetching_state_deferred) - defer.returnValue(self._current_state_ids) + return self._current_state_ids @defer.inlineCallbacks def get_prev_state_ids(self, store): @@ -222,7 +220,7 @@ class EventContext(object): yield make_deferred_yieldable(self._fetching_state_deferred) - defer.returnValue(self._prev_state_ids) + return self._prev_state_ids def get_cached_current_state_ids(self): """Gets the current state IDs if we have them already cached. diff --git a/synapse/events/third_party_rules.py b/synapse/events/third_party_rules.py index 8f5d95696b..714a9b1579 100644 --- a/synapse/events/third_party_rules.py +++ b/synapse/events/third_party_rules.py @@ -51,7 +51,7 @@ class ThirdPartyEventRules(object): defer.Deferred[bool]: True if the event should be allowed, False if not. """ if self.third_party_rules is None: - defer.returnValue(True) + return True prev_state_ids = yield context.get_prev_state_ids(self.store) @@ -61,7 +61,7 @@ class ThirdPartyEventRules(object): state_events[key] = yield self.store.get_event(event_id, allow_none=True) ret = yield self.third_party_rules.check_event_allowed(event, state_events) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def on_create_room(self, requester, config, is_requester_admin): @@ -98,7 +98,7 @@ class ThirdPartyEventRules(object): """ if self.third_party_rules is None: - defer.returnValue(True) + return True state_ids = yield self.store.get_filtered_current_state_ids(room_id) room_state_events = yield self.store.get_events(state_ids.values()) @@ -110,4 +110,4 @@ class ThirdPartyEventRules(object): ret = yield self.third_party_rules.check_threepid_can_be_invited( medium, address, state_events ) - defer.returnValue(ret) + return ret diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 9487a886f5..07d1c5bcf0 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -360,7 +360,7 @@ class EventClientSerializer(object): """ # To handle the case of presence events and the like if not isinstance(event, EventBase): - defer.returnValue(event) + return event event_id = event.event_id serialized_event = serialize_event(event, time_now, **kwargs) @@ -406,7 +406,7 @@ class EventClientSerializer(object): "sender": edit.sender, } - defer.returnValue(serialized_event) + return serialized_event def serialize_events(self, events, time_now, **kwargs): """Serializes multiple events. 
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index f7bb806ae7..5a1e23a145 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -106,7 +106,7 @@ class FederationBase(object): "Failed to find copy of %s with valid signature", pdu.event_id ) - defer.returnValue(res) + return res handle = preserve_fn(handle_check_result) deferreds2 = [handle(pdu, deferred) for pdu, deferred in zip(pdus, deferreds)] @@ -116,9 +116,9 @@ class FederationBase(object): ).addErrback(unwrapFirstError) if include_none: - defer.returnValue(valid_pdus) + return valid_pdus else: - defer.returnValue([p for p in valid_pdus if p]) + return [p for p in valid_pdus if p] def _check_sigs_and_hash(self, room_version, pdu): return make_deferred_yieldable( diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 3cb4b94420..25ed1257f1 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -213,7 +213,7 @@ class FederationClient(FederationBase): ).addErrback(unwrapFirstError) ) - defer.returnValue(pdus) + return pdus @defer.inlineCallbacks @log_function @@ -245,7 +245,7 @@ class FederationClient(FederationBase): ev = self._get_pdu_cache.get(event_id) if ev: - defer.returnValue(ev) + return ev pdu_attempts = self.pdu_destination_tried.setdefault(event_id, {}) @@ -307,7 +307,7 @@ class FederationClient(FederationBase): if signed_pdu: self._get_pdu_cache[event_id] = signed_pdu - defer.returnValue(signed_pdu) + return signed_pdu @defer.inlineCallbacks @log_function @@ -355,7 +355,7 @@ class FederationClient(FederationBase): auth_chain.sort(key=lambda e: e.depth) - defer.returnValue((pdus, auth_chain)) + return (pdus, auth_chain) except HttpResponseException as e: if e.code == 400 or e.code == 404: logger.info("Failed to use get_room_state_ids API, falling back") @@ -404,7 +404,7 @@ class FederationClient(FederationBase): signed_auth.sort(key=lambda e: e.depth) - defer.returnValue((signed_pdus, signed_auth)) + return (signed_pdus, signed_auth) @defer.inlineCallbacks def get_events_from_store_or_dest(self, destination, room_id, event_ids): @@ -429,7 +429,7 @@ class FederationClient(FederationBase): missing_events.discard(k) if not missing_events: - defer.returnValue((signed_events, failed_to_fetch)) + return (signed_events, failed_to_fetch) logger.debug( "Fetching unknown state/auth events %s for room %s", @@ -465,7 +465,7 @@ class FederationClient(FederationBase): # We removed all events we successfully fetched from `batch` failed_to_fetch.update(batch) - defer.returnValue((signed_events, failed_to_fetch)) + return (signed_events, failed_to_fetch) @defer.inlineCallbacks @log_function @@ -485,7 +485,7 @@ class FederationClient(FederationBase): signed_auth.sort(key=lambda e: e.depth) - defer.returnValue(signed_auth) + return signed_auth @defer.inlineCallbacks def _try_destination_list(self, description, destinations, callback): @@ -521,7 +521,7 @@ class FederationClient(FederationBase): try: res = yield callback(destination) - defer.returnValue(res) + return res except InvalidResponseError as e: logger.warn("Failed to %s via %s: %s", description, destination, e) except HttpResponseException as e: @@ -615,7 +615,7 @@ class FederationClient(FederationBase): event_dict=pdu_dict, ) - defer.returnValue((destination, ev, event_format)) + return (destination, ev, event_format) return self._try_destination_list( "make_" + membership, destinations, send_request @@ 
-728,13 +728,11 @@ class FederationClient(FederationBase): check_authchain_validity(signed_auth) - defer.returnValue( - { - "state": signed_state, - "auth_chain": signed_auth, - "origin": destination, - } - ) + return { + "state": signed_state, + "auth_chain": signed_auth, + "origin": destination, + } return self._try_destination_list("send_join", destinations, send_request) @@ -758,7 +756,7 @@ class FederationClient(FederationBase): # FIXME: We should handle signature failures more gracefully. - defer.returnValue(pdu) + return pdu @defer.inlineCallbacks def _do_send_invite(self, destination, pdu, room_version): @@ -786,7 +784,7 @@ class FederationClient(FederationBase): "invite_room_state": pdu.unsigned.get("invite_room_state", []), }, ) - defer.returnValue(content) + return content except HttpResponseException as e: if e.code in [400, 404]: err = e.to_synapse_error() @@ -821,7 +819,7 @@ class FederationClient(FederationBase): event_id=pdu.event_id, content=pdu.get_pdu_json(time_now), ) - defer.returnValue(content) + return content def send_leave(self, destinations, pdu): """Sends a leave event to one of a list of homeservers. @@ -856,7 +854,7 @@ class FederationClient(FederationBase): ) logger.debug("Got content: %s", content) - defer.returnValue(None) + return None return self._try_destination_list("send_leave", destinations, send_request) @@ -917,7 +915,7 @@ class FederationClient(FederationBase): "missing": content.get("missing", []), } - defer.returnValue(ret) + return ret @defer.inlineCallbacks def get_missing_events( @@ -974,7 +972,7 @@ class FederationClient(FederationBase): # get_missing_events signed_events = [] - defer.returnValue(signed_events) + return signed_events @defer.inlineCallbacks def forward_third_party_invite(self, destinations, room_id, event_dict): @@ -986,7 +984,7 @@ class FederationClient(FederationBase): yield self.transport_layer.exchange_third_party_invite( destination=destination, room_id=room_id, event_dict=event_dict ) - defer.returnValue(None) + return None except CodeMessageException: raise except Exception as e: diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 8c0a18b120..b4b9a05ca6 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -99,7 +99,7 @@ class FederationServer(FederationBase): res = self._transaction_from_pdus(pdus).get_dict() - defer.returnValue((200, res)) + return (200, res) @defer.inlineCallbacks @log_function @@ -126,7 +126,7 @@ class FederationServer(FederationBase): origin, transaction, request_time ) - defer.returnValue(result) + return result @defer.inlineCallbacks def _handle_incoming_transaction(self, origin, transaction, request_time): @@ -147,8 +147,7 @@ class FederationServer(FederationBase): "[%s] We've already responded to this request", transaction.transaction_id, ) - defer.returnValue(response) - return + return response logger.debug("[%s] Transaction is new", transaction.transaction_id) @@ -163,7 +162,7 @@ class FederationServer(FederationBase): yield self.transaction_actions.set_response( origin, transaction, 400, response ) - defer.returnValue((400, response)) + return (400, response) received_pdus_counter.inc(len(transaction.pdus)) @@ -265,7 +264,7 @@ class FederationServer(FederationBase): logger.debug("Returning: %s", str(response)) yield self.transaction_actions.set_response(origin, transaction, 200, response) - defer.returnValue((200, response)) + return (200, response) @defer.inlineCallbacks def received_edu(self, 
origin, edu_type, content): @@ -298,7 +297,7 @@ class FederationServer(FederationBase): event_id, ) - defer.returnValue((200, resp)) + return (200, resp) @defer.inlineCallbacks def on_state_ids_request(self, origin, room_id, event_id): @@ -315,9 +314,7 @@ class FederationServer(FederationBase): state_ids = yield self.handler.get_state_ids_for_pdu(room_id, event_id) auth_chain_ids = yield self.store.get_auth_chain_ids(state_ids) - defer.returnValue( - (200, {"pdu_ids": state_ids, "auth_chain_ids": auth_chain_ids}) - ) + return (200, {"pdu_ids": state_ids, "auth_chain_ids": auth_chain_ids}) @defer.inlineCallbacks def _on_context_state_request_compute(self, room_id, event_id): @@ -336,12 +333,10 @@ class FederationServer(FederationBase): ) ) - defer.returnValue( - { - "pdus": [pdu.get_pdu_json() for pdu in pdus], - "auth_chain": [pdu.get_pdu_json() for pdu in auth_chain], - } - ) + return { + "pdus": [pdu.get_pdu_json() for pdu in pdus], + "auth_chain": [pdu.get_pdu_json() for pdu in auth_chain], + } @defer.inlineCallbacks @log_function @@ -349,15 +344,15 @@ class FederationServer(FederationBase): pdu = yield self.handler.get_persisted_pdu(origin, event_id) if pdu: - defer.returnValue((200, self._transaction_from_pdus([pdu]).get_dict())) + return (200, self._transaction_from_pdus([pdu]).get_dict()) else: - defer.returnValue((404, "")) + return (404, "") @defer.inlineCallbacks def on_query_request(self, query_type, args): received_queries_counter.labels(query_type).inc() resp = yield self.registry.on_query(query_type, args) - defer.returnValue((200, resp)) + return (200, resp) @defer.inlineCallbacks def on_make_join_request(self, origin, room_id, user_id, supported_versions): @@ -371,9 +366,7 @@ class FederationServer(FederationBase): pdu = yield self.handler.on_make_join_request(room_id, user_id) time_now = self._clock.time_msec() - defer.returnValue( - {"event": pdu.get_pdu_json(time_now), "room_version": room_version} - ) + return {"event": pdu.get_pdu_json(time_now), "room_version": room_version} @defer.inlineCallbacks def on_invite_request(self, origin, content, room_version): @@ -391,7 +384,7 @@ class FederationServer(FederationBase): yield self.check_server_matches_acl(origin_host, pdu.room_id) ret_pdu = yield self.handler.on_invite_request(origin, pdu) time_now = self._clock.time_msec() - defer.returnValue({"event": ret_pdu.get_pdu_json(time_now)}) + return {"event": ret_pdu.get_pdu_json(time_now)} @defer.inlineCallbacks def on_send_join_request(self, origin, content, room_id): @@ -407,16 +400,14 @@ class FederationServer(FederationBase): logger.debug("on_send_join_request: pdu sigs: %s", pdu.signatures) res_pdus = yield self.handler.on_send_join_request(origin, pdu) time_now = self._clock.time_msec() - defer.returnValue( - ( - 200, - { - "state": [p.get_pdu_json(time_now) for p in res_pdus["state"]], - "auth_chain": [ - p.get_pdu_json(time_now) for p in res_pdus["auth_chain"] - ], - }, - ) + return ( + 200, + { + "state": [p.get_pdu_json(time_now) for p in res_pdus["state"]], + "auth_chain": [ + p.get_pdu_json(time_now) for p in res_pdus["auth_chain"] + ], + }, ) @defer.inlineCallbacks @@ -428,9 +419,7 @@ class FederationServer(FederationBase): room_version = yield self.store.get_room_version(room_id) time_now = self._clock.time_msec() - defer.returnValue( - {"event": pdu.get_pdu_json(time_now), "room_version": room_version} - ) + return {"event": pdu.get_pdu_json(time_now), "room_version": room_version} @defer.inlineCallbacks def on_send_leave_request(self, origin, content, room_id): 
@@ -445,7 +434,7 @@ class FederationServer(FederationBase): logger.debug("on_send_leave_request: pdu sigs: %s", pdu.signatures) yield self.handler.on_send_leave_request(origin, pdu) - defer.returnValue((200, {})) + return (200, {}) @defer.inlineCallbacks def on_event_auth(self, origin, room_id, event_id): @@ -456,7 +445,7 @@ class FederationServer(FederationBase): time_now = self._clock.time_msec() auth_pdus = yield self.handler.on_event_auth(event_id) res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]} - defer.returnValue((200, res)) + return (200, res) @defer.inlineCallbacks def on_query_auth_request(self, origin, content, room_id, event_id): @@ -509,7 +498,7 @@ class FederationServer(FederationBase): "missing": ret.get("missing", []), } - defer.returnValue((200, send_content)) + return (200, send_content) @log_function def on_query_client_keys(self, origin, content): @@ -548,7 +537,7 @@ class FederationServer(FederationBase): ), ) - defer.returnValue({"one_time_keys": json_result}) + return {"one_time_keys": json_result} @defer.inlineCallbacks @log_function @@ -580,9 +569,7 @@ class FederationServer(FederationBase): time_now = self._clock.time_msec() - defer.returnValue( - {"events": [ev.get_pdu_json(time_now) for ev in missing_events]} - ) + return {"events": [ev.get_pdu_json(time_now) for ev in missing_events]} @log_function def on_openid_userinfo(self, token): @@ -676,14 +663,14 @@ class FederationServer(FederationBase): ret = yield self.handler.exchange_third_party_invite( sender_user_id, target_user_id, room_id, signed ) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def on_exchange_third_party_invite_request(self, origin, room_id, event_dict): ret = yield self.handler.on_exchange_third_party_invite_request( origin, room_id, event_dict ) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def check_server_matches_acl(self, server_name, room_id): diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py index 9aab12c0d3..fad980b893 100644 --- a/synapse/federation/sender/per_destination_queue.py +++ b/synapse/federation/sender/per_destination_queue.py @@ -374,7 +374,7 @@ class PerDestinationQueue(object): assert len(edus) <= limit, "get_devices_by_remote returned too many EDUs" - defer.returnValue((edus, now_stream_id)) + return (edus, now_stream_id) @defer.inlineCallbacks def _get_to_device_message_edus(self, limit): @@ -393,4 +393,4 @@ class PerDestinationQueue(object): for content in contents ] - defer.returnValue((edus, stream_id)) + return (edus, stream_id) diff --git a/synapse/federation/sender/transaction_manager.py b/synapse/federation/sender/transaction_manager.py index 0460a8c4ac..52706302f2 100644 --- a/synapse/federation/sender/transaction_manager.py +++ b/synapse/federation/sender/transaction_manager.py @@ -133,4 +133,4 @@ class TransactionManager(object): ) success = False - defer.returnValue(success) + return success diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 1aae9ec9e7..2a6709ff48 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -183,7 +183,7 @@ class TransportLayerClient(object): try_trailing_slash_on_400=True, ) - defer.returnValue(response) + return response @defer.inlineCallbacks @log_function @@ -201,7 +201,7 @@ class TransportLayerClient(object): ignore_backoff=ignore_backoff, ) - defer.returnValue(content) + return content @defer.inlineCallbacks @log_function 
@@ -259,7 +259,7 @@ class TransportLayerClient(object): ignore_backoff=ignore_backoff, ) - defer.returnValue(content) + return content @defer.inlineCallbacks @log_function @@ -270,7 +270,7 @@ class TransportLayerClient(object): destination=destination, path=path, data=content ) - defer.returnValue(response) + return response @defer.inlineCallbacks @log_function @@ -288,7 +288,7 @@ class TransportLayerClient(object): ignore_backoff=True, ) - defer.returnValue(response) + return response @defer.inlineCallbacks @log_function @@ -299,7 +299,7 @@ class TransportLayerClient(object): destination=destination, path=path, data=content, ignore_backoff=True ) - defer.returnValue(response) + return response @defer.inlineCallbacks @log_function @@ -310,7 +310,7 @@ class TransportLayerClient(object): destination=destination, path=path, data=content, ignore_backoff=True ) - defer.returnValue(response) + return response @defer.inlineCallbacks @log_function @@ -339,7 +339,7 @@ class TransportLayerClient(object): destination=remote_server, path=path, args=args, ignore_backoff=True ) - defer.returnValue(response) + return response @defer.inlineCallbacks @log_function @@ -350,7 +350,7 @@ class TransportLayerClient(object): destination=destination, path=path, data=event_dict ) - defer.returnValue(response) + return response @defer.inlineCallbacks @log_function @@ -359,7 +359,7 @@ class TransportLayerClient(object): content = yield self.client.get_json(destination=destination, path=path) - defer.returnValue(content) + return content @defer.inlineCallbacks @log_function @@ -370,7 +370,7 @@ class TransportLayerClient(object): destination=destination, path=path, data=content ) - defer.returnValue(content) + return content @defer.inlineCallbacks @log_function @@ -402,7 +402,7 @@ class TransportLayerClient(object): content = yield self.client.post_json( destination=destination, path=path, data=query_content, timeout=timeout ) - defer.returnValue(content) + return content @defer.inlineCallbacks @log_function @@ -426,7 +426,7 @@ class TransportLayerClient(object): content = yield self.client.get_json( destination=destination, path=path, timeout=timeout ) - defer.returnValue(content) + return content @defer.inlineCallbacks @log_function @@ -460,7 +460,7 @@ class TransportLayerClient(object): content = yield self.client.post_json( destination=destination, path=path, data=query_content, timeout=timeout ) - defer.returnValue(content) + return content @defer.inlineCallbacks @log_function @@ -488,7 +488,7 @@ class TransportLayerClient(object): timeout=timeout, ) - defer.returnValue(content) + return content @log_function def get_group_profile(self, destination, group_id, requester_user_id): diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index f497711133..dfd7ae041b 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -157,7 +157,7 @@ class GroupAttestionRenewer(object): yield self.store.update_remote_attestion(group_id, user_id, attestation) - defer.returnValue({}) + return {} def _start_renew_attestations(self): return run_as_background_process("renew_attestations", self._renew_attestations) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 168c9e3f84..d50e691436 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -85,7 +85,7 @@ class GroupsServerHandler(object): if not is_admin: raise SynapseError(403, "User is not admin in group") - defer.returnValue(group) + return group @defer.inlineCallbacks 
def get_group_summary(self, group_id, requester_user_id): @@ -151,22 +151,20 @@ class GroupsServerHandler(object): group_id, requester_user_id ) - defer.returnValue( - { - "profile": profile, - "users_section": { - "users": users, - "roles": roles, - "total_user_count_estimate": 0, # TODO - }, - "rooms_section": { - "rooms": rooms, - "categories": categories, - "total_room_count_estimate": 0, # TODO - }, - "user": membership_info, - } - ) + return { + "profile": profile, + "users_section": { + "users": users, + "roles": roles, + "total_user_count_estimate": 0, # TODO + }, + "rooms_section": { + "rooms": rooms, + "categories": categories, + "total_room_count_estimate": 0, # TODO + }, + "user": membership_info, + } @defer.inlineCallbacks def update_group_summary_room( @@ -192,7 +190,7 @@ class GroupsServerHandler(object): is_public=is_public, ) - defer.returnValue({}) + return {} @defer.inlineCallbacks def delete_group_summary_room( @@ -208,7 +206,7 @@ class GroupsServerHandler(object): group_id=group_id, room_id=room_id, category_id=category_id ) - defer.returnValue({}) + return {} @defer.inlineCallbacks def set_group_join_policy(self, group_id, requester_user_id, content): @@ -228,7 +226,7 @@ class GroupsServerHandler(object): yield self.store.set_group_join_policy(group_id, join_policy=join_policy) - defer.returnValue({}) + return {} @defer.inlineCallbacks def get_group_categories(self, group_id, requester_user_id): @@ -237,7 +235,7 @@ class GroupsServerHandler(object): yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) categories = yield self.store.get_group_categories(group_id=group_id) - defer.returnValue({"categories": categories}) + return {"categories": categories} @defer.inlineCallbacks def get_group_category(self, group_id, requester_user_id, category_id): @@ -249,7 +247,7 @@ class GroupsServerHandler(object): group_id=group_id, category_id=category_id ) - defer.returnValue(res) + return res @defer.inlineCallbacks def update_group_category(self, group_id, requester_user_id, category_id, content): @@ -269,7 +267,7 @@ class GroupsServerHandler(object): profile=profile, ) - defer.returnValue({}) + return {} @defer.inlineCallbacks def delete_group_category(self, group_id, requester_user_id, category_id): @@ -283,7 +281,7 @@ class GroupsServerHandler(object): group_id=group_id, category_id=category_id ) - defer.returnValue({}) + return {} @defer.inlineCallbacks def get_group_roles(self, group_id, requester_user_id): @@ -292,7 +290,7 @@ class GroupsServerHandler(object): yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) roles = yield self.store.get_group_roles(group_id=group_id) - defer.returnValue({"roles": roles}) + return {"roles": roles} @defer.inlineCallbacks def get_group_role(self, group_id, requester_user_id, role_id): @@ -301,7 +299,7 @@ class GroupsServerHandler(object): yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) res = yield self.store.get_group_role(group_id=group_id, role_id=role_id) - defer.returnValue(res) + return res @defer.inlineCallbacks def update_group_role(self, group_id, requester_user_id, role_id, content): @@ -319,7 +317,7 @@ class GroupsServerHandler(object): group_id=group_id, role_id=role_id, is_public=is_public, profile=profile ) - defer.returnValue({}) + return {} @defer.inlineCallbacks def delete_group_role(self, group_id, requester_user_id, role_id): @@ -331,7 +329,7 @@ class GroupsServerHandler(object): yield self.store.remove_group_role(group_id=group_id, 
role_id=role_id) - defer.returnValue({}) + return {} @defer.inlineCallbacks def update_group_summary_user( @@ -355,7 +353,7 @@ class GroupsServerHandler(object): is_public=is_public, ) - defer.returnValue({}) + return {} @defer.inlineCallbacks def delete_group_summary_user(self, group_id, requester_user_id, user_id, role_id): @@ -369,7 +367,7 @@ class GroupsServerHandler(object): group_id=group_id, user_id=user_id, role_id=role_id ) - defer.returnValue({}) + return {} @defer.inlineCallbacks def get_group_profile(self, group_id, requester_user_id): @@ -391,7 +389,7 @@ class GroupsServerHandler(object): group_description = {key: group[key] for key in cols} group_description["is_openly_joinable"] = group["join_policy"] == "open" - defer.returnValue(group_description) + return group_description else: raise SynapseError(404, "Unknown group") @@ -461,9 +459,7 @@ class GroupsServerHandler(object): # TODO: If admin add lists of users whose attestations have timed out - defer.returnValue( - {"chunk": chunk, "total_user_count_estimate": len(user_results)} - ) + return {"chunk": chunk, "total_user_count_estimate": len(user_results)} @defer.inlineCallbacks def get_invited_users_in_group(self, group_id, requester_user_id): @@ -494,9 +490,7 @@ class GroupsServerHandler(object): logger.warn("Error getting profile for %s: %s", user_id, e) user_profiles.append(user_profile) - defer.returnValue( - {"chunk": user_profiles, "total_user_count_estimate": len(invited_users)} - ) + return {"chunk": user_profiles, "total_user_count_estimate": len(invited_users)} @defer.inlineCallbacks def get_rooms_in_group(self, group_id, requester_user_id): @@ -533,9 +527,7 @@ class GroupsServerHandler(object): chunk.sort(key=lambda e: -e["num_joined_members"]) - defer.returnValue( - {"chunk": chunk, "total_room_count_estimate": len(room_results)} - ) + return {"chunk": chunk, "total_room_count_estimate": len(room_results)} @defer.inlineCallbacks def add_room_to_group(self, group_id, requester_user_id, room_id, content): @@ -551,7 +543,7 @@ class GroupsServerHandler(object): yield self.store.add_room_to_group(group_id, room_id, is_public=is_public) - defer.returnValue({}) + return {} @defer.inlineCallbacks def update_room_in_group( @@ -574,7 +566,7 @@ class GroupsServerHandler(object): else: raise SynapseError(400, "Uknown config option") - defer.returnValue({}) + return {} @defer.inlineCallbacks def remove_room_from_group(self, group_id, requester_user_id, room_id): @@ -586,7 +578,7 @@ class GroupsServerHandler(object): yield self.store.remove_room_from_group(group_id, room_id) - defer.returnValue({}) + return {} @defer.inlineCallbacks def invite_to_group(self, group_id, user_id, requester_user_id, content): @@ -644,9 +636,9 @@ class GroupsServerHandler(object): ) elif res["state"] == "invite": yield self.store.add_group_invite(group_id, user_id) - defer.returnValue({"state": "invite"}) + return {"state": "invite"} elif res["state"] == "reject": - defer.returnValue({"state": "reject"}) + return {"state": "reject"} else: raise SynapseError(502, "Unknown state returned by HS") @@ -679,7 +671,7 @@ class GroupsServerHandler(object): remote_attestation=remote_attestation, ) - defer.returnValue(local_attestation) + return local_attestation @defer.inlineCallbacks def accept_invite(self, group_id, requester_user_id, content): @@ -699,7 +691,7 @@ class GroupsServerHandler(object): local_attestation = yield self._add_user(group_id, requester_user_id, content) - defer.returnValue({"state": "join", "attestation": local_attestation}) + 
return {"state": "join", "attestation": local_attestation} @defer.inlineCallbacks def join_group(self, group_id, requester_user_id, content): @@ -716,7 +708,7 @@ class GroupsServerHandler(object): local_attestation = yield self._add_user(group_id, requester_user_id, content) - defer.returnValue({"state": "join", "attestation": local_attestation}) + return {"state": "join", "attestation": local_attestation} @defer.inlineCallbacks def knock(self, group_id, requester_user_id, content): @@ -769,7 +761,7 @@ class GroupsServerHandler(object): if not self.hs.is_mine_id(user_id): yield self.store.maybe_delete_remote_profile_cache(user_id) - defer.returnValue({}) + return {} @defer.inlineCallbacks def create_group(self, group_id, requester_user_id, content): @@ -845,7 +837,7 @@ class GroupsServerHandler(object): avatar_url=user_profile.get("avatar_url"), ) - defer.returnValue({"group_id": group_id}) + return {"group_id": group_id} @defer.inlineCallbacks def delete_group(self, group_id, requester_user_id): diff --git a/synapse/handlers/account_data.py b/synapse/handlers/account_data.py index e62e6cab77..8acd9f9a83 100644 --- a/synapse/handlers/account_data.py +++ b/synapse/handlers/account_data.py @@ -51,8 +51,8 @@ class AccountDataEventSource(object): {"type": account_data_type, "content": content, "room_id": room_id} ) - defer.returnValue((results, current_stream_id)) + return (results, current_stream_id) @defer.inlineCallbacks def get_pagination_rows(self, user, config, key): - defer.returnValue(([], config.to_id)) + return ([], config.to_id) diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py index 1f1708ba7d..930204e2d0 100644 --- a/synapse/handlers/account_validity.py +++ b/synapse/handlers/account_validity.py @@ -193,7 +193,7 @@ class AccountValidityHandler(object): if threepid["medium"] == "email": addresses.append(threepid["address"]) - defer.returnValue(addresses) + return addresses @defer.inlineCallbacks def _get_renewal_token(self, user_id): @@ -214,7 +214,7 @@ class AccountValidityHandler(object): try: renewal_token = stringutils.random_string(32) yield self.store.set_renewal_token_for_user(user_id, renewal_token) - defer.returnValue(renewal_token) + return renewal_token except StoreError: attempts += 1 raise StoreError(500, "Couldn't generate a unique string as refresh string.") @@ -254,4 +254,4 @@ class AccountValidityHandler(object): user_id=user_id, expiration_ts=expiration_ts, email_sent=email_sent ) - defer.returnValue(expiration_ts) + return expiration_ts diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index fbef2f3d38..46ac73106d 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -100,4 +100,4 @@ class AcmeHandler(object): logger.exception("Failed saving!") raise - defer.returnValue(True) + return True diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py index e8a651e231..2f22f56ca4 100644 --- a/synapse/handlers/admin.py +++ b/synapse/handlers/admin.py @@ -49,7 +49,7 @@ class AdminHandler(BaseHandler): "devices": {"": {"sessions": [{"connections": connections}]}}, } - defer.returnValue(ret) + return ret @defer.inlineCallbacks def get_users(self): @@ -61,7 +61,7 @@ class AdminHandler(BaseHandler): """ ret = yield self.store.get_users() - defer.returnValue(ret) + return ret @defer.inlineCallbacks def get_users_paginate(self, order, start, limit): @@ -78,7 +78,7 @@ class AdminHandler(BaseHandler): """ ret = yield self.store.get_users_paginate(order, start, limit) - defer.returnValue(ret) + 
return ret @defer.inlineCallbacks def search_users(self, term): @@ -92,7 +92,7 @@ class AdminHandler(BaseHandler): """ ret = yield self.store.search_users(term) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def export_user_data(self, user_id, writer): @@ -225,7 +225,7 @@ class AdminHandler(BaseHandler): state = yield self.store.get_state_for_event(event_id) writer.write_state(room_id, event_id, state) - defer.returnValue(writer.finished()) + return writer.finished() class ExfiltrationWriter(object): diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 8f089f0e33..d1a51df6f9 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -167,8 +167,8 @@ class ApplicationServicesHandler(object): for user_service in user_query_services: is_known_user = yield self.appservice_api.query_user(user_service, user_id) if is_known_user: - defer.returnValue(True) - defer.returnValue(False) + return True + return False @defer.inlineCallbacks def query_room_alias_exists(self, room_alias): @@ -192,7 +192,7 @@ class ApplicationServicesHandler(object): if is_known_alias: # the alias exists now so don't query more ASes. result = yield self.store.get_association_from_room_alias(room_alias) - defer.returnValue(result) + return result @defer.inlineCallbacks def query_3pe(self, kind, protocol, fields): @@ -215,7 +215,7 @@ class ApplicationServicesHandler(object): if success: ret.extend(result) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def get_3pe_protocols(self, only_protocol=None): @@ -254,7 +254,7 @@ class ApplicationServicesHandler(object): for p in protocols.keys(): protocols[p] = _merge_instances(protocols[p]) - defer.returnValue(protocols) + return protocols @defer.inlineCallbacks def _get_services_for_event(self, event): @@ -276,7 +276,7 @@ class ApplicationServicesHandler(object): if (yield s.is_interested(event, self.store)): interested_list.append(s) - defer.returnValue(interested_list) + return interested_list def _get_services_for_user(self, user_id): services = self.store.get_app_services() @@ -293,23 +293,23 @@ class ApplicationServicesHandler(object): if not self.is_mine_id(user_id): # we don't know if they are unknown or not since it isn't one of our # users. We can't poke ASes. - defer.returnValue(False) + return False return user_info = yield self.store.get_user_by_id(user_id) if user_info: - defer.returnValue(False) + return False return # user not found; could be the AS though, so check. 
services = self.store.get_app_services() service_list = [s for s in services if s.sender == user_id] - defer.returnValue(len(service_list) == 0) + return len(service_list) == 0 @defer.inlineCallbacks def _check_user_exists(self, user_id): unknown_user = yield self._is_unknown_user(user_id) if unknown_user: exists = yield self.query_user_exists(user_id) - defer.returnValue(exists) - defer.returnValue(True) + return exists + return True diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index d4d6574975..05be5b7c48 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -155,7 +155,7 @@ class AuthHandler(BaseHandler): if user_id != requester.user.to_string(): raise AuthError(403, "Invalid auth") - defer.returnValue(params) + return params @defer.inlineCallbacks def check_auth(self, flows, clientdict, clientip, password_servlet=False): @@ -280,7 +280,7 @@ class AuthHandler(BaseHandler): creds, list(clientdict), ) - defer.returnValue((creds, clientdict, session["id"])) + return (creds, clientdict, session["id"]) ret = self._auth_dict_for_flows(flows, session) ret["completed"] = list(creds) @@ -307,8 +307,8 @@ class AuthHandler(BaseHandler): if result: creds[stagetype] = result self._save_session(sess) - defer.returnValue(True) - defer.returnValue(False) + return True + return False def get_session_id(self, clientdict): """ @@ -379,7 +379,7 @@ class AuthHandler(BaseHandler): res = yield checker( authdict, clientip=clientip, password_servlet=password_servlet ) - defer.returnValue(res) + return res # build a v1-login-style dict out of the authdict and fall back to the # v1 code @@ -389,7 +389,7 @@ class AuthHandler(BaseHandler): raise SynapseError(400, "", Codes.MISSING_PARAM) (canonical_id, callback) = yield self.validate_login(user_id, authdict) - defer.returnValue(canonical_id) + return canonical_id @defer.inlineCallbacks def _check_recaptcha(self, authdict, clientip, **kwargs): @@ -433,7 +433,7 @@ class AuthHandler(BaseHandler): resp_body.get("hostname"), ) if resp_body["success"]: - defer.returnValue(True) + return True raise LoginError(401, "", errcode=Codes.UNAUTHORIZED) def _check_email_identity(self, authdict, **kwargs): @@ -502,7 +502,7 @@ class AuthHandler(BaseHandler): threepid["threepid_creds"] = authdict["threepid_creds"] - defer.returnValue(threepid) + return threepid def _get_params_recaptcha(self): return {"public_key": self.hs.config.recaptcha_public_key} @@ -606,7 +606,7 @@ class AuthHandler(BaseHandler): yield self.store.delete_access_token(access_token) raise StoreError(400, "Login raced against device deletion") - defer.returnValue(access_token) + return access_token @defer.inlineCallbacks def check_user_exists(self, user_id): @@ -629,8 +629,8 @@ class AuthHandler(BaseHandler): self.ratelimit_login_per_account(user_id) res = yield self._find_user_id_and_pwd_hash(user_id) if res is not None: - defer.returnValue(res[0]) - defer.returnValue(None) + return res[0] + return None @defer.inlineCallbacks def _find_user_id_and_pwd_hash(self, user_id): @@ -661,7 +661,7 @@ class AuthHandler(BaseHandler): user_id, user_infos.keys(), ) - defer.returnValue(result) + return result def get_supported_login_types(self): """Get a the login types supported for the /login API @@ -722,7 +722,7 @@ class AuthHandler(BaseHandler): known_login_type = True is_valid = yield provider.check_password(qualified_user_id, password) if is_valid: - defer.returnValue((qualified_user_id, None)) + return (qualified_user_id, None) if not hasattr(provider, "get_supported_login_types") 
or not hasattr( provider, "check_auth" @@ -756,7 +756,7 @@ class AuthHandler(BaseHandler): if result: if isinstance(result, str): result = (result, None) - defer.returnValue(result) + return result if login_type == LoginType.PASSWORD and self.hs.config.password_localdb_enabled: known_login_type = True @@ -766,7 +766,7 @@ class AuthHandler(BaseHandler): ) if canonical_user_id: - defer.returnValue((canonical_user_id, None)) + return (canonical_user_id, None) if not known_login_type: raise SynapseError(400, "Unknown login type %s" % login_type) @@ -814,9 +814,9 @@ class AuthHandler(BaseHandler): if isinstance(result, str): # If it's a str, set callback function to None result = (result, None) - defer.returnValue(result) + return result - defer.returnValue((None, None)) + return (None, None) @defer.inlineCallbacks def _check_local_password(self, user_id, password): @@ -838,7 +838,7 @@ class AuthHandler(BaseHandler): """ lookupres = yield self._find_user_id_and_pwd_hash(user_id) if not lookupres: - defer.returnValue(None) + return None (user_id, password_hash) = lookupres # If the password hash is None, the account has likely been deactivated @@ -850,8 +850,8 @@ class AuthHandler(BaseHandler): result = yield self.validate_hash(password, password_hash) if not result: logger.warn("Failed password login for user %s", user_id) - defer.returnValue(None) - defer.returnValue(user_id) + return None + return user_id @defer.inlineCallbacks def validate_short_term_login_token_and_get_user_id(self, login_token): @@ -865,7 +865,7 @@ class AuthHandler(BaseHandler): raise AuthError(403, "Invalid token", errcode=Codes.FORBIDDEN) self.ratelimit_login_per_account(user_id) yield self.auth.check_auth_blocking(user_id) - defer.returnValue(user_id) + return user_id @defer.inlineCallbacks def delete_access_token(self, access_token): @@ -976,7 +976,7 @@ class AuthHandler(BaseHandler): ) yield self.store.user_delete_threepid(user_id, medium, address) - defer.returnValue(result) + return result def _save_session(self, session): # TODO: Persistent storage diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index e8f9da6098..5f804d1f13 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -125,7 +125,7 @@ class DeactivateAccountHandler(BaseHandler): # Mark the user as deactivated. 
yield self.store.set_user_deactivated_status(user_id, True) - defer.returnValue(identity_server_supports_unbinding) + return identity_server_supports_unbinding def _start_user_parting(self): """ diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 99e8413092..d6ab337783 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -64,7 +64,7 @@ class DeviceWorkerHandler(BaseHandler): for device in devices: _update_device_from_client_ips(device, ips) - defer.returnValue(devices) + return devices @defer.inlineCallbacks def get_device(self, user_id, device_id): @@ -85,7 +85,7 @@ class DeviceWorkerHandler(BaseHandler): raise errors.NotFoundError ips = yield self.store.get_last_client_ip_by_device(user_id, device_id) _update_device_from_client_ips(device, ips) - defer.returnValue(device) + return device @measure_func("device.get_user_ids_changed") @defer.inlineCallbacks @@ -200,9 +200,7 @@ class DeviceWorkerHandler(BaseHandler): possibly_joined = [] possibly_left = [] - defer.returnValue( - {"changed": list(possibly_joined), "left": list(possibly_left)} - ) + return {"changed": list(possibly_joined), "left": list(possibly_left)} class DeviceHandler(DeviceWorkerHandler): @@ -250,7 +248,7 @@ class DeviceHandler(DeviceWorkerHandler): ) if new_device: yield self.notify_device_update(user_id, [device_id]) - defer.returnValue(device_id) + return device_id # if the device id is not specified, we'll autogen one, but loop a few # times in case of a clash. @@ -264,7 +262,7 @@ class DeviceHandler(DeviceWorkerHandler): ) if new_device: yield self.notify_device_update(user_id, [device_id]) - defer.returnValue(device_id) + return device_id attempts += 1 raise errors.StoreError(500, "Couldn't generate a device ID.") @@ -411,9 +409,7 @@ class DeviceHandler(DeviceWorkerHandler): @defer.inlineCallbacks def on_federation_query_user_devices(self, user_id): stream_id, devices = yield self.store.get_devices_with_keys_by_user(user_id) - defer.returnValue( - {"user_id": user_id, "stream_id": stream_id, "devices": devices} - ) + return {"user_id": user_id, "stream_id": stream_id, "devices": devices} @defer.inlineCallbacks def user_left_room(self, user, room_id): @@ -623,7 +619,7 @@ class DeviceListEduUpdater(object): for _, stream_id, prev_ids, _ in updates: if not prev_ids: # We always do a resync if there are no previous IDs - defer.returnValue(True) + return True for prev_id in prev_ids: if prev_id == extremity: @@ -633,8 +629,8 @@ class DeviceListEduUpdater(object): elif prev_id in stream_id_in_updates: continue else: - defer.returnValue(True) + return True stream_id_in_updates.add(stream_id) - defer.returnValue(False) + return False diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 42d5b3db30..0fd423197c 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -210,7 +210,7 @@ class DirectoryHandler(BaseHandler): except AuthError as e: logger.info("Failed to update alias events: %s", e) - defer.returnValue(room_id) + return room_id @defer.inlineCallbacks def delete_appservice_association(self, service, room_alias): @@ -229,7 +229,7 @@ class DirectoryHandler(BaseHandler): room_id = yield self.store.delete_room_alias(room_alias) - defer.returnValue(room_id) + return room_id @defer.inlineCallbacks def get_association(self, room_alias): @@ -277,7 +277,7 @@ class DirectoryHandler(BaseHandler): else: servers = list(servers) - defer.returnValue({"room_id": room_id, "servers": servers}) + return {"room_id": room_id, "servers": 
servers} return @defer.inlineCallbacks @@ -289,7 +289,7 @@ class DirectoryHandler(BaseHandler): result = yield self.get_association_from_room_alias(room_alias) if result is not None: - defer.returnValue({"room_id": result.room_id, "servers": result.servers}) + return {"room_id": result.room_id, "servers": result.servers} else: raise SynapseError( 404, @@ -342,7 +342,7 @@ class DirectoryHandler(BaseHandler): # Query AS to see if it exists as_handler = self.appservice_handler result = yield as_handler.query_room_alias_exists(room_alias) - defer.returnValue(result) + return result def can_modify_alias(self, alias, user_id=None): # Any application service "interested" in an alias they are regexing on @@ -369,10 +369,10 @@ class DirectoryHandler(BaseHandler): creator = yield self.store.get_room_alias_creator(alias.to_string()) if creator is not None and creator == user_id: - defer.returnValue(True) + return True is_admin = yield self.auth.is_server_admin(UserID.from_string(user_id)) - defer.returnValue(is_admin) + return is_admin @defer.inlineCallbacks def edit_published_room_list(self, requester, room_id, visibility): diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index fdfe8611b6..1300b540e3 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -144,7 +144,7 @@ class E2eKeysHandler(object): ) ) - defer.returnValue({"device_keys": results, "failures": failures}) + return {"device_keys": results, "failures": failures} @defer.inlineCallbacks def query_local_devices(self, query): @@ -189,7 +189,7 @@ class E2eKeysHandler(object): r["unsigned"]["device_display_name"] = display_name result_dict[user_id][device_id] = r - defer.returnValue(result_dict) + return result_dict @defer.inlineCallbacks def on_federation_query_client_keys(self, query_body): @@ -197,7 +197,7 @@ class E2eKeysHandler(object): """ device_keys_query = query_body.get("device_keys", {}) res = yield self.query_local_devices(device_keys_query) - defer.returnValue({"device_keys": res}) + return {"device_keys": res} @defer.inlineCallbacks def claim_one_time_keys(self, query, timeout): @@ -259,7 +259,7 @@ class E2eKeysHandler(object): ), ) - defer.returnValue({"one_time_keys": json_result, "failures": failures}) + return {"one_time_keys": json_result, "failures": failures} @defer.inlineCallbacks def upload_keys_for_user(self, user_id, device_id, keys): @@ -297,7 +297,7 @@ class E2eKeysHandler(object): result = yield self.store.count_e2e_one_time_keys(user_id, device_id) - defer.returnValue({"one_time_key_counts": result}) + return {"one_time_key_counts": result} @defer.inlineCallbacks def _upload_one_time_keys_for_user( diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index ebd807bca6..41b871fc59 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -84,7 +84,7 @@ class E2eRoomKeysHandler(object): user_id, version, room_id, session_id ) - defer.returnValue(results) + return results @defer.inlineCallbacks def delete_room_keys(self, user_id, version, room_id=None, session_id=None): @@ -262,7 +262,7 @@ class E2eRoomKeysHandler(object): new_version = yield self.store.create_e2e_room_keys_version( user_id, version_info ) - defer.returnValue(new_version) + return new_version @defer.inlineCallbacks def get_version_info(self, user_id, version=None): @@ -292,7 +292,7 @@ class E2eRoomKeysHandler(object): raise NotFoundError("Unknown backup version") else: raise - defer.returnValue(res) + return res @defer.inlineCallbacks def 
delete_version(self, user_id, version=None): @@ -350,4 +350,4 @@ class E2eRoomKeysHandler(object): user_id, version, version_info ) - defer.returnValue({}) + return {} diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py index 6a38328af3..2f1f10a9af 100644 --- a/synapse/handlers/events.py +++ b/synapse/handlers/events.py @@ -143,7 +143,7 @@ class EventStreamHandler(BaseHandler): "end": tokens[1].to_string(), } - defer.returnValue(chunk) + return chunk class EventHandler(BaseHandler): @@ -166,7 +166,7 @@ class EventHandler(BaseHandler): event = yield self.store.get_event(event_id, check_room_id=room_id) if not event: - defer.returnValue(None) + return None return users = yield self.store.get_users_in_room(event.room_id) @@ -179,4 +179,4 @@ class EventHandler(BaseHandler): if not filtered: raise AuthError(403, "You don't have permission to access that event.") - defer.returnValue(event) + return event diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 57be968c67..2aa208a2b8 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -210,7 +210,7 @@ class FederationHandler(BaseHandler): event_id, origin, ) - defer.returnValue(None) + return None state = None auth_chain = [] @@ -676,7 +676,7 @@ class FederationHandler(BaseHandler): events = [e for e in events if e.event_id not in seen_events] if not events: - defer.returnValue([]) + return [] event_map = {e.event_id: e for e in events} @@ -838,7 +838,7 @@ class FederationHandler(BaseHandler): # TODO: We can probably do something more clever here. yield self._handle_new_event(dest, event, backfilled=True) - defer.returnValue(events) + return events @defer.inlineCallbacks def maybe_backfill(self, room_id, current_depth): @@ -894,7 +894,7 @@ class FederationHandler(BaseHandler): ) if not filtered_extremities: - defer.returnValue(False) + return False # Check if we reached a point where we should start backfilling. sorted_extremeties_tuple = sorted(extremities.items(), key=lambda e: -int(e[1])) @@ -965,7 +965,7 @@ class FederationHandler(BaseHandler): # If this succeeded then we probably already have the # appropriate stuff. # TODO: We can probably do something more intelligent here. - defer.returnValue(True) + return True except SynapseError as e: logger.info("Failed to backfill from %s because %s", dom, e) continue @@ -985,11 +985,11 @@ class FederationHandler(BaseHandler): logger.exception("Failed to backfill from %s because %s", dom, e) continue - defer.returnValue(False) + return False success = yield try_backfill(likely_domains) if success: - defer.returnValue(True) + return True # Huh, well *those* domains didn't work out. Lets try some domains # from the time. 
@@ -1031,11 +1031,11 @@ class FederationHandler(BaseHandler): [dom for dom, _ in likely_domains if dom not in tried_domains] ) if success: - defer.returnValue(True) + return True tried_domains.update(dom for dom, _ in likely_domains) - defer.returnValue(False) + return False def _sanity_check_event(self, ev): """ @@ -1082,7 +1082,7 @@ class FederationHandler(BaseHandler): pdu=event, ) - defer.returnValue(pdu) + return pdu @defer.inlineCallbacks def on_event_auth(self, event_id): @@ -1090,7 +1090,7 @@ class FederationHandler(BaseHandler): auth = yield self.store.get_auth_chain( [auth_id for auth_id in event.auth_event_ids()], include_given=True ) - defer.returnValue([e for e in auth]) + return [e for e in auth] @log_function @defer.inlineCallbacks @@ -1177,7 +1177,7 @@ class FederationHandler(BaseHandler): run_in_background(self._handle_queued_pdus, room_queue) - defer.returnValue(True) + return True @defer.inlineCallbacks def _handle_queued_pdus(self, room_queue): @@ -1247,7 +1247,7 @@ class FederationHandler(BaseHandler): room_version, event, context, do_sig_check=False ) - defer.returnValue(event) + return event @defer.inlineCallbacks @log_function @@ -1308,7 +1308,7 @@ class FederationHandler(BaseHandler): state = yield self.store.get_events(list(prev_state_ids.values())) - defer.returnValue({"state": list(state.values()), "auth_chain": auth_chain}) + return {"state": list(state.values()), "auth_chain": auth_chain} @defer.inlineCallbacks def on_invite_request(self, origin, pdu): @@ -1364,7 +1364,7 @@ class FederationHandler(BaseHandler): context = yield self.state_handler.compute_event_context(event) yield self.persist_events_and_notify([(event, context)]) - defer.returnValue(event) + return event @defer.inlineCallbacks def do_remotely_reject_invite(self, target_hosts, room_id, user_id): @@ -1389,7 +1389,7 @@ class FederationHandler(BaseHandler): context = yield self.state_handler.compute_event_context(event) yield self.persist_events_and_notify([(event, context)]) - defer.returnValue(event) + return event @defer.inlineCallbacks def _make_and_verify_event( @@ -1407,7 +1407,7 @@ class FederationHandler(BaseHandler): assert event.user_id == user_id assert event.state_key == user_id assert event.room_id == room_id - defer.returnValue((origin, event, format_ver)) + return (origin, event, format_ver) @defer.inlineCallbacks @log_function @@ -1451,7 +1451,7 @@ class FederationHandler(BaseHandler): logger.warn("Failed to create new leave %r because %s", event, e) raise e - defer.returnValue(event) + return event @defer.inlineCallbacks @log_function @@ -1484,7 +1484,7 @@ class FederationHandler(BaseHandler): event.signatures, ) - defer.returnValue(None) + return None @defer.inlineCallbacks def get_state_for_pdu(self, room_id, event_id): @@ -1512,9 +1512,9 @@ class FederationHandler(BaseHandler): del results[(event.type, event.state_key)] res = list(results.values()) - defer.returnValue(res) + return res else: - defer.returnValue([]) + return [] @defer.inlineCallbacks def get_state_ids_for_pdu(self, room_id, event_id): @@ -1539,9 +1539,9 @@ class FederationHandler(BaseHandler): else: results.pop((event.type, event.state_key), None) - defer.returnValue(list(results.values())) + return list(results.values()) else: - defer.returnValue([]) + return [] @defer.inlineCallbacks @log_function @@ -1554,7 +1554,7 @@ class FederationHandler(BaseHandler): events = yield filter_events_for_server(self.store, origin, events) - defer.returnValue(events) + return events @defer.inlineCallbacks @log_function @@ 
-1584,9 +1584,9 @@ class FederationHandler(BaseHandler): events = yield filter_events_for_server(self.store, origin, [event]) event = events[0] - defer.returnValue(event) + return event else: - defer.returnValue(None) + return None def get_min_depth_for_context(self, context): return self.store.get_min_depth(context) @@ -1618,7 +1618,7 @@ class FederationHandler(BaseHandler): self.store.remove_push_actions_from_staging, event.event_id ) - defer.returnValue(context) + return context @defer.inlineCallbacks def _handle_new_events(self, origin, event_infos, backfilled=False): @@ -1641,7 +1641,7 @@ class FederationHandler(BaseHandler): auth_events=ev_info.get("auth_events"), backfilled=backfilled, ) - defer.returnValue(res) + return res contexts = yield make_deferred_yieldable( defer.gatherResults( @@ -1800,7 +1800,7 @@ class FederationHandler(BaseHandler): if event.type == EventTypes.GuestAccess and not context.rejected: yield self.maybe_kick_guest_users(event) - defer.returnValue(context) + return context @defer.inlineCallbacks def _check_for_soft_fail(self, event, state, backfilled): @@ -1919,7 +1919,7 @@ class FederationHandler(BaseHandler): logger.debug("on_query_auth returning: %s", ret) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def on_get_missing_events( @@ -1942,7 +1942,7 @@ class FederationHandler(BaseHandler): self.store, origin, missing_events ) - defer.returnValue(missing_events) + return missing_events @defer.inlineCallbacks @log_function @@ -2418,16 +2418,14 @@ class FederationHandler(BaseHandler): logger.debug("construct_auth_difference returning") - defer.returnValue( - { - "auth_chain": local_auth, - "rejects": { - e.event_id: {"reason": reason_map[e.event_id], "proof": None} - for e in base_remote_rejected - }, - "missing": [e.event_id for e in missing_locals], - } - ) + return { + "auth_chain": local_auth, + "rejects": { + e.event_id: {"reason": reason_map[e.event_id], "proof": None} + for e in base_remote_rejected + }, + "missing": [e.event_id for e in missing_locals], + } @defer.inlineCallbacks @log_function @@ -2575,7 +2573,7 @@ class FederationHandler(BaseHandler): builder=builder ) EventValidator().validate_new(event) - defer.returnValue((event, context)) + return (event, context) @defer.inlineCallbacks def _check_signature(self, event, context): diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 7da63bb643..7b67c8ae0f 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -162,7 +162,7 @@ class GroupsLocalHandler(object): res.setdefault("user", {})["is_publicised"] = is_publicised - defer.returnValue(res) + return res @defer.inlineCallbacks def create_group(self, group_id, user_id, content): @@ -207,7 +207,7 @@ class GroupsLocalHandler(object): ) self.notifier.on_new_event("groups_key", token, users=[user_id]) - defer.returnValue(res) + return res @defer.inlineCallbacks def get_users_in_group(self, group_id, requester_user_id): @@ -217,7 +217,7 @@ class GroupsLocalHandler(object): res = yield self.groups_server_handler.get_users_in_group( group_id, requester_user_id ) - defer.returnValue(res) + return res group_server_name = get_domain_from_id(group_id) @@ -244,7 +244,7 @@ class GroupsLocalHandler(object): res["chunk"] = valid_entries - defer.returnValue(res) + return res @defer.inlineCallbacks def join_group(self, group_id, user_id, content): @@ -285,7 +285,7 @@ class GroupsLocalHandler(object): ) self.notifier.on_new_event("groups_key", token, users=[user_id]) - 
defer.returnValue({}) + return {} @defer.inlineCallbacks def accept_invite(self, group_id, user_id, content): @@ -326,7 +326,7 @@ class GroupsLocalHandler(object): ) self.notifier.on_new_event("groups_key", token, users=[user_id]) - defer.returnValue({}) + return {} @defer.inlineCallbacks def invite(self, group_id, user_id, requester_user_id, config): @@ -346,7 +346,7 @@ class GroupsLocalHandler(object): content, ) - defer.returnValue(res) + return res @defer.inlineCallbacks def on_invite(self, group_id, user_id, content): @@ -377,7 +377,7 @@ class GroupsLocalHandler(object): logger.warn("No profile for user %s: %s", user_id, e) user_profile = {} - defer.returnValue({"state": "invite", "user_profile": user_profile}) + return {"state": "invite", "user_profile": user_profile} @defer.inlineCallbacks def remove_user_from_group(self, group_id, user_id, requester_user_id, content): @@ -406,7 +406,7 @@ class GroupsLocalHandler(object): content, ) - defer.returnValue(res) + return res @defer.inlineCallbacks def user_removed_from_group(self, group_id, user_id, content): @@ -421,7 +421,7 @@ class GroupsLocalHandler(object): @defer.inlineCallbacks def get_joined_groups(self, user_id): group_ids = yield self.store.get_joined_groups(user_id) - defer.returnValue({"groups": group_ids}) + return {"groups": group_ids} @defer.inlineCallbacks def get_publicised_groups_for_user(self, user_id): @@ -433,14 +433,14 @@ class GroupsLocalHandler(object): for app_service in self.store.get_app_services(): result.extend(app_service.get_groups_for_user(user_id)) - defer.returnValue({"groups": result}) + return {"groups": result} else: bulk_result = yield self.transport_client.bulk_get_publicised_groups( get_domain_from_id(user_id), [user_id] ) result = bulk_result.get("users", {}).get(user_id) # TODO: Verify attestations - defer.returnValue({"groups": result}) + return {"groups": result} @defer.inlineCallbacks def bulk_get_publicised_groups(self, user_ids, proxy=True): @@ -475,4 +475,4 @@ class GroupsLocalHandler(object): for app_service in self.store.get_app_services(): results[uid].extend(app_service.get_groups_for_user(uid)) - defer.returnValue({"users": results}) + return {"users": results} diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 546d6169e9..d199521b58 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -82,7 +82,7 @@ class IdentityHandler(BaseHandler): "%s is not a trusted ID server: rejecting 3pid " + "credentials", id_server, ) - defer.returnValue(None) + return None try: data = yield self.http_client.get_json( @@ -95,8 +95,8 @@ class IdentityHandler(BaseHandler): raise e.to_synapse_error() if "medium" in data: - defer.returnValue(data) - defer.returnValue(None) + return data + return None @defer.inlineCallbacks def bind_threepid(self, creds, mxid): @@ -133,7 +133,7 @@ class IdentityHandler(BaseHandler): ) except CodeMessageException as e: data = json.loads(e.msg) # XXX WAT? 
- defer.returnValue(data) + return data @defer.inlineCallbacks def try_unbind_threepid(self, mxid, threepid): @@ -161,7 +161,7 @@ class IdentityHandler(BaseHandler): # We don't know where to unbind, so we don't have a choice but to return if not id_servers: - defer.returnValue(False) + return False changed = True for id_server in id_servers: @@ -169,7 +169,7 @@ class IdentityHandler(BaseHandler): mxid, threepid, id_server ) - defer.returnValue(changed) + return changed @defer.inlineCallbacks def try_unbind_threepid_with_id_server(self, mxid, threepid, id_server): @@ -224,7 +224,7 @@ class IdentityHandler(BaseHandler): id_server=id_server, ) - defer.returnValue(changed) + return changed @defer.inlineCallbacks def requestEmailToken( @@ -250,7 +250,7 @@ class IdentityHandler(BaseHandler): % (id_server, "/_matrix/identity/api/v1/validate/email/requestToken"), params, ) - defer.returnValue(data) + return data except HttpResponseException as e: logger.info("Proxied requestToken failed: %r", e) raise e.to_synapse_error() @@ -278,7 +278,7 @@ class IdentityHandler(BaseHandler): % (id_server, "/_matrix/identity/api/v1/validate/msisdn/requestToken"), params, ) - defer.returnValue(data) + return data except HttpResponseException as e: logger.info("Proxied requestToken failed: %r", e) raise e.to_synapse_error() diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py index 54c966c8a6..42d6650ed9 100644 --- a/synapse/handlers/initial_sync.py +++ b/synapse/handlers/initial_sync.py @@ -250,7 +250,7 @@ class InitialSyncHandler(BaseHandler): "end": now_token.to_string(), } - defer.returnValue(ret) + return ret @defer.inlineCallbacks def room_initial_sync(self, requester, room_id, pagin_config=None): @@ -301,7 +301,7 @@ class InitialSyncHandler(BaseHandler): result["account_data"] = account_data_events - defer.returnValue(result) + return result @defer.inlineCallbacks def _room_initial_sync_parted( @@ -330,28 +330,24 @@ class InitialSyncHandler(BaseHandler): time_now = self.clock.time_msec() - defer.returnValue( - { - "membership": membership, - "room_id": room_id, - "messages": { - "chunk": ( - yield self._event_serializer.serialize_events( - messages, time_now - ) - ), - "start": start_token.to_string(), - "end": end_token.to_string(), - }, - "state": ( - yield self._event_serializer.serialize_events( - room_state.values(), time_now - ) + return { + "membership": membership, + "room_id": room_id, + "messages": { + "chunk": ( + yield self._event_serializer.serialize_events(messages, time_now) ), - "presence": [], - "receipts": [], - } - ) + "start": start_token.to_string(), + "end": end_token.to_string(), + }, + "state": ( + yield self._event_serializer.serialize_events( + room_state.values(), time_now + ) + ), + "presence": [], + "receipts": [], + } @defer.inlineCallbacks def _room_initial_sync_joined( @@ -384,13 +380,13 @@ class InitialSyncHandler(BaseHandler): def get_presence(): # If presence is disabled, return an empty list if not self.hs.config.use_presence: - defer.returnValue([]) + return [] states = yield presence_handler.get_states( [m.user_id for m in room_members], as_event=True ) - defer.returnValue(states) + return states @defer.inlineCallbacks def get_receipts(): @@ -399,7 +395,7 @@ class InitialSyncHandler(BaseHandler): ) if not receipts: receipts = [] - defer.returnValue(receipts) + return receipts presence, receipts, (messages, token) = yield make_deferred_yieldable( defer.gatherResults( @@ -442,7 +438,7 @@ class InitialSyncHandler(BaseHandler): if not is_peeking: 
ret["membership"] = membership - defer.returnValue(ret) + return ret @defer.inlineCallbacks def _check_in_room_or_world_readable(self, room_id, user_id): @@ -453,7 +449,7 @@ class InitialSyncHandler(BaseHandler): # * The user is a guest user, and has joined the room # else it will throw. member_event = yield self.auth.check_user_was_in_room(room_id, user_id) - defer.returnValue((member_event.membership, member_event.event_id)) + return (member_event.membership, member_event.event_id) return except AuthError: visibility = yield self.state_handler.get_current_state( @@ -463,7 +459,7 @@ class InitialSyncHandler(BaseHandler): visibility and visibility.content["history_visibility"] == "world_readable" ): - defer.returnValue((Membership.JOIN, None)) + return (Membership.JOIN, None) return raise AuthError( 403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 6d7a987f13..8b27e23378 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -87,7 +87,7 @@ class MessageHandler(object): ) data = room_state[membership_event_id].get(key) - defer.returnValue(data) + return data @defer.inlineCallbacks def get_state_events( @@ -174,7 +174,7 @@ class MessageHandler(object): # events, as clients won't use them. bundle_aggregations=False, ) - defer.returnValue(events) + return events @defer.inlineCallbacks def get_joined_members(self, requester, room_id): @@ -213,15 +213,13 @@ class MessageHandler(object): # Loop fell through, AS has no interested users in room raise AuthError(403, "Appservice not in room") - defer.returnValue( - { - user_id: { - "avatar_url": profile.avatar_url, - "display_name": profile.display_name, - } - for user_id, profile in iteritems(users_with_profile) + return { + user_id: { + "avatar_url": profile.avatar_url, + "display_name": profile.display_name, } - ) + for user_id, profile in iteritems(users_with_profile) + } class EventCreationHandler(object): @@ -398,7 +396,7 @@ class EventCreationHandler(object): self.validator.validate_new(event) - defer.returnValue((event, context)) + return (event, context) def _is_exempt_from_privacy_policy(self, builder, requester): """"Determine if an event to be sent is exempt from having to consent @@ -425,9 +423,9 @@ class EventCreationHandler(object): @defer.inlineCallbacks def _is_server_notices_room(self, room_id): if self.config.server_notices_mxid is None: - defer.returnValue(False) + return False user_ids = yield self.store.get_users_in_room(room_id) - defer.returnValue(self.config.server_notices_mxid in user_ids) + return self.config.server_notices_mxid in user_ids @defer.inlineCallbacks def assert_accepted_privacy_policy(self, requester): @@ -507,7 +505,7 @@ class EventCreationHandler(object): event.event_id, prev_state.event_id, ) - defer.returnValue(prev_state) + return prev_state yield self.handle_new_client_event( requester=requester, event=event, context=context, ratelimit=ratelimit @@ -531,7 +529,7 @@ class EventCreationHandler(object): prev_content = encode_canonical_json(prev_event.content) next_content = encode_canonical_json(event.content) if prev_content == next_content: - defer.returnValue(prev_event) + return prev_event return @defer.inlineCallbacks @@ -563,7 +561,7 @@ class EventCreationHandler(object): yield self.send_nonmember_event( requester, event, context, ratelimit=ratelimit ) - defer.returnValue(event) + return event @measure_func("create_new_client_event") @defer.inlineCallbacks @@ -626,7 +624,7 @@ class 
EventCreationHandler(object): logger.debug("Created event %s", event.event_id) - defer.returnValue((event, context)) + return (event, context) @measure_func("handle_new_client_event") @defer.inlineCallbacks diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 20bcfed334..d83aab3f74 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -242,13 +242,11 @@ class PaginationHandler(object): ) if not events: - defer.returnValue( - { - "chunk": [], - "start": pagin_config.from_token.to_string(), - "end": next_token.to_string(), - } - ) + return { + "chunk": [], + "start": pagin_config.from_token.to_string(), + "end": next_token.to_string(), + } state = None if event_filter and event_filter.lazy_load_members() and len(events) > 0: @@ -286,4 +284,4 @@ class PaginationHandler(object): ) ) - defer.returnValue(chunk) + return chunk diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 6f3537e435..ea54d0b991 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -461,7 +461,7 @@ class PresenceHandler(object): if affect_presence: run_in_background(_end) - defer.returnValue(_user_syncing()) + return _user_syncing() def get_currently_syncing_users(self): """Get the set of user ids that are currently syncing on this HS. @@ -556,7 +556,7 @@ class PresenceHandler(object): """Get the current presence state for a user. """ res = yield self.current_state_for_users([user_id]) - defer.returnValue(res[user_id]) + return res[user_id] @defer.inlineCallbacks def current_state_for_users(self, user_ids): @@ -585,7 +585,7 @@ class PresenceHandler(object): states.update(new) self.user_to_current_state.update(new) - defer.returnValue(states) + return states @defer.inlineCallbacks def _persist_and_notify(self, states): @@ -681,7 +681,7 @@ class PresenceHandler(object): def get_state(self, target_user, as_event=False): results = yield self.get_states([target_user.to_string()], as_event=as_event) - defer.returnValue(results[0]) + return results[0] @defer.inlineCallbacks def get_states(self, target_user_ids, as_event=False): @@ -703,17 +703,15 @@ class PresenceHandler(object): now = self.clock.time_msec() if as_event: - defer.returnValue( - [ - { - "type": "m.presence", - "content": format_user_presence_state(state, now), - } - for state in updates - ] - ) + return [ + { + "type": "m.presence", + "content": format_user_presence_state(state, now), + } + for state in updates + ] else: - defer.returnValue(updates) + return updates @defer.inlineCallbacks def set_state(self, target_user, state, ignore_status_msg=False): @@ -757,9 +755,9 @@ class PresenceHandler(object): ) if observer_room_ids & observed_room_ids: - defer.returnValue(True) + return True - defer.returnValue(False) + return False @defer.inlineCallbacks def get_all_presence_updates(self, last_id, current_id): @@ -778,7 +776,7 @@ class PresenceHandler(object): # TODO(markjh): replicate the unpersisted changes. # This could use the in-memory stores for recent changes. rows = yield self.store.get_all_presence_updates(last_id, current_id) - defer.returnValue(rows) + return rows def notify_new_event(self): """Called when new events have happened. Handles users and servers @@ -1034,7 +1032,7 @@ class PresenceEventSource(object): # # Hence this guard where we just return nothing so that the sync # doesn't return. C.f. #5503. 
- defer.returnValue(([], max_token)) + return ([], max_token) presence = self.get_presence_handler() stream_change_cache = self.store.presence_stream_cache @@ -1068,17 +1066,11 @@ class PresenceEventSource(object): updates = yield presence.current_state_for_users(user_ids_changed) if include_offline: - defer.returnValue((list(updates.values()), max_token)) + return (list(updates.values()), max_token) else: - defer.returnValue( - ( - [ - s - for s in itervalues(updates) - if s.state != PresenceState.OFFLINE - ], - max_token, - ) + return ( + [s for s in itervalues(updates) if s.state != PresenceState.OFFLINE], + max_token, ) def get_current_key(self): @@ -1107,7 +1099,7 @@ class PresenceEventSource(object): ) users_interested_in.update(user_ids) - defer.returnValue(users_interested_in) + return users_interested_in def handle_timeouts(user_states, is_mine_fn, syncing_user_ids, now): @@ -1287,7 +1279,7 @@ def get_interested_parties(store, states): # Always notify self users_to_states.setdefault(state.user_id, []).append(state) - defer.returnValue((room_ids_to_states, users_to_states)) + return (room_ids_to_states, users_to_states) @defer.inlineCallbacks @@ -1321,4 +1313,4 @@ def get_interested_remotes(store, states, state_handler): host = get_domain_from_id(user_id) hosts_and_states.append(([host], states)) - defer.returnValue(hosts_and_states) + return hosts_and_states diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index a2388a7091..2cc237e6a5 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -73,7 +73,7 @@ class BaseProfileHandler(BaseHandler): raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) raise - defer.returnValue({"displayname": displayname, "avatar_url": avatar_url}) + return {"displayname": displayname, "avatar_url": avatar_url} else: try: result = yield self.federation.make_query( @@ -82,7 +82,7 @@ class BaseProfileHandler(BaseHandler): args={"user_id": user_id}, ignore_backoff=True, ) - defer.returnValue(result) + return result except RequestSendFailed as e: raise_from(SynapseError(502, "Failed to fetch profile"), e) except HttpResponseException as e: @@ -108,10 +108,10 @@ class BaseProfileHandler(BaseHandler): raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) raise - defer.returnValue({"displayname": displayname, "avatar_url": avatar_url}) + return {"displayname": displayname, "avatar_url": avatar_url} else: profile = yield self.store.get_from_remote_profile_cache(user_id) - defer.returnValue(profile or {}) + return profile or {} @defer.inlineCallbacks def get_displayname(self, target_user): @@ -125,7 +125,7 @@ class BaseProfileHandler(BaseHandler): raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) raise - defer.returnValue(displayname) + return displayname else: try: result = yield self.federation.make_query( @@ -139,7 +139,7 @@ class BaseProfileHandler(BaseHandler): except HttpResponseException as e: raise e.to_synapse_error() - defer.returnValue(result["displayname"]) + return result["displayname"] @defer.inlineCallbacks def set_displayname(self, target_user, requester, new_displayname, by_admin=False): @@ -186,7 +186,7 @@ class BaseProfileHandler(BaseHandler): if e.code == 404: raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) raise - defer.returnValue(avatar_url) + return avatar_url else: try: result = yield self.federation.make_query( @@ -200,7 +200,7 @@ class BaseProfileHandler(BaseHandler): except HttpResponseException as e: raise e.to_synapse_error() - 
defer.returnValue(result["avatar_url"]) + return result["avatar_url"] @defer.inlineCallbacks def set_avatar_url(self, target_user, requester, new_avatar_url, by_admin=False): @@ -251,7 +251,7 @@ class BaseProfileHandler(BaseHandler): raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) raise - defer.returnValue(response) + return response @defer.inlineCallbacks def _update_join_states(self, requester, target_user): diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py index a85dd8cdee..218d60f0c3 100644 --- a/synapse/handlers/receipts.py +++ b/synapse/handlers/receipts.py @@ -84,7 +84,7 @@ class ReceiptsHandler(BaseHandler): if min_batch_id is None: # no new receipts - defer.returnValue(False) + return False affected_room_ids = list(set([r.room_id for r in receipts])) @@ -94,7 +94,7 @@ class ReceiptsHandler(BaseHandler): min_batch_id, max_batch_id, affected_room_ids ) - defer.returnValue(True) + return True @defer.inlineCallbacks def received_client_receipt(self, room_id, receipt_type, user_id, event_id): @@ -124,9 +124,9 @@ class ReceiptsHandler(BaseHandler): ) if not result: - defer.returnValue([]) + return [] - defer.returnValue(result) + return result class ReceiptEventSource(object): @@ -139,13 +139,13 @@ class ReceiptEventSource(object): to_key = yield self.get_current_key() if from_key == to_key: - defer.returnValue(([], to_key)) + return ([], to_key) events = yield self.store.get_linearized_receipts_for_rooms( room_ids, from_key=from_key, to_key=to_key ) - defer.returnValue((events, to_key)) + return (events, to_key) def get_current_key(self, direction="f"): return self.store.get_max_receipt_stream_id() @@ -164,4 +164,4 @@ class ReceiptEventSource(object): room_ids, from_key=from_key, to_key=to_key ) - defer.returnValue((events, to_key)) + return (events, to_key) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index bb7cfd71b9..4631fab94e 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -265,7 +265,7 @@ class RegistrationHandler(BaseHandler): # Bind email to new account yield self._register_email_threepid(user_id, threepid_dict, None, False) - defer.returnValue(user_id) + return user_id @defer.inlineCallbacks def _auto_join_rooms(self, user_id): @@ -360,7 +360,7 @@ class RegistrationHandler(BaseHandler): appservice_id=service_id, create_profile_with_displayname=user.localpart, ) - defer.returnValue(user_id) + return user_id @defer.inlineCallbacks def check_recaptcha(self, ip, private_key, challenge, response): @@ -461,7 +461,7 @@ class RegistrationHandler(BaseHandler): id = self._next_generated_user_id self._next_generated_user_id += 1 - defer.returnValue(str(id)) + return str(id) @defer.inlineCallbacks def _validate_captcha(self, ip_addr, private_key, challenge, response): @@ -481,7 +481,7 @@ class RegistrationHandler(BaseHandler): "error_url": "http://www.recaptcha.net/recaptcha/api/challenge?" 
+ "error=%s" % lines[1], } - defer.returnValue(json) + return json @defer.inlineCallbacks def _submit_captcha(self, ip_addr, private_key, challenge, response): @@ -497,7 +497,7 @@ class RegistrationHandler(BaseHandler): "response": response, }, ) - defer.returnValue(data) + return data @defer.inlineCallbacks def _join_user_to_room(self, requester, room_identifier): @@ -622,7 +622,7 @@ class RegistrationHandler(BaseHandler): initial_display_name=initial_display_name, is_guest=is_guest, ) - defer.returnValue((r["device_id"], r["access_token"])) + return (r["device_id"], r["access_token"]) valid_until_ms = None if self.session_lifetime is not None: @@ -645,7 +645,7 @@ class RegistrationHandler(BaseHandler): user_id, device_id=device_id, valid_until_ms=valid_until_ms ) - defer.returnValue((device_id, access_token)) + return (device_id, access_token) @defer.inlineCallbacks def post_registration_actions( @@ -798,7 +798,7 @@ class RegistrationHandler(BaseHandler): if ex.errcode == Codes.MISSING_PARAM: # This will only happen if the ID server returns a malformed response logger.info("Can't add incomplete 3pid") - defer.returnValue(None) + return None raise yield self._auth_handler.add_threepid( diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index db3f8cb76b..5caa90c3b7 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -128,7 +128,7 @@ class RoomCreationHandler(BaseHandler): old_room_id, new_version, # args for _upgrade_room ) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def _upgrade_room(self, requester, old_room_id, new_version): @@ -193,7 +193,7 @@ class RoomCreationHandler(BaseHandler): requester, old_room_id, new_room_id, old_room_state ) - defer.returnValue(new_room_id) + return new_room_id @defer.inlineCallbacks def _update_upgraded_room_pls( @@ -671,7 +671,7 @@ class RoomCreationHandler(BaseHandler): result["room_alias"] = room_alias.to_string() yield directory_handler.send_room_alias_update_event(requester, room_id) - defer.returnValue(result) + return result @defer.inlineCallbacks def _send_events_for_new_room( @@ -796,7 +796,7 @@ class RoomCreationHandler(BaseHandler): room_creator_user_id=creator_id, is_public=is_public, ) - defer.returnValue(gen_room_id) + return gen_room_id except StoreError: attempts += 1 raise StoreError(500, "Couldn't generate a room ID.") @@ -839,7 +839,7 @@ class RoomContextHandler(object): event_id, get_prev_content=True, allow_none=True ) if not event: - defer.returnValue(None) + return None return filtered = yield (filter_evts([event])) @@ -890,7 +890,7 @@ class RoomContextHandler(object): results["end"] = token.copy_and_replace("room_key", results["end"]).to_string() - defer.returnValue(results) + return results class RoomEventSource(object): @@ -941,7 +941,7 @@ class RoomEventSource(object): else: end_key = to_key - defer.returnValue((events, end_key)) + return (events, end_key) def get_current_key(self): return self.store.get_room_events_max_id() @@ -959,4 +959,4 @@ class RoomEventSource(object): limit=config.limit, ) - defer.returnValue((events, next_key)) + return (events, next_key) diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index aae696a7e8..e9094ad02b 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -325,7 +325,7 @@ class RoomListHandler(BaseHandler): current_limit=since_token.current_limit - 1, ).to_token() - defer.returnValue(results) + return results @defer.inlineCallbacks def _append_room_entry_to_chunk( @@ -420,7 +420,7 @@ class 
RoomListHandler(BaseHandler): if join_rules_event: join_rule = join_rules_event.content.get("join_rule", None) if not allow_private and join_rule and join_rule != JoinRules.PUBLIC: - defer.returnValue(None) + return None # Return whether this room is open to federation users or not create_event = current_state.get((EventTypes.Create, "")) @@ -469,7 +469,7 @@ class RoomListHandler(BaseHandler): if avatar_url: result["avatar_url"] = avatar_url - defer.returnValue(result) + return result @defer.inlineCallbacks def get_remote_public_room_list( @@ -482,7 +482,7 @@ class RoomListHandler(BaseHandler): third_party_instance_id=None, ): if not self.enable_room_list_search: - defer.returnValue({"chunk": [], "total_room_count_estimate": 0}) + return {"chunk": [], "total_room_count_estimate": 0} if search_filter: # We currently don't support searching across federation, so we have @@ -507,7 +507,7 @@ class RoomListHandler(BaseHandler): ] } - defer.returnValue(res) + return res def _get_remote_list_cached( self, diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index e0196ef83e..baea08ddd0 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -191,7 +191,7 @@ class RoomMemberHandler(object): ) if duplicate is not None: # Discard the new event since this membership change is a no-op. - defer.returnValue(duplicate) + return duplicate yield self.event_creation_handler.handle_new_client_event( requester, event, context, extra_users=[target], ratelimit=ratelimit @@ -233,7 +233,7 @@ class RoomMemberHandler(object): if prev_member_event.membership == Membership.JOIN: yield self._user_left_room(target, room_id) - defer.returnValue(event) + return event @defer.inlineCallbacks def copy_room_tags_and_direct_to_room(self, old_room_id, new_room_id, user_id): @@ -303,7 +303,7 @@ class RoomMemberHandler(object): require_consent=require_consent, ) - defer.returnValue(result) + return result @defer.inlineCallbacks def _update_membership( @@ -423,7 +423,7 @@ class RoomMemberHandler(object): same_membership = old_membership == effective_membership_state same_sender = requester.user.to_string() == old_state.sender if same_sender and same_membership and same_content: - defer.returnValue(old_state) + return old_state if old_membership in ["ban", "leave"] and action == "kick": raise AuthError(403, "The target user is not in the room") @@ -473,7 +473,7 @@ class RoomMemberHandler(object): ret = yield self._remote_join( requester, remote_room_hosts, room_id, target, content ) - defer.returnValue(ret) + return ret elif effective_membership_state == Membership.LEAVE: if not is_host_in_room: @@ -495,7 +495,7 @@ class RoomMemberHandler(object): res = yield self._remote_reject_invite( requester, remote_room_hosts, room_id, target ) - defer.returnValue(res) + return res res = yield self._local_membership_update( requester=requester, @@ -508,7 +508,7 @@ class RoomMemberHandler(object): content=content, require_consent=require_consent, ) - defer.returnValue(res) + return res @defer.inlineCallbacks def send_membership_event( @@ -596,11 +596,11 @@ class RoomMemberHandler(object): """ guest_access_id = current_state_ids.get((EventTypes.GuestAccess, ""), None) if not guest_access_id: - defer.returnValue(False) + return False guest_access = yield self.store.get_event(guest_access_id) - defer.returnValue( + return ( guest_access and guest_access.content and "guest_access" in guest_access.content @@ -635,7 +635,7 @@ class RoomMemberHandler(object): servers.remove(room_alias.domain) 
servers.insert(0, room_alias.domain) - defer.returnValue((RoomID.from_string(room_id), servers)) + return (RoomID.from_string(room_id), servers) @defer.inlineCallbacks def _get_inviter(self, user_id, room_id): @@ -643,7 +643,7 @@ class RoomMemberHandler(object): user_id=user_id, room_id=room_id ) if invite: - defer.returnValue(UserID.from_string(invite.sender)) + return UserID.from_string(invite.sender) @defer.inlineCallbacks def do_3pid_invite( @@ -708,11 +708,11 @@ class RoomMemberHandler(object): if "signatures" not in data: raise AuthError(401, "No signatures on 3pid binding") yield self._verify_any_signature(data, id_server) - defer.returnValue(data["mxid"]) + return data["mxid"] except IOError as e: logger.warn("Error from identity server lookup: %s" % (e,)) - defer.returnValue(None) + return None @defer.inlineCallbacks def _verify_any_signature(self, data, server_hostname): @@ -904,7 +904,7 @@ class RoomMemberHandler(object): if not public_keys: public_keys.append(fallback_public_key) display_name = data["display_name"] - defer.returnValue((token, public_keys, fallback_public_key, display_name)) + return (token, public_keys, fallback_public_key, display_name) @defer.inlineCallbacks def _is_host_in_room(self, current_state_ids): @@ -913,7 +913,7 @@ class RoomMemberHandler(object): create_event_id = current_state_ids.get(("m.room.create", "")) if len(current_state_ids) == 1 and create_event_id: # We can only get here if we're in the process of creating the room - defer.returnValue(True) + return True for etype, state_key in current_state_ids: if etype != EventTypes.Member or not self.hs.is_mine_id(state_key): @@ -925,16 +925,16 @@ class RoomMemberHandler(object): continue if event.membership == Membership.JOIN: - defer.returnValue(True) + return True - defer.returnValue(False) + return False @defer.inlineCallbacks def _is_server_notice_room(self, room_id): if self._server_notices_mxid is None: - defer.returnValue(False) + return False user_ids = yield self.store.get_users_in_room(room_id) - defer.returnValue(self._server_notices_mxid in user_ids) + return self._server_notices_mxid in user_ids class RoomMemberMasterHandler(RoomMemberHandler): @@ -978,7 +978,7 @@ class RoomMemberMasterHandler(RoomMemberHandler): ret = yield fed_handler.do_remotely_reject_invite( remote_room_hosts, room_id, target.to_string() ) - defer.returnValue(ret) + return ret except Exception as e: # if we were unable to reject the exception, just mark # it as rejected on our end and plough ahead. 
@@ -989,7 +989,7 @@ class RoomMemberMasterHandler(RoomMemberHandler): logger.warn("Failed to reject invite: %s", e) yield self.store.locally_reject_invite(target.to_string(), room_id) - defer.returnValue({}) + return {} def _user_joined_room(self, target, room_id): """Implements RoomMemberHandler._user_joined_room diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py index fc873a3ba6..75e96ae1a2 100644 --- a/synapse/handlers/room_member_worker.py +++ b/synapse/handlers/room_member_worker.py @@ -53,7 +53,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler): yield self._user_joined_room(user, room_id) - defer.returnValue(ret) + return ret def _remote_reject_invite(self, requester, remote_room_hosts, room_id, target): """Implements RoomMemberHandler._remote_reject_invite diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index ddc4430d03..cd5e90bacb 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -69,7 +69,7 @@ class SearchHandler(BaseHandler): # Scan through the old room for further predecessors room_id = predecessor["room_id"] - defer.returnValue(historical_room_ids) + return historical_room_ids @defer.inlineCallbacks def search(self, user, content, batch=None): @@ -186,13 +186,11 @@ class SearchHandler(BaseHandler): room_ids.intersection_update({batch_group_key}) if not room_ids: - defer.returnValue( - { - "search_categories": { - "room_events": {"results": [], "count": 0, "highlights": []} - } + return { + "search_categories": { + "room_events": {"results": [], "count": 0, "highlights": []} } - ) + } rank_map = {} # event_id -> rank of event allowed_events = [] @@ -455,4 +453,4 @@ class SearchHandler(BaseHandler): if global_next_batch: rooms_cat_res["next_batch"] = global_next_batch - defer.returnValue({"search_categories": {"room_events": rooms_cat_res}}) + return {"search_categories": {"room_events": rooms_cat_res}} diff --git a/synapse/handlers/state_deltas.py b/synapse/handlers/state_deltas.py index 6b364befd5..f065970c40 100644 --- a/synapse/handlers/state_deltas.py +++ b/synapse/handlers/state_deltas.py @@ -48,7 +48,7 @@ class StateDeltasHandler(object): if not event and not prev_event: logger.debug("Neither event exists: %r %r", prev_event_id, event_id) - defer.returnValue(None) + return None prev_value = None value = None @@ -62,8 +62,8 @@ class StateDeltasHandler(object): logger.debug("prev_value: %r -> value: %r", prev_value, value) if value == public_value and prev_value != public_value: - defer.returnValue(True) + return True elif value != public_value and prev_value == public_value: - defer.returnValue(False) + return False else: - defer.returnValue(None) + return None diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py index a0ee8db988..4449da6669 100644 --- a/synapse/handlers/stats.py +++ b/synapse/handlers/stats.py @@ -86,7 +86,7 @@ class StatsHandler(StateDeltasHandler): # If still None then the initial background update hasn't happened yet if self.pos is None: - defer.returnValue(None) + return None # Loop round handling deltas until we're up to date while True: @@ -328,6 +328,6 @@ class StatsHandler(StateDeltasHandler): == "world_readable" ) ): - defer.returnValue(True) + return True else: - defer.returnValue(False) + return False diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index cd1ac0a27a..4007284e5b 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -263,7 +263,7 @@ class SyncHandler(object): timeout, full_state, ) - 
defer.returnValue(res) + return res @defer.inlineCallbacks def _wait_for_sync_for_user(self, sync_config, since_token, timeout, full_state): @@ -303,7 +303,7 @@ class SyncHandler(object): lazy_loaded = "false" non_empty_sync_counter.labels(sync_type, lazy_loaded).inc() - defer.returnValue(result) + return result def current_sync_for_user(self, sync_config, since_token=None, full_state=False): """Get the sync for client needed to match what the server has now. @@ -317,7 +317,7 @@ class SyncHandler(object): user_id = user.to_string() rules = yield self.store.get_push_rules_for_user(user_id) rules = format_push_rules_for_user(user, rules) - defer.returnValue(rules) + return rules @defer.inlineCallbacks def ephemeral_by_room(self, sync_result_builder, now_token, since_token=None): @@ -378,7 +378,7 @@ class SyncHandler(object): event_copy = {k: v for (k, v) in iteritems(event) if k != "room_id"} ephemeral_by_room.setdefault(room_id, []).append(event_copy) - defer.returnValue((now_token, ephemeral_by_room)) + return (now_token, ephemeral_by_room) @defer.inlineCallbacks def _load_filtered_recents( @@ -426,8 +426,8 @@ class SyncHandler(object): recents = [] if not limited or block_all_timeline: - defer.returnValue( - TimelineBatch(events=recents, prev_batch=now_token, limited=False) + return TimelineBatch( + events=recents, prev_batch=now_token, limited=False ) filtering_factor = 2 @@ -490,12 +490,10 @@ class SyncHandler(object): prev_batch_token = now_token.copy_and_replace("room_key", room_key) - defer.returnValue( - TimelineBatch( - events=recents, - prev_batch=prev_batch_token, - limited=limited or newly_joined_room, - ) + return TimelineBatch( + events=recents, + prev_batch=prev_batch_token, + limited=limited or newly_joined_room, ) @defer.inlineCallbacks @@ -517,7 +515,7 @@ class SyncHandler(object): if event.is_state(): state_ids = state_ids.copy() state_ids[(event.type, event.state_key)] = event.event_id - defer.returnValue(state_ids) + return state_ids @defer.inlineCallbacks def get_state_at(self, room_id, stream_position, state_filter=StateFilter.all()): @@ -549,7 +547,7 @@ class SyncHandler(object): else: # no events in this room - so presumably no state state = {} - defer.returnValue(state) + return state @defer.inlineCallbacks def compute_summary(self, room_id, sync_config, batch, state, now_token): @@ -579,7 +577,7 @@ class SyncHandler(object): ) if not last_events: - defer.returnValue(None) + return None return last_event = last_events[-1] @@ -611,14 +609,14 @@ class SyncHandler(object): if name_id: name = yield self.store.get_event(name_id, allow_none=True) if name and name.content.get("name"): - defer.returnValue(summary) + return summary if canonical_alias_id: canonical_alias = yield self.store.get_event( canonical_alias_id, allow_none=True ) if canonical_alias and canonical_alias.content.get("alias"): - defer.returnValue(summary) + return summary me = sync_config.user.to_string() @@ -652,7 +650,7 @@ class SyncHandler(object): summary["m.heroes"] = sorted([user_id for user_id in gone_user_ids])[0:5] if not sync_config.filter_collection.lazy_load_members(): - defer.returnValue(summary) + return summary # ensure we send membership events for heroes if needed cache_key = (sync_config.user.to_string(), sync_config.device_id) @@ -686,7 +684,7 @@ class SyncHandler(object): cache.set(s.state_key, s.event_id) state[(EventTypes.Member, s.state_key)] = s - defer.returnValue(summary) + return summary def get_lazy_loaded_members_cache(self, cache_key): cache = 
self.lazy_loaded_members_cache.get(cache_key) @@ -871,14 +869,12 @@ class SyncHandler(object): if state_ids: state = yield self.store.get_events(list(state_ids.values())) - defer.returnValue( - { - (e.type, e.state_key): e - for e in sync_config.filter_collection.filter_room_state( - list(state.values()) - ) - } - ) + return { + (e.type, e.state_key): e + for e in sync_config.filter_collection.filter_room_state( + list(state.values()) + ) + } @defer.inlineCallbacks def unread_notifs_for_room_id(self, room_id, sync_config): @@ -894,11 +890,11 @@ class SyncHandler(object): notifs = yield self.store.get_unread_event_push_actions_by_room_for_user( room_id, sync_config.user.to_string(), last_unread_event_id ) - defer.returnValue(notifs) + return notifs # There is no new information in this period, so your notification # count is whatever it was last time. - defer.returnValue(None) + return None @defer.inlineCallbacks def generate_sync_result(self, sync_config, since_token=None, full_state=False): @@ -989,19 +985,17 @@ class SyncHandler(object): "Sync result for newly joined room %s: %r", room_id, joined_room ) - defer.returnValue( - SyncResult( - presence=sync_result_builder.presence, - account_data=sync_result_builder.account_data, - joined=sync_result_builder.joined, - invited=sync_result_builder.invited, - archived=sync_result_builder.archived, - to_device=sync_result_builder.to_device, - device_lists=device_lists, - groups=sync_result_builder.groups, - device_one_time_keys_count=one_time_key_counts, - next_batch=sync_result_builder.now_token, - ) + return SyncResult( + presence=sync_result_builder.presence, + account_data=sync_result_builder.account_data, + joined=sync_result_builder.joined, + invited=sync_result_builder.invited, + archived=sync_result_builder.archived, + to_device=sync_result_builder.to_device, + device_lists=device_lists, + groups=sync_result_builder.groups, + device_one_time_keys_count=one_time_key_counts, + next_batch=sync_result_builder.now_token, ) @measure_func("_generate_sync_entry_for_groups") @@ -1124,11 +1118,9 @@ class SyncHandler(object): # Remove any users that we still share a room with. 
newly_left_users -= users_who_share_room - defer.returnValue( - DeviceLists(changed=users_that_have_changed, left=newly_left_users) - ) + return DeviceLists(changed=users_that_have_changed, left=newly_left_users) else: - defer.returnValue(DeviceLists(changed=[], left=[])) + return DeviceLists(changed=[], left=[]) @defer.inlineCallbacks def _generate_sync_entry_for_to_device(self, sync_result_builder): @@ -1225,7 +1217,7 @@ class SyncHandler(object): sync_result_builder.account_data = account_data_for_user - defer.returnValue(account_data_by_room) + return account_data_by_room @defer.inlineCallbacks def _generate_sync_entry_for_presence( @@ -1325,7 +1317,7 @@ class SyncHandler(object): ) if not tags_by_room: logger.debug("no-oping sync") - defer.returnValue(([], [], [], [])) + return ([], [], [], []) ignored_account_data = yield self.store.get_global_account_data_by_type_for_user( "m.ignored_user_list", user_id=user_id @@ -1388,13 +1380,11 @@ class SyncHandler(object): newly_left_users -= newly_joined_or_invited_users - defer.returnValue( - ( - newly_joined_rooms, - newly_joined_or_invited_users, - newly_left_rooms, - newly_left_users, - ) + return ( + newly_joined_rooms, + newly_joined_or_invited_users, + newly_left_rooms, + newly_left_users, ) @defer.inlineCallbacks @@ -1414,13 +1404,13 @@ class SyncHandler(object): ) if rooms_changed: - defer.returnValue(True) + return True stream_id = RoomStreamToken.parse_stream_token(since_token.room_key).stream for room_id in sync_result_builder.joined_room_ids: if self.store.has_room_changed_since(room_id, stream_id): - defer.returnValue(True) - defer.returnValue(False) + return True + return False @defer.inlineCallbacks def _get_rooms_changed(self, sync_result_builder, ignored_users): @@ -1637,7 +1627,7 @@ class SyncHandler(object): ) room_entries.append(entry) - defer.returnValue((room_entries, invited, newly_joined_rooms, newly_left_rooms)) + return (room_entries, invited, newly_joined_rooms, newly_left_rooms) @defer.inlineCallbacks def _get_all_rooms(self, sync_result_builder, ignored_users): @@ -1711,7 +1701,7 @@ class SyncHandler(object): ) ) - defer.returnValue((room_entries, invited, [])) + return (room_entries, invited, []) @defer.inlineCallbacks def _generate_room_entry( @@ -1912,7 +1902,7 @@ class SyncHandler(object): joined_room_ids.add(room_id) joined_room_ids = frozenset(joined_room_ids) - defer.returnValue(joined_room_ids) + return joined_room_ids def _action_has_highlight(actions): diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index c3e0c8fc7e..6b661aa93d 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -140,7 +140,7 @@ class TypingHandler(object): if was_present: # No point sending another notification - defer.returnValue(None) + return None self._push_update(member=member, typing=True) @@ -173,7 +173,7 @@ class TypingHandler(object): def _stopped_typing(self, member): if member.user_id not in self._room_typing.get(member.room_id, set()): # No point - defer.returnValue(None) + return None self._member_typing_until.pop(member, None) self._member_last_federation_poke.pop(member, None) diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index 5de9630950..e53669e40d 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -133,7 +133,7 @@ class UserDirectoryHandler(StateDeltasHandler): # If still None then the initial background update hasn't happened yet if self.pos is None: - defer.returnValue(None) + return None # 
Loop round handling deltas until we're up to date while True: diff --git a/synapse/http/client.py b/synapse/http/client.py index 45d5010952..0ac20ebefc 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -294,7 +294,7 @@ class SimpleHttpClient(object): logger.info( "Received response to %s %s: %s", method, redact_uri(uri), response.code ) - defer.returnValue(response) + return response except Exception as e: incoming_responses_counter.labels(method, "ERR").inc() logger.info( @@ -345,7 +345,7 @@ class SimpleHttpClient(object): body = yield make_deferred_yieldable(readBody(response)) if 200 <= response.code < 300: - defer.returnValue(json.loads(body)) + return json.loads(body) else: raise HttpResponseException(response.code, response.phrase, body) @@ -385,7 +385,7 @@ class SimpleHttpClient(object): body = yield make_deferred_yieldable(readBody(response)) if 200 <= response.code < 300: - defer.returnValue(json.loads(body)) + return json.loads(body) else: raise HttpResponseException(response.code, response.phrase, body) @@ -410,7 +410,7 @@ class SimpleHttpClient(object): ValueError: if the response was not JSON """ body = yield self.get_raw(uri, args, headers=headers) - defer.returnValue(json.loads(body)) + return json.loads(body) @defer.inlineCallbacks def put_json(self, uri, json_body, args={}, headers=None): @@ -453,7 +453,7 @@ class SimpleHttpClient(object): body = yield make_deferred_yieldable(readBody(response)) if 200 <= response.code < 300: - defer.returnValue(json.loads(body)) + return json.loads(body) else: raise HttpResponseException(response.code, response.phrase, body) @@ -488,7 +488,7 @@ class SimpleHttpClient(object): body = yield make_deferred_yieldable(readBody(response)) if 200 <= response.code < 300: - defer.returnValue(body) + return body else: raise HttpResponseException(response.code, response.phrase, body) @@ -545,13 +545,11 @@ class SimpleHttpClient(object): except Exception as e: raise_from(SynapseError(502, ("Failed to download remote body: %s" % e)), e) - defer.returnValue( - ( - length, - resp_headers, - response.request.absoluteURI.decode("ascii"), - response.code, - ) + return ( + length, + resp_headers, + response.request.absoluteURI.decode("ascii"), + response.code, ) @@ -627,10 +625,10 @@ class CaptchaServerHttpClient(SimpleHttpClient): try: body = yield make_deferred_yieldable(readBody(response)) - defer.returnValue(body) + return body except PartialDownloadError as e: # twisted dislikes google's response, no content length. 
- defer.returnValue(e.response) + return e.response def encode_urlencode_args(args): diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index 054c321a20..c03ddb724f 100644 --- a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -177,7 +177,7 @@ class MatrixFederationAgent(object): res = yield make_deferred_yieldable( agent.request(method, uri, headers, bodyProducer) ) - defer.returnValue(res) + return res @defer.inlineCallbacks def _route_matrix_uri(self, parsed_uri, lookup_well_known=True): @@ -205,24 +205,20 @@ class MatrixFederationAgent(object): port = parsed_uri.port if port == -1: port = 8448 - defer.returnValue( - _RoutingResult( - host_header=parsed_uri.netloc, - tls_server_name=parsed_uri.host, - target_host=parsed_uri.host, - target_port=port, - ) + return _RoutingResult( + host_header=parsed_uri.netloc, + tls_server_name=parsed_uri.host, + target_host=parsed_uri.host, + target_port=port, ) if parsed_uri.port != -1: # there is an explicit port - defer.returnValue( - _RoutingResult( - host_header=parsed_uri.netloc, - tls_server_name=parsed_uri.host, - target_host=parsed_uri.host, - target_port=parsed_uri.port, - ) + return _RoutingResult( + host_header=parsed_uri.netloc, + tls_server_name=parsed_uri.host, + target_host=parsed_uri.host, + target_port=parsed_uri.port, ) if lookup_well_known: @@ -259,7 +255,7 @@ class MatrixFederationAgent(object): ) res = yield self._route_matrix_uri(new_uri, lookup_well_known=False) - defer.returnValue(res) + return res # try a SRV lookup service_name = b"_matrix._tcp.%s" % (parsed_uri.host,) @@ -283,13 +279,11 @@ class MatrixFederationAgent(object): parsed_uri.host.decode("ascii"), ) - defer.returnValue( - _RoutingResult( - host_header=parsed_uri.netloc, - tls_server_name=parsed_uri.host, - target_host=target_host, - target_port=port, - ) + return _RoutingResult( + host_header=parsed_uri.netloc, + tls_server_name=parsed_uri.host, + target_host=target_host, + target_port=port, ) @defer.inlineCallbacks @@ -314,7 +308,7 @@ class MatrixFederationAgent(object): if cache_period > 0: self._well_known_cache.set(server_name, result, cache_period) - defer.returnValue(result) + return result @defer.inlineCallbacks def _do_get_well_known(self, server_name): @@ -354,7 +348,7 @@ class MatrixFederationAgent(object): # after startup cache_period = WELL_KNOWN_INVALID_CACHE_PERIOD cache_period += random.uniform(0, WELL_KNOWN_DEFAULT_CACHE_PERIOD_JITTER) - defer.returnValue((None, cache_period)) + return (None, cache_period) result = parsed_body["m.server"].encode("ascii") @@ -369,7 +363,7 @@ class MatrixFederationAgent(object): else: cache_period = min(cache_period, WELL_KNOWN_MAX_CACHE_PERIOD) - defer.returnValue((result, cache_period)) + return (result, cache_period) @implementer(IStreamClientEndpoint) diff --git a/synapse/http/federation/srv_resolver.py b/synapse/http/federation/srv_resolver.py index ecc88f9b96..b32188766d 100644 --- a/synapse/http/federation/srv_resolver.py +++ b/synapse/http/federation/srv_resolver.py @@ -120,7 +120,7 @@ class SrvResolver(object): if cache_entry: if all(s.expires > now for s in cache_entry): servers = list(cache_entry) - defer.returnValue(servers) + return servers try: answers, _, _ = yield make_deferred_yieldable( @@ -129,7 +129,7 @@ class SrvResolver(object): except DNSNameError: # TODO: cache this. We can get the SOA out of the exception, and use # the negative-TTL value. 
- defer.returnValue([]) + return [] except DomainError as e: # We failed to resolve the name (other than a NameError) # Try something in the cache, else rereaise @@ -138,7 +138,7 @@ class SrvResolver(object): logger.warn( "Failed to resolve %r, falling back to cache. %r", service_name, e ) - defer.returnValue(list(cache_entry)) + return list(cache_entry) else: raise e @@ -169,4 +169,4 @@ class SrvResolver(object): ) self._cache[service_name] = list(servers) - defer.returnValue(servers) + return servers diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index e60334547e..d07d356464 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -158,7 +158,7 @@ def _handle_json_response(reactor, timeout_sec, request, response): response.code, response.phrase.decode("ascii", errors="replace"), ) - defer.returnValue(body) + return body class MatrixFederationHttpClient(object): @@ -256,7 +256,7 @@ class MatrixFederationHttpClient(object): response = yield self._send_request(request, **send_request_args) - defer.returnValue(response) + return response @defer.inlineCallbacks def _send_request( @@ -520,7 +520,7 @@ class MatrixFederationHttpClient(object): _flatten_response_never_received(e), ) raise - defer.returnValue(response) + return response def build_auth_headers( self, destination, method, url_bytes, content=None, destination_is=None @@ -644,7 +644,7 @@ class MatrixFederationHttpClient(object): self.reactor, self.default_timeout, request, response ) - defer.returnValue(body) + return body @defer.inlineCallbacks def post_json( @@ -713,7 +713,7 @@ class MatrixFederationHttpClient(object): body = yield _handle_json_response( self.reactor, _sec_timeout, request, response ) - defer.returnValue(body) + return body @defer.inlineCallbacks def get_json( @@ -778,7 +778,7 @@ class MatrixFederationHttpClient(object): self.reactor, self.default_timeout, request, response ) - defer.returnValue(body) + return body @defer.inlineCallbacks def delete_json( @@ -836,7 +836,7 @@ class MatrixFederationHttpClient(object): body = yield _handle_json_response( self.reactor, self.default_timeout, request, response ) - defer.returnValue(body) + return body @defer.inlineCallbacks def get_file( @@ -902,7 +902,7 @@ class MatrixFederationHttpClient(object): response.phrase.decode("ascii", errors="replace"), length, ) - defer.returnValue((length, headers)) + return (length, headers) class _ReadBodyToFileProtocol(protocol.Protocol): diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index 96a4714d82..fb338ca223 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -89,7 +89,7 @@ the function becomes the operation name for the span. 
# We start yield we_wait # we finish - defer.returnValue(something_usual_and_useful) + return something_usual_and_useful Operation names can be explicitly set for functions by using ``trace_using_operation_name`` and @@ -113,7 +113,7 @@ Operation names can be explicitly set for functions by using # We start yield we_wait # we finish - defer.returnValue(something_usual_and_useful) + return something_usual_and_useful Contexts and carriers --------------------- @@ -694,7 +694,7 @@ def trace_servlet(servlet_name, func): }, ): result = yield defer.maybeDeferred(func, request, *args, **kwargs) - defer.returnValue(result) + return result return _trace_servlet_inner diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 7bb020cb45..41147d4292 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -101,7 +101,7 @@ class ModuleApi(object): ) user_id = yield self.register_user(localpart, displayname, emails) _, access_token = yield self.register_device(user_id) - defer.returnValue((user_id, access_token)) + return (user_id, access_token) def register_user(self, localpart, displayname=None, emails=[]): """Registers a new user with given localpart and optional displayname, emails. diff --git a/synapse/notifier.py b/synapse/notifier.py index 918ef64897..bd80c801b6 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -365,7 +365,7 @@ class Notifier(object): current_token = user_stream.current_token result = yield callback(prev_token, current_token) - defer.returnValue(result) + return result @defer.inlineCallbacks def get_events_for( @@ -400,7 +400,7 @@ class Notifier(object): @defer.inlineCallbacks def check_for_updates(before_token, after_token): if not after_token.is_after(before_token): - defer.returnValue(EventStreamResult([], (from_token, from_token))) + return EventStreamResult([], (from_token, from_token)) events = [] end_token = from_token @@ -440,7 +440,7 @@ class Notifier(object): events.extend(new_events) end_token = end_token.copy_and_replace(keyname, new_key) - defer.returnValue(EventStreamResult(events, (from_token, end_token))) + return EventStreamResult(events, (from_token, end_token)) user_id_for_stream = user.to_string() if is_peeking: @@ -465,18 +465,18 @@ class Notifier(object): from_token=from_token, ) - defer.returnValue(result) + return result @defer.inlineCallbacks def _get_room_ids(self, user, explicit_room_id): joined_room_ids = yield self.store.get_rooms_for_user(user.to_string()) if explicit_room_id: if explicit_room_id in joined_room_ids: - defer.returnValue(([explicit_room_id], True)) + return ([explicit_room_id], True) if (yield self._is_world_readable(explicit_room_id)): - defer.returnValue(([explicit_room_id], False)) + return ([explicit_room_id], False) raise AuthError(403, "Non-joined access not allowed") - defer.returnValue((joined_room_ids, True)) + return (joined_room_ids, True) @defer.inlineCallbacks def _is_world_readable(self, room_id): @@ -484,9 +484,9 @@ class Notifier(object): room_id, EventTypes.RoomHistoryVisibility, "" ) if state and "history_visibility" in state.content: - defer.returnValue(state.content["history_visibility"] == "world_readable") + return state.content["history_visibility"] == "world_readable" else: - defer.returnValue(False) + return False @log_function def remove_expired_streams(self): diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index c8a5b381da..c831975635 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ 
b/synapse/push/bulk_push_rule_evaluator.py @@ -95,7 +95,7 @@ class BulkPushRuleEvaluator(object): invited ) - defer.returnValue(rules_by_user) + return rules_by_user @cached() def _get_rules_for_room(self, room_id): @@ -134,7 +134,7 @@ class BulkPushRuleEvaluator(object): pl_event = auth_events.get(POWER_KEY) - defer.returnValue((pl_event.content if pl_event else {}, sender_level)) + return (pl_event.content if pl_event else {}, sender_level) @defer.inlineCallbacks def action_for_event_by_user(self, event, context): @@ -283,13 +283,13 @@ class RulesForRoom(object): if state_group and self.state_group == state_group: logger.debug("Using cached rules for %r", self.room_id) self.room_push_rule_cache_metrics.inc_hits() - defer.returnValue(self.rules_by_user) + return self.rules_by_user with (yield self.linearizer.queue(())): if state_group and self.state_group == state_group: logger.debug("Using cached rules for %r", self.room_id) self.room_push_rule_cache_metrics.inc_hits() - defer.returnValue(self.rules_by_user) + return self.rules_by_user self.room_push_rule_cache_metrics.inc_misses() @@ -366,7 +366,7 @@ class RulesForRoom(object): logger.debug( "Returning push rules for %r %r", self.room_id, ret_rules_by_user.keys() ) - defer.returnValue(ret_rules_by_user) + return ret_rules_by_user @defer.inlineCallbacks def _update_rules_with_member_event_ids( diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index 4e7b6a5531..5b15b0dbe7 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -258,17 +258,17 @@ class HttpPusher(object): @defer.inlineCallbacks def _process_one(self, push_action): if "notify" not in push_action["actions"]: - defer.returnValue(True) + return True tweaks = push_rule_evaluator.tweaks_for_actions(push_action["actions"]) badge = yield push_tools.get_badge_count(self.hs.get_datastore(), self.user_id) event = yield self.store.get_event(push_action["event_id"], allow_none=True) if event is None: - defer.returnValue(True) # It's been redacted + return True # It's been redacted rejected = yield self.dispatch_push(event, tweaks, badge) if rejected is False: - defer.returnValue(False) + return False if isinstance(rejected, list) or isinstance(rejected, tuple): for pk in rejected: @@ -282,7 +282,7 @@ class HttpPusher(object): else: logger.info("Pushkey %s was rejected: removing", pk) yield self.hs.remove_pusher(self.app_id, pk, self.user_id) - defer.returnValue(True) + return True @defer.inlineCallbacks def _build_notification_dict(self, event, tweaks, badge): @@ -302,7 +302,7 @@ class HttpPusher(object): ], } } - defer.returnValue(d) + return d ctx = yield push_tools.get_context_for_event( self.store, self.state_handler, event, self.user_id @@ -345,13 +345,13 @@ class HttpPusher(object): if "name" in ctx and len(ctx["name"]) > 0: d["notification"]["room_name"] = ctx["name"] - defer.returnValue(d) + return d @defer.inlineCallbacks def dispatch_push(self, event, tweaks, badge): notification_dict = yield self._build_notification_dict(event, tweaks, badge) if not notification_dict: - defer.returnValue([]) + return [] try: resp = yield self.http_client.post_json_get_json( self.url, notification_dict @@ -364,11 +364,11 @@ class HttpPusher(object): type(e), e, ) - defer.returnValue(False) + return False rejected = [] if "rejected" in resp: rejected = resp["rejected"] - defer.returnValue(rejected) + return rejected @defer.inlineCallbacks def _send_badge(self, badge): diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index 
521c6e2cd7..4245ce26f3 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -316,7 +316,7 @@ class Mailer(object): if not merge: room_vars["notifs"].append(notifvars) - defer.returnValue(room_vars) + return room_vars @defer.inlineCallbacks def get_notif_vars(self, notif, user_id, notif_event, room_state_ids): @@ -343,7 +343,7 @@ class Mailer(object): if messagevars is not None: ret["messages"].append(messagevars) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def get_message_vars(self, notif, event, room_state_ids): @@ -379,7 +379,7 @@ class Mailer(object): if "body" in event.content: ret["body_text_plain"] = event.content["body"] - defer.returnValue(ret) + return ret def add_text_message_vars(self, messagevars, event): msgformat = event.content.get("format") @@ -428,19 +428,16 @@ class Mailer(object): inviter_name = name_from_member_event(inviter_member_event) if room_name is None: - defer.returnValue( - INVITE_FROM_PERSON - % {"person": inviter_name, "app": self.app_name} - ) + return INVITE_FROM_PERSON % { + "person": inviter_name, + "app": self.app_name, + } else: - defer.returnValue( - INVITE_FROM_PERSON_TO_ROOM - % { - "person": inviter_name, - "room": room_name, - "app": self.app_name, - } - ) + return INVITE_FROM_PERSON_TO_ROOM % { + "person": inviter_name, + "room": room_name, + "app": self.app_name, + } sender_name = None if len(notifs_by_room[room_id]) == 1: @@ -454,26 +451,21 @@ class Mailer(object): sender_name = name_from_member_event(state_event) if sender_name is not None and room_name is not None: - defer.returnValue( - MESSAGE_FROM_PERSON_IN_ROOM - % { - "person": sender_name, - "room": room_name, - "app": self.app_name, - } - ) + return MESSAGE_FROM_PERSON_IN_ROOM % { + "person": sender_name, + "room": room_name, + "app": self.app_name, + } elif sender_name is not None: - defer.returnValue( - MESSAGE_FROM_PERSON - % {"person": sender_name, "app": self.app_name} - ) + return MESSAGE_FROM_PERSON % { + "person": sender_name, + "app": self.app_name, + } else: # There's more than one notification for this room, so just # say there are several if room_name is not None: - defer.returnValue( - MESSAGES_IN_ROOM % {"room": room_name, "app": self.app_name} - ) + return MESSAGES_IN_ROOM % {"room": room_name, "app": self.app_name} else: # If the room doesn't have a name, say who the messages # are from explicitly to avoid, "messages in the Bob room" @@ -493,24 +485,19 @@ class Mailer(object): ] ) - defer.returnValue( - MESSAGES_FROM_PERSON - % { - "person": descriptor_from_member_events( - member_events.values() - ), - "app": self.app_name, - } - ) + return MESSAGES_FROM_PERSON % { + "person": descriptor_from_member_events(member_events.values()), + "app": self.app_name, + } else: # Stuff's happened in multiple different rooms # ...but we still refer to the 'reason' room which triggered the mail if reason["room_name"] is not None: - defer.returnValue( - MESSAGES_IN_ROOM_AND_OTHERS - % {"room": reason["room_name"], "app": self.app_name} - ) + return MESSAGES_IN_ROOM_AND_OTHERS % { + "room": reason["room_name"], + "app": self.app_name, + } else: # If the reason room doesn't have a name, say who the messages # are from explicitly to avoid, "messages in the Bob room" @@ -527,13 +514,10 @@ class Mailer(object): [room_state_ids[room_id][("m.room.member", s)] for s in sender_ids] ) - defer.returnValue( - MESSAGES_FROM_PERSON_AND_OTHERS - % { - "person": descriptor_from_member_events(member_events.values()), - "app": self.app_name, - } - ) + return 
MESSAGES_FROM_PERSON_AND_OTHERS % { + "person": descriptor_from_member_events(member_events.values()), + "app": self.app_name, + } def make_room_link(self, room_id): if self.hs.config.email_riot_base_url: diff --git a/synapse/push/presentable_names.py b/synapse/push/presentable_names.py index 06056fbf4f..16a7e8e31d 100644 --- a/synapse/push/presentable_names.py +++ b/synapse/push/presentable_names.py @@ -55,7 +55,7 @@ def calculate_room_name( room_state_ids[("m.room.name", "")], allow_none=True ) if m_room_name and m_room_name.content and m_room_name.content["name"]: - defer.returnValue(m_room_name.content["name"]) + return m_room_name.content["name"] # does it have a canonical alias? if ("m.room.canonical_alias", "") in room_state_ids: @@ -68,7 +68,7 @@ def calculate_room_name( and canon_alias.content["alias"] and _looks_like_an_alias(canon_alias.content["alias"]) ): - defer.returnValue(canon_alias.content["alias"]) + return canon_alias.content["alias"] # at this point we're going to need to search the state by all state keys # for an event type, so rearrange the data structure @@ -82,10 +82,10 @@ def calculate_room_name( if alias_event and alias_event.content.get("aliases"): the_aliases = alias_event.content["aliases"] if len(the_aliases) > 0 and _looks_like_an_alias(the_aliases[0]): - defer.returnValue(the_aliases[0]) + return the_aliases[0] if not fallback_to_members: - defer.returnValue(None) + return None my_member_event = None if ("m.room.member", user_id) in room_state_ids: @@ -104,14 +104,13 @@ def calculate_room_name( ) if inviter_member_event: if fallback_to_single_member: - defer.returnValue( - "Invite from %s" - % (name_from_member_event(inviter_member_event),) + return "Invite from %s" % ( + name_from_member_event(inviter_member_event), ) else: return else: - defer.returnValue("Room Invite") + return "Room Invite" # we're going to have to generate a name based on who's in the room, # so find out who is in the room that isn't the user. 
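The surrounding calculate_room_name hunks mix a bare return (used as an early exit) with return None; under inlineCallbacks both fire the Deferred with None, so either form is a faithful replacement for the old defer.returnValue(None). A small illustrative sketch, again with made-up names and a defer.succeed stand-in rather than the real room-state lookups:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def name_or_none(have_name):
        yield defer.succeed(None)  # stand-in for the room-state lookups
        if have_name:
            return "Some Room"     # Deferred fires with the name
        return None                # a bare "return" here would behave identically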
@@ -154,17 +153,17 @@ def calculate_room_name( # return "Inviting %s" % ( # descriptor_from_member_events(third_party_invites) # ) - defer.returnValue("Inviting email address") + return "Inviting email address" else: - defer.returnValue(ALL_ALONE) + return ALL_ALONE else: - defer.returnValue(name_from_member_event(all_members[0])) + return name_from_member_event(all_members[0]) else: - defer.returnValue(ALL_ALONE) + return ALL_ALONE elif len(other_members) == 1 and not fallback_to_single_member: return else: - defer.returnValue(descriptor_from_member_events(other_members)) + return descriptor_from_member_events(other_members) def descriptor_from_member_events(member_events): diff --git a/synapse/push/push_tools.py b/synapse/push/push_tools.py index e37269cdb9..a54051a726 100644 --- a/synapse/push/push_tools.py +++ b/synapse/push/push_tools.py @@ -39,7 +39,7 @@ def get_badge_count(store, user_id): # return one badge count per conversation, as count per # message is so noisy as to be almost useless badge += 1 if notifs["notify_count"] else 0 - defer.returnValue(badge) + return badge @defer.inlineCallbacks @@ -61,4 +61,4 @@ def get_context_for_event(store, state_handler, ev, user_id): sender_state_event = yield store.get_event(sender_state_event_id) ctx["sender_display_name"] = name_from_member_event(sender_state_event) - defer.returnValue(ctx) + return ctx diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index df6f670740..08e840fdc2 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -123,7 +123,7 @@ class PusherPool: ) pusher = yield self.start_pusher_by_id(app_id, pushkey, user_id) - defer.returnValue(pusher) + return pusher @defer.inlineCallbacks def remove_pushers_by_app_id_and_pushkey_not_user( @@ -224,7 +224,7 @@ class PusherPool: if pusher_dict: pusher = yield self._start_pusher(pusher_dict) - defer.returnValue(pusher) + return pusher @defer.inlineCallbacks def _start_pushers(self): @@ -293,7 +293,7 @@ class PusherPool: p.on_started(have_notifs) - defer.returnValue(p) + return p @defer.inlineCallbacks def remove_pusher(self, app_id, pushkey, user_id): diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index fe482e279f..f5074b101a 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -185,7 +185,7 @@ class ReplicationEndpoint(object): except RequestSendFailed as e: raise_from(SynapseError(502, "Failed to talk to master"), e) - defer.returnValue(result) + return result return send_request diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index 61eafbe708..fed4f08820 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -80,7 +80,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint): payload = {"events": event_payloads, "backfilled": backfilled} - defer.returnValue(payload) + return payload @defer.inlineCallbacks def _handle_request(self, request): @@ -113,7 +113,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint): event_and_contexts, backfilled ) - defer.returnValue((200, {})) + return (200, {}) class ReplicationFederationSendEduRestServlet(ReplicationEndpoint): @@ -156,7 +156,7 @@ class ReplicationFederationSendEduRestServlet(ReplicationEndpoint): result = yield self.registry.on_edu(edu_type, origin, edu_content) - defer.returnValue((200, result)) + return (200, result) class ReplicationGetQueryRestServlet(ReplicationEndpoint): @@ -204,7 +204,7 
@@ class ReplicationGetQueryRestServlet(ReplicationEndpoint): result = yield self.registry.on_query(query_type, args) - defer.returnValue((200, result)) + return (200, result) class ReplicationCleanRoomRestServlet(ReplicationEndpoint): @@ -238,7 +238,7 @@ class ReplicationCleanRoomRestServlet(ReplicationEndpoint): def _handle_request(self, request, room_id): yield self.store.clean_room_for_join(room_id) - defer.returnValue((200, {})) + return (200, {}) def register_servlets(hs, http_server): diff --git a/synapse/replication/http/login.py b/synapse/replication/http/login.py index 7c1197e5dd..f17d3a2da4 100644 --- a/synapse/replication/http/login.py +++ b/synapse/replication/http/login.py @@ -64,7 +64,7 @@ class RegisterDeviceReplicationServlet(ReplicationEndpoint): user_id, device_id, initial_display_name, is_guest ) - defer.returnValue((200, {"device_id": device_id, "access_token": access_token})) + return (200, {"device_id": device_id, "access_token": access_token}) def register_servlets(hs, http_server): diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py index 2d9cbbaefc..4217335d88 100644 --- a/synapse/replication/http/membership.py +++ b/synapse/replication/http/membership.py @@ -83,7 +83,7 @@ class ReplicationRemoteJoinRestServlet(ReplicationEndpoint): remote_room_hosts, room_id, user_id, event_content ) - defer.returnValue((200, {})) + return (200, {}) class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint): @@ -153,7 +153,7 @@ class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint): yield self.store.locally_reject_invite(user_id, room_id) ret = {} - defer.returnValue((200, ret)) + return (200, ret) class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint): diff --git a/synapse/replication/http/register.py b/synapse/replication/http/register.py index 2bf2173895..3341320a87 100644 --- a/synapse/replication/http/register.py +++ b/synapse/replication/http/register.py @@ -90,7 +90,7 @@ class ReplicationRegisterServlet(ReplicationEndpoint): address=content["address"], ) - defer.returnValue((200, {})) + return (200, {}) class ReplicationPostRegisterActionsServlet(ReplicationEndpoint): @@ -143,7 +143,7 @@ class ReplicationPostRegisterActionsServlet(ReplicationEndpoint): bind_msisdn=bind_msisdn, ) - defer.returnValue((200, {})) + return (200, {}) def register_servlets(hs, http_server): diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py index 034763fe99..eff7bd7305 100644 --- a/synapse/replication/http/send_event.py +++ b/synapse/replication/http/send_event.py @@ -85,7 +85,7 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint): "extra_users": [u.to_string() for u in extra_users], } - defer.returnValue(payload) + return payload @defer.inlineCallbacks def _handle_request(self, request, event_id): @@ -117,7 +117,7 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint): requester, event, context, ratelimit=ratelimit, extra_users=extra_users ) - defer.returnValue((200, {})) + return (200, {}) def register_servlets(hs, http_server): diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py index 7ef67a5a73..c10b85d2ff 100644 --- a/synapse/replication/tcp/streams/_base.py +++ b/synapse/replication/tcp/streams/_base.py @@ -158,7 +158,7 @@ class Stream(object): updates, current_token = yield self.get_updates_since(self.last_token) self.last_token = current_token - defer.returnValue((updates, current_token)) + return (updates, 
current_token) @defer.inlineCallbacks def get_updates_since(self, from_token): @@ -172,14 +172,14 @@ class Stream(object): sent over the replication steam. """ if from_token in ("NOW", "now"): - defer.returnValue(([], self.upto_token)) + return ([], self.upto_token) current_token = self.upto_token from_token = int(from_token) if from_token == current_token: - defer.returnValue(([], current_token)) + return ([], current_token) if self._LIMITED: rows = yield self.update_function( @@ -198,7 +198,7 @@ class Stream(object): if self._LIMITED and len(updates) >= MAX_EVENTS_BEHIND: raise Exception("stream %s has fallen behind" % (self.NAME)) - defer.returnValue((updates, current_token)) + return (updates, current_token) def current_token(self): """Gets the current token of the underlying streams. Should be provided @@ -297,7 +297,7 @@ class PushRulesStream(Stream): @defer.inlineCallbacks def update_function(self, from_token, to_token, limit): rows = yield self.store.get_all_push_rule_updates(from_token, to_token, limit) - defer.returnValue([(row[0], row[2]) for row in rows]) + return [(row[0], row[2]) for row in rows] class PushersStream(Stream): @@ -424,7 +424,7 @@ class AccountDataStream(Stream): for stream_id, user_id, account_data_type, content in global_results ) - defer.returnValue(results) + return results class GroupServerStream(Stream): diff --git a/synapse/replication/tcp/streams/events.py b/synapse/replication/tcp/streams/events.py index 3d0694bb11..d97669c886 100644 --- a/synapse/replication/tcp/streams/events.py +++ b/synapse/replication/tcp/streams/events.py @@ -134,7 +134,7 @@ class EventsStream(Stream): all_updates = heapq.merge(event_updates, state_updates) - defer.returnValue(all_updates) + return all_updates @classmethod def parse_row(cls, row): diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 6888ae5590..0a7d9b81b2 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -84,7 +84,7 @@ class UsersRestServlet(RestServlet): ret = yield self.handlers.admin_handler.get_users() - defer.returnValue((200, ret)) + return (200, ret) class VersionServlet(RestServlet): @@ -227,7 +227,7 @@ class UserRegisterServlet(RestServlet): ) result = yield register._create_registration_details(user_id, body) - defer.returnValue((200, result)) + return (200, result) class WhoisRestServlet(RestServlet): @@ -252,7 +252,7 @@ class WhoisRestServlet(RestServlet): ret = yield self.handlers.admin_handler.get_whois(target_user) - defer.returnValue((200, ret)) + return (200, ret) class PurgeMediaCacheRestServlet(RestServlet): @@ -271,7 +271,7 @@ class PurgeMediaCacheRestServlet(RestServlet): ret = yield self.media_repository.delete_old_remote_media(before_ts) - defer.returnValue((200, ret)) + return (200, ret) class PurgeHistoryRestServlet(RestServlet): @@ -356,7 +356,7 @@ class PurgeHistoryRestServlet(RestServlet): room_id, token, delete_local_events=delete_local_events ) - defer.returnValue((200, {"purge_id": purge_id})) + return (200, {"purge_id": purge_id}) class PurgeHistoryStatusRestServlet(RestServlet): @@ -381,7 +381,7 @@ class PurgeHistoryStatusRestServlet(RestServlet): if purge_status is None: raise NotFoundError("purge id '%s' not found" % purge_id) - defer.returnValue((200, purge_status.asdict())) + return (200, purge_status.asdict()) class DeactivateAccountRestServlet(RestServlet): @@ -413,7 +413,7 @@ class DeactivateAccountRestServlet(RestServlet): else: id_server_unbind_result = "no-support" - defer.returnValue((200, 
{"id_server_unbind_result": id_server_unbind_result})) + return (200, {"id_server_unbind_result": id_server_unbind_result}) class ShutdownRoomRestServlet(RestServlet): @@ -531,16 +531,14 @@ class ShutdownRoomRestServlet(RestServlet): room_id, new_room_id, requester_user_id ) - defer.returnValue( - ( - 200, - { - "kicked_users": kicked_users, - "failed_to_kick_users": failed_to_kick_users, - "local_aliases": aliases_for_room, - "new_room_id": new_room_id, - }, - ) + return ( + 200, + { + "kicked_users": kicked_users, + "failed_to_kick_users": failed_to_kick_users, + "local_aliases": aliases_for_room, + "new_room_id": new_room_id, + }, ) @@ -564,7 +562,7 @@ class QuarantineMediaInRoom(RestServlet): room_id, requester.user.to_string() ) - defer.returnValue((200, {"num_quarantined": num_quarantined})) + return (200, {"num_quarantined": num_quarantined}) class ListMediaInRoom(RestServlet): @@ -585,7 +583,7 @@ class ListMediaInRoom(RestServlet): local_mxcs, remote_mxcs = yield self.store.get_media_mxcs_in_room(room_id) - defer.returnValue((200, {"local": local_mxcs, "remote": remote_mxcs})) + return (200, {"local": local_mxcs, "remote": remote_mxcs}) class ResetPasswordRestServlet(RestServlet): @@ -629,7 +627,7 @@ class ResetPasswordRestServlet(RestServlet): yield self._set_password_handler.set_password( target_user_id, new_password, requester ) - defer.returnValue((200, {})) + return (200, {}) class GetUsersPaginatedRestServlet(RestServlet): @@ -671,7 +669,7 @@ class GetUsersPaginatedRestServlet(RestServlet): logger.info("limit: %s, start: %s", limit, start) ret = yield self.handlers.admin_handler.get_users_paginate(order, start, limit) - defer.returnValue((200, ret)) + return (200, ret) @defer.inlineCallbacks def on_POST(self, request, target_user_id): @@ -699,7 +697,7 @@ class GetUsersPaginatedRestServlet(RestServlet): logger.info("limit: %s, start: %s", limit, start) ret = yield self.handlers.admin_handler.get_users_paginate(order, start, limit) - defer.returnValue((200, ret)) + return (200, ret) class SearchUsersRestServlet(RestServlet): @@ -742,7 +740,7 @@ class SearchUsersRestServlet(RestServlet): logger.info("term: %s ", term) ret = yield self.handlers.admin_handler.search_users(term) - defer.returnValue((200, ret)) + return (200, ret) class DeleteGroupAdminRestServlet(RestServlet): @@ -765,7 +763,7 @@ class DeleteGroupAdminRestServlet(RestServlet): raise SynapseError(400, "Can only delete local groups") yield self.group_server.delete_group(group_id, requester.user.to_string()) - defer.returnValue((200, {})) + return (200, {}) class AccountValidityRenewServlet(RestServlet): @@ -796,7 +794,7 @@ class AccountValidityRenewServlet(RestServlet): ) res = {"expiration_ts": expiration_ts} - defer.returnValue((200, res)) + return (200, res) ######################################################################################## diff --git a/synapse/rest/admin/server_notice_servlet.py b/synapse/rest/admin/server_notice_servlet.py index ee66838a0d..90c0ee15dc 100644 --- a/synapse/rest/admin/server_notice_servlet.py +++ b/synapse/rest/admin/server_notice_servlet.py @@ -87,7 +87,7 @@ class SendServerNoticeServlet(RestServlet): event_content=body["content"], ) - defer.returnValue((200, {"event_id": event.event_id})) + return (200, {"event_id": event.event_id}) def on_PUT(self, request, txn_id): return self.txns.fetch_or_execute_request( diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py index 57542c2b4b..4284738021 100644 --- a/synapse/rest/client/v1/directory.py 
+++ b/synapse/rest/client/v1/directory.py @@ -54,7 +54,7 @@ class ClientDirectoryServer(RestServlet): dir_handler = self.handlers.directory_handler res = yield dir_handler.get_association(room_alias) - defer.returnValue((200, res)) + return (200, res) @defer.inlineCallbacks def on_PUT(self, request, room_alias): @@ -87,7 +87,7 @@ class ClientDirectoryServer(RestServlet): requester, room_alias, room_id, servers ) - defer.returnValue((200, {})) + return (200, {}) @defer.inlineCallbacks def on_DELETE(self, request, room_alias): @@ -102,7 +102,7 @@ class ClientDirectoryServer(RestServlet): service.url, room_alias.to_string(), ) - defer.returnValue((200, {})) + return (200, {}) except InvalidClientCredentialsError: # fallback to default user behaviour if they aren't an AS pass @@ -118,7 +118,7 @@ class ClientDirectoryServer(RestServlet): "User %s deleted alias %s", user.to_string(), room_alias.to_string() ) - defer.returnValue((200, {})) + return (200, {}) class ClientDirectoryListServer(RestServlet): @@ -136,9 +136,7 @@ class ClientDirectoryListServer(RestServlet): if room is None: raise NotFoundError("Unknown room") - defer.returnValue( - (200, {"visibility": "public" if room["is_public"] else "private"}) - ) + return (200, {"visibility": "public" if room["is_public"] else "private"}) @defer.inlineCallbacks def on_PUT(self, request, room_id): @@ -151,7 +149,7 @@ class ClientDirectoryListServer(RestServlet): requester, room_id, visibility ) - defer.returnValue((200, {})) + return (200, {}) @defer.inlineCallbacks def on_DELETE(self, request, room_id): @@ -161,7 +159,7 @@ class ClientDirectoryListServer(RestServlet): requester, room_id, "private" ) - defer.returnValue((200, {})) + return (200, {}) class ClientAppserviceDirectoryListServer(RestServlet): @@ -195,4 +193,4 @@ class ClientAppserviceDirectoryListServer(RestServlet): requester.app_service.id, network_id, room_id, visibility ) - defer.returnValue((200, {})) + return (200, {}) diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py index d6de2b7360..53ebed2203 100644 --- a/synapse/rest/client/v1/events.py +++ b/synapse/rest/client/v1/events.py @@ -67,7 +67,7 @@ class EventStreamRestServlet(RestServlet): is_guest=is_guest, ) - defer.returnValue((200, chunk)) + return (200, chunk) def on_OPTIONS(self, request): return (200, {}) @@ -91,9 +91,9 @@ class EventRestServlet(RestServlet): time_now = self.clock.time_msec() if event: event = yield self._event_serializer.serialize_event(event, time_now) - defer.returnValue((200, event)) + return (200, event) else: - defer.returnValue((404, "Event not found.")) + return (404, "Event not found.") def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v1/initial_sync.py b/synapse/rest/client/v1/initial_sync.py index 0fe5f2d79b..70b8478e90 100644 --- a/synapse/rest/client/v1/initial_sync.py +++ b/synapse/rest/client/v1/initial_sync.py @@ -42,7 +42,7 @@ class InitialSyncRestServlet(RestServlet): include_archived=include_archived, ) - defer.returnValue((200, content)) + return (200, content) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 0d05945f0a..5762b9fd06 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -152,7 +152,7 @@ class LoginRestServlet(RestServlet): well_known_data = self._well_known_builder.get_well_known() if well_known_data: result["well_known"] = well_known_data - defer.returnValue((200, result)) + return (200, result) 
@defer.inlineCallbacks def _do_other_login(self, login_submission): @@ -212,7 +212,7 @@ class LoginRestServlet(RestServlet): result = yield self._register_device_with_callback( canonical_user_id, login_submission, callback_3pid ) - defer.returnValue(result) + return result # No password providers were able to handle this 3pid # Check local store @@ -241,7 +241,7 @@ class LoginRestServlet(RestServlet): result = yield self._register_device_with_callback( canonical_user_id, login_submission, callback ) - defer.returnValue(result) + return result @defer.inlineCallbacks def _register_device_with_callback(self, user_id, login_submission, callback=None): @@ -273,7 +273,7 @@ class LoginRestServlet(RestServlet): if callback is not None: yield callback(result) - defer.returnValue(result) + return result @defer.inlineCallbacks def do_token_login(self, login_submission): @@ -284,7 +284,7 @@ class LoginRestServlet(RestServlet): ) result = yield self._register_device_with_callback(user_id, login_submission) - defer.returnValue(result) + return result @defer.inlineCallbacks def do_jwt_login(self, login_submission): @@ -321,7 +321,7 @@ class LoginRestServlet(RestServlet): result = yield self._register_device_with_callback( registered_user_id, login_submission ) - defer.returnValue(result) + return result class BaseSSORedirectServlet(RestServlet): @@ -395,7 +395,7 @@ class CasTicketServlet(RestServlet): # even if that's being used old-http style to signal end-of-data body = pde.response result = yield self.handle_cas_response(request, body, client_redirect_url) - defer.returnValue(result) + return result def handle_cas_response(self, request, cas_response_body, client_redirect_url): user, attributes = self.parse_cas_response(cas_response_body) diff --git a/synapse/rest/client/v1/logout.py b/synapse/rest/client/v1/logout.py index cd711be519..2769f3a189 100644 --- a/synapse/rest/client/v1/logout.py +++ b/synapse/rest/client/v1/logout.py @@ -49,7 +49,7 @@ class LogoutRestServlet(RestServlet): requester.user.to_string(), requester.device_id ) - defer.returnValue((200, {})) + return (200, {}) class LogoutAllRestServlet(RestServlet): @@ -75,7 +75,7 @@ class LogoutAllRestServlet(RestServlet): # .. and then delete any access tokens which weren't associated with # devices. 
yield self._auth_handler.delete_access_tokens_for_user(user_id) - defer.returnValue((200, {})) + return (200, {}) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v1/presence.py b/synapse/rest/client/v1/presence.py index 3e87f0fdb3..1eb1068c98 100644 --- a/synapse/rest/client/v1/presence.py +++ b/synapse/rest/client/v1/presence.py @@ -56,7 +56,7 @@ class PresenceStatusRestServlet(RestServlet): state = yield self.presence_handler.get_state(target_user=user) state = format_user_presence_state(state, self.clock.time_msec()) - defer.returnValue((200, state)) + return (200, state) @defer.inlineCallbacks def on_PUT(self, request, user_id): @@ -88,7 +88,7 @@ class PresenceStatusRestServlet(RestServlet): if self.hs.config.use_presence: yield self.presence_handler.set_state(user, state) - defer.returnValue((200, {})) + return (200, {}) def on_OPTIONS(self, request): return (200, {}) diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py index 4d8ab1f47e..2657ae45bb 100644 --- a/synapse/rest/client/v1/profile.py +++ b/synapse/rest/client/v1/profile.py @@ -48,7 +48,7 @@ class ProfileDisplaynameRestServlet(RestServlet): if displayname is not None: ret["displayname"] = displayname - defer.returnValue((200, ret)) + return (200, ret) @defer.inlineCallbacks def on_PUT(self, request, user_id): @@ -61,11 +61,11 @@ class ProfileDisplaynameRestServlet(RestServlet): try: new_name = content["displayname"] except Exception: - defer.returnValue((400, "Unable to parse name")) + return (400, "Unable to parse name") yield self.profile_handler.set_displayname(user, requester, new_name, is_admin) - defer.returnValue((200, {})) + return (200, {}) def on_OPTIONS(self, request, user_id): return (200, {}) @@ -98,7 +98,7 @@ class ProfileAvatarURLRestServlet(RestServlet): if avatar_url is not None: ret["avatar_url"] = avatar_url - defer.returnValue((200, ret)) + return (200, ret) @defer.inlineCallbacks def on_PUT(self, request, user_id): @@ -110,11 +110,11 @@ class ProfileAvatarURLRestServlet(RestServlet): try: new_name = content["avatar_url"] except Exception: - defer.returnValue((400, "Unable to parse name")) + return (400, "Unable to parse name") yield self.profile_handler.set_avatar_url(user, requester, new_name, is_admin) - defer.returnValue((200, {})) + return (200, {}) def on_OPTIONS(self, request, user_id): return (200, {}) @@ -150,7 +150,7 @@ class ProfileRestServlet(RestServlet): if avatar_url is not None: ret["avatar_url"] = avatar_url - defer.returnValue((200, ret)) + return (200, ret) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index e635efb420..c3ae8b98a8 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -69,7 +69,7 @@ class PushRuleRestServlet(RestServlet): if "attr" in spec: yield self.set_rule_attr(user_id, spec, content) self.notify_user(user_id) - defer.returnValue((200, {})) + return (200, {}) if spec["rule_id"].startswith("."): # Rule ids starting with '.' are reserved for server default rules. 
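Most of the remaining hunks are in REST servlets, where on_GET/on_PUT/on_DELETE handlers now end with return (code, json_body) instead of defer.returnValue((code, json_body)). A stripped-down sketch of the converted shape (ExampleServlet and the defer.succeed stand-in are illustrative and deliberately omit the real synapse RestServlet plumbing):

    from twisted.internet import defer

    class ExampleServlet(object):
        @defer.inlineCallbacks
        def on_GET(self, request):
            body = yield defer.succeed({"rules": []})  # stand-in for a store call
            return (200, body)  # was: defer.returnValue((200, body))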
@@ -106,7 +106,7 @@ class PushRuleRestServlet(RestServlet): except RuleNotFoundException as e: raise SynapseError(400, str(e)) - defer.returnValue((200, {})) + return (200, {}) @defer.inlineCallbacks def on_DELETE(self, request, path): @@ -123,7 +123,7 @@ class PushRuleRestServlet(RestServlet): try: yield self.store.delete_push_rule(user_id, namespaced_rule_id) self.notify_user(user_id) - defer.returnValue((200, {})) + return (200, {}) except StoreError as e: if e.code == 404: raise NotFoundError() @@ -151,10 +151,10 @@ class PushRuleRestServlet(RestServlet): ) if path[0] == "": - defer.returnValue((200, rules)) + return (200, rules) elif path[0] == "global": result = _filter_ruleset_with_path(rules["global"], path[1:]) - defer.returnValue((200, result)) + return (200, result) else: raise UnrecognizedRequestError() diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py index e9246018df..ebc3dec516 100644 --- a/synapse/rest/client/v1/pusher.py +++ b/synapse/rest/client/v1/pusher.py @@ -62,7 +62,7 @@ class PushersRestServlet(RestServlet): if k not in allowed_keys: del p[k] - defer.returnValue((200, {"pushers": pushers})) + return (200, {"pushers": pushers}) def on_OPTIONS(self, _): return 200, {} @@ -94,7 +94,7 @@ class PushersSetRestServlet(RestServlet): yield self.pusher_pool.remove_pusher( content["app_id"], content["pushkey"], user_id=user.to_string() ) - defer.returnValue((200, {})) + return (200, {}) assert_params_in_dict( content, @@ -143,7 +143,7 @@ class PushersSetRestServlet(RestServlet): self.notifier.on_new_replication_data() - defer.returnValue((200, {})) + return (200, {}) def on_OPTIONS(self, _): return 200, {} @@ -190,7 +190,7 @@ class PushersRemoveRestServlet(RestServlet): ) request.write(PushersRemoveRestServlet.SUCCESS_HTML) finish_request(request) - defer.returnValue(None) + return None def on_OPTIONS(self, _): return 200, {} diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index 7709c2d705..012e7a44a6 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -85,7 +85,7 @@ class RoomCreateRestServlet(TransactionRestServlet): requester, self.get_room_config(request) ) - defer.returnValue((200, info)) + return (200, info) def get_room_config(self, request): user_supplied_config = parse_json_object_from_request(request) @@ -155,9 +155,9 @@ class RoomStateEventRestServlet(TransactionRestServlet): if format == "event": event = format_event_for_client_v2(data.get_dict()) - defer.returnValue((200, event)) + return (200, event) elif format == "content": - defer.returnValue((200, data.get_dict()["content"])) + return (200, data.get_dict()["content"]) @defer.inlineCallbacks def on_PUT(self, request, room_id, event_type, state_key, txn_id=None): @@ -192,7 +192,7 @@ class RoomStateEventRestServlet(TransactionRestServlet): ret = {} if event: ret = {"event_id": event.event_id} - defer.returnValue((200, ret)) + return (200, ret) # TODO: Needs unit testing for generic events + feedback @@ -226,7 +226,7 @@ class RoomSendEventRestServlet(TransactionRestServlet): requester, event_dict, txn_id=txn_id ) - defer.returnValue((200, {"event_id": event.event_id})) + return (200, {"event_id": event.event_id}) def on_GET(self, request, room_id, event_type, txn_id): return (200, "Not implemented") @@ -289,7 +289,7 @@ class JoinRoomAliasServlet(TransactionRestServlet): third_party_signed=content.get("third_party_signed", None), ) - defer.returnValue((200, {"room_id": room_id})) + return (200, {"room_id": room_id}) def 
on_PUT(self, request, room_identifier, txn_id): return self.txns.fetch_or_execute_request( @@ -342,7 +342,7 @@ class PublicRoomListRestServlet(TransactionRestServlet): limit=limit, since_token=since_token ) - defer.returnValue((200, data)) + return (200, data) @defer.inlineCallbacks def on_POST(self, request): @@ -387,7 +387,7 @@ class PublicRoomListRestServlet(TransactionRestServlet): network_tuple=network_tuple, ) - defer.returnValue((200, data)) + return (200, data) # TODO: Needs unit testing @@ -438,7 +438,7 @@ class RoomMemberListRestServlet(RestServlet): continue chunk.append(event) - defer.returnValue((200, {"chunk": chunk})) + return (200, {"chunk": chunk}) # deprecated in favour of /members?membership=join? @@ -459,7 +459,7 @@ class JoinedRoomMemberListRestServlet(RestServlet): requester, room_id ) - defer.returnValue((200, {"joined": users_with_profile})) + return (200, {"joined": users_with_profile}) # TODO: Needs better unit testing @@ -492,7 +492,7 @@ class RoomMessageListRestServlet(RestServlet): event_filter=event_filter, ) - defer.returnValue((200, msgs)) + return (200, msgs) # TODO: Needs unit testing @@ -513,7 +513,7 @@ class RoomStateRestServlet(RestServlet): user_id=requester.user.to_string(), is_guest=requester.is_guest, ) - defer.returnValue((200, events)) + return (200, events) # TODO: Needs unit testing @@ -532,7 +532,7 @@ class RoomInitialSyncRestServlet(RestServlet): content = yield self.initial_sync_handler.room_initial_sync( room_id=room_id, requester=requester, pagin_config=pagination_config ) - defer.returnValue((200, content)) + return (200, content) class RoomEventServlet(RestServlet): @@ -555,9 +555,9 @@ class RoomEventServlet(RestServlet): time_now = self.clock.time_msec() if event: event = yield self._event_serializer.serialize_event(event, time_now) - defer.returnValue((200, event)) + return (200, event) else: - defer.returnValue((404, "Event not found.")) + return (404, "Event not found.") class RoomEventContextServlet(RestServlet): @@ -607,7 +607,7 @@ class RoomEventContextServlet(RestServlet): results["state"], time_now ) - defer.returnValue((200, results)) + return (200, results) class RoomForgetRestServlet(TransactionRestServlet): @@ -626,7 +626,7 @@ class RoomForgetRestServlet(TransactionRestServlet): yield self.room_member_handler.forget(user=requester.user, room_id=room_id) - defer.returnValue((200, {})) + return (200, {}) def on_PUT(self, request, room_id, txn_id): return self.txns.fetch_or_execute_request( @@ -676,7 +676,7 @@ class RoomMembershipRestServlet(TransactionRestServlet): requester, txn_id, ) - defer.returnValue((200, {})) + return (200, {}) return target = requester.user @@ -703,7 +703,7 @@ class RoomMembershipRestServlet(TransactionRestServlet): if membership_action == "join": return_value["room_id"] = room_id - defer.returnValue((200, return_value)) + return (200, return_value) def _has_3pid_invite_keys(self, content): for key in {"id_server", "medium", "address"}: @@ -745,7 +745,7 @@ class RoomRedactEventRestServlet(TransactionRestServlet): txn_id=txn_id, ) - defer.returnValue((200, {"event_id": event.event_id})) + return (200, {"event_id": event.event_id}) def on_PUT(self, request, room_id, event_id, txn_id): return self.txns.fetch_or_execute_request( @@ -790,7 +790,7 @@ class RoomTypingRestServlet(RestServlet): target_user=target_user, auth_user=requester.user, room_id=room_id ) - defer.returnValue((200, {})) + return (200, {}) class SearchRestServlet(RestServlet): @@ -812,7 +812,7 @@ class SearchRestServlet(RestServlet): 
requester.user, content, batch ) - defer.returnValue((200, results)) + return (200, results) class JoinedRoomsRestServlet(RestServlet): @@ -828,7 +828,7 @@ class JoinedRoomsRestServlet(RestServlet): requester = yield self.auth.get_user_by_req(request, allow_guest=True) room_ids = yield self.store.get_rooms_for_user(requester.user.to_string()) - defer.returnValue((200, {"joined_rooms": list(room_ids)})) + return (200, {"joined_rooms": list(room_ids)}) def register_txn_path(servlet, regex_string, http_server, with_get=False): diff --git a/synapse/rest/client/v1/voip.py b/synapse/rest/client/v1/voip.py index 41b3171ac8..497cddf8b8 100644 --- a/synapse/rest/client/v1/voip.py +++ b/synapse/rest/client/v1/voip.py @@ -60,18 +60,16 @@ class VoipRestServlet(RestServlet): password = turnPassword else: - defer.returnValue((200, {})) - - defer.returnValue( - ( - 200, - { - "username": username, - "password": password, - "ttl": userLifetime / 1000, - "uris": turnUris, - }, - ) + return (200, {}) + + return ( + 200, + { + "username": username, + "password": password, + "ttl": userLifetime / 1000, + "uris": turnUris, + }, ) def on_OPTIONS(self, request): diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index f143d8b85c..7ac456812a 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -117,7 +117,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet): # Wrap the session id in a JSON object ret = {"sid": sid} - defer.returnValue((200, ret)) + return (200, ret) @defer.inlineCallbacks def send_password_reset(self, email, client_secret, send_attempt, next_link=None): @@ -149,7 +149,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet): # Check that the send_attempt is higher than previous attempts if send_attempt <= last_send_attempt: # If not, just return a success without sending an email - defer.returnValue(session_id) + return session_id else: # An non-validated session does not exist yet. 
# Generate a session id @@ -185,7 +185,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet): token_expires, ) - defer.returnValue(session_id) + return session_id class MsisdnPasswordRequestTokenRestServlet(RestServlet): @@ -221,7 +221,7 @@ class MsisdnPasswordRequestTokenRestServlet(RestServlet): raise SynapseError(400, "MSISDN not found", Codes.THREEPID_NOT_FOUND) ret = yield self.identity_handler.requestMsisdnToken(**body) - defer.returnValue((200, ret)) + return (200, ret) class PasswordResetSubmitTokenServlet(RestServlet): @@ -279,7 +279,7 @@ class PasswordResetSubmitTokenServlet(RestServlet): request.setResponseCode(302) request.setHeader("Location", next_link) finish_request(request) - defer.returnValue(None) + return None # Otherwise show the success template html = self.config.email_password_reset_success_html_content @@ -295,7 +295,7 @@ class PasswordResetSubmitTokenServlet(RestServlet): request.write(html.encode("utf-8")) finish_request(request) - defer.returnValue(None) + return None def load_jinja2_template(self, template_dir, template_filename, template_vars): """Loads a jinja2 template with variables to insert @@ -330,7 +330,7 @@ class PasswordResetSubmitTokenServlet(RestServlet): ) response_code = 200 if valid else 400 - defer.returnValue((response_code, {"success": valid})) + return (response_code, {"success": valid}) class PasswordRestServlet(RestServlet): @@ -399,7 +399,7 @@ class PasswordRestServlet(RestServlet): yield self._set_password_handler.set_password(user_id, new_password, requester) - defer.returnValue((200, {})) + return (200, {}) def on_OPTIONS(self, _): return 200, {} @@ -434,7 +434,7 @@ class DeactivateAccountRestServlet(RestServlet): yield self._deactivate_account_handler.deactivate_account( requester.user.to_string(), erase ) - defer.returnValue((200, {})) + return (200, {}) yield self.auth_handler.validate_user_via_ui_auth( requester, body, self.hs.get_ip_from_request(request) @@ -447,7 +447,7 @@ class DeactivateAccountRestServlet(RestServlet): else: id_server_unbind_result = "no-support" - defer.returnValue((200, {"id_server_unbind_result": id_server_unbind_result})) + return (200, {"id_server_unbind_result": id_server_unbind_result}) class EmailThreepidRequestTokenRestServlet(RestServlet): @@ -481,7 +481,7 @@ class EmailThreepidRequestTokenRestServlet(RestServlet): raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE) ret = yield self.identity_handler.requestEmailToken(**body) - defer.returnValue((200, ret)) + return (200, ret) class MsisdnThreepidRequestTokenRestServlet(RestServlet): @@ -516,7 +516,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet): raise SynapseError(400, "MSISDN is already in use", Codes.THREEPID_IN_USE) ret = yield self.identity_handler.requestMsisdnToken(**body) - defer.returnValue((200, ret)) + return (200, ret) class ThreepidRestServlet(RestServlet): @@ -536,7 +536,7 @@ class ThreepidRestServlet(RestServlet): threepids = yield self.datastore.user_get_threepids(requester.user.to_string()) - defer.returnValue((200, {"threepids": threepids})) + return (200, {"threepids": threepids}) @defer.inlineCallbacks def on_POST(self, request): @@ -568,7 +568,7 @@ class ThreepidRestServlet(RestServlet): logger.debug("Binding threepid %s to %s", threepid, user_id) yield self.identity_handler.bind_threepid(threePidCreds, user_id) - defer.returnValue((200, {})) + return (200, {}) class ThreepidDeleteRestServlet(RestServlet): @@ -603,7 +603,7 @@ class ThreepidDeleteRestServlet(RestServlet): else: 
id_server_unbind_result = "no-support" - defer.returnValue((200, {"id_server_unbind_result": id_server_unbind_result})) + return (200, {"id_server_unbind_result": id_server_unbind_result}) class WhoamiRestServlet(RestServlet): @@ -617,7 +617,7 @@ class WhoamiRestServlet(RestServlet): def on_GET(self, request): requester = yield self.auth.get_user_by_req(request) - defer.returnValue((200, {"user_id": requester.user.to_string()})) + return (200, {"user_id": requester.user.to_string()}) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/account_data.py b/synapse/rest/client/v2_alpha/account_data.py index f155c26259..98f2f6f4b5 100644 --- a/synapse/rest/client/v2_alpha/account_data.py +++ b/synapse/rest/client/v2_alpha/account_data.py @@ -55,7 +55,7 @@ class AccountDataServlet(RestServlet): self.notifier.on_new_event("account_data_key", max_id, users=[user_id]) - defer.returnValue((200, {})) + return (200, {}) @defer.inlineCallbacks def on_GET(self, request, user_id, account_data_type): @@ -70,7 +70,7 @@ class AccountDataServlet(RestServlet): if event is None: raise NotFoundError("Account data not found") - defer.returnValue((200, event)) + return (200, event) class RoomAccountDataServlet(RestServlet): @@ -112,7 +112,7 @@ class RoomAccountDataServlet(RestServlet): self.notifier.on_new_event("account_data_key", max_id, users=[user_id]) - defer.returnValue((200, {})) + return (200, {}) @defer.inlineCallbacks def on_GET(self, request, user_id, room_id, account_data_type): @@ -127,7 +127,7 @@ class RoomAccountDataServlet(RestServlet): if event is None: raise NotFoundError("Room account data not found") - defer.returnValue((200, event)) + return (200, event) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/account_validity.py b/synapse/rest/client/v2_alpha/account_validity.py index d29c10b83d..133c61900a 100644 --- a/synapse/rest/client/v2_alpha/account_validity.py +++ b/synapse/rest/client/v2_alpha/account_validity.py @@ -58,7 +58,7 @@ class AccountValidityRenewServlet(RestServlet): ) request.write(AccountValidityRenewServlet.SUCCESS_HTML) finish_request(request) - defer.returnValue(None) + return None class AccountValiditySendMailServlet(RestServlet): @@ -87,7 +87,7 @@ class AccountValiditySendMailServlet(RestServlet): user_id = requester.user.to_string() yield self.account_activity_handler.send_renewal_email_to_user(user_id) - defer.returnValue((200, {})) + return (200, {}) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py index bebc2951e7..f21aff39e5 100644 --- a/synapse/rest/client/v2_alpha/auth.py +++ b/synapse/rest/client/v2_alpha/auth.py @@ -207,7 +207,7 @@ class AuthRestServlet(RestServlet): request.write(html_bytes) finish_request(request) - defer.returnValue(None) + return None elif stagetype == LoginType.TERMS: if ("session" not in request.args or len(request.args["session"])) == 0: raise SynapseError(400, "No session supplied") @@ -239,7 +239,7 @@ class AuthRestServlet(RestServlet): request.write(html_bytes) finish_request(request) - defer.returnValue(None) + return None else: raise SynapseError(404, "Unknown auth stage type") diff --git a/synapse/rest/client/v2_alpha/capabilities.py b/synapse/rest/client/v2_alpha/capabilities.py index fc7e2f4dd5..a4fa45fe11 100644 --- a/synapse/rest/client/v2_alpha/capabilities.py +++ b/synapse/rest/client/v2_alpha/capabilities.py @@ -58,7 +58,7 @@ class CapabilitiesRestServlet(RestServlet): 
"m.change_password": {"enabled": change_password}, } } - defer.returnValue((200, response)) + return (200, response) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/devices.py b/synapse/rest/client/v2_alpha/devices.py index d279229d74..9adf76cc0c 100644 --- a/synapse/rest/client/v2_alpha/devices.py +++ b/synapse/rest/client/v2_alpha/devices.py @@ -48,7 +48,7 @@ class DevicesRestServlet(RestServlet): devices = yield self.device_handler.get_devices_by_user( requester.user.to_string() ) - defer.returnValue((200, {"devices": devices})) + return (200, {"devices": devices}) class DeleteDevicesRestServlet(RestServlet): @@ -91,7 +91,7 @@ class DeleteDevicesRestServlet(RestServlet): yield self.device_handler.delete_devices( requester.user.to_string(), body["devices"] ) - defer.returnValue((200, {})) + return (200, {}) class DeviceRestServlet(RestServlet): @@ -114,7 +114,7 @@ class DeviceRestServlet(RestServlet): device = yield self.device_handler.get_device( requester.user.to_string(), device_id ) - defer.returnValue((200, device)) + return (200, device) @interactive_auth_handler @defer.inlineCallbacks @@ -137,7 +137,7 @@ class DeviceRestServlet(RestServlet): ) yield self.device_handler.delete_device(requester.user.to_string(), device_id) - defer.returnValue((200, {})) + return (200, {}) @defer.inlineCallbacks def on_PUT(self, request, device_id): @@ -147,7 +147,7 @@ class DeviceRestServlet(RestServlet): yield self.device_handler.update_device( requester.user.to_string(), device_id, body ) - defer.returnValue((200, {})) + return (200, {}) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/filter.py b/synapse/rest/client/v2_alpha/filter.py index 3f0adf4a21..22be0ee3c5 100644 --- a/synapse/rest/client/v2_alpha/filter.py +++ b/synapse/rest/client/v2_alpha/filter.py @@ -56,7 +56,7 @@ class GetFilterRestServlet(RestServlet): user_localpart=target_user.localpart, filter_id=filter_id ) - defer.returnValue((200, filter.get_filter_json())) + return (200, filter.get_filter_json()) except (KeyError, StoreError): raise SynapseError(400, "No such filter", errcode=Codes.NOT_FOUND) @@ -89,7 +89,7 @@ class CreateFilterRestServlet(RestServlet): user_localpart=target_user.localpart, user_filter=content ) - defer.returnValue((200, {"filter_id": str(filter_id)})) + return (200, {"filter_id": str(filter_id)}) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index a312dd2593..e629c4256d 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -47,7 +47,7 @@ class GroupServlet(RestServlet): group_id, requester_user_id ) - defer.returnValue((200, group_description)) + return (200, group_description) @defer.inlineCallbacks def on_POST(self, request, group_id): @@ -59,7 +59,7 @@ class GroupServlet(RestServlet): group_id, requester_user_id, content ) - defer.returnValue((200, {})) + return (200, {}) class GroupSummaryServlet(RestServlet): @@ -83,7 +83,7 @@ class GroupSummaryServlet(RestServlet): group_id, requester_user_id ) - defer.returnValue((200, get_group_summary)) + return (200, get_group_summary) class GroupSummaryRoomsCatServlet(RestServlet): @@ -120,7 +120,7 @@ class GroupSummaryRoomsCatServlet(RestServlet): content=content, ) - defer.returnValue((200, resp)) + return (200, resp) @defer.inlineCallbacks def on_DELETE(self, request, group_id, category_id, room_id): @@ -131,7 +131,7 @@ class 
GroupSummaryRoomsCatServlet(RestServlet): group_id, requester_user_id, room_id=room_id, category_id=category_id ) - defer.returnValue((200, resp)) + return (200, resp) class GroupCategoryServlet(RestServlet): @@ -157,7 +157,7 @@ class GroupCategoryServlet(RestServlet): group_id, requester_user_id, category_id=category_id ) - defer.returnValue((200, category)) + return (200, category) @defer.inlineCallbacks def on_PUT(self, request, group_id, category_id): @@ -169,7 +169,7 @@ class GroupCategoryServlet(RestServlet): group_id, requester_user_id, category_id=category_id, content=content ) - defer.returnValue((200, resp)) + return (200, resp) @defer.inlineCallbacks def on_DELETE(self, request, group_id, category_id): @@ -180,7 +180,7 @@ class GroupCategoryServlet(RestServlet): group_id, requester_user_id, category_id=category_id ) - defer.returnValue((200, resp)) + return (200, resp) class GroupCategoriesServlet(RestServlet): @@ -204,7 +204,7 @@ class GroupCategoriesServlet(RestServlet): group_id, requester_user_id ) - defer.returnValue((200, category)) + return (200, category) class GroupRoleServlet(RestServlet): @@ -228,7 +228,7 @@ class GroupRoleServlet(RestServlet): group_id, requester_user_id, role_id=role_id ) - defer.returnValue((200, category)) + return (200, category) @defer.inlineCallbacks def on_PUT(self, request, group_id, role_id): @@ -240,7 +240,7 @@ class GroupRoleServlet(RestServlet): group_id, requester_user_id, role_id=role_id, content=content ) - defer.returnValue((200, resp)) + return (200, resp) @defer.inlineCallbacks def on_DELETE(self, request, group_id, role_id): @@ -251,7 +251,7 @@ class GroupRoleServlet(RestServlet): group_id, requester_user_id, role_id=role_id ) - defer.returnValue((200, resp)) + return (200, resp) class GroupRolesServlet(RestServlet): @@ -275,7 +275,7 @@ class GroupRolesServlet(RestServlet): group_id, requester_user_id ) - defer.returnValue((200, category)) + return (200, category) class GroupSummaryUsersRoleServlet(RestServlet): @@ -312,7 +312,7 @@ class GroupSummaryUsersRoleServlet(RestServlet): content=content, ) - defer.returnValue((200, resp)) + return (200, resp) @defer.inlineCallbacks def on_DELETE(self, request, group_id, role_id, user_id): @@ -323,7 +323,7 @@ class GroupSummaryUsersRoleServlet(RestServlet): group_id, requester_user_id, user_id=user_id, role_id=role_id ) - defer.returnValue((200, resp)) + return (200, resp) class GroupRoomServlet(RestServlet): @@ -347,7 +347,7 @@ class GroupRoomServlet(RestServlet): group_id, requester_user_id ) - defer.returnValue((200, result)) + return (200, result) class GroupUsersServlet(RestServlet): @@ -371,7 +371,7 @@ class GroupUsersServlet(RestServlet): group_id, requester_user_id ) - defer.returnValue((200, result)) + return (200, result) class GroupInvitedUsersServlet(RestServlet): @@ -395,7 +395,7 @@ class GroupInvitedUsersServlet(RestServlet): group_id, requester_user_id ) - defer.returnValue((200, result)) + return (200, result) class GroupSettingJoinPolicyServlet(RestServlet): @@ -420,7 +420,7 @@ class GroupSettingJoinPolicyServlet(RestServlet): group_id, requester_user_id, content ) - defer.returnValue((200, result)) + return (200, result) class GroupCreateServlet(RestServlet): @@ -450,7 +450,7 @@ class GroupCreateServlet(RestServlet): group_id, requester_user_id, content ) - defer.returnValue((200, result)) + return (200, result) class GroupAdminRoomsServlet(RestServlet): @@ -477,7 +477,7 @@ class GroupAdminRoomsServlet(RestServlet): group_id, requester_user_id, room_id, content ) - 
defer.returnValue((200, result)) + return (200, result) @defer.inlineCallbacks def on_DELETE(self, request, group_id, room_id): @@ -488,7 +488,7 @@ class GroupAdminRoomsServlet(RestServlet): group_id, requester_user_id, room_id ) - defer.returnValue((200, result)) + return (200, result) class GroupAdminRoomsConfigServlet(RestServlet): @@ -516,7 +516,7 @@ class GroupAdminRoomsConfigServlet(RestServlet): group_id, requester_user_id, room_id, config_key, content ) - defer.returnValue((200, result)) + return (200, result) class GroupAdminUsersInviteServlet(RestServlet): @@ -546,7 +546,7 @@ class GroupAdminUsersInviteServlet(RestServlet): group_id, user_id, requester_user_id, config ) - defer.returnValue((200, result)) + return (200, result) class GroupAdminUsersKickServlet(RestServlet): @@ -573,7 +573,7 @@ class GroupAdminUsersKickServlet(RestServlet): group_id, user_id, requester_user_id, content ) - defer.returnValue((200, result)) + return (200, result) class GroupSelfLeaveServlet(RestServlet): @@ -598,7 +598,7 @@ class GroupSelfLeaveServlet(RestServlet): group_id, requester_user_id, requester_user_id, content ) - defer.returnValue((200, result)) + return (200, result) class GroupSelfJoinServlet(RestServlet): @@ -623,7 +623,7 @@ class GroupSelfJoinServlet(RestServlet): group_id, requester_user_id, content ) - defer.returnValue((200, result)) + return (200, result) class GroupSelfAcceptInviteServlet(RestServlet): @@ -648,7 +648,7 @@ class GroupSelfAcceptInviteServlet(RestServlet): group_id, requester_user_id, content ) - defer.returnValue((200, result)) + return (200, result) class GroupSelfUpdatePublicityServlet(RestServlet): @@ -672,7 +672,7 @@ class GroupSelfUpdatePublicityServlet(RestServlet): publicise = content["publicise"] yield self.store.update_group_publicity(group_id, requester_user_id, publicise) - defer.returnValue((200, {})) + return (200, {}) class PublicisedGroupsForUserServlet(RestServlet): @@ -694,7 +694,7 @@ class PublicisedGroupsForUserServlet(RestServlet): result = yield self.groups_handler.get_publicised_groups_for_user(user_id) - defer.returnValue((200, result)) + return (200, result) class PublicisedGroupsForUsersServlet(RestServlet): @@ -719,7 +719,7 @@ class PublicisedGroupsForUsersServlet(RestServlet): result = yield self.groups_handler.bulk_get_publicised_groups(user_ids) - defer.returnValue((200, result)) + return (200, result) class GroupsForUserServlet(RestServlet): @@ -741,7 +741,7 @@ class GroupsForUserServlet(RestServlet): result = yield self.groups_handler.get_joined_groups(requester_user_id) - defer.returnValue((200, result)) + return (200, result) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index 45c9928b65..6008adec7c 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -95,7 +95,7 @@ class KeyUploadServlet(RestServlet): result = yield self.e2e_keys_handler.upload_keys_for_user( user_id, device_id, body ) - defer.returnValue((200, result)) + return (200, result) class KeyQueryServlet(RestServlet): @@ -149,7 +149,7 @@ class KeyQueryServlet(RestServlet): timeout = parse_integer(request, "timeout", 10 * 1000) body = parse_json_object_from_request(request) result = yield self.e2e_keys_handler.query_devices(body, timeout) - defer.returnValue((200, result)) + return (200, result) class KeyChangesServlet(RestServlet): @@ -189,7 +189,7 @@ class KeyChangesServlet(RestServlet): results = yield 
self.device_handler.get_user_ids_changed(user_id, from_token) - defer.returnValue((200, results)) + return (200, results) class OneTimeKeyServlet(RestServlet): @@ -224,7 +224,7 @@ class OneTimeKeyServlet(RestServlet): timeout = parse_integer(request, "timeout", 10 * 1000) body = parse_json_object_from_request(request) result = yield self.e2e_keys_handler.claim_one_time_keys(body, timeout) - defer.returnValue((200, result)) + return (200, result) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/notifications.py b/synapse/rest/client/v2_alpha/notifications.py index 728a52328f..d034863a3c 100644 --- a/synapse/rest/client/v2_alpha/notifications.py +++ b/synapse/rest/client/v2_alpha/notifications.py @@ -88,9 +88,7 @@ class NotificationsServlet(RestServlet): returned_push_actions.append(returned_pa) next_token = str(pa["stream_ordering"]) - defer.returnValue( - (200, {"notifications": returned_push_actions, "next_token": next_token}) - ) + return (200, {"notifications": returned_push_actions, "next_token": next_token}) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/openid.py b/synapse/rest/client/v2_alpha/openid.py index b1b5385b09..b4925c0f59 100644 --- a/synapse/rest/client/v2_alpha/openid.py +++ b/synapse/rest/client/v2_alpha/openid.py @@ -83,16 +83,14 @@ class IdTokenServlet(RestServlet): yield self.store.insert_open_id_token(token, ts_valid_until_ms, user_id) - defer.returnValue( - ( - 200, - { - "access_token": token, - "token_type": "Bearer", - "matrix_server_name": self.server_name, - "expires_in": self.EXPIRES_MS / 1000, - }, - ) + return ( + 200, + { + "access_token": token, + "token_type": "Bearer", + "matrix_server_name": self.server_name, + "expires_in": self.EXPIRES_MS / 1000, + }, ) diff --git a/synapse/rest/client/v2_alpha/read_marker.py b/synapse/rest/client/v2_alpha/read_marker.py index e75664279b..d93d6a9f24 100644 --- a/synapse/rest/client/v2_alpha/read_marker.py +++ b/synapse/rest/client/v2_alpha/read_marker.py @@ -59,7 +59,7 @@ class ReadMarkerRestServlet(RestServlet): event_id=read_marker_event_id, ) - defer.returnValue((200, {})) + return (200, {}) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/receipts.py b/synapse/rest/client/v2_alpha/receipts.py index 488905626a..98a97b7059 100644 --- a/synapse/rest/client/v2_alpha/receipts.py +++ b/synapse/rest/client/v2_alpha/receipts.py @@ -52,7 +52,7 @@ class ReceiptRestServlet(RestServlet): room_id, receipt_type, user_id=requester.user.to_string(), event_id=event_id ) - defer.returnValue((200, {})) + return (200, {}) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index f327999e59..05ea1459e3 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -95,7 +95,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet): raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE) ret = yield self.identity_handler.requestEmailToken(**body) - defer.returnValue((200, ret)) + return (200, ret) class MsisdnRegisterRequestTokenRestServlet(RestServlet): @@ -138,7 +138,7 @@ class MsisdnRegisterRequestTokenRestServlet(RestServlet): ) ret = yield self.identity_handler.requestMsisdnToken(**body) - defer.returnValue((200, ret)) + return (200, ret) class UsernameAvailabilityRestServlet(RestServlet): @@ -178,7 +178,7 @@ class UsernameAvailabilityRestServlet(RestServlet): yield 
self.registration_handler.check_username(username) - defer.returnValue((200, {"available": True})) + return (200, {"available": True}) class RegisterRestServlet(RestServlet): @@ -230,7 +230,7 @@ class RegisterRestServlet(RestServlet): if kind == b"guest": ret = yield self._do_guest_registration(body, address=client_addr) - defer.returnValue(ret) + return ret return elif kind != b"user": raise UnrecognizedRequestError( @@ -282,7 +282,7 @@ class RegisterRestServlet(RestServlet): result = yield self._do_appservice_registration( desired_username, access_token, body ) - defer.returnValue((200, result)) # we throw for non 200 responses + return (200, result) # we throw for non 200 responses return # for either shared secret or regular registration, downcase the @@ -301,7 +301,7 @@ class RegisterRestServlet(RestServlet): result = yield self._do_shared_secret_registration( desired_username, desired_password, body ) - defer.returnValue((200, result)) # we throw for non 200 responses + return (200, result) # we throw for non 200 responses return # == Normal User Registration == (everyone else) @@ -500,7 +500,7 @@ class RegisterRestServlet(RestServlet): bind_msisdn=params.get("bind_msisdn"), ) - defer.returnValue((200, return_dict)) + return (200, return_dict) def on_OPTIONS(self, _): return 200, {} @@ -510,7 +510,7 @@ class RegisterRestServlet(RestServlet): user_id = yield self.registration_handler.appservice_register( username, as_token ) - defer.returnValue((yield self._create_registration_details(user_id, body))) + return (yield self._create_registration_details(user_id, body)) @defer.inlineCallbacks def _do_shared_secret_registration(self, username, password, body): @@ -546,7 +546,7 @@ class RegisterRestServlet(RestServlet): ) result = yield self._create_registration_details(user_id, body) - defer.returnValue(result) + return result @defer.inlineCallbacks def _create_registration_details(self, user_id, params): @@ -570,7 +570,7 @@ class RegisterRestServlet(RestServlet): ) result.update({"access_token": access_token, "device_id": device_id}) - defer.returnValue(result) + return result @defer.inlineCallbacks def _do_guest_registration(self, params, address=None): @@ -588,16 +588,14 @@ class RegisterRestServlet(RestServlet): user_id, device_id, initial_display_name, is_guest=True ) - defer.returnValue( - ( - 200, - { - "user_id": user_id, - "device_id": device_id, - "access_token": access_token, - "home_server": self.hs.hostname, - }, - ) + return ( + 200, + { + "user_id": user_id, + "device_id": device_id, + "access_token": access_token, + "home_server": self.hs.hostname, + }, ) diff --git a/synapse/rest/client/v2_alpha/relations.py b/synapse/rest/client/v2_alpha/relations.py index 6e52f6d284..6fde3decdb 100644 --- a/synapse/rest/client/v2_alpha/relations.py +++ b/synapse/rest/client/v2_alpha/relations.py @@ -116,7 +116,7 @@ class RelationSendServlet(RestServlet): requester, event_dict=event_dict, txn_id=txn_id ) - defer.returnValue((200, {"event_id": event.event_id})) + return (200, {"event_id": event.event_id}) class RelationPaginationServlet(RestServlet): @@ -196,7 +196,7 @@ class RelationPaginationServlet(RestServlet): return_value["chunk"] = events return_value["original_event"] = original_event - defer.returnValue((200, return_value)) + return (200, return_value) class RelationAggregationPaginationServlet(RestServlet): @@ -268,7 +268,7 @@ class RelationAggregationPaginationServlet(RestServlet): to_token=to_token, ) - defer.returnValue((200, pagination_chunk.to_dict())) + return (200, 
pagination_chunk.to_dict()) class RelationAggregationGroupPaginationServlet(RestServlet): @@ -354,7 +354,7 @@ class RelationAggregationGroupPaginationServlet(RestServlet): return_value = result.to_dict() return_value["chunk"] = events - defer.returnValue((200, return_value)) + return (200, return_value) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/report_event.py b/synapse/rest/client/v2_alpha/report_event.py index e7578af804..3fdd4584a3 100644 --- a/synapse/rest/client/v2_alpha/report_event.py +++ b/synapse/rest/client/v2_alpha/report_event.py @@ -72,7 +72,7 @@ class ReportEventRestServlet(RestServlet): received_ts=self.clock.time_msec(), ) - defer.returnValue((200, {})) + return (200, {}) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index 8d1b810565..10dec96208 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -135,7 +135,7 @@ class RoomKeysServlet(RestServlet): body = {"rooms": {room_id: body}} yield self.e2e_room_keys_handler.upload_room_keys(user_id, version, body) - defer.returnValue((200, {})) + return (200, {}) @defer.inlineCallbacks def on_GET(self, request, room_id, session_id): @@ -218,7 +218,7 @@ class RoomKeysServlet(RestServlet): else: room_keys = room_keys["rooms"][room_id] - defer.returnValue((200, room_keys)) + return (200, room_keys) @defer.inlineCallbacks def on_DELETE(self, request, room_id, session_id): @@ -242,7 +242,7 @@ class RoomKeysServlet(RestServlet): yield self.e2e_room_keys_handler.delete_room_keys( user_id, version, room_id, session_id ) - defer.returnValue((200, {})) + return (200, {}) class RoomKeysNewVersionServlet(RestServlet): @@ -293,7 +293,7 @@ class RoomKeysNewVersionServlet(RestServlet): info = parse_json_object_from_request(request) new_version = yield self.e2e_room_keys_handler.create_version(user_id, info) - defer.returnValue((200, {"version": new_version})) + return (200, {"version": new_version}) # we deliberately don't have a PUT /version, as these things really should # be immutable to avoid people footgunning @@ -338,7 +338,7 @@ class RoomKeysVersionServlet(RestServlet): except SynapseError as e: if e.code == 404: raise SynapseError(404, "No backup found", Codes.NOT_FOUND) - defer.returnValue((200, info)) + return (200, info) @defer.inlineCallbacks def on_DELETE(self, request, version): @@ -358,7 +358,7 @@ class RoomKeysVersionServlet(RestServlet): user_id = requester.user.to_string() yield self.e2e_room_keys_handler.delete_version(user_id, version) - defer.returnValue((200, {})) + return (200, {}) @defer.inlineCallbacks def on_PUT(self, request, version): @@ -392,7 +392,7 @@ class RoomKeysVersionServlet(RestServlet): ) yield self.e2e_room_keys_handler.update_version(user_id, version, info) - defer.returnValue((200, {})) + return (200, {}) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py index d7f7faa029..14ba61a63e 100644 --- a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py +++ b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py @@ -80,7 +80,7 @@ class RoomUpgradeRestServlet(RestServlet): ret = {"replacement_room": new_room_id} - defer.returnValue((200, ret)) + return (200, ret) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py 
b/synapse/rest/client/v2_alpha/sendtodevice.py index 78075b8fc0..2613648d82 100644 --- a/synapse/rest/client/v2_alpha/sendtodevice.py +++ b/synapse/rest/client/v2_alpha/sendtodevice.py @@ -60,7 +60,7 @@ class SendToDeviceRestServlet(servlet.RestServlet): ) response = (200, {}) - defer.returnValue(response) + return response def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index 02d56dee6c..7b32dd2212 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -174,7 +174,7 @@ class SyncRestServlet(RestServlet): time_now, sync_result, requester.access_token_id, filter ) - defer.returnValue((200, response_content)) + return (200, response_content) @defer.inlineCallbacks def encode_response(self, time_now, sync_result, access_token_id, filter): @@ -205,27 +205,23 @@ class SyncRestServlet(RestServlet): event_formatter, ) - defer.returnValue( - { - "account_data": {"events": sync_result.account_data}, - "to_device": {"events": sync_result.to_device}, - "device_lists": { - "changed": list(sync_result.device_lists.changed), - "left": list(sync_result.device_lists.left), - }, - "presence": SyncRestServlet.encode_presence( - sync_result.presence, time_now - ), - "rooms": {"join": joined, "invite": invited, "leave": archived}, - "groups": { - "join": sync_result.groups.join, - "invite": sync_result.groups.invite, - "leave": sync_result.groups.leave, - }, - "device_one_time_keys_count": sync_result.device_one_time_keys_count, - "next_batch": sync_result.next_batch.to_string(), - } - ) + return { + "account_data": {"events": sync_result.account_data}, + "to_device": {"events": sync_result.to_device}, + "device_lists": { + "changed": list(sync_result.device_lists.changed), + "left": list(sync_result.device_lists.left), + }, + "presence": SyncRestServlet.encode_presence(sync_result.presence, time_now), + "rooms": {"join": joined, "invite": invited, "leave": archived}, + "groups": { + "join": sync_result.groups.join, + "invite": sync_result.groups.invite, + "leave": sync_result.groups.leave, + }, + "device_one_time_keys_count": sync_result.device_one_time_keys_count, + "next_batch": sync_result.next_batch.to_string(), + } @staticmethod def encode_presence(events, time_now): @@ -273,7 +269,7 @@ class SyncRestServlet(RestServlet): event_formatter=event_formatter, ) - defer.returnValue(joined) + return joined @defer.inlineCallbacks def encode_invited(self, rooms, time_now, token_id, event_formatter): @@ -309,7 +305,7 @@ class SyncRestServlet(RestServlet): invited_state.append(invite) invited[room.room_id] = {"invite_state": {"events": invited_state}} - defer.returnValue(invited) + return invited @defer.inlineCallbacks def encode_archived(self, rooms, time_now, token_id, event_fields, event_formatter): @@ -342,7 +338,7 @@ class SyncRestServlet(RestServlet): event_formatter=event_formatter, ) - defer.returnValue(joined) + return joined @defer.inlineCallbacks def encode_room( @@ -414,7 +410,7 @@ class SyncRestServlet(RestServlet): result["unread_notifications"] = room.unread_notifications result["summary"] = room.summary - defer.returnValue(result) + return result def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/tags.py b/synapse/rest/client/v2_alpha/tags.py index 07b6ede603..d173544355 100644 --- a/synapse/rest/client/v2_alpha/tags.py +++ b/synapse/rest/client/v2_alpha/tags.py @@ -45,7 +45,7 @@ class TagListServlet(RestServlet): tags = yield 
self.store.get_tags_for_room(user_id, room_id) - defer.returnValue((200, {"tags": tags})) + return (200, {"tags": tags}) class TagServlet(RestServlet): @@ -76,7 +76,7 @@ class TagServlet(RestServlet): self.notifier.on_new_event("account_data_key", max_id, users=[user_id]) - defer.returnValue((200, {})) + return (200, {}) @defer.inlineCallbacks def on_DELETE(self, request, user_id, room_id, tag): @@ -88,7 +88,7 @@ class TagServlet(RestServlet): self.notifier.on_new_event("account_data_key", max_id, users=[user_id]) - defer.returnValue((200, {})) + return (200, {}) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/thirdparty.py b/synapse/rest/client/v2_alpha/thirdparty.py index 1e66662a05..158e686b01 100644 --- a/synapse/rest/client/v2_alpha/thirdparty.py +++ b/synapse/rest/client/v2_alpha/thirdparty.py @@ -40,7 +40,7 @@ class ThirdPartyProtocolsServlet(RestServlet): yield self.auth.get_user_by_req(request, allow_guest=True) protocols = yield self.appservice_handler.get_3pe_protocols() - defer.returnValue((200, protocols)) + return (200, protocols) class ThirdPartyProtocolServlet(RestServlet): @@ -60,9 +60,9 @@ class ThirdPartyProtocolServlet(RestServlet): only_protocol=protocol ) if protocol in protocols: - defer.returnValue((200, protocols[protocol])) + return (200, protocols[protocol]) else: - defer.returnValue((404, {"error": "Unknown protocol"})) + return (404, {"error": "Unknown protocol"}) class ThirdPartyUserServlet(RestServlet): @@ -85,7 +85,7 @@ class ThirdPartyUserServlet(RestServlet): ThirdPartyEntityKind.USER, protocol, fields ) - defer.returnValue((200, results)) + return (200, results) class ThirdPartyLocationServlet(RestServlet): @@ -108,7 +108,7 @@ class ThirdPartyLocationServlet(RestServlet): ThirdPartyEntityKind.LOCATION, protocol, fields ) - defer.returnValue((200, results)) + return (200, results) def register_servlets(hs, http_server): diff --git a/synapse/rest/client/v2_alpha/user_directory.py b/synapse/rest/client/v2_alpha/user_directory.py index e19fb6d583..7ab2b80e46 100644 --- a/synapse/rest/client/v2_alpha/user_directory.py +++ b/synapse/rest/client/v2_alpha/user_directory.py @@ -60,7 +60,7 @@ class UserDirectorySearchRestServlet(RestServlet): user_id = requester.user.to_string() if not self.hs.config.user_directory_search_enabled: - defer.returnValue((200, {"limited": False, "results": []})) + return (200, {"limited": False, "results": []}) body = parse_json_object_from_request(request) @@ -76,7 +76,7 @@ class UserDirectorySearchRestServlet(RestServlet): user_id, search_term, limit ) - defer.returnValue((200, results)) + return (200, results) def register_servlets(hs, http_server): diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 65afffbb42..92beefa176 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -171,7 +171,7 @@ class MediaRepository(object): yield self._generate_thumbnails(None, media_id, media_id, media_type) - defer.returnValue("mxc://%s/%s" % (self.server_name, media_id)) + return "mxc://%s/%s" % (self.server_name, media_id) @defer.inlineCallbacks def get_local_media(self, request, media_id, name): @@ -282,7 +282,7 @@ class MediaRepository(object): with responder: pass - defer.returnValue(media_info) + return media_info @defer.inlineCallbacks def _get_remote_media_impl(self, server_name, media_id): @@ -317,14 +317,14 @@ class MediaRepository(object): responder = yield self.media_storage.fetch_media(file_info) if 
responder: - defer.returnValue((responder, media_info)) + return (responder, media_info) # Failed to find the file anywhere, lets download it. media_info = yield self._download_remote_file(server_name, media_id, file_id) responder = yield self.media_storage.fetch_media(file_info) - defer.returnValue((responder, media_info)) + return (responder, media_info) @defer.inlineCallbacks def _download_remote_file(self, server_name, media_id, file_id): @@ -421,7 +421,7 @@ class MediaRepository(object): yield self._generate_thumbnails(server_name, media_id, file_id, media_type) - defer.returnValue(media_info) + return media_info def _get_thumbnail_requirements(self, media_type): return self.thumbnail_requirements.get(media_type, ()) @@ -500,7 +500,7 @@ class MediaRepository(object): media_id, t_width, t_height, t_type, t_method, t_len ) - defer.returnValue(output_path) + return output_path @defer.inlineCallbacks def generate_remote_exact_thumbnail( @@ -554,7 +554,7 @@ class MediaRepository(object): t_len, ) - defer.returnValue(output_path) + return output_path @defer.inlineCallbacks def _generate_thumbnails( @@ -667,7 +667,7 @@ class MediaRepository(object): media_id, t_width, t_height, t_type, t_method, t_len ) - defer.returnValue({"width": m_width, "height": m_height}) + return {"width": m_width, "height": m_height} @defer.inlineCallbacks def delete_old_remote_media(self, before_ts): @@ -704,7 +704,7 @@ class MediaRepository(object): yield self.store.delete_remote_media(origin, media_id) deleted += 1 - defer.returnValue({"deleted": deleted}) + return {"deleted": deleted} class MediaRepositoryResource(Resource): diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py index 25e5ac2848..3b87717a5a 100644 --- a/synapse/rest/media/v1/media_storage.py +++ b/synapse/rest/media/v1/media_storage.py @@ -69,7 +69,7 @@ class MediaStorage(object): ) yield finish_cb() - defer.returnValue(fname) + return fname @contextlib.contextmanager def store_into_file(self, file_info): @@ -143,14 +143,14 @@ class MediaStorage(object): path = self._file_info_to_path(file_info) local_path = os.path.join(self.local_media_directory, path) if os.path.exists(local_path): - defer.returnValue(FileResponder(open(local_path, "rb"))) + return FileResponder(open(local_path, "rb")) for provider in self.storage_providers: res = yield provider.fetch(path, file_info) if res: - defer.returnValue(res) + return res - defer.returnValue(None) + return None @defer.inlineCallbacks def ensure_media_is_in_local_cache(self, file_info): @@ -166,7 +166,7 @@ class MediaStorage(object): path = self._file_info_to_path(file_info) local_path = os.path.join(self.local_media_directory, path) if os.path.exists(local_path): - defer.returnValue(local_path) + return local_path dirname = os.path.dirname(local_path) if not os.path.exists(dirname): @@ -181,7 +181,7 @@ class MediaStorage(object): ) yield res.write_to_consumer(consumer) yield consumer.wait() - defer.returnValue(local_path) + return local_path raise Exception("file could not be found") diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 5871737bfd..bd40891a7f 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -182,7 +182,7 @@ class PreviewUrlResource(DirectServeResource): og = cache_result["og"] if isinstance(og, six.text_type): og = og.encode("utf8") - defer.returnValue(og) + return og return media_info = yield self._download_url(url, user) @@ 
-284,7 +284,7 @@ class PreviewUrlResource(DirectServeResource): media_info["created_ts"], ) - defer.returnValue(jsonog) + return jsonog @defer.inlineCallbacks def _download_url(self, url, user): @@ -354,22 +354,20 @@ class PreviewUrlResource(DirectServeResource): # therefore not expire it. raise - defer.returnValue( - { - "media_type": media_type, - "media_length": length, - "download_name": download_name, - "created_ts": time_now_ms, - "filesystem_id": file_id, - "filename": fname, - "uri": uri, - "response_code": code, - # FIXME: we should calculate a proper expiration based on the - # Cache-Control and Expire headers. But for now, assume 1 hour. - "expires": 60 * 60 * 1000, - "etag": headers["ETag"][0] if "ETag" in headers else None, - } - ) + return { + "media_type": media_type, + "media_length": length, + "download_name": download_name, + "created_ts": time_now_ms, + "filesystem_id": file_id, + "filename": fname, + "uri": uri, + "response_code": code, + # FIXME: we should calculate a proper expiration based on the + # Cache-Control and Expire headers. But for now, assume 1 hour. + "expires": 60 * 60 * 1000, + "etag": headers["ETag"][0] if "ETag" in headers else None, + } def _start_expire_url_cache_data(self): return run_as_background_process( diff --git a/synapse/server_notices/resource_limits_server_notices.py b/synapse/server_notices/resource_limits_server_notices.py index f183743f31..729c097e6d 100644 --- a/synapse/server_notices/resource_limits_server_notices.py +++ b/synapse/server_notices/resource_limits_server_notices.py @@ -193,4 +193,4 @@ class ResourceLimitsServerNotices(object): if event_id in referenced_events: referenced_events.remove(event.event_id) - defer.returnValue((currently_blocked, referenced_events)) + return (currently_blocked, referenced_events) diff --git a/synapse/server_notices/server_notices_manager.py b/synapse/server_notices/server_notices_manager.py index 71e7e75320..2dac90578c 100644 --- a/synapse/server_notices/server_notices_manager.py +++ b/synapse/server_notices/server_notices_manager.py @@ -86,7 +86,7 @@ class ServerNoticesManager(object): res = yield self._event_creation_handler.create_and_send_nonmember_event( requester, event_dict, ratelimit=False ) - defer.returnValue(res) + return res @cachedInlineCallbacks() def get_notice_room_for_user(self, user_id): @@ -120,7 +120,7 @@ class ServerNoticesManager(object): # we found a room which our user shares with the system notice # user logger.info("Using room %s", room.room_id) - defer.returnValue(room.room_id) + return room.room_id # apparently no existing notice room: create a new one logger.info("Creating server notices room for %s", user_id) @@ -158,4 +158,4 @@ class ServerNoticesManager(object): self._notifier.on_new_event("account_data_key", max_id, users=[user_id]) logger.info("Created server notices room %s for %s", room_id, user_id) - defer.returnValue(room_id) + return room_id diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 9f708fa205..a0d34f16ea 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -135,7 +135,7 @@ class StateHandler(object): event = None if event_id: event = yield self.store.get_event(event_id, allow_none=True) - defer.returnValue(event) + return event return state_map = yield self.store.get_events( @@ -145,7 +145,7 @@ class StateHandler(object): key: state_map[e_id] for key, e_id in iteritems(state) if e_id in state_map } - defer.returnValue(state) + return state @defer.inlineCallbacks def get_current_state_ids(self, room_id, 
latest_event_ids=None): @@ -169,7 +169,7 @@ class StateHandler(object): ret = yield self.resolve_state_groups_for_events(room_id, latest_event_ids) state = ret.state - defer.returnValue(state) + return state @defer.inlineCallbacks def get_current_users_in_room(self, room_id, latest_event_ids=None): @@ -189,7 +189,7 @@ class StateHandler(object): logger.debug("calling resolve_state_groups from get_current_users_in_room") entry = yield self.resolve_state_groups_for_events(room_id, latest_event_ids) joined_users = yield self.store.get_joined_users_from_state(room_id, entry) - defer.returnValue(joined_users) + return joined_users @defer.inlineCallbacks def get_current_hosts_in_room(self, room_id, latest_event_ids=None): @@ -198,7 +198,7 @@ class StateHandler(object): logger.debug("calling resolve_state_groups from get_current_hosts_in_room") entry = yield self.resolve_state_groups_for_events(room_id, latest_event_ids) joined_hosts = yield self.store.get_joined_hosts(room_id, entry) - defer.returnValue(joined_hosts) + return joined_hosts @defer.inlineCallbacks def compute_event_context(self, event, old_state=None): @@ -241,7 +241,7 @@ class StateHandler(object): prev_state_ids=prev_state_ids, ) - defer.returnValue(context) + return context if old_state: # We already have the state, so we don't need to calculate it. @@ -275,7 +275,7 @@ class StateHandler(object): prev_state_ids=prev_state_ids, ) - defer.returnValue(context) + return context logger.debug("calling resolve_state_groups from compute_event_context") @@ -343,7 +343,7 @@ class StateHandler(object): delta_ids=delta_ids, ) - defer.returnValue(context) + return context @defer.inlineCallbacks def resolve_state_groups_for_events(self, room_id, event_ids): @@ -368,19 +368,17 @@ class StateHandler(object): state_groups_ids = yield self.store.get_state_groups_ids(room_id, event_ids) if len(state_groups_ids) == 0: - defer.returnValue(_StateCacheEntry(state={}, state_group=None)) + return _StateCacheEntry(state={}, state_group=None) elif len(state_groups_ids) == 1: name, state_list = list(state_groups_ids.items()).pop() prev_group, delta_ids = yield self.store.get_state_group_delta(name) - defer.returnValue( - _StateCacheEntry( - state=state_list, - state_group=name, - prev_group=prev_group, - delta_ids=delta_ids, - ) + return _StateCacheEntry( + state=state_list, + state_group=name, + prev_group=prev_group, + delta_ids=delta_ids, ) room_version = yield self.store.get_room_version(room_id) @@ -392,7 +390,7 @@ class StateHandler(object): None, state_res_store=StateResolutionStore(self.store), ) - defer.returnValue(result) + return result @defer.inlineCallbacks def resolve_events(self, room_version, state_sets, event): @@ -415,7 +413,7 @@ class StateHandler(object): new_state = {key: state_map[ev_id] for key, ev_id in iteritems(new_state)} - defer.returnValue(new_state) + return new_state class StateResolutionHandler(object): @@ -479,7 +477,7 @@ class StateResolutionHandler(object): if self._state_cache is not None: cache = self._state_cache.get(group_names, None) if cache: - defer.returnValue(cache) + return cache logger.info( "Resolving state for %s with %d groups", room_id, len(state_groups_ids) @@ -525,7 +523,7 @@ class StateResolutionHandler(object): if self._state_cache is not None: self._state_cache[group_names] = cache - defer.returnValue(cache) + return cache def _make_state_cache_entry(new_state, state_groups_ids): diff --git a/synapse/state/v1.py b/synapse/state/v1.py index 88acd4817e..a2f92d9ff9 100644 --- a/synapse/state/v1.py +++ 
b/synapse/state/v1.py @@ -55,7 +55,7 @@ def resolve_events_with_store(state_sets, event_map, state_map_factory): a map from (type, state_key) to event_id. """ if len(state_sets) == 1: - defer.returnValue(state_sets[0]) + return state_sets[0] unconflicted_state, conflicted_state = _seperate(state_sets) @@ -97,10 +97,8 @@ def resolve_events_with_store(state_sets, event_map, state_map_factory): state_map_new = yield state_map_factory(new_needed_events) state_map.update(state_map_new) - defer.returnValue( - _resolve_with_state( - unconflicted_state, conflicted_state, auth_events, state_map - ) + return _resolve_with_state( + unconflicted_state, conflicted_state, auth_events, state_map ) diff --git a/synapse/state/v2.py b/synapse/state/v2.py index db969e8997..b327c86f40 100644 --- a/synapse/state/v2.py +++ b/synapse/state/v2.py @@ -63,7 +63,7 @@ def resolve_events_with_store(room_version, state_sets, event_map, state_res_sto unconflicted_state, conflicted_state = _seperate(state_sets) if not conflicted_state: - defer.returnValue(unconflicted_state) + return unconflicted_state logger.debug("%d conflicted state entries", len(conflicted_state)) logger.debug("Calculating auth chain difference") @@ -137,7 +137,7 @@ def resolve_events_with_store(room_version, state_sets, event_map, state_res_sto logger.debug("done") - defer.returnValue(resolved_state) + return resolved_state @defer.inlineCallbacks @@ -168,18 +168,18 @@ def _get_power_level_for_sender(event_id, event_map, state_res_store): aev = yield _get_event(aid, event_map, state_res_store) if (aev.type, aev.state_key) == (EventTypes.Create, ""): if aev.content.get("creator") == event.sender: - defer.returnValue(100) + return 100 break - defer.returnValue(0) + return 0 level = pl.content.get("users", {}).get(event.sender) if level is None: level = pl.content.get("users_default", 0) if level is None: - defer.returnValue(0) + return 0 else: - defer.returnValue(int(level)) + return int(level) @defer.inlineCallbacks @@ -224,7 +224,7 @@ def _get_auth_chain_difference(state_sets, event_map, state_res_store): intersection = set(auth_sets[0]).intersection(*auth_sets[1:]) union = set().union(*auth_sets) - defer.returnValue(union - intersection) + return union - intersection def _seperate(state_sets): @@ -343,7 +343,7 @@ def _reverse_topological_power_sort(event_ids, event_map, state_res_store, auth_ it = lexicographical_topological_sort(graph, key=_get_power_order) sorted_events = list(it) - defer.returnValue(sorted_events) + return sorted_events @defer.inlineCallbacks @@ -396,7 +396,7 @@ def _iterative_auth_checks( except AuthError: pass - defer.returnValue(resolved_state) + return resolved_state @defer.inlineCallbacks @@ -439,7 +439,7 @@ def _mainline_sort(event_ids, resolved_power_event_id, event_map, state_res_stor event_ids.sort(key=lambda ev_id: order_map[ev_id]) - defer.returnValue(event_ids) + return event_ids @defer.inlineCallbacks @@ -462,7 +462,7 @@ def _get_mainline_depth_for_event(event, mainline_map, event_map, state_res_stor while event: depth = mainline_map.get(event.event_id) if depth is not None: - defer.returnValue(depth) + return depth auth_events = event.auth_event_ids() event = None @@ -474,7 +474,7 @@ def _get_mainline_depth_for_event(event, mainline_map, event_map, state_res_stor break # Didn't find a power level auth event, so we just return 0 - defer.returnValue(0) + return 0 @defer.inlineCallbacks @@ -493,7 +493,7 @@ def _get_event(event_id, event_map, state_res_store): if event_id not in event_map: events = yield 
state_res_store.get_events([event_id], allow_rejected=True) event_map.update(events) - defer.returnValue(event_map[event_id]) + return event_map[event_id] def lexicographical_topological_sort(graph, key): diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 86a333a919..e7f6ea7286 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -498,7 +498,7 @@ class DataStore( ) count = yield self.runInteraction("get_users_paginate", self.get_user_count_txn) retval = {"users": users, "total": count} - defer.returnValue(retval) + return retval def search_users(self, term): """Function to search users list for one or more users with diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index a7c93efa46..489ce82fae 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -513,7 +513,7 @@ class SQLBaseStore(object): after_callback(*after_args, **after_kwargs) raise - defer.returnValue(result) + return result @defer.inlineCallbacks def runWithConnection(self, func, *args, **kwargs): @@ -553,7 +553,7 @@ class SQLBaseStore(object): with PreserveLoggingContext(): result = yield self._db_pool.runWithConnection(inner_func, *args, **kwargs) - defer.returnValue(result) + return result @staticmethod def cursor_to_dict(cursor): @@ -615,8 +615,8 @@ class SQLBaseStore(object): # a cursor after we receive an error from the db. if not or_ignore: raise - defer.returnValue(False) - defer.returnValue(True) + return False + return True @staticmethod def _simple_insert_txn(txn, table, values): @@ -708,7 +708,7 @@ class SQLBaseStore(object): insertion_values, lock=lock, ) - defer.returnValue(result) + return result except self.database_engine.module.IntegrityError as e: attempts += 1 if attempts >= 5: @@ -1121,7 +1121,7 @@ class SQLBaseStore(object): results = [] if not iterable: - defer.returnValue(results) + return results # iterables can not be sliced, so convert it to a list first it_list = list(iterable) @@ -1142,7 +1142,7 @@ class SQLBaseStore(object): results.extend(rows) - defer.returnValue(results) + return results @classmethod def _simple_select_many_txn(cls, txn, table, column, iterable, keyvalues, retcols): diff --git a/synapse/storage/account_data.py b/synapse/storage/account_data.py index 8394389073..9fa5b4f3d6 100644 --- a/synapse/storage/account_data.py +++ b/synapse/storage/account_data.py @@ -111,9 +111,9 @@ class AccountDataWorkerStore(SQLBaseStore): ) if result: - defer.returnValue(json.loads(result)) + return json.loads(result) else: - defer.returnValue(None) + return None @cached(num_args=2) def get_account_data_for_room(self, user_id, room_id): @@ -264,11 +264,9 @@ class AccountDataWorkerStore(SQLBaseStore): on_invalidate=cache_context.invalidate, ) if not ignored_account_data: - defer.returnValue(False) + return False - defer.returnValue( - ignored_user_id in ignored_account_data.get("ignored_users", {}) - ) + return ignored_user_id in ignored_account_data.get("ignored_users", {}) class AccountDataStore(AccountDataWorkerStore): @@ -332,7 +330,7 @@ class AccountDataStore(AccountDataWorkerStore): ) result = self._account_data_id_gen.get_current_token() - defer.returnValue(result) + return result @defer.inlineCallbacks def add_account_data_for_user(self, user_id, account_data_type, content): @@ -373,7 +371,7 @@ class AccountDataStore(AccountDataWorkerStore): ) result = self._account_data_id_gen.get_current_token() - defer.returnValue(result) + return result def _update_max_stream_id(self, next_id): """Update the max stream_id diff 
--git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index eb329ebd8b..05d9c05c3f 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -145,7 +145,7 @@ class ApplicationServiceTransactionWorkerStore( for service in as_list: if service.id == res["as_id"]: services.append(service) - defer.returnValue(services) + return services @defer.inlineCallbacks def get_appservice_state(self, service): @@ -164,9 +164,9 @@ class ApplicationServiceTransactionWorkerStore( desc="get_appservice_state", ) if result: - defer.returnValue(result.get("state")) + return result.get("state") return - defer.returnValue(None) + return None def set_appservice_state(self, service, state): """Set the application service state. @@ -298,15 +298,13 @@ class ApplicationServiceTransactionWorkerStore( ) if not entry: - defer.returnValue(None) + return None event_ids = json.loads(entry["event_ids"]) events = yield self.get_events_as_list(event_ids) - defer.returnValue( - AppServiceTransaction(service=service, id=entry["txn_id"], events=events) - ) + return AppServiceTransaction(service=service, id=entry["txn_id"], events=events) def _get_last_txn(self, txn, service_id): txn.execute( @@ -360,7 +358,7 @@ class ApplicationServiceTransactionWorkerStore( events = yield self.get_events_as_list(event_ids) - defer.returnValue((upper_bound, events)) + return (upper_bound, events) class ApplicationServiceTransactionStore(ApplicationServiceTransactionWorkerStore): diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index 50f913a414..e5f0668f09 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -115,7 +115,7 @@ class BackgroundUpdateStore(SQLBaseStore): " Unscheduling background update task." ) self._all_done = True - defer.returnValue(None) + return None @defer.inlineCallbacks def has_completed_background_updates(self): @@ -127,11 +127,11 @@ class BackgroundUpdateStore(SQLBaseStore): # if we've previously determined that there is nothing left to do, that # is easy if self._all_done: - defer.returnValue(True) + return True # obviously, if we have things in our queue, we're not done. if self._background_update_queue: - defer.returnValue(False) + return False # otherwise, check if there are updates to be run. This is important, # as we may be running on a worker which doesn't perform the bg updates @@ -144,9 +144,9 @@ class BackgroundUpdateStore(SQLBaseStore): ) if not updates: self._all_done = True - defer.returnValue(True) + return True - defer.returnValue(False) + return False @defer.inlineCallbacks def do_next_background_update(self, desired_duration_ms): @@ -173,14 +173,14 @@ class BackgroundUpdateStore(SQLBaseStore): if not self._background_update_queue: # no work left to do - defer.returnValue(None) + return None # pop from the front, and add back to the back update_name = self._background_update_queue.pop(0) self._background_update_queue.append(update_name) res = yield self._do_background_update(update_name, desired_duration_ms) - defer.returnValue(res) + return res @defer.inlineCallbacks def _do_background_update(self, update_name, desired_duration_ms): @@ -231,7 +231,7 @@ class BackgroundUpdateStore(SQLBaseStore): performance.update(items_updated, duration_ms) - defer.returnValue(len(self._background_update_performance)) + return len(self._background_update_performance) def register_background_update_handler(self, update_name, update_handler): """Register a handler for doing a background update. 
@@ -266,7 +266,7 @@ class BackgroundUpdateStore(SQLBaseStore): @defer.inlineCallbacks def noop_update(progress, batch_size): yield self._end_background_update(update_name) - defer.returnValue(1) + return 1 self.register_background_update_handler(update_name, noop_update) @@ -370,7 +370,7 @@ class BackgroundUpdateStore(SQLBaseStore): logger.info("Adding index %s to %s", index_name, table) yield self.runWithConnection(runner) yield self._end_background_update(update_name) - defer.returnValue(1) + return 1 self.register_background_update_handler(update_name, updater) diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py index bda68de5be..6db8c54077 100644 --- a/synapse/storage/client_ips.py +++ b/synapse/storage/client_ips.py @@ -104,7 +104,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore): yield self.runWithConnection(f) yield self._end_background_update("user_ips_drop_nonunique_index") - defer.returnValue(1) + return 1 @defer.inlineCallbacks def _analyze_user_ip(self, progress, batch_size): @@ -121,7 +121,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore): yield self._end_background_update("user_ips_analyze") - defer.returnValue(1) + return 1 @defer.inlineCallbacks def _remove_user_ip_dupes(self, progress, batch_size): @@ -291,7 +291,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore): if last: yield self._end_background_update("user_ips_remove_dupes") - defer.returnValue(batch_size) + return batch_size @defer.inlineCallbacks def insert_client_ip( @@ -401,7 +401,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore): "device_id": did, "last_seen": last_seen, } - defer.returnValue(ret) + return ret @classmethod def _get_last_client_ip_by_device_txn(cls, txn, user_id, device_id, retcols): @@ -461,14 +461,12 @@ class ClientIpStore(background_updates.BackgroundUpdateStore): ((row["access_token"], row["ip"]), (row["user_agent"], row["last_seen"])) for row in rows ) - defer.returnValue( - list( - { - "access_token": access_token, - "ip": ip, - "user_agent": user_agent, - "last_seen": last_seen, - } - for (access_token, ip), (user_agent, last_seen) in iteritems(results) - ) + return list( + { + "access_token": access_token, + "ip": ip, + "user_agent": user_agent, + "last_seen": last_seen, + } + for (access_token, ip), (user_agent, last_seen) in iteritems(results) ) diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py index 4ea0deea4f..79bb0ea46d 100644 --- a/synapse/storage/deviceinbox.py +++ b/synapse/storage/deviceinbox.py @@ -92,7 +92,7 @@ class DeviceInboxWorkerStore(SQLBaseStore): user_id, last_deleted_stream_id ) if not has_changed: - defer.returnValue(0) + return 0 def delete_messages_for_device_txn(txn): sql = ( @@ -115,7 +115,7 @@ class DeviceInboxWorkerStore(SQLBaseStore): last_deleted_stream_id, up_to_stream_id ) - defer.returnValue(count) + return count def get_new_device_msgs_for_remote( self, destination, last_stream_id, current_stream_id, limit @@ -263,7 +263,7 @@ class DeviceInboxStore(DeviceInboxWorkerStore, BackgroundUpdateStore): destination, stream_id ) - defer.returnValue(self._device_inbox_id_gen.get_current_token()) + return self._device_inbox_id_gen.get_current_token() @defer.inlineCallbacks def add_messages_from_remote_to_device_inbox( @@ -312,7 +312,7 @@ class DeviceInboxStore(DeviceInboxWorkerStore, BackgroundUpdateStore): for user_id in local_messages_by_user_then_device.keys(): self._device_inbox_stream_cache.entity_has_changed(user_id, stream_id) - 
defer.returnValue(stream_id) + return stream_id def _add_messages_to_local_device_inbox_txn( self, txn, stream_id, messages_by_user_then_device @@ -426,4 +426,4 @@ class DeviceInboxStore(DeviceInboxWorkerStore, BackgroundUpdateStore): yield self._end_background_update(self.DEVICE_INBOX_STREAM_ID) - defer.returnValue(1) + return 1 diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index d2b113a4e7..8f72d92895 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -71,7 +71,7 @@ class DeviceWorkerStore(SQLBaseStore): desc="get_devices_by_user", ) - defer.returnValue({d["device_id"]: d for d in devices}) + return {d["device_id"]: d for d in devices} @defer.inlineCallbacks def get_devices_by_remote(self, destination, from_stream_id, limit): @@ -88,7 +88,7 @@ class DeviceWorkerStore(SQLBaseStore): destination, int(from_stream_id) ) if not has_changed: - defer.returnValue((now_stream_id, [])) + return (now_stream_id, []) # We retrieve n+1 devices from the list of outbound pokes where n is # our outbound device update limit. We then check if the very last @@ -111,7 +111,7 @@ class DeviceWorkerStore(SQLBaseStore): # Return an empty list if there are no updates if not updates: - defer.returnValue((now_stream_id, [])) + return (now_stream_id, []) # if we have exceeded the limit, we need to exclude any results with the # same stream_id as the last row. @@ -147,13 +147,13 @@ class DeviceWorkerStore(SQLBaseStore): # skip that stream_id and return an empty list, and continue with the next # stream_id next time. if not query_map: - defer.returnValue((stream_id_cutoff, [])) + return (stream_id_cutoff, []) results = yield self._get_device_update_edus_by_remote( destination, from_stream_id, query_map ) - defer.returnValue((now_stream_id, results)) + return (now_stream_id, results) def _get_devices_by_remote_txn( self, txn, destination, from_stream_id, now_stream_id, limit @@ -232,7 +232,7 @@ class DeviceWorkerStore(SQLBaseStore): results.append(result) - defer.returnValue(results) + return results def _get_last_device_update_for_remote_user( self, destination, user_id, from_stream_id @@ -330,7 +330,7 @@ class DeviceWorkerStore(SQLBaseStore): else: results[user_id] = yield self._get_cached_devices_for_user(user_id) - defer.returnValue((user_ids_not_in_cache, results)) + return (user_ids_not_in_cache, results) @cachedInlineCallbacks(num_args=2, tree=True) def _get_cached_user_device(self, user_id, device_id): @@ -340,7 +340,7 @@ class DeviceWorkerStore(SQLBaseStore): retcol="content", desc="_get_cached_user_device", ) - defer.returnValue(db_to_json(content)) + return db_to_json(content) @cachedInlineCallbacks() def _get_cached_devices_for_user(self, user_id): @@ -350,9 +350,9 @@ class DeviceWorkerStore(SQLBaseStore): retcols=("device_id", "content"), desc="_get_cached_devices_for_user", ) - defer.returnValue( - {device["device_id"]: db_to_json(device["content"]) for device in devices} - ) + return { + device["device_id"]: db_to_json(device["content"]) for device in devices + } def get_devices_with_keys_by_user(self, user_id): """Get all devices (with any device keys) for a user @@ -482,7 +482,7 @@ class DeviceWorkerStore(SQLBaseStore): results = {user_id: None for user_id in user_ids} results.update({row["user_id"]: row["stream_id"] for row in rows}) - defer.returnValue(results) + return results class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore): @@ -543,7 +543,7 @@ class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore): """ key = (user_id, device_id) if 
self.device_id_exists_cache.get(key, None): - defer.returnValue(False) + return False try: inserted = yield self._simple_insert( @@ -557,7 +557,7 @@ class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore): or_ignore=True, ) self.device_id_exists_cache.prefill(key, True) - defer.returnValue(inserted) + return inserted except Exception as e: logger.error( "store_device with device_id=%s(%r) user_id=%s(%r)" @@ -780,7 +780,7 @@ class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore): hosts, stream_id, ) - defer.returnValue(stream_id) + return stream_id def _add_device_change_txn(self, txn, user_id, device_ids, hosts, stream_id): now = self._clock.time_msec() @@ -889,4 +889,4 @@ class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore): yield self.runWithConnection(f) yield self._end_background_update(DROP_DEVICE_LIST_STREAMS_NON_UNIQUE_INDEXES) - defer.returnValue(1) + return 1 diff --git a/synapse/storage/directory.py b/synapse/storage/directory.py index 201bbd430c..e966a73f3d 100644 --- a/synapse/storage/directory.py +++ b/synapse/storage/directory.py @@ -46,7 +46,7 @@ class DirectoryWorkerStore(SQLBaseStore): ) if not room_id: - defer.returnValue(None) + return None return servers = yield self._simple_select_onecol( @@ -57,10 +57,10 @@ class DirectoryWorkerStore(SQLBaseStore): ) if not servers: - defer.returnValue(None) + return None return - defer.returnValue(RoomAliasMapping(room_id, room_alias.to_string(), servers)) + return RoomAliasMapping(room_id, room_alias.to_string(), servers) def get_room_alias_creator(self, room_alias): return self._simple_select_one_onecol( @@ -125,7 +125,7 @@ class DirectoryStore(DirectoryWorkerStore): raise SynapseError( 409, "Room alias %s already exists" % room_alias.to_string() ) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def delete_room_alias(self, room_alias): @@ -133,7 +133,7 @@ class DirectoryStore(DirectoryWorkerStore): "delete_room_alias", self._delete_room_alias_txn, room_alias ) - defer.returnValue(room_id) + return room_id def _delete_room_alias_txn(self, txn, room_alias): txn.execute( diff --git a/synapse/storage/e2e_room_keys.py b/synapse/storage/e2e_room_keys.py index f40ef2ab64..99128f2df7 100644 --- a/synapse/storage/e2e_room_keys.py +++ b/synapse/storage/e2e_room_keys.py @@ -61,7 +61,7 @@ class EndToEndRoomKeyStore(SQLBaseStore): row["session_data"] = json.loads(row["session_data"]) - defer.returnValue(row) + return row @defer.inlineCallbacks def set_e2e_room_key(self, user_id, version, room_id, session_id, room_key): @@ -118,7 +118,7 @@ class EndToEndRoomKeyStore(SQLBaseStore): try: version = int(version) except ValueError: - defer.returnValue({"rooms": {}}) + return {"rooms": {}} keyvalues = {"user_id": user_id, "version": version} if room_id: @@ -151,7 +151,7 @@ class EndToEndRoomKeyStore(SQLBaseStore): "session_data": json.loads(row["session_data"]), } - defer.returnValue(sessions) + return sessions @defer.inlineCallbacks def delete_e2e_room_keys(self, user_id, version, room_id=None, session_id=None): diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index 2fabb9e2cb..1e07474e70 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -41,7 +41,7 @@ class EndToEndKeyWorkerStore(SQLBaseStore): dict containing "key_json", "device_display_name". 
""" if not query_list: - defer.returnValue({}) + return {} results = yield self.runInteraction( "get_e2e_device_keys", @@ -55,7 +55,7 @@ class EndToEndKeyWorkerStore(SQLBaseStore): for device_id, device_info in iteritems(device_keys): device_info["keys"] = db_to_json(device_info.pop("key_json")) - defer.returnValue(results) + return results def _get_e2e_device_keys_txn( self, txn, query_list, include_all_devices=False, include_deleted_devices=False @@ -130,9 +130,7 @@ class EndToEndKeyWorkerStore(SQLBaseStore): desc="add_e2e_one_time_keys_check", ) - defer.returnValue( - {(row["algorithm"], row["key_id"]): row["key_json"] for row in rows} - ) + return {(row["algorithm"], row["key_id"]): row["key_json"] for row in rows} @defer.inlineCallbacks def add_e2e_one_time_keys(self, user_id, device_id, time_now, new_keys): diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index cb4478342f..4f500d893e 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -131,9 +131,9 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas ) if not rows: - defer.returnValue(0) + return 0 else: - defer.returnValue(max(row["depth"] for row in rows)) + return max(row["depth"] for row in rows) def _get_oldest_events_in_room_txn(self, txn, room_id): return self._simple_select_onecol_txn( @@ -169,7 +169,7 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas # make sure that we don't completely ignore the older events. res = res[0:5] + random.sample(res[5:], 5) - defer.returnValue(res) + return res def get_latest_event_ids_and_hashes_in_room(self, room_id): """ @@ -411,7 +411,7 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas limit, ) events = yield self.get_events_as_list(ids) - defer.returnValue(events) + return events def _get_missing_events(self, txn, room_id, earliest_events, latest_events, limit): @@ -463,7 +463,7 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas desc="get_successor_events", ) - defer.returnValue([row["event_id"] for row in rows]) + return [row["event_id"] for row in rows] class EventFederationStore(EventFederationWorkerStore): @@ -654,4 +654,4 @@ class EventFederationStore(EventFederationWorkerStore): if not result: yield self._end_background_update(self.EVENT_AUTH_STATE_ONLY) - defer.returnValue(batch_size) + return batch_size diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py index dcfb67e029..22025effbc 100644 --- a/synapse/storage/event_push_actions.py +++ b/synapse/storage/event_push_actions.py @@ -100,7 +100,7 @@ class EventPushActionsWorkerStore(SQLBaseStore): user_id, last_read_event_id, ) - defer.returnValue(ret) + return ret def _get_unread_counts_by_receipt_txn( self, txn, room_id, user_id, last_read_event_id @@ -178,7 +178,7 @@ class EventPushActionsWorkerStore(SQLBaseStore): return [r[0] for r in txn] ret = yield self.runInteraction("get_push_action_users_in_range", f) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def get_unread_push_actions_for_user_in_range_for_http( @@ -279,7 +279,7 @@ class EventPushActionsWorkerStore(SQLBaseStore): # Take only up to the limit. We have to stop at the limit because # one of the subqueries may have hit the limit. 
- defer.returnValue(notifs[:limit]) + return notifs[:limit] @defer.inlineCallbacks def get_unread_push_actions_for_user_in_range_for_email( @@ -380,7 +380,7 @@ class EventPushActionsWorkerStore(SQLBaseStore): notifs.sort(key=lambda r: -(r["received_ts"] or 0)) # Now return the first `limit` - defer.returnValue(notifs[:limit]) + return notifs[:limit] def get_if_maybe_push_in_range_for_user(self, user_id, min_stream_ordering): """A fast check to see if there might be something to push for the @@ -477,7 +477,7 @@ class EventPushActionsWorkerStore(SQLBaseStore): keyvalues={"event_id": event_id}, desc="remove_push_actions_from_staging", ) - defer.returnValue(res) + return res except Exception: # this method is called from an exception handler, so propagating # another exception here really isn't helpful - there's nothing @@ -732,7 +732,7 @@ class EventPushActionsStore(EventPushActionsWorkerStore): push_actions = yield self.runInteraction("get_push_actions_for_user", f) for pa in push_actions: pa["actions"] = _deserialize_action(pa["actions"], pa["highlight"]) - defer.returnValue(push_actions) + return push_actions @defer.inlineCallbacks def get_time_of_last_push_action_before(self, stream_ordering): @@ -749,7 +749,7 @@ class EventPushActionsStore(EventPushActionsWorkerStore): return txn.fetchone() result = yield self.runInteraction("get_time_of_last_push_action_before", f) - defer.returnValue(result[0] if result else None) + return result[0] if result else None @defer.inlineCallbacks def get_latest_push_action_stream_ordering(self): @@ -758,7 +758,7 @@ class EventPushActionsStore(EventPushActionsWorkerStore): return txn.fetchone() result = yield self.runInteraction("get_latest_push_action_stream_ordering", f) - defer.returnValue(result[0] or 0) + return result[0] or 0 def _remove_push_actions_for_event_id_txn(self, txn, room_id, event_id): # Sad that we have to blow away the cache for the whole room here diff --git a/synapse/storage/events.py b/synapse/storage/events.py index b70457bfc6..88c0180116 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -223,7 +223,7 @@ def _retry_on_integrity_error(func): except self.database_engine.module.IntegrityError: logger.exception("IntegrityError, retrying.") res = yield func(self, *args, delete_existing=True, **kwargs) - defer.returnValue(res) + return res return f @@ -309,7 +309,7 @@ class EventsStore( max_persisted_id = yield self._stream_id_gen.get_current_token() - defer.returnValue(max_persisted_id) + return max_persisted_id @defer.inlineCallbacks @log_function @@ -334,7 +334,7 @@ class EventsStore( yield make_deferred_yieldable(deferred) max_persisted_id = yield self._stream_id_gen.get_current_token() - defer.returnValue((event.internal_metadata.stream_ordering, max_persisted_id)) + return (event.internal_metadata.stream_ordering, max_persisted_id) def _maybe_start_persisting(self, room_id): @defer.inlineCallbacks @@ -595,7 +595,7 @@ class EventsStore( stale = latest_event_ids & result stale_forward_extremities_counter.observe(len(stale)) - defer.returnValue(result) + return result @defer.inlineCallbacks def _get_events_which_are_prevs(self, event_ids): @@ -633,7 +633,7 @@ class EventsStore( "_get_events_which_are_prevs", _get_events_which_are_prevs_txn, chunk ) - defer.returnValue(results) + return results @defer.inlineCallbacks def _get_prevs_before_rejected(self, event_ids): @@ -695,7 +695,7 @@ class EventsStore( "_get_prevs_before_rejected", _get_prevs_before_rejected_txn, chunk ) - defer.returnValue(existing_prevs) + 
return existing_prevs @defer.inlineCallbacks def _get_new_state_after_events( @@ -796,7 +796,7 @@ class EventsStore( # If they old and new groups are the same then we don't need to do # anything. if old_state_groups == new_state_groups: - defer.returnValue((None, None)) + return (None, None) if len(new_state_groups) == 1 and len(old_state_groups) == 1: # If we're going from one state group to another, lets check if @@ -813,7 +813,7 @@ class EventsStore( # the current state in memory then lets also return that, # but it doesn't matter if we don't. new_state = state_groups_map.get(new_state_group) - defer.returnValue((new_state, delta_ids)) + return (new_state, delta_ids) # Now that we have calculated new_state_groups we need to get # their state IDs so we can resolve to a single state set. @@ -825,7 +825,7 @@ class EventsStore( if len(new_state_groups) == 1: # If there is only one state group, then we know what the current # state is. - defer.returnValue((state_groups_map[new_state_groups.pop()], None)) + return (state_groups_map[new_state_groups.pop()], None) # Ok, we need to defer to the state handler to resolve our state sets. @@ -854,7 +854,7 @@ class EventsStore( state_res_store=StateResolutionStore(self), ) - defer.returnValue((res.state, None)) + return (res.state, None) @defer.inlineCallbacks def _calculate_state_delta(self, room_id, current_state): @@ -877,7 +877,7 @@ class EventsStore( if ev_id != existing_state.get(key) } - defer.returnValue((to_delete, to_insert)) + return (to_delete, to_insert) @log_function def _persist_events_txn( @@ -1564,7 +1564,7 @@ class EventsStore( return count ret = yield self.runInteraction("count_messages", _count_messages) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def count_daily_sent_messages(self): @@ -1585,7 +1585,7 @@ class EventsStore( return count ret = yield self.runInteraction("count_daily_sent_messages", _count_messages) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def count_daily_active_rooms(self): @@ -1600,7 +1600,7 @@ class EventsStore( return count ret = yield self.runInteraction("count_daily_active_rooms", _count) - defer.returnValue(ret) + return ret def get_current_backfill_token(self): """The current minimum token that backfilled events have reached""" @@ -2183,7 +2183,7 @@ class EventsStore( """ to_1, so_1 = yield self._get_event_ordering(event_id1) to_2, so_2 = yield self._get_event_ordering(event_id2) - defer.returnValue((to_1, so_1) > (to_2, so_2)) + return (to_1, so_1) > (to_2, so_2) @cachedInlineCallbacks(max_entries=5000) def _get_event_ordering(self, event_id): @@ -2197,9 +2197,7 @@ class EventsStore( if not res: raise SynapseError(404, "Could not find event %s" % (event_id,)) - defer.returnValue( - (int(res["topological_ordering"]), int(res["stream_ordering"])) - ) + return (int(res["topological_ordering"]), int(res["stream_ordering"])) def get_all_updated_current_state_deltas(self, from_token, to_token, limit): def get_all_updated_current_state_deltas_txn(txn): diff --git a/synapse/storage/events_bg_updates.py b/synapse/storage/events_bg_updates.py index 1ce21d190c..6587f31e2b 100644 --- a/synapse/storage/events_bg_updates.py +++ b/synapse/storage/events_bg_updates.py @@ -135,7 +135,7 @@ class EventsBackgroundUpdatesStore(BackgroundUpdateStore): if not result: yield self._end_background_update(self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME) - defer.returnValue(result) + return result @defer.inlineCallbacks def _background_reindex_origin_server_ts(self, progress, batch_size): @@ -212,7 +212,7 
@@ class EventsBackgroundUpdatesStore(BackgroundUpdateStore): if not result: yield self._end_background_update(self.EVENT_ORIGIN_SERVER_TS_NAME) - defer.returnValue(result) + return result @defer.inlineCallbacks def _cleanup_extremities_bg_update(self, progress, batch_size): @@ -396,4 +396,4 @@ class EventsBackgroundUpdatesStore(BackgroundUpdateStore): "_cleanup_extremities_bg_update_drop_table", _drop_table_txn ) - defer.returnValue(num_handled) + return num_handled diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 858fc755a1..44441957db 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -157,7 +157,7 @@ class EventsWorkerStore(SQLBaseStore): if event is None and not allow_none: raise NotFoundError("Could not find event %s" % (event_id,)) - defer.returnValue(event) + return event @defer.inlineCallbacks def get_events( @@ -187,7 +187,7 @@ class EventsWorkerStore(SQLBaseStore): allow_rejected=allow_rejected, ) - defer.returnValue({e.event_id: e for e in events}) + return {e.event_id: e for e in events} @defer.inlineCallbacks def get_events_as_list( @@ -217,7 +217,7 @@ class EventsWorkerStore(SQLBaseStore): """ if not event_ids: - defer.returnValue([]) + return [] # there may be duplicates so we cast the list to a set event_entry_map = yield self._get_events_from_cache_or_db( @@ -305,7 +305,7 @@ class EventsWorkerStore(SQLBaseStore): event.unsigned["prev_content"] = prev.content event.unsigned["prev_sender"] = prev.sender - defer.returnValue(events) + return events @defer.inlineCallbacks def _get_events_from_cache_or_db(self, event_ids, allow_rejected=False): @@ -452,7 +452,7 @@ class EventsWorkerStore(SQLBaseStore): without having to create a new transaction for each request for events. """ if not events: - defer.returnValue({}) + return {} events_d = defer.Deferred() with self._event_fetch_lock: @@ -496,7 +496,7 @@ class EventsWorkerStore(SQLBaseStore): ) ) - defer.returnValue({e.event.event_id: e for e in res if e}) + return {e.event.event_id: e for e in res if e} def _fetch_event_rows(self, txn, event_ids): """Fetch event rows from the database @@ -609,7 +609,7 @@ class EventsWorkerStore(SQLBaseStore): self._get_event_cache.prefill((original_ev.event_id,), cache_entry) - defer.returnValue(cache_entry) + return cache_entry @defer.inlineCallbacks def _maybe_redact_event_row(self, original_ev, redactions): @@ -679,7 +679,7 @@ class EventsWorkerStore(SQLBaseStore): desc="have_events_in_timeline", ) - defer.returnValue(set(r["event_id"] for r in rows)) + return set(r["event_id"] for r in rows) @defer.inlineCallbacks def have_seen_events(self, event_ids): @@ -705,7 +705,7 @@ class EventsWorkerStore(SQLBaseStore): input_iterator = iter(event_ids) for chunk in iter(lambda: list(itertools.islice(input_iterator, 100)), []): yield self.runInteraction("have_seen_events", have_seen_events_txn, chunk) - defer.returnValue(results) + return results def get_seen_events_with_rejections(self, event_ids): """Given a list of event ids, check if we rejected them. @@ -816,4 +816,4 @@ class EventsWorkerStore(SQLBaseStore): # it. 
complexity_v1 = round(state_events / 500, 2) - defer.returnValue({"v1": complexity_v1}) + return {"v1": complexity_v1} diff --git a/synapse/storage/filtering.py b/synapse/storage/filtering.py index b195dc66a0..23b48f6cea 100644 --- a/synapse/storage/filtering.py +++ b/synapse/storage/filtering.py @@ -15,8 +15,6 @@ from canonicaljson import encode_canonical_json -from twisted.internet import defer - from synapse.api.errors import Codes, SynapseError from synapse.util.caches.descriptors import cachedInlineCallbacks @@ -41,7 +39,7 @@ class FilteringStore(SQLBaseStore): desc="get_user_filter", ) - defer.returnValue(db_to_json(def_json)) + return db_to_json(def_json) def add_user_filter(self, user_localpart, user_filter): def_json = encode_canonical_json(user_filter) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 73e6fc6de2..15b01c6958 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -307,15 +307,13 @@ class GroupServerStore(SQLBaseStore): desc="get_group_categories", ) - defer.returnValue( - { - row["category_id"]: { - "is_public": row["is_public"], - "profile": json.loads(row["profile"]), - } - for row in rows + return { + row["category_id"]: { + "is_public": row["is_public"], + "profile": json.loads(row["profile"]), } - ) + for row in rows + } @defer.inlineCallbacks def get_group_category(self, group_id, category_id): @@ -328,7 +326,7 @@ class GroupServerStore(SQLBaseStore): category["profile"] = json.loads(category["profile"]) - defer.returnValue(category) + return category def upsert_group_category(self, group_id, category_id, profile, is_public): """Add/update room category for group @@ -370,15 +368,13 @@ class GroupServerStore(SQLBaseStore): desc="get_group_roles", ) - defer.returnValue( - { - row["role_id"]: { - "is_public": row["is_public"], - "profile": json.loads(row["profile"]), - } - for row in rows + return { + row["role_id"]: { + "is_public": row["is_public"], + "profile": json.loads(row["profile"]), } - ) + for row in rows + } @defer.inlineCallbacks def get_group_role(self, group_id, role_id): @@ -391,7 +387,7 @@ class GroupServerStore(SQLBaseStore): role["profile"] = json.loads(role["profile"]) - defer.returnValue(role) + return role def upsert_group_role(self, group_id, role_id, profile, is_public): """Add/remove user role @@ -960,7 +956,7 @@ class GroupServerStore(SQLBaseStore): _register_user_group_membership_txn, next_id, ) - defer.returnValue(res) + return res @defer.inlineCallbacks def create_group( @@ -1057,9 +1053,9 @@ class GroupServerStore(SQLBaseStore): now = int(self._clock.time_msec()) if row and now < row["valid_until_ms"]: - defer.returnValue(json.loads(row["attestation_json"])) + return json.loads(row["attestation_json"]) - defer.returnValue(None) + return None def get_joined_groups(self, user_id): return self._simple_select_onecol( diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 081564360f..752e9788a2 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -173,7 +173,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): ) if user_id: count = count + 1 - defer.returnValue(count) + return count @defer.inlineCallbacks def upsert_monthly_active_user(self, user_id): diff --git a/synapse/storage/presence.py b/synapse/storage/presence.py index 42ec8c6bb8..1a0f2d5768 100644 --- a/synapse/storage/presence.py +++ b/synapse/storage/presence.py @@ -90,9 +90,7 @@ class PresenceStore(SQLBaseStore): 
presence_states, ) - defer.returnValue( - (stream_orderings[-1], self._presence_id_gen.get_current_token()) - ) + return (stream_orderings[-1], self._presence_id_gen.get_current_token()) def _update_presence_txn(self, txn, stream_orderings, presence_states): for stream_id, state in zip(stream_orderings, presence_states): @@ -180,7 +178,7 @@ class PresenceStore(SQLBaseStore): for row in rows: row["currently_active"] = bool(row["currently_active"]) - defer.returnValue({row["user_id"]: UserPresenceState(**row) for row in rows}) + return {row["user_id"]: UserPresenceState(**row) for row in rows} def get_current_presence_token(self): return self._presence_id_gen.get_current_token() diff --git a/synapse/storage/profile.py b/synapse/storage/profile.py index 0ff392bdb4..8a5d8e9b18 100644 --- a/synapse/storage/profile.py +++ b/synapse/storage/profile.py @@ -34,15 +34,13 @@ class ProfileWorkerStore(SQLBaseStore): except StoreError as e: if e.code == 404: # no match - defer.returnValue(ProfileInfo(None, None)) + return ProfileInfo(None, None) return else: raise - defer.returnValue( - ProfileInfo( - avatar_url=profile["avatar_url"], display_name=profile["displayname"] - ) + return ProfileInfo( + avatar_url=profile["avatar_url"], display_name=profile["displayname"] ) def get_profile_displayname(self, user_localpart): @@ -168,7 +166,7 @@ class ProfileStore(ProfileWorkerStore): ) if res: - defer.returnValue(True) + return True res = yield self._simple_select_one_onecol( table="group_invites", @@ -179,4 +177,4 @@ class ProfileStore(ProfileWorkerStore): ) if res: - defer.returnValue(True) + return True diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 98cec8c82b..a6517c4cf3 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -120,7 +120,7 @@ class PushRulesWorkerStore( rules = _load_rules(rows, enabled_map) - defer.returnValue(rules) + return rules @cachedInlineCallbacks(max_entries=5000) def get_push_rules_enabled_for_user(self, user_id): @@ -130,9 +130,7 @@ class PushRulesWorkerStore( retcols=("user_name", "rule_id", "enabled"), desc="get_push_rules_enabled_for_user", ) - defer.returnValue( - {r["rule_id"]: False if r["enabled"] == 0 else True for r in results} - ) + return {r["rule_id"]: False if r["enabled"] == 0 else True for r in results} def have_push_rules_changed_for_user(self, user_id, last_id): if not self.push_rules_stream_cache.has_entity_changed(user_id, last_id): @@ -160,7 +158,7 @@ class PushRulesWorkerStore( ) def bulk_get_push_rules(self, user_ids): if not user_ids: - defer.returnValue({}) + return {} results = {user_id: [] for user_id in user_ids} @@ -182,7 +180,7 @@ class PushRulesWorkerStore( for user_id, rules in results.items(): results[user_id] = _load_rules(rules, enabled_map_by_user.get(user_id, {})) - defer.returnValue(results) + return results @defer.inlineCallbacks def move_push_rule_from_room_to_room(self, new_room_id, user_id, rule): @@ -253,7 +251,7 @@ class PushRulesWorkerStore( result = yield self._bulk_get_push_rules_for_room( event.room_id, state_group, current_state_ids, event=event ) - defer.returnValue(result) + return result @cachedInlineCallbacks(num_args=2, cache_context=True) def _bulk_get_push_rules_for_room( @@ -312,7 +310,7 @@ class PushRulesWorkerStore( rules_by_user = {k: v for k, v in rules_by_user.items() if v is not None} - defer.returnValue(rules_by_user) + return rules_by_user @cachedList( cached_method_name="get_push_rules_enabled_for_user", @@ -322,7 +320,7 @@ class PushRulesWorkerStore( ) def 
bulk_get_push_rules_enabled(self, user_ids): if not user_ids: - defer.returnValue({}) + return {} results = {user_id: {} for user_id in user_ids} @@ -336,7 +334,7 @@ class PushRulesWorkerStore( for row in rows: enabled = bool(row["enabled"]) results.setdefault(row["user_name"], {})[row["rule_id"]] = enabled - defer.returnValue(results) + return results class PushRuleStore(PushRulesWorkerStore): diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index cfe0a94330..be3d4d9ded 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -63,7 +63,7 @@ class PusherWorkerStore(SQLBaseStore): ret = yield self._simple_select_one_onecol( "pushers", {"user_name": user_id}, "id", allow_none=True ) - defer.returnValue(ret is not None) + return ret is not None def get_pushers_by_app_id_and_pushkey(self, app_id, pushkey): return self.get_pushers_by({"app_id": app_id, "pushkey": pushkey}) @@ -95,7 +95,7 @@ class PusherWorkerStore(SQLBaseStore): ], desc="get_pushers_by", ) - defer.returnValue(self._decode_pushers_rows(ret)) + return self._decode_pushers_rows(ret) @defer.inlineCallbacks def get_all_pushers(self): @@ -106,7 +106,7 @@ class PusherWorkerStore(SQLBaseStore): return self._decode_pushers_rows(rows) rows = yield self.runInteraction("get_all_pushers", get_pushers) - defer.returnValue(rows) + return rows def get_all_updated_pushers(self, last_id, current_id, limit): if last_id == current_id: @@ -205,7 +205,7 @@ class PusherWorkerStore(SQLBaseStore): result = {user_id: False for user_id in user_ids} result.update({r["user_name"]: True for r in rows}) - defer.returnValue(result) + return result class PusherStore(PusherWorkerStore): @@ -343,7 +343,7 @@ class PusherStore(PusherWorkerStore): "throttle_ms": row["throttle_ms"], } - defer.returnValue(params_by_room) + return params_by_room @defer.inlineCallbacks def set_throttle_params(self, pusher_id, room_id, params): diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py index b477da12b1..6aa6d98ebb 100644 --- a/synapse/storage/receipts.py +++ b/synapse/storage/receipts.py @@ -58,7 +58,7 @@ class ReceiptsWorkerStore(SQLBaseStore): @cachedInlineCallbacks() def get_users_with_read_receipts_in_room(self, room_id): receipts = yield self.get_receipts_for_room(room_id, "m.read") - defer.returnValue(set(r["user_id"] for r in receipts)) + return set(r["user_id"] for r in receipts) @cached(num_args=2) def get_receipts_for_room(self, room_id, receipt_type): @@ -92,7 +92,7 @@ class ReceiptsWorkerStore(SQLBaseStore): desc="get_receipts_for_user", ) - defer.returnValue({row["room_id"]: row["event_id"] for row in rows}) + return {row["room_id"]: row["event_id"] for row in rows} @defer.inlineCallbacks def get_receipts_for_user_with_orderings(self, user_id, receipt_type): @@ -110,16 +110,14 @@ class ReceiptsWorkerStore(SQLBaseStore): return txn.fetchall() rows = yield self.runInteraction("get_receipts_for_user_with_orderings", f) - defer.returnValue( - { - row[0]: { - "event_id": row[1], - "topological_ordering": row[2], - "stream_ordering": row[3], - } - for row in rows + return { + row[0]: { + "event_id": row[1], + "topological_ordering": row[2], + "stream_ordering": row[3], } - ) + for row in rows + } @defer.inlineCallbacks def get_linearized_receipts_for_rooms(self, room_ids, to_key, from_key=None): @@ -147,7 +145,7 @@ class ReceiptsWorkerStore(SQLBaseStore): room_ids, to_key, from_key=from_key ) - defer.returnValue([ev for res in results.values() for ev in res]) + return [ev for res in results.values() for ev in res] 
def get_linearized_receipts_for_room(self, room_id, to_key, from_key=None): """Get receipts for a single room for sending to clients. @@ -197,7 +195,7 @@ class ReceiptsWorkerStore(SQLBaseStore): rows = yield self.runInteraction("get_linearized_receipts_for_room", f) if not rows: - defer.returnValue([]) + return [] content = {} for row in rows: @@ -205,9 +203,7 @@ class ReceiptsWorkerStore(SQLBaseStore): row["user_id"] ] = json.loads(row["data"]) - defer.returnValue( - [{"type": "m.receipt", "room_id": room_id, "content": content}] - ) + return [{"type": "m.receipt", "room_id": room_id, "content": content}] @cachedList( cached_method_name="_get_linearized_receipts_for_room", @@ -217,7 +213,7 @@ class ReceiptsWorkerStore(SQLBaseStore): ) def _get_linearized_receipts_for_rooms(self, room_ids, to_key, from_key=None): if not room_ids: - defer.returnValue({}) + return {} def f(txn): if from_key: @@ -264,7 +260,7 @@ class ReceiptsWorkerStore(SQLBaseStore): room_id: [results[room_id]] if room_id in results else [] for room_id in room_ids } - defer.returnValue(results) + return results def get_all_updated_receipts(self, last_id, current_id, limit=None): if last_id == current_id: @@ -468,7 +464,7 @@ class ReceiptsStore(ReceiptsWorkerStore): ) if event_ts is None: - defer.returnValue(None) + return None now = self._clock.time_msec() logger.debug( @@ -482,7 +478,7 @@ class ReceiptsStore(ReceiptsWorkerStore): max_persisted_id = self._receipts_id_gen.get_current_token() - defer.returnValue((stream_id, max_persisted_id)) + return (stream_id, max_persisted_id) def insert_graph_receipt(self, room_id, receipt_type, user_id, event_ids, data): return self.runInteraction( diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 8b2c2a97ab..999c10a308 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -75,12 +75,12 @@ class RegistrationWorkerStore(SQLBaseStore): info = yield self.get_user_by_id(user_id) if not info: - defer.returnValue(False) + return False now = self.clock.time_msec() trial_duration_ms = self.config.mau_trial_days * 24 * 60 * 60 * 1000 is_trial = (now - info["creation_ts"] * 1000) < trial_duration_ms - defer.returnValue(is_trial) + return is_trial @cached() def get_user_by_access_token(self, token): @@ -115,7 +115,7 @@ class RegistrationWorkerStore(SQLBaseStore): allow_none=True, desc="get_expiration_ts_for_user", ) - defer.returnValue(res) + return res @defer.inlineCallbacks def set_account_validity_for_user( @@ -190,7 +190,7 @@ class RegistrationWorkerStore(SQLBaseStore): desc="get_user_from_renewal_token", ) - defer.returnValue(res) + return res @defer.inlineCallbacks def get_renewal_token_for_user(self, user_id): @@ -209,7 +209,7 @@ class RegistrationWorkerStore(SQLBaseStore): desc="get_renewal_token_for_user", ) - defer.returnValue(res) + return res @defer.inlineCallbacks def get_users_expiring_soon(self): @@ -237,7 +237,7 @@ class RegistrationWorkerStore(SQLBaseStore): self.config.account_validity.renew_at, ) - defer.returnValue(res) + return res @defer.inlineCallbacks def set_renewal_mail_status(self, user_id, email_sent): @@ -280,7 +280,7 @@ class RegistrationWorkerStore(SQLBaseStore): desc="is_server_admin", ) - defer.returnValue(res if res else False) + return res if res else False def _query_for_auth(self, txn, token): sql = ( @@ -311,7 +311,7 @@ class RegistrationWorkerStore(SQLBaseStore): res = yield self.runInteraction( "is_support_user", self.is_support_user_txn, user_id ) - defer.returnValue(res) + return res def 
is_support_user_txn(self, txn, user_id): res = self._simple_select_one_onecol_txn( @@ -349,7 +349,7 @@ class RegistrationWorkerStore(SQLBaseStore): return 0 ret = yield self.runInteraction("count_users", _count_users) - defer.returnValue(ret) + return ret def count_daily_user_type(self): """ @@ -395,7 +395,7 @@ class RegistrationWorkerStore(SQLBaseStore): return count ret = yield self.runInteraction("count_users", _count_users) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def find_next_generated_user_id_localpart(self): @@ -425,7 +425,7 @@ class RegistrationWorkerStore(SQLBaseStore): if i not in found: return i - defer.returnValue( + return ( ( yield self.runInteraction( "find_next_generated_user_id", _find_next_generated_user_id @@ -447,7 +447,7 @@ class RegistrationWorkerStore(SQLBaseStore): user_id = yield self.runInteraction( "get_user_id_by_threepid", self.get_user_id_by_threepid_txn, medium, address ) - defer.returnValue(user_id) + return user_id def get_user_id_by_threepid_txn(self, txn, medium, address): """Returns user id from threepid @@ -487,7 +487,7 @@ class RegistrationWorkerStore(SQLBaseStore): ["medium", "address", "validated_at", "added_at"], "user_get_threepids", ) - defer.returnValue(ret) + return ret def user_delete_threepid(self, user_id, medium, address): return self._simple_delete( @@ -677,7 +677,7 @@ class RegistrationStore( if end: yield self._end_background_update("users_set_deactivated_flag") - defer.returnValue(batch_size) + return batch_size @defer.inlineCallbacks def add_access_token_to_user(self, user_id, token, device_id, valid_until_ms): @@ -957,7 +957,7 @@ class RegistrationStore( desc="is_guest", ) - defer.returnValue(res if res else False) + return res if res else False def add_user_pending_deactivation(self, user_id): """ @@ -1024,7 +1024,7 @@ class RegistrationStore( yield self._end_background_update("user_threepids_grandfather") - defer.returnValue(1) + return 1 def get_threepid_validation_session( self, medium, client_secret, address=None, sid=None, validated=True @@ -1337,4 +1337,4 @@ class RegistrationStore( ) # Convert the integer into a boolean. 
- defer.returnValue(res == 1) + return res == 1 diff --git a/synapse/storage/relations.py b/synapse/storage/relations.py index 9954bc094f..fcb5f2f23a 100644 --- a/synapse/storage/relations.py +++ b/synapse/storage/relations.py @@ -17,8 +17,6 @@ import logging import attr -from twisted.internet import defer - from synapse.api.constants import RelationTypes from synapse.api.errors import SynapseError from synapse.storage._base import SQLBaseStore @@ -363,7 +361,7 @@ class RelationsWorkerStore(SQLBaseStore): return edit_event = yield self.get_event(edit_id, allow_none=True) - defer.returnValue(edit_event) + return edit_event def has_user_annotated_event(self, parent_id, event_type, aggregation_key, sender): """Check if a user has already annotated an event with the same key diff --git a/synapse/storage/room.py b/synapse/storage/room.py index fe9d79d792..bc606292b8 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -193,14 +193,12 @@ class RoomWorkerStore(SQLBaseStore): ) if row: - defer.returnValue( - RatelimitOverride( - messages_per_second=row["messages_per_second"], - burst_count=row["burst_count"], - ) + return RatelimitOverride( + messages_per_second=row["messages_per_second"], + burst_count=row["burst_count"], ) else: - defer.returnValue(None) + return None class RoomStore(RoomWorkerStore, SearchStore): diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index b3c002b9eb..cb88e49b51 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -108,7 +108,7 @@ class RoomMemberWorkerStore(EventsWorkerStore): room_id, on_invalidate=cache_context.invalidate ) hosts = frozenset(get_domain_from_id(user_id) for user_id in user_ids) - defer.returnValue(hosts) + return hosts @cached(max_entries=100000, iterable=True) def get_users_in_room(self, room_id): @@ -253,8 +253,8 @@ class RoomMemberWorkerStore(EventsWorkerStore): invites = yield self.get_invited_rooms_for_user(user_id) for invite in invites: if invite.room_id == room_id: - defer.returnValue(invite) - defer.returnValue(None) + return invite + return None def get_rooms_for_user_where_membership_is(self, user_id, membership_list): """ Get all the rooms for this user where the membership for this user @@ -347,11 +347,9 @@ class RoomMemberWorkerStore(EventsWorkerStore): rooms = yield self.get_rooms_for_user_where_membership_is( user_id, membership_list=[Membership.JOIN] ) - defer.returnValue( - frozenset( - GetRoomsForUserWithStreamOrdering(r.room_id, r.stream_ordering) - for r in rooms - ) + return frozenset( + GetRoomsForUserWithStreamOrdering(r.room_id, r.stream_ordering) + for r in rooms ) @defer.inlineCallbacks @@ -361,7 +359,7 @@ class RoomMemberWorkerStore(EventsWorkerStore): rooms = yield self.get_rooms_for_user_with_stream_ordering( user_id, on_invalidate=on_invalidate ) - defer.returnValue(frozenset(r.room_id for r in rooms)) + return frozenset(r.room_id for r in rooms) @cachedInlineCallbacks(max_entries=500000, cache_context=True, iterable=True) def get_users_who_share_room_with_user(self, user_id, cache_context): @@ -378,7 +376,7 @@ class RoomMemberWorkerStore(EventsWorkerStore): ) user_who_share_room.update(user_ids) - defer.returnValue(user_who_share_room) + return user_who_share_room @defer.inlineCallbacks def get_joined_users_from_context(self, event, context): @@ -394,7 +392,7 @@ class RoomMemberWorkerStore(EventsWorkerStore): result = yield self._get_joined_users_from_context( event.room_id, state_group, current_state_ids, event=event, context=context ) - 
defer.returnValue(result) + return result def get_joined_users_from_state(self, room_id, state_entry): state_group = state_entry.state_group @@ -508,7 +506,7 @@ class RoomMemberWorkerStore(EventsWorkerStore): avatar_url=to_ascii(event.content.get("avatar_url", None)), ) - defer.returnValue(users_in_room) + return users_in_room @cachedInlineCallbacks(max_entries=10000) def is_host_joined(self, room_id, host): @@ -533,14 +531,14 @@ class RoomMemberWorkerStore(EventsWorkerStore): rows = yield self._execute("is_host_joined", None, sql, room_id, like_clause) if not rows: - defer.returnValue(False) + return False user_id = rows[0][0] if get_domain_from_id(user_id) != host: # This can only happen if the host name has something funky in it raise Exception("Invalid host name") - defer.returnValue(True) + return True @cachedInlineCallbacks() def was_host_joined(self, room_id, host): @@ -573,14 +571,14 @@ class RoomMemberWorkerStore(EventsWorkerStore): rows = yield self._execute("was_host_joined", None, sql, room_id, like_clause) if not rows: - defer.returnValue(False) + return False user_id = rows[0][0] if get_domain_from_id(user_id) != host: # This can only happen if the host name has something funky in it raise Exception("Invalid host name") - defer.returnValue(True) + return True def get_joined_hosts(self, room_id, state_entry): state_group = state_entry.state_group @@ -607,7 +605,7 @@ class RoomMemberWorkerStore(EventsWorkerStore): cache = self._get_joined_hosts_cache(room_id) joined_hosts = yield cache.get_destinations(state_entry) - defer.returnValue(joined_hosts) + return joined_hosts @cached(max_entries=10000) def _get_joined_hosts_cache(self, room_id): @@ -637,7 +635,7 @@ class RoomMemberWorkerStore(EventsWorkerStore): return rows[0][0] count = yield self.runInteraction("did_forget_membership", f) - defer.returnValue(count == 0) + return count == 0 @defer.inlineCallbacks def get_rooms_user_has_been_in(self, user_id): @@ -847,7 +845,7 @@ class RoomMemberStore(RoomMemberWorkerStore): if not result: yield self._end_background_update(_MEMBERSHIP_PROFILE_UPDATE_NAME) - defer.returnValue(result) + return result @defer.inlineCallbacks def _background_current_state_membership(self, progress, batch_size): @@ -905,7 +903,7 @@ class RoomMemberStore(RoomMemberWorkerStore): if finished: yield self._end_background_update(_CURRENT_STATE_MEMBERSHIP_UPDATE_NAME) - defer.returnValue(row_count) + return row_count class _JoinedHostsCache(object): @@ -933,7 +931,7 @@ class _JoinedHostsCache(object): state_entry(synapse.state._StateCacheEntry) """ if state_entry.state_group == self.state_group: - defer.returnValue(frozenset(self.hosts_to_joined_users)) + return frozenset(self.hosts_to_joined_users) with (yield self.linearizer.queue(())): if state_entry.state_group == self.state_group: @@ -970,7 +968,7 @@ class _JoinedHostsCache(object): else: self.state_group = object() self._len = sum(len(v) for v in itervalues(self.hosts_to_joined_users)) - defer.returnValue(frozenset(self.hosts_to_joined_users)) + return frozenset(self.hosts_to_joined_users) def __len__(self): return self._len diff --git a/synapse/storage/search.py b/synapse/storage/search.py index f3b1cec933..df87ab6a6d 100644 --- a/synapse/storage/search.py +++ b/synapse/storage/search.py @@ -166,7 +166,7 @@ class SearchStore(BackgroundUpdateStore): if not result: yield self._end_background_update(self.EVENT_SEARCH_UPDATE_NAME) - defer.returnValue(result) + return result @defer.inlineCallbacks def _background_reindex_gin_search(self, progress, batch_size): 
@@ -209,7 +209,7 @@ class SearchStore(BackgroundUpdateStore): yield self.runWithConnection(create_index) yield self._end_background_update(self.EVENT_SEARCH_USE_GIN_POSTGRES_NAME) - defer.returnValue(1) + return 1 @defer.inlineCallbacks def _background_reindex_search_order(self, progress, batch_size): @@ -287,7 +287,7 @@ class SearchStore(BackgroundUpdateStore): if not finished: yield self._end_background_update(self.EVENT_SEARCH_ORDER_UPDATE_NAME) - defer.returnValue(num_rows) + return num_rows def store_event_search_txn(self, txn, event, key, value): """Add event to the search table @@ -454,17 +454,15 @@ class SearchStore(BackgroundUpdateStore): count = sum(row["count"] for row in count_results if row["room_id"] in room_ids) - defer.returnValue( - { - "results": [ - {"event": event_map[r["event_id"]], "rank": r["rank"]} - for r in results - if r["event_id"] in event_map - ], - "highlights": highlights, - "count": count, - } - ) + return { + "results": [ + {"event": event_map[r["event_id"]], "rank": r["rank"]} + for r in results + if r["event_id"] in event_map + ], + "highlights": highlights, + "count": count, + } @defer.inlineCallbacks def search_rooms(self, room_ids, search_term, keys, limit, pagination_token=None): @@ -599,22 +597,20 @@ class SearchStore(BackgroundUpdateStore): count = sum(row["count"] for row in count_results if row["room_id"] in room_ids) - defer.returnValue( - { - "results": [ - { - "event": event_map[r["event_id"]], - "rank": r["rank"], - "pagination_token": "%s,%s" - % (r["origin_server_ts"], r["stream_ordering"]), - } - for r in results - if r["event_id"] in event_map - ], - "highlights": highlights, - "count": count, - } - ) + return { + "results": [ + { + "event": event_map[r["event_id"]], + "rank": r["rank"], + "pagination_token": "%s,%s" + % (r["origin_server_ts"], r["stream_ordering"]), + } + for r in results + if r["event_id"] in event_map + ], + "highlights": highlights, + "count": count, + } def _find_highlights_in_postgres(self, search_query, events): """Given a list of events and a search term, return a list of words diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index 6bd81e84ad..fb83218f90 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -59,7 +59,7 @@ class SignatureWorkerStore(SQLBaseStore): for e_id, h in hashes.items() } - defer.returnValue(list(hashes.items())) + return list(hashes.items()) def _get_event_reference_hashes_txn(self, txn, event_id): """Get all the hashes for a given PDU. 
diff --git a/synapse/storage/state.py b/synapse/storage/state.py index a35289876d..1980a87108 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -422,7 +422,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): # Retrieve the room's create event create_event = yield self.get_create_event_for_room(room_id) - defer.returnValue(create_event.content.get("room_version", "1")) + return create_event.content.get("room_version", "1") @defer.inlineCallbacks def get_room_predecessor(self, room_id): @@ -442,7 +442,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): create_event = yield self.get_create_event_for_room(room_id) # Return predecessor if present - defer.returnValue(create_event.content.get("predecessor", None)) + return create_event.content.get("predecessor", None) @defer.inlineCallbacks def get_create_event_for_room(self, room_id): @@ -466,7 +466,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): # Retrieve the room's create event and return create_event = yield self.get_event(create_id) - defer.returnValue(create_event) + return create_event @cached(max_entries=100000, iterable=True) def get_current_state_ids(self, room_id): @@ -563,7 +563,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): if not event: return - defer.returnValue(event.content.get("canonical_alias")) + return event.content.get("canonical_alias") @cached(max_entries=10000, iterable=True) def get_state_group_delta(self, state_group): @@ -613,14 +613,14 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): dict of state_group_id -> (dict of (type, state_key) -> event id) """ if not event_ids: - defer.returnValue({}) + return {} event_to_groups = yield self._get_state_group_for_events(event_ids) groups = set(itervalues(event_to_groups)) group_to_state = yield self._get_state_for_groups(groups) - defer.returnValue(group_to_state) + return group_to_state @defer.inlineCallbacks def get_state_ids_for_group(self, state_group): @@ -634,7 +634,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): """ group_to_state = yield self._get_state_for_groups((state_group,)) - defer.returnValue(group_to_state[state_group]) + return group_to_state[state_group] @defer.inlineCallbacks def get_state_groups(self, room_id, event_ids): @@ -645,7 +645,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): dict of state_group_id -> list of state events. 
""" if not event_ids: - defer.returnValue({}) + return {} group_to_ids = yield self.get_state_groups_ids(room_id, event_ids) @@ -658,16 +658,14 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): get_prev_content=False, ) - defer.returnValue( - { - group: [ - state_event_map[v] - for v in itervalues(event_id_map) - if v in state_event_map - ] - for group, event_id_map in iteritems(group_to_ids) - } - ) + return { + group: [ + state_event_map[v] + for v in itervalues(event_id_map) + if v in state_event_map + ] + for group, event_id_map in iteritems(group_to_ids) + } @defer.inlineCallbacks def _get_state_groups_from_groups(self, groups, state_filter): @@ -694,7 +692,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): ) results.update(res) - defer.returnValue(results) + return results def _get_state_groups_from_groups_txn( self, txn, groups, state_filter=StateFilter.all() @@ -829,7 +827,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): for event_id, group in iteritems(event_to_groups) } - defer.returnValue({event: event_to_state[event] for event in event_ids}) + return {event: event_to_state[event] for event in event_ids} @defer.inlineCallbacks def get_state_ids_for_events(self, event_ids, state_filter=StateFilter.all()): @@ -855,7 +853,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): for event_id, group in iteritems(event_to_groups) } - defer.returnValue({event: event_to_state[event] for event in event_ids}) + return {event: event_to_state[event] for event in event_ids} @defer.inlineCallbacks def get_state_for_event(self, event_id, state_filter=StateFilter.all()): @@ -871,7 +869,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): A deferred dict from (type, state_key) -> state_event """ state_map = yield self.get_state_for_events([event_id], state_filter) - defer.returnValue(state_map[event_id]) + return state_map[event_id] @defer.inlineCallbacks def get_state_ids_for_event(self, event_id, state_filter=StateFilter.all()): @@ -887,7 +885,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): A deferred dict from (type, state_key) -> state_event """ state_map = yield self.get_state_ids_for_events([event_id], state_filter) - defer.returnValue(state_map[event_id]) + return state_map[event_id] @cached(max_entries=50000) def _get_state_group_for_event(self, event_id): @@ -917,7 +915,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): desc="_get_state_group_for_events", ) - defer.returnValue({row["event_id"]: row["state_group"] for row in rows}) + return {row["event_id"]: row["state_group"] for row in rows} def _get_state_for_group_using_cache(self, cache, group, state_filter): """Checks if group is in cache. See `_get_state_for_groups` @@ -997,7 +995,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): incomplete_groups = incomplete_groups_m | incomplete_groups_nm if not incomplete_groups: - defer.returnValue(state) + return state cache_sequence_nm = self._state_group_cache.sequence cache_sequence_m = self._state_group_members_cache.sequence @@ -1024,7 +1022,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): # everything we need from the database anyway. 
state[group] = state_filter.filter_state(group_state_dict) - defer.returnValue(state) + return state def _get_state_for_groups_using_cache(self, groups, cache, state_filter): """Gets the state at each of a list of state groups, optionally @@ -1498,7 +1496,7 @@ class StateStore(StateGroupWorkerStore, BackgroundUpdateStore): self.STATE_GROUP_DEDUPLICATION_UPDATE_NAME ) - defer.returnValue(result * BATCH_SIZE_SCALE_FACTOR) + return result * BATCH_SIZE_SCALE_FACTOR @defer.inlineCallbacks def _background_index_state(self, progress, batch_size): @@ -1528,4 +1526,4 @@ class StateStore(StateGroupWorkerStore, BackgroundUpdateStore): yield self._end_background_update(self.STATE_GROUP_INDEX_UPDATE_NAME) - defer.returnValue(1) + return 1 diff --git a/synapse/storage/stats.py b/synapse/storage/stats.py index 1cec84ee2e..e893b05ee7 100644 --- a/synapse/storage/stats.py +++ b/synapse/storage/stats.py @@ -66,7 +66,7 @@ class StatsStore(StateDeltasStore): if not self.stats_enabled: yield self._end_background_update("populate_stats_createtables") - defer.returnValue(1) + return 1 # Get all the rooms that we want to process. def _make_staging_area(txn): @@ -120,7 +120,7 @@ class StatsStore(StateDeltasStore): self.get_earliest_token_for_room_stats.invalidate_all() yield self._end_background_update("populate_stats_createtables") - defer.returnValue(1) + return 1 @defer.inlineCallbacks def _populate_stats_cleanup(self, progress, batch_size): @@ -129,7 +129,7 @@ class StatsStore(StateDeltasStore): """ if not self.stats_enabled: yield self._end_background_update("populate_stats_cleanup") - defer.returnValue(1) + return 1 position = yield self._simple_select_one_onecol( TEMP_TABLE + "_position", None, "position" @@ -143,14 +143,14 @@ class StatsStore(StateDeltasStore): yield self.runInteraction("populate_stats_cleanup", _delete_staging_area) yield self._end_background_update("populate_stats_cleanup") - defer.returnValue(1) + return 1 @defer.inlineCallbacks def _populate_stats_process_rooms(self, progress, batch_size): if not self.stats_enabled: yield self._end_background_update("populate_stats_process_rooms") - defer.returnValue(1) + return 1 # If we don't have progress filed, delete everything. if not progress: @@ -186,7 +186,7 @@ class StatsStore(StateDeltasStore): # No more rooms -- complete the transaction. if not rooms_to_work_on: yield self._end_background_update("populate_stats_process_rooms") - defer.returnValue(1) + return 1 logger.info( "Processing the next %d rooms of %d remaining", @@ -303,9 +303,9 @@ class StatsStore(StateDeltasStore): if processed_event_count > batch_size: # Don't process any more rooms, we've hit our batch size. 
- defer.returnValue(processed_event_count) + return processed_event_count - defer.returnValue(processed_event_count) + return processed_event_count def delete_all_stats(self): """ diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py index a0465484df..856c2ee8d8 100644 --- a/synapse/storage/stream.py +++ b/synapse/storage/stream.py @@ -300,7 +300,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): ) if not room_ids: - defer.returnValue({}) + return {} results = {} room_ids = list(room_ids) @@ -323,7 +323,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): ) results.update(dict(zip(rm_ids, res))) - defer.returnValue(results) + return results def get_rooms_that_changed(self, room_ids, from_key): """Given a list of rooms and a token, return rooms where there may have @@ -364,7 +364,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): the chunk of events returned. """ if from_key == to_key: - defer.returnValue(([], from_key)) + return ([], from_key) from_id = RoomStreamToken.parse_stream_token(from_key).stream to_id = RoomStreamToken.parse_stream_token(to_key).stream @@ -374,7 +374,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): ) if not has_changed: - defer.returnValue(([], from_key)) + return ([], from_key) def f(txn): sql = ( @@ -407,7 +407,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): # get. key = from_key - defer.returnValue((ret, key)) + return (ret, key) @defer.inlineCallbacks def get_membership_changes_for_user(self, user_id, from_key, to_key): @@ -415,14 +415,14 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): to_id = RoomStreamToken.parse_stream_token(to_key).stream if from_key == to_key: - defer.returnValue([]) + return [] if from_id: has_changed = self._membership_stream_cache.has_entity_changed( user_id, int(from_id) ) if not has_changed: - defer.returnValue([]) + return [] def f(txn): sql = ( @@ -447,7 +447,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): self._set_before_and_after(ret, rows, topo_order=False) - defer.returnValue(ret) + return ret @defer.inlineCallbacks def get_recent_events_for_room(self, room_id, limit, end_token): @@ -477,7 +477,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): self._set_before_and_after(events, rows) - defer.returnValue((events, token)) + return (events, token) @defer.inlineCallbacks def get_recent_event_ids_for_room(self, room_id, limit, end_token): @@ -496,7 +496,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): """ # Allow a zero limit here, and no-op. if limit == 0: - defer.returnValue(([], end_token)) + return ([], end_token) end_token = RoomStreamToken.parse(end_token) @@ -511,7 +511,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): # We want to return the results in ascending order. 
rows.reverse() - defer.returnValue((rows, token)) + return (rows, token) def get_room_event_after_stream_ordering(self, room_id, stream_ordering): """Gets details of the first event in a room at or after a stream ordering @@ -549,12 +549,12 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): """ token = yield self.get_room_max_stream_ordering() if room_id is None: - defer.returnValue("s%d" % (token,)) + return "s%d" % (token,) else: topo = yield self.runInteraction( "_get_max_topological_txn", self._get_max_topological_txn, room_id ) - defer.returnValue("t%d-%d" % (topo, token)) + return "t%d-%d" % (topo, token) def get_stream_token_for_event(self, event_id): """The stream token for an event @@ -674,14 +674,12 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): [e for e in results["after"]["event_ids"]], get_prev_content=True ) - defer.returnValue( - { - "events_before": events_before, - "events_after": events_after, - "start": results["before"]["token"], - "end": results["after"]["token"], - } - ) + return { + "events_before": events_before, + "events_after": events_after, + "start": results["before"]["token"], + "end": results["after"]["token"], + } def _get_events_around_txn( self, txn, room_id, event_id, before_limit, after_limit, event_filter @@ -785,7 +783,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): events = yield self.get_events_as_list(event_ids) - defer.returnValue((upper_bound, events)) + return (upper_bound, events) def get_federation_out_pos(self, typ): return self._simple_select_one_onecol( @@ -939,7 +937,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): self._set_before_and_after(events, rows) - defer.returnValue((events, token)) + return (events, token) class StreamStore(StreamWorkerStore): diff --git a/synapse/storage/tags.py b/synapse/storage/tags.py index e88f8ea35f..20dd6bd53d 100644 --- a/synapse/storage/tags.py +++ b/synapse/storage/tags.py @@ -66,7 +66,7 @@ class TagsWorkerStore(AccountDataWorkerStore): room_id string, tag string and content string. """ if last_id == current_id: - defer.returnValue([]) + return [] def get_all_updated_tags_txn(txn): sql = ( @@ -107,7 +107,7 @@ class TagsWorkerStore(AccountDataWorkerStore): ) results.extend(tags) - defer.returnValue(results) + return results @defer.inlineCallbacks def get_updated_tags(self, user_id, stream_id): @@ -135,7 +135,7 @@ class TagsWorkerStore(AccountDataWorkerStore): user_id, int(stream_id) ) if not changed: - defer.returnValue({}) + return {} room_ids = yield self.runInteraction("get_updated_tags", get_updated_tags_txn) @@ -145,7 +145,7 @@ class TagsWorkerStore(AccountDataWorkerStore): for room_id in room_ids: results[room_id] = tags_by_room.get(room_id, {}) - defer.returnValue(results) + return results def get_tags_for_room(self, user_id, room_id): """Get all the tags for the given room @@ -194,7 +194,7 @@ class TagsStore(TagsWorkerStore): self.get_tags_for_user.invalidate((user_id,)) result = self._account_data_id_gen.get_current_token() - defer.returnValue(result) + return result @defer.inlineCallbacks def remove_tag_from_room(self, user_id, room_id, tag): @@ -217,7 +217,7 @@ class TagsStore(TagsWorkerStore): self.get_tags_for_user.invalidate((user_id,)) result = self._account_data_id_gen.get_current_token() - defer.returnValue(result) + return result def _update_revision_txn(self, txn, user_id, room_id, next_id): """Update the latest revision of the tags for the given user and room. 
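As a self-contained illustration of the mechanical change running through these storage hunks (FakeStore and get_thing are invented for the example, not Synapse code): on Python 3 a plain return inside an @defer.inlineCallbacks generator resolves the wrapping Deferred exactly as defer.returnValue() did, which is what makes this one-for-one substitution safe.

from twisted.internet import defer

class FakeStore(object):
    def fetch(self, key):
        # Stand-in for a storage method that returns a Deferred.
        return defer.succeed({"key": key})

@defer.inlineCallbacks
def get_thing(store, key):
    row = yield store.fetch(key)
    # Before this series this line had to be written as
    # defer.returnValue(row); on Python 3 the two are equivalent.
    return row

d = get_thing(FakeStore(), "k1")
d.addCallback(print)  # prints {'key': 'k1'}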
diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py index c585cf6cf7..b3c3bf55bc 100644 --- a/synapse/storage/transactions.py +++ b/synapse/storage/transactions.py @@ -147,7 +147,7 @@ class TransactionStore(SQLBaseStore): result = self._destination_retry_cache.get(destination, SENTINEL) if result is not SENTINEL: - defer.returnValue(result) + return result result = yield self.runInteraction( "get_destination_retry_timings", @@ -158,7 +158,7 @@ class TransactionStore(SQLBaseStore): # We don't hugely care about race conditions between getting and # invalidating the cache, since we time out fairly quickly anyway. self._destination_retry_cache[destination] = result - defer.returnValue(result) + return result def _get_destination_retry_timings(self, txn, destination): result = self._simple_select_one_txn( diff --git a/synapse/storage/user_directory.py b/synapse/storage/user_directory.py index 7fd16fe65e..b5188d9bee 100644 --- a/synapse/storage/user_directory.py +++ b/synapse/storage/user_directory.py @@ -109,7 +109,7 @@ class UserDirectoryStore(StateDeltasStore, BackgroundUpdateStore): yield self._simple_insert(TEMP_TABLE + "_position", {"position": new_pos}) yield self._end_background_update("populate_user_directory_createtables") - defer.returnValue(1) + return 1 @defer.inlineCallbacks def _populate_user_directory_cleanup(self, progress, batch_size): @@ -131,7 +131,7 @@ class UserDirectoryStore(StateDeltasStore, BackgroundUpdateStore): ) yield self._end_background_update("populate_user_directory_cleanup") - defer.returnValue(1) + return 1 @defer.inlineCallbacks def _populate_user_directory_process_rooms(self, progress, batch_size): @@ -177,7 +177,7 @@ class UserDirectoryStore(StateDeltasStore, BackgroundUpdateStore): # No more rooms -- complete the transaction. if not rooms_to_work_on: yield self._end_background_update("populate_user_directory_process_rooms") - defer.returnValue(1) + return 1 logger.info( "Processing the next %d rooms of %d remaining" @@ -257,9 +257,9 @@ class UserDirectoryStore(StateDeltasStore, BackgroundUpdateStore): if processed_event_count > batch_size: # Don't process any more rooms, we've hit our batch size. - defer.returnValue(processed_event_count) + return processed_event_count - defer.returnValue(processed_event_count) + return processed_event_count @defer.inlineCallbacks def _populate_user_directory_process_users(self, progress, batch_size): @@ -268,7 +268,7 @@ class UserDirectoryStore(StateDeltasStore, BackgroundUpdateStore): """ if not self.hs.config.user_directory_search_all_users: yield self._end_background_update("populate_user_directory_process_users") - defer.returnValue(1) + return 1 def _get_next_batch(txn): sql = "SELECT user_id FROM %s LIMIT %s" % ( @@ -298,7 +298,7 @@ class UserDirectoryStore(StateDeltasStore, BackgroundUpdateStore): # No more users -- complete the transaction. 
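For readers following the populate_* hunks here and below, a sketch of the background-update convention they share may help (only _end_background_update is real; the method name and _do_one_batch are invented): the handler is invoked repeatedly with a batch_size, returns how many items it processed so the updater can size later batches, and the update only stops once _end_background_update() has been called.

from twisted.internet import defer

@defer.inlineCallbacks
def _populate_example(self, progress, batch_size):
    # Written as it would appear on a store class. Do at most `batch_size`
    # units of work per invocation.
    processed = yield self._do_one_batch(progress, batch_size)
    if not processed:
        # Nothing left: mark the update finished, then return a nominal 1,
        # as the handlers in the surrounding hunks do.
        yield self._end_background_update("populate_example")
        return 1
    # Otherwise report how much was done for this batch.
    return processed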
if not users_to_work_on: yield self._end_background_update("populate_user_directory_process_users") - defer.returnValue(1) + return 1 logger.info( "Processing the next %d users of %d remaining" @@ -322,7 +322,7 @@ class UserDirectoryStore(StateDeltasStore, BackgroundUpdateStore): progress, ) - defer.returnValue(len(users_to_work_on)) + return len(users_to_work_on) @defer.inlineCallbacks def is_room_world_readable_or_publicly_joinable(self, room_id): @@ -344,16 +344,16 @@ class UserDirectoryStore(StateDeltasStore, BackgroundUpdateStore): join_rule_ev = yield self.get_event(join_rules_id, allow_none=True) if join_rule_ev: if join_rule_ev.content.get("join_rule") == JoinRules.PUBLIC: - defer.returnValue(True) + return True hist_vis_id = current_state_ids.get((EventTypes.RoomHistoryVisibility, "")) if hist_vis_id: hist_vis_ev = yield self.get_event(hist_vis_id, allow_none=True) if hist_vis_ev: if hist_vis_ev.content.get("history_visibility") == "world_readable": - defer.returnValue(True) + return True - defer.returnValue(False) + return False def update_profile_in_user_dir(self, user_id, display_name, avatar_url): """ @@ -499,7 +499,7 @@ class UserDirectoryStore(StateDeltasStore, BackgroundUpdateStore): user_ids = set(user_ids_share_pub) user_ids.update(user_ids_share_priv) - defer.returnValue(user_ids) + return user_ids def add_users_who_share_private_room(self, room_id, user_id_tuples): """Insert entries into the users_who_share_private_rooms table. The first @@ -609,7 +609,7 @@ class UserDirectoryStore(StateDeltasStore, BackgroundUpdateStore): users = set(pub_rows) users.update(rows) - defer.returnValue(list(users)) + return list(users) @defer.inlineCallbacks def get_rooms_in_common_for_users(self, user_id, other_user_id): @@ -635,7 +635,7 @@ class UserDirectoryStore(StateDeltasStore, BackgroundUpdateStore): "get_rooms_in_common_for_users", None, sql, user_id, other_user_id ) - defer.returnValue([room_id for room_id, in rows]) + return [room_id for room_id, in rows] def delete_all_from_user_dir(self): """Delete the entire user directory @@ -782,7 +782,7 @@ class UserDirectoryStore(StateDeltasStore, BackgroundUpdateStore): limited = len(results) > limit - defer.returnValue({"limited": limited, "results": results}) + return {"limited": limited, "results": results} def _parse_query_sqlite(search_term): diff --git a/synapse/storage/user_erasure_store.py b/synapse/storage/user_erasure_store.py index 1815fdc0dd..05cabc2282 100644 --- a/synapse/storage/user_erasure_store.py +++ b/synapse/storage/user_erasure_store.py @@ -12,9 +12,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import operator -from twisted.internet import defer +import operator from synapse.storage._base import SQLBaseStore from synapse.util.caches.descriptors import cached, cachedList @@ -67,7 +66,7 @@ class UserErasureWorkerStore(SQLBaseStore): erased_users = yield self.runInteraction("are_users_erased", _get_erased_users) res = dict((u, u in erased_users) for u in user_ids) - defer.returnValue(res) + return res class UserErasureStore(UserErasureWorkerStore): diff --git a/synapse/streams/events.py b/synapse/streams/events.py index 488c49747a..b91fb2db7b 100644 --- a/synapse/streams/events.py +++ b/synapse/streams/events.py @@ -56,7 +56,7 @@ class EventSources(object): device_list_key=device_list_key, groups_key=groups_key, ) - defer.returnValue(token) + return token @defer.inlineCallbacks def get_current_token_for_pagination(self): @@ -80,4 +80,4 @@ class EventSources(object): device_list_key=0, groups_key=0, ) - defer.returnValue(token) + return token diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index f506b2a695..841625a991 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -49,7 +49,7 @@ class Clock(object): with context.PreserveLoggingContext(): self._reactor.callLater(seconds, d.callback, seconds) res = yield d - defer.returnValue(res) + return res def time(self): """Returns the current system time in seconds since epoch.""" diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index 58a6b8764f..f1c46836b1 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -366,7 +366,7 @@ class ReadWriteLock(object): new_defer.callback(None) self.key_to_current_readers.get(key, set()).discard(new_defer) - defer.returnValue(_ctx_manager()) + return _ctx_manager() @defer.inlineCallbacks def write(self, key): @@ -396,7 +396,7 @@ class ReadWriteLock(object): if self.key_to_current_writer[key] == new_defer: self.key_to_current_writer.pop(key) - defer.returnValue(_ctx_manager()) + return _ctx_manager() def _cancelled_to_timed_out_error(value, timeout): diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 675db2f448..a1acacbde9 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -289,7 +289,7 @@ class CacheDescriptor(_CacheDescriptorBase): def foo(self, key, cache_context): r1 = yield self.bar1(key, on_invalidate=cache_context.invalidate) r2 = yield self.bar2(key, on_invalidate=cache_context.invalidate) - defer.returnValue(r1 + r2) + return r1 + r2 Args: num_args (int): number of positional arguments (excluding ``self`` and diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index d6908e169d..82d3eefe0e 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -121,7 +121,7 @@ class ResponseCache(object): @defer.inlineCallbacks def handle_request(request): # etc - defer.returnValue(result) + return result result = yield response_cache.wrap( key, diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py index c30b6de19c..0910930c21 100644 --- a/synapse/util/metrics.py +++ b/synapse/util/metrics.py @@ -67,7 +67,7 @@ def measure_func(name): def measured_func(self, *args, **kwargs): with Measure(self.clock, name): r = yield func(self, *args, **kwargs) - defer.returnValue(r) + return r return measured_func diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index d8d0ceae51..0862b5ca5a 100644 --- a/synapse/util/retryutils.py +++ 
b/synapse/util/retryutils.py @@ -95,15 +95,13 @@ def get_retry_limiter(destination, clock, store, ignore_backoff=False, **kwargs) # maximum backoff even though it might only have been down briefly backoff_on_failure = not ignore_backoff - defer.returnValue( - RetryDestinationLimiter( - destination, - clock, - store, - retry_interval, - backoff_on_failure=backoff_on_failure, - **kwargs - ) + return RetryDestinationLimiter( + destination, + clock, + store, + retry_interval, + backoff_on_failure=backoff_on_failure, + **kwargs ) diff --git a/synapse/visibility.py b/synapse/visibility.py index 2a11c83596..bf0f1eebd8 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -208,7 +208,7 @@ def filter_events_for_client( filtered_events = filter(operator.truth, filtered_events) # we turn it into a list before returning it. - defer.returnValue(list(filtered_events)) + return list(filtered_events) @defer.inlineCallbacks @@ -317,11 +317,11 @@ def filter_events_for_server( elif redact: to_return.append(prune_event(e)) - defer.returnValue(to_return) + return to_return # If there are no erased users then we can just return the given list # of events without having to copy it. - defer.returnValue(events) + return events # Ok, so we're dealing with events that have non-trivial visibility # rules, so we need to also get the memberships of the room. @@ -384,4 +384,4 @@ def filter_events_for_server( elif redact: to_return.append(prune_event(e)) - defer.returnValue(to_return) + return to_return diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 8d94a503d6..c4f0bbd3dd 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -107,7 +107,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): self.assertEquals(LoggingContext.current_context().request, "11") with PreserveLoggingContext(): yield persp_deferred - defer.returnValue(persp_resp) + return persp_resp self.http_client.post_json.side_effect = get_perspectives @@ -554,7 +554,7 @@ def run_in_context(f, *args, **kwargs): # logs. 
ctx.request = "testctx" rv = yield f(*args, **kwargs) - defer.returnValue(rv) + return rv def _verify_json_for_server(kr, *args): @@ -565,6 +565,6 @@ def _verify_json_for_server(kr, *args): @defer.inlineCallbacks def v(): rv1 = yield kr.verify_json_for_server(*args) - defer.returnValue(rv1) + return rv1 return run_in_context(v) diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 90d0129374..99dce45cfe 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -283,4 +283,4 @@ class RegistrationTestCase(unittest.HomeserverTestCase): user, requester, displayname, by_admin=True ) - defer.returnValue((user_id, token)) + return (user_id, token) diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index a49f9b3224..b906686b49 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -145,7 +145,7 @@ class MatrixFederationAgentTests(TestCase): try: fetch_res = yield fetch_d - defer.returnValue(fetch_res) + return fetch_res except Exception as e: logger.info("Fetch of %s failed: %s", uri.decode("ascii"), e) raise @@ -936,7 +936,7 @@ class MatrixFederationAgentTests(TestCase): except Exception as e: logger.warning("Error fetching well-known: %s", e) raise - defer.returnValue(result) + return result def test_well_known_cache(self): self.reactor.lookups["testserv"] = "1.2.3.4" diff --git a/tests/http/federation/test_srv_resolver.py b/tests/http/federation/test_srv_resolver.py index 65b51dc981..3b885ef64b 100644 --- a/tests/http/federation/test_srv_resolver.py +++ b/tests/http/federation/test_srv_resolver.py @@ -61,7 +61,7 @@ class SrvResolverTestCase(unittest.TestCase): # should have restored our context self.assertIs(LoggingContext.current_context(), ctx) - defer.returnValue(result) + return result test_d = do_lookup() self.assertNoResult(test_d) diff --git a/tests/http/test_fedclient.py b/tests/http/test_fedclient.py index b9d6d7ad1c..2b01f40a42 100644 --- a/tests/http/test_fedclient.py +++ b/tests/http/test_fedclient.py @@ -68,7 +68,7 @@ class FederationClientTests(HomeserverTestCase): try: fetch_res = yield fetch_d - defer.returnValue(fetch_res) + return fetch_res finally: check_logcontext(context) diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py index a8adc9a61d..a3d7e3c046 100644 --- a/tests/rest/client/test_transactions.py +++ b/tests/rest/client/test_transactions.py @@ -46,7 +46,7 @@ class HttpTransactionCacheTestCase(unittest.TestCase): @defer.inlineCallbacks def cb(): yield Clock(reactor).sleep(0) - defer.returnValue("yay") + return "yay" @defer.inlineCallbacks def test(): diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py index fbb9302694..9fabe3fbc0 100644 --- a/tests/storage/test_background_update.py +++ b/tests/storage/test_background_update.py @@ -43,7 +43,7 @@ class BackgroundUpdateTestCase(unittest.TestCase): "test_update", progress, ) - defer.returnValue(count) + return count self.update_handler.side_effect = update @@ -60,7 +60,7 @@ class BackgroundUpdateTestCase(unittest.TestCase): @defer.inlineCallbacks def update(progress, count): yield self.store._end_background_update("test_update") - defer.returnValue(count) + return count self.update_handler.side_effect = update self.update_handler.reset_mock() diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py index 
732a778fab..1cb471205b 100644 --- a/tests/storage/test_redaction.py +++ b/tests/storage/test_redaction.py @@ -69,7 +69,7 @@ class RedactionTestCase(unittest.TestCase): yield self.store.persist_event(event, context) - defer.returnValue(event) + return event @defer.inlineCallbacks def inject_message(self, room, user, body): @@ -92,7 +92,7 @@ class RedactionTestCase(unittest.TestCase): yield self.store.persist_event(event, context) - defer.returnValue(event) + return event @defer.inlineCallbacks def inject_redaction(self, room, event_id, user, reason): diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py index 73ed943f5a..c6e8196b91 100644 --- a/tests/storage/test_roommember.py +++ b/tests/storage/test_roommember.py @@ -67,7 +67,7 @@ class RoomMemberStoreTestCase(unittest.TestCase): yield self.store.persist_event(event, context) - defer.returnValue(event) + return event @defer.inlineCallbacks def test_one_member(self): diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index 212a7ae765..5c2cf3c2db 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -65,7 +65,7 @@ class StateStoreTestCase(tests.unittest.TestCase): yield self.store.persist_event(event, context) - defer.returnValue(event) + return event def assertStateMapEqual(self, s1, s2): for t in s1: diff --git a/tests/test_visibility.py b/tests/test_visibility.py index 118c3bd238..e0605dac2f 100644 --- a/tests/test_visibility.py +++ b/tests/test_visibility.py @@ -139,7 +139,7 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase): builder ) yield self.hs.get_datastore().persist_event(event, context) - defer.returnValue(event) + return event @defer.inlineCallbacks def inject_room_member(self, user_id, membership="join", extra_content={}): @@ -161,7 +161,7 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase): ) yield self.hs.get_datastore().persist_event(event, context) - defer.returnValue(event) + return event @defer.inlineCallbacks def inject_message(self, user_id, content=None): @@ -182,7 +182,7 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase): ) yield self.hs.get_datastore().persist_event(event, context) - defer.returnValue(event) + return event @defer.inlineCallbacks def test_large_room(self): diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 7807328e2f..56320bbaf9 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -159,7 +159,7 @@ class DescriptorTestCase(unittest.TestCase): def inner_fn(): with PreserveLoggingContext(): yield complete_lookup - defer.returnValue(1) + return 1 return inner_fn() @@ -169,7 +169,7 @@ class DescriptorTestCase(unittest.TestCase): c1.name = "c1" r = yield obj.fn(1) self.assertEqual(LoggingContext.current_context(), c1) - defer.returnValue(r) + return r def check_result(r): self.assertEqual(r, 1) @@ -286,7 +286,7 @@ class CachedListDescriptorTestCase(unittest.TestCase): # we want this to behave like an asynchronous function yield run_on_reactor() assert LoggingContext.current_context().request == "c1" - defer.returnValue(self.mock(args1, arg2)) + return self.mock(args1, arg2) with LoggingContext() as c1: c1.request = "c1" @@ -334,7 +334,7 @@ class CachedListDescriptorTestCase(unittest.TestCase): def list_fn(self, args1, arg2): # we want this to behave like an asynchronous function yield run_on_reactor() - defer.returnValue(self.mock(args1, arg2)) + return self.mock(args1, arg2) obj = Cls() invalidate0 = 
mock.Mock() diff --git a/tests/utils.py b/tests/utils.py index 8a94ce0b47..425e3387db 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -361,7 +361,7 @@ def setup_test_homeserver( if fed: register_federation_servlets(hs, fed) - defer.returnValue(hs) + return hs def register_federation_servlets(hs, resource): @@ -465,9 +465,9 @@ class MockHttpResource(HttpServer): args = [urlparse.unquote(u) for u in matcher.groups()] (code, response) = yield func(mock_request, *args) - defer.returnValue((code, response)) + return (code, response) except CodeMessageException as e: - defer.returnValue((e.code, cs_error(e.msg, code=e.errcode))) + return (e.code, cs_error(e.msg, code=e.errcode)) raise KeyError("No event can handle %s" % path) -- cgit 1.4.1 From f30a71a67b6605cb0f09975af3befc61090326bd Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 24 Jul 2019 13:16:18 +0100 Subject: Stop trying to fetch events with event_id=None. (#5753) `None` is not a valid event id, so queuing up a database fetch for it seems like a silly thing to do. I considered making `get_event` return `None` if `event_id is None`, but then its interaction with `allow_none` seemed uninituitive, and strong typing ftw. --- changelog.d/5753.misc | 1 + synapse/handlers/message.py | 8 +++++++- synapse/storage/events_worker.py | 5 ++++- synapse/storage/stats.py | 20 +++++++++++--------- 4 files changed, 23 insertions(+), 11 deletions(-) create mode 100644 changelog.d/5753.misc (limited to 'synapse/handlers') diff --git a/changelog.d/5753.misc b/changelog.d/5753.misc new file mode 100644 index 0000000000..22bba9ce3c --- /dev/null +++ b/changelog.d/5753.misc @@ -0,0 +1 @@ +Stop trying to fetch events with event_id=None. diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 8b27e23378..e951c39fa7 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -378,7 +378,11 @@ class EventCreationHandler(object): # tolerate them in event_auth.check(). prev_state_ids = yield context.get_prev_state_ids(self.store) prev_event_id = prev_state_ids.get((EventTypes.Member, event.sender)) - prev_event = yield self.store.get_event(prev_event_id, allow_none=True) + prev_event = ( + yield self.store.get_event(prev_event_id, allow_none=True) + if prev_event_id + else None + ) if not prev_event or prev_event.membership != Membership.JOIN: logger.warning( ( @@ -521,6 +525,8 @@ class EventCreationHandler(object): """ prev_state_ids = yield context.get_prev_state_ids(self.store) prev_event_id = prev_state_ids.get((event.type, event.state_key)) + if not prev_event_id: + return prev_event = yield self.store.get_event(prev_event_id, allow_none=True) if not prev_event: return diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 44441957db..83fe4764d8 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -139,8 +139,11 @@ class EventsWorkerStore(SQLBaseStore): If there is a mismatch, behave as per allow_none. Returns: - Deferred : A FrozenEvent. 
+ Deferred[EventBase|None] """ + if not isinstance(event_id, str): + raise TypeError("Invalid event event_id %r" % (event_id,)) + events = yield self.get_events_as_list( [event_id], check_redacted=check_redacted, diff --git a/synapse/storage/stats.py b/synapse/storage/stats.py index e893b05ee7..e13efed417 100644 --- a/synapse/storage/stats.py +++ b/synapse/storage/stats.py @@ -211,16 +211,18 @@ class StatsStore(StateDeltasStore): avatar_id = current_state_ids.get((EventTypes.RoomAvatar, "")) canonical_alias_id = current_state_ids.get((EventTypes.CanonicalAlias, "")) + event_ids = [ + join_rules_id, + history_visibility_id, + encryption_id, + name_id, + topic_id, + avatar_id, + canonical_alias_id, + ] + state_events = yield self.get_events( - [ - join_rules_id, - history_visibility_id, - encryption_id, - name_id, - topic_id, - avatar_id, - canonical_alias_id, - ] + [ev for ev in event_ids if ev is not None] ) def _get_or_none(event_id, arg): -- cgit 1.4.1 From d1020653fcbecabcf8e109dafc6258b1f2c2afd0 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 26 Jul 2019 10:08:22 +0100 Subject: Log when we receive a /make_* request from a different origin --- changelog.d/5744.bugfix | 1 + synapse/federation/federation_server.py | 4 ++-- synapse/handlers/federation.py | 37 +++++++++++++++++++++++++++++++-- 3 files changed, 38 insertions(+), 4 deletions(-) create mode 100644 changelog.d/5744.bugfix (limited to 'synapse/handlers') diff --git a/changelog.d/5744.bugfix b/changelog.d/5744.bugfix new file mode 100644 index 0000000000..7b67ebb2d3 --- /dev/null +++ b/changelog.d/5744.bugfix @@ -0,0 +1 @@ +Log when we receive a `/make_*` request from a different origin. diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 8c0a18b120..ed2b6d5eef 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -369,7 +369,7 @@ class FederationServer(FederationBase): logger.warn("Room version %s not in %s", room_version, supported_versions) raise IncompatibleRoomVersionError(room_version=room_version) - pdu = yield self.handler.on_make_join_request(room_id, user_id) + pdu = yield self.handler.on_make_join_request(origin, room_id, user_id) time_now = self._clock.time_msec() defer.returnValue( {"event": pdu.get_pdu_json(time_now), "room_version": room_version} @@ -423,7 +423,7 @@ class FederationServer(FederationBase): def on_make_leave_request(self, origin, room_id, user_id): origin_host, _ = parse_server_name(origin) yield self.check_server_matches_acl(origin_host, room_id) - pdu = yield self.handler.on_make_leave_request(room_id, user_id) + pdu = yield self.handler.on_make_leave_request(origin, room_id, user_id) room_version = yield self.store.get_room_version(room_id) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 57be968c67..30b69af82c 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1204,11 +1204,28 @@ class FederationHandler(BaseHandler): @defer.inlineCallbacks @log_function - def on_make_join_request(self, room_id, user_id): + def on_make_join_request(self, origin, room_id, user_id): """ We've received a /make_join/ request, so we create a partial join event for the room and return that. We do *not* persist or process it until the other server has signed it and sent it back. + + Args: + origin (str): The (verified) server name of the requesting server. 
+ room_id (str): Room to create join event in + user_id (str): The user to create the join for + + Returns: + Deferred[FrozenEvent] """ + + if get_domain_from_id(user_id) != origin: + logger.info( + "Got /make_join request for user %r from different origin %s, ignoring", + user_id, + origin, + ) + raise SynapseError(403, "User not from origin", Codes.FORBIDDEN) + event_content = {"membership": Membership.JOIN} room_version = yield self.store.get_room_version(room_id) @@ -1411,11 +1428,27 @@ class FederationHandler(BaseHandler): @defer.inlineCallbacks @log_function - def on_make_leave_request(self, room_id, user_id): + def on_make_leave_request(self, origin, room_id, user_id): """ We've received a /make_leave/ request, so we create a partial leave event for the room and return that. We do *not* persist or process it until the other server has signed it and sent it back. + + Args: + origin (str): The (verified) server name of the requesting server. + room_id (str): Room to create leave event in + user_id (str): The user to create the leave for + + Returns: + Deferred[FrozenEvent] """ + if get_domain_from_id(user_id) != origin: + logger.info( + "Got /make_leave request for user %r from different origin %s, ignoring", + user_id, + origin, + ) + raise SynapseError(403, "User not from origin", Codes.FORBIDDEN) + room_version = yield self.store.get_room_version(room_id) builder = self.event_builder_factory.new( room_version, -- cgit 1.4.1 From 85b0bd8fe05ed78548c9b2b0da768927582f7d70 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 29 Jul 2019 16:34:44 +0100 Subject: Update the device list cache when keys/query is called (#5693) --- changelog.d/5693.bugfix | 1 + synapse/handlers/device.py | 150 ++++++++++++++++++++++--------------------- synapse/handlers/e2e_keys.py | 60 ++++++++++++++++- 3 files changed, 137 insertions(+), 74 deletions(-) create mode 100644 changelog.d/5693.bugfix (limited to 'synapse/handlers') diff --git a/changelog.d/5693.bugfix b/changelog.d/5693.bugfix new file mode 100644 index 0000000000..d6f4e590ae --- /dev/null +++ b/changelog.d/5693.bugfix @@ -0,0 +1 @@ +Fix UISIs during homeserver outage. diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index d6ab337783..5c1cf83c9d 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -209,12 +209,12 @@ class DeviceHandler(DeviceWorkerHandler): self.federation_sender = hs.get_federation_sender() - self._edu_updater = DeviceListEduUpdater(hs, self) + self.device_list_updater = DeviceListUpdater(hs, self) federation_registry = hs.get_federation_registry() federation_registry.register_edu_handler( - "m.device_list_update", self._edu_updater.incoming_device_list_update + "m.device_list_update", self.device_list_updater.incoming_device_list_update ) federation_registry.register_query_handler( "user_devices", self.on_federation_query_user_devices @@ -426,7 +426,7 @@ def _update_device_from_client_ips(device, client_ips): device.update({"last_seen_ts": ip.get("last_seen"), "last_seen_ip": ip.get("ip")}) -class DeviceListEduUpdater(object): +class DeviceListUpdater(object): "Handles incoming device list updates from federation and updates the DB" def __init__(self, hs, device_handler): @@ -519,75 +519,7 @@ class DeviceListEduUpdater(object): logger.debug("Need to re-sync devices for %r? %r", user_id, resync) if resync: - # Fetch all devices for the user. 
- origin = get_domain_from_id(user_id) - try: - result = yield self.federation.query_user_devices(origin, user_id) - except ( - NotRetryingDestination, - RequestSendFailed, - HttpResponseException, - ): - # TODO: Remember that we are now out of sync and try again - # later - logger.warn("Failed to handle device list update for %s", user_id) - # We abort on exceptions rather than accepting the update - # as otherwise synapse will 'forget' that its device list - # is out of date. If we bail then we will retry the resync - # next time we get a device list update for this user_id. - # This makes it more likely that the device lists will - # eventually become consistent. - return - except FederationDeniedError as e: - logger.info(e) - return - except Exception: - # TODO: Remember that we are now out of sync and try again - # later - logger.exception( - "Failed to handle device list update for %s", user_id - ) - return - - stream_id = result["stream_id"] - devices = result["devices"] - - # If the remote server has more than ~1000 devices for this user - # we assume that something is going horribly wrong (e.g. a bot - # that logs in and creates a new device every time it tries to - # send a message). Maintaining lots of devices per user in the - # cache can cause serious performance issues as if this request - # takes more than 60s to complete, internal replication from the - # inbound federation worker to the synapse master may time out - # causing the inbound federation to fail and causing the remote - # server to retry, causing a DoS. So in this scenario we give - # up on storing the total list of devices and only handle the - # delta instead. - if len(devices) > 1000: - logger.warn( - "Ignoring device list snapshot for %s as it has >1K devs (%d)", - user_id, - len(devices), - ) - devices = [] - - for device in devices: - logger.debug( - "Handling resync update %r/%r, ID: %r", - user_id, - device["device_id"], - stream_id, - ) - - yield self.store.update_remote_device_list_cache( - user_id, devices, stream_id - ) - device_ids = [device["device_id"] for device in devices] - yield self.device_handler.notify_device_update(user_id, device_ids) - - # We clobber the seen updates since we've re-synced from a given - # point. - self._seen_updates[user_id] = set([stream_id]) + yield self.user_device_resync(user_id) else: # Simply update the single device, since we know that is the only # change (because of the single prev_id matching the current cache) @@ -634,3 +566,77 @@ class DeviceListEduUpdater(object): stream_id_in_updates.add(stream_id) return False + + @defer.inlineCallbacks + def user_device_resync(self, user_id): + """Fetches all devices for a user and updates the device cache with them. + + Args: + user_id (str): The user's id whose device_list will be updated. + Returns: + Deferred[dict]: a dict with device info as under the "devices" in the result of this + request: + https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid + """ + # Fetch all devices for the user. + origin = get_domain_from_id(user_id) + try: + result = yield self.federation.query_user_devices(origin, user_id) + except (NotRetryingDestination, RequestSendFailed, HttpResponseException): + # TODO: Remember that we are now out of sync and try again + # later + logger.warn("Failed to handle device list update for %s", user_id) + # We abort on exceptions rather than accepting the update + # as otherwise synapse will 'forget' that its device list + # is out of date. 
If we bail then we will retry the resync + # next time we get a device list update for this user_id. + # This makes it more likely that the device lists will + # eventually become consistent. + return + except FederationDeniedError as e: + logger.info(e) + return + except Exception: + # TODO: Remember that we are now out of sync and try again + # later + logger.exception("Failed to handle device list update for %s", user_id) + return + stream_id = result["stream_id"] + devices = result["devices"] + + # If the remote server has more than ~1000 devices for this user + # we assume that something is going horribly wrong (e.g. a bot + # that logs in and creates a new device every time it tries to + # send a message). Maintaining lots of devices per user in the + # cache can cause serious performance issues as if this request + # takes more than 60s to complete, internal replication from the + # inbound federation worker to the synapse master may time out + # causing the inbound federation to fail and causing the remote + # server to retry, causing a DoS. So in this scenario we give + # up on storing the total list of devices and only handle the + # delta instead. + if len(devices) > 1000: + logger.warn( + "Ignoring device list snapshot for %s as it has >1K devs (%d)", + user_id, + len(devices), + ) + devices = [] + + for device in devices: + logger.debug( + "Handling resync update %r/%r, ID: %r", + user_id, + device["device_id"], + stream_id, + ) + + yield self.store.update_remote_device_list_cache(user_id, devices, stream_id) + device_ids = [device["device_id"] for device in devices] + yield self.device_handler.notify_device_update(user_id, device_ids) + + # We clobber the seen updates since we've re-synced from a given + # point. + self._seen_updates[user_id] = set([stream_id]) + + defer.returnValue(result) diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 1300b540e3..366a0bc68b 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -65,6 +65,7 @@ class E2eKeysHandler(object): } } """ + device_keys_query = query_body.get("device_keys", {}) # separate users by domain. @@ -121,7 +122,58 @@ class E2eKeysHandler(object): # Now fetch any devices that we don't have in our cache @defer.inlineCallbacks def do_remote_query(destination): + """This is called when we are querying the device list of a user on + a remote homeserver and their device list is not in the device list + cache. If we share a room with this user and we're not querying for + specific user we will update the cache + with their device list.""" + destination_query = remote_queries_not_in_cache[destination] + + # We first consider whether we wish to update the device list cache with + # the users device list. We want to track a user's devices when the + # authenticated user shares a room with the queried user and the query + # has not specified a particular device. + # If we update the cache for the queried user we remove them from further + # queries. We use the more efficient batched query_client_keys for all + # remaining users + user_ids_updated = [] + for (user_id, device_list) in destination_query.items(): + if user_id in user_ids_updated: + continue + + if device_list: + continue + + room_ids = yield self.store.get_rooms_for_user(user_id) + if not room_ids: + continue + + # We've decided we're sharing a room with this user and should + # probably be tracking their device lists. However, we haven't + # done an initial sync on the device list so we do it now. 
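The rule described in the comments above can be read in isolation; in this sketch the helper name is illustrative, while get_rooms_for_user and user_device_resync are the calls used in this hunk: a remote user's device list is only resynced into the cache when the query named no specific devices and we share at least one room with that user.

from twisted.internet import defer

@defer.inlineCallbacks
def maybe_resync_remote_user(store, device_list_updater, user_id, requested_devices):
    if requested_devices:
        # The caller asked about particular devices; leave the cache alone.
        return None
    room_ids = yield store.get_rooms_for_user(user_id)
    if not room_ids:
        # We only track device lists for users we share a room with.
        return None
    # Fetch the user's full device list over federation and update the cache;
    # the result has the same shape as the federation user_devices response.
    result = yield device_list_updater.user_device_resync(user_id)
    return result["devices"] if result else None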
+ try: + user_devices = yield self.device_handler.device_list_updater.user_device_resync( + user_id + ) + user_devices = user_devices["devices"] + for device in user_devices: + results[user_id] = {device["device_id"]: device["keys"]} + user_ids_updated.append(user_id) + except Exception as e: + failures[destination] = failures.get(destination, []).append( + _exception_to_failure(e) + ) + + if len(destination_query) == len(user_ids_updated): + # We've updated all the users in the query and we do not need to + # make any further remote calls. + return + + # Remove all the users from the query which we have updated + for user_id in user_ids_updated: + destination_query.pop(user_id) + try: remote_result = yield self.federation.query_client_keys( destination, {"device_keys": destination_query}, timeout=timeout @@ -132,7 +184,8 @@ class E2eKeysHandler(object): results[user_id] = keys except Exception as e: - failures[destination] = _exception_to_failure(e) + failure = _exception_to_failure(e) + failures[destination] = failure yield make_deferred_yieldable( defer.gatherResults( @@ -234,8 +287,10 @@ class E2eKeysHandler(object): for user_id, keys in remote_result["one_time_keys"].items(): if user_id in device_keys: json_result[user_id] = keys + except Exception as e: - failures[destination] = _exception_to_failure(e) + failure = _exception_to_failure(e) + failures[destination] = failure yield make_deferred_yieldable( defer.gatherResults( @@ -263,6 +318,7 @@ class E2eKeysHandler(object): @defer.inlineCallbacks def upload_keys_for_user(self, user_id, device_id, keys): + time_now = self.clock.time_msec() # TODO: Validate the JSON to make sure it has the right keys. -- cgit 1.4.1 From 97a8b4caf7badb83c941c8afdb7ce237ee19cb7d Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 30 Jul 2019 02:02:18 +1000 Subject: Move some timeout checking logs to DEBUG #5785 --- changelog.d/5785.misc | 1 + synapse/handlers/presence.py | 2 +- synapse/handlers/typing.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/5785.misc (limited to 'synapse/handlers') diff --git a/changelog.d/5785.misc b/changelog.d/5785.misc new file mode 100644 index 0000000000..0691222c42 --- /dev/null +++ b/changelog.d/5785.misc @@ -0,0 +1 @@ +Set the logs emitted when checking typing and presence timeouts to DEBUG level, not INFO. diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index ea54d0b991..94a9ca0357 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -333,7 +333,7 @@ class PresenceHandler(object): """Checks the presence of users that have timed out and updates as appropriate. """ - logger.info("Handling presence timeouts") + logger.debug("Handling presence timeouts") now = self.clock.time_msec() # Fetch the list of users that *may* have timed out. 
Things may have diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index 6b661aa93d..f882330293 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -83,7 +83,7 @@ class TypingHandler(object): self._room_typing = {} def _handle_timeouts(self): - logger.info("Checking for typing timeouts") + logger.debug("Checking for typing timeouts") now = self.clock.time_msec() -- cgit 1.4.1 From 865077f1d1f4866ab874c56b70abbd426fedfb97 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 30 Jul 2019 02:47:27 +1000 Subject: Room Complexity Client Implementation (#5783) --- changelog.d/5783.feature | 1 + docs/sample_config.yaml | 17 +++++++ synapse/config/server.py | 41 ++++++++++++++++ synapse/federation/federation_client.py | 36 ++++++++++++++ synapse/federation/transport/client.py | 31 +++++++++--- synapse/handlers/federation.py | 25 ++++++++++ synapse/handlers/room_member.py | 84 +++++++++++++++++++++++++++++++-- tests/federation/test_complexity.py | 77 ++++++++++++++++++++++++++++-- 8 files changed, 298 insertions(+), 14 deletions(-) create mode 100644 changelog.d/5783.feature (limited to 'synapse/handlers') diff --git a/changelog.d/5783.feature b/changelog.d/5783.feature new file mode 100644 index 0000000000..18f5a3cb28 --- /dev/null +++ b/changelog.d/5783.feature @@ -0,0 +1 @@ +Synapse can now be configured to not join remote rooms of a given "complexity" (currently, state events) over federation. This option can be used to prevent adverse performance on resource-constrained homeservers. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 7edf15207a..b92959692d 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -278,6 +278,23 @@ listeners: # Used by phonehome stats to group together related servers. #server_context: context +# Resource-constrained Homeserver Settings +# +# If limit_remote_rooms.enabled is True, the room complexity will be +# checked before a user joins a new remote room. If it is above +# limit_remote_rooms.complexity, it will disallow joining or +# instantly leave. +# +# limit_remote_rooms.complexity_error can be set to customise the text +# displayed to the user when a room above the complexity threshold has +# its join cancelled. +# +# Uncomment the below lines to enable: +#limit_remote_rooms: +# enabled: True +# complexity: 1.0 +# complexity_error: "This room is too complex." + # Whether to require a user to be in the room to add an alias to it. # Defaults to 'true'. # diff --git a/synapse/config/server.py b/synapse/config/server.py index 00170f1393..15449695d1 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -18,6 +18,7 @@ import logging import os.path +import attr from netaddr import IPSet from synapse.api.room_versions import KNOWN_ROOM_VERSIONS @@ -38,6 +39,12 @@ DEFAULT_BIND_ADDRESSES = ["::", "0.0.0.0"] DEFAULT_ROOM_VERSION = "4" +ROOM_COMPLEXITY_TOO_GREAT = ( + "Your homeserver is unable to join rooms this large or complex. " + "Please speak to your server administrator, or upgrade your instance " + "to join this room." 
+) + class ServerConfig(Config): def read_config(self, config, **kwargs): @@ -247,6 +254,23 @@ class ServerConfig(Config): self.gc_thresholds = read_gc_thresholds(config.get("gc_thresholds", None)) + @attr.s + class LimitRemoteRoomsConfig(object): + enabled = attr.ib( + validator=attr.validators.instance_of(bool), default=False + ) + complexity = attr.ib( + validator=attr.validators.instance_of((int, float)), default=1.0 + ) + complexity_error = attr.ib( + validator=attr.validators.instance_of(str), + default=ROOM_COMPLEXITY_TOO_GREAT, + ) + + self.limit_remote_rooms = LimitRemoteRoomsConfig( + **config.get("limit_remote_rooms", {}) + ) + bind_port = config.get("bind_port") if bind_port: if config.get("no_tls", False): @@ -617,6 +641,23 @@ class ServerConfig(Config): # Used by phonehome stats to group together related servers. #server_context: context + # Resource-constrained Homeserver Settings + # + # If limit_remote_rooms.enabled is True, the room complexity will be + # checked before a user joins a new remote room. If it is above + # limit_remote_rooms.complexity, it will disallow joining or + # instantly leave. + # + # limit_remote_rooms.complexity_error can be set to customise the text + # displayed to the user when a room above the complexity threshold has + # its join cancelled. + # + # Uncomment the below lines to enable: + #limit_remote_rooms: + # enabled: True + # complexity: 1.0 + # complexity_error: "This room is too complex." + # Whether to require a user to be in the room to add an alias to it. # Defaults to 'true'. # diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 25ed1257f1..6e03ce21af 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -993,3 +993,39 @@ class FederationClient(FederationBase): ) raise RuntimeError("Failed to send to any server.") + + @defer.inlineCallbacks + def get_room_complexity(self, destination, room_id): + """ + Fetch the complexity of a remote room from another server. + + Args: + destination (str): The remote server + room_id (str): The room ID to ask about. + + Returns: + Deferred[dict] or Deferred[None]: Dict contains the complexity + metric versions, while None means we could not fetch the complexity. + """ + try: + complexity = yield self.transport_layer.get_room_complexity( + destination=destination, room_id=room_id + ) + defer.returnValue(complexity) + except CodeMessageException as e: + # We didn't manage to get it -- probably a 404. We are okay if other + # servers don't give it to us. + logger.debug( + "Failed to fetch room complexity via %s for %s, got a %d", + destination, + room_id, + e.code, + ) + except Exception: + logger.exception( + "Failed to fetch room complexity via %s for %s", destination, room_id + ) + + # If we don't manage to find it, return None. It's not an error if a + # server doesn't give it to us. 
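A usage sketch for the client side of this API (the helper and its logging are invented; get_room_complexity is the new transport method added in the next hunk): the call is a plain GET under the unstable federation prefix at /rooms/<room_id>/complexity, and the JSON body is expected to carry at least a "v1" metric, e.g. {"v1": 1.23}.

import logging

from twisted.internet import defer

logger = logging.getLogger(__name__)

@defer.inlineCallbacks
def log_room_complexity(transport_client, destination, room_id):
    # Ask `destination` how complex it considers the room to be.
    result = yield transport_client.get_room_complexity(destination, room_id)
    logger.info(
        "Complexity of %s according to %s: %s",
        room_id,
        destination,
        result.get("v1"),
    )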
+ defer.returnValue(None) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 2a6709ff48..0cea0d2a10 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -21,7 +21,11 @@ from six.moves import urllib from twisted.internet import defer from synapse.api.constants import Membership -from synapse.api.urls import FEDERATION_V1_PREFIX, FEDERATION_V2_PREFIX +from synapse.api.urls import ( + FEDERATION_UNSTABLE_PREFIX, + FEDERATION_V1_PREFIX, + FEDERATION_V2_PREFIX, +) from synapse.logging.utils import log_function logger = logging.getLogger(__name__) @@ -935,6 +939,23 @@ class TransportLayerClient(object): destination=destination, path=path, data=content, ignore_backoff=True ) + def get_room_complexity(self, destination, room_id): + """ + Args: + destination (str): The remote server + room_id (str): The room ID to ask about. + """ + path = _create_path(FEDERATION_UNSTABLE_PREFIX, "/rooms/%s/complexity", room_id) + + return self.client.get_json(destination=destination, path=path) + + +def _create_path(federation_prefix, path, *args): + """ + Ensures that all args are url encoded. + """ + return federation_prefix + path % tuple(urllib.parse.quote(arg, "") for arg in args) + def _create_v1_path(path, *args): """Creates a path against V1 federation API from the path template and @@ -951,9 +972,7 @@ def _create_v1_path(path, *args): Returns: str """ - return FEDERATION_V1_PREFIX + path % tuple( - urllib.parse.quote(arg, "") for arg in args - ) + return _create_path(FEDERATION_V1_PREFIX, path, *args) def _create_v2_path(path, *args): @@ -971,6 +990,4 @@ def _create_v2_path(path, *args): Returns: str """ - return FEDERATION_V2_PREFIX + path % tuple( - urllib.parse.quote(arg, "") for arg in args - ) + return _create_path(FEDERATION_V2_PREFIX, path, *args) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 89b37dbc1c..10160bfe86 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -2796,3 +2796,28 @@ class FederationHandler(BaseHandler): ) else: return user_joined_room(self.distributor, user, room_id) + + @defer.inlineCallbacks + def get_room_complexity(self, remote_room_hosts, room_id): + """ + Fetch the complexity of a remote room over federation. + + Args: + remote_room_hosts (list[str]): The remote servers to ask. + room_id (str): The room ID to ask about. + + Returns: + Deferred[dict] or Deferred[None]: Dict contains the complexity + metric versions, while None means we could not fetch the complexity. + """ + + for host in remote_room_hosts: + res = yield self.federation_client.get_room_complexity(host, room_id) + + # We got a result, return it. + if res: + defer.returnValue(res) + + # We fell off the bottom, couldn't get the complexity from anyone. Oh + # well. 
+ defer.returnValue(None) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index baea08ddd0..249a6d9c5d 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -26,8 +26,7 @@ from unpaddedbase64 import decode_base64 from twisted.internet import defer -import synapse.server -import synapse.types +from synapse import types from synapse.api.constants import EventTypes, Membership from synapse.api.errors import AuthError, Codes, HttpResponseException, SynapseError from synapse.types import RoomID, UserID @@ -543,7 +542,7 @@ class RoomMemberHandler(object): ), "Sender (%s) must be same as requester (%s)" % (sender, requester.user) assert self.hs.is_mine(sender), "Sender must be our own: %s" % (sender,) else: - requester = synapse.types.create_requester(target_user) + requester = types.create_requester(target_user) prev_event = yield self.event_creation_handler.deduplicate_state_event( event, context @@ -945,6 +944,47 @@ class RoomMemberMasterHandler(RoomMemberHandler): self.distributor.declare("user_joined_room") self.distributor.declare("user_left_room") + @defer.inlineCallbacks + def _is_remote_room_too_complex(self, room_id, remote_room_hosts): + """ + Check if complexity of a remote room is too great. + + Args: + room_id (str) + remote_room_hosts (list[str]) + + Returns: bool of whether the complexity is too great, or None + if unable to be fetched + """ + max_complexity = self.hs.config.limit_remote_rooms.complexity + complexity = yield self.federation_handler.get_room_complexity( + remote_room_hosts, room_id + ) + + if complexity: + if complexity["v1"] > max_complexity: + return True + return False + return None + + @defer.inlineCallbacks + def _is_local_room_too_complex(self, room_id): + """ + Check if the complexity of a local room is too great. + + Args: + room_id (str) + + Returns: bool + """ + max_complexity = self.hs.config.limit_remote_rooms.complexity + complexity = yield self.store.get_room_complexity(room_id) + + if complexity["v1"] > max_complexity: + return True + + return False + @defer.inlineCallbacks def _remote_join(self, requester, remote_room_hosts, room_id, user, content): """Implements RoomMemberHandler._remote_join @@ -952,7 +992,6 @@ class RoomMemberMasterHandler(RoomMemberHandler): # filter ourselves out of remote_room_hosts: do_invite_join ignores it # and if it is the only entry we'd like to return a 404 rather than a # 500. - remote_room_hosts = [ host for host in remote_room_hosts if host != self.hs.hostname ] @@ -960,6 +999,18 @@ class RoomMemberMasterHandler(RoomMemberHandler): if len(remote_room_hosts) == 0: raise SynapseError(404, "No known servers") + if self.hs.config.limit_remote_rooms.enabled: + # Fetch the room complexity + too_complex = yield self._is_remote_room_too_complex( + room_id, remote_room_hosts + ) + if too_complex is True: + raise SynapseError( + code=400, + msg=self.hs.config.limit_remote_rooms.complexity_error, + errcode=Codes.RESOURCE_LIMIT_EXCEEDED, + ) + # We don't do an auth check if we are doing an invite # join dance for now, since we're kinda implicitly checking # that we are allowed to join when we decide whether or not we @@ -969,6 +1020,31 @@ class RoomMemberMasterHandler(RoomMemberHandler): ) yield self._user_joined_room(user, room_id) + # Check the room we just joined wasn't too large, if we didn't fetch the + # complexity of it before. + if self.hs.config.limit_remote_rooms.enabled: + if too_complex is False: + # We checked, and we're under the limit. 
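Putting the pieces of _remote_join together (the wrapper below is hypothetical; the two _is_*_too_complex helpers, SynapseError and Codes.RESOURCE_LIMIT_EXCEEDED are the ones used above): the remote complexity is checked before joining, and only if no server could answer is the join allowed to proceed and then re-checked against the local state, with the real handler also leaving the room again before raising.

from twisted.internet import defer

from synapse.api.errors import Codes, SynapseError

@defer.inlineCallbacks
def complexity_checked_join(member_handler, hosts, room_id, do_join, error_msg):
    # `do_join` stands in for the actual federation join call.
    too_complex = yield member_handler._is_remote_room_too_complex(room_id, hosts)
    if too_complex is True:
        # A remote server reported a complexity above our cap: refuse up front.
        raise SynapseError(400, error_msg, Codes.RESOURCE_LIMIT_EXCEEDED)

    yield do_join()

    if too_complex is None:
        # No server could tell us beforehand, so re-check using the state we
        # now hold locally (the real handler leaves the room again here).
        if (yield member_handler._is_local_room_too_complex(room_id)):
            raise SynapseError(400, error_msg, Codes.RESOURCE_LIMIT_EXCEEDED)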
+ return + + # Check again, but with the local state events + too_complex = yield self._is_local_room_too_complex(room_id) + + if too_complex is False: + # We're under the limit. + return + + # The room is too large. Leave. + requester = types.create_requester(user, None, False, None) + yield self.update_membership( + requester=requester, target=user, room_id=room_id, action="leave" + ) + raise SynapseError( + code=400, + msg=self.hs.config.limit_remote_rooms.complexity_error, + errcode=Codes.RESOURCE_LIMIT_EXCEEDED, + ) + @defer.inlineCallbacks def _remote_reject_invite(self, requester, remote_room_hosts, room_id, target): """Implements RoomMemberHandler._remote_reject_invite diff --git a/tests/federation/test_complexity.py b/tests/federation/test_complexity.py index a5b03005d7..51714a2b06 100644 --- a/tests/federation/test_complexity.py +++ b/tests/federation/test_complexity.py @@ -13,12 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. +from mock import Mock + from twisted.internet import defer +from synapse.api.errors import Codes, SynapseError from synapse.config.ratelimiting import FederationRateLimitConfig from synapse.federation.transport import server from synapse.rest import admin from synapse.rest.client.v1 import login, room +from synapse.types import UserID from synapse.util.ratelimitutils import FederationRateLimiter from tests import unittest @@ -33,9 +37,8 @@ class RoomComplexityTests(unittest.HomeserverTestCase): ] def default_config(self, name="test"): - config = super(RoomComplexityTests, self).default_config(name=name) - config["limit_large_remote_room_joins"] = True - config["limit_large_remote_room_complexity"] = 0.05 + config = super().default_config(name=name) + config["limit_remote_rooms"] = {"enabled": True, "complexity": 0.05} return config def prepare(self, reactor, clock, homeserver): @@ -88,3 +91,71 @@ class RoomComplexityTests(unittest.HomeserverTestCase): self.assertEquals(200, channel.code) complexity = channel.json_body["v1"] self.assertEqual(complexity, 1.23) + + def test_join_too_large(self): + + u1 = self.register_user("u1", "pass") + + handler = self.hs.get_room_member_handler() + fed_transport = self.hs.get_federation_transport_client() + + # Mock out some things, because we don't want to test the whole join + fed_transport.client.get_json = Mock(return_value=defer.succeed({"v1": 9999})) + handler.federation_handler.do_invite_join = Mock(return_value=defer.succeed(1)) + + d = handler._remote_join( + None, + ["otherserver.example"], + "roomid", + UserID.from_string(u1), + {"membership": "join"}, + ) + + self.pump() + + # The request failed with a SynapseError saying the resource limit was + # exceeded. + f = self.get_failure(d, SynapseError) + self.assertEqual(f.value.code, 400, f.value) + self.assertEqual(f.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED) + + def test_join_too_large_once_joined(self): + + u1 = self.register_user("u1", "pass") + u1_token = self.login("u1", "pass") + + # Ok, this might seem a bit weird -- I want to test that we actually + # leave the room, but I don't want to simulate two servers. So, we make + # a local room, which we say we're joining remotely, even if there's no + # remote, because we mock that out. Then, we'll leave the (actually + # local) room, which will be propagated over federation in a real + # scenario. 
+ room_1 = self.helper.create_room_as(u1, tok=u1_token) + + handler = self.hs.get_room_member_handler() + fed_transport = self.hs.get_federation_transport_client() + + # Mock out some things, because we don't want to test the whole join + fed_transport.client.get_json = Mock(return_value=defer.succeed(None)) + handler.federation_handler.do_invite_join = Mock(return_value=defer.succeed(1)) + + # Artificially raise the complexity + self.hs.get_datastore().get_current_state_event_counts = lambda x: defer.succeed( + 600 + ) + + d = handler._remote_join( + None, + ["otherserver.example"], + room_1, + UserID.from_string(u1), + {"membership": "join"}, + ) + + self.pump() + + # The request failed with a SynapseError saying the resource limit was + # exceeded. + f = self.get_failure(d, SynapseError) + self.assertEqual(f.value.code, 400) + self.assertEqual(f.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED) -- cgit 1.4.1 From 8c97f6414cf322fc5b42a92ed0df2fb70bfab3fc Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 30 Jul 2019 08:25:02 +0100 Subject: Remove non-functional 'expire_access_token' setting (#5782) The `expire_access_token` didn't do what it sounded like it should do. What it actually did was make Synapse enforce the 'time' caveat on macaroons used as access tokens, but since our access token macaroons never contained such a caveat, it was always a no-op. (The code to add 'time' caveats was removed back in v0.18.5, in #1656) --- changelog.d/5782.removal | 1 + docs/sample_config.yaml | 4 ---- synapse/api/auth.py | 28 ++++------------------ synapse/config/key.py | 6 ----- synapse/handlers/auth.py | 2 +- tests/handlers/test_register.py | 2 +- .../test_resource_limits_server_notices.py | 2 +- tests/utils.py | 1 - 8 files changed, 9 insertions(+), 37 deletions(-) create mode 100644 changelog.d/5782.removal (limited to 'synapse/handlers') diff --git a/changelog.d/5782.removal b/changelog.d/5782.removal new file mode 100644 index 0000000000..658bf923ab --- /dev/null +++ b/changelog.d/5782.removal @@ -0,0 +1 @@ +Remove non-functional 'expire_access_token' setting. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index b92959692d..08316597fa 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -942,10 +942,6 @@ uploads_path: "DATADIR/uploads" # # macaroon_secret_key: -# Used to enable access token expiration. -# -#expire_access_token: False - # a secret which is used to calculate HMACs for form values, to stop # falsification of values. Must be specified for the User Consent # forms to work. 
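The room-complexity changes earlier in this series combine three pieces: a remote lookup over the unstable federation API (which may fail and yield None), a threshold configured under limit_remote_rooms, and a post-join fallback that re-checks against local state and leaves the room if the limit is exceeded. The sketch below is a standalone illustration of that decision logic only; the {"v1": ...} response shape and the True/False/None convention come from the diffs and tests above, while the function names and plain RuntimeError are stand-ins (the real code lives on RoomMemberMasterHandler, uses Twisted Deferreds, and raises SynapseError with Codes.RESOURCE_LIMIT_EXCEEDED).

    def is_too_complex(complexity, max_complexity):
        """True/False, or None when the complexity could not be fetched."""
        if not complexity:
            return None
        return complexity["v1"] > max_complexity


    def checked_remote_join(fetch_remote_complexity, fetch_local_complexity,
                            do_join, do_leave, max_complexity):
        # Ask the remote servers before joining; None means nobody could tell
        # us, so we fall back to checking after the join has completed.
        too_complex = is_too_complex(fetch_remote_complexity(), max_complexity)
        if too_complex is True:
            raise RuntimeError("Room is too complex, refusing to join")

        do_join()

        if too_complex is False:
            return  # already checked against the remote answer

        # We could not check beforehand: re-check with our local copy of the
        # state and back out if the room turns out to be too large.
        if is_too_complex(fetch_local_complexity(), max_complexity):
            do_leave()
            raise RuntimeError("Room is too complex, left after joining")


    # Mirrors the first test above: the remote reports v1=9999 against a 0.05
    # limit, so the join is refused before it is attempted.
    # checked_remote_join(lambda: {"v1": 9999}, lambda: {"v1": 0.01},
    #                     do_join=lambda: None, do_leave=lambda: None,
    #                     max_complexity=0.05)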
diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 351790cca4..179644852a 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -410,21 +410,16 @@ class Auth(object): try: user_id = self.get_user_id_from_macaroon(macaroon) - has_expiry = False guest = False for caveat in macaroon.caveats: - if caveat.caveat_id.startswith("time "): - has_expiry = True - elif caveat.caveat_id == "guest = true": + if caveat.caveat_id == "guest = true": guest = True - self.validate_macaroon( - macaroon, rights, self.hs.config.expire_access_token, user_id=user_id - ) + self.validate_macaroon(macaroon, rights, user_id=user_id) except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError): raise InvalidClientTokenError("Invalid macaroon passed.") - if not has_expiry and rights == "access": + if rights == "access": self.token_cache[token] = (user_id, guest) return user_id, guest @@ -450,7 +445,7 @@ class Auth(object): return caveat.caveat_id[len(user_prefix) :] raise InvalidClientTokenError("No user caveat in macaroon") - def validate_macaroon(self, macaroon, type_string, verify_expiry, user_id): + def validate_macaroon(self, macaroon, type_string, user_id): """ validate that a Macaroon is understood by and was signed by this server. @@ -458,7 +453,6 @@ class Auth(object): macaroon(pymacaroons.Macaroon): The macaroon to validate type_string(str): The kind of token required (e.g. "access", "delete_pusher") - verify_expiry(bool): Whether to verify whether the macaroon has expired. user_id (str): The user_id required """ v = pymacaroons.Verifier() @@ -471,19 +465,7 @@ class Auth(object): v.satisfy_exact("type = " + type_string) v.satisfy_exact("user_id = %s" % user_id) v.satisfy_exact("guest = true") - - # verify_expiry should really always be True, but there exist access - # tokens in the wild which expire when they should not, so we can't - # enforce expiry yet (so we have to allow any caveat starting with - # 'time < ' in access tokens). - # - # On the other hand, short-term login tokens (as used by CAS login, for - # example) have an expiry time which we do want to enforce. - - if verify_expiry: - v.satisfy_general(self._verify_expiry) - else: - v.satisfy_general(lambda c: c.startswith("time < ")) + v.satisfy_general(self._verify_expiry) # access_tokens include a nonce for uniqueness: any value is acceptable v.satisfy_general(lambda c: c.startswith("nonce = ")) diff --git a/synapse/config/key.py b/synapse/config/key.py index 8fc74f9cdf..fe8386985c 100644 --- a/synapse/config/key.py +++ b/synapse/config/key.py @@ -116,8 +116,6 @@ class KeyConfig(Config): seed = bytes(self.signing_key[0]) self.macaroon_secret_key = hashlib.sha256(seed).digest() - self.expire_access_token = config.get("expire_access_token", False) - # a secret which is used to calculate HMACs for form values, to stop # falsification of values self.form_secret = config.get("form_secret", None) @@ -144,10 +142,6 @@ class KeyConfig(Config): # %(macaroon_secret_key)s - # Used to enable access token expiration. - # - #expire_access_token: False - # a secret which is used to calculate HMACs for form values, to stop # falsification of values. Must be specified for the User Consent # forms to work. 
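For context on what the verifier now always enforces: a "time <" caveat is just a first-party caveat carrying an expiry timestamp, and validate_macaroon only rejects a token on expiry grounds when such a caveat is actually present. The sketch below is a standalone illustration using pymacaroons, not Synapse code; the caveat strings mirror those in the diff and the millisecond timestamp convention is assumed. An access token minted without the caveat verifies either way, which is why expire_access_token never had an effect, while a token minted with an already-expired caveat is now always rejected.

    import time

    import pymacaroons
    from pymacaroons.exceptions import MacaroonException


    def mint(key, expires_at_ms=None):
        m = pymacaroons.Macaroon(location="example.org", identifier="key1", key=key)
        m.add_first_party_caveat("gen = 1")
        m.add_first_party_caveat("type = access")
        m.add_first_party_caveat("user_id = @alice:example.org")
        if expires_at_ms is not None:
            # The caveat whose enforcement the removed setting used to gate.
            m.add_first_party_caveat("time < %d" % (expires_at_ms,))
        return m.serialize()


    def is_valid(token, key):
        m = pymacaroons.Macaroon.deserialize(token)
        v = pymacaroons.Verifier()
        v.satisfy_exact("gen = 1")
        v.satisfy_exact("type = access")
        v.satisfy_exact("user_id = @alice:example.org")
        # Counterpart of _verify_expiry: accept only an unexpired "time <" caveat.
        v.satisfy_general(
            lambda c: c.startswith("time < ")
            and int(c[len("time < "):]) > int(time.time() * 1000)
        )
        try:
            v.verify(m, key)
            return True
        except MacaroonException:
            return False


    # No "time <" caveat: valid regardless of any expiry setting.
    assert is_valid(mint("secret"), "secret")
    # An already-expired caveat is now always enforced.
    assert not is_valid(mint("secret", expires_at_ms=1), "secret")

Short-term login tokens (for example those produced by CAS login) are the case that does carry this caveat, which is why their expiry continues to be enforced by the updated validate_macaroon call in synapse/handlers/auth.py below.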
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 05be5b7c48..0f3ebf7ef8 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -860,7 +860,7 @@ class AuthHandler(BaseHandler): try: macaroon = pymacaroons.Macaroon.deserialize(login_token) user_id = auth_api.get_user_id_from_macaroon(macaroon) - auth_api.validate_macaroon(macaroon, "login", True, user_id) + auth_api.validate_macaroon(macaroon, "login", user_id) except Exception: raise AuthError(403, "Invalid token", errcode=Codes.FORBIDDEN) self.ratelimit_login_per_account(user_id) diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 99dce45cfe..0ad0a88165 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -44,7 +44,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): hs_config["max_mau_value"] = 50 hs_config["limit_usage_by_mau"] = True - hs = self.setup_test_homeserver(config=hs_config, expire_access_token=True) + hs = self.setup_test_homeserver(config=hs_config) return hs def prepare(self, reactor, clock, hs): diff --git a/tests/server_notices/test_resource_limits_server_notices.py b/tests/server_notices/test_resource_limits_server_notices.py index 984feb623f..cdf89e3383 100644 --- a/tests/server_notices/test_resource_limits_server_notices.py +++ b/tests/server_notices/test_resource_limits_server_notices.py @@ -36,7 +36,7 @@ class TestResourceLimitsServerNotices(unittest.HomeserverTestCase): "room_name": "Server Notices", } - hs = self.setup_test_homeserver(config=hs_config, expire_access_token=True) + hs = self.setup_test_homeserver(config=hs_config) return hs def prepare(self, reactor, clock, hs): diff --git a/tests/utils.py b/tests/utils.py index 6350646263..f1eb9a545c 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -126,7 +126,6 @@ def default_config(name, parse=False): "enable_registration": True, "enable_registration_captcha": False, "macaroon_secret_key": "not even a little secret", - "expire_access_token": False, "trusted_third_party_id_servers": [], "room_invite_state_types": [], "password_providers": [], -- cgit 1.4.1 From 458e51df7aabe6fc2736c1aeb6a3556374309879 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 30 Jul 2019 13:07:02 +0100 Subject: Fix error handling when fetching remote device keys --- synapse/handlers/e2e_keys.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 366a0bc68b..848cd3a0d5 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -161,9 +161,7 @@ class E2eKeysHandler(object): results[user_id] = {device["device_id"]: device["keys"]} user_ids_updated.append(user_id) except Exception as e: - failures[destination] = failures.get(destination, []).append( - _exception_to_failure(e) - ) + failures[destination] = _exception_to_failure(e) if len(destination_query) == len(user_ids_updated): # We've updated all the users in the query and we do not need to -- cgit 1.4.1 From 1ec7d656dd57bce3c43994cc53727639ea05593e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 30 Jul 2019 13:09:02 +0100 Subject: Unwrap error --- synapse/handlers/e2e_keys.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 848cd3a0d5..1f90b0d278 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -25,6 +25,7 @@ from twisted.internet 
import defer from synapse.api.errors import CodeMessageException, SynapseError from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.types import UserID, get_domain_from_id +from synapse.util import unwrapFirstError from synapse.util.retryutils import NotRetryingDestination logger = logging.getLogger(__name__) @@ -192,7 +193,7 @@ class E2eKeysHandler(object): for destination in remote_queries_not_in_cache ], consumeErrors=True, - ) + ).addErrback(unwrapFirstError) ) return {"device_keys": results, "failures": failures} -- cgit 1.4.1
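The two e2e_keys fixes above are easy to reproduce in isolation. The first addresses a classic pitfall: list.append() returns None, so the old code stored None in the failures map instead of the failure payload. The second makes defer.gatherResults(..., consumeErrors=True) surface the original exception rather than the defer.FirstError wrapper, which is what synapse.util.unwrapFirstError does. A minimal standalone sketch of both follows; unwrap_first_error is a local stand-in for unwrapFirstError.

    from twisted.internet import defer

    # (1) list.append() returns None, so this pattern silently drops the payload.
    failures = {}
    failures["remote"] = failures.get("remote", []).append({"status": 503})
    assert failures["remote"] is None          # the failure detail is lost

    failures["remote"] = {"status": 503}       # direct assignment, as in the fix
    assert failures["remote"] == {"status": 503}


    # (2) gatherResults wraps the first failure in defer.FirstError; unwrapping
    # it lets callers see (and trap) the underlying exception instead.
    def unwrap_first_error(failure):
        failure.trap(defer.FirstError)
        return failure.value.subFailure        # the original Failure

    seen = []
    d = defer.gatherResults(
        [defer.fail(ValueError("boom")), defer.succeed(1)], consumeErrors=True
    ).addErrback(unwrap_first_error)
    d.addErrback(lambda f: seen.append(f.type))

    assert seen == [ValueError]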