diff --git a/synapse/handlers/account_data.py b/synapse/handlers/account_data.py
index e62e6cab77..8acd9f9a83 100644
--- a/synapse/handlers/account_data.py
+++ b/synapse/handlers/account_data.py
@@ -51,8 +51,8 @@ class AccountDataEventSource(object):
{"type": account_data_type, "content": content, "room_id": room_id}
)
- defer.returnValue((results, current_stream_id))
+ return (results, current_stream_id)
@defer.inlineCallbacks
def get_pagination_rows(self, user, config, key):
- defer.returnValue(([], config.to_id))
+ return ([], config.to_id)
diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py
index 1f1708ba7d..51305b0c90 100644
--- a/synapse/handlers/account_validity.py
+++ b/synapse/handlers/account_validity.py
@@ -43,6 +43,8 @@ class AccountValidityHandler(object):
self.clock = self.hs.get_clock()
self._account_validity = self.hs.config.account_validity
+ self._show_users_in_user_directory = self.hs.config.show_users_in_user_directory
+ self.profile_handler = self.hs.get_profile_handler()
if self._account_validity.renew_by_email_enabled and load_jinja2_templates:
# Don't do email-specific configuration if renewal by email is disabled.
@@ -77,6 +79,9 @@ class AccountValidityHandler(object):
self.clock.looping_call(send_emails, 30 * 60 * 1000)
+ # Check every hour to remove expired users from the user directory
+ self.clock.looping_call(self._mark_expired_users_as_inactive, 60 * 60 * 1000)
+
@defer.inlineCallbacks
def send_renewal_emails(self):
"""Gets the list of users whose account is expiring in the amount of time
@@ -193,7 +198,7 @@ class AccountValidityHandler(object):
if threepid["medium"] == "email":
addresses.append(threepid["address"])
- defer.returnValue(addresses)
+ return addresses
@defer.inlineCallbacks
def _get_renewal_token(self, user_id):
@@ -214,7 +219,7 @@ class AccountValidityHandler(object):
try:
renewal_token = stringutils.random_string(32)
yield self.store.set_renewal_token_for_user(user_id, renewal_token)
- defer.returnValue(renewal_token)
+ return renewal_token
except StoreError:
attempts += 1
raise StoreError(500, "Couldn't generate a unique string as refresh string.")
@@ -226,11 +231,19 @@ class AccountValidityHandler(object):
Args:
renewal_token (str): Token sent with the renewal request.
+ Returns:
+ bool: Whether the provided token is valid.
"""
- user_id = yield self.store.get_user_from_renewal_token(renewal_token)
+ try:
+ user_id = yield self.store.get_user_from_renewal_token(renewal_token)
+ except StoreError:
+ return False
+
logger.debug("Renewing an account for user %s", user_id)
yield self.renew_account_for_user(user_id)
+ return True
+
@defer.inlineCallbacks
def renew_account_for_user(self, user_id, expiration_ts=None, email_sent=False):
"""Renews the account attached to a given user by pushing back the
@@ -254,4 +267,27 @@ class AccountValidityHandler(object):
user_id=user_id, expiration_ts=expiration_ts, email_sent=email_sent
)
- defer.returnValue(expiration_ts)
+ # Check if renewed users should be reintroduced to the user directory
+ if self._show_users_in_user_directory:
+ # Show the user in the directory again by setting them to active
+ yield self.profile_handler.set_active(
+ UserID.from_string(user_id), True, True
+ )
+
+ return expiration_ts
+
+ @defer.inlineCallbacks
+ def _mark_expired_users_as_inactive(self):
+ """Iterate over expired users. Mark them as inactive in order to hide them from the
+ user directory.
+
+ Returns:
+ Deferred
+ """
+ # Get expired users
+ expired_user_ids = yield self.store.get_expired_users()
+ expired_users = [UserID.from_string(user_id) for user_id in expired_user_ids]
+
+ # Mark each one as non-active
+ for user in expired_users:
+ yield self.profile_handler.set_active(user, False, True)
diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py
index fbef2f3d38..46ac73106d 100644
--- a/synapse/handlers/acme.py
+++ b/synapse/handlers/acme.py
@@ -100,4 +100,4 @@ class AcmeHandler(object):
logger.exception("Failed saving!")
raise
- defer.returnValue(True)
+ return True
diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py
index e8a651e231..2f22f56ca4 100644
--- a/synapse/handlers/admin.py
+++ b/synapse/handlers/admin.py
@@ -49,7 +49,7 @@ class AdminHandler(BaseHandler):
"devices": {"": {"sessions": [{"connections": connections}]}},
}
- defer.returnValue(ret)
+ return ret
@defer.inlineCallbacks
def get_users(self):
@@ -61,7 +61,7 @@ class AdminHandler(BaseHandler):
"""
ret = yield self.store.get_users()
- defer.returnValue(ret)
+ return ret
@defer.inlineCallbacks
def get_users_paginate(self, order, start, limit):
@@ -78,7 +78,7 @@ class AdminHandler(BaseHandler):
"""
ret = yield self.store.get_users_paginate(order, start, limit)
- defer.returnValue(ret)
+ return ret
@defer.inlineCallbacks
def search_users(self, term):
@@ -92,7 +92,7 @@ class AdminHandler(BaseHandler):
"""
ret = yield self.store.search_users(term)
- defer.returnValue(ret)
+ return ret
@defer.inlineCallbacks
def export_user_data(self, user_id, writer):
@@ -225,7 +225,7 @@ class AdminHandler(BaseHandler):
state = yield self.store.get_state_for_event(event_id)
writer.write_state(room_id, event_id, state)
- defer.returnValue(writer.finished())
+ return writer.finished()
class ExfiltrationWriter(object):
diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py
index 8f089f0e33..d1a51df6f9 100644
--- a/synapse/handlers/appservice.py
+++ b/synapse/handlers/appservice.py
@@ -167,8 +167,8 @@ class ApplicationServicesHandler(object):
for user_service in user_query_services:
is_known_user = yield self.appservice_api.query_user(user_service, user_id)
if is_known_user:
- defer.returnValue(True)
- defer.returnValue(False)
+ return True
+ return False
@defer.inlineCallbacks
def query_room_alias_exists(self, room_alias):
@@ -192,7 +192,7 @@ class ApplicationServicesHandler(object):
if is_known_alias:
# the alias exists now so don't query more ASes.
result = yield self.store.get_association_from_room_alias(room_alias)
- defer.returnValue(result)
+ return result
@defer.inlineCallbacks
def query_3pe(self, kind, protocol, fields):
@@ -215,7 +215,7 @@ class ApplicationServicesHandler(object):
if success:
ret.extend(result)
- defer.returnValue(ret)
+ return ret
@defer.inlineCallbacks
def get_3pe_protocols(self, only_protocol=None):
@@ -254,7 +254,7 @@ class ApplicationServicesHandler(object):
for p in protocols.keys():
protocols[p] = _merge_instances(protocols[p])
- defer.returnValue(protocols)
+ return protocols
@defer.inlineCallbacks
def _get_services_for_event(self, event):
@@ -276,7 +276,7 @@ class ApplicationServicesHandler(object):
if (yield s.is_interested(event, self.store)):
interested_list.append(s)
- defer.returnValue(interested_list)
+ return interested_list
def _get_services_for_user(self, user_id):
services = self.store.get_app_services()
@@ -293,23 +293,23 @@ class ApplicationServicesHandler(object):
if not self.is_mine_id(user_id):
# we don't know if they are unknown or not since it isn't one of our
# users. We can't poke ASes.
- defer.returnValue(False)
+ return False
return
user_info = yield self.store.get_user_by_id(user_id)
if user_info:
- defer.returnValue(False)
+ return False
return
# user not found; could be the AS though, so check.
services = self.store.get_app_services()
service_list = [s for s in services if s.sender == user_id]
- defer.returnValue(len(service_list) == 0)
+ return len(service_list) == 0
@defer.inlineCallbacks
def _check_user_exists(self, user_id):
unknown_user = yield self._is_unknown_user(user_id)
if unknown_user:
exists = yield self.query_user_exists(user_id)
- defer.returnValue(exists)
- defer.returnValue(True)
+ return exists
+ return True
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index d4d6574975..bf124032f1 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -155,7 +155,7 @@ class AuthHandler(BaseHandler):
if user_id != requester.user.to_string():
raise AuthError(403, "Invalid auth")
- defer.returnValue(params)
+ return params
@defer.inlineCallbacks
def check_auth(self, flows, clientdict, clientip, password_servlet=False):
@@ -280,7 +280,7 @@ class AuthHandler(BaseHandler):
creds,
list(clientdict),
)
- defer.returnValue((creds, clientdict, session["id"]))
+ return (creds, clientdict, session["id"])
ret = self._auth_dict_for_flows(flows, session)
ret["completed"] = list(creds)
@@ -307,8 +307,8 @@ class AuthHandler(BaseHandler):
if result:
creds[stagetype] = result
self._save_session(sess)
- defer.returnValue(True)
- defer.returnValue(False)
+ return True
+ return False
def get_session_id(self, clientdict):
"""
@@ -379,7 +379,7 @@ class AuthHandler(BaseHandler):
res = yield checker(
authdict, clientip=clientip, password_servlet=password_servlet
)
- defer.returnValue(res)
+ return res
# build a v1-login-style dict out of the authdict and fall back to the
# v1 code
@@ -389,7 +389,7 @@ class AuthHandler(BaseHandler):
raise SynapseError(400, "", Codes.MISSING_PARAM)
(canonical_id, callback) = yield self.validate_login(user_id, authdict)
- defer.returnValue(canonical_id)
+ return canonical_id
@defer.inlineCallbacks
def _check_recaptcha(self, authdict, clientip, **kwargs):
@@ -409,7 +409,7 @@ class AuthHandler(BaseHandler):
# TODO: get this from the homeserver rather than creating a new one for
# each request
try:
- client = self.hs.get_simple_http_client()
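+ # Use the proxy-aware HTTP client so the reCAPTCHA verification request
+ # honours any configured outbound proxy.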
+ client = self.hs.get_proxied_http_client()
resp_body = yield client.post_urlencoded_get_json(
self.hs.config.recaptcha_siteverify_api,
args={
@@ -433,7 +433,7 @@ class AuthHandler(BaseHandler):
resp_body.get("hostname"),
)
if resp_body["success"]:
- defer.returnValue(True)
+ return True
raise LoginError(401, "", errcode=Codes.UNAUTHORIZED)
def _check_email_identity(self, authdict, **kwargs):
@@ -502,7 +502,7 @@ class AuthHandler(BaseHandler):
threepid["threepid_creds"] = authdict["threepid_creds"]
- defer.returnValue(threepid)
+ return threepid
def _get_params_recaptcha(self):
return {"public_key": self.hs.config.recaptcha_public_key}
@@ -606,7 +606,7 @@ class AuthHandler(BaseHandler):
yield self.store.delete_access_token(access_token)
raise StoreError(400, "Login raced against device deletion")
- defer.returnValue(access_token)
+ return access_token
@defer.inlineCallbacks
def check_user_exists(self, user_id):
@@ -629,8 +629,8 @@ class AuthHandler(BaseHandler):
self.ratelimit_login_per_account(user_id)
res = yield self._find_user_id_and_pwd_hash(user_id)
if res is not None:
- defer.returnValue(res[0])
- defer.returnValue(None)
+ return res[0]
+ return None
@defer.inlineCallbacks
def _find_user_id_and_pwd_hash(self, user_id):
@@ -661,7 +661,7 @@ class AuthHandler(BaseHandler):
user_id,
user_infos.keys(),
)
- defer.returnValue(result)
+ return result
def get_supported_login_types(self):
"""Get a the login types supported for the /login API
@@ -722,7 +722,7 @@ class AuthHandler(BaseHandler):
known_login_type = True
is_valid = yield provider.check_password(qualified_user_id, password)
if is_valid:
- defer.returnValue((qualified_user_id, None))
+ return (qualified_user_id, None)
if not hasattr(provider, "get_supported_login_types") or not hasattr(
provider, "check_auth"
@@ -756,7 +756,7 @@ class AuthHandler(BaseHandler):
if result:
if isinstance(result, str):
result = (result, None)
- defer.returnValue(result)
+ return result
if login_type == LoginType.PASSWORD and self.hs.config.password_localdb_enabled:
known_login_type = True
@@ -766,7 +766,7 @@ class AuthHandler(BaseHandler):
)
if canonical_user_id:
- defer.returnValue((canonical_user_id, None))
+ return (canonical_user_id, None)
if not known_login_type:
raise SynapseError(400, "Unknown login type %s" % login_type)
@@ -814,9 +814,9 @@ class AuthHandler(BaseHandler):
if isinstance(result, str):
# If it's a str, set callback function to None
result = (result, None)
- defer.returnValue(result)
+ return result
- defer.returnValue((None, None))
+ return (None, None)
@defer.inlineCallbacks
def _check_local_password(self, user_id, password):
@@ -838,7 +838,7 @@ class AuthHandler(BaseHandler):
"""
lookupres = yield self._find_user_id_and_pwd_hash(user_id)
if not lookupres:
- defer.returnValue(None)
+ return None
(user_id, password_hash) = lookupres
# If the password hash is None, the account has likely been deactivated
@@ -850,8 +850,8 @@ class AuthHandler(BaseHandler):
result = yield self.validate_hash(password, password_hash)
if not result:
logger.warn("Failed password login for user %s", user_id)
- defer.returnValue(None)
- defer.returnValue(user_id)
+ return None
+ return user_id
@defer.inlineCallbacks
def validate_short_term_login_token_and_get_user_id(self, login_token):
@@ -865,7 +865,7 @@ class AuthHandler(BaseHandler):
raise AuthError(403, "Invalid token", errcode=Codes.FORBIDDEN)
self.ratelimit_login_per_account(user_id)
yield self.auth.check_auth_blocking(user_id)
- defer.returnValue(user_id)
+ return user_id
@defer.inlineCallbacks
def delete_access_token(self, access_token):
@@ -976,7 +976,7 @@ class AuthHandler(BaseHandler):
)
yield self.store.user_delete_threepid(user_id, medium, address)
- defer.returnValue(result)
+ return result
def _save_session(self, session):
# TODO: Persistent storage
diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py
index e8f9da6098..ad00dcecfd 100644
--- a/synapse/handlers/deactivate_account.py
+++ b/synapse/handlers/deactivate_account.py
@@ -35,6 +35,7 @@ class DeactivateAccountHandler(BaseHandler):
self._device_handler = hs.get_device_handler()
self._room_member_handler = hs.get_room_member_handler()
self._identity_handler = hs.get_handlers().identity_handler
+ self._profile_handler = hs.get_profile_handler()
self.user_directory_handler = hs.get_user_directory_handler()
# Flag that indicates whether the process to part users from rooms is running
@@ -102,6 +103,9 @@ class DeactivateAccountHandler(BaseHandler):
yield self.store.user_set_password_hash(user_id, None)
+ user = UserID.from_string(user_id)
+ yield self._profile_handler.set_active(user, False, False)
+
# Add the user to a table of users pending deactivation (ie.
# removal from all the rooms they're a member of)
yield self.store.add_user_pending_deactivation(user_id)
@@ -118,6 +122,10 @@ class DeactivateAccountHandler(BaseHandler):
# parts users from rooms (if it isn't already running)
self._start_user_parting()
+ # Reject all pending invites for the user, so that the user doesn't show up in the
+ # "invited" section of rooms' members list.
+ yield self._reject_pending_invites_for_user(user_id)
+
# Remove all information on the user from the account_validity table.
if self._account_validity_enabled:
yield self.store.delete_account_validity_for_user(user_id)
@@ -125,7 +133,40 @@ class DeactivateAccountHandler(BaseHandler):
# Mark the user as deactivated.
yield self.store.set_user_deactivated_status(user_id, True)
- defer.returnValue(identity_server_supports_unbinding)
+ return identity_server_supports_unbinding
+
+ @defer.inlineCallbacks
+ def _reject_pending_invites_for_user(self, user_id):
+ """Reject pending invites addressed to a given user ID.
+
+ Args:
+ user_id (str): The user ID to reject pending invites for.
+ """
+ user = UserID.from_string(user_id)
+ pending_invites = yield self.store.get_invited_rooms_for_user(user_id)
+
+ for room in pending_invites:
+ try:
+ yield self._room_member_handler.update_membership(
+ create_requester(user),
+ user,
+ room.room_id,
+ "leave",
+ ratelimit=False,
+ require_consent=False,
+ )
+ logger.info(
+ "Rejected invite for deactivated user %r in room %r",
+ user_id,
+ room.room_id,
+ )
+ except Exception:
+ logger.exception(
+ "Failed to reject invite for user %r in room %r:"
+ " ignoring and continuing",
+ user_id,
+ room.room_id,
+ )
def _start_user_parting(self):
"""
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index 99e8413092..d36dd850fd 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -64,7 +64,7 @@ class DeviceWorkerHandler(BaseHandler):
for device in devices:
_update_device_from_client_ips(device, ips)
- defer.returnValue(devices)
+ return devices
@defer.inlineCallbacks
def get_device(self, user_id, device_id):
@@ -85,7 +85,7 @@ class DeviceWorkerHandler(BaseHandler):
raise errors.NotFoundError
ips = yield self.store.get_last_client_ip_by_device(user_id, device_id)
_update_device_from_client_ips(device, ips)
- defer.returnValue(device)
+ return device
@measure_func("device.get_user_ids_changed")
@defer.inlineCallbacks
@@ -200,9 +200,7 @@ class DeviceWorkerHandler(BaseHandler):
possibly_joined = []
possibly_left = []
- defer.returnValue(
- {"changed": list(possibly_joined), "left": list(possibly_left)}
- )
+ return {"changed": list(possibly_joined), "left": list(possibly_left)}
class DeviceHandler(DeviceWorkerHandler):
@@ -250,7 +248,7 @@ class DeviceHandler(DeviceWorkerHandler):
)
if new_device:
yield self.notify_device_update(user_id, [device_id])
- defer.returnValue(device_id)
+ return device_id
# if the device id is not specified, we'll autogen one, but loop a few
# times in case of a clash.
@@ -264,7 +262,7 @@ class DeviceHandler(DeviceWorkerHandler):
)
if new_device:
yield self.notify_device_update(user_id, [device_id])
- defer.returnValue(device_id)
+ return device_id
attempts += 1
raise errors.StoreError(500, "Couldn't generate a device ID.")
@@ -411,9 +409,7 @@ class DeviceHandler(DeviceWorkerHandler):
@defer.inlineCallbacks
def on_federation_query_user_devices(self, user_id):
stream_id, devices = yield self.store.get_devices_with_keys_by_user(user_id)
- defer.returnValue(
- {"user_id": user_id, "stream_id": stream_id, "devices": devices}
- )
+ return {"user_id": user_id, "stream_id": stream_id, "devices": devices}
@defer.inlineCallbacks
def user_left_room(self, user, room_id):
@@ -556,6 +552,14 @@ class DeviceListEduUpdater(object):
stream_id = result["stream_id"]
devices = result["devices"]
+ for device in devices:
+ logger.debug(
+ "Handling resync update %r/%r, ID: %r",
+ user_id,
+ device["device_id"],
+ stream_id,
+ )
+
# If the remote server has more than ~1000 devices for this user
# we assume that something is going horribly wrong (e.g. a bot
# that logs in and creates a new device every time it tries to
@@ -623,7 +627,7 @@ class DeviceListEduUpdater(object):
for _, stream_id, prev_ids, _ in updates:
if not prev_ids:
# We always do a resync if there are no previous IDs
- defer.returnValue(True)
+ return True
for prev_id in prev_ids:
if prev_id == extremity:
@@ -633,8 +637,8 @@ class DeviceListEduUpdater(object):
elif prev_id in stream_id_in_updates:
continue
else:
- defer.returnValue(True)
+ return True
stream_id_in_updates.add(stream_id)
- defer.returnValue(False)
+ return False
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index 42d5b3db30..0fd423197c 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -210,7 +210,7 @@ class DirectoryHandler(BaseHandler):
except AuthError as e:
logger.info("Failed to update alias events: %s", e)
- defer.returnValue(room_id)
+ return room_id
@defer.inlineCallbacks
def delete_appservice_association(self, service, room_alias):
@@ -229,7 +229,7 @@ class DirectoryHandler(BaseHandler):
room_id = yield self.store.delete_room_alias(room_alias)
- defer.returnValue(room_id)
+ return room_id
@defer.inlineCallbacks
def get_association(self, room_alias):
@@ -277,7 +277,7 @@ class DirectoryHandler(BaseHandler):
else:
servers = list(servers)
- defer.returnValue({"room_id": room_id, "servers": servers})
+ return {"room_id": room_id, "servers": servers}
return
@defer.inlineCallbacks
@@ -289,7 +289,7 @@ class DirectoryHandler(BaseHandler):
result = yield self.get_association_from_room_alias(room_alias)
if result is not None:
- defer.returnValue({"room_id": result.room_id, "servers": result.servers})
+ return {"room_id": result.room_id, "servers": result.servers}
else:
raise SynapseError(
404,
@@ -342,7 +342,7 @@ class DirectoryHandler(BaseHandler):
# Query AS to see if it exists
as_handler = self.appservice_handler
result = yield as_handler.query_room_alias_exists(room_alias)
- defer.returnValue(result)
+ return result
def can_modify_alias(self, alias, user_id=None):
# Any application service "interested" in an alias they are regexing on
@@ -369,10 +369,10 @@ class DirectoryHandler(BaseHandler):
creator = yield self.store.get_room_alias_creator(alias.to_string())
if creator is not None and creator == user_id:
- defer.returnValue(True)
+ return True
is_admin = yield self.auth.is_server_admin(UserID.from_string(user_id))
- defer.returnValue(is_admin)
+ return is_admin
@defer.inlineCallbacks
def edit_published_room_list(self, requester, room_id, visibility):
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index fdfe8611b6..1300b540e3 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -144,7 +144,7 @@ class E2eKeysHandler(object):
)
)
- defer.returnValue({"device_keys": results, "failures": failures})
+ return {"device_keys": results, "failures": failures}
@defer.inlineCallbacks
def query_local_devices(self, query):
@@ -189,7 +189,7 @@ class E2eKeysHandler(object):
r["unsigned"]["device_display_name"] = display_name
result_dict[user_id][device_id] = r
- defer.returnValue(result_dict)
+ return result_dict
@defer.inlineCallbacks
def on_federation_query_client_keys(self, query_body):
@@ -197,7 +197,7 @@ class E2eKeysHandler(object):
"""
device_keys_query = query_body.get("device_keys", {})
res = yield self.query_local_devices(device_keys_query)
- defer.returnValue({"device_keys": res})
+ return {"device_keys": res}
@defer.inlineCallbacks
def claim_one_time_keys(self, query, timeout):
@@ -259,7 +259,7 @@ class E2eKeysHandler(object):
),
)
- defer.returnValue({"one_time_keys": json_result, "failures": failures})
+ return {"one_time_keys": json_result, "failures": failures}
@defer.inlineCallbacks
def upload_keys_for_user(self, user_id, device_id, keys):
@@ -297,7 +297,7 @@ class E2eKeysHandler(object):
result = yield self.store.count_e2e_one_time_keys(user_id, device_id)
- defer.returnValue({"one_time_key_counts": result})
+ return {"one_time_key_counts": result}
@defer.inlineCallbacks
def _upload_one_time_keys_for_user(
diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py
index ebd807bca6..41b871fc59 100644
--- a/synapse/handlers/e2e_room_keys.py
+++ b/synapse/handlers/e2e_room_keys.py
@@ -84,7 +84,7 @@ class E2eRoomKeysHandler(object):
user_id, version, room_id, session_id
)
- defer.returnValue(results)
+ return results
@defer.inlineCallbacks
def delete_room_keys(self, user_id, version, room_id=None, session_id=None):
@@ -262,7 +262,7 @@ class E2eRoomKeysHandler(object):
new_version = yield self.store.create_e2e_room_keys_version(
user_id, version_info
)
- defer.returnValue(new_version)
+ return new_version
@defer.inlineCallbacks
def get_version_info(self, user_id, version=None):
@@ -292,7 +292,7 @@ class E2eRoomKeysHandler(object):
raise NotFoundError("Unknown backup version")
else:
raise
- defer.returnValue(res)
+ return res
@defer.inlineCallbacks
def delete_version(self, user_id, version=None):
@@ -350,4 +350,4 @@ class E2eRoomKeysHandler(object):
user_id, version, version_info
)
- defer.returnValue({})
+ return {}
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index 6a38328af3..2f1f10a9af 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -143,7 +143,7 @@ class EventStreamHandler(BaseHandler):
"end": tokens[1].to_string(),
}
- defer.returnValue(chunk)
+ return chunk
class EventHandler(BaseHandler):
@@ -166,7 +166,7 @@ class EventHandler(BaseHandler):
event = yield self.store.get_event(event_id, check_room_id=room_id)
if not event:
- defer.returnValue(None)
+ return None
return
users = yield self.store.get_users_in_room(event.room_id)
@@ -179,4 +179,4 @@ class EventHandler(BaseHandler):
if not filtered:
raise AuthError(403, "You don't have permission to access that event.")
- defer.returnValue(event)
+ return event
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 30b69af82c..319ee35d9a 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -210,7 +210,7 @@ class FederationHandler(BaseHandler):
event_id,
origin,
)
- defer.returnValue(None)
+ return None
state = None
auth_chain = []
@@ -676,7 +676,7 @@ class FederationHandler(BaseHandler):
events = [e for e in events if e.event_id not in seen_events]
if not events:
- defer.returnValue([])
+ return []
event_map = {e.event_id: e for e in events}
@@ -838,7 +838,7 @@ class FederationHandler(BaseHandler):
# TODO: We can probably do something more clever here.
yield self._handle_new_event(dest, event, backfilled=True)
- defer.returnValue(events)
+ return events
@defer.inlineCallbacks
def maybe_backfill(self, room_id, current_depth):
@@ -894,7 +894,7 @@ class FederationHandler(BaseHandler):
)
if not filtered_extremities:
- defer.returnValue(False)
+ return False
# Check if we reached a point where we should start backfilling.
sorted_extremeties_tuple = sorted(extremities.items(), key=lambda e: -int(e[1]))
@@ -965,7 +965,7 @@ class FederationHandler(BaseHandler):
# If this succeeded then we probably already have the
# appropriate stuff.
# TODO: We can probably do something more intelligent here.
- defer.returnValue(True)
+ return True
except SynapseError as e:
logger.info("Failed to backfill from %s because %s", dom, e)
continue
@@ -985,11 +985,11 @@ class FederationHandler(BaseHandler):
logger.exception("Failed to backfill from %s because %s", dom, e)
continue
- defer.returnValue(False)
+ return False
success = yield try_backfill(likely_domains)
if success:
- defer.returnValue(True)
+ return True
# Huh, well *those* domains didn't work out. Lets try some domains
# from the time.
@@ -1031,11 +1031,11 @@ class FederationHandler(BaseHandler):
[dom for dom, _ in likely_domains if dom not in tried_domains]
)
if success:
- defer.returnValue(True)
+ return True
tried_domains.update(dom for dom, _ in likely_domains)
- defer.returnValue(False)
+ return False
def _sanity_check_event(self, ev):
"""
@@ -1082,7 +1082,7 @@ class FederationHandler(BaseHandler):
pdu=event,
)
- defer.returnValue(pdu)
+ return pdu
@defer.inlineCallbacks
def on_event_auth(self, event_id):
@@ -1090,7 +1090,7 @@ class FederationHandler(BaseHandler):
auth = yield self.store.get_auth_chain(
[auth_id for auth_id in event.auth_event_ids()], include_given=True
)
- defer.returnValue([e for e in auth])
+ return [e for e in auth]
@log_function
@defer.inlineCallbacks
@@ -1177,7 +1177,7 @@ class FederationHandler(BaseHandler):
run_in_background(self._handle_queued_pdus, room_queue)
- defer.returnValue(True)
+ return True
@defer.inlineCallbacks
def _handle_queued_pdus(self, room_queue):
@@ -1264,7 +1264,7 @@ class FederationHandler(BaseHandler):
room_version, event, context, do_sig_check=False
)
- defer.returnValue(event)
+ return event
@defer.inlineCallbacks
@log_function
@@ -1325,7 +1325,7 @@ class FederationHandler(BaseHandler):
state = yield self.store.get_events(list(prev_state_ids.values()))
- defer.returnValue({"state": list(state.values()), "auth_chain": auth_chain})
+ return {"state": list(state.values()), "auth_chain": auth_chain}
@defer.inlineCallbacks
def on_invite_request(self, origin, pdu):
@@ -1345,8 +1345,15 @@ class FederationHandler(BaseHandler):
if self.hs.config.block_non_admin_invites:
raise SynapseError(403, "This server does not accept room invites")
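+ # Whether the room is in the public rooms directory; the spam checker may
+ # apply different rules to invites for published rooms.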
+ is_published = yield self.store.is_room_published(event.room_id)
+
if not self.spam_checker.user_may_invite(
- event.sender, event.state_key, event.room_id
+ event.sender,
+ event.state_key,
+ None,
+ room_id=event.room_id,
+ new_room=False,
+ published_room=is_published,
):
raise SynapseError(
403, "This user is not permitted to send invites to this server/user"
@@ -1381,7 +1388,7 @@ class FederationHandler(BaseHandler):
context = yield self.state_handler.compute_event_context(event)
yield self.persist_events_and_notify([(event, context)])
- defer.returnValue(event)
+ return event
@defer.inlineCallbacks
def do_remotely_reject_invite(self, target_hosts, room_id, user_id):
@@ -1406,7 +1413,7 @@ class FederationHandler(BaseHandler):
context = yield self.state_handler.compute_event_context(event)
yield self.persist_events_and_notify([(event, context)])
- defer.returnValue(event)
+ return event
@defer.inlineCallbacks
def _make_and_verify_event(
@@ -1424,7 +1431,7 @@ class FederationHandler(BaseHandler):
assert event.user_id == user_id
assert event.state_key == user_id
assert event.room_id == room_id
- defer.returnValue((origin, event, format_ver))
+ return (origin, event, format_ver)
@defer.inlineCallbacks
@log_function
@@ -1484,7 +1491,7 @@ class FederationHandler(BaseHandler):
logger.warn("Failed to create new leave %r because %s", event, e)
raise e
- defer.returnValue(event)
+ return event
@defer.inlineCallbacks
@log_function
@@ -1517,7 +1524,7 @@ class FederationHandler(BaseHandler):
event.signatures,
)
- defer.returnValue(None)
+ return None
@defer.inlineCallbacks
def get_state_for_pdu(self, room_id, event_id):
@@ -1545,9 +1552,9 @@ class FederationHandler(BaseHandler):
del results[(event.type, event.state_key)]
res = list(results.values())
- defer.returnValue(res)
+ return res
else:
- defer.returnValue([])
+ return []
@defer.inlineCallbacks
def get_state_ids_for_pdu(self, room_id, event_id):
@@ -1572,9 +1579,9 @@ class FederationHandler(BaseHandler):
else:
results.pop((event.type, event.state_key), None)
- defer.returnValue(list(results.values()))
+ return list(results.values())
else:
- defer.returnValue([])
+ return []
@defer.inlineCallbacks
@log_function
@@ -1587,7 +1594,7 @@ class FederationHandler(BaseHandler):
events = yield filter_events_for_server(self.store, origin, events)
- defer.returnValue(events)
+ return events
@defer.inlineCallbacks
@log_function
@@ -1617,9 +1624,9 @@ class FederationHandler(BaseHandler):
events = yield filter_events_for_server(self.store, origin, [event])
event = events[0]
- defer.returnValue(event)
+ return event
else:
- defer.returnValue(None)
+ return None
def get_min_depth_for_context(self, context):
return self.store.get_min_depth(context)
@@ -1651,7 +1658,7 @@ class FederationHandler(BaseHandler):
self.store.remove_push_actions_from_staging, event.event_id
)
- defer.returnValue(context)
+ return context
@defer.inlineCallbacks
def _handle_new_events(self, origin, event_infos, backfilled=False):
@@ -1674,7 +1681,7 @@ class FederationHandler(BaseHandler):
auth_events=ev_info.get("auth_events"),
backfilled=backfilled,
)
- defer.returnValue(res)
+ return res
contexts = yield make_deferred_yieldable(
defer.gatherResults(
@@ -1833,7 +1840,7 @@ class FederationHandler(BaseHandler):
if event.type == EventTypes.GuestAccess and not context.rejected:
yield self.maybe_kick_guest_users(event)
- defer.returnValue(context)
+ return context
@defer.inlineCallbacks
def _check_for_soft_fail(self, event, state, backfilled):
@@ -1952,7 +1959,7 @@ class FederationHandler(BaseHandler):
logger.debug("on_query_auth returning: %s", ret)
- defer.returnValue(ret)
+ return ret
@defer.inlineCallbacks
def on_get_missing_events(
@@ -1975,7 +1982,7 @@ class FederationHandler(BaseHandler):
self.store, origin, missing_events
)
- defer.returnValue(missing_events)
+ return missing_events
@defer.inlineCallbacks
@log_function
@@ -2451,16 +2458,14 @@ class FederationHandler(BaseHandler):
logger.debug("construct_auth_difference returning")
- defer.returnValue(
- {
- "auth_chain": local_auth,
- "rejects": {
- e.event_id: {"reason": reason_map[e.event_id], "proof": None}
- for e in base_remote_rejected
- },
- "missing": [e.event_id for e in missing_locals],
- }
- )
+ return {
+ "auth_chain": local_auth,
+ "rejects": {
+ e.event_id: {"reason": reason_map[e.event_id], "proof": None}
+ for e in base_remote_rejected
+ },
+ "missing": [e.event_id for e in missing_locals],
+ }
@defer.inlineCallbacks
@log_function
@@ -2505,7 +2510,7 @@ class FederationHandler(BaseHandler):
room_version, event_dict, event, context
)
- EventValidator().validate_new(event)
+ EventValidator().validate_new(event, self.config)
# We need to tell the transaction queue to send this out, even
# though the sender isn't a local user.
@@ -2563,7 +2568,7 @@ class FederationHandler(BaseHandler):
)
try:
- self.auth.check_from_context(room_version, event, context)
+ yield self.auth.check_from_context(room_version, event, context)
except AuthError as e:
logger.warn("Denying third party invite %r because %s", event, e)
raise e
@@ -2592,7 +2597,12 @@ class FederationHandler(BaseHandler):
original_invite_id, allow_none=True
)
if original_invite:
- display_name = original_invite.content["display_name"]
+ # If the m.room.third_party_invite event's content is empty, the invite has
+ # been revoked. We don't need to raise an error here: the auth check on the
+ # invite will fail anyway, since it won't be able to fetch any public keys
+ # from the event's (empty) content.
+ display_name = original_invite.content.get("display_name")
event_dict["content"]["third_party_invite"]["display_name"] = display_name
else:
logger.info(
@@ -2607,8 +2617,8 @@ class FederationHandler(BaseHandler):
event, context = yield self.event_creation_handler.create_new_client_event(
builder=builder
)
- EventValidator().validate_new(event)
- defer.returnValue((event, context))
+ EventValidator().validate_new(event, self.config)
+ return (event, context)
@defer.inlineCallbacks
def _check_signature(self, event, context):
diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py
index 7da63bb643..7b67c8ae0f 100644
--- a/synapse/handlers/groups_local.py
+++ b/synapse/handlers/groups_local.py
@@ -162,7 +162,7 @@ class GroupsLocalHandler(object):
res.setdefault("user", {})["is_publicised"] = is_publicised
- defer.returnValue(res)
+ return res
@defer.inlineCallbacks
def create_group(self, group_id, user_id, content):
@@ -207,7 +207,7 @@ class GroupsLocalHandler(object):
)
self.notifier.on_new_event("groups_key", token, users=[user_id])
- defer.returnValue(res)
+ return res
@defer.inlineCallbacks
def get_users_in_group(self, group_id, requester_user_id):
@@ -217,7 +217,7 @@ class GroupsLocalHandler(object):
res = yield self.groups_server_handler.get_users_in_group(
group_id, requester_user_id
)
- defer.returnValue(res)
+ return res
group_server_name = get_domain_from_id(group_id)
@@ -244,7 +244,7 @@ class GroupsLocalHandler(object):
res["chunk"] = valid_entries
- defer.returnValue(res)
+ return res
@defer.inlineCallbacks
def join_group(self, group_id, user_id, content):
@@ -285,7 +285,7 @@ class GroupsLocalHandler(object):
)
self.notifier.on_new_event("groups_key", token, users=[user_id])
- defer.returnValue({})
+ return {}
@defer.inlineCallbacks
def accept_invite(self, group_id, user_id, content):
@@ -326,7 +326,7 @@ class GroupsLocalHandler(object):
)
self.notifier.on_new_event("groups_key", token, users=[user_id])
- defer.returnValue({})
+ return {}
@defer.inlineCallbacks
def invite(self, group_id, user_id, requester_user_id, config):
@@ -346,7 +346,7 @@ class GroupsLocalHandler(object):
content,
)
- defer.returnValue(res)
+ return res
@defer.inlineCallbacks
def on_invite(self, group_id, user_id, content):
@@ -377,7 +377,7 @@ class GroupsLocalHandler(object):
logger.warn("No profile for user %s: %s", user_id, e)
user_profile = {}
- defer.returnValue({"state": "invite", "user_profile": user_profile})
+ return {"state": "invite", "user_profile": user_profile}
@defer.inlineCallbacks
def remove_user_from_group(self, group_id, user_id, requester_user_id, content):
@@ -406,7 +406,7 @@ class GroupsLocalHandler(object):
content,
)
- defer.returnValue(res)
+ return res
@defer.inlineCallbacks
def user_removed_from_group(self, group_id, user_id, content):
@@ -421,7 +421,7 @@ class GroupsLocalHandler(object):
@defer.inlineCallbacks
def get_joined_groups(self, user_id):
group_ids = yield self.store.get_joined_groups(user_id)
- defer.returnValue({"groups": group_ids})
+ return {"groups": group_ids}
@defer.inlineCallbacks
def get_publicised_groups_for_user(self, user_id):
@@ -433,14 +433,14 @@ class GroupsLocalHandler(object):
for app_service in self.store.get_app_services():
result.extend(app_service.get_groups_for_user(user_id))
- defer.returnValue({"groups": result})
+ return {"groups": result}
else:
bulk_result = yield self.transport_client.bulk_get_publicised_groups(
get_domain_from_id(user_id), [user_id]
)
result = bulk_result.get("users", {}).get(user_id)
# TODO: Verify attestations
- defer.returnValue({"groups": result})
+ return {"groups": result}
@defer.inlineCallbacks
def bulk_get_publicised_groups(self, user_ids, proxy=True):
@@ -475,4 +475,4 @@ class GroupsLocalHandler(object):
for app_service in self.store.get_app_services():
results[uid].extend(app_service.get_groups_for_user(uid))
- defer.returnValue({"users": results})
+ return {"users": results}
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index 546d6169e9..339e0dd04d 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
-# Copyright 2018 New Vector Ltd
+# Copyright 2018, 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,13 +20,18 @@
import logging
from canonicaljson import json
+from signedjson.key import decode_verify_key_bytes
+from signedjson.sign import verify_signed_json
+from unpaddedbase64 import decode_base64
from twisted.internet import defer
from synapse.api.errors import (
+ AuthError,
CodeMessageException,
Codes,
HttpResponseException,
+ ProxiedRequestError,
SynapseError,
)
@@ -46,6 +51,8 @@ class IdentityHandler(BaseHandler):
self.trust_any_id_server_just_for_testing_do_not_use = (
hs.config.use_insecure_ssl_client_just_for_testing_do_not_use
)
+ self.rewrite_identity_server_urls = hs.config.rewrite_identity_server_urls
+ self._enable_lookup = hs.config.enable_3pid_lookup
def _should_trust_id_server(self, id_server):
if id_server not in self.trusted_id_servers:
@@ -82,8 +89,11 @@ class IdentityHandler(BaseHandler):
"%s is not a trusted ID server: rejecting 3pid " + "credentials",
id_server,
)
- defer.returnValue(None)
-
+ return None
+ # if we have a rewrite rule set for the identity server,
+ # apply it now.
+ if id_server in self.rewrite_identity_server_urls:
+ id_server = self.rewrite_identity_server_urls[id_server]
try:
data = yield self.http_client.get_json(
"https://%s%s"
@@ -95,8 +105,8 @@ class IdentityHandler(BaseHandler):
raise e.to_synapse_error()
if "medium" in data:
- defer.returnValue(data)
- defer.returnValue(None)
+ return data
+ return None
@defer.inlineCallbacks
def bind_threepid(self, creds, mxid):
@@ -117,9 +127,17 @@ class IdentityHandler(BaseHandler):
else:
raise SynapseError(400, "No client_secret in creds")
+ # if we have a rewrite rule set for the identity server,
+ # apply it now, but only for sending the request (not
+ # storing in the database).
+ if id_server in self.rewrite_identity_server_urls:
+ id_server_host = self.rewrite_identity_server_urls[id_server]
+ else:
+ id_server_host = id_server
+
try:
data = yield self.http_client.post_json_get_json(
- "https://%s%s" % (id_server, "/_matrix/identity/api/v1/3pid/bind"),
+ "https://%s%s" % (id_server_host, "/_matrix/identity/api/v1/3pid/bind"),
{"sid": creds["sid"], "client_secret": client_secret, "mxid": mxid},
)
logger.debug("bound threepid %r to %s", creds, mxid)
@@ -133,7 +151,7 @@ class IdentityHandler(BaseHandler):
)
except CodeMessageException as e:
data = json.loads(e.msg) # XXX WAT?
- defer.returnValue(data)
+ return data
@defer.inlineCallbacks
def try_unbind_threepid(self, mxid, threepid):
@@ -161,7 +179,7 @@ class IdentityHandler(BaseHandler):
# We don't know where to unbind, so we don't have a choice but to return
if not id_servers:
- defer.returnValue(False)
+ return False
changed = True
for id_server in id_servers:
@@ -169,7 +187,7 @@ class IdentityHandler(BaseHandler):
mxid, threepid, id_server
)
- defer.returnValue(changed)
+ return changed
@defer.inlineCallbacks
def try_unbind_threepid_with_id_server(self, mxid, threepid, id_server):
@@ -205,6 +223,16 @@ class IdentityHandler(BaseHandler):
)
headers = {b"Authorization": auth_headers}
+ # if we have a rewrite rule set for the identity server,
+ # apply it now.
+ #
+ # Note that destination_is has to be the real id_server, not
+ # the server we connect to.
+ if id_server in self.rewrite_identity_server_urls:
+ id_server = self.rewrite_identity_server_urls[id_server]
+
+ url = "https://%s/_matrix/identity/api/v1/3pid/unbind" % (id_server,)
+
try:
yield self.http_client.post_json_get_json(url, content, headers)
changed = True
@@ -224,7 +252,7 @@ class IdentityHandler(BaseHandler):
id_server=id_server,
)
- defer.returnValue(changed)
+ return changed
@defer.inlineCallbacks
def requestEmailToken(
@@ -241,6 +269,11 @@ class IdentityHandler(BaseHandler):
"send_attempt": send_attempt,
}
+ # if we have a rewrite rule set for the identity server,
+ # apply it now.
+ if id_server in self.rewrite_identity_server_urls:
+ id_server = self.rewrite_identity_server_urls[id_server]
+
if next_link:
params.update({"next_link": next_link})
@@ -250,7 +283,7 @@ class IdentityHandler(BaseHandler):
% (id_server, "/_matrix/identity/api/v1/validate/email/requestToken"),
params,
)
- defer.returnValue(data)
+ return data
except HttpResponseException as e:
logger.info("Proxied requestToken failed: %r", e)
raise e.to_synapse_error()
@@ -271,14 +304,134 @@ class IdentityHandler(BaseHandler):
"send_attempt": send_attempt,
}
params.update(kwargs)
-
+ # if we have a rewrite rule set for the identity server,
+ # apply it now.
+ if id_server in self.rewrite_identity_server_urls:
+ id_server = self.rewrite_identity_server_urls[id_server]
try:
data = yield self.http_client.post_json_get_json(
"https://%s%s"
% (id_server, "/_matrix/identity/api/v1/validate/msisdn/requestToken"),
params,
)
- defer.returnValue(data)
+ return data
except HttpResponseException as e:
logger.info("Proxied requestToken failed: %r", e)
raise e.to_synapse_error()
+
+ @defer.inlineCallbacks
+ def lookup_3pid(self, id_server, medium, address):
+ """Looks up a 3pid in the passed identity server.
+
+ Args:
+ id_server (str): The server name (including port, if required)
+ of the identity server to use.
+ medium (str): The type of the third party identifier (e.g. "email").
+ address (str): The third party identifier (e.g. "foo@example.com").
+
+ Returns:
+ Deferred[dict]: The result of the lookup. See
+ https://matrix.org/docs/spec/identity_service/r0.1.0.html#association-lookup
+ for details
+ """
+ if not self._should_trust_id_server(id_server):
+ raise SynapseError(
+ 400, "Untrusted ID server '%s'" % id_server, Codes.SERVER_NOT_TRUSTED
+ )
+
+ if not self._enable_lookup:
+ raise AuthError(
+ 403, "Looking up third-party identifiers is denied from this server"
+ )
+
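+ # Send the request to the rewritten URL (if one is configured), but keep
+ # verifying signatures against the original identity server's name.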
+ target = self.rewrite_identity_server_urls.get(id_server, id_server)
+
+ try:
+ data = yield self.http_client.get_json(
+ "https://%s/_matrix/identity/api/v1/lookup" % (target,),
+ {"medium": medium, "address": address},
+ )
+
+ if "mxid" in data:
+ if "signatures" not in data:
+ raise AuthError(401, "No signatures on 3pid binding")
+ yield self._verify_any_signature(data, id_server)
+
+ except HttpResponseException as e:
+ logger.info("Proxied lookup failed: %r", e)
+ raise e.to_synapse_error()
+ except IOError as e:
+ logger.info("Failed to contact %r: %s", id_server, e)
+ raise ProxiedRequestError(503, "Failed to contact identity server")
+
+ return data
+
+ @defer.inlineCallbacks
+ def bulk_lookup_3pid(self, id_server, threepids):
+ """Looks up given 3pids in the passed identity server.
+
+ Args:
+ id_server (str): The server name (including port, if required)
+ of the identity server to use.
+ threepids ([[str, str]]): The third party identifiers to look up, as
+ a list of two-element lists of strings ([medium, address]).
+
+ Returns:
+ Deferred[dict]: The result of the lookup. See
+ https://matrix.org/docs/spec/identity_service/r0.1.0.html#association-lookup
+ for details
+ """
+ if not self._should_trust_id_server(id_server):
+ raise SynapseError(
+ 400, "Untrusted ID server '%s'" % id_server, Codes.SERVER_NOT_TRUSTED
+ )
+
+ if not self._enable_lookup:
+ raise AuthError(
+ 403, "Looking up third-party identifiers is denied from this server"
+ )
+
+ target = self.rewrite_identity_server_urls.get(id_server, id_server)
+
+ try:
+ data = yield self.http_client.post_json_get_json(
+ "https://%s/_matrix/identity/api/v1/bulk_lookup" % (target,),
+ {"threepids": threepids},
+ )
+
+ except HttpResponseException as e:
+ logger.info("Proxied lookup failed: %r", e)
+ raise e.to_synapse_error()
+ except IOError as e:
+ logger.info("Failed to contact %r: %s", id_server, e)
+ raise ProxiedRequestError(503, "Failed to contact identity server")
+
+ return data
+
+ @defer.inlineCallbacks
+ def _verify_any_signature(self, data, server_hostname):
+ if server_hostname not in data["signatures"]:
+ raise AuthError(401, "No signature from server %s" % (server_hostname,))
+
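+ # Verify the response against the public keys the identity server publishes,
+ # returning once a signature has been successfully verified.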
+ for key_name, signature in data["signatures"][server_hostname].items():
+ target = self.rewrite_identity_server_urls.get(
+ server_hostname, server_hostname
+ )
+
+ key_data = yield self.http_client.get_json(
+ "https://%s/_matrix/identity/api/v1/pubkey/%s" % (target, key_name)
+ )
+ if "public_key" not in key_data:
+ raise AuthError(
+ 401, "No public key named %s from %s" % (key_name, server_hostname)
+ )
+ verify_signed_json(
+ data,
+ server_hostname,
+ decode_verify_key_bytes(
+ key_name, decode_base64(key_data["public_key"])
+ ),
+ )
+ return
+
+ raise AuthError(401, "No signature from server %s" % (server_hostname,))
diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py
index 54c966c8a6..42d6650ed9 100644
--- a/synapse/handlers/initial_sync.py
+++ b/synapse/handlers/initial_sync.py
@@ -250,7 +250,7 @@ class InitialSyncHandler(BaseHandler):
"end": now_token.to_string(),
}
- defer.returnValue(ret)
+ return ret
@defer.inlineCallbacks
def room_initial_sync(self, requester, room_id, pagin_config=None):
@@ -301,7 +301,7 @@ class InitialSyncHandler(BaseHandler):
result["account_data"] = account_data_events
- defer.returnValue(result)
+ return result
@defer.inlineCallbacks
def _room_initial_sync_parted(
@@ -330,28 +330,24 @@ class InitialSyncHandler(BaseHandler):
time_now = self.clock.time_msec()
- defer.returnValue(
- {
- "membership": membership,
- "room_id": room_id,
- "messages": {
- "chunk": (
- yield self._event_serializer.serialize_events(
- messages, time_now
- )
- ),
- "start": start_token.to_string(),
- "end": end_token.to_string(),
- },
- "state": (
- yield self._event_serializer.serialize_events(
- room_state.values(), time_now
- )
+ return {
+ "membership": membership,
+ "room_id": room_id,
+ "messages": {
+ "chunk": (
+ yield self._event_serializer.serialize_events(messages, time_now)
),
- "presence": [],
- "receipts": [],
- }
- )
+ "start": start_token.to_string(),
+ "end": end_token.to_string(),
+ },
+ "state": (
+ yield self._event_serializer.serialize_events(
+ room_state.values(), time_now
+ )
+ ),
+ "presence": [],
+ "receipts": [],
+ }
@defer.inlineCallbacks
def _room_initial_sync_joined(
@@ -384,13 +380,13 @@ class InitialSyncHandler(BaseHandler):
def get_presence():
# If presence is disabled, return an empty list
if not self.hs.config.use_presence:
- defer.returnValue([])
+ return []
states = yield presence_handler.get_states(
[m.user_id for m in room_members], as_event=True
)
- defer.returnValue(states)
+ return states
@defer.inlineCallbacks
def get_receipts():
@@ -399,7 +395,7 @@ class InitialSyncHandler(BaseHandler):
)
if not receipts:
receipts = []
- defer.returnValue(receipts)
+ return receipts
presence, receipts, (messages, token) = yield make_deferred_yieldable(
defer.gatherResults(
@@ -442,7 +438,7 @@ class InitialSyncHandler(BaseHandler):
if not is_peeking:
ret["membership"] = membership
- defer.returnValue(ret)
+ return ret
@defer.inlineCallbacks
def _check_in_room_or_world_readable(self, room_id, user_id):
@@ -453,7 +449,7 @@ class InitialSyncHandler(BaseHandler):
# * The user is a guest user, and has joined the room
# else it will throw.
member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
- defer.returnValue((member_event.membership, member_event.event_id))
+ return (member_event.membership, member_event.event_id)
return
except AuthError:
visibility = yield self.state_handler.get_current_state(
@@ -463,7 +459,7 @@ class InitialSyncHandler(BaseHandler):
visibility
and visibility.content["history_visibility"] == "world_readable"
):
- defer.returnValue((Membership.JOIN, None))
+ return (Membership.JOIN, None)
return
raise AuthError(
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 6d7a987f13..2e1a989782 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -87,7 +87,7 @@ class MessageHandler(object):
)
data = room_state[membership_event_id].get(key)
- defer.returnValue(data)
+ return data
@defer.inlineCallbacks
def get_state_events(
@@ -135,7 +135,7 @@ class MessageHandler(object):
raise NotFoundError("Can't find event for token %s" % (at_token,))
visible_events = yield filter_events_for_client(
- self.store, user_id, last_events
+ self.store, user_id, last_events, apply_retention_policies=False
)
event = last_events[0]
@@ -174,7 +174,7 @@ class MessageHandler(object):
# events, as clients won't use them.
bundle_aggregations=False,
)
- defer.returnValue(events)
+ return events
@defer.inlineCallbacks
def get_joined_members(self, requester, room_id):
@@ -213,15 +213,13 @@ class MessageHandler(object):
# Loop fell through, AS has no interested users in room
raise AuthError(403, "Appservice not in room")
- defer.returnValue(
- {
- user_id: {
- "avatar_url": profile.avatar_url,
- "display_name": profile.display_name,
- }
- for user_id, profile in iteritems(users_with_profile)
+ return {
+ user_id: {
+ "avatar_url": profile.avatar_url,
+ "display_name": profile.display_name,
}
- )
+ for user_id, profile in iteritems(users_with_profile)
+ }
class EventCreationHandler(object):
@@ -380,7 +378,11 @@ class EventCreationHandler(object):
# tolerate them in event_auth.check().
prev_state_ids = yield context.get_prev_state_ids(self.store)
prev_event_id = prev_state_ids.get((EventTypes.Member, event.sender))
- prev_event = yield self.store.get_event(prev_event_id, allow_none=True)
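+ # prev_event_id may be None if the sender has never been in the room, in which
+ # case skip the lookup and fall through to the 403 below.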
+ prev_event = (
+ yield self.store.get_event(prev_event_id, allow_none=True)
+ if prev_event_id
+ else None
+ )
if not prev_event or prev_event.membership != Membership.JOIN:
logger.warning(
(
@@ -396,9 +398,9 @@ class EventCreationHandler(object):
403, "You must be in the room to create an alias for it"
)
- self.validator.validate_new(event)
+ self.validator.validate_new(event, self.config)
- defer.returnValue((event, context))
+ return (event, context)
def _is_exempt_from_privacy_policy(self, builder, requester):
""""Determine if an event to be sent is exempt from having to consent
@@ -425,9 +427,9 @@ class EventCreationHandler(object):
@defer.inlineCallbacks
def _is_server_notices_room(self, room_id):
if self.config.server_notices_mxid is None:
- defer.returnValue(False)
+ return False
user_ids = yield self.store.get_users_in_room(room_id)
- defer.returnValue(self.config.server_notices_mxid in user_ids)
+ return self.config.server_notices_mxid in user_ids
@defer.inlineCallbacks
def assert_accepted_privacy_policy(self, requester):
@@ -507,7 +509,7 @@ class EventCreationHandler(object):
event.event_id,
prev_state.event_id,
)
- defer.returnValue(prev_state)
+ return prev_state
yield self.handle_new_client_event(
requester=requester, event=event, context=context, ratelimit=ratelimit
@@ -523,6 +525,8 @@ class EventCreationHandler(object):
"""
prev_state_ids = yield context.get_prev_state_ids(self.store)
prev_event_id = prev_state_ids.get((event.type, event.state_key))
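+ # If there's no previous event for this (type, state_key), there's nothing to
+ # deduplicate against.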
+ if not prev_event_id:
+ return
prev_event = yield self.store.get_event(prev_event_id, allow_none=True)
if not prev_event:
return
@@ -531,7 +535,7 @@ class EventCreationHandler(object):
prev_content = encode_canonical_json(prev_event.content)
next_content = encode_canonical_json(event.content)
if prev_content == next_content:
- defer.returnValue(prev_event)
+ return prev_event
return
@defer.inlineCallbacks
@@ -563,7 +567,7 @@ class EventCreationHandler(object):
yield self.send_nonmember_event(
requester, event, context, ratelimit=ratelimit
)
- defer.returnValue(event)
+ return event
@measure_func("create_new_client_event")
@defer.inlineCallbacks
@@ -608,7 +612,7 @@ class EventCreationHandler(object):
if requester:
context.app_service = requester.app_service
- self.validator.validate_new(event)
+ self.validator.validate_new(event, self.config)
# If this event is an annotation then we check that that the sender
# can't annotate the same way twice (e.g. stops users from liking an
@@ -626,7 +630,7 @@ class EventCreationHandler(object):
logger.debug("Created event %s", event.event_id)
- defer.returnValue((event, context))
+ return (event, context)
@measure_func("handle_new_client_event")
@defer.inlineCallbacks
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index 20bcfed334..6711ced51a 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -15,12 +15,15 @@
# limitations under the License.
import logging
+from six import iteritems
+
from twisted.internet import defer
from twisted.python.failure import Failure
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import SynapseError
from synapse.logging.context import run_in_background
+from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.state import StateFilter
from synapse.types import RoomStreamToken
from synapse.util.async_helpers import ReadWriteLock
@@ -77,6 +80,111 @@ class PaginationHandler(object):
self._purges_by_id = {}
self._event_serializer = hs.get_event_client_serializer()
+ self._retention_default_max_lifetime = hs.config.retention_default_max_lifetime
+
+ if hs.config.retention_enabled:
+ # Run the purge jobs described in the configuration file.
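+ # Each job runs every job["interval"] milliseconds and purges rooms whose
+ # retention policy's 'max_lifetime' lies between job["shortest_max_lifetime"]
+ # (exclusive) and job["longest_max_lifetime"] (inclusive).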
+ for job in hs.config.retention_purge_jobs:
+ self.clock.looping_call(
+ run_as_background_process,
+ job["interval"],
+ "purge_history_for_rooms_in_range",
+ self.purge_history_for_rooms_in_range,
+ job["shortest_max_lifetime"],
+ job["longest_max_lifetime"],
+ )
+
+ @defer.inlineCallbacks
+ def purge_history_for_rooms_in_range(self, min_ms, max_ms):
+ """Purge outdated events from rooms within the given retention range.
+
+ If a default retention policy is defined in the server's configuration and its
+ 'max_lifetime' is within this range, this also targets rooms which don't have a
+ retention policy.
+
+ Args:
+ min_ms (int|None): Duration in milliseconds that defines the lower limit of
+ the range to handle (exclusive). If None, the range has no lower limit.
+ max_ms (int|None): Duration in milliseconds that defines the upper limit of
+ the range to handle (inclusive). If None, the range has no upper limit.
+ """
+ # We want the storage layer to include rooms with no retention policy in its
+ # return value only if a default retention policy is defined in the server's
+ # configuration and that policy's 'max_lifetime' falls within the range we're
+ # handling, i.e. it is higher than min_ms and lower than or equal to max_ms.
+ if self._retention_default_max_lifetime is not None:
+ include_null = True
+
+ if min_ms is not None and min_ms >= self._retention_default_max_lifetime:
+ # The default max_lifetime is lower than (or equal to) min_ms.
+ include_null = False
+
+ if max_ms is not None and max_ms < self._retention_default_max_lifetime:
+ # The default max_lifetime is higher than max_ms.
+ include_null = False
+ else:
+ include_null = False
+
+ rooms = yield self.store.get_rooms_for_retention_period_in_range(
+ min_ms, max_ms, include_null
+ )
+
+ for room_id, retention_policy in iteritems(rooms):
+ if room_id in self._purges_in_progress_by_room:
+ logger.warning(
+ "[purge] not purging room %s as there's an ongoing purge running"
+ " for this room",
+ room_id,
+ )
+ continue
+
+ max_lifetime = retention_policy["max_lifetime"]
+
+ if max_lifetime is None:
+ # If max_lifetime is None, it means that include_null equals True,
+ # therefore we can safely assume that there is a default policy defined
+ # in the server's configuration.
+ max_lifetime = self._retention_default_max_lifetime
+
+ # Figure out what token we should start purging at.
+ ts = self.clock.time_msec() - max_lifetime
+
+ stream_ordering = (yield self.store.find_first_stream_ordering_after_ts(ts))
+
+ r = (
+ yield self.store.get_room_event_after_stream_ordering(
+ room_id, stream_ordering
+ )
+ )
+ if not r:
+ logger.warning(
+ "[purge] purging events not possible: No event found "
+ "(ts %i => stream_ordering %i)",
+ ts,
+ stream_ordering,
+ )
+ continue
+
+ (stream, topo, _event_id) = r
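+            # Build a topological stream token ("t<topological>-<stream>") for the
+            # event we found; the purge below removes events from before this point
+            # in the room's history.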
+ token = "t%d-%d" % (topo, stream)
+
+ purge_id = random_string(16)
+
+ self._purges_by_id[purge_id] = PurgeStatus()
+
+ logger.info(
+ "Starting purging events in room %s (purge_id %s)" % (room_id, purge_id)
+ )
+
+ # We want to purge everything, including local events, and to run the purge in
+ # the background so that it's not blocking any other operation apart from
+ # other purges in the same room.
+ run_as_background_process(
+ "_purge_history", self._purge_history, purge_id, room_id, token, True
+ )
+
def start_purge_history(self, room_id, token, delete_local_events=False):
"""Start off a history purge on a room.
@@ -242,13 +350,11 @@ class PaginationHandler(object):
)
if not events:
- defer.returnValue(
- {
- "chunk": [],
- "start": pagin_config.from_token.to_string(),
- "end": next_token.to_string(),
- }
- )
+ return {
+ "chunk": [],
+ "start": pagin_config.from_token.to_string(),
+ "end": next_token.to_string(),
+ }
state = None
if event_filter and event_filter.lazy_load_members() and len(events) > 0:
@@ -286,4 +392,4 @@ class PaginationHandler(object):
)
)
- defer.returnValue(chunk)
+ return chunk
diff --git a/synapse/handlers/password_policy.py b/synapse/handlers/password_policy.py
new file mode 100644
index 0000000000..d06b110269
--- /dev/null
+++ b/synapse/handlers/password_policy.py
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import re
+
+from synapse.api.errors import Codes, PasswordRefusedError
+
+logger = logging.getLogger(__name__)
+
+
+class PasswordPolicyHandler(object):
+ def __init__(self, hs):
+ self.policy = hs.config.password_policy
+ self.enabled = hs.config.password_policy_enabled
+
+ # Regexps for the spec'd policy parameters.
+ self.regexp_digit = re.compile("[0-9]")
+ self.regexp_symbol = re.compile("[^a-zA-Z0-9]")
+ self.regexp_uppercase = re.compile("[A-Z]")
+ self.regexp_lowercase = re.compile("[a-z]")
+
+ def validate_password(self, password):
+ """Checks whether a given password complies with the server's policy.
+
+ Args:
+ password (str): The password to check against the server's policy.
+
+ Raises:
+ PasswordRefusedError: The password doesn't comply with the server's policy.
+ """
+
+ if not self.enabled:
+ return
+
+ minimum_accepted_length = self.policy.get("minimum_length", 0)
+ if len(password) < minimum_accepted_length:
+ raise PasswordRefusedError(
+ msg=(
+ "The password must be at least %d characters long"
+ % minimum_accepted_length
+ ),
+ errcode=Codes.PASSWORD_TOO_SHORT,
+ )
+
+ if (
+ self.policy.get("require_digit", False)
+ and self.regexp_digit.search(password) is None
+ ):
+ raise PasswordRefusedError(
+ msg="The password must include at least one digit",
+ errcode=Codes.PASSWORD_NO_DIGIT,
+ )
+
+ if (
+ self.policy.get("require_symbol", False)
+ and self.regexp_symbol.search(password) is None
+ ):
+ raise PasswordRefusedError(
+ msg="The password must include at least one symbol",
+ errcode=Codes.PASSWORD_NO_SYMBOL,
+ )
+
+ if (
+ self.policy.get("require_uppercase", False)
+ and self.regexp_uppercase.search(password) is None
+ ):
+ raise PasswordRefusedError(
+ msg="The password must include at least one uppercase letter",
+ errcode=Codes.PASSWORD_NO_UPPERCASE,
+ )
+
+ if (
+ self.policy.get("require_lowercase", False)
+ and self.regexp_lowercase.search(password) is None
+ ):
+ raise PasswordRefusedError(
+ msg="The password must include at least one lowercase letter",
+ errcode=Codes.PASSWORD_NO_LOWERCASE,
+ )
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 6f3537e435..ea54d0b991 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -461,7 +461,7 @@ class PresenceHandler(object):
if affect_presence:
run_in_background(_end)
- defer.returnValue(_user_syncing())
+ return _user_syncing()
def get_currently_syncing_users(self):
"""Get the set of user ids that are currently syncing on this HS.
@@ -556,7 +556,7 @@ class PresenceHandler(object):
"""Get the current presence state for a user.
"""
res = yield self.current_state_for_users([user_id])
- defer.returnValue(res[user_id])
+ return res[user_id]
@defer.inlineCallbacks
def current_state_for_users(self, user_ids):
@@ -585,7 +585,7 @@ class PresenceHandler(object):
states.update(new)
self.user_to_current_state.update(new)
- defer.returnValue(states)
+ return states
@defer.inlineCallbacks
def _persist_and_notify(self, states):
@@ -681,7 +681,7 @@ class PresenceHandler(object):
def get_state(self, target_user, as_event=False):
results = yield self.get_states([target_user.to_string()], as_event=as_event)
- defer.returnValue(results[0])
+ return results[0]
@defer.inlineCallbacks
def get_states(self, target_user_ids, as_event=False):
@@ -703,17 +703,15 @@ class PresenceHandler(object):
now = self.clock.time_msec()
if as_event:
- defer.returnValue(
- [
- {
- "type": "m.presence",
- "content": format_user_presence_state(state, now),
- }
- for state in updates
- ]
- )
+ return [
+ {
+ "type": "m.presence",
+ "content": format_user_presence_state(state, now),
+ }
+ for state in updates
+ ]
else:
- defer.returnValue(updates)
+ return updates
@defer.inlineCallbacks
def set_state(self, target_user, state, ignore_status_msg=False):
@@ -757,9 +755,9 @@ class PresenceHandler(object):
)
if observer_room_ids & observed_room_ids:
- defer.returnValue(True)
+ return True
- defer.returnValue(False)
+ return False
@defer.inlineCallbacks
def get_all_presence_updates(self, last_id, current_id):
@@ -778,7 +776,7 @@ class PresenceHandler(object):
# TODO(markjh): replicate the unpersisted changes.
# This could use the in-memory stores for recent changes.
rows = yield self.store.get_all_presence_updates(last_id, current_id)
- defer.returnValue(rows)
+ return rows
def notify_new_event(self):
"""Called when new events have happened. Handles users and servers
@@ -1034,7 +1032,7 @@ class PresenceEventSource(object):
#
# Hence this guard where we just return nothing so that the sync
# doesn't return. C.f. #5503.
- defer.returnValue(([], max_token))
+ return ([], max_token)
presence = self.get_presence_handler()
stream_change_cache = self.store.presence_stream_cache
@@ -1068,17 +1066,11 @@ class PresenceEventSource(object):
updates = yield presence.current_state_for_users(user_ids_changed)
if include_offline:
- defer.returnValue((list(updates.values()), max_token))
+ return (list(updates.values()), max_token)
else:
- defer.returnValue(
- (
- [
- s
- for s in itervalues(updates)
- if s.state != PresenceState.OFFLINE
- ],
- max_token,
- )
+ return (
+ [s for s in itervalues(updates) if s.state != PresenceState.OFFLINE],
+ max_token,
)
def get_current_key(self):
@@ -1107,7 +1099,7 @@ class PresenceEventSource(object):
)
users_interested_in.update(user_ids)
- defer.returnValue(users_interested_in)
+ return users_interested_in
def handle_timeouts(user_states, is_mine_fn, syncing_user_ids, now):
@@ -1287,7 +1279,7 @@ def get_interested_parties(store, states):
# Always notify self
users_to_states.setdefault(state.user_id, []).append(state)
- defer.returnValue((room_ids_to_states, users_to_states))
+ return (room_ids_to_states, users_to_states)
@defer.inlineCallbacks
@@ -1321,4 +1313,4 @@ def get_interested_remotes(store, states, state_handler):
host = get_domain_from_id(user_id)
hosts_and_states.append(([host], states))
- defer.returnValue(hosts_and_states)
+ return hosts_and_states
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index a2388a7091..136128b625 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,8 +17,11 @@
import logging
from six import raise_from
+from six.moves import range
-from twisted.internet import defer
+from signedjson.sign import sign_json
+
+from twisted.internet import defer, reactor
from synapse.api.errors import (
AuthError,
@@ -27,6 +31,7 @@ from synapse.api.errors import (
StoreError,
SynapseError,
)
+from synapse.logging.context import run_in_background
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import UserID, get_domain_from_id
@@ -46,6 +51,8 @@ class BaseProfileHandler(BaseHandler):
subclass MasterProfileHandler
"""
+ PROFILE_REPLICATE_INTERVAL = 2 * 60 * 1000
+
def __init__(self, hs):
super(BaseProfileHandler, self).__init__(hs)
@@ -56,6 +63,87 @@ class BaseProfileHandler(BaseHandler):
self.user_directory_handler = hs.get_user_directory_handler()
+ self.http_client = hs.get_simple_http_client()
+
+ self.max_avatar_size = hs.config.max_avatar_size
+ self.allowed_avatar_mimetypes = hs.config.allowed_avatar_mimetypes
+
+ if hs.config.worker_app is None:
+ self.clock.looping_call(
+ self._start_update_remote_profile_cache, self.PROFILE_UPDATE_MS
+ )
+
+ if len(self.hs.config.replicate_user_profiles_to) > 0:
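+            # On startup, make sure every profile row has a replication batch
+            # number, then kick off an initial replication run.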
+ reactor.callWhenRunning(self._assign_profile_replication_batches)
+ reactor.callWhenRunning(self._replicate_profiles)
+ # Add a looping call to replicate_profiles: this handles retries
+ # if the replication is unsuccessful when the user updated their
+ # profile.
+ self.clock.looping_call(
+ self._replicate_profiles, self.PROFILE_REPLICATE_INTERVAL
+ )
+
+ @defer.inlineCallbacks
+ def _assign_profile_replication_batches(self):
+ """If no profile replication has been done yet, allocate replication batch
+ numbers to each profile to start the replication process.
+ """
+ logger.info("Assigning profile batch numbers...")
+ total = 0
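+        # Keep calling assign_profile_batch() until it reports that there are no
+        # profiles left to number.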
+ while True:
+ assigned = yield self.store.assign_profile_batch()
+ total += assigned
+ if assigned == 0:
+ break
+ logger.info("Assigned %d profile batch numbers", total)
+
+ @defer.inlineCallbacks
+ def _replicate_profiles(self):
+ """If any profile data has been updated and not pushed to the replication targets,
+ replicate it.
+ """
+ host_batches = yield self.store.get_replication_hosts()
+ latest_batch = yield self.store.get_latest_profile_replication_batch_number()
+ if latest_batch is None:
+ latest_batch = -1
+ for repl_host in self.hs.config.replicate_user_profiles_to:
+ if repl_host not in host_batches:
+ host_batches[repl_host] = -1
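+            # Push every batch this host hasn't seen yet, oldest first. If one
+            # batch fails, stop for this host so the next run retries from it.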
+ try:
+ for i in range(host_batches[repl_host] + 1, latest_batch + 1):
+ yield self._replicate_host_profile_batch(repl_host, i)
+ except Exception:
+ logger.exception(
+ "Exception while replicating to %s: aborting for now", repl_host
+ )
+
+ @defer.inlineCallbacks
+ def _replicate_host_profile_batch(self, host, batchnum):
+ logger.info("Replicating profile batch %d to %s", batchnum, host)
+ batch_rows = yield self.store.get_profile_batch(batchnum)
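+        # Build the replication payload: map each full user ID to its profile
+        # fields, or to None when the profile is marked inactive (hidden or
+        # deactivated).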
+ batch = {
+ UserID(r["user_id"], self.hs.hostname).to_string(): (
+ {"display_name": r["displayname"], "avatar_url": r["avatar_url"]}
+ if r["active"]
+ else None
+ )
+ for r in batch_rows
+ }
+
+ url = "https://%s/_matrix/identity/api/v1/replicate_profiles" % (host,)
+ body = {"batchnum": batchnum, "batch": batch, "origin_server": self.hs.hostname}
+ signed_body = sign_json(body, self.hs.hostname, self.hs.config.signing_key[0])
+ try:
+ yield self.http_client.post_json_get_json(url, signed_body)
+ yield self.store.update_replication_batch_for_host(host, batchnum)
+ logger.info("Sucessfully replicated profile batch %d to %s", batchnum, host)
+ except Exception:
+ # This will get retried when the looping call next comes around
+ logger.exception(
+ "Failed to replicate profile batch %d to %s", batchnum, host
+ )
+ raise
+
@defer.inlineCallbacks
def get_profile(self, user_id):
target_user = UserID.from_string(user_id)
@@ -73,7 +161,7 @@ class BaseProfileHandler(BaseHandler):
raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND)
raise
- defer.returnValue({"displayname": displayname, "avatar_url": avatar_url})
+ return {"displayname": displayname, "avatar_url": avatar_url}
else:
try:
result = yield self.federation.make_query(
@@ -82,7 +170,7 @@ class BaseProfileHandler(BaseHandler):
args={"user_id": user_id},
ignore_backoff=True,
)
- defer.returnValue(result)
+ return result
except RequestSendFailed as e:
raise_from(SynapseError(502, "Failed to fetch profile"), e)
except HttpResponseException as e:
@@ -108,10 +196,10 @@ class BaseProfileHandler(BaseHandler):
raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND)
raise
- defer.returnValue({"displayname": displayname, "avatar_url": avatar_url})
+ return {"displayname": displayname, "avatar_url": avatar_url}
else:
profile = yield self.store.get_from_remote_profile_cache(user_id)
- defer.returnValue(profile or {})
+ return profile or {}
@defer.inlineCallbacks
def get_displayname(self, target_user):
@@ -125,7 +213,7 @@ class BaseProfileHandler(BaseHandler):
raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND)
raise
- defer.returnValue(displayname)
+ return displayname
else:
try:
result = yield self.federation.make_query(
@@ -139,7 +227,7 @@ class BaseProfileHandler(BaseHandler):
except HttpResponseException as e:
raise e.to_synapse_error()
- defer.returnValue(result["displayname"])
+ return result["displayname"]
@defer.inlineCallbacks
def set_displayname(self, target_user, requester, new_displayname, by_admin=False):
@@ -154,9 +242,16 @@ class BaseProfileHandler(BaseHandler):
if not self.hs.is_mine(target_user):
raise SynapseError(400, "User is not hosted on this Home Server")
- if not by_admin and target_user != requester.user:
+ if not by_admin and requester and target_user != requester.user:
raise AuthError(400, "Cannot set another user's displayname")
+ if not by_admin and self.hs.config.disable_set_displayname:
+ profile = yield self.store.get_profileinfo(target_user.localpart)
+ if profile.display_name:
+ raise SynapseError(
+ 400, "Changing displayname is disabled on this server"
+ )
+
if len(new_displayname) > MAX_DISPLAYNAME_LEN:
raise SynapseError(
400, "Displayname is too long (max %i)" % (MAX_DISPLAYNAME_LEN,)
@@ -165,7 +260,17 @@ class BaseProfileHandler(BaseHandler):
if new_displayname == "":
new_displayname = None
- yield self.store.set_profile_displayname(target_user.localpart, new_displayname)
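+        # If profile replication is enabled, stamp this change with a fresh batch
+        # number so it gets pushed out to the configured replication targets.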
+ if len(self.hs.config.replicate_user_profiles_to) > 0:
+ cur_batchnum = (
+ yield self.store.get_latest_profile_replication_batch_number()
+ )
+ new_batchnum = 0 if cur_batchnum is None else cur_batchnum + 1
+ else:
+ new_batchnum = None
+
+ yield self.store.set_profile_displayname(
+ target_user.localpart, new_displayname, new_batchnum
+ )
if self.hs.config.user_directory_search_all_users:
profile = yield self.store.get_profileinfo(target_user.localpart)
@@ -173,7 +278,39 @@ class BaseProfileHandler(BaseHandler):
target_user.to_string(), profile
)
- yield self._update_join_states(requester, target_user)
+ if requester:
+ yield self._update_join_states(requester, target_user)
+
+ # start a profile replication push
+ run_in_background(self._replicate_profiles)
+
+ @defer.inlineCallbacks
+ def set_active(self, target_user, active, hide):
+ """
+ Sets the 'active' flag on a user profile. If set to false, the user
+ account is considered deactivated or hidden.
+
+        If 'hide' is true, then we interpret active=False as a request to try to
+        hide the user rather than deactivate them. This means withholding the
+        profile from replication (and marking it as inactive) rather than clearing
+        the profile from the HS DB. Note that unlike set_displayname and
+ set_avatar_url, this does *not* perform authorization checks! This is
+ because the only place it's used currently is in account deactivation
+ where we've already done these checks anyway.
+ """
+ if len(self.hs.config.replicate_user_profiles_to) > 0:
+ cur_batchnum = (
+ yield self.store.get_latest_profile_replication_batch_number()
+ )
+ new_batchnum = 0 if cur_batchnum is None else cur_batchnum + 1
+ else:
+ new_batchnum = None
+ yield self.store.set_profile_active(
+ target_user.localpart, active, hide, new_batchnum
+ )
+
+ # start a profile replication push
+ run_in_background(self._replicate_profiles)
@defer.inlineCallbacks
def get_avatar_url(self, target_user):
@@ -186,7 +323,7 @@ class BaseProfileHandler(BaseHandler):
if e.code == 404:
raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND)
raise
- defer.returnValue(avatar_url)
+ return avatar_url
else:
try:
result = yield self.federation.make_query(
@@ -200,7 +337,7 @@ class BaseProfileHandler(BaseHandler):
except HttpResponseException as e:
raise e.to_synapse_error()
- defer.returnValue(result["avatar_url"])
+ return result["avatar_url"]
@defer.inlineCallbacks
def set_avatar_url(self, target_user, requester, new_avatar_url, by_admin=False):
@@ -212,12 +349,59 @@ class BaseProfileHandler(BaseHandler):
if not by_admin and target_user != requester.user:
raise AuthError(400, "Cannot set another user's avatar_url")
+ if not by_admin and self.hs.config.disable_set_avatar_url:
+ profile = yield self.store.get_profileinfo(target_user.localpart)
+ if profile.avatar_url:
+ raise SynapseError(
+ 400, "Changing avatar url is disabled on this server"
+ )
+
+ if len(self.hs.config.replicate_user_profiles_to) > 0:
+ cur_batchnum = (
+ yield self.store.get_latest_profile_replication_batch_number()
+ )
+ new_batchnum = 0 if cur_batchnum is None else cur_batchnum + 1
+ else:
+ new_batchnum = None
+
if len(new_avatar_url) > MAX_AVATAR_URL_LEN:
raise SynapseError(
400, "Avatar URL is too long (max %i)" % (MAX_AVATAR_URL_LEN,)
)
- yield self.store.set_profile_avatar_url(target_user.localpart, new_avatar_url)
+ # Enforce a max avatar size if one is defined
+ if self.max_avatar_size or self.allowed_avatar_mimetypes:
+ media_id = self._validate_and_parse_media_id_from_avatar_url(new_avatar_url)
+
+ # Check that this media exists locally
+ media_info = yield self.store.get_local_media(media_id)
+ if not media_info:
+ raise SynapseError(
+ 400, "Unknown media id supplied", errcode=Codes.NOT_FOUND
+ )
+
+ # Ensure avatar does not exceed max allowed avatar size
+ media_size = media_info["media_length"]
+ if self.max_avatar_size and media_size > self.max_avatar_size:
+ raise SynapseError(
+ 400,
+ "Avatars must be less than %s bytes in size"
+ % (self.max_avatar_size,),
+ errcode=Codes.TOO_LARGE,
+ )
+
+ # Ensure the avatar's file type is allowed
+ if (
+ self.allowed_avatar_mimetypes
+ and media_info["media_type"] not in self.allowed_avatar_mimetypes
+ ):
+ raise SynapseError(
+ 400, "Avatar file type '%s' not allowed" % media_info["media_type"]
+ )
+
+ yield self.store.set_profile_avatar_url(
+ target_user.localpart, new_avatar_url, new_batchnum
+ )
if self.hs.config.user_directory_search_all_users:
profile = yield self.store.get_profileinfo(target_user.localpart)
@@ -227,6 +411,23 @@ class BaseProfileHandler(BaseHandler):
yield self._update_join_states(requester, target_user)
+ # start a profile replication push
+ run_in_background(self._replicate_profiles)
+
+ def _validate_and_parse_media_id_from_avatar_url(self, mxc):
+ """Validate and parse a provided avatar url and return the local media id
+
+ Args:
+ mxc (str): A mxc URL
+
+ Returns:
+ str: The ID of the media
+ """
+ avatar_pieces = mxc.split("/")
+ if len(avatar_pieces) != 4 or avatar_pieces[0] != "mxc:":
+ raise SynapseError(400, "Invalid avatar URL '%s' supplied" % mxc)
+ return avatar_pieces[-1]
+
@defer.inlineCallbacks
def on_profile_query(self, args):
user = UserID.from_string(args["user_id"])
@@ -251,7 +452,7 @@ class BaseProfileHandler(BaseHandler):
raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND)
raise
- defer.returnValue(response)
+ return response
@defer.inlineCallbacks
def _update_join_states(self, requester, target_user):
@@ -282,7 +483,7 @@ class BaseProfileHandler(BaseHandler):
@defer.inlineCallbacks
def check_profile_query_allowed(self, target_user, requester=None):
"""Checks whether a profile query is allowed. If the
- 'require_auth_for_profile_requests' config flag is set to True and a
+ 'limit_profile_requests_to_known_users' config flag is set to True and a
'requester' is provided, the query is only allowed if the two users
share a room.
@@ -300,7 +501,11 @@ class BaseProfileHandler(BaseHandler):
# be None when this function is called outside of a profile query, e.g.
# when building a membership event. In this case, we must allow the
# lookup.
- if not self.hs.config.require_auth_for_profile_requests or not requester:
+ if not self.hs.config.limit_profile_requests_to_known_users or not requester:
+ return
+
+ # Always allow the user to query their own profile.
+ if target_user.to_string() == requester.to_string():
return
# Always allow the user to query their own profile.
diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py
index e58bf7e360..73973502a4 100644
--- a/synapse/handlers/receipts.py
+++ b/synapse/handlers/receipts.py
@@ -93,7 +93,7 @@ class ReceiptsHandler(BaseHandler):
if min_batch_id is None:
# no new receipts
- defer.returnValue(False)
+ return False
affected_room_ids = list(set([r.room_id for r in receipts]))
@@ -103,7 +103,7 @@ class ReceiptsHandler(BaseHandler):
min_batch_id, max_batch_id, affected_room_ids
)
- defer.returnValue(True)
+ return True
@defer.inlineCallbacks
def received_client_receipt(self, room_id, receipt_type, user_id, event_id):
@@ -133,9 +133,9 @@ class ReceiptsHandler(BaseHandler):
)
if not result:
- defer.returnValue([])
+ return []
- defer.returnValue(result)
+ return result
class ReceiptEventSource(object):
@@ -148,13 +148,13 @@ class ReceiptEventSource(object):
to_key = yield self.get_current_key()
if from_key == to_key:
- defer.returnValue(([], to_key))
+ return ([], to_key)
events = yield self.store.get_linearized_receipts_for_rooms(
room_ids, from_key=from_key, to_key=to_key
)
- defer.returnValue((events, to_key))
+ return (events, to_key)
def get_current_key(self, direction="f"):
return self.store.get_max_receipt_stream_id()
@@ -173,4 +173,4 @@ class ReceiptEventSource(object):
room_ids, from_key=from_key, to_key=to_key
)
- defer.returnValue((events, to_key))
+ return (events, to_key)
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index bb7cfd71b9..0daf193945 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -60,6 +60,7 @@ class RegistrationHandler(BaseHandler):
self.profile_handler = hs.get_profile_handler()
self.user_directory_handler = hs.get_user_directory_handler()
self.captcha_client = CaptchaServerHttpClient(hs)
+ self.http_client = hs.get_simple_http_client()
self.identity_handler = self.hs.get_handlers().identity_handler
self.ratelimiter = hs.get_registration_ratelimiter()
@@ -72,6 +73,8 @@ class RegistrationHandler(BaseHandler):
)
self._server_notices_mxid = hs.config.server_notices_mxid
+ self._show_in_user_directory = self.hs.config.show_users_in_user_directory
+
if hs.config.worker_app:
self._register_client = ReplicationRegisterServlet.make_client(hs)
self._register_device_client = RegisterDeviceReplicationServlet.make_client(
@@ -213,6 +216,11 @@ class RegistrationHandler(BaseHandler):
address=address,
)
+ if default_display_name:
+ yield self.profile_handler.set_displayname(
+ user, None, default_display_name, by_admin=True
+ )
+
if self.hs.config.user_directory_search_all_users:
profile = yield self.store.get_profileinfo(localpart)
yield self.user_directory_handler.handle_local_profile_change(
@@ -238,6 +246,11 @@ class RegistrationHandler(BaseHandler):
create_profile_with_displayname=default_display_name,
address=address,
)
+
+ yield self.profile_handler.set_displayname(
+ user, None, default_display_name, by_admin=True
+ )
+
except SynapseError:
# if user id is taken, just generate another
user = None
@@ -265,7 +278,15 @@ class RegistrationHandler(BaseHandler):
# Bind email to new account
yield self._register_email_threepid(user_id, threepid_dict, None, False)
- defer.returnValue(user_id)
+ # Prevent the new user from showing up in the user directory if the server
+ # mandates it.
+ if not self._show_in_user_directory:
+ yield self.store.add_account_data_for_user(
+ user_id, "im.vector.hide_profile", {"hide_profile": True}
+ )
+ yield self.profile_handler.set_active(user, False, True)
+
+ return user_id
@defer.inlineCallbacks
def _auto_join_rooms(self, user_id):
@@ -335,7 +356,9 @@ class RegistrationHandler(BaseHandler):
yield self._auto_join_rooms(user_id)
@defer.inlineCallbacks
- def appservice_register(self, user_localpart, as_token):
+ def appservice_register(self, user_localpart, as_token, password, display_name):
+ # FIXME: this should be factored out and merged with normal register()
+
user = UserID(user_localpart, self.hs.hostname)
user_id = user.to_string()
service = self.store.get_app_service_by_token(as_token)
@@ -354,13 +377,30 @@ class RegistrationHandler(BaseHandler):
user_id, allowed_appservice=service
)
+ password_hash = ""
+ if password:
+            password_hash = yield self._auth_handler.hash(password)
+
+ display_name = display_name or user.localpart
+
yield self.register_with_store(
user_id=user_id,
- password_hash="",
+ password_hash=password_hash,
appservice_id=service_id,
- create_profile_with_displayname=user.localpart,
+ create_profile_with_displayname=display_name,
)
- defer.returnValue(user_id)
+
+ yield self.profile_handler.set_displayname(
+ user, None, display_name, by_admin=True
+ )
+
+ if self.hs.config.user_directory_search_all_users:
+ profile = yield self.store.get_profileinfo(user_localpart)
+ yield self.user_directory_handler.handle_local_profile_change(
+ user_id, profile
+ )
+
+ return user_id
@defer.inlineCallbacks
def check_recaptcha(self, ip, private_key, challenge, response):
@@ -384,6 +424,38 @@ class RegistrationHandler(BaseHandler):
logger.info("Valid captcha entered from %s", ip)
@defer.inlineCallbacks
+ def register_saml2(self, localpart):
+ """
+        Registers a local account for a user authenticated via SAML2.
+ """
+ if types.contains_invalid_mxid_characters(localpart):
+ raise SynapseError(
+ 400, "User ID can only contain characters a-z, 0-9, or '=_-./'"
+ )
+ yield self.auth.check_auth_blocking()
+ user = UserID(localpart, self.hs.hostname)
+ user_id = user.to_string()
+
+ yield self.check_user_id_not_appservice_exclusive(user_id)
+ token = self.macaroon_gen.generate_access_token(user_id)
+ try:
+ yield self.register_with_store(
+ user_id=user_id,
+ token=token,
+ password_hash=None,
+ create_profile_with_displayname=user.localpart,
+ )
+
+ yield self.profile_handler.set_displayname(
+ user, None, user.localpart, by_admin=True
+ )
+ except Exception as e:
+ yield self.store.add_access_token_to_user(user_id, token)
+ # Ignore Registration errors
+ logger.exception(e)
+        return (user_id, token)
+
+ @defer.inlineCallbacks
def register_email(self, threepidCreds):
"""
Registers emails with an identity server.
@@ -451,6 +523,39 @@ class RegistrationHandler(BaseHandler):
)
@defer.inlineCallbacks
+ def shadow_register(self, localpart, display_name, auth_result, params):
+ """Invokes the current registration on another server, using
+ shared secret registration, passing in any auth_results from
+ other registration UI auth flows (e.g. validated 3pids)
+ Useful for setting up shadow/backup accounts on a parallel deployment.
+ """
+
+ # TODO: retries
+ shadow_hs_url = self.hs.config.shadow_server.get("hs_url")
+ as_token = self.hs.config.shadow_server.get("as_token")
+
+ yield self.http_client.post_json_get_json(
+ "%s/_matrix/client/r0/register?access_token=%s" % (shadow_hs_url, as_token),
+ {
+ # XXX: auth_result is an unspecified extension for shadow registration
+ "auth_result": auth_result,
+ # XXX: another unspecified extension for shadow registration to ensure
+                # that the displayname is correctly set by the master server
+ "display_name": display_name,
+ "username": localpart,
+ "password": params.get("password"),
+ "bind_email": params.get("bind_email"),
+ "bind_msisdn": params.get("bind_msisdn"),
+ "device_id": params.get("device_id"),
+ "initial_device_display_name": params.get(
+ "initial_device_display_name"
+ ),
+ "inhibit_login": False,
+ "access_token": as_token,
+ },
+ )
+
+ @defer.inlineCallbacks
def _generate_user_id(self, reseed=False):
if reseed or self._next_generated_user_id is None:
with (yield self._generate_user_id_linearizer.queue(())):
@@ -461,7 +566,7 @@ class RegistrationHandler(BaseHandler):
id = self._next_generated_user_id
self._next_generated_user_id += 1
- defer.returnValue(str(id))
+ return str(id)
@defer.inlineCallbacks
def _validate_captcha(self, ip_addr, private_key, challenge, response):
@@ -481,7 +586,7 @@ class RegistrationHandler(BaseHandler):
"error_url": "http://www.recaptcha.net/recaptcha/api/challenge?"
+ "error=%s" % lines[1],
}
- defer.returnValue(json)
+ return json
@defer.inlineCallbacks
def _submit_captcha(self, ip_addr, private_key, challenge, response):
@@ -497,7 +602,56 @@ class RegistrationHandler(BaseHandler):
"response": response,
},
)
- defer.returnValue(data)
+ return data
+
+ @defer.inlineCallbacks
+ def get_or_create_user(self, requester, localpart, displayname, password_hash=None):
+ """Creates a new user if the user does not exist,
+ else revokes all previous access tokens and generates a new one.
+
+ Args:
+            localpart (str): The local part of the user ID to register. Must be
+                provided; a request without one is rejected with a 400 error.
+ Returns:
+ A tuple of (user_id, access_token).
+ Raises:
+ RegistrationError if there was a problem registering.
+
+ NB this is only used in tests. TODO: move it to the test package!
+ """
+ if localpart is None:
+ raise SynapseError(400, "Request must include user id")
+ yield self.auth.check_auth_blocking()
+ need_register = True
+
+ try:
+ yield self.check_username(localpart)
+ except SynapseError as e:
+ if e.errcode == Codes.USER_IN_USE:
+ need_register = False
+ else:
+ raise
+
+ user = UserID(localpart, self.hs.hostname)
+ user_id = user.to_string()
+ token = self.macaroon_gen.generate_access_token(user_id)
+
+ if need_register:
+ yield self.register_with_store(
+ user_id=user_id,
+ token=token,
+ password_hash=password_hash,
+ create_profile_with_displayname=displayname or user.localpart,
+ )
+ if displayname is not None:
+ yield self.profile_handler.set_displayname(
+ user, None, displayname or user.localpart, by_admin=True
+ )
+ else:
+ yield self._auth_handler.delete_access_tokens_for_user(user_id)
+ yield self.store.add_access_token_to_user(user_id=user_id, token=token)
+
+        return (user_id, token)
@defer.inlineCallbacks
def _join_user_to_room(self, requester, room_identifier):
@@ -622,7 +776,7 @@ class RegistrationHandler(BaseHandler):
initial_display_name=initial_display_name,
is_guest=is_guest,
)
- defer.returnValue((r["device_id"], r["access_token"]))
+ return (r["device_id"], r["access_token"])
valid_until_ms = None
if self.session_lifetime is not None:
@@ -645,7 +799,7 @@ class RegistrationHandler(BaseHandler):
user_id, device_id=device_id, valid_until_ms=valid_until_ms
)
- defer.returnValue((device_id, access_token))
+ return (device_id, access_token)
@defer.inlineCallbacks
def post_registration_actions(
@@ -798,7 +952,7 @@ class RegistrationHandler(BaseHandler):
if ex.errcode == Codes.MISSING_PARAM:
# This will only happen if the ID server returns a malformed response
logger.info("Can't add incomplete 3pid")
- defer.returnValue(None)
+ return None
raise
yield self._auth_handler.add_threepid(
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index db3f8cb76b..af7cfa7888 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -52,12 +52,14 @@ class RoomCreationHandler(BaseHandler):
"history_visibility": "shared",
"original_invitees_have_ops": False,
"guest_can_join": True,
+ "encryption_alg": "m.megolm.v1.aes-sha2",
},
RoomCreationPreset.TRUSTED_PRIVATE_CHAT: {
"join_rules": JoinRules.INVITE,
"history_visibility": "shared",
"original_invitees_have_ops": True,
"guest_can_join": True,
+ "encryption_alg": "m.megolm.v1.aes-sha2",
},
RoomCreationPreset.PUBLIC_CHAT: {
"join_rules": JoinRules.PUBLIC,
@@ -128,7 +130,7 @@ class RoomCreationHandler(BaseHandler):
old_room_id,
new_version, # args for _upgrade_room
)
- defer.returnValue(ret)
+ return ret
@defer.inlineCallbacks
def _upgrade_room(self, requester, old_room_id, new_version):
@@ -193,7 +195,7 @@ class RoomCreationHandler(BaseHandler):
requester, old_room_id, new_room_id, old_room_state
)
- defer.returnValue(new_room_id)
+ return new_room_id
@defer.inlineCallbacks
def _update_upgraded_room_pls(
@@ -294,7 +296,19 @@ class RoomCreationHandler(BaseHandler):
"""
user_id = requester.user.to_string()
- if not self.spam_checker.user_may_create_room(user_id):
+ if (
+ self._server_notices_mxid is not None
+ and requester.user.to_string() == self._server_notices_mxid
+ ):
+ # allow the server notices mxid to create rooms
+ is_requester_admin = True
+
+ else:
+ is_requester_admin = yield self.auth.is_server_admin(requester.user)
+
+ if not is_requester_admin and not self.spam_checker.user_may_create_room(
+ user_id, invite_list=[], third_party_invite_list=[], cloning=True
+ ):
raise SynapseError(403, "You are not permitted to create rooms")
creation_content = {
@@ -516,8 +530,14 @@ class RoomCreationHandler(BaseHandler):
requester, config, is_requester_admin=is_requester_admin
)
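+        # Pull the invite lists out of the creation request up front so they can
+        # be passed to the spam checker below.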
+ invite_list = config.get("invite", [])
+ invite_3pid_list = config.get("invite_3pid", [])
+
if not is_requester_admin and not self.spam_checker.user_may_create_room(
- user_id
+ user_id,
+ invite_list=invite_list,
+ third_party_invite_list=invite_3pid_list,
+ cloning=False,
):
raise SynapseError(403, "You are not permitted to create rooms")
@@ -551,7 +571,6 @@ class RoomCreationHandler(BaseHandler):
else:
room_alias = None
- invite_list = config.get("invite", [])
for i in invite_list:
try:
UserID.from_string(i)
@@ -560,8 +579,6 @@ class RoomCreationHandler(BaseHandler):
yield self.event_creation_handler.assert_accepted_privacy_policy(requester)
- invite_3pid_list = config.get("invite_3pid", [])
-
visibility = config.get("visibility", None)
is_public = visibility == "public"
@@ -649,6 +666,7 @@ class RoomCreationHandler(BaseHandler):
"invite",
ratelimit=False,
content=content,
+ new_room=True,
)
for invite_3pid in invite_3pid_list:
@@ -663,6 +681,7 @@ class RoomCreationHandler(BaseHandler):
id_server,
requester,
txn_id=None,
+ new_room=True,
)
result = {"room_id": room_id}
@@ -671,7 +690,7 @@ class RoomCreationHandler(BaseHandler):
result["room_alias"] = room_alias.to_string()
yield directory_handler.send_room_alias_update_event(requester, room_id)
- defer.returnValue(result)
+ return result
@defer.inlineCallbacks
def _send_events_for_new_room(
@@ -719,6 +738,7 @@ class RoomCreationHandler(BaseHandler):
"join",
ratelimit=False,
content=creator_join_profile,
+ new_room=True,
)
# We treat the power levels override specially as this needs to be one
@@ -780,6 +800,13 @@ class RoomCreationHandler(BaseHandler):
for (etype, state_key), content in initial_state.items():
yield send(etype=etype, state_key=state_key, content=content)
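+        # If the preset specifies an encryption algorithm, turn on end-to-end
+        # encryption for the room by sending an m.room.encryption state event.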
+ if "encryption_alg" in config:
+ yield send(
+ etype=EventTypes.Encryption,
+ state_key="",
+ content={"algorithm": config["encryption_alg"]},
+ )
+
@defer.inlineCallbacks
def _generate_room_id(self, creator_id, is_public):
# autogen room IDs and try to create it. We may clash, so just
@@ -796,7 +823,7 @@ class RoomCreationHandler(BaseHandler):
room_creator_user_id=creator_id,
is_public=is_public,
)
- defer.returnValue(gen_room_id)
+ return gen_room_id
except StoreError:
attempts += 1
raise StoreError(500, "Couldn't generate a room ID.")
@@ -839,7 +866,7 @@ class RoomContextHandler(object):
event_id, get_prev_content=True, allow_none=True
)
if not event:
- defer.returnValue(None)
+ return None
return
filtered = yield (filter_evts([event]))
@@ -890,7 +917,7 @@ class RoomContextHandler(object):
results["end"] = token.copy_and_replace("room_key", results["end"]).to_string()
- defer.returnValue(results)
+ return results
class RoomEventSource(object):
@@ -941,7 +968,7 @@ class RoomEventSource(object):
else:
end_key = to_key
- defer.returnValue((events, end_key))
+ return (events, end_key)
def get_current_key(self):
return self.store.get_room_events_max_id()
@@ -959,4 +986,4 @@ class RoomEventSource(object):
limit=config.limit,
)
- defer.returnValue((events, next_key))
+ return (events, next_key)
diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py
index aae696a7e8..e9094ad02b 100644
--- a/synapse/handlers/room_list.py
+++ b/synapse/handlers/room_list.py
@@ -325,7 +325,7 @@ class RoomListHandler(BaseHandler):
current_limit=since_token.current_limit - 1,
).to_token()
- defer.returnValue(results)
+ return results
@defer.inlineCallbacks
def _append_room_entry_to_chunk(
@@ -420,7 +420,7 @@ class RoomListHandler(BaseHandler):
if join_rules_event:
join_rule = join_rules_event.content.get("join_rule", None)
if not allow_private and join_rule and join_rule != JoinRules.PUBLIC:
- defer.returnValue(None)
+ return None
# Return whether this room is open to federation users or not
create_event = current_state.get((EventTypes.Create, ""))
@@ -469,7 +469,7 @@ class RoomListHandler(BaseHandler):
if avatar_url:
result["avatar_url"] = avatar_url
- defer.returnValue(result)
+ return result
@defer.inlineCallbacks
def get_remote_public_room_list(
@@ -482,7 +482,7 @@ class RoomListHandler(BaseHandler):
third_party_instance_id=None,
):
if not self.enable_room_list_search:
- defer.returnValue({"chunk": [], "total_room_count_estimate": 0})
+ return {"chunk": [], "total_room_count_estimate": 0}
if search_filter:
# We currently don't support searching across federation, so we have
@@ -507,7 +507,7 @@ class RoomListHandler(BaseHandler):
]
}
- defer.returnValue(res)
+ return res
def _get_remote_list_cached(
self,
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index e0196ef83e..e2ac2637c5 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -20,22 +20,23 @@ import logging
from six.moves import http_client
-from signedjson.key import decode_verify_key_bytes
-from signedjson.sign import verify_signed_json
-from unpaddedbase64 import decode_base64
-
from twisted.internet import defer
import synapse.server
import synapse.types
from synapse.api.constants import EventTypes, Membership
-from synapse.api.errors import AuthError, Codes, HttpResponseException, SynapseError
+from synapse.api.ratelimiting import Ratelimiter
+from synapse.api.errors import (
+ AuthError,
+ Codes,
+ ProxiedRequestError,
+ HttpResponseException,
+ SynapseError,
+)
from synapse.types import RoomID, UserID
from synapse.util.async_helpers import Linearizer
from synapse.util.distributor import user_joined_room, user_left_room
-from ._base import BaseHandler
-
logger = logging.getLogger(__name__)
id_server_scheme = "https://"
@@ -67,6 +68,7 @@ class RoomMemberHandler(object):
self.registration_handler = hs.get_registration_handler()
self.profile_handler = hs.get_profile_handler()
self.event_creation_handler = hs.get_event_creation_handler()
+ self.identity_handler = hs.get_handlers().identity_handler
self.member_linearizer = Linearizer(name="member")
@@ -74,13 +76,10 @@ class RoomMemberHandler(object):
self.spam_checker = hs.get_spam_checker()
self.third_party_event_rules = hs.get_third_party_event_rules()
self._server_notices_mxid = self.config.server_notices_mxid
+ self.rewrite_identity_server_urls = self.config.rewrite_identity_server_urls
self._enable_lookup = hs.config.enable_3pid_lookup
self.allow_per_room_profiles = self.config.allow_per_room_profiles
-
- # This is only used to get at ratelimit function, and
- # maybe_kick_guest_users. It's fine there are multiple of these as
- # it doesn't store state.
- self.base_handler = BaseHandler(hs)
+ self.ratelimiter = Ratelimiter()
@abc.abstractmethod
def _remote_join(self, requester, remote_room_hosts, room_id, user, content):
@@ -191,7 +190,7 @@ class RoomMemberHandler(object):
)
if duplicate is not None:
# Discard the new event since this membership change is a no-op.
- defer.returnValue(duplicate)
+ return duplicate
yield self.event_creation_handler.handle_new_client_event(
requester, event, context, extra_users=[target], ratelimit=ratelimit
@@ -233,7 +232,7 @@ class RoomMemberHandler(object):
if prev_member_event.membership == Membership.JOIN:
yield self._user_left_room(target, room_id)
- defer.returnValue(event)
+ return event
@defer.inlineCallbacks
def copy_room_tags_and_direct_to_room(self, old_room_id, new_room_id, user_id):
@@ -285,8 +284,31 @@ class RoomMemberHandler(object):
third_party_signed=None,
ratelimit=True,
content=None,
+ new_room=False,
require_consent=True,
):
+ """Update a users membership in a room
+
+ Args:
+ requester (Requester)
+ target (UserID)
+ room_id (str)
+ action (str): The "action" the requester is performing against the
+ target. One of join/leave/kick/ban/invite/unban.
+ txn_id (str|None): The transaction ID associated with the request,
+                or None if not provided.
+ remote_room_hosts (list[str]|None): List of remote servers to try
+                and join via if the server isn't already in the room.
+ third_party_signed (dict|None): The signed object for third party
+ invites.
+ ratelimit (bool): Whether to apply ratelimiting to this request.
+ content (dict|None): Fields to include in the new events content.
+ new_room (bool): Whether these membership changes are happening
+ as part of a room creation (e.g. initial joins and invites)
+
+ Returns:
+ Deferred[FrozenEvent]
+ """
key = (room_id,)
with (yield self.member_linearizer.queue(key)):
@@ -300,10 +322,11 @@ class RoomMemberHandler(object):
third_party_signed=third_party_signed,
ratelimit=ratelimit,
content=content,
+ new_room=new_room,
require_consent=require_consent,
)
- defer.returnValue(result)
+ return result
@defer.inlineCallbacks
def _update_membership(
@@ -317,6 +340,7 @@ class RoomMemberHandler(object):
third_party_signed=None,
ratelimit=True,
content=None,
+ new_room=False,
require_consent=True,
):
content_specified = bool(content)
@@ -381,8 +405,15 @@ class RoomMemberHandler(object):
)
block_invite = True
+ is_published = yield self.store.is_room_published(room_id)
+
if not self.spam_checker.user_may_invite(
- requester.user.to_string(), target.to_string(), room_id
+ requester.user.to_string(),
+ target.to_string(),
+ third_party_invite=None,
+ room_id=room_id,
+ new_room=new_room,
+ published_room=is_published,
):
logger.info("Blocking invite due to spam checker")
block_invite = True
@@ -423,7 +454,7 @@ class RoomMemberHandler(object):
same_membership = old_membership == effective_membership_state
same_sender = requester.user.to_string() == old_state.sender
if same_sender and same_membership and same_content:
- defer.returnValue(old_state)
+ return old_state
if old_membership in ["ban", "leave"] and action == "kick":
raise AuthError(403, "The target user is not in the room")
@@ -455,8 +486,26 @@ class RoomMemberHandler(object):
# so don't really fit into the general auth process.
raise AuthError(403, "Guest access not allowed")
+ if (
+ self._server_notices_mxid is not None
+ and requester.user.to_string() == self._server_notices_mxid
+ ):
+ # allow the server notices mxid to join rooms
+ is_requester_admin = True
+
+ else:
+ is_requester_admin = yield self.auth.is_server_admin(requester.user)
+
+ inviter = yield self._get_inviter(target.to_string(), room_id)
+ if not is_requester_admin:
+ # We assume that if the spam checker allowed the user to create
+ # a room then they're allowed to join it.
+ if not new_room and not self.spam_checker.user_may_join_room(
+ target.to_string(), room_id, is_invited=inviter is not None
+ ):
+ raise SynapseError(403, "Not allowed to join this room")
+
if not is_host_in_room:
- inviter = yield self._get_inviter(target.to_string(), room_id)
if inviter and not self.hs.is_mine(inviter):
remote_room_hosts.append(inviter.domain)
@@ -473,7 +522,7 @@ class RoomMemberHandler(object):
ret = yield self._remote_join(
requester, remote_room_hosts, room_id, target, content
)
- defer.returnValue(ret)
+ return ret
elif effective_membership_state == Membership.LEAVE:
if not is_host_in_room:
@@ -495,7 +544,7 @@ class RoomMemberHandler(object):
res = yield self._remote_reject_invite(
requester, remote_room_hosts, room_id, target
)
- defer.returnValue(res)
+ return res
res = yield self._local_membership_update(
requester=requester,
@@ -508,7 +557,7 @@ class RoomMemberHandler(object):
content=content,
require_consent=require_consent,
)
- defer.returnValue(res)
+ return res
@defer.inlineCallbacks
def send_membership_event(
@@ -596,11 +645,11 @@ class RoomMemberHandler(object):
"""
guest_access_id = current_state_ids.get((EventTypes.GuestAccess, ""), None)
if not guest_access_id:
- defer.returnValue(False)
+ return False
guest_access = yield self.store.get_event(guest_access_id)
- defer.returnValue(
+ return (
guest_access
and guest_access.content
and "guest_access" in guest_access.content
@@ -635,7 +684,7 @@ class RoomMemberHandler(object):
servers.remove(room_alias.domain)
servers.insert(0, room_alias.domain)
- defer.returnValue((RoomID.from_string(room_id), servers))
+ return (RoomID.from_string(room_id), servers)
@defer.inlineCallbacks
def _get_inviter(self, user_id, room_id):
@@ -643,11 +692,19 @@ class RoomMemberHandler(object):
user_id=user_id, room_id=room_id
)
if invite:
- defer.returnValue(UserID.from_string(invite.sender))
+ return UserID.from_string(invite.sender)
@defer.inlineCallbacks
def do_3pid_invite(
- self, room_id, inviter, medium, address, id_server, requester, txn_id
+ self,
+ room_id,
+ inviter,
+ medium,
+ address,
+ id_server,
+ requester,
+ txn_id,
+ new_room=False,
):
if self.config.block_non_admin_invites:
is_requester_admin = yield self.auth.is_server_admin(requester.user)
@@ -658,7 +715,23 @@ class RoomMemberHandler(object):
# We need to rate limit *before* we send out any 3PID invites, so we
# can't just rely on the standard ratelimiting of events.
- yield self.base_handler.ratelimit(requester)
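+        # Use the dedicated rc_third_party_invite limits rather than the generic
+        # event ratelimiter.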
+ self.ratelimiter.ratelimit(
+ requester.user.to_string(),
+ time_now_s=self.hs.clock.time(),
+ rate_hz=self.hs.config.rc_third_party_invite.per_second,
+ burst_count=self.hs.config.rc_third_party_invite.burst_count,
+ update=True,
+ )
+
+ can_invite = yield self.third_party_event_rules.check_threepid_can_be_invited(
+ medium, address, room_id
+ )
+ if not can_invite:
+ raise SynapseError(
+ 403,
+ "This third-party identifier can not be invited in this room",
+ Codes.FORBIDDEN,
+ )
can_invite = yield self.third_party_event_rules.check_threepid_can_be_invited(
medium, address, room_id
@@ -672,6 +745,19 @@ class RoomMemberHandler(object):
invitee = yield self._lookup_3pid(id_server, medium, address)
+ is_published = yield self.store.is_room_published(room_id)
+
+ if not self.spam_checker.user_may_invite(
+ requester.user.to_string(),
+ invitee,
+ third_party_invite={"medium": medium, "address": address},
+ room_id=room_id,
+ new_room=new_room,
+ published_room=is_published,
+ ):
+ logger.info("Blocking invite due to spam checker")
+ raise SynapseError(403, "Invites have been disabled on this server")
+
if invitee:
yield self.update_membership(
requester, UserID.from_string(invitee), room_id, "invite", txn_id=txn_id
@@ -681,6 +767,20 @@ class RoomMemberHandler(object):
requester, id_server, medium, address, room_id, inviter, txn_id=txn_id
)
+ def _get_id_server_target(self, id_server):
+ """Looks up an id_server's actual http endpoint
+
+ Args:
+ id_server (str): the server name to lookup.
+
+ Returns:
+ the http endpoint to connect to.
+ """
+ if id_server in self.rewrite_identity_server_urls:
+ return self.rewrite_identity_server_urls[id_server]
+
+ return id_server
+
@defer.inlineCallbacks
def _lookup_3pid(self, id_server, medium, address):
"""Looks up a 3pid in the passed identity server.
@@ -694,47 +794,12 @@ class RoomMemberHandler(object):
Returns:
str: the matrix ID of the 3pid, or None if it is not recognized.
"""
- if not self._enable_lookup:
- raise SynapseError(
- 403, "Looking up third-party identifiers is denied from this server"
- )
try:
- data = yield self.simple_http_client.get_json(
- "%s%s/_matrix/identity/api/v1/lookup" % (id_server_scheme, id_server),
- {"medium": medium, "address": address},
- )
-
- if "mxid" in data:
- if "signatures" not in data:
- raise AuthError(401, "No signatures on 3pid binding")
- yield self._verify_any_signature(data, id_server)
- defer.returnValue(data["mxid"])
-
- except IOError as e:
+ data = yield self.identity_handler.lookup_3pid(id_server, medium, address)
+ return data.get("mxid")
+ except ProxiedRequestError as e:
logger.warn("Error from identity server lookup: %s" % (e,))
- defer.returnValue(None)
-
- @defer.inlineCallbacks
- def _verify_any_signature(self, data, server_hostname):
- if server_hostname not in data["signatures"]:
- raise AuthError(401, "No signature from server %s" % (server_hostname,))
- for key_name, signature in data["signatures"][server_hostname].items():
- key_data = yield self.simple_http_client.get_json(
- "%s%s/_matrix/identity/api/v1/pubkey/%s"
- % (id_server_scheme, server_hostname, key_name)
- )
- if "public_key" not in key_data:
- raise AuthError(
- 401, "No public key named %s from %s" % (key_name, server_hostname)
- )
- verify_signed_json(
- data,
- server_hostname,
- decode_verify_key_bytes(
- key_name, decode_base64(key_data["public_key"])
- ),
- )
- return
+ return None
@defer.inlineCallbacks
def _make_and_store_3pid_invite(
@@ -854,9 +919,10 @@ class RoomMemberHandler(object):
user.
"""
+ target = self._get_id_server_target(id_server)
is_url = "%s%s/_matrix/identity/api/v1/store-invite" % (
id_server_scheme,
- id_server,
+ target,
)
invite_config = {
@@ -896,7 +962,7 @@ class RoomMemberHandler(object):
fallback_public_key = {
"public_key": data["public_key"],
"key_validity_url": "%s%s/_matrix/identity/api/v1/pubkey/isvalid"
- % (id_server_scheme, id_server),
+ % (id_server_scheme, target),
}
else:
fallback_public_key = public_keys[0]
@@ -904,7 +970,7 @@ class RoomMemberHandler(object):
if not public_keys:
public_keys.append(fallback_public_key)
display_name = data["display_name"]
- defer.returnValue((token, public_keys, fallback_public_key, display_name))
+ return (token, public_keys, fallback_public_key, display_name)
@defer.inlineCallbacks
def _is_host_in_room(self, current_state_ids):
@@ -913,7 +979,7 @@ class RoomMemberHandler(object):
create_event_id = current_state_ids.get(("m.room.create", ""))
if len(current_state_ids) == 1 and create_event_id:
# We can only get here if we're in the process of creating the room
- defer.returnValue(True)
+ return True
for etype, state_key in current_state_ids:
if etype != EventTypes.Member or not self.hs.is_mine_id(state_key):
@@ -925,16 +991,16 @@ class RoomMemberHandler(object):
continue
if event.membership == Membership.JOIN:
- defer.returnValue(True)
+ return True
- defer.returnValue(False)
+ return False
@defer.inlineCallbacks
def _is_server_notice_room(self, room_id):
if self._server_notices_mxid is None:
- defer.returnValue(False)
+ return False
user_ids = yield self.store.get_users_in_room(room_id)
- defer.returnValue(self._server_notices_mxid in user_ids)
+ return self._server_notices_mxid in user_ids
class RoomMemberMasterHandler(RoomMemberHandler):
@@ -978,7 +1044,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
ret = yield fed_handler.do_remotely_reject_invite(
remote_room_hosts, room_id, target.to_string()
)
- defer.returnValue(ret)
+ return ret
except Exception as e:
# if we were unable to reject the exception, just mark
# it as rejected on our end and plough ahead.
@@ -989,7 +1055,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
logger.warn("Failed to reject invite: %s", e)
yield self.store.locally_reject_invite(target.to_string(), room_id)
- defer.returnValue({})
+ return {}
def _user_joined_room(self, target, room_id):
"""Implements RoomMemberHandler._user_joined_room
diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py
index fc873a3ba6..75e96ae1a2 100644
--- a/synapse/handlers/room_member_worker.py
+++ b/synapse/handlers/room_member_worker.py
@@ -53,7 +53,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
yield self._user_joined_room(user, room_id)
- defer.returnValue(ret)
+ return ret
def _remote_reject_invite(self, requester, remote_room_hosts, room_id, target):
"""Implements RoomMemberHandler._remote_reject_invite
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index ddc4430d03..cd5e90bacb 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -69,7 +69,7 @@ class SearchHandler(BaseHandler):
# Scan through the old room for further predecessors
room_id = predecessor["room_id"]
- defer.returnValue(historical_room_ids)
+ return historical_room_ids
@defer.inlineCallbacks
def search(self, user, content, batch=None):
@@ -186,13 +186,11 @@ class SearchHandler(BaseHandler):
room_ids.intersection_update({batch_group_key})
if not room_ids:
- defer.returnValue(
- {
- "search_categories": {
- "room_events": {"results": [], "count": 0, "highlights": []}
- }
+ return {
+ "search_categories": {
+ "room_events": {"results": [], "count": 0, "highlights": []}
}
- )
+ }
rank_map = {} # event_id -> rank of event
allowed_events = []
@@ -455,4 +453,4 @@ class SearchHandler(BaseHandler):
if global_next_batch:
rooms_cat_res["next_batch"] = global_next_batch
- defer.returnValue({"search_categories": {"room_events": rooms_cat_res}})
+ return {"search_categories": {"room_events": rooms_cat_res}}
diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py
index d90c9e0108..3f50d6de47 100644
--- a/synapse/handlers/set_password.py
+++ b/synapse/handlers/set_password.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 New Vector Ltd
+# Copyright 2017-2018 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -30,12 +31,15 @@ class SetPasswordHandler(BaseHandler):
super(SetPasswordHandler, self).__init__(hs)
self._auth_handler = hs.get_auth_handler()
self._device_handler = hs.get_device_handler()
+ self._password_policy_handler = hs.get_password_policy_handler()
@defer.inlineCallbacks
def set_password(self, user_id, newpassword, requester=None):
if not self.hs.config.password_localdb_enabled:
raise SynapseError(403, "Password change disabled", errcode=Codes.FORBIDDEN)
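+        # Reject the new password if it doesn't comply with the configured
+        # password policy.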
+ self._password_policy_handler.validate_password(newpassword)
+
password_hash = yield self._auth_handler.hash(newpassword)
except_device_id = requester.device_id if requester else None
diff --git a/synapse/handlers/state_deltas.py b/synapse/handlers/state_deltas.py
index 6b364befd5..f065970c40 100644
--- a/synapse/handlers/state_deltas.py
+++ b/synapse/handlers/state_deltas.py
@@ -48,7 +48,7 @@ class StateDeltasHandler(object):
if not event and not prev_event:
logger.debug("Neither event exists: %r %r", prev_event_id, event_id)
- defer.returnValue(None)
+ return None
prev_value = None
value = None
@@ -62,8 +62,8 @@ class StateDeltasHandler(object):
logger.debug("prev_value: %r -> value: %r", prev_value, value)
if value == public_value and prev_value != public_value:
- defer.returnValue(True)
+ return True
elif value != public_value and prev_value == public_value:
- defer.returnValue(False)
+ return False
else:
- defer.returnValue(None)
+ return None
diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py
index a0ee8db988..4449da6669 100644
--- a/synapse/handlers/stats.py
+++ b/synapse/handlers/stats.py
@@ -86,7 +86,7 @@ class StatsHandler(StateDeltasHandler):
# If still None then the initial background update hasn't happened yet
if self.pos is None:
- defer.returnValue(None)
+ return None
# Loop round handling deltas until we're up to date
while True:
@@ -328,6 +328,6 @@ class StatsHandler(StateDeltasHandler):
== "world_readable"
)
):
- defer.returnValue(True)
+ return True
else:
- defer.returnValue(False)
+ return False
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index cd1ac0a27a..4007284e5b 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -263,7 +263,7 @@ class SyncHandler(object):
timeout,
full_state,
)
- defer.returnValue(res)
+ return res
@defer.inlineCallbacks
def _wait_for_sync_for_user(self, sync_config, since_token, timeout, full_state):
@@ -303,7 +303,7 @@ class SyncHandler(object):
lazy_loaded = "false"
non_empty_sync_counter.labels(sync_type, lazy_loaded).inc()
- defer.returnValue(result)
+ return result
def current_sync_for_user(self, sync_config, since_token=None, full_state=False):
"""Get the sync for client needed to match what the server has now.
@@ -317,7 +317,7 @@ class SyncHandler(object):
user_id = user.to_string()
rules = yield self.store.get_push_rules_for_user(user_id)
rules = format_push_rules_for_user(user, rules)
- defer.returnValue(rules)
+ return rules
@defer.inlineCallbacks
def ephemeral_by_room(self, sync_result_builder, now_token, since_token=None):
@@ -378,7 +378,7 @@ class SyncHandler(object):
event_copy = {k: v for (k, v) in iteritems(event) if k != "room_id"}
ephemeral_by_room.setdefault(room_id, []).append(event_copy)
- defer.returnValue((now_token, ephemeral_by_room))
+ return (now_token, ephemeral_by_room)
@defer.inlineCallbacks
def _load_filtered_recents(
@@ -426,8 +426,8 @@ class SyncHandler(object):
recents = []
if not limited or block_all_timeline:
- defer.returnValue(
- TimelineBatch(events=recents, prev_batch=now_token, limited=False)
+ return TimelineBatch(
+ events=recents, prev_batch=now_token, limited=False
)
filtering_factor = 2
@@ -490,12 +490,10 @@ class SyncHandler(object):
prev_batch_token = now_token.copy_and_replace("room_key", room_key)
- defer.returnValue(
- TimelineBatch(
- events=recents,
- prev_batch=prev_batch_token,
- limited=limited or newly_joined_room,
- )
+ return TimelineBatch(
+ events=recents,
+ prev_batch=prev_batch_token,
+ limited=limited or newly_joined_room,
)
@defer.inlineCallbacks
@@ -517,7 +515,7 @@ class SyncHandler(object):
if event.is_state():
state_ids = state_ids.copy()
state_ids[(event.type, event.state_key)] = event.event_id
- defer.returnValue(state_ids)
+ return state_ids
@defer.inlineCallbacks
def get_state_at(self, room_id, stream_position, state_filter=StateFilter.all()):
@@ -549,7 +547,7 @@ class SyncHandler(object):
else:
# no events in this room - so presumably no state
state = {}
- defer.returnValue(state)
+ return state
@defer.inlineCallbacks
def compute_summary(self, room_id, sync_config, batch, state, now_token):
@@ -579,7 +577,6 @@
)
if not last_events:
-            defer.returnValue(None)
-            return
+            return None
last_event = last_events[-1]
@@ -611,14 +609,14 @@ class SyncHandler(object):
if name_id:
name = yield self.store.get_event(name_id, allow_none=True)
if name and name.content.get("name"):
- defer.returnValue(summary)
+ return summary
if canonical_alias_id:
canonical_alias = yield self.store.get_event(
canonical_alias_id, allow_none=True
)
if canonical_alias and canonical_alias.content.get("alias"):
- defer.returnValue(summary)
+ return summary
me = sync_config.user.to_string()
@@ -652,7 +650,7 @@ class SyncHandler(object):
summary["m.heroes"] = sorted([user_id for user_id in gone_user_ids])[0:5]
if not sync_config.filter_collection.lazy_load_members():
- defer.returnValue(summary)
+ return summary
# ensure we send membership events for heroes if needed
cache_key = (sync_config.user.to_string(), sync_config.device_id)
@@ -686,7 +684,7 @@ class SyncHandler(object):
cache.set(s.state_key, s.event_id)
state[(EventTypes.Member, s.state_key)] = s
- defer.returnValue(summary)
+ return summary
def get_lazy_loaded_members_cache(self, cache_key):
cache = self.lazy_loaded_members_cache.get(cache_key)
@@ -871,14 +869,12 @@ class SyncHandler(object):
if state_ids:
state = yield self.store.get_events(list(state_ids.values()))
- defer.returnValue(
- {
- (e.type, e.state_key): e
- for e in sync_config.filter_collection.filter_room_state(
- list(state.values())
- )
- }
- )
+ return {
+ (e.type, e.state_key): e
+ for e in sync_config.filter_collection.filter_room_state(
+ list(state.values())
+ )
+ }
@defer.inlineCallbacks
def unread_notifs_for_room_id(self, room_id, sync_config):
@@ -894,11 +890,11 @@ class SyncHandler(object):
notifs = yield self.store.get_unread_event_push_actions_by_room_for_user(
room_id, sync_config.user.to_string(), last_unread_event_id
)
- defer.returnValue(notifs)
+ return notifs
# There is no new information in this period, so your notification
# count is whatever it was last time.
- defer.returnValue(None)
+ return None
@defer.inlineCallbacks
def generate_sync_result(self, sync_config, since_token=None, full_state=False):
@@ -989,19 +985,17 @@ class SyncHandler(object):
"Sync result for newly joined room %s: %r", room_id, joined_room
)
- defer.returnValue(
- SyncResult(
- presence=sync_result_builder.presence,
- account_data=sync_result_builder.account_data,
- joined=sync_result_builder.joined,
- invited=sync_result_builder.invited,
- archived=sync_result_builder.archived,
- to_device=sync_result_builder.to_device,
- device_lists=device_lists,
- groups=sync_result_builder.groups,
- device_one_time_keys_count=one_time_key_counts,
- next_batch=sync_result_builder.now_token,
- )
+ return SyncResult(
+ presence=sync_result_builder.presence,
+ account_data=sync_result_builder.account_data,
+ joined=sync_result_builder.joined,
+ invited=sync_result_builder.invited,
+ archived=sync_result_builder.archived,
+ to_device=sync_result_builder.to_device,
+ device_lists=device_lists,
+ groups=sync_result_builder.groups,
+ device_one_time_keys_count=one_time_key_counts,
+ next_batch=sync_result_builder.now_token,
)
@measure_func("_generate_sync_entry_for_groups")
@@ -1124,11 +1118,9 @@ class SyncHandler(object):
# Remove any users that we still share a room with.
newly_left_users -= users_who_share_room
- defer.returnValue(
- DeviceLists(changed=users_that_have_changed, left=newly_left_users)
- )
+ return DeviceLists(changed=users_that_have_changed, left=newly_left_users)
else:
- defer.returnValue(DeviceLists(changed=[], left=[]))
+ return DeviceLists(changed=[], left=[])
@defer.inlineCallbacks
def _generate_sync_entry_for_to_device(self, sync_result_builder):
@@ -1225,7 +1217,7 @@ class SyncHandler(object):
sync_result_builder.account_data = account_data_for_user
- defer.returnValue(account_data_by_room)
+ return account_data_by_room
@defer.inlineCallbacks
def _generate_sync_entry_for_presence(
@@ -1325,7 +1317,7 @@ class SyncHandler(object):
)
if not tags_by_room:
logger.debug("no-oping sync")
- defer.returnValue(([], [], [], []))
+ return ([], [], [], [])
ignored_account_data = yield self.store.get_global_account_data_by_type_for_user(
"m.ignored_user_list", user_id=user_id
@@ -1388,13 +1380,11 @@ class SyncHandler(object):
newly_left_users -= newly_joined_or_invited_users
- defer.returnValue(
- (
- newly_joined_rooms,
- newly_joined_or_invited_users,
- newly_left_rooms,
- newly_left_users,
- )
+ return (
+ newly_joined_rooms,
+ newly_joined_or_invited_users,
+ newly_left_rooms,
+ newly_left_users,
)
@defer.inlineCallbacks
@@ -1414,13 +1404,13 @@ class SyncHandler(object):
)
if rooms_changed:
- defer.returnValue(True)
+ return True
stream_id = RoomStreamToken.parse_stream_token(since_token.room_key).stream
for room_id in sync_result_builder.joined_room_ids:
if self.store.has_room_changed_since(room_id, stream_id):
- defer.returnValue(True)
- defer.returnValue(False)
+ return True
+ return False
@defer.inlineCallbacks
def _get_rooms_changed(self, sync_result_builder, ignored_users):
@@ -1637,7 +1627,7 @@ class SyncHandler(object):
)
room_entries.append(entry)
- defer.returnValue((room_entries, invited, newly_joined_rooms, newly_left_rooms))
+ return (room_entries, invited, newly_joined_rooms, newly_left_rooms)
@defer.inlineCallbacks
def _get_all_rooms(self, sync_result_builder, ignored_users):
@@ -1711,7 +1701,7 @@ class SyncHandler(object):
)
)
- defer.returnValue((room_entries, invited, []))
+ return (room_entries, invited, [])
@defer.inlineCallbacks
def _generate_room_entry(
@@ -1912,7 +1902,7 @@ class SyncHandler(object):
joined_room_ids.add(room_id)
joined_room_ids = frozenset(joined_room_ids)
- defer.returnValue(joined_room_ids)
+ return joined_room_ids
def _action_has_highlight(actions):
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index c3e0c8fc7e..6b661aa93d 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -140,7 +140,7 @@ class TypingHandler(object):
if was_present:
# No point sending another notification
- defer.returnValue(None)
+ return None
self._push_update(member=member, typing=True)
@@ -173,7 +173,7 @@ class TypingHandler(object):
def _stopped_typing(self, member):
if member.user_id not in self._room_typing.get(member.room_id, set()):
# No point
- defer.returnValue(None)
+ return None
self._member_typing_until.pop(member, None)
self._member_last_federation_poke.pop(member, None)
diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py
index 5de9630950..e53669e40d 100644
--- a/synapse/handlers/user_directory.py
+++ b/synapse/handlers/user_directory.py
@@ -133,7 +133,7 @@ class UserDirectoryHandler(StateDeltasHandler):
# If still None then the initial background update hasn't happened yet
if self.pos is None:
- defer.returnValue(None)
+ return None
# Loop round handling deltas until we're up to date
while True:
|