From fd99787162113857119c033355548c5b3769a309 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Thu, 27 Sep 2018 14:53:58 -0600 Subject: Incorporate Dave's work for GDPR login flows As per https://github.com/vector-im/riot-web/issues/7168#issuecomment-419996117 --- synapse/handlers/auth.py | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'synapse/handlers') diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 2a5eab124f..f08a2cdd7e 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -59,6 +59,7 @@ class AuthHandler(BaseHandler): LoginType.EMAIL_IDENTITY: self._check_email_identity, LoginType.MSISDN: self._check_msisdn, LoginType.DUMMY: self._check_dummy_auth, + LoginType.TERMS: self._check_terms_auth, } self.bcrypt_rounds = hs.config.bcrypt_rounds @@ -431,6 +432,9 @@ class AuthHandler(BaseHandler): def _check_dummy_auth(self, authdict, _): return defer.succeed(True) + def _check_terms_auth(self, authdict, _): + return defer.succeed(True) + @defer.inlineCallbacks def _check_threepid(self, medium, authdict): if 'threepid_creds' not in authdict: -- cgit 1.4.1 From 149c4f176563bd8c976d9c4601825753f7292b12 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Wed, 3 Oct 2018 15:25:53 -0600 Subject: Supply params for terms auth stage As per https://github.com/matrix-org/matrix-doc/pull/1692 --- synapse/handlers/auth.py | 9 +++++++++ 1 file changed, 9 insertions(+) (limited to 'synapse/handlers') diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index f08a2cdd7e..d6a19b74e9 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -466,6 +466,15 @@ class AuthHandler(BaseHandler): def _get_params_recaptcha(self): return {"public_key": self.hs.config.recaptcha_public_key} + def _get_params_terms(self): + return { + "policies": [{ + "name": "Privacy Policy", + "version": self.hs.config.user_consent_version, + "url": "%s/_matrix/consent/public" % (self.hs.config.public_baseurl,), + }], + } + def _auth_dict_for_flows(self, flows, session): public_flows = [] for f in flows: -- cgit 1.4.1 From 537d0b7b3632789e40cec13f3120151098f11d75 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Wed, 3 Oct 2018 17:50:11 -0600 Subject: Use a flag rather than a new route for the public policy This also means that the template now has optional parameters, which will need to be documented somehow. 
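For context before the diff: after the two commits above, a client registering against this code sees an extra m.login.terms stage in its UI-auth flows, with parameters built by _get_params_terms, and completes the stage by simply acknowledging it (the server-side _check_terms_auth always succeeds). A rough sketch of that exchange, assuming a public_baseurl of https://example.org and a user_consent_version of "1.0" (both placeholders, not values taken from these patches):

# Illustrative shape of the m.login.terms params a client receives,
# mirroring _get_params_terms as of this point in the series.
terms_params = {
    "policies": [{
        "name": "Privacy Policy",
        "version": "1.0",
        "url": "https://example.org/_matrix/consent?public=true",
    }],
}

# Illustrative auth dict the client then submits to finish the stage; since
# _check_terms_auth unconditionally returns True, only the session id matters.
auth_submission = {
    "auth": {
        "type": "m.login.terms",
        "session": "<session id from the 401 response>",
    },
}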
--- synapse/handlers/auth.py | 2 +- synapse/rest/client/v2_alpha/auth.py | 4 ++-- synapse/rest/consent/consent_resource.py | 36 +++++++++++++++++++------------- 3 files changed, 25 insertions(+), 17 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index d6a19b74e9..42d1336d6e 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -471,7 +471,7 @@ class AuthHandler(BaseHandler): "policies": [{ "name": "Privacy Policy", "version": self.hs.config.user_consent_version, - "url": "%s/_matrix/consent/public" % (self.hs.config.public_baseurl,), + "url": "%s/_matrix/consent?public=true" % (self.hs.config.public_baseurl,), }], } diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py index f86f09adcf..77a5ea66f3 100644 --- a/synapse/rest/client/v2_alpha/auth.py +++ b/synapse/rest/client/v2_alpha/auth.py @@ -164,7 +164,7 @@ class AuthRestServlet(RestServlet): html = TERMS_TEMPLATE % { 'session': session, - 'terms_url': "%s/_matrix/consent/public" % ( + 'terms_url': "%s/_matrix/consent?public=true" % ( self.hs.config.public_baseurl, ), 'myurl': "%s/auth/%s/fallback/web" % ( @@ -244,7 +244,7 @@ class AuthRestServlet(RestServlet): else: html = TERMS_TEMPLATE % { 'session': session, - 'terms_url': "%s/_matrix/consent/public" % ( + 'terms_url': "%s/_matrix/consent?public=true" % ( self.hs.config.public_baseurl, ), 'myurl': "%s/auth/%s/fallback/web" % ( diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index 7362e1858d..7a5786f164 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -30,7 +30,7 @@ from twisted.web.server import NOT_DONE_YET from synapse.api.errors import NotFoundError, StoreError, SynapseError from synapse.config import ConfigError from synapse.http.server import finish_request, wrap_html_request_handler -from synapse.http.servlet import parse_string +from synapse.http.servlet import parse_string, parse_boolean from synapse.types import UserID # language to use for the templates. 
TODO: figure this out from Accept-Language @@ -137,27 +137,35 @@ class ConsentResource(Resource): request (twisted.web.http.Request): """ - version = parse_string(request, "v", - default=self._default_consent_version) - username = parse_string(request, "u", required=True) - userhmac = parse_string(request, "h", required=True, encoding=None) + public_version = parse_boolean(request, "public", default=False) - self._check_hash(username, userhmac) + version = self._default_consent_version + username = None + userhmac = None + has_consented = False + if not public_version: + version = parse_string(request, "v", + default=self._default_consent_version) + username = parse_string(request, "u", required=True) + userhmac = parse_string(request, "h", required=True, encoding=None) - if username.startswith('@'): - qualified_user_id = username - else: - qualified_user_id = UserID(username, self.hs.hostname).to_string() + self._check_hash(username, userhmac) - u = yield self.store.get_user_by_id(qualified_user_id) - if u is None: - raise NotFoundError("Unknown user") + if username.startswith('@'): + qualified_user_id = username + else: + qualified_user_id = UserID(username, self.hs.hostname).to_string() + + u = yield self.store.get_user_by_id(qualified_user_id) + if u is None: + raise NotFoundError("Unknown user") + has_consented = u["consent_version"] == version try: self._render_template( request, "%s.html" % (version,), user=username, userhmac=userhmac, version=version, - has_consented=(u["consent_version"] == version), + has_consented=has_consented, public_version=public_version, ) except TemplateNotFound: raise NotFoundError("Unknown policy version") -- cgit 1.4.1 From dd99db846d76d511fc7bbea80897b9101782ec1f Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Fri, 12 Oct 2018 18:03:27 -0600 Subject: Update login terms structure for the proposed language support --- synapse/handlers/auth.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 42d1336d6e..9038fee264 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -468,11 +468,14 @@ class AuthHandler(BaseHandler): def _get_params_terms(self): return { - "policies": [{ - "name": "Privacy Policy", + "policies": { + "privacy_policy": { "version": self.hs.config.user_consent_version, - "url": "%s/_matrix/consent?public=true" % (self.hs.config.public_baseurl,), - }], + "en": { + "name": "Privacy Policy", + "url": "%s/_matrix/consent" % (self.hs.config.public_baseurl,), + }, + }, } def _auth_dict_for_flows(self, flows, session): -- cgit 1.4.1 From 762a0982aab04ebec1e7a00bc03d26aefa8461c4 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 15 Oct 2018 14:46:09 -0600 Subject: Python is hard --- synapse/handlers/auth.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 9038fee264..f1befeb575 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -469,11 +469,12 @@ class AuthHandler(BaseHandler): def _get_params_terms(self): return { "policies": { - "privacy_policy": { - "version": self.hs.config.user_consent_version, - "en": { - "name": "Privacy Policy", - "url": "%s/_matrix/consent" % (self.hs.config.public_baseurl,), + "privacy_policy": { + "version": self.hs.config.user_consent_version, + "en": { + "name": "Privacy Policy", + "url": "%s/_matrix/consent" % 
(self.hs.config.public_baseurl,), + }, }, }, } -- cgit 1.4.1 From 442734ff9e7a4ac09c54a58f8b5467379673914f Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 15 Oct 2018 14:56:13 -0600 Subject: Ensure the terms params are actually provided --- synapse/handlers/auth.py | 1 + 1 file changed, 1 insertion(+) (limited to 'synapse/handlers') diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index f1befeb575..12979f6ed3 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -486,6 +486,7 @@ class AuthHandler(BaseHandler): get_params = { LoginType.RECAPTCHA: self._get_params_recaptcha, + LoginType.TERMS: self._get_params_terms, } params = {} -- cgit 1.4.1 From a8ed93a4b55a19a478c9aba929bfea07e691abbf Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 15 Oct 2018 16:10:29 -0600 Subject: pep8 --- synapse/handlers/auth.py | 2 +- synapse/rest/client/v2_alpha/auth.py | 3 --- synapse/rest/client/v2_alpha/register.py | 12 ++---------- 3 files changed, 3 insertions(+), 14 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 12979f6ed3..bef796fd0c 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -469,7 +469,7 @@ class AuthHandler(BaseHandler): def _get_params_terms(self): return { "policies": { - "privacy_policy": { + "privacy_policy": { "version": self.hs.config.user_consent_version, "en": { "name": "Privacy Policy", diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py index ec583ad16a..0b2933fe8e 100644 --- a/synapse/rest/client/v2_alpha/auth.py +++ b/synapse/rest/client/v2_alpha/auth.py @@ -158,9 +158,6 @@ class AuthRestServlet(RestServlet): defer.returnValue(None) elif stagetype == LoginType.TERMS: session = request.args['session'][0] - authdict = { - 'session': session, - } html = TERMS_TEMPLATE % { 'session': session, diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 851ce6e9a4..c5214330ad 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -359,19 +359,11 @@ class RegisterRestServlet(RestServlet): [LoginType.MSISDN, LoginType.EMAIL_IDENTITY] ]) + # Append m.login.terms to all flows if we're requiring consent if self.hs.config.block_events_without_consent_error is not None: new_flows = [] for flow in flows: - # To only allow registration if completing GDPR auth, - # making clients that don't support it use fallback auth. flow.append(LoginType.TERMS) - - # or to duplicate all the flows above with the GDPR flow on the - # end so clients that support it can use it but clients that don't - # continue to consent via the DM from server notices bot. - #new_flows.extend([ - # flow + [LoginType.TERMS] - #]) flows.extend(new_flows) auth_result, params, session_id = yield self.auth_handler.check_auth( @@ -461,7 +453,7 @@ class RegisterRestServlet(RestServlet): ) if auth_result and LoginType.TERMS in auth_result: - logger.info("User %s has consented to the privacy policy" % registered_user_id) + logger.info("%s has consented to the privacy policy" % registered_user_id) yield self.store.user_set_consent_version( registered_user_id, self.hs.config.user_consent_version, ) -- cgit 1.4.1 From 871c4abfecfd14acda13e3f25c7d040f848a9a32 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 25 Oct 2018 17:40:41 +0100 Subject: Factor _generate_room_id out of create_room we're going to need this for room upgrades. 
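The refactor below does not change behaviour (still a random 18-character localpart with up to five attempts on StoreError); it only exposes the logic as a helper so the upcoming room-upgrade path can allocate a room id without going through create_room. Roughly, the intended call sites, using the names from the diffs in this series, look like:

# In create_room, replacing the inline retry loop:
room_id = yield self._generate_room_id(creator_id=user_id, is_public=is_public)

# In the upgrade path added later in this series (sketch only; visibility
# handling is refined in a follow-up commit):
new_room_id = yield self._generate_room_id(creator_id=user_id, is_public=False)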
--- synapse/handlers/room.py | 45 +++++++++++++++++++++++---------------------- 1 file changed, 23 insertions(+), 22 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 3ba92bdb4c..000a22b07c 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -165,28 +165,7 @@ class RoomCreationHandler(BaseHandler): visibility = config.get("visibility", None) is_public = visibility == "public" - # autogen room IDs and try to create it. We may clash, so just - # try a few times till one goes through, giving up eventually. - attempts = 0 - room_id = None - while attempts < 5: - try: - random_string = stringutils.random_string(18) - gen_room_id = RoomID( - random_string, - self.hs.hostname, - ) - yield self.store.store_room( - room_id=gen_room_id.to_string(), - room_creator_user_id=user_id, - is_public=is_public - ) - room_id = gen_room_id.to_string() - break - except StoreError: - attempts += 1 - if not room_id: - raise StoreError(500, "Couldn't generate a room ID.") + room_id = yield self._generate_room_id(creator_id=user_id, is_public=is_public) if room_alias: directory_handler = self.hs.get_handlers().directory_handler @@ -427,6 +406,28 @@ class RoomCreationHandler(BaseHandler): content=content, ) + @defer.inlineCallbacks + def _generate_room_id(self, creator_id, is_public): + # autogen room IDs and try to create it. We may clash, so just + # try a few times till one goes through, giving up eventually. + attempts = 0 + while attempts < 5: + try: + random_string = stringutils.random_string(18) + gen_room_id = RoomID( + random_string, + self.hs.hostname, + ).to_string() + yield self.store.store_room( + room_id=gen_room_id, + room_creator_user_id=creator_id, + is_public=is_public, + ) + defer.returnValue(gen_room_id) + except StoreError: + attempts += 1 + raise StoreError(500, "Couldn't generate a room ID.") + class RoomContextHandler(object): def __init__(self, hs): -- cgit 1.4.1 From 7f7b2cd3de192816bcb0225774a22617989aec37 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 25 Oct 2018 17:42:37 +0100 Subject: Make room_member_handler a member of RoomCreationHandler ... 
to save passing it into `_send_events_for_new_room` --- synapse/handlers/register.py | 6 ++++-- synapse/handlers/room.py | 9 +++------ 2 files changed, 7 insertions(+), 8 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index e9d7b25a36..7b4549223f 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -50,7 +50,6 @@ class RegistrationHandler(BaseHandler): self._auth_handler = hs.get_auth_handler() self.profile_handler = hs.get_profile_handler() self.user_directory_handler = hs.get_user_directory_handler() - self.room_creation_handler = self.hs.get_room_creation_handler() self.captcha_client = CaptchaServerHttpClient(hs) self._next_generated_user_id = None @@ -241,7 +240,10 @@ class RegistrationHandler(BaseHandler): else: # create room expects the localpart of the room alias room_alias_localpart = room_alias.localpart - yield self.room_creation_handler.create_room( + + # getting the RoomCreationHandler during init gives a dependency + # loop + yield self.hs.get_room_creation_handler().create_room( fake_requester, config={ "preset": "public_chat", diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 000a22b07c..d03d2cd7be 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -73,6 +73,7 @@ class RoomCreationHandler(BaseHandler): self.spam_checker = hs.get_spam_checker() self.event_creation_handler = hs.get_event_creation_handler() + self.room_member_handler = hs.get_room_member_handler() @defer.inlineCallbacks def create_room(self, requester, config, ratelimit=True, @@ -195,12 +196,9 @@ class RoomCreationHandler(BaseHandler): # override any attempt to set room versions via the creation_content creation_content["room_version"] = room_version - room_member_handler = self.hs.get_room_member_handler() - yield self._send_events_for_new_room( requester, room_id, - room_member_handler, preset_config=preset_config, invite_list=invite_list, initial_state=initial_state, @@ -242,7 +240,7 @@ class RoomCreationHandler(BaseHandler): if is_direct: content["is_direct"] = is_direct - yield room_member_handler.update_membership( + yield self.room_member_handler.update_membership( requester, UserID.from_string(invitee), room_id, @@ -280,7 +278,6 @@ class RoomCreationHandler(BaseHandler): self, creator, # A Requester object. 
room_id, - room_member_handler, preset_config, invite_list, initial_state, @@ -325,7 +322,7 @@ class RoomCreationHandler(BaseHandler): content=creation_content, ) - yield room_member_handler.update_membership( + yield self.room_member_handler.update_membership( creator, creator.user, room_id, -- cgit 1.4.1 From e1948175ee7fc469c985b58a01ecc2eb577e5e0a Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 25 Oct 2018 17:50:06 +0100 Subject: Allow power_level_content_override=None for _send_events_for_new_room --- synapse/handlers/room.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index d03d2cd7be..d42c2c41c4 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -204,7 +204,7 @@ class RoomCreationHandler(BaseHandler): initial_state=initial_state, creation_content=creation_content, room_alias=room_alias, - power_level_content_override=config.get("power_level_content_override", {}), + power_level_content_override=config.get("power_level_content_override"), creator_join_profile=creator_join_profile, ) @@ -282,9 +282,9 @@ class RoomCreationHandler(BaseHandler): invite_list, initial_state, creation_content, - room_alias, - power_level_content_override, - creator_join_profile, + room_alias=None, + power_level_content_override=None, + creator_join_profile=None, ): def create(etype, content, **kwargs): e = { @@ -364,7 +364,8 @@ class RoomCreationHandler(BaseHandler): for invitee in invite_list: power_level_content["users"][invitee] = 100 - power_level_content.update(power_level_content_override) + if power_level_content_override: + power_level_content.update(power_level_content_override) yield send( etype=EventTypes.PowerLevels, -- cgit 1.4.1 From 0f7d1c99061075fe54a37cfe785184f095addf78 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 22 Aug 2018 10:57:54 +0100 Subject: Basic initial support for room upgrades Currently just creates a new, empty, room, and sends a tombstone in the old room. 
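Alongside the handler changes, this commit adds a client-facing servlet (RoomUpgradeRestServlet, diff below) so the upgrade can be driven over the client-server API: a POST to the room's /upgrade endpoint carrying the target room version, answered with the id of the replacement room. A minimal sketch of that call, assuming the requests library and treating the homeserver name, access token and room id as placeholders:

# Illustrative client call for the endpoint added below; the URL, request body
# and response shape come from RoomUpgradeRestServlet, everything else is made up.
import requests

resp = requests.post(
    "https://example.org/_matrix/client/r0/rooms/!oldroom:example.org/upgrade",
    params={"access_token": "<token>"},
    json={"new_version": "2"},
)
# Expected success body: {"replacement_room": "!newroom:example.org"}
# An unknown room version is rejected with a 400 and Codes.UNSUPPORTED_ROOM_VERSION.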
--- synapse/api/constants.py | 1 + synapse/handlers/room.py | 121 +++++++++++++++++++++ synapse/rest/__init__.py | 2 + .../client/v2_alpha/room_upgrade_rest_servlet.py | 78 +++++++++++++ synapse/server.pyi | 6 + 5 files changed, 208 insertions(+) create mode 100644 synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py (limited to 'synapse/handlers') diff --git a/synapse/api/constants.py b/synapse/api/constants.py index c2630c4c64..5565e516d6 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -61,6 +61,7 @@ class LoginType(object): class EventTypes(object): Member = "m.room.member" Create = "m.room.create" + Tombstone = "m.room.tombstone" JoinRules = "m.room.join_rules" PowerLevels = "m.room.power_levels" Aliases = "m.room.aliases" diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index d42c2c41c4..3cce6f6150 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -36,6 +36,7 @@ from synapse.api.errors import AuthError, Codes, StoreError, SynapseError from synapse.storage.state import StateFilter from synapse.types import RoomAlias, RoomID, RoomStreamToken, StreamToken, UserID from synapse.util import stringutils +from synapse.util.async_helpers import Linearizer from synapse.visibility import filter_events_for_client from ._base import BaseHandler @@ -75,6 +76,124 @@ class RoomCreationHandler(BaseHandler): self.event_creation_handler = hs.get_event_creation_handler() self.room_member_handler = hs.get_room_member_handler() + # linearizer to stop two upgrades happening at once + self._upgrade_linearizer = Linearizer("room_upgrade_linearizer") + + @defer.inlineCallbacks + def upgrade_room(self, requester, old_room_id, new_version): + """Replace a room with a new room with a different version + + Args: + requester (synapse.types.Requester): the user requesting the upgrade + old_room_id (unicode): the id of the room to be replaced + new_version (unicode): the new room version to use + + Returns: + Deferred[unicode]: the new room id + """ + yield self.ratelimit(requester) + + user_id = requester.user.to_string() + + with (yield self._upgrade_linearizer.queue(old_room_id)): + # start by allocating a new room id + is_public = False # XXX fixme + new_room_id = yield self._generate_room_id( + creator_id=user_id, is_public=is_public, + ) + + # we create and auth the tombstone event before properly creating the new + # room, to check our user has perms in the old room. 
+ tombstone_event, tombstone_context = ( + yield self.event_creation_handler.create_event( + requester, { + "type": EventTypes.Tombstone, + "state_key": "", + "room_id": old_room_id, + "sender": user_id, + "content": { + "body": "This room has been replaced", + "replacement_room": new_room_id, + } + }, + token_id=requester.access_token_id, + ) + ) + yield self.auth.check_from_context(tombstone_event, tombstone_context) + + yield self.clone_exiting_room( + requester, + old_room_id=old_room_id, + new_room_id=new_room_id, + new_room_version=new_version, + tombstone_event_id=tombstone_event.event_id, + ) + + # now send the tombstone + yield self.event_creation_handler.send_nonmember_event( + requester, tombstone_event, tombstone_context, + ) + + # XXX send a power_levels in the old room, if possible + + defer.returnValue(new_room_id) + + @defer.inlineCallbacks + def clone_exiting_room( + self, requester, old_room_id, new_room_id, new_room_version, + tombstone_event_id, + ): + """Populate a new room based on an old room + + Args: + requester (synapse.types.Requester): the user requesting the upgrade + old_room_id (unicode): the id of the room to be replaced + new_room_id (unicode): the id to give the new room (should already have been + created with _gemerate_room_id()) + new_room_version (unicode): the new room version to use + tombstone_event_id (unicode|str): the ID of the tombstone event in the old + room. + Returns: + Deferred[None] + """ + user_id = requester.user.to_string() + + if not self.spam_checker.user_may_create_room(user_id): + raise SynapseError(403, "You are not permitted to create rooms") + + # XXX check alias is free + # canonical_alias = None + + # XXX create association in directory handler + # XXX preset + + preset_config = RoomCreationPreset.PRIVATE_CHAT + + creation_content = { + "room_version": new_room_version, + "predecessor": { + "room_id": old_room_id, + "event_id": tombstone_event_id, + } + } + + initial_state = OrderedDict() + + yield self._send_events_for_new_room( + requester, + new_room_id, + preset_config=preset_config, + invite_list=[], + initial_state=initial_state, + creation_content=creation_content, + ) + + # XXX name + # XXX topic + # XXX invites/joins + # XXX 3pid invites + # XXX directory_handler.send_room_alias_update_event + @defer.inlineCallbacks def create_room(self, requester, config, ratelimit=True, creator_join_profile=None): @@ -416,6 +535,8 @@ class RoomCreationHandler(BaseHandler): random_string, self.hs.hostname, ).to_string() + if isinstance(gen_room_id, bytes): + gen_room_id = gen_room_id.decode('utf-8') yield self.store.store_room( room_id=gen_room_id, room_creator_user_id=creator_id, diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index 4856822a5d..5f35c2d1be 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -47,6 +47,7 @@ from synapse.rest.client.v2_alpha import ( register, report_event, room_keys, + room_upgrade_rest_servlet, sendtodevice, sync, tags, @@ -116,3 +117,4 @@ class ClientRestResource(JsonResource): sendtodevice.register_servlets(hs, client_resource) user_directory.register_servlets(hs, client_resource) groups.register_servlets(hs, client_resource) + room_upgrade_rest_servlet.register_servlets(hs, client_resource) diff --git a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py new file mode 100644 index 0000000000..1b195f90c4 --- /dev/null +++ b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py @@ -0,0 +1,78 @@ 
+# -*- coding: utf-8 -*- +# Copyright 2016 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from twisted.internet import defer + +from synapse.api.constants import KNOWN_ROOM_VERSIONS +from synapse.api.errors import Codes, SynapseError +from synapse.http.servlet import ( + RestServlet, + assert_params_in_dict, + parse_json_object_from_request, +) + +from ._base import client_v2_patterns + +logger = logging.getLogger(__name__) + + +class RoomUpgradeRestServlet(RestServlet): + PATTERNS = client_v2_patterns( + # /rooms/$roomid/upgrade + "/rooms/(?P[^/]*)/upgrade$", + v2_alpha=False, + ) + + def __init__(self, hs): + """ + + Args: + hs (synapse.server.HomeServer): + """ + super(RoomUpgradeRestServlet, self).__init__() + self._hs = hs + self._room_creation_handler = hs.get_room_creation_handler() + self._auth = hs.get_auth() + + @defer.inlineCallbacks + def on_POST(self, request, room_id): + requester = yield self._auth.get_user_by_req(request) + + content = parse_json_object_from_request(request) + assert_params_in_dict(content, ("new_version", )) + new_version = content["new_version"] + + if new_version not in KNOWN_ROOM_VERSIONS: + raise SynapseError( + 400, + "Your homeserver does not support this room version", + Codes.UNSUPPORTED_ROOM_VERSION, + ) + + new_room_id = yield self._room_creation_handler.upgrade_room( + requester, room_id, new_version + ) + + ret = { + "replacement_room": new_room_id, + } + + defer.returnValue((200, ret)) + + +def register_servlets(hs, http_server): + RoomUpgradeRestServlet(hs).register(http_server) diff --git a/synapse/server.pyi b/synapse/server.pyi index ce28486233..06cd083a74 100644 --- a/synapse/server.pyi +++ b/synapse/server.pyi @@ -7,6 +7,9 @@ import synapse.handlers.auth import synapse.handlers.deactivate_account import synapse.handlers.device import synapse.handlers.e2e_keys +import synapse.handlers.room +import synapse.handlers.room_member +import synapse.handlers.message import synapse.handlers.set_password import synapse.rest.media.v1.media_repository import synapse.server_notices.server_notices_manager @@ -50,6 +53,9 @@ class HomeServer(object): def get_room_creation_handler(self) -> synapse.handlers.room.RoomCreationHandler: pass + def get_room_member_handler(self) -> synapse.handlers.room_member.RoomMemberHandler: + pass + def get_event_creation_handler(self) -> synapse.handlers.message.EventCreationHandler: pass -- cgit 1.4.1 From 4cda300058ba68f97c032923ebf429f437eddd8e Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 12 Oct 2018 11:13:40 +0100 Subject: preserve room visibility --- synapse/handlers/room.py | 8 +++++--- synapse/storage/room.py | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 3cce6f6150..2f9eb8ef4c 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -32,7 +32,7 @@ from synapse.api.constants import ( JoinRules, RoomCreationPreset, ) -from synapse.api.errors 
import AuthError, Codes, StoreError, SynapseError +from synapse.api.errors import AuthError, Codes, NotFoundError, StoreError, SynapseError from synapse.storage.state import StateFilter from synapse.types import RoomAlias, RoomID, RoomStreamToken, StreamToken, UserID from synapse.util import stringutils @@ -97,9 +97,11 @@ class RoomCreationHandler(BaseHandler): with (yield self._upgrade_linearizer.queue(old_room_id)): # start by allocating a new room id - is_public = False # XXX fixme + r = yield self.store.get_room(old_room_id) + if r is None: + raise NotFoundError("Unknown room id %s" % (old_room_id,)) new_room_id = yield self._generate_room_id( - creator_id=user_id, is_public=is_public, + creator_id=user_id, is_public=r["is_public"], ) # we create and auth the tombstone event before properly creating the new diff --git a/synapse/storage/room.py b/synapse/storage/room.py index 61013b8919..41c65e112a 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -47,7 +47,7 @@ class RoomWorkerStore(SQLBaseStore): Args: room_id (str): The ID of the room to retrieve. Returns: - A namedtuple containing the room information, or an empty list. + A dict containing the room information, or None if the room is unknown. """ return self._simple_select_one( table="rooms", -- cgit 1.4.1 From 1b9f253e208ea3a471594bde52366e3abf54fc1a Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 12 Oct 2018 12:05:18 +0100 Subject: preserve PLs --- synapse/handlers/room.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 2f9eb8ef4c..40ca12f1b7 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -179,7 +179,13 @@ class RoomCreationHandler(BaseHandler): } } - initial_state = OrderedDict() + initial_state = dict() + + old_room_state_ids = yield self.store.get_current_state_ids(old_room_id) + pl_event_id = old_room_state_ids.get((EventTypes.PowerLevels, "")) + if pl_event_id: + pl_event = yield self.store.get_event(pl_event_id) + initial_state[(EventTypes.PowerLevels, "")] = pl_event.content yield self._send_events_for_new_room( requester, -- cgit 1.4.1 From 3a263bf3aec6b9709fed391671f8faec334dc739 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 12 Oct 2018 17:05:48 +0100 Subject: copy state --- synapse/handlers/room.py | 37 ++++++++++++++++++++++++++----------- 1 file changed, 26 insertions(+), 11 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 40ca12f1b7..ab92ca5e78 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -167,9 +167,6 @@ class RoomCreationHandler(BaseHandler): # canonical_alias = None # XXX create association in directory handler - # XXX preset - - preset_config = RoomCreationPreset.PRIVATE_CHAT creation_content = { "room_version": new_room_version, @@ -181,23 +178,41 @@ class RoomCreationHandler(BaseHandler): initial_state = dict() - old_room_state_ids = yield self.store.get_current_state_ids(old_room_id) - pl_event_id = old_room_state_ids.get((EventTypes.PowerLevels, "")) - if pl_event_id: - pl_event = yield self.store.get_event(pl_event_id) - initial_state[(EventTypes.PowerLevels, "")] = pl_event.content + types_to_copy = ( + (EventTypes.PowerLevels, ""), + (EventTypes.JoinRules, ""), + (EventTypes.Name, ""), + (EventTypes.Topic, ""), + (EventTypes.RoomHistoryVisibility, ""), + (EventTypes.GuestAccess, "") + ) + + old_room_state_ids = yield 
self.store.get_filtered_current_state_ids( + old_room_id, StateFilter.from_types(types_to_copy), + ) + # map from event_id to BaseEvent + old_room_state_events = yield self.store.get_events(old_room_state_ids.values()) + + for k in types_to_copy: + old_event_id = old_room_state_ids.get(k) + if old_event_id: + old_event = old_room_state_events.get(old_event_id) + if old_event: + initial_state[k] = old_event.content yield self._send_events_for_new_room( requester, new_room_id, - preset_config=preset_config, + + # we expect to override all the presets with initial_state, so this is + # somewhat arbitrary. + preset_config=RoomCreationPreset.PRIVATE_CHAT, + invite_list=[], initial_state=initial_state, creation_content=creation_content, ) - # XXX name - # XXX topic # XXX invites/joins # XXX 3pid invites # XXX directory_handler.send_room_alias_update_event -- cgit 1.4.1 From e6babc27d51c3de04cdaedc40439b7ddb56b2e12 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 24 Oct 2018 23:14:36 +0100 Subject: restrict PLs in old room --- synapse/handlers/room.py | 44 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 43 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index ab92ca5e78..d016f0e8b8 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -136,7 +136,49 @@ class RoomCreationHandler(BaseHandler): requester, tombstone_event, tombstone_context, ) - # XXX send a power_levels in the old room, if possible + # ... and restrict the PLs in the old room, if possible. + old_room_pl_state = yield self.state_handler.get_current_state( + old_room_id, + event_type=EventTypes.PowerLevels, + latest_event_ids=(tombstone_event.event_id, ), + ) + + if old_room_pl_state is None: + logger.warning( + "Not supported: upgrading a room with no PL event. Not setting PLs " + "in old room.", + ) + else: + pl_content = dict(old_room_pl_state.content) + users_default = int(pl_content.get("users_default", 0)) + restricted_level = max(users_default + 1, 50) + + updated = False + for v in ("invite", "events_default"): + current = int(pl_content.get(v, 0)) + if current < restricted_level: + logger.debug( + "Setting level for %s in %s to %i (was %i)", + v, old_room_id, restricted_level, current, + ) + pl_content[v] = restricted_level + updated = True + else: + logger.debug( + "Not setting level for %s (already %i)", + v, current, + ) + + if updated: + yield self.event_creation_handler.create_and_send_nonmember_event( + requester, { + "type": EventTypes.PowerLevels, + "state_key": '', + "room_id": old_room_id, + "sender": user_id, + "content": pl_content, + }, ratelimit=False, + ) defer.returnValue(new_room_id) -- cgit 1.4.1 From 193cadc988801d9035124d1fd3ca23607b9b1f25 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 26 Oct 2018 17:10:30 +0100 Subject: Address review comments Improve comments, get old room state from the context we already have --- synapse/handlers/room.py | 16 +++++++++------- .../client/v2_alpha/room_upgrade_rest_servlet.py | 21 ++++++++++++++++----- 2 files changed, 25 insertions(+), 12 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index d016f0e8b8..145b5b19ee 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -136,19 +136,21 @@ class RoomCreationHandler(BaseHandler): requester, tombstone_event, tombstone_context, ) - # ... and restrict the PLs in the old room, if possible. 
- old_room_pl_state = yield self.state_handler.get_current_state( - old_room_id, - event_type=EventTypes.PowerLevels, - latest_event_ids=(tombstone_event.event_id, ), - ) + old_room_state = yield tombstone_context.get_current_state_ids(self.store) + old_room_pl_event_id = old_room_state.get((EventTypes.PowerLevels, "")) - if old_room_pl_state is None: + if old_room_pl_event_id is None: logger.warning( "Not supported: upgrading a room with no PL event. Not setting PLs " "in old room.", ) else: + # we try to stop regular users from speaking by setting the PL required + # to send regular events and invites to 'Moderator' level. That's normally + # 50, but if the default PL in a room is 50 or more, then we set the + # required PL above that. + + old_room_pl_state = yield self.store.get_event(old_room_pl_event_id) pl_content = dict(old_room_pl_state.content) users_default = int(pl_content.get("users_default", 0)) restricted_level = max(users_default + 1, 50) diff --git a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py index 1b195f90c4..e6356101fd 100644 --- a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py +++ b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py @@ -31,6 +31,22 @@ logger = logging.getLogger(__name__) class RoomUpgradeRestServlet(RestServlet): + """Handler for room uprade requests. + + Handles requests of the form: + + POST /_matrix/client/r0/rooms/$roomid/upgrade HTTP/1.1 + Content-Type: application/json + + { + "new_version": "2", + } + + Creates a new room and shuts down the old one. Returns the ID of the new room. + + Args: + hs (synapse.server.HomeServer): + """ PATTERNS = client_v2_patterns( # /rooms/$roomid/upgrade "/rooms/(?P[^/]*)/upgrade$", @@ -38,11 +54,6 @@ class RoomUpgradeRestServlet(RestServlet): ) def __init__(self, hs): - """ - - Args: - hs (synapse.server.HomeServer): - """ super(RoomUpgradeRestServlet, self).__init__() self._hs = hs self._room_creation_handler = hs.get_room_creation_handler() -- cgit 1.4.1 From 54bbe71867fb3de2e3984e2b3eb909845c2448b3 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 26 Oct 2018 22:51:34 +0100 Subject: optimise state copying --- synapse/handlers/room.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 145b5b19ee..8e48c1ca6a 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -21,7 +21,7 @@ import math import string from collections import OrderedDict -from six import string_types +from six import iteritems, string_types from twisted.internet import defer @@ -237,12 +237,10 @@ class RoomCreationHandler(BaseHandler): # map from event_id to BaseEvent old_room_state_events = yield self.store.get_events(old_room_state_ids.values()) - for k in types_to_copy: - old_event_id = old_room_state_ids.get(k) - if old_event_id: - old_event = old_room_state_events.get(old_event_id) - if old_event: - initial_state[k] = old_event.content + for k, old_event_id in iteritems(old_room_state_ids): + old_event = old_room_state_events.get(old_event_id) + if old_event: + initial_state[k] = old_event.content yield self._send_events_for_new_room( requester, -- cgit 1.4.1 From 5caf79b312947c823977c89275c1ea5750aeec92 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 26 Oct 2018 23:56:40 +0100 Subject: Remember to copy the avatar on room upgrades --- changelog.d/4100.feature | 1 + synapse/handlers/room.py | 
3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog.d/4100.feature (limited to 'synapse/handlers') diff --git a/changelog.d/4100.feature b/changelog.d/4100.feature new file mode 100644 index 0000000000..a3f7dbdcdd --- /dev/null +++ b/changelog.d/4100.feature @@ -0,0 +1 @@ +Support for replacing rooms with new ones diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 8e48c1ca6a..c59c02527c 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -228,7 +228,8 @@ class RoomCreationHandler(BaseHandler): (EventTypes.Name, ""), (EventTypes.Topic, ""), (EventTypes.RoomHistoryVisibility, ""), - (EventTypes.GuestAccess, "") + (EventTypes.GuestAccess, ""), + (EventTypes.RoomAvatar, ""), ) old_room_state_ids = yield self.store.get_filtered_current_state_ids( -- cgit 1.4.1 From db24d7f15e406390d57b23d48a78fa33604a47e7 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 26 Oct 2018 23:47:37 +0100 Subject: Better handling of odd PLs during room upgrades Fixes handling of rooms where we have permission to send the tombstone, but not other state. We need to (a) fail more gracefully when we can't send the PLs in the old room, and (b) not set the PLs in the new room until we are done with the other stuff. --- changelog.d/4099.feature | 1 + synapse/handlers/room.py | 125 ++++++++++++++++++++++++++++++----------------- 2 files changed, 82 insertions(+), 44 deletions(-) create mode 100644 changelog.d/4099.feature (limited to 'synapse/handlers') diff --git a/changelog.d/4099.feature b/changelog.d/4099.feature new file mode 100644 index 0000000000..a3f7dbdcdd --- /dev/null +++ b/changelog.d/4099.feature @@ -0,0 +1 @@ +Support for replacing rooms with new ones diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 8e48c1ca6a..70085db625 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -136,53 +136,91 @@ class RoomCreationHandler(BaseHandler): requester, tombstone_event, tombstone_context, ) + # and finally, shut down the PLs in the old room, and update them in the new + # room. old_room_state = yield tombstone_context.get_current_state_ids(self.store) - old_room_pl_event_id = old_room_state.get((EventTypes.PowerLevels, "")) - if old_room_pl_event_id is None: - logger.warning( - "Not supported: upgrading a room with no PL event. Not setting PLs " - "in old room.", + yield self._update_upgraded_room_pls( + requester, old_room_id, new_room_id, old_room_state, + ) + + defer.returnValue(new_room_id) + + @defer.inlineCallbacks + def _update_upgraded_room_pls( + self, requester, old_room_id, new_room_id, old_room_state, + ): + """Send updated power levels in both rooms after an upgrade + + Args: + requester (synapse.types.Requester): the user requesting the upgrade + old_room_id (unicode): the id of the room to be replaced + new_room_id (unicode): the id of the replacement room + old_room_state (dict[tuple[str, str], str]): the state map for the old room + + Returns: + Deferred + """ + old_room_pl_event_id = old_room_state.get((EventTypes.PowerLevels, "")) + + if old_room_pl_event_id is None: + logger.warning( + "Not supported: upgrading a room with no PL event. Not setting PLs " + "in old room.", + ) + return + + old_room_pl_state = yield self.store.get_event(old_room_pl_event_id) + + # we try to stop regular users from speaking by setting the PL required + # to send regular events and invites to 'Moderator' level. 
That's normally + # 50, but if the default PL in a room is 50 or more, then we set the + # required PL above that. + + pl_content = dict(old_room_pl_state.content) + users_default = int(pl_content.get("users_default", 0)) + restricted_level = max(users_default + 1, 50) + + updated = False + for v in ("invite", "events_default"): + current = int(pl_content.get(v, 0)) + if current < restricted_level: + logger.info( + "Setting level for %s in %s to %i (was %i)", + v, old_room_id, restricted_level, current, ) + pl_content[v] = restricted_level + updated = True else: - # we try to stop regular users from speaking by setting the PL required - # to send regular events and invites to 'Moderator' level. That's normally - # 50, but if the default PL in a room is 50 or more, then we set the - # required PL above that. - - old_room_pl_state = yield self.store.get_event(old_room_pl_event_id) - pl_content = dict(old_room_pl_state.content) - users_default = int(pl_content.get("users_default", 0)) - restricted_level = max(users_default + 1, 50) - - updated = False - for v in ("invite", "events_default"): - current = int(pl_content.get(v, 0)) - if current < restricted_level: - logger.debug( - "Setting level for %s in %s to %i (was %i)", - v, old_room_id, restricted_level, current, - ) - pl_content[v] = restricted_level - updated = True - else: - logger.debug( - "Not setting level for %s (already %i)", - v, current, - ) - - if updated: - yield self.event_creation_handler.create_and_send_nonmember_event( - requester, { - "type": EventTypes.PowerLevels, - "state_key": '', - "room_id": old_room_id, - "sender": user_id, - "content": pl_content, - }, ratelimit=False, - ) - - defer.returnValue(new_room_id) + logger.info( + "Not setting level for %s (already %i)", + v, current, + ) + + if updated: + try: + yield self.event_creation_handler.create_and_send_nonmember_event( + requester, { + "type": EventTypes.PowerLevels, + "state_key": '', + "room_id": old_room_id, + "sender": requester.user.to_string(), + "content": pl_content, + }, ratelimit=False, + ) + except AuthError as e: + logger.warning("Unable to update PLs in old room: %s", e) + + logger.info("Setting correct PLs in new room") + yield self.event_creation_handler.create_and_send_nonmember_event( + requester, { + "type": EventTypes.PowerLevels, + "state_key": '', + "room_id": new_room_id, + "sender": requester.user.to_string(), + "content": old_room_pl_state.content, + }, ratelimit=False, + ) @defer.inlineCallbacks def clone_exiting_room( @@ -223,7 +261,6 @@ class RoomCreationHandler(BaseHandler): initial_state = dict() types_to_copy = ( - (EventTypes.PowerLevels, ""), (EventTypes.JoinRules, ""), (EventTypes.Name, ""), (EventTypes.Topic, ""), -- cgit 1.4.1 From 4cd1c9f2ffa46bc8ed258da200ae3b8ba25fcbb5 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 29 Oct 2018 23:57:24 +1100 Subject: Delete the disused & unspecced identicon functionality (#4106) --- changelog.d/4106.removal | 1 + scripts-dev/make_identicons.pl | 39 ----------------- synapse/handlers/register.py | 3 -- synapse/python_dependencies.py | 1 - synapse/rest/media/v1/identicon_resource.py | 68 ----------------------------- synapse/rest/media/v1/media_repository.py | 2 - 6 files changed, 1 insertion(+), 113 deletions(-) create mode 100644 changelog.d/4106.removal delete mode 100755 scripts-dev/make_identicons.pl delete mode 100644 synapse/rest/media/v1/identicon_resource.py (limited to 'synapse/handlers') diff --git a/changelog.d/4106.removal b/changelog.d/4106.removal new file mode 100644 
index 0000000000..7e63208daa --- /dev/null +++ b/changelog.d/4106.removal @@ -0,0 +1 @@ +The disused and un-specced identicon generator has been removed. diff --git a/scripts-dev/make_identicons.pl b/scripts-dev/make_identicons.pl deleted file mode 100755 index cbff63e298..0000000000 --- a/scripts-dev/make_identicons.pl +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env perl - -use strict; -use warnings; - -use DBI; -use DBD::SQLite; -use JSON; -use Getopt::Long; - -my $db; # = "homeserver.db"; -my $server = "http://localhost:8008"; -my $size = 320; - -GetOptions("db|d=s", \$db, - "server|s=s", \$server, - "width|w=i", \$size) or usage(); - -usage() unless $db; - -my $dbh = DBI->connect("dbi:SQLite:dbname=$db","","") || die $DBI::errstr; - -my $res = $dbh->selectall_arrayref("select token, name from access_tokens, users where access_tokens.user_id = users.id group by user_id") || die $DBI::errstr; - -foreach (@$res) { - my ($token, $mxid) = ($_->[0], $_->[1]); - my ($user_id) = ($mxid =~ m/@(.*):/); - my ($url) = $dbh->selectrow_array("select avatar_url from profiles where user_id=?", undef, $user_id); - if (!$url || $url =~ /#auto$/) { - `curl -s -o tmp.png "$server/_matrix/media/v1/identicon?name=${mxid}&width=$size&height=$size"`; - my $json = `curl -s -X POST -H "Content-Type: image/png" -T "tmp.png" $server/_matrix/media/v1/upload?access_token=$token`; - my $content_uri = from_json($json)->{content_uri}; - `curl -X PUT -H "Content-Type: application/json" --data '{ "avatar_url": "${content_uri}#auto"}' $server/_matrix/client/api/v1/profile/${mxid}/avatar_url?access_token=$token`; - } -} - -sub usage { - die "usage: ./make-identicons.pl\n\t-d database [e.g. homeserver.db]\n\t-s homeserver (default: http://localhost:8008)\n\t-w identicon size in pixels (default 320)"; -} \ No newline at end of file diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 7b4549223f..d2beb275cf 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -256,9 +256,6 @@ class RegistrationHandler(BaseHandler): except Exception as e: logger.error("Failed to join new user to %r: %r", r, e) - # We used to generate default identicons here, but nowadays - # we want clients to generate their own as part of their branding - # rather than there being consistent matrix-wide ones, so we don't. defer.returnValue((user_id, token)) @defer.inlineCallbacks diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 943876456b..ca62ee7637 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -51,7 +51,6 @@ REQUIREMENTS = { "daemonize>=2.3.1": ["daemonize"], "bcrypt>=3.1.0": ["bcrypt>=3.1.0"], "pillow>=3.1.2": ["PIL"], - "pydenticon>=0.2": ["pydenticon"], "sortedcontainers>=1.4.4": ["sortedcontainers"], "psutil>=2.0.0": ["psutil>=2.0.0"], "pysaml2>=3.0.0": ["saml2"], diff --git a/synapse/rest/media/v1/identicon_resource.py b/synapse/rest/media/v1/identicon_resource.py deleted file mode 100644 index bdbd8d50dd..0000000000 --- a/synapse/rest/media/v1/identicon_resource.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2015, 2016 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from pydenticon import Generator - -from twisted.web.resource import Resource - -from synapse.http.servlet import parse_integer - -FOREGROUND = [ - "rgb(45,79,255)", - "rgb(254,180,44)", - "rgb(226,121,234)", - "rgb(30,179,253)", - "rgb(232,77,65)", - "rgb(49,203,115)", - "rgb(141,69,170)" -] - -BACKGROUND = "rgb(224,224,224)" -SIZE = 5 - - -class IdenticonResource(Resource): - isLeaf = True - - def __init__(self): - Resource.__init__(self) - self.generator = Generator( - SIZE, SIZE, foreground=FOREGROUND, background=BACKGROUND, - ) - - def generate_identicon(self, name, width, height): - v_padding = width % SIZE - h_padding = height % SIZE - top_padding = v_padding // 2 - left_padding = h_padding // 2 - bottom_padding = v_padding - top_padding - right_padding = h_padding - left_padding - width -= v_padding - height -= h_padding - padding = (top_padding, bottom_padding, left_padding, right_padding) - identicon = self.generator.generate( - name, width, height, padding=padding - ) - return identicon - - def render_GET(self, request): - name = "/".join(request.postpath) - width = parse_integer(request, "width", default=96) - height = parse_integer(request, "height", default=96) - identicon_bytes = self.generate_identicon(name, width, height) - request.setHeader(b"Content-Type", b"image/png") - request.setHeader( - b"Cache-Control", b"public,max-age=86400,s-maxage=86400" - ) - return identicon_bytes diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 08b1867fab..d6c5f07af0 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -45,7 +45,6 @@ from ._base import FileInfo, respond_404, respond_with_responder from .config_resource import MediaConfigResource from .download_resource import DownloadResource from .filepath import MediaFilePaths -from .identicon_resource import IdenticonResource from .media_storage import MediaStorage from .preview_url_resource import PreviewUrlResource from .storage_provider import StorageProviderWrapper @@ -769,7 +768,6 @@ class MediaRepositoryResource(Resource): self.putChild(b"thumbnail", ThumbnailResource( hs, media_repo, media_repo.media_storage, )) - self.putChild(b"identicon", IdenticonResource()) if hs.config.url_preview_enabled: self.putChild(b"preview_url", PreviewUrlResource( hs, media_repo, media_repo.media_storage, -- cgit 1.4.1 From 3bade14ec0aa7e56c84d30241bd86a177f0699d6 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 31 Oct 2018 04:33:41 +1100 Subject: Fix search 500ing (#4122) --- changelog.d/4122.bugfix | 1 + synapse/handlers/search.py | 8 ++- tests/rest/client/v1/test_rooms.py | 106 ++++++++++++++++++++++++++++++++++++- 3 files changed, 112 insertions(+), 3 deletions(-) create mode 100644 changelog.d/4122.bugfix (limited to 'synapse/handlers') diff --git a/changelog.d/4122.bugfix b/changelog.d/4122.bugfix new file mode 100644 index 0000000000..66dcfb18b9 --- /dev/null +++ b/changelog.d/4122.bugfix @@ -0,0 +1 @@ +Searches that request profile info now no longer fail with a 500. 
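The regression being fixed here was hit by search requests that ask for profile information in the event context, which is exactly what the new test below exercises. For reference, a request body of the shape that previously triggered the 500 (taken from that test; the search term and keys are arbitrary):

# /search request body exercising the fixed code path.
search_body = {
    "search_categories": {
        "room_events": {
            "keys": ["content.body"],
            "search_term": "Hi",
            "event_context": {"include_profile": True},
        }
    }
}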
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 0c1d52fd11..80e7b15de8 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -24,6 +24,7 @@ from synapse.api.constants import EventTypes, Membership from synapse.api.errors import SynapseError from synapse.api.filtering import Filter from synapse.events.utils import serialize_event +from synapse.storage.state import StateFilter from synapse.visibility import filter_events_for_client from ._base import BaseHandler @@ -324,9 +325,12 @@ class SearchHandler(BaseHandler): else: last_event_id = event.event_id + state_filter = StateFilter.from_types( + [(EventTypes.Member, sender) for sender in senders] + ) + state = yield self.store.get_state_for_event( - last_event_id, - types=[(EventTypes.Member, sender) for sender in senders] + last_event_id, state_filter ) res["profile_info"] = { diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py index 359f7777ff..a824be9a62 100644 --- a/tests/rest/client/v1/test_rooms.py +++ b/tests/rest/client/v1/test_rooms.py @@ -23,7 +23,7 @@ from six.moves.urllib import parse as urlparse from twisted.internet import defer from synapse.api.constants import Membership -from synapse.rest.client.v1 import room +from synapse.rest.client.v1 import admin, login, room from tests import unittest @@ -799,3 +799,107 @@ class RoomMessageListTestCase(RoomBase): self.assertEquals(token, channel.json_body['start']) self.assertTrue("chunk" in channel.json_body) self.assertTrue("end" in channel.json_body) + + +class RoomSearchTestCase(unittest.HomeserverTestCase): + servlets = [ + admin.register_servlets, + room.register_servlets, + login.register_servlets, + ] + user_id = True + hijack_auth = False + + def prepare(self, reactor, clock, hs): + + # Register the user who does the searching + self.user_id = self.register_user("user", "pass") + self.access_token = self.login("user", "pass") + + # Register the user who sends the message + self.other_user_id = self.register_user("otheruser", "pass") + self.other_access_token = self.login("otheruser", "pass") + + # Create a room + self.room = self.helper.create_room_as(self.user_id, tok=self.access_token) + + # Invite the other person + self.helper.invite( + room=self.room, + src=self.user_id, + tok=self.access_token, + targ=self.other_user_id, + ) + + # The other user joins + self.helper.join( + room=self.room, user=self.other_user_id, tok=self.other_access_token + ) + + def test_finds_message(self): + """ + The search functionality will search for content in messages if asked to + do so. + """ + # The other user sends some messages + self.helper.send(self.room, body="Hi!", tok=self.other_access_token) + self.helper.send(self.room, body="There!", tok=self.other_access_token) + + request, channel = self.make_request( + "POST", + "/search?access_token=%s" % (self.access_token,), + { + "search_categories": { + "room_events": {"keys": ["content.body"], "search_term": "Hi"} + } + }, + ) + self.render(request) + + # Check we get the results we expect -- one search result, of the sent + # messages + self.assertEqual(channel.code, 200) + results = channel.json_body["search_categories"]["room_events"] + self.assertEqual(results["count"], 1) + self.assertEqual(results["results"][0]["result"]["content"]["body"], "Hi!") + + # No context was requested, so we should get none. 
+ self.assertEqual(results["results"][0]["context"], {}) + + def test_include_context(self): + """ + When event_context includes include_profile, profile information will be + included in the search response. + """ + # The other user sends some messages + self.helper.send(self.room, body="Hi!", tok=self.other_access_token) + self.helper.send(self.room, body="There!", tok=self.other_access_token) + + request, channel = self.make_request( + "POST", + "/search?access_token=%s" % (self.access_token,), + { + "search_categories": { + "room_events": { + "keys": ["content.body"], + "search_term": "Hi", + "event_context": {"include_profile": True}, + } + } + }, + ) + self.render(request) + + # Check we get the results we expect -- one search result, of the sent + # messages + self.assertEqual(channel.code, 200) + results = channel.json_body["search_categories"]["room_events"] + self.assertEqual(results["count"], 1) + self.assertEqual(results["results"][0]["result"]["content"]["body"], "Hi!") + + # We should get context info, like the two users, and the display names. + context = results["results"][0]["context"] + self.assertEqual(len(context["profile_info"].keys()), 2) + self.assertEqual( + context["profile_info"][self.other_user_id]["displayname"], "otheruser" + ) -- cgit 1.4.1 From 9b827c40ca71510390c92472f7ec5cfcff9e69b2 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 31 Oct 2018 15:42:23 +0000 Subject: Log some bits about event creation (#4121) I found these helpful in debugging my room upgrade tests. --- changelog.d/4121.misc | 1 + synapse/handlers/message.py | 3 +++ synapse/handlers/room.py | 4 ++++ 3 files changed, 8 insertions(+) create mode 100644 changelog.d/4121.misc (limited to 'synapse/handlers') diff --git a/changelog.d/4121.misc b/changelog.d/4121.misc new file mode 100644 index 0000000000..9c29d80c3f --- /dev/null +++ b/changelog.d/4121.misc @@ -0,0 +1 @@ +Log some bits about room creation diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 969e588e73..a7cd779b02 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -427,6 +427,9 @@ class EventCreationHandler(object): if event.is_state(): prev_state = yield self.deduplicate_state_event(event, context) + logger.info( + "Not bothering to persist duplicate state event %s", event.event_id, + ) if prev_state is not None: defer.returnValue(prev_state) diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 1d9417ff1a..fe960342b9 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -104,6 +104,8 @@ class RoomCreationHandler(BaseHandler): creator_id=user_id, is_public=r["is_public"], ) + logger.info("Creating new room %s to replace %s", new_room_id, old_room_id) + # we create and auth the tombstone event before properly creating the new # room, to check our user has perms in the old room. 
tombstone_event, tombstone_context = ( @@ -522,6 +524,7 @@ class RoomCreationHandler(BaseHandler): @defer.inlineCallbacks def send(etype, content, **kwargs): event = create(etype, content, **kwargs) + logger.info("Sending %s in new room", etype) yield self.event_creation_handler.create_and_send_nonmember_event( creator, event, @@ -544,6 +547,7 @@ class RoomCreationHandler(BaseHandler): content=creation_content, ) + logger.info("Sending %s in new room", EventTypes.Member) yield self.room_member_handler.update_membership( creator, creator.user, -- cgit 1.4.1 From 94c7fadc98542d582ff67c5ac788081c0d836e6b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 26 Oct 2018 15:11:35 +0100 Subject: Attempt to move room aliases on room upgrades --- changelog.d/4101.feature | 1 + synapse/handlers/directory.py | 34 +++++++++--- synapse/handlers/room.py | 121 +++++++++++++++++++++++++++++++++++++++--- 3 files changed, 142 insertions(+), 14 deletions(-) create mode 100644 changelog.d/4101.feature (limited to 'synapse/handlers') diff --git a/changelog.d/4101.feature b/changelog.d/4101.feature new file mode 100644 index 0000000000..a3f7dbdcdd --- /dev/null +++ b/changelog.d/4101.feature @@ -0,0 +1 @@ +Support for replacing rooms with new ones diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 7d67bf803a..0699731c13 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -138,9 +138,30 @@ class DirectoryHandler(BaseHandler): ) @defer.inlineCallbacks - def delete_association(self, requester, room_alias): - # association deletion for human users + def delete_association(self, requester, room_alias, send_event=True): + """Remove an alias from the directory + (this is only meant for human users; AS users should call + delete_appservice_association) + + Args: + requester (Requester): + room_alias (RoomAlias): + send_event (bool): Whether to send an updated m.room.aliases event. + Note that, if we delete the canonical alias, we will always attempt + to send an m.room.canonical_alias event + + Returns: + Deferred[unicode]: room id that the alias used to point to + + Raises: + NotFoundError: if the alias doesn't exist + + AuthError: if the user doesn't have perms to delete the alias (ie, the user + is neither the creator of the alias, nor a server admin. + + SynapseError: if the alias belongs to an AS + """ user_id = requester.user.to_string() try: @@ -168,10 +189,11 @@ class DirectoryHandler(BaseHandler): room_id = yield self._delete_association(room_alias) try: - yield self.send_room_alias_update_event( - requester, - room_id - ) + if send_event: + yield self.send_room_alias_update_event( + requester, + room_id + ) yield self._update_canonical_alias( requester, diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 1d9417ff1a..76811050a6 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -136,10 +136,15 @@ class RoomCreationHandler(BaseHandler): requester, tombstone_event, tombstone_context, ) - # and finally, shut down the PLs in the old room, and update them in the new - # room. old_room_state = yield tombstone_context.get_current_state_ids(self.store) + # update any aliases + yield self._move_aliases_to_new_room( + requester, old_room_id, new_room_id, old_room_state, + ) + + # and finally, shut down the PLs in the old room, and update them in the new + # room. 
yield self._update_upgraded_room_pls( requester, old_room_id, new_room_id, old_room_state, ) @@ -245,11 +250,6 @@ class RoomCreationHandler(BaseHandler): if not self.spam_checker.user_may_create_room(user_id): raise SynapseError(403, "You are not permitted to create rooms") - # XXX check alias is free - # canonical_alias = None - - # XXX create association in directory handler - creation_content = { "room_version": new_room_version, "predecessor": { @@ -295,7 +295,112 @@ class RoomCreationHandler(BaseHandler): # XXX invites/joins # XXX 3pid invites - # XXX directory_handler.send_room_alias_update_event + + @defer.inlineCallbacks + def _move_aliases_to_new_room( + self, requester, old_room_id, new_room_id, old_room_state, + ): + directory_handler = self.hs.get_handlers().directory_handler + + aliases = yield self.store.get_aliases_for_room(old_room_id) + + # check to see if we have a canonical alias. + canonical_alias = None + canonical_alias_event_id = old_room_state.get((EventTypes.CanonicalAlias, "")) + if canonical_alias_event_id: + canonical_alias_event = yield self.store.get_event(canonical_alias_event_id) + if canonical_alias_event: + canonical_alias = canonical_alias_event.content.get("alias", "") + + # first we try to remove the aliases from the old room (we suppress sending + # the room_aliases event until the end). + # + # Note that we'll only be able to remove aliases that (a) aren't owned by an AS, + # and (b) unless the user is a server admin, which the user created. + # + # This is probably correct - given we don't allow such aliases to be deleted + # normally, it would be odd to allow it in the case of doing a room upgrade - + # but it makes the upgrade less effective, and you have to wonder why a room + # admin can't remove aliases that point to that room anyway. + # (cf https://github.com/matrix-org/synapse/issues/2360) + # + removed_aliases = [] + for alias_str in aliases: + alias = RoomAlias.from_string(alias_str) + try: + yield directory_handler.delete_association( + requester, alias, send_event=False, + ) + except SynapseError as e: + logger.warning( + "Unable to remove alias %s from old room: %s", + alias, e, + ) + else: + removed_aliases.append(alias_str) + + # if we didn't find any aliases, or couldn't remove anyway, we can skip the rest + # of this. + if not removed_aliases: + return + + try: + # this can fail if, for some reason, our user doesn't have perms to send + # m.room.aliases events in the old room (note that we've already checked that + # they have perms to send a tombstone event, so that's not terribly likely). + # + # If that happens, it's regrettable, but we should carry on: it's the same + # as when you remove an alias from the directory normally - it just means that + # the aliases event gets out of sync with the directory + # (cf https://github.com/vector-im/riot-web/issues/2369) + yield directory_handler.send_room_alias_update_event( + requester, old_room_id, + ) + except AuthError as e: + logger.warning( + "Failed to send updated alias event on old room: %s", e, + ) + + # we can now add any aliases we successfully removed to the new room. + for alias in removed_aliases: + try: + yield directory_handler.create_association( + requester, RoomAlias.from_string(alias), + new_room_id, servers=(self.hs.hostname, ), + send_event=False, + ) + logger.info("Moved alias %s to new room", alias) + except SynapseError as e: + # I'm not really expecting this to happen, but it could if the spam + # checking module decides it shouldn't, or similar. 
+ logger.error( + "Error adding alias %s to new room: %s", + alias, e, + ) + + try: + if canonical_alias and (canonical_alias in removed_aliases): + yield self.event_creation_handler.create_and_send_nonmember_event( + requester, + { + "type": EventTypes.CanonicalAlias, + "state_key": "", + "room_id": new_room_id, + "sender": requester.user.to_string(), + "content": {"alias": canonical_alias, }, + }, + ratelimit=False + ) + + yield directory_handler.send_room_alias_update_event( + requester, new_room_id, + ) + except SynapseError as e: + # again I'm not really expecting this to fail, but if it does, I'd rather + # we returned the new room to the client at this point. + logger.error( + "Unable to send updated alias events in new room: %s", e, + ) @defer.inlineCallbacks def create_room(self, requester, config, ratelimit=True, -- cgit 1.4.1 From 0f8591a5a8695aa176736c651a361c40cf228b6d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 29 Oct 2018 15:20:19 +0000 Subject: Avoid else clause on exception for clarity --- synapse/handlers/room.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 76811050a6..9ff4656717 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -331,13 +331,12 @@ class RoomCreationHandler(BaseHandler): yield directory_handler.delete_association( requester, alias, send_event=False, ) + removed_aliases.append(alias_str) except SynapseError as e: logger.warning( "Unable to remove alias %s from old room: %s", alias, e, ) - else: - removed_aliases.append(alias_str) # if we didn't find any aliases, or couldn't remove anyway, we can skip the rest # of this. -- cgit 1.4.1 From a8d41c6aff0e58fc24fae1fe4ae89d28541a63cb Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Wed, 31 Oct 2018 13:19:28 -0600 Subject: Include a version query string arg for the consent route --- synapse/handlers/auth.py | 5 ++++- synapse/rest/client/v2_alpha/auth.py | 6 ++++-- 2 files changed, 8 insertions(+), 3 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index d143522d9a..85fc1fc525 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -473,7 +473,10 @@ class AuthHandler(BaseHandler): "version": self.hs.config.user_consent_version, "en": { "name": "Privacy Policy", - "url": "%s/_matrix/consent" % (self.hs.config.public_baseurl,), + "url": "%s/_matrix/consent?v=%s" % ( + self.hs.config.public_baseurl, + self.hs.config.user_consent_version, + ), }, }, }, diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py index 6f90935b22..a8d8ed6590 100644 --- a/synapse/rest/client/v2_alpha/auth.py +++ b/synapse/rest/client/v2_alpha/auth.py @@ -161,8 +161,9 @@ class AuthRestServlet(RestServlet): html = TERMS_TEMPLATE % { 'session': session, - 'terms_url': "%s/_matrix/consent" % ( + 'terms_url': "%s/_matrix/consent?v=%s" % ( self.hs.config.public_baseurl, + self.hs.config.user_consent_version, ), 'myurl': "%s/auth/%s/fallback/web" % ( CLIENT_V2_ALPHA_PREFIX, LoginType.TERMS @@ -241,8 +242,9 @@ class AuthRestServlet(RestServlet): else: html = TERMS_TEMPLATE % { 'session': session, - 'terms_url': "%s/_matrix/consent" % ( + 'terms_url': "%s/_matrix/consent?v=%s" % ( self.hs.config.public_baseurl, + self.hs.config.user_consent_version, ), 'myurl': "%s/auth/%s/fallback/web" % ( CLIENT_V2_ALPHA_PREFIX, LoginType.TERMS -- cgit 1.4.1 From cb7a6b2379e0e0a4ba8043da98e376b45d05b977 
Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Sat, 3 Nov 2018 00:19:23 +1100 Subject: Fix typing being reset causing infinite syncs (#4127) --- changelog.d/4127.bugfix | 1 + synapse/app/synchrotron.py | 14 ++++ synapse/handlers/typing.py | 14 ++-- tests/rest/client/v2_alpha/test_sync.py | 123 ++++++++++++++++++++++++++++++++ tests/server.py | 8 ++- 5 files changed, 155 insertions(+), 5 deletions(-) create mode 100644 changelog.d/4127.bugfix (limited to 'synapse/handlers') diff --git a/changelog.d/4127.bugfix b/changelog.d/4127.bugfix new file mode 100644 index 0000000000..0701d2ceaa --- /dev/null +++ b/changelog.d/4127.bugfix @@ -0,0 +1 @@ +If the typing stream ID goes backwards (as on a worker when the master restarts), the worker's typing handler will no longer erroneously report rooms containing new typing events. diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index 3926c7f263..0354e82bf8 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -226,7 +226,15 @@ class SynchrotronPresence(object): class SynchrotronTyping(object): def __init__(self, hs): self._latest_room_serial = 0 + self._reset() + + def _reset(self): + """ + Reset the typing handler's data caches. + """ + # map room IDs to serial numbers self._room_serials = {} + # map room IDs to sets of users currently typing self._room_typing = {} def stream_positions(self): @@ -236,6 +244,12 @@ class SynchrotronTyping(object): return {"typing": self._latest_room_serial} def process_replication_rows(self, token, rows): + if self._latest_room_serial > token: + # The master has gone backwards. To prevent inconsistent data, just + # clear everything. + self._reset() + + # Set the latest serial token to whatever the server gave us. self._latest_room_serial = token for row in rows: diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index c610933dd4..a61bbf9392 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -63,11 +63,8 @@ class TypingHandler(object): self._member_typing_until = {} # clock time we expect to stop self._member_last_federation_poke = {} - # map room IDs to serial numbers - self._room_serials = {} self._latest_room_serial = 0 - # map room IDs to sets of users currently typing - self._room_typing = {} + self._reset() # caches which room_ids changed at which serials self._typing_stream_change_cache = StreamChangeCache( @@ -79,6 +76,15 @@ class TypingHandler(object): 5000, ) + def _reset(self): + """ + Reset the typing handler's data caches. 
+ """ + # map room IDs to serial numbers + self._room_serials = {} + # map room IDs to sets of users currently typing + self._room_typing = {} + def _handle_timeouts(self): logger.info("Checking for typing timeouts") diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py index 4c30c5f258..99b716f00a 100644 --- a/tests/rest/client/v2_alpha/test_sync.py +++ b/tests/rest/client/v2_alpha/test_sync.py @@ -15,9 +15,11 @@ from mock import Mock +from synapse.rest.client.v1 import admin, login, room from synapse.rest.client.v2_alpha import sync from tests import unittest +from tests.server import TimedOutException class FilterTestCase(unittest.HomeserverTestCase): @@ -65,3 +67,124 @@ class FilterTestCase(unittest.HomeserverTestCase): ["next_batch", "rooms", "account_data", "to_device", "device_lists"] ).issubset(set(channel.json_body.keys())) ) + + +class SyncTypingTests(unittest.HomeserverTestCase): + + servlets = [ + admin.register_servlets, + room.register_servlets, + login.register_servlets, + sync.register_servlets, + ] + user_id = True + hijack_auth = False + + def test_sync_backwards_typing(self): + """ + If the typing serial goes backwards and the typing handler is then reset + (such as when the master restarts and sets the typing serial to 0), we + do not incorrectly return typing information that had a serial greater + than the now-reset serial. + """ + typing_url = "/rooms/%s/typing/%s?access_token=%s" + sync_url = "/sync?timeout=3000000&access_token=%s&since=%s" + + # Register the user who gets notified + user_id = self.register_user("user", "pass") + access_token = self.login("user", "pass") + + # Register the user who sends the message + other_user_id = self.register_user("otheruser", "pass") + other_access_token = self.login("otheruser", "pass") + + # Create a room + room = self.helper.create_room_as(user_id, tok=access_token) + + # Invite the other person + self.helper.invite(room=room, src=user_id, tok=access_token, targ=other_user_id) + + # The other user joins + self.helper.join(room=room, user=other_user_id, tok=other_access_token) + + # The other user sends some messages + self.helper.send(room, body="Hi!", tok=other_access_token) + self.helper.send(room, body="There!", tok=other_access_token) + + # Start typing. + request, channel = self.make_request( + "PUT", + typing_url % (room, other_user_id, other_access_token), + b'{"typing": true, "timeout": 30000}', + ) + self.render(request) + self.assertEquals(200, channel.code) + + request, channel = self.make_request( + "GET", "/sync?access_token=%s" % (access_token,) + ) + self.render(request) + self.assertEquals(200, channel.code) + next_batch = channel.json_body["next_batch"] + + # Stop typing. + request, channel = self.make_request( + "PUT", + typing_url % (room, other_user_id, other_access_token), + b'{"typing": false}', + ) + self.render(request) + self.assertEquals(200, channel.code) + + # Start typing. + request, channel = self.make_request( + "PUT", + typing_url % (room, other_user_id, other_access_token), + b'{"typing": true, "timeout": 30000}', + ) + self.render(request) + self.assertEquals(200, channel.code) + + # Should return immediately + request, channel = self.make_request( + "GET", sync_url % (access_token, next_batch) + ) + self.render(request) + self.assertEquals(200, channel.code) + next_batch = channel.json_body["next_batch"] + + # Reset typing serial back to 0, as if the master had. 
+ typing = self.hs.get_typing_handler() + typing._latest_room_serial = 0 + + # Since it checks the state token, we need some state to update to + # invalidate the stream token. + self.helper.send(room, body="There!", tok=other_access_token) + + request, channel = self.make_request( + "GET", sync_url % (access_token, next_batch) + ) + self.render(request) + self.assertEquals(200, channel.code) + next_batch = channel.json_body["next_batch"] + + # This should time out! But it does not, because our stream token is + # ahead, and therefore it's saying the typing (that we've actually + # already seen) is new, since it's got a token above our new, now-reset + # stream token. + request, channel = self.make_request( + "GET", sync_url % (access_token, next_batch) + ) + self.render(request) + self.assertEquals(200, channel.code) + next_batch = channel.json_body["next_batch"] + + # Clear the typing information, so that it doesn't think everything is + # in the future. + typing._reset() + + # Now it SHOULD fail as it never completes! + request, channel = self.make_request( + "GET", sync_url % (access_token, next_batch) + ) + self.assertRaises(TimedOutException, self.render, request) diff --git a/tests/server.py b/tests/server.py index 819c854448..cc6dbe04ac 100644 --- a/tests/server.py +++ b/tests/server.py @@ -21,6 +21,12 @@ from synapse.util import Clock from tests.utils import setup_test_homeserver as _sth +class TimedOutException(Exception): + """ + A web query timed out. + """ + + @attr.s class FakeChannel(object): """ @@ -153,7 +159,7 @@ def wait_until_result(clock, request, timeout=100): x += 1 if x > timeout: - raise Exception("Timed out waiting for request to finish.") + raise TimedOutException("Timed out waiting for request to finish.") clock.advance(0.1) -- cgit 1.4.1 From bc80b3f454aa9b9ca8bc710ff502b83892ac0a91 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 5 Nov 2018 13:35:15 +0000 Subject: Add helpers for getting prev and auth events (#4139) * Add helpers for getting prev and auth events This is in preparation for allowing the event format to change between room versions. 
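To make the motivation concrete, here is a minimal illustrative sketch (not taken from the patch; the `_EventV1`/`_EventV2` classes and the bare-ID format are hypothetical): today `prev_events` holds `(event_id, hash)` pairs, a future room version might hold bare event IDs, and call sites written against `prev_event_ids()` work unchanged against either.

```python
# Hypothetical sketch of why an accessor insulates call sites from the
# on-event representation of prev_events/auth_events.

class _EventV1(object):
    """Current format: prev_events is a list of (event_id, hashes) pairs."""

    def __init__(self, prev_events):
        self.prev_events = prev_events  # e.g. [("$abc:example.org", {"sha256": "..."}), ...]

    def prev_event_ids(self):
        return [e_id for e_id, _ in self.prev_events]


class _EventV2(object):
    """Hypothetical future format: prev_events is a plain list of event IDs."""

    def __init__(self, prev_events):
        self.prev_events = prev_events  # e.g. ["$abc:example.org", ...]

    def prev_event_ids(self):
        return list(self.prev_events)


def count_prev_events(event):
    # Works for either format, because it never touches event.prev_events directly.
    return len(event.prev_event_ids())
```

Written this way, the call sites converted below (for example `prevs = set(pdu.prev_event_ids())` in the federation handler) no longer care which room version produced the event.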
--- changelog.d/4139.misc | 1 + synapse/event_auth.py | 4 +-- synapse/events/__init__.py | 18 +++++++++++++ synapse/federation/transaction_queue.py | 4 +-- synapse/handlers/federation.py | 48 ++++++++++++++++----------------- synapse/state/__init__.py | 2 +- synapse/state/v2.py | 16 +++++------ synapse/storage/event_federation.py | 4 +-- synapse/storage/events.py | 8 +++--- tests/state/test_v2.py | 2 +- 10 files changed, 62 insertions(+), 45 deletions(-) create mode 100644 changelog.d/4139.misc (limited to 'synapse/handlers') diff --git a/changelog.d/4139.misc b/changelog.d/4139.misc new file mode 100644 index 0000000000..d63d9e7003 --- /dev/null +++ b/changelog.d/4139.misc @@ -0,0 +1 @@ +Add helpers functions for getting prev and auth events of an event diff --git a/synapse/event_auth.py b/synapse/event_auth.py index d4d4474847..c81d8e6729 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -200,11 +200,11 @@ def _is_membership_change_allowed(event, auth_events): membership = event.content["membership"] # Check if this is the room creator joining: - if len(event.prev_events) == 1 and Membership.JOIN == membership: + if len(event.prev_event_ids()) == 1 and Membership.JOIN == membership: # Get room creation event: key = (EventTypes.Create, "", ) create = auth_events.get(key) - if create and event.prev_events[0][0] == create.event_id: + if create and event.prev_event_ids()[0] == create.event_id: if create.content["creator"] == event.state_key: return diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 12f1eb0a3e..84c75495d5 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -159,6 +159,24 @@ class EventBase(object): def keys(self): return six.iterkeys(self._event_dict) + def prev_event_ids(self): + """Returns the list of prev event IDs. The order matches the order + specified in the event, though there is no meaning to it. + + Returns: + list[str]: The list of event IDs of this event's prev_events + """ + return [e for e, _ in self.prev_events] + + def auth_event_ids(self): + """Returns the list of auth event IDs. The order matches the order + specified in the event, though there is no meaning to it. + + Returns: + list[str]: The list of event IDs of this event's auth_events + """ + return [e for e, _ in self.auth_events] + class FrozenEvent(EventBase): def __init__(self, event_dict, internal_metadata_dict={}, rejected_reason=None): diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 3fdd63be95..099ace28c1 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -183,9 +183,7 @@ class TransactionQueue(object): # banned then it won't receive the event because it won't # be in the room after the ban. 
destinations = yield self.state.get_current_hosts_in_room( - event.room_id, latest_event_ids=[ - prev_id for prev_id, _ in event.prev_events - ], + event.room_id, latest_event_ids=event.prev_event_ids(), ) except Exception: logger.exception( diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index cd5b9bbb19..9ca5fd8724 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -239,7 +239,7 @@ class FederationHandler(BaseHandler): room_id, event_id, min_depth, ) - prevs = {e_id for e_id, _ in pdu.prev_events} + prevs = set(pdu.prev_event_ids()) seen = yield self.store.have_seen_events(prevs) if min_depth and pdu.depth < min_depth: @@ -607,7 +607,7 @@ class FederationHandler(BaseHandler): if e.event_id in seen_ids: continue e.internal_metadata.outlier = True - auth_ids = [e_id for e_id, _ in e.auth_events] + auth_ids = e.auth_event_ids() auth = { (e.type, e.state_key): e for e in auth_chain if e.event_id in auth_ids or e.type == EventTypes.Create @@ -726,7 +726,7 @@ class FederationHandler(BaseHandler): edges = [ ev.event_id for ev in events - if set(e_id for e_id, _ in ev.prev_events) - event_ids + if set(ev.prev_event_ids()) - event_ids ] logger.info( @@ -753,7 +753,7 @@ class FederationHandler(BaseHandler): required_auth = set( a_id for event in events + list(state_events.values()) + list(auth_events.values()) - for a_id, _ in event.auth_events + for a_id in event.auth_event_ids() ) auth_events.update({ e_id: event_map[e_id] for e_id in required_auth if e_id in event_map @@ -769,7 +769,7 @@ class FederationHandler(BaseHandler): auth_events.update(ret_events) required_auth.update( - a_id for event in ret_events.values() for a_id, _ in event.auth_events + a_id for event in ret_events.values() for a_id in event.auth_event_ids() ) missing_auth = required_auth - set(auth_events) @@ -796,7 +796,7 @@ class FederationHandler(BaseHandler): required_auth.update( a_id for event in results if event - for a_id, _ in event.auth_events + for a_id in event.auth_event_ids() ) missing_auth = required_auth - set(auth_events) @@ -816,7 +816,7 @@ class FederationHandler(BaseHandler): "auth_events": { (auth_events[a_id].type, auth_events[a_id].state_key): auth_events[a_id] - for a_id, _ in a.auth_events + for a_id in a.auth_event_ids() if a_id in auth_events } }) @@ -828,7 +828,7 @@ class FederationHandler(BaseHandler): "auth_events": { (auth_events[a_id].type, auth_events[a_id].state_key): auth_events[a_id] - for a_id, _ in event_map[e_id].auth_events + for a_id in event_map[e_id].auth_event_ids() if a_id in auth_events } }) @@ -1041,17 +1041,17 @@ class FederationHandler(BaseHandler): Raises: SynapseError if the event does not pass muster """ - if len(ev.prev_events) > 20: + if len(ev.prev_event_ids()) > 20: logger.warn("Rejecting event %s which has %i prev_events", - ev.event_id, len(ev.prev_events)) + ev.event_id, len(ev.prev_event_ids())) raise SynapseError( http_client.BAD_REQUEST, "Too many prev_events", ) - if len(ev.auth_events) > 10: + if len(ev.auth_event_ids()) > 10: logger.warn("Rejecting event %s which has %i auth_events", - ev.event_id, len(ev.auth_events)) + ev.event_id, len(ev.auth_event_ids())) raise SynapseError( http_client.BAD_REQUEST, "Too many auth_events", @@ -1076,7 +1076,7 @@ class FederationHandler(BaseHandler): def on_event_auth(self, event_id): event = yield self.store.get_event(event_id) auth = yield self.store.get_auth_chain( - [auth_id for auth_id, _ in event.auth_events], + [auth_id for auth_id in event.auth_event_ids()], 
include_given=True ) defer.returnValue([e for e in auth]) @@ -1698,7 +1698,7 @@ class FederationHandler(BaseHandler): missing_auth_events = set() for e in itertools.chain(auth_events, state, [event]): - for e_id, _ in e.auth_events: + for e_id in e.auth_event_ids(): if e_id not in event_map: missing_auth_events.add(e_id) @@ -1717,7 +1717,7 @@ class FederationHandler(BaseHandler): for e in itertools.chain(auth_events, state, [event]): auth_for_e = { (event_map[e_id].type, event_map[e_id].state_key): event_map[e_id] - for e_id, _ in e.auth_events + for e_id in e.auth_event_ids() if e_id in event_map } if create_event: @@ -1785,10 +1785,10 @@ class FederationHandler(BaseHandler): # This is a hack to fix some old rooms where the initial join event # didn't reference the create event in its auth events. - if event.type == EventTypes.Member and not event.auth_events: - if len(event.prev_events) == 1 and event.depth < 5: + if event.type == EventTypes.Member and not event.auth_event_ids(): + if len(event.prev_event_ids()) == 1 and event.depth < 5: c = yield self.store.get_event( - event.prev_events[0][0], + event.prev_event_ids()[0], allow_none=True, ) if c and c.type == EventTypes.Create: @@ -1835,7 +1835,7 @@ class FederationHandler(BaseHandler): # Now get the current auth_chain for the event. local_auth_chain = yield self.store.get_auth_chain( - [auth_id for auth_id, _ in event.auth_events], + [auth_id for auth_id in event.auth_event_ids()], include_given=True ) @@ -1891,7 +1891,7 @@ class FederationHandler(BaseHandler): """ # Check if we have all the auth events. current_state = set(e.event_id for e in auth_events.values()) - event_auth_events = set(e_id for e_id, _ in event.auth_events) + event_auth_events = set(event.auth_event_ids()) if event.is_state(): event_key = (event.type, event.state_key) @@ -1935,7 +1935,7 @@ class FederationHandler(BaseHandler): continue try: - auth_ids = [e_id for e_id, _ in e.auth_events] + auth_ids = e.auth_event_ids() auth = { (e.type, e.state_key): e for e in remote_auth_chain if e.event_id in auth_ids or e.type == EventTypes.Create @@ -1956,7 +1956,7 @@ class FederationHandler(BaseHandler): pass have_events = yield self.store.get_seen_events_with_rejections( - [e_id for e_id, _ in event.auth_events] + event.auth_event_ids() ) seen_events = set(have_events.keys()) except Exception: @@ -2058,7 +2058,7 @@ class FederationHandler(BaseHandler): continue try: - auth_ids = [e_id for e_id, _ in ev.auth_events] + auth_ids = ev.auth_event_ids() auth = { (e.type, e.state_key): e for e in result["auth_chain"] @@ -2250,7 +2250,7 @@ class FederationHandler(BaseHandler): missing_remote_ids = [e.event_id for e in missing_remotes] base_remote_rejected = list(missing_remotes) for e in missing_remotes: - for e_id, _ in e.auth_events: + for e_id in e.auth_event_ids(): if e_id in missing_remote_ids: try: base_remote_rejected.remove(e) diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 943d5d6bb5..70048b0c09 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -261,7 +261,7 @@ class StateHandler(object): logger.debug("calling resolve_state_groups from compute_event_context") entry = yield self.resolve_state_groups_for_events( - event.room_id, [e for e, _ in event.prev_events], + event.room_id, event.prev_event_ids(), ) prev_state_ids = entry.state diff --git a/synapse/state/v2.py b/synapse/state/v2.py index dbc9688c56..3573bb0028 100644 --- a/synapse/state/v2.py +++ b/synapse/state/v2.py @@ -159,7 +159,7 @@ def 
_get_power_level_for_sender(event_id, event_map, state_res_store): event = yield _get_event(event_id, event_map, state_res_store) pl = None - for aid, _ in event.auth_events: + for aid in event.auth_event_ids(): aev = yield _get_event(aid, event_map, state_res_store) if (aev.type, aev.state_key) == (EventTypes.PowerLevels, ""): pl = aev @@ -167,7 +167,7 @@ def _get_power_level_for_sender(event_id, event_map, state_res_store): if pl is None: # Couldn't find power level. Check if they're the creator of the room - for aid, _ in event.auth_events: + for aid in event.auth_event_ids(): aev = yield _get_event(aid, event_map, state_res_store) if (aev.type, aev.state_key) == (EventTypes.Create, ""): if aev.content.get("creator") == event.sender: @@ -299,7 +299,7 @@ def _add_event_and_auth_chain_to_graph(graph, event_id, event_map, graph.setdefault(eid, set()) event = yield _get_event(eid, event_map, state_res_store) - for aid, _ in event.auth_events: + for aid in event.auth_event_ids(): if aid in auth_diff: if aid not in graph: state.append(aid) @@ -369,7 +369,7 @@ def _iterative_auth_checks(event_ids, base_state, event_map, state_res_store): event = event_map[event_id] auth_events = {} - for aid, _ in event.auth_events: + for aid in event.auth_event_ids(): ev = yield _get_event(aid, event_map, state_res_store) if ev.rejected_reason is None: @@ -417,9 +417,9 @@ def _mainline_sort(event_ids, resolved_power_event_id, event_map, while pl: mainline.append(pl) pl_ev = yield _get_event(pl, event_map, state_res_store) - auth_events = pl_ev.auth_events + auth_events = pl_ev.auth_event_ids() pl = None - for aid, _ in auth_events: + for aid in auth_events: ev = yield _get_event(aid, event_map, state_res_store) if (ev.type, ev.state_key) == (EventTypes.PowerLevels, ""): pl = aid @@ -464,10 +464,10 @@ def _get_mainline_depth_for_event(event, mainline_map, event_map, state_res_stor if depth is not None: defer.returnValue(depth) - auth_events = event.auth_events + auth_events = event.auth_event_ids() event = None - for aid, _ in auth_events: + for aid in auth_events: aev = yield _get_event(aid, event_map, state_res_store) if (aev.type, aev.state_key) == (EventTypes.PowerLevels, ""): event = aev diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 3faca2a042..d3b9dea1d6 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -477,7 +477,7 @@ class EventFederationStore(EventFederationWorkerStore): "is_state": False, } for ev in events - for e_id, _ in ev.prev_events + for e_id in ev.prev_event_ids() ], ) @@ -510,7 +510,7 @@ class EventFederationStore(EventFederationWorkerStore): txn.executemany(query, [ (e_id, ev.room_id, e_id, ev.room_id, e_id, ev.room_id, False) - for ev in events for e_id, _ in ev.prev_events + for ev in events for e_id in ev.prev_event_ids() if not ev.internal_metadata.is_outlier() ]) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 919e855f3b..2047110b1d 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -416,7 +416,7 @@ class EventsStore(StateGroupWorkerStore, EventFederationStore, EventsWorkerStore ) if len_1: all_single_prev_not_state = all( - len(event.prev_events) == 1 + len(event.prev_event_ids()) == 1 and not event.is_state() for event, ctx in ev_ctx_rm ) @@ -440,7 +440,7 @@ class EventsStore(StateGroupWorkerStore, EventFederationStore, EventsWorkerStore # guess this by looking at the prev_events and checking # if they match the current forward extremities. 
for ev, _ in ev_ctx_rm: - prev_event_ids = set(e for e, _ in ev.prev_events) + prev_event_ids = set(ev.prev_event_ids()) if latest_event_ids == prev_event_ids: state_delta_reuse_delta_counter.inc() break @@ -551,7 +551,7 @@ class EventsStore(StateGroupWorkerStore, EventFederationStore, EventsWorkerStore result.difference_update( e_id for event in new_events - for e_id, _ in event.prev_events + for e_id in event.prev_event_ids() ) # Finally, remove any events which are prev_events of any existing events. @@ -869,7 +869,7 @@ class EventsStore(StateGroupWorkerStore, EventFederationStore, EventsWorkerStore "auth_id": auth_id, } for event, _ in events_and_contexts - for auth_id, _ in event.auth_events + for auth_id in event.auth_event_ids() if event.is_state() ], ) diff --git a/tests/state/test_v2.py b/tests/state/test_v2.py index d67f59b2c7..2e073a3afc 100644 --- a/tests/state/test_v2.py +++ b/tests/state/test_v2.py @@ -753,7 +753,7 @@ class TestStateResolutionStore(object): result.add(event_id) event = self.event_map[event_id] - for aid, _ in event.auth_events: + for aid in event.auth_event_ids(): stack.append(aid) return list(result) -- cgit 1.4.1 From f1087106cf637e3c108c096ff789100bcbcc461c Mon Sep 17 00:00:00 2001 From: Hubert Chathi Date: Mon, 5 Nov 2018 17:59:29 -0500 Subject: handle empty backups according to latest spec proposal (#4123) fixes #4056 --- changelog.d/4123.bugfix | 1 + synapse/handlers/e2e_room_keys.py | 22 ++++++--- synapse/rest/client/v2_alpha/room_keys.py | 21 ++++++-- tests/handlers/test_e2e_room_keys.py | 79 +++++++++++++++---------------- 4 files changed, 71 insertions(+), 52 deletions(-) create mode 100644 changelog.d/4123.bugfix (limited to 'synapse/handlers') diff --git a/changelog.d/4123.bugfix b/changelog.d/4123.bugfix new file mode 100644 index 0000000000..b82bc2aad3 --- /dev/null +++ b/changelog.d/4123.bugfix @@ -0,0 +1 @@ +fix return code of empty key backups diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 5edb3cfe04..42b040375f 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -19,7 +19,7 @@ from six import iteritems from twisted.internet import defer -from synapse.api.errors import RoomKeysVersionError, StoreError, SynapseError +from synapse.api.errors import NotFoundError, RoomKeysVersionError, StoreError from synapse.util.async_helpers import Linearizer logger = logging.getLogger(__name__) @@ -55,6 +55,8 @@ class E2eRoomKeysHandler(object): room_id(string): room ID to get keys for, for None to get keys for all rooms session_id(string): session ID to get keys for, for None to get keys for all sessions + Raises: + NotFoundError: if the backup version does not exist Returns: A deferred list of dicts giving the session_data and message metadata for these room keys. 
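As a point of reference, the intended semantics can be summarised by a hypothetical caller (the `fetch_all_backup_keys` helper below is illustrative only, and assumes `room_id`/`session_id` may be passed as `None`, as the docstring above describes): an unknown backup version still surfaces as `NotFoundError` (a 404), while a backup that exists but contains no keys yields an empty result rather than an error.

```python
# Hypothetical caller illustrating the behaviour this patch aims for.

from twisted.internet import defer

from synapse.api.errors import NotFoundError


@defer.inlineCallbacks
def fetch_all_backup_keys(handler, user_id, version):
    try:
        result = yield handler.get_room_keys(
            user_id, version, room_id=None, session_id=None,
        )
    except NotFoundError:
        # The backup version itself does not exist: still a 404.
        defer.returnValue(None)

    # An existing but empty backup comes back as {"rooms": {}} instead of a 404.
    defer.returnValue(result)
```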
@@ -63,13 +65,19 @@ class E2eRoomKeysHandler(object): # we deliberately take the lock to get keys so that changing the version # works atomically with (yield self._upload_linearizer.queue(user_id)): + # make sure the backup version exists + try: + yield self.store.get_e2e_room_keys_version_info(user_id, version) + except StoreError as e: + if e.code == 404: + raise NotFoundError("Unknown backup version") + else: + raise + results = yield self.store.get_e2e_room_keys( user_id, version, room_id, session_id ) - if results['rooms'] == {}: - raise SynapseError(404, "No room_keys found") - defer.returnValue(results) @defer.inlineCallbacks @@ -120,7 +128,7 @@ class E2eRoomKeysHandler(object): } Raises: - SynapseError: with code 404 if there are no versions defined + NotFoundError: if there are no versions defined RoomKeysVersionError: if the uploaded version is not the current version """ @@ -134,7 +142,7 @@ class E2eRoomKeysHandler(object): version_info = yield self.store.get_e2e_room_keys_version_info(user_id) except StoreError as e: if e.code == 404: - raise SynapseError(404, "Version '%s' not found" % (version,)) + raise NotFoundError("Version '%s' not found" % (version,)) else: raise @@ -148,7 +156,7 @@ class E2eRoomKeysHandler(object): raise RoomKeysVersionError(current_version=version_info['version']) except StoreError as e: if e.code == 404: - raise SynapseError(404, "Version '%s' not found" % (version,)) + raise NotFoundError("Version '%s' not found" % (version,)) else: raise diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index 45b5817d8b..ab3f1bd21a 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -17,7 +17,7 @@ import logging from twisted.internet import defer -from synapse.api.errors import Codes, SynapseError +from synapse.api.errors import Codes, NotFoundError, SynapseError from synapse.http.servlet import ( RestServlet, parse_json_object_from_request, @@ -208,10 +208,25 @@ class RoomKeysServlet(RestServlet): user_id, version, room_id, session_id ) + # Convert room_keys to the right format to return. if session_id: - room_keys = room_keys['rooms'][room_id]['sessions'][session_id] + # If the client requests a specific session, but that session was + # not backed up, then return an M_NOT_FOUND. + if room_keys['rooms'] == {}: + raise NotFoundError("No room_keys found") + else: + room_keys = room_keys['rooms'][room_id]['sessions'][session_id] elif room_id: - room_keys = room_keys['rooms'][room_id] + # If the client requests all sessions from a room, but no sessions + # are found, then return an empty result rather than an error, so + # that clients don't have to handle an error condition, and an + # empty result is valid. (Similarly if the client requests all + # sessions from the backup, but in that case, room_keys is already + # in the right format, so we don't need to do anything about it.) 
+ if room_keys['rooms'] == {}: + room_keys = {'sessions': {}} + else: + room_keys = room_keys['rooms'][room_id] defer.returnValue((200, room_keys)) diff --git a/tests/handlers/test_e2e_room_keys.py b/tests/handlers/test_e2e_room_keys.py index 9e08eac0a5..c8994f416e 100644 --- a/tests/handlers/test_e2e_room_keys.py +++ b/tests/handlers/test_e2e_room_keys.py @@ -169,8 +169,8 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): self.assertEqual(res, 404) @defer.inlineCallbacks - def test_get_missing_room_keys(self): - """Check that we get a 404 on querying missing room_keys + def test_get_missing_backup(self): + """Check that we get a 404 on querying missing backup """ res = None try: @@ -179,19 +179,20 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): res = e.code self.assertEqual(res, 404) - # check we also get a 404 even if the version is valid + @defer.inlineCallbacks + def test_get_missing_room_keys(self): + """Check we get an empty response from an empty backup + """ version = yield self.handler.create_version(self.local_user, { "algorithm": "m.megolm_backup.v1", "auth_data": "first_version_auth_data", }) self.assertEqual(version, "1") - res = None - try: - yield self.handler.get_room_keys(self.local_user, version) - except errors.SynapseError as e: - res = e.code - self.assertEqual(res, 404) + res = yield self.handler.get_room_keys(self.local_user, version) + self.assertDictEqual(res, { + "rooms": {} + }) # TODO: test the locking semantics when uploading room_keys, # although this is probably best done in sytest @@ -345,17 +346,15 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): # check for bulk-delete yield self.handler.upload_room_keys(self.local_user, version, room_keys) yield self.handler.delete_room_keys(self.local_user, version) - res = None - try: - yield self.handler.get_room_keys( - self.local_user, - version, - room_id="!abc:matrix.org", - session_id="c0ff33", - ) - except errors.SynapseError as e: - res = e.code - self.assertEqual(res, 404) + res = yield self.handler.get_room_keys( + self.local_user, + version, + room_id="!abc:matrix.org", + session_id="c0ff33", + ) + self.assertDictEqual(res, { + "rooms": {} + }) # check for bulk-delete per room yield self.handler.upload_room_keys(self.local_user, version, room_keys) @@ -364,17 +363,15 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): version, room_id="!abc:matrix.org", ) - res = None - try: - yield self.handler.get_room_keys( - self.local_user, - version, - room_id="!abc:matrix.org", - session_id="c0ff33", - ) - except errors.SynapseError as e: - res = e.code - self.assertEqual(res, 404) + res = yield self.handler.get_room_keys( + self.local_user, + version, + room_id="!abc:matrix.org", + session_id="c0ff33", + ) + self.assertDictEqual(res, { + "rooms": {} + }) # check for bulk-delete per session yield self.handler.upload_room_keys(self.local_user, version, room_keys) @@ -384,14 +381,12 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): room_id="!abc:matrix.org", session_id="c0ff33", ) - res = None - try: - yield self.handler.get_room_keys( - self.local_user, - version, - room_id="!abc:matrix.org", - session_id="c0ff33", - ) - except errors.SynapseError as e: - res = e.code - self.assertEqual(res, 404) + res = yield self.handler.get_room_keys( + self.local_user, + version, + room_id="!abc:matrix.org", + session_id="c0ff33", + ) + self.assertDictEqual(res, { + "rooms": {} + }) -- cgit 1.4.1 From 0f5e51f726756318f355d988856730a9930e2d2f Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Tue, 6 Nov 
2018 03:32:34 -0700 Subject: Add config variables for enabling terms auth and the policy name (#4142) So people can still collect consent the old way if they want to. --- changelog.d/4004.feature | 2 +- changelog.d/4133.feature | 2 +- changelog.d/4142.feature | 1 + docs/consent_tracking.md | 40 ++++++++++++++++++++++++++++---- synapse/config/consent_config.py | 18 ++++++++++++++ synapse/handlers/auth.py | 2 +- synapse/rest/client/v2_alpha/register.py | 2 +- synapse/rest/consent/consent_resource.py | 2 +- tests/test_terms_auth.py | 5 ++-- tests/utils.py | 2 ++ 10 files changed, 65 insertions(+), 11 deletions(-) create mode 100644 changelog.d/4142.feature (limited to 'synapse/handlers') diff --git a/changelog.d/4004.feature b/changelog.d/4004.feature index ef5cdaf5ec..89975f4c6e 100644 --- a/changelog.d/4004.feature +++ b/changelog.d/4004.feature @@ -1 +1 @@ -Add `m.login.terms` to the registration flow when consent tracking is enabled. **This makes the template arguments conditionally optional on a new `public_version` variable - update your privacy templates to support this.** +Include flags to optionally add `m.login.terms` to the registration flow when consent tracking is enabled. diff --git a/changelog.d/4133.feature b/changelog.d/4133.feature index ef5cdaf5ec..89975f4c6e 100644 --- a/changelog.d/4133.feature +++ b/changelog.d/4133.feature @@ -1 +1 @@ -Add `m.login.terms` to the registration flow when consent tracking is enabled. **This makes the template arguments conditionally optional on a new `public_version` variable - update your privacy templates to support this.** +Include flags to optionally add `m.login.terms` to the registration flow when consent tracking is enabled. diff --git a/changelog.d/4142.feature b/changelog.d/4142.feature new file mode 100644 index 0000000000..89975f4c6e --- /dev/null +++ b/changelog.d/4142.feature @@ -0,0 +1 @@ +Include flags to optionally add `m.login.terms` to the registration flow when consent tracking is enabled. diff --git a/docs/consent_tracking.md b/docs/consent_tracking.md index 3634d13d4f..c586b5f0b6 100644 --- a/docs/consent_tracking.md +++ b/docs/consent_tracking.md @@ -81,9 +81,40 @@ should be a matter of `pip install Jinja2`. On debian, try `apt-get install python-jinja2`. Once this is complete, and the server has been restarted, try visiting -`https:///_matrix/consent`. If correctly configured, you should see a -default policy document. It is now possible to manually construct URIs where -users can give their consent. +`https:///_matrix/consent`. If correctly configured, this should give +an error "Missing string query parameter 'u'". It is now possible to manually +construct URIs where users can give their consent. + +### Enabling consent tracking at registration + +1. Add the following to your configuration: + + ```yaml + user_consent: + require_at_registration: true + policy_name: "Privacy Policy" # or whatever you'd like to call the policy + ``` + +2. In your consent templates, make use of the `public_version` variable to + see if an unauthenticated user is viewing the page. This is typically + wrapped around the form that would be used to actually agree to the document: + + ``` + {% if not public_version %} + +
+      <!-- The variables used here are only provided when the 'u' param is given to the homeserver -->
+      <form method="post" action="consent">
+        <input type="hidden" name="v" value="{{version}}"/>
+        <input type="hidden" name="u" value="{{user}}"/>
+        <input type="hidden" name="h" value="{{userhmac}}"/>
+        <input type="submit" value="Sure thing!"/>
+      </form>
+ {% endif %} + ``` + +3. Restart Synapse to apply the changes. + +Visiting `https:///_matrix/consent` should now give you a view of the privacy +document. This is what users will be able to see when registering for accounts. ### Constructing the consent URI @@ -108,7 +139,8 @@ query parameters: Note that not providing a `u` parameter will be interpreted as wanting to view the document from an unauthenticated perspective, such as prior to registration. -Therefore, the `h` parameter is not required in this scenario. +Therefore, the `h` parameter is not required in this scenario. To enable this +behaviour, set `require_at_registration` to `true` in your `user_consent` config. Sending users a server notice asking them to agree to the policy diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py index e22c731aad..f193a090ae 100644 --- a/synapse/config/consent_config.py +++ b/synapse/config/consent_config.py @@ -42,6 +42,14 @@ DEFAULT_CONFIG = """\ # until the user consents to the privacy policy. The value of the setting is # used as the text of the error. # +# 'require_at_registration', if enabled, will add a step to the registration +# process, similar to how captcha works. Users will be required to accept the +# policy before their account is created. +# +# 'policy_name' is the display name of the policy users will see when registering +# for an account. Has no effect unless `require_at_registration` is enabled. +# Defaults to "Privacy Policy". +# # user_consent: # template_dir: res/templates/privacy # version: 1.0 @@ -54,6 +62,8 @@ DEFAULT_CONFIG = """\ # block_events_error: >- # To continue using this homeserver you must review and agree to the # terms and conditions at %(consent_uri)s +# require_at_registration: False +# policy_name: Privacy Policy # """ @@ -67,6 +77,8 @@ class ConsentConfig(Config): self.user_consent_server_notice_content = None self.user_consent_server_notice_to_guests = False self.block_events_without_consent_error = None + self.user_consent_at_registration = False + self.user_consent_policy_name = "Privacy Policy" def read_config(self, config): consent_config = config.get("user_consent") @@ -83,6 +95,12 @@ class ConsentConfig(Config): self.user_consent_server_notice_to_guests = bool(consent_config.get( "send_server_notice_to_guests", False, )) + self.user_consent_at_registration = bool(consent_config.get( + "require_at_registration", False, + )) + self.user_consent_policy_name = consent_config.get( + "policy_name", "Privacy Policy", + ) def default_config(self, **kwargs): return DEFAULT_CONFIG diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 85fc1fc525..a958c45271 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -472,7 +472,7 @@ class AuthHandler(BaseHandler): "privacy_policy": { "version": self.hs.config.user_consent_version, "en": { - "name": "Privacy Policy", + "name": self.hs.config.user_consent_policy_name, "url": "%s/_matrix/consent?v=%s" % ( self.hs.config.public_baseurl, self.hs.config.user_consent_version, diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index c5214330ad..0515715f7c 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -360,7 +360,7 @@ class RegisterRestServlet(RestServlet): ]) # Append m.login.terms to all flows if we're requiring consent - if self.hs.config.block_events_without_consent_error is not None: + if self.hs.config.user_consent_at_registration: new_flows = [] for 
flow in flows: flow.append(LoginType.TERMS) diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index c85e84b465..e0f7de5d5c 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -142,7 +142,7 @@ class ConsentResource(Resource): userhmac = None has_consented = False public_version = username == "" - if not public_version: + if not public_version or not self.hs.config.user_consent_at_registration: userhmac = parse_string(request, "h", required=True, encoding=None) self._check_hash(username, userhmac) diff --git a/tests/test_terms_auth.py b/tests/test_terms_auth.py index 7deab5266f..0b71c6feb9 100644 --- a/tests/test_terms_auth.py +++ b/tests/test_terms_auth.py @@ -42,7 +42,8 @@ class TermsTestCase(unittest.HomeserverTestCase): hs.config.enable_registration_captcha = False def test_ui_auth(self): - self.hs.config.block_events_without_consent_error = True + self.hs.config.user_consent_at_registration = True + self.hs.config.user_consent_policy_name = "My Cool Privacy Policy" self.hs.config.public_baseurl = "https://example.org" self.hs.config.user_consent_version = "1.0" @@ -66,7 +67,7 @@ class TermsTestCase(unittest.HomeserverTestCase): "policies": { "privacy_policy": { "en": { - "name": "Privacy Policy", + "name": "My Cool Privacy Policy", "url": "https://example.org/_matrix/consent?v=1.0", }, "version": "1.0" diff --git a/tests/utils.py b/tests/utils.py index 565bb60d08..67ab916f30 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -123,6 +123,8 @@ def default_config(name): config.user_directory_search_all_users = False config.user_consent_server_notice_content = None config.block_events_without_consent_error = None + config.user_consent_at_registration = False + config.user_consent_policy_name = "Privacy Policy" config.media_storage_providers = [] config.autocreate_auto_join_rooms = True config.auto_join_rooms = [] -- cgit 1.4.1 From 30dd27afff3889293ec05b82c7f7e05d61c5b609 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 9 Nov 2018 11:34:45 +0000 Subject: Simplify to always drop events if server isn't in the room --- synapse/handlers/federation.py | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 9ca5fd8724..e859ff5ffa 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -202,27 +202,22 @@ class FederationHandler(BaseHandler): self.room_queues[room_id].append((pdu, origin)) return - # If we're no longer in the room just ditch the event entirely. This - # is probably an old server that has come back and thinks we're still - # in the room (or we've been rejoined to the room by a state reset). + # If we're not in the room just ditch the event entirely. This is + # probably an old server that has come back and thinks we're still in + # the room (or we've been rejoined to the room by a state reset). # - # If we were never in the room then maybe our database got vaped and - # we should check if we *are* in fact in the room. If we are then we - # can magically rejoin the room. + # Note that if we were never in the room then we would have already + # dropped the event, since we wouldn't know the room version. 
is_in_room = yield self.auth.check_host_in_room( room_id, self.server_name ) if not is_in_room: - was_in_room = yield self.store.was_host_joined( - pdu.room_id, self.server_name, + logger.info( + "[%s %s] Ignoring PDU from %s as we're not in the room", + room_id, event_id, origin, ) - if was_in_room: - logger.info( - "[%s %s] Ignoring PDU from %s as we've left the room", - room_id, event_id, origin, - ) - defer.returnValue(None) + defer.returnValue(None) state = None auth_chain = [] -- cgit 1.4.1 From dc59ad533414d63a2b87da705d9792fca4ee8b36 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 9 Nov 2018 14:58:09 +0000 Subject: Remove hack to support rejoining rooms --- synapse/handlers/federation.py | 116 +++++++++++++++-------------------------- 1 file changed, 42 insertions(+), 74 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index e859ff5ffa..a3bb864bb2 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -552,86 +552,54 @@ class FederationHandler(BaseHandler): room_id, event_id, event, ) - # FIXME (erikj): Awful hack to make the case where we are not currently - # in the room work - # If state and auth_chain are None, then we don't need to do this check - # as we already know we have enough state in the DB to handle this - # event. - if state and auth_chain and not event.internal_metadata.is_outlier(): - is_in_room = yield self.auth.check_host_in_room( - room_id, - self.server_name - ) - else: - is_in_room = True - - if not is_in_room: - logger.info( - "[%s %s] Got event for room we're not in", - room_id, event_id, - ) - - try: - yield self._persist_auth_tree( - origin, auth_chain, state, event - ) - except AuthError as e: - raise FederationError( - "ERROR", - e.code, - e.msg, - affected=event_id, - ) - - else: - event_ids = set() - if state: - event_ids |= {e.event_id for e in state} - if auth_chain: - event_ids |= {e.event_id for e in auth_chain} + event_ids = set() + if state: + event_ids |= {e.event_id for e in state} + if auth_chain: + event_ids |= {e.event_id for e in auth_chain} - seen_ids = yield self.store.have_seen_events(event_ids) + seen_ids = yield self.store.have_seen_events(event_ids) - if state and auth_chain is not None: - # If we have any state or auth_chain given to us by the replication - # layer, then we should handle them (if we haven't before.) + if state and auth_chain is not None: + # If we have any state or auth_chain given to us by the replication + # layer, then we should handle them (if we haven't before.) 
- event_infos = [] + event_infos = [] - for e in itertools.chain(auth_chain, state): - if e.event_id in seen_ids: - continue - e.internal_metadata.outlier = True - auth_ids = e.auth_event_ids() - auth = { - (e.type, e.state_key): e for e in auth_chain - if e.event_id in auth_ids or e.type == EventTypes.Create - } - event_infos.append({ - "event": e, - "auth_events": auth, - }) - seen_ids.add(e.event_id) + for e in itertools.chain(auth_chain, state): + if e.event_id in seen_ids: + continue + e.internal_metadata.outlier = True + auth_ids = e.auth_event_ids() + auth = { + (e.type, e.state_key): e for e in auth_chain + if e.event_id in auth_ids or e.type == EventTypes.Create + } + event_infos.append({ + "event": e, + "auth_events": auth, + }) + seen_ids.add(e.event_id) - logger.info( - "[%s %s] persisting newly-received auth/state events %s", - room_id, event_id, [e["event"].event_id for e in event_infos] - ) - yield self._handle_new_events(origin, event_infos) + logger.info( + "[%s %s] persisting newly-received auth/state events %s", + room_id, event_id, [e["event"].event_id for e in event_infos] + ) + yield self._handle_new_events(origin, event_infos) - try: - context = yield self._handle_new_event( - origin, - event, - state=state, - ) - except AuthError as e: - raise FederationError( - "ERROR", - e.code, - e.msg, - affected=event.event_id, - ) + try: + context = yield self._handle_new_event( + origin, + event, + state=state, + ) + except AuthError as e: + raise FederationError( + "ERROR", + e.code, + e.msg, + affected=event.event_id, + ) room = yield self.store.get_room(room_id) -- cgit 1.4.1 From ab4526a153c77cca94ee0c8620c9642f6e5f7926 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Thu, 15 Nov 2018 20:41:53 -0700 Subject: Remove duplicate slashes in generated consent URLs --- synapse/handlers/auth.py | 2 +- synapse/rest/client/v2_alpha/auth.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index a958c45271..c6e89db4bc 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -473,7 +473,7 @@ class AuthHandler(BaseHandler): "version": self.hs.config.user_consent_version, "en": { "name": self.hs.config.user_consent_policy_name, - "url": "%s/_matrix/consent?v=%s" % ( + "url": "%s_matrix/consent?v=%s" % ( self.hs.config.public_baseurl, self.hs.config.user_consent_version, ), diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py index a8d8ed6590..c39f53b987 100644 --- a/synapse/rest/client/v2_alpha/auth.py +++ b/synapse/rest/client/v2_alpha/auth.py @@ -161,7 +161,7 @@ class AuthRestServlet(RestServlet): html = TERMS_TEMPLATE % { 'session': session, - 'terms_url': "%s/_matrix/consent?v=%s" % ( + 'terms_url': "%s_matrix/consent?v=%s" % ( self.hs.config.public_baseurl, self.hs.config.user_consent_version, ), @@ -242,7 +242,7 @@ class AuthRestServlet(RestServlet): else: html = TERMS_TEMPLATE % { 'session': session, - 'terms_url': "%s/_matrix/consent?v=%s" % ( + 'terms_url': "%s_matrix/consent?v=%s" % ( self.hs.config.public_baseurl, self.hs.config.user_consent_version, ), -- cgit 1.4.1 From 7039ece8fb633b97cd6166be636bae71fb3aa3c6 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 28 Nov 2018 11:24:57 +0000 Subject: Neilj/fix autojoin (#4223) * Fix auto join failures for servers that require user consent * Fix auto join failures for servers that require user consent --- changelog.d/4223.bugfix | 1 + synapse/handlers/register.py | 23 
+++++++++++++++++++++-- synapse/rest/client/v2_alpha/register.py | 1 + synapse/rest/consent/consent_resource.py | 2 ++ tests/handlers/test_register.py | 12 +++++++++++- 5 files changed, 36 insertions(+), 3 deletions(-) create mode 100644 changelog.d/4223.bugfix (limited to 'synapse/handlers') diff --git a/changelog.d/4223.bugfix b/changelog.d/4223.bugfix new file mode 100644 index 0000000000..bab591a765 --- /dev/null +++ b/changelog.d/4223.bugfix @@ -0,0 +1 @@ +Fix auto join failures for servers that require user consent diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index d2beb275cf..015909bb26 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -217,7 +217,19 @@ class RegistrationHandler(BaseHandler): user_id = None token = None attempts += 1 + if not self.hs.config.user_consent_at_registration: + yield self._auto_join_rooms(user_id) + defer.returnValue((user_id, token)) + + @defer.inlineCallbacks + def _auto_join_rooms(self, user_id): + """Automatically joins users to auto join rooms - creating the room in the first place + if the user is the first to be created. + + Args: + user_id(str): The user to join + """ # auto-join the user to any rooms we're supposed to dump them into fake_requester = create_requester(user_id) @@ -226,7 +238,6 @@ class RegistrationHandler(BaseHandler): if self.hs.config.autocreate_auto_join_rooms: count = yield self.store.count_all_users() should_auto_create_rooms = count == 1 - for r in self.hs.config.auto_join_rooms: try: if should_auto_create_rooms: @@ -256,7 +267,15 @@ class RegistrationHandler(BaseHandler): except Exception as e: logger.error("Failed to join new user to %r: %r", r, e) - defer.returnValue((user_id, token)) + @defer.inlineCallbacks + def post_consent_actions(self, user_id): + """A series of registration actions that can only be carried out once consent + has been granted + + Args: + user_id (str): The user to join + """ + yield self._auto_join_rooms(user_id) @defer.inlineCallbacks def appservice_register(self, user_localpart, as_token): diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 0515715f7c..aec0c6b075 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -457,6 +457,7 @@ class RegisterRestServlet(RestServlet): yield self.store.user_set_consent_version( registered_user_id, self.hs.config.user_consent_version, ) + yield self.registration_handler.post_consent_actions(registered_user_id) defer.returnValue((200, return_dict)) diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index ad525b22e1..80611cfe84 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -89,6 +89,7 @@ class ConsentResource(Resource): self.hs = hs self.store = hs.get_datastore() + self.registration_handler = hs.get_handlers().registration_handler # this is required by the request_handler wrapper self.clock = hs.get_clock() @@ -199,6 +200,7 @@ class ConsentResource(Resource): if e.code != 404: raise raise NotFoundError("Unknown user") + yield self.registration_handler.post_consent_actions(qualified_user_id) try: self._render_template(request, "success.html") diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 3e9a190727..90a2a76475 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -150,7 +150,6 @@ class RegistrationTestCase(unittest.TestCase): 
self.hs.config.auto_join_rooms = [room_alias_str] res = yield self.handler.register(localpart='jeff') rooms = yield self.store.get_rooms_for_user(res[0]) - directory_handler = self.hs.get_handlers().directory_handler room_alias = RoomAlias.from_string(room_alias_str) room_id = yield directory_handler.get_association(room_alias) @@ -184,3 +183,14 @@ class RegistrationTestCase(unittest.TestCase): res = yield self.handler.register(localpart='jeff') rooms = yield self.store.get_rooms_for_user(res[0]) self.assertEqual(len(rooms), 0) + + @defer.inlineCallbacks + def test_auto_create_auto_join_where_no_consent(self): + self.hs.config.user_consent_at_registration = True + self.hs.config.block_events_without_consent_error = "Error" + room_alias_str = "#room:test" + self.hs.config.auto_join_rooms = [room_alias_str] + res = yield self.handler.register(localpart='jeff') + yield self.handler.post_consent_actions(res[0]) + rooms = yield self.store.get_rooms_for_user(res[0]) + self.assertEqual(len(rooms), 0) -- cgit 1.4.1 From 158ffb92f1ba0e247fb0a71f0d400655643ae68e Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Tue, 4 Dec 2018 04:01:02 -0700 Subject: Add an option to disable search for homeservers which may not be interested in it (#4230) This is useful for homeservers not intended for users, such as bot-only homeservers or ones that only process IoT data. --- changelog.d/4230.feature | 1 + synapse/config/server.py | 12 +++++++++++- synapse/handlers/search.py | 3 +++ synapse/storage/search.py | 6 ++++++ 4 files changed, 21 insertions(+), 1 deletion(-) create mode 100644 changelog.d/4230.feature (limited to 'synapse/handlers') diff --git a/changelog.d/4230.feature b/changelog.d/4230.feature new file mode 100644 index 0000000000..0ecb1d5ec6 --- /dev/null +++ b/changelog.d/4230.feature @@ -0,0 +1 @@ +Add an option to disable search for homeservers that may not be interested in it. diff --git a/synapse/config/server.py b/synapse/config/server.py index 5ff9ac288d..4a5b902f8e 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -62,6 +62,11 @@ class ServerConfig(Config): # master, potentially causing inconsistency. self.enable_media_repo = config.get("enable_media_repo", True) + # whether to enable search. If disabled, new entries will not be inserted + # into the search tables and they will not be indexed. Users will receive + # errors when attempting to search for messages. + self.enable_search = config.get("enable_search", True) + self.filter_timeline_limit = config.get("filter_timeline_limit", -1) # Whether we should block invites sent to users on this server @@ -384,7 +389,12 @@ class ServerConfig(Config): # mau_limit_reserved_threepids: # - medium: 'email' # address: 'reserved_user@example.com' - + # + # Room searching + # + # If disabled, new messages will not be indexed for searching and users + # will receive errors when searching for messages. Defaults to enabled. 
+ # enable_search: true """ % locals() def read_arguments(self, args): diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 80e7b15de8..ec936bbb4e 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -50,6 +50,9 @@ class SearchHandler(BaseHandler): dict to be returned to the client with results of search """ + if not self.hs.config.enable_search: + raise SynapseError(400, "Search is disabled on this homeserver") + batch_group = None batch_group_key = None batch_token = None diff --git a/synapse/storage/search.py b/synapse/storage/search.py index d5b5df93e6..c6420b2374 100644 --- a/synapse/storage/search.py +++ b/synapse/storage/search.py @@ -45,6 +45,10 @@ class SearchStore(BackgroundUpdateStore): def __init__(self, db_conn, hs): super(SearchStore, self).__init__(db_conn, hs) + + if not hs.config.enable_search: + return + self.register_background_update_handler( self.EVENT_SEARCH_UPDATE_NAME, self._background_reindex_search ) @@ -316,6 +320,8 @@ class SearchStore(BackgroundUpdateStore): entries (iterable[SearchEntry]): entries to be added to the table """ + if not self.hs.config.enable_search: + return if isinstance(self.database_engine, PostgresEngine): sql = ( "INSERT INTO event_search" -- cgit 1.4.1 From b0c24a66ec7ede1c70e082fc1a652fb7b61bae9d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 6 Dec 2018 09:44:38 +0100 Subject: Rip out half-implemented m.login.saml2 support (#4265) * Rip out half-implemented m.login.saml2 support This was implemented in an odd way that left most of the work to the client, in a way that I really didn't understand. It's going to be a pain to maintain, so let's start by ripping it out. * drop undocumented dependency on dateutil It turns out we were relying on dateutil being pulled in transitively by pysaml2. There's no need for that bloat. 
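A usage note for the enable_search option introduced in #4230 above: disabling search is a single setting in the homeserver config. A minimal sketch, assuming a standard homeserver.yaml layout (only the option name and its meaning come from the patch, the rest is illustrative):

    # homeserver.yaml
    # Stop indexing new messages and reject /search requests, e.g. for
    # bot-only or IoT-only deployments. Defaults to true (search enabled).
    enable_search: false

Per the config comment in the patch, this only stops new entries being inserted into the search tables; users then receive errors when attempting to search for messages.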
--- changelog.d/4265.feature | 1 + synapse/config/homeserver.py | 3 +- synapse/config/saml2.py | 55 -------------------------------- synapse/handlers/register.py | 29 ----------------- synapse/python_dependencies.py | 1 - synapse/rest/client/v1/login.py | 69 ++--------------------------------------- tests/handlers/test_register.py | 15 --------- 7 files changed, 4 insertions(+), 169 deletions(-) create mode 100644 changelog.d/4265.feature delete mode 100644 synapse/config/saml2.py (limited to 'synapse/handlers') diff --git a/changelog.d/4265.feature b/changelog.d/4265.feature new file mode 100644 index 0000000000..da36986e2b --- /dev/null +++ b/changelog.d/4265.feature @@ -0,0 +1 @@ +Rework SAML2 authentication diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py index 10dd40159f..36182267c2 100644 --- a/synapse/config/homeserver.py +++ b/synapse/config/homeserver.py @@ -32,7 +32,6 @@ from .ratelimiting import RatelimitConfig from .registration import RegistrationConfig from .repository import ContentRepositoryConfig from .room_directory import RoomDirectoryConfig -from .saml2 import SAML2Config from .server import ServerConfig from .server_notices_config import ServerNoticesConfig from .spam_checker import SpamCheckerConfig @@ -45,7 +44,7 @@ from .workers import WorkerConfig class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig, RatelimitConfig, ContentRepositoryConfig, CaptchaConfig, VoipConfig, RegistrationConfig, MetricsConfig, ApiConfig, - AppServiceConfig, KeyConfig, SAML2Config, CasConfig, + AppServiceConfig, KeyConfig, CasConfig, JWTConfig, PasswordConfig, EmailConfig, WorkerConfig, PasswordAuthProviderConfig, PushConfig, SpamCheckerConfig, GroupsConfig, UserDirectoryConfig, diff --git a/synapse/config/saml2.py b/synapse/config/saml2.py deleted file mode 100644 index 8d7f443021..0000000000 --- a/synapse/config/saml2.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2015 Ericsson -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ._base import Config - - -class SAML2Config(Config): - """SAML2 Configuration - Synapse uses pysaml2 libraries for providing SAML2 support - - config_path: Path to the sp_conf.py configuration file - idp_redirect_url: Identity provider URL which will redirect - the user back to /login/saml2 with proper info. 
- - sp_conf.py file is something like: - https://github.com/rohe/pysaml2/blob/master/example/sp-repoze/sp_conf.py.example - - More information: https://pythonhosted.org/pysaml2/howto/config.html - """ - - def read_config(self, config): - saml2_config = config.get("saml2_config", None) - if saml2_config: - self.saml2_enabled = saml2_config.get("enabled", True) - self.saml2_config_path = saml2_config["config_path"] - self.saml2_idp_redirect_url = saml2_config["idp_redirect_url"] - else: - self.saml2_enabled = False - self.saml2_config_path = None - self.saml2_idp_redirect_url = None - - def default_config(self, config_dir_path, server_name, **kwargs): - return """ - # Enable SAML2 for registration and login. Uses pysaml2 - # config_path: Path to the sp_conf.py configuration file - # idp_redirect_url: Identity provider URL which will redirect - # the user back to /login/saml2 with proper info. - # See pysaml2 docs for format of config. - #saml2_config: - # enabled: true - # config_path: "%s/sp_conf.py" - # idp_redirect_url: "http://%s/idp" - """ % (config_dir_path, server_name) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 015909bb26..0f87c4610e 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -327,35 +327,6 @@ class RegistrationHandler(BaseHandler): else: logger.info("Valid captcha entered from %s", ip) - @defer.inlineCallbacks - def register_saml2(self, localpart): - """ - Registers email_id as SAML2 Based Auth. - """ - if types.contains_invalid_mxid_characters(localpart): - raise SynapseError( - 400, - "User ID can only contain characters a-z, 0-9, or '=_-./'", - ) - yield self.auth.check_auth_blocking() - user = UserID(localpart, self.hs.hostname) - user_id = user.to_string() - - yield self.check_user_id_not_appservice_exclusive(user_id) - token = self.macaroon_gen.generate_access_token(user_id) - try: - yield self.store.register( - user_id=user_id, - token=token, - password_hash=None, - create_profile_with_localpart=user.localpart, - ) - except Exception as e: - yield self.store.add_access_token_to_user(user_id, token) - # Ignore Registration errors - logger.exception(e) - defer.returnValue((user_id, token)) - @defer.inlineCallbacks def register_email(self, threepidCreds): """ diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index ca62ee7637..75ba9947cc 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -53,7 +53,6 @@ REQUIREMENTS = { "pillow>=3.1.2": ["PIL"], "sortedcontainers>=1.4.4": ["sortedcontainers"], "psutil>=2.0.0": ["psutil>=2.0.0"], - "pysaml2>=3.0.0": ["saml2"], "pymacaroons-pynacl>=0.9.3": ["pymacaroons"], "msgpack-python>=0.4.2": ["msgpack"], "phonenumbers>=8.2.0": ["phonenumbers"], diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index f6b4a85e40..011e84e8b1 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -18,10 +18,6 @@ import xml.etree.ElementTree as ET from six.moves import urllib -from canonicaljson import json -from saml2 import BINDING_HTTP_POST, config -from saml2.client import Saml2Client - from twisted.internet import defer from twisted.web.client import PartialDownloadError @@ -81,7 +77,6 @@ def login_id_thirdparty_from_phone(identifier): class LoginRestServlet(ClientV1RestServlet): PATTERNS = client_path_patterns("/login$") - SAML2_TYPE = "m.login.saml2" CAS_TYPE = "m.login.cas" SSO_TYPE = "m.login.sso" TOKEN_TYPE = "m.login.token" @@ -89,8 +84,6 @@ class 
LoginRestServlet(ClientV1RestServlet): def __init__(self, hs): super(LoginRestServlet, self).__init__(hs) - self.idp_redirect_url = hs.config.saml2_idp_redirect_url - self.saml2_enabled = hs.config.saml2_enabled self.jwt_enabled = hs.config.jwt_enabled self.jwt_secret = hs.config.jwt_secret self.jwt_algorithm = hs.config.jwt_algorithm @@ -103,8 +96,6 @@ class LoginRestServlet(ClientV1RestServlet): flows = [] if self.jwt_enabled: flows.append({"type": LoginRestServlet.JWT_TYPE}) - if self.saml2_enabled: - flows.append({"type": LoginRestServlet.SAML2_TYPE}) if self.cas_enabled: flows.append({"type": LoginRestServlet.SSO_TYPE}) @@ -134,18 +125,8 @@ class LoginRestServlet(ClientV1RestServlet): def on_POST(self, request): login_submission = parse_json_object_from_request(request) try: - if self.saml2_enabled and (login_submission["type"] == - LoginRestServlet.SAML2_TYPE): - relay_state = "" - if "relay_state" in login_submission: - relay_state = "&RelayState=" + urllib.parse.quote( - login_submission["relay_state"]) - result = { - "uri": "%s%s" % (self.idp_redirect_url, relay_state) - } - defer.returnValue((200, result)) - elif self.jwt_enabled and (login_submission["type"] == - LoginRestServlet.JWT_TYPE): + if self.jwt_enabled and (login_submission["type"] == + LoginRestServlet.JWT_TYPE): result = yield self.do_jwt_login(login_submission) defer.returnValue(result) elif login_submission["type"] == LoginRestServlet.TOKEN_TYPE: @@ -345,50 +326,6 @@ class LoginRestServlet(ClientV1RestServlet): ) -class SAML2RestServlet(ClientV1RestServlet): - PATTERNS = client_path_patterns("/login/saml2", releases=()) - - def __init__(self, hs): - super(SAML2RestServlet, self).__init__(hs) - self.sp_config = hs.config.saml2_config_path - self.handlers = hs.get_handlers() - - @defer.inlineCallbacks - def on_POST(self, request): - saml2_auth = None - try: - conf = config.SPConfig() - conf.load_file(self.sp_config) - SP = Saml2Client(conf) - saml2_auth = SP.parse_authn_request_response( - request.args['SAMLResponse'][0], BINDING_HTTP_POST) - except Exception as e: # Not authenticated - logger.exception(e) - if saml2_auth and saml2_auth.status_ok() and not saml2_auth.not_signed: - username = saml2_auth.name_id.text - handler = self.handlers.registration_handler - (user_id, token) = yield handler.register_saml2(username) - # Forward to the RelayState callback along with ava - if 'RelayState' in request.args: - request.redirect(urllib.parse.unquote( - request.args['RelayState'][0]) + - '?status=authenticated&access_token=' + - token + '&user_id=' + user_id + '&ava=' + - urllib.quote(json.dumps(saml2_auth.ava))) - finish_request(request) - defer.returnValue(None) - defer.returnValue((200, {"status": "authenticated", - "user_id": user_id, "token": token, - "ava": saml2_auth.ava})) - elif 'RelayState' in request.args: - request.redirect(urllib.parse.unquote( - request.args['RelayState'][0]) + - '?status=not_authenticated') - finish_request(request) - defer.returnValue(None) - defer.returnValue((200, {"status": "not_authenticated"})) - - class CasRedirectServlet(RestServlet): PATTERNS = client_path_patterns("/login/(cas|sso)/redirect") @@ -517,8 +454,6 @@ class CasTicketServlet(ClientV1RestServlet): def register_servlets(hs, http_server): LoginRestServlet(hs).register(http_server) - if hs.config.saml2_enabled: - SAML2RestServlet(hs).register(http_server) if hs.config.cas_enabled: CasRedirectServlet(hs).register(http_server) CasTicketServlet(hs).register(http_server) diff --git a/tests/handlers/test_register.py 
b/tests/handlers/test_register.py index 90a2a76475..23bce6ee7d 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -129,21 +129,6 @@ class RegistrationTestCase(unittest.TestCase): with self.assertRaises(ResourceLimitError): yield self.handler.register(localpart="local_part") - @defer.inlineCallbacks - def test_register_saml2_mau_blocked(self): - self.hs.config.limit_usage_by_mau = True - self.store.get_monthly_active_count = Mock( - return_value=defer.succeed(self.lots_of_users) - ) - with self.assertRaises(ResourceLimitError): - yield self.handler.register_saml2(localpart="local_part") - - self.store.get_monthly_active_count = Mock( - return_value=defer.succeed(self.hs.config.max_mau_value) - ) - with self.assertRaises(ResourceLimitError): - yield self.handler.register_saml2(localpart="local_part") - @defer.inlineCallbacks def test_auto_create_auto_join_rooms(self): room_alias_str = "#room:test" -- cgit 1.4.1 From ae19a7db8c5eab43858b24453bbbb352f8b6152a Mon Sep 17 00:00:00 2001 From: rkfg Date: Thu, 6 Dec 2018 13:32:05 +0300 Subject: Prevent crash on pagination. --- changelog.d/4263.bugfix | 1 + synapse/handlers/pagination.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/4263.bugfix (limited to 'synapse/handlers') diff --git a/changelog.d/4263.bugfix b/changelog.d/4263.bugfix new file mode 100644 index 0000000000..3dc1d7c732 --- /dev/null +++ b/changelog.d/4263.bugfix @@ -0,0 +1 @@ +Prevent crash on pagination. diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 43f81bd607..f2be6c1185 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -253,7 +253,7 @@ class PaginationHandler(object): ) state = None - if event_filter and event_filter.lazy_load_members(): + if event_filter and event_filter.lazy_load_members() and len(events) > 0: # TODO: remove redundant members # FIXME: we also care about invite targets etc. -- cgit 1.4.1 From c588b9b9e421855a1025e1d8c355818a40508c44 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 7 Dec 2018 13:10:07 +0100 Subject: Factor SSO success handling out of CAS login (#4264) This is mostly factoring out the post-CAS-login code to somewhere we can reuse it for other SSO flows, but it also fixes the userid mapping while we're at it. --- changelog.d/4264.bugfix | 1 + synapse/handlers/auth.py | 13 ++++- synapse/rest/client/v1/login.py | 105 +++++++++++++++++++++++++++++----------- synapse/types.py | 66 +++++++++++++++++++++++++ tests/test_types.py | 31 +++++++++++- 5 files changed, 184 insertions(+), 32 deletions(-) create mode 100644 changelog.d/4264.bugfix (limited to 'synapse/handlers') diff --git a/changelog.d/4264.bugfix b/changelog.d/4264.bugfix new file mode 100644 index 0000000000..b914026932 --- /dev/null +++ b/changelog.d/4264.bugfix @@ -0,0 +1 @@ +Fix CAS login when username is not valid in an MXID diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index c6e89db4bc..2abd9af94f 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -563,10 +563,10 @@ class AuthHandler(BaseHandler): insensitively, but return None if there are multiple inexact matches. 
Args: - (str) user_id: complete @user:id + (unicode|bytes) user_id: complete @user:id Returns: - defer.Deferred: (str) canonical_user_id, or None if zero or + defer.Deferred: (unicode) canonical_user_id, or None if zero or multiple matches """ res = yield self._find_user_id_and_pwd_hash(user_id) @@ -954,6 +954,15 @@ class MacaroonGenerator(object): return macaroon.serialize() def generate_short_term_login_token(self, user_id, duration_in_ms=(2 * 60 * 1000)): + """ + + Args: + user_id (unicode): + duration_in_ms (int): + + Returns: + unicode + """ macaroon = self._generate_base_macaroon(user_id) macaroon.add_first_party_caveat("type = login") now = self.hs.get_clock().time_msec() diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 011e84e8b1..b7c5b58b01 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -23,8 +23,12 @@ from twisted.web.client import PartialDownloadError from synapse.api.errors import Codes, LoginError, SynapseError from synapse.http.server import finish_request -from synapse.http.servlet import RestServlet, parse_json_object_from_request -from synapse.types import UserID +from synapse.http.servlet import ( + RestServlet, + parse_json_object_from_request, + parse_string, +) +from synapse.types import UserID, map_username_to_mxid_localpart from synapse.util.msisdn import phone_number_to_msisdn from .base import ClientV1RestServlet, client_path_patterns @@ -358,17 +362,15 @@ class CasTicketServlet(ClientV1RestServlet): self.cas_server_url = hs.config.cas_server_url self.cas_service_url = hs.config.cas_service_url self.cas_required_attributes = hs.config.cas_required_attributes - self.auth_handler = hs.get_auth_handler() - self.handlers = hs.get_handlers() - self.macaroon_gen = hs.get_macaroon_generator() + self._sso_auth_handler = SSOAuthHandler(hs) @defer.inlineCallbacks def on_GET(self, request): - client_redirect_url = request.args[b"redirectUrl"][0] + client_redirect_url = parse_string(request, "redirectUrl", required=True) http_client = self.hs.get_simple_http_client() uri = self.cas_server_url + "/proxyValidate" args = { - "ticket": request.args[b"ticket"][0].decode('ascii'), + "ticket": parse_string(request, "ticket", required=True), "service": self.cas_service_url } try: @@ -380,7 +382,6 @@ class CasTicketServlet(ClientV1RestServlet): result = yield self.handle_cas_response(request, body, client_redirect_url) defer.returnValue(result) - @defer.inlineCallbacks def handle_cas_response(self, request, cas_response_body, client_redirect_url): user, attributes = self.parse_cas_response(cas_response_body) @@ -396,28 +397,9 @@ class CasTicketServlet(ClientV1RestServlet): if required_value != actual_value: raise LoginError(401, "Unauthorized", errcode=Codes.UNAUTHORIZED) - user_id = UserID(user, self.hs.hostname).to_string() - auth_handler = self.auth_handler - registered_user_id = yield auth_handler.check_user_exists(user_id) - if not registered_user_id: - registered_user_id, _ = ( - yield self.handlers.registration_handler.register(localpart=user) - ) - - login_token = self.macaroon_gen.generate_short_term_login_token( - registered_user_id + return self._sso_auth_handler.on_successful_auth( + user, request, client_redirect_url, ) - redirect_url = self.add_login_token_to_redirect_url(client_redirect_url, - login_token) - request.redirect(redirect_url) - finish_request(request) - - def add_login_token_to_redirect_url(self, url, token): - url_parts = list(urllib.parse.urlparse(url)) - query = 
dict(urllib.parse.parse_qsl(url_parts[4])) - query.update({"loginToken": token}) - url_parts[4] = urllib.parse.urlencode(query).encode('ascii') - return urllib.parse.urlunparse(url_parts) def parse_cas_response(self, cas_response_body): user = None @@ -452,6 +434,71 @@ class CasTicketServlet(ClientV1RestServlet): return user, attributes +class SSOAuthHandler(object): + """ + Utility class for Resources and Servlets which handle the response from a SSO + service + + Args: + hs (synapse.server.HomeServer) + """ + def __init__(self, hs): + self._hostname = hs.hostname + self._auth_handler = hs.get_auth_handler() + self._registration_handler = hs.get_handlers().registration_handler + self._macaroon_gen = hs.get_macaroon_generator() + + @defer.inlineCallbacks + def on_successful_auth( + self, username, request, client_redirect_url, + ): + """Called once the user has successfully authenticated with the SSO. + + Registers the user if necessary, and then returns a redirect (with + a login token) to the client. + + Args: + username (unicode|bytes): the remote user id. We'll map this onto + something sane for a MXID localpath. + + request (SynapseRequest): the incoming request from the browser. We'll + respond to it with a redirect. + + client_redirect_url (unicode): the redirect_url the client gave us when + it first started the process. + + Returns: + Deferred[none]: Completes once we have handled the request. + """ + localpart = map_username_to_mxid_localpart(username) + user_id = UserID(localpart, self._hostname).to_string() + registered_user_id = yield self._auth_handler.check_user_exists(user_id) + if not registered_user_id: + registered_user_id, _ = ( + yield self._registration_handler.register( + localpart=localpart, + generate_token=False, + ) + ) + + login_token = self._macaroon_gen.generate_short_term_login_token( + registered_user_id + ) + redirect_url = self._add_login_token_to_redirect_url( + client_redirect_url, login_token + ) + request.redirect(redirect_url) + finish_request(request) + + @staticmethod + def _add_login_token_to_redirect_url(url, token): + url_parts = list(urllib.parse.urlparse(url)) + query = dict(urllib.parse.parse_qsl(url_parts[4])) + query.update({"loginToken": token}) + url_parts[4] = urllib.parse.urlencode(query) + return urllib.parse.urlunparse(url_parts) + + def register_servlets(hs, http_server): LoginRestServlet(hs).register(http_server) if hs.config.cas_enabled: diff --git a/synapse/types.py b/synapse/types.py index 41afb27a74..d8cb64addb 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import re import string from collections import namedtuple @@ -228,6 +229,71 @@ def contains_invalid_mxid_characters(localpart): return any(c not in mxid_localpart_allowed_characters for c in localpart) +UPPER_CASE_PATTERN = re.compile(b"[A-Z_]") + +# the following is a pattern which matches '=', and bytes which are not allowed in a mxid +# localpart. 
+# +# It works by: +# * building a string containing the allowed characters (excluding '=') +# * escaping every special character with a backslash (to stop '-' being interpreted as a +# range operator) +# * wrapping it in a '[^...]' regex +# * converting the whole lot to a 'bytes' sequence, so that we can use it to match +# bytes rather than strings +# +NON_MXID_CHARACTER_PATTERN = re.compile( + ("[^%s]" % ( + re.escape("".join(mxid_localpart_allowed_characters - {"="}),), + )).encode("ascii"), +) + + +def map_username_to_mxid_localpart(username, case_sensitive=False): + """Map a username onto a string suitable for a MXID + + This follows the algorithm laid out at + https://matrix.org/docs/spec/appendices.html#mapping-from-other-character-sets. + + Args: + username (unicode|bytes): username to be mapped + case_sensitive (bool): true if TEST and test should be mapped + onto different mxids + + Returns: + unicode: string suitable for a mxid localpart + """ + if not isinstance(username, bytes): + username = username.encode('utf-8') + + # first we sort out upper-case characters + if case_sensitive: + def f1(m): + return b"_" + m.group().lower() + + username = UPPER_CASE_PATTERN.sub(f1, username) + else: + username = username.lower() + + # then we sort out non-ascii characters + def f2(m): + g = m.group()[0] + if isinstance(g, str): + # on python 2, we need to do a ord(). On python 3, the + # byte itself will do. + g = ord(g) + return b"=%02x" % (g,) + + username = NON_MXID_CHARACTER_PATTERN.sub(f2, username) + + # we also do the =-escaping to mxids starting with an underscore. + username = re.sub(b'^_', b'=5f', username) + + # we should now only have ascii bytes left, so can decode back to a + # unicode. + return username.decode('ascii') + + class StreamToken( namedtuple("Token", ( "room_key", diff --git a/tests/test_types.py b/tests/test_types.py index 0f5c8bfaf9..d314a7ff58 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -14,7 +14,7 @@ # limitations under the License. 
from synapse.api.errors import SynapseError -from synapse.types import GroupID, RoomAlias, UserID +from synapse.types import GroupID, RoomAlias, UserID, map_username_to_mxid_localpart from tests import unittest from tests.utils import TestHomeServer @@ -79,3 +79,32 @@ class GroupIDTestCase(unittest.TestCase): except SynapseError as exc: self.assertEqual(400, exc.code) self.assertEqual("M_UNKNOWN", exc.errcode) + + +class MapUsernameTestCase(unittest.TestCase): + def testPassThrough(self): + self.assertEqual(map_username_to_mxid_localpart("test1234"), "test1234") + + def testUpperCase(self): + self.assertEqual(map_username_to_mxid_localpart("tEST_1234"), "test_1234") + self.assertEqual( + map_username_to_mxid_localpart("tEST_1234", case_sensitive=True), + "t_e_s_t__1234", + ) + + def testSymbols(self): + self.assertEqual( + map_username_to_mxid_localpart("test=$?_1234"), + "test=3d=24=3f_1234", + ) + + def testLeadingUnderscore(self): + self.assertEqual(map_username_to_mxid_localpart("_test_1234"), "=5ftest_1234") + + def testNonAscii(self): + # this should work with either a unicode or a bytes + self.assertEqual(map_username_to_mxid_localpart(u'têst'), "t=c3=aast") + self.assertEqual( + map_username_to_mxid_localpart(u'têst'.encode('utf-8')), + "t=c3=aast", + ) -- cgit 1.4.1 From 30da50a5b80e63c05e4b7ca637e3be9dd88dea59 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 7 Dec 2018 14:44:46 +0100 Subject: Initialise user displayname from SAML2 data (#4272) When we register a new user from SAML2 data, initialise their displayname correctly. --- changelog.d/4272.feature | 1 + synapse/handlers/register.py | 23 ++++++++++++++++------- synapse/rest/client/v1/login.py | 5 +++++ synapse/rest/saml2/response_resource.py | 3 +++ synapse/storage/registration.py | 20 +++++++++++++------- tests/storage/test_monthly_active_users.py | 2 +- 6 files changed, 39 insertions(+), 15 deletions(-) create mode 100644 changelog.d/4272.feature (limited to 'synapse/handlers') diff --git a/changelog.d/4272.feature b/changelog.d/4272.feature new file mode 100644 index 0000000000..7a8f286957 --- /dev/null +++ b/changelog.d/4272.feature @@ -0,0 +1 @@ +SAML2 authentication: Initialise user display name from SAML2 data diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 0f87c4610e..ba39e67f6f 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -126,6 +126,7 @@ class RegistrationHandler(BaseHandler): make_guest=False, admin=False, threepid=None, + default_display_name=None, ): """Registers a new client on the server. @@ -140,6 +141,8 @@ class RegistrationHandler(BaseHandler): since it offers no means of associating a device_id with the access_token. Instead you should call auth_handler.issue_access_token after registration. + default_display_name (unicode|None): if set, the new user's displayname + will be set to this. Defaults to 'localpart'. Returns: A tuple of (user_id, access_token). 
Raises: @@ -169,6 +172,13 @@ class RegistrationHandler(BaseHandler): user = UserID(localpart, self.hs.hostname) user_id = user.to_string() + if was_guest: + # If the user was a guest then they already have a profile + default_display_name = None + + elif default_display_name is None: + default_display_name = localpart + token = None if generate_token: token = self.macaroon_gen.generate_access_token(user_id) @@ -178,10 +188,7 @@ class RegistrationHandler(BaseHandler): password_hash=password_hash, was_guest=was_guest, make_guest=make_guest, - create_profile_with_localpart=( - # If the user was a guest then they already have a profile - None if was_guest else user.localpart - ), + create_profile_with_displayname=default_display_name, admin=admin, ) @@ -203,13 +210,15 @@ class RegistrationHandler(BaseHandler): yield self.check_user_id_not_appservice_exclusive(user_id) if generate_token: token = self.macaroon_gen.generate_access_token(user_id) + if default_display_name is None: + default_display_name = localpart try: yield self.store.register( user_id=user_id, token=token, password_hash=password_hash, make_guest=make_guest, - create_profile_with_localpart=user.localpart, + create_profile_with_displayname=default_display_name, ) except SynapseError: # if user id is taken, just generate another @@ -300,7 +309,7 @@ class RegistrationHandler(BaseHandler): user_id=user_id, password_hash="", appservice_id=service_id, - create_profile_with_localpart=user.localpart, + create_profile_with_displayname=user.localpart, ) defer.returnValue(user_id) @@ -478,7 +487,7 @@ class RegistrationHandler(BaseHandler): user_id=user_id, token=token, password_hash=password_hash, - create_profile_with_localpart=user.localpart, + create_profile_with_displayname=user.localpart, ) else: yield self._auth_handler.delete_access_tokens_for_user(user_id) diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index b7c5b58b01..e9d3032498 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -451,6 +451,7 @@ class SSOAuthHandler(object): @defer.inlineCallbacks def on_successful_auth( self, username, request, client_redirect_url, + user_display_name=None, ): """Called once the user has successfully authenticated with the SSO. @@ -467,6 +468,9 @@ class SSOAuthHandler(object): client_redirect_url (unicode): the redirect_url the client gave us when it first started the process. + user_display_name (unicode|None): if set, and we have to register a new user, + we will set their displayname to this. + Returns: Deferred[none]: Completes once we have handled the request. 
""" @@ -478,6 +482,7 @@ class SSOAuthHandler(object): yield self._registration_handler.register( localpart=localpart, generate_token=False, + default_display_name=user_display_name, ) ) diff --git a/synapse/rest/saml2/response_resource.py b/synapse/rest/saml2/response_resource.py index ad2ed157b5..69fb77b322 100644 --- a/synapse/rest/saml2/response_resource.py +++ b/synapse/rest/saml2/response_resource.py @@ -66,6 +66,9 @@ class SAML2ResponseResource(Resource): raise CodeMessageException(400, "uid not in SAML2 response") username = saml2_auth.ava["uid"][0] + + displayName = saml2_auth.ava.get("displayName", [None])[0] return self._sso_auth_handler.on_successful_auth( username, request, relay_state, + user_display_name=displayName, ) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 80d76bf9d7..3d55441e33 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -22,6 +22,7 @@ from twisted.internet import defer from synapse.api.errors import Codes, StoreError from synapse.storage import background_updates from synapse.storage._base import SQLBaseStore +from synapse.types import UserID from synapse.util.caches.descriptors import cached, cachedInlineCallbacks @@ -167,7 +168,7 @@ class RegistrationStore(RegistrationWorkerStore, def register(self, user_id, token=None, password_hash=None, was_guest=False, make_guest=False, appservice_id=None, - create_profile_with_localpart=None, admin=False): + create_profile_with_displayname=None, admin=False): """Attempts to register an account. Args: @@ -181,8 +182,8 @@ class RegistrationStore(RegistrationWorkerStore, make_guest (boolean): True if the the new user should be guest, false to add a regular user account. appservice_id (str): The ID of the appservice registering the user. - create_profile_with_localpart (str): Optionally create a profile for - the given localpart. + create_profile_with_displayname (unicode): Optionally create a profile for + the user, setting their displayname to the given value Raises: StoreError if the user_id could not be registered. """ @@ -195,7 +196,7 @@ class RegistrationStore(RegistrationWorkerStore, was_guest, make_guest, appservice_id, - create_profile_with_localpart, + create_profile_with_displayname, admin ) @@ -208,9 +209,11 @@ class RegistrationStore(RegistrationWorkerStore, was_guest, make_guest, appservice_id, - create_profile_with_localpart, + create_profile_with_displayname, admin, ): + user_id_obj = UserID.from_string(user_id) + now = int(self.clock.time()) next_id = self._access_tokens_id_gen.get_next() @@ -273,12 +276,15 @@ class RegistrationStore(RegistrationWorkerStore, (next_id, user_id, token,) ) - if create_profile_with_localpart: + if create_profile_with_displayname: # set a default displayname serverside to avoid ugly race # between auto-joins and clients trying to set displaynames + # + # *obviously* the 'profiles' table uses localpart for user_id + # while everything else uses the full mxid. 
txn.execute( "INSERT INTO profiles(user_id, displayname) VALUES (?,?)", - (create_profile_with_localpart, create_profile_with_localpart) + (user_id_obj.localpart, create_profile_with_displayname) ) self._invalidate_cache_and_stream( diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 8664bc3d54..9618d57463 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -149,7 +149,7 @@ class MonthlyActiveUsersTestCase(HomeserverTestCase): def test_populate_monthly_users_is_guest(self): # Test that guest users are not added to mau list - user_id = "user_id" + user_id = "@user_id:host" self.store.register( user_id=user_id, token="123", password_hash=None, make_guest=True ) -- cgit 1.4.1 From d2f7c4e6b1efbdd3275d02a19220a10cf00a8f66 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 14 Dec 2018 18:20:59 +0000 Subject: create support user (#4141) Allow for the creation of a support user. A support user can access the server, join rooms, interact with other users, but does not appear in the user directory nor does it contribute to monthly active user limits. --- changelog.d/4141.feature | 1 + docs/admin_api/register_api.rst | 11 ++- synapse/_scripts/register_new_matrix_user.py | 19 ++++- synapse/api/auth.py | 5 +- synapse/api/constants.py | 8 ++ synapse/handlers/register.py | 15 +++- synapse/handlers/room.py | 2 +- synapse/handlers/user_directory.py | 45 ++++++----- synapse/rest/client/v1/admin.py | 11 ++- synapse/storage/monthly_active_users.py | 30 ++++++- synapse/storage/registration.py | 38 ++++++++- .../schema/delta/53/add_user_type_to_users.sql | 19 +++++ tests/api/test_auth.py | 2 + tests/handlers/test_register.py | 30 ++++++- tests/handlers/test_user_directory.py | 91 ++++++++++++++++++++++ tests/rest/client/v1/test_admin.py | 33 ++++++-- tests/storage/test_monthly_active_users.py | 34 +++++++- tests/storage/test_registration.py | 22 ++++++ tests/unittest.py | 1 + tests/utils.py | 1 - 20 files changed, 371 insertions(+), 47 deletions(-) create mode 100644 changelog.d/4141.feature create mode 100644 synapse/storage/schema/delta/53/add_user_type_to_users.sql create mode 100644 tests/handlers/test_user_directory.py (limited to 'synapse/handlers') diff --git a/changelog.d/4141.feature b/changelog.d/4141.feature new file mode 100644 index 0000000000..632d3547cb --- /dev/null +++ b/changelog.d/4141.feature @@ -0,0 +1 @@ +Special-case a support user for use in verifying behaviour of a given server. The support user does not appear in user directory or monthly active user counts. diff --git a/docs/admin_api/register_api.rst b/docs/admin_api/register_api.rst index 16d65c86b3..084e74ebf5 100644 --- a/docs/admin_api/register_api.rst +++ b/docs/admin_api/register_api.rst @@ -39,13 +39,13 @@ As an example:: } The MAC is the hex digest output of the HMAC-SHA1 algorithm, with the key being -the shared secret and the content being the nonce, user, password, and either -the string "admin" or "notadmin", each separated by NULs. For an example of -generation in Python:: +the shared secret and the content being the nonce, user, password, either the +string "admin" or "notadmin", and optionally the user_type +each separated by NULs. 
For an example of generation in Python:: import hmac, hashlib - def generate_mac(nonce, user, password, admin=False): + def generate_mac(nonce, user, password, admin=False, user_type=None): mac = hmac.new( key=shared_secret, @@ -59,5 +59,8 @@ generation in Python:: mac.update(password.encode('utf8')) mac.update(b"\x00") mac.update(b"admin" if admin else b"notadmin") + if user_type: + mac.update(b"\x00") + mac.update(user_type.encode('utf8')) return mac.hexdigest() diff --git a/synapse/_scripts/register_new_matrix_user.py b/synapse/_scripts/register_new_matrix_user.py index 70cecde486..4c3abf06fe 100644 --- a/synapse/_scripts/register_new_matrix_user.py +++ b/synapse/_scripts/register_new_matrix_user.py @@ -35,6 +35,7 @@ def request_registration( server_location, shared_secret, admin=False, + user_type=None, requests=_requests, _print=print, exit=sys.exit, @@ -65,6 +66,9 @@ def request_registration( mac.update(password.encode('utf8')) mac.update(b"\x00") mac.update(b"admin" if admin else b"notadmin") + if user_type: + mac.update(b"\x00") + mac.update(user_type.encode('utf8')) mac = mac.hexdigest() @@ -74,6 +78,7 @@ def request_registration( "password": password, "mac": mac, "admin": admin, + "user_type": user_type, } _print("Sending registration request...") @@ -91,7 +96,7 @@ def request_registration( _print("Success!") -def register_new_user(user, password, server_location, shared_secret, admin): +def register_new_user(user, password, server_location, shared_secret, admin, user_type): if not user: try: default_user = getpass.getuser() @@ -129,7 +134,8 @@ def register_new_user(user, password, server_location, shared_secret, admin): else: admin = False - request_registration(user, password, server_location, shared_secret, bool(admin)) + request_registration(user, password, server_location, shared_secret, + bool(admin), user_type) def main(): @@ -154,6 +160,12 @@ def main(): default=None, help="New password for user. Will prompt if omitted.", ) + parser.add_argument( + "-t", + "--user_type", + default=None, + help="User type as specified in synapse.api.constants.UserTypes", + ) admin_group = parser.add_mutually_exclusive_group() admin_group.add_argument( "-a", @@ -208,7 +220,8 @@ def main(): if args.admin or args.no_admin: admin = args.admin - register_new_user(args.user, args.password, args.server_url, secret, admin) + register_new_user(args.user, args.password, args.server_url, secret, + admin, args.user_type) if __name__ == "__main__": diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 5309899703..b8a9af7158 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -802,9 +802,10 @@ class Auth(object): threepid should never be set at the same time. """ - # Never fail an auth check for the server notices users + # Never fail an auth check for the server notices users or support user # This can be a problem where event creation is prohibited due to blocking - if user_id == self.hs.config.server_notices_mxid: + is_support = yield self.store.is_support_user(user_id) + if user_id == self.hs.config.server_notices_mxid or is_support: return if self.hs.config.hs_disabled: diff --git a/synapse/api/constants.py b/synapse/api/constants.py index f20e0fcf0b..b7f25a42a2 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -119,3 +119,11 @@ KNOWN_ROOM_VERSIONS = { ServerNoticeMsgType = "m.server_notice" ServerNoticeLimitReached = "m.server_notice.usage_limit_reached" + + +class UserTypes(object): + """Allows for user type specific behaviour. 
With the benefit of hindsight + 'admin' and 'guest' users should also be UserTypes. Normal users are type None + """ + SUPPORT = "support" + ALL_USER_TYPES = (SUPPORT) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index ba39e67f6f..21c17c59a0 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -126,6 +126,7 @@ class RegistrationHandler(BaseHandler): make_guest=False, admin=False, threepid=None, + user_type=None, default_display_name=None, ): """Registers a new client on the server. @@ -141,6 +142,8 @@ class RegistrationHandler(BaseHandler): since it offers no means of associating a device_id with the access_token. Instead you should call auth_handler.issue_access_token after registration. + user_type (str|None): type of user. One of the values from + api.constants.UserTypes, or None for a normal user. default_display_name (unicode|None): if set, the new user's displayname will be set to this. Defaults to 'localpart'. Returns: @@ -190,6 +193,7 @@ class RegistrationHandler(BaseHandler): make_guest=make_guest, create_profile_with_displayname=default_display_name, admin=admin, + user_type=user_type, ) if self.hs.config.user_directory_search_all_users: @@ -242,9 +246,16 @@ class RegistrationHandler(BaseHandler): # auto-join the user to any rooms we're supposed to dump them into fake_requester = create_requester(user_id) - # try to create the room if we're the first user on the server + # try to create the room if we're the first real user on the server. Note + # that an auto-generated support user is not a real user and will never be + # the user to create the room should_auto_create_rooms = False - if self.hs.config.autocreate_auto_join_rooms: + is_support = yield self.store.is_support_user(user_id) + # There is an edge case where the first user is the support user, then + # the room is never created, though this seems unlikely and + # recoverable from given the support user being involved in the first + # place. + if self.hs.config.autocreate_auto_join_rooms and not is_support: count = yield self.store.count_all_users() should_auto_create_rooms = count == 1 for r in self.hs.config.auto_join_rooms: diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 3928faa6e7..581e96c743 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -433,7 +433,7 @@ class RoomCreationHandler(BaseHandler): """ user_id = requester.user.to_string() - self.auth.check_auth_blocking(user_id) + yield self.auth.check_auth_blocking(user_id) if not self.spam_checker.user_may_create_room(user_id): raise SynapseError(403, "You are not permitted to create rooms") diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index f11b430126..3c40999338 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -125,9 +125,12 @@ class UserDirectoryHandler(object): """ # FIXME(#3714): We should probably do this in the same worker as all # the other changes. - yield self.store.update_profile_in_user_dir( - user_id, profile.display_name, profile.avatar_url, None, - ) + is_support = yield self.store.is_support_user(user_id) + # Support users are for diagnostics and should not appear in the user directory. 
+ if not is_support: + yield self.store.update_profile_in_user_dir( + user_id, profile.display_name, profile.avatar_url, None, + ) @defer.inlineCallbacks def handle_user_deactivated(self, user_id): @@ -329,14 +332,7 @@ class UserDirectoryHandler(object): public_value=Membership.JOIN, ) - if change is None: - # Handle any profile changes - yield self._handle_profile_change( - state_key, room_id, prev_event_id, event_id, - ) - continue - - if not change: + if change is False: # Need to check if the server left the room entirely, if so # we might need to remove all the users in that room is_in_room = yield self.store.is_host_joined( @@ -354,16 +350,25 @@ class UserDirectoryHandler(object): else: logger.debug("Server is still in room: %r", room_id) - if change: # The user joined - event = yield self.store.get_event(event_id, allow_none=True) - profile = ProfileInfo( - avatar_url=event.content.get("avatar_url"), - display_name=event.content.get("displayname"), - ) + is_support = yield self.store.is_support_user(state_key) + if not is_support: + if change is None: + # Handle any profile changes + yield self._handle_profile_change( + state_key, room_id, prev_event_id, event_id, + ) + continue + + if change: # The user joined + event = yield self.store.get_event(event_id, allow_none=True) + profile = ProfileInfo( + avatar_url=event.content.get("avatar_url"), + display_name=event.content.get("displayname"), + ) - yield self._handle_new_user(room_id, state_key, profile) - else: # The user left - yield self._handle_remove_user(room_id, state_key) + yield self._handle_new_user(room_id, state_key, profile) + else: # The user left + yield self._handle_remove_user(room_id, state_key) else: logger.debug("Ignoring irrelevant type: %r", typ) diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index 41534b8c2a..82433a2aa9 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -23,7 +23,7 @@ from six.moves import http_client from twisted.internet import defer -from synapse.api.constants import Membership +from synapse.api.constants import Membership, UserTypes from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError from synapse.http.servlet import ( assert_params_in_dict, @@ -158,6 +158,11 @@ class UserRegisterServlet(ClientV1RestServlet): raise SynapseError(400, "Invalid password") admin = body.get("admin", None) + user_type = body.get("user_type", None) + + if user_type is not None and user_type not in UserTypes.ALL_USER_TYPES: + raise SynapseError(400, "Invalid user type") + got_mac = body["mac"] want_mac = hmac.new( @@ -171,6 +176,9 @@ class UserRegisterServlet(ClientV1RestServlet): want_mac.update(password) want_mac.update(b"\x00") want_mac.update(b"admin" if admin else b"notadmin") + if user_type: + want_mac.update(b"\x00") + want_mac.update(user_type.encode('utf8')) want_mac = want_mac.hexdigest() if not hmac.compare_digest( @@ -189,6 +197,7 @@ class UserRegisterServlet(ClientV1RestServlet): password=body["password"], admin=bool(admin), generate_token=False, + user_type=user_type, ) result = yield register._create_registration_details(user_id, body) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 479e01ddc1..d6fc8edd4c 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -55,9 +55,12 @@ class MonthlyActiveUsersStore(SQLBaseStore): txn, tp["medium"], tp["address"] ) + if user_id: - self.upsert_monthly_active_user_txn(txn, 
user_id) - reserved_user_list.append(user_id) + is_support = self.is_support_user_txn(txn, user_id) + if not is_support: + self.upsert_monthly_active_user_txn(txn, user_id) + reserved_user_list.append(user_id) else: logger.warning( "mau limit reserved threepid %s not found in db" % tp @@ -182,6 +185,18 @@ class MonthlyActiveUsersStore(SQLBaseStore): Args: user_id (str): user to add/update """ + # Support user never to be included in MAU stats. Note I can't easily call this + # from upsert_monthly_active_user_txn because then I need a _txn form of + # is_support_user which is complicated because I want to cache the result. + # Therefore I call it here and ignore the case where + # upsert_monthly_active_user_txn is called directly from + # _initialise_reserved_users reasoning that it would be very strange to + # include a support user in this context. + + is_support = yield self.is_support_user(user_id) + if is_support: + return + is_insert = yield self.runInteraction( "upsert_monthly_active_user", self.upsert_monthly_active_user_txn, user_id @@ -200,6 +215,16 @@ class MonthlyActiveUsersStore(SQLBaseStore): in a database thread rather than the main thread, and we can't call txn.call_after because txn may not be a LoggingTransaction. + We consciously do not call is_support_txn from this method because it + is not possible to cache the response. is_support_txn will be false in + almost all cases, so it seems reasonable to call it only for + upsert_monthly_active_user and to call is_support_txn manually + for cases where upsert_monthly_active_user_txn is called directly, + like _initialise_reserved_users + + In short, don't call this method with support users. (Support users + should not appear in the MAU stats). + Args: txn (cursor): user_id (str): user to add/update @@ -208,6 +233,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): bool: True if a new entry was created, False if an existing one was updated. """ + # Am consciously deciding to lock the table on the basis that is ought # never be a big table and alternative approaches (batching multiple # upserts into a single txn) introduced a lot of extra complexity. diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 3d55441e33..10c3b9757f 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -19,6 +19,7 @@ from six.moves import range from twisted.internet import defer +from synapse.api.constants import UserTypes from synapse.api.errors import Codes, StoreError from synapse.storage import background_updates from synapse.storage._base import SQLBaseStore @@ -168,7 +169,7 @@ class RegistrationStore(RegistrationWorkerStore, def register(self, user_id, token=None, password_hash=None, was_guest=False, make_guest=False, appservice_id=None, - create_profile_with_displayname=None, admin=False): + create_profile_with_displayname=None, admin=False, user_type=None): """Attempts to register an account. Args: @@ -184,6 +185,10 @@ class RegistrationStore(RegistrationWorkerStore, appservice_id (str): The ID of the appservice registering the user. create_profile_with_displayname (unicode): Optionally create a profile for the user, setting their displayname to the given value + admin (boolean): is an admin user? + user_type (str|None): type of user. One of the values from + api.constants.UserTypes, or None for a normal user. + Raises: StoreError if the user_id could not be registered. 
""" @@ -197,7 +202,8 @@ class RegistrationStore(RegistrationWorkerStore, make_guest, appservice_id, create_profile_with_displayname, - admin + admin, + user_type ) def _register( @@ -211,6 +217,7 @@ class RegistrationStore(RegistrationWorkerStore, appservice_id, create_profile_with_displayname, admin, + user_type, ): user_id_obj = UserID.from_string(user_id) @@ -247,6 +254,7 @@ class RegistrationStore(RegistrationWorkerStore, "is_guest": 1 if make_guest else 0, "appservice_id": appservice_id, "admin": 1 if admin else 0, + "user_type": user_type, } ) else: @@ -260,6 +268,7 @@ class RegistrationStore(RegistrationWorkerStore, "is_guest": 1 if make_guest else 0, "appservice_id": appservice_id, "admin": 1 if admin else 0, + "user_type": user_type, } ) except self.database_engine.module.IntegrityError: @@ -456,6 +465,31 @@ class RegistrationStore(RegistrationWorkerStore, defer.returnValue(res if res else False) + @cachedInlineCallbacks() + def is_support_user(self, user_id): + """Determines if the user is of type UserTypes.SUPPORT + + Args: + user_id (str): user id to test + + Returns: + Deferred[bool]: True if user is of type UserTypes.SUPPORT + """ + res = yield self.runInteraction( + "is_support_user", self.is_support_user_txn, user_id + ) + defer.returnValue(res) + + def is_support_user_txn(self, txn, user_id): + res = self._simple_select_one_onecol_txn( + txn=txn, + table="users", + keyvalues={"name": user_id}, + retcol="user_type", + allow_none=True, + ) + return True if res == UserTypes.SUPPORT else False + @defer.inlineCallbacks def user_add_threepid(self, user_id, medium, address, validated_at, added_at): yield self._simple_upsert("user_threepids", { diff --git a/synapse/storage/schema/delta/53/add_user_type_to_users.sql b/synapse/storage/schema/delta/53/add_user_type_to_users.sql new file mode 100644 index 0000000000..88ec2f83e5 --- /dev/null +++ b/synapse/storage/schema/delta/53/add_user_type_to_users.sql @@ -0,0 +1,19 @@ +/* Copyright 2018 New Vector Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* The type of the user: NULL for a regular user, or one of the constants in + * synapse.api.constants.UserTypes + */ +ALTER TABLE users ADD COLUMN user_type TEXT DEFAULT NULL; diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 379e9c4ab1..69dc40428b 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -50,6 +50,8 @@ class AuthTestCase(unittest.TestCase): # this is overridden for the appservice tests self.store.get_app_service_by_token = Mock(return_value=None) + self.store.is_support_user = Mock(return_value=defer.succeed(False)) + @defer.inlineCallbacks def test_get_user_by_req_user_valid_token(self): user_info = {"name": self.test_user, "token_id": "ditto", "device_id": "device"} diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 23bce6ee7d..eb70e1daa6 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -17,7 +17,8 @@ from mock import Mock from twisted.internet import defer -from synapse.api.errors import ResourceLimitError +from synapse.api.constants import UserTypes +from synapse.api.errors import ResourceLimitError, SynapseError from synapse.handlers.register import RegistrationHandler from synapse.types import RoomAlias, UserID, create_requester @@ -64,6 +65,7 @@ class RegistrationTestCase(unittest.TestCase): requester, frank.localpart, "Frankie" ) self.assertEquals(result_user_id, user_id) + self.assertTrue(result_token is not None) self.assertEquals(result_token, 'secret') @defer.inlineCallbacks @@ -82,7 +84,7 @@ class RegistrationTestCase(unittest.TestCase): requester, local_part, None ) self.assertEquals(result_user_id, user_id) - self.assertEquals(result_token, 'secret') + self.assertTrue(result_token is not None) @defer.inlineCallbacks def test_mau_limits_when_disabled(self): @@ -169,6 +171,20 @@ class RegistrationTestCase(unittest.TestCase): rooms = yield self.store.get_rooms_for_user(res[0]) self.assertEqual(len(rooms), 0) + @defer.inlineCallbacks + def test_auto_create_auto_join_rooms_when_support_user_exists(self): + room_alias_str = "#room:test" + self.hs.config.auto_join_rooms = [room_alias_str] + + self.store.is_support_user = Mock(return_value=True) + res = yield self.handler.register(localpart='support') + rooms = yield self.store.get_rooms_for_user(res[0]) + self.assertEqual(len(rooms), 0) + directory_handler = self.hs.get_handlers().directory_handler + room_alias = RoomAlias.from_string(room_alias_str) + with self.assertRaises(SynapseError): + yield directory_handler.get_association(room_alias) + @defer.inlineCallbacks def test_auto_create_auto_join_where_no_consent(self): self.hs.config.user_consent_at_registration = True @@ -179,3 +195,13 @@ class RegistrationTestCase(unittest.TestCase): yield self.handler.post_consent_actions(res[0]) rooms = yield self.store.get_rooms_for_user(res[0]) self.assertEqual(len(rooms), 0) + + @defer.inlineCallbacks + def test_register_support_user(self): + res = yield self.handler.register(localpart='user', user_type=UserTypes.SUPPORT) + self.assertTrue(self.store.is_support_user(res[0])) + + @defer.inlineCallbacks + def test_register_not_support_user(self): + res = yield self.handler.register(localpart='user') + self.assertFalse(self.store.is_support_user(res[0])) diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py new file mode 100644 index 0000000000..11f2bae698 --- /dev/null +++ b/tests/handlers/test_user_directory.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from mock import Mock + +from twisted.internet import defer + +from synapse.api.constants import UserTypes +from synapse.handlers.user_directory import UserDirectoryHandler +from synapse.storage.roommember import ProfileInfo + +from tests import unittest +from tests.utils import setup_test_homeserver + + +class UserDirectoryHandlers(object): + def __init__(self, hs): + self.user_directory_handler = UserDirectoryHandler(hs) + + +class UserDirectoryTestCase(unittest.TestCase): + """ Tests the UserDirectoryHandler. """ + + @defer.inlineCallbacks + def setUp(self): + hs = yield setup_test_homeserver(self.addCleanup) + self.store = hs.get_datastore() + hs.handlers = UserDirectoryHandlers(hs) + + self.handler = hs.get_handlers().user_directory_handler + + @defer.inlineCallbacks + def test_handle_local_profile_change_with_support_user(self): + support_user_id = "@support:test" + yield self.store.register( + user_id=support_user_id, + token="123", + password_hash=None, + user_type=UserTypes.SUPPORT + ) + + yield self.handler.handle_local_profile_change(support_user_id, None) + profile = yield self.store.get_user_in_directory(support_user_id) + self.assertTrue(profile is None) + display_name = 'display_name' + + profile_info = ProfileInfo( + avatar_url='avatar_url', + display_name=display_name, + ) + regular_user_id = '@regular:test' + yield self.handler.handle_local_profile_change(regular_user_id, profile_info) + profile = yield self.store.get_user_in_directory(regular_user_id) + self.assertTrue(profile['display_name'] == display_name) + + @defer.inlineCallbacks + def test_handle_user_deactivated_support_user(self): + s_user_id = "@support:test" + self.store.register( + user_id=s_user_id, + token="123", + password_hash=None, + user_type=UserTypes.SUPPORT + ) + + self.store.remove_from_user_dir = Mock() + self.store.remove_from_user_in_public_room = Mock() + yield self.handler.handle_user_deactivated(s_user_id) + self.store.remove_from_user_dir.not_called() + self.store.remove_from_user_in_public_room.not_called() + + @defer.inlineCallbacks + def test_handle_user_deactivated_regular_user(self): + r_user_id = "@regular:test" + self.store.register(user_id=r_user_id, token="123", password_hash=None) + self.store.remove_from_user_dir = Mock() + self.store.remove_from_user_in_public_room = Mock() + yield self.handler.handle_user_deactivated(r_user_id) + self.store.remove_from_user_dir.called_once_with(r_user_id) + self.store.remove_from_user_in_public_room.assert_called_once_with(r_user_id) diff --git a/tests/rest/client/v1/test_admin.py b/tests/rest/client/v1/test_admin.py index e38eb628a9..407bf0ac4c 100644 --- a/tests/rest/client/v1/test_admin.py +++ b/tests/rest/client/v1/test_admin.py @@ -19,6 +19,7 @@ import json from mock import Mock +from synapse.api.constants import UserTypes from synapse.rest.client.v1.admin import register_servlets from tests import unittest @@ -147,7 +148,9 @@ class UserRegisterTestCase(unittest.HomeserverTestCase): nonce = 
channel.json_body["nonce"] want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1) - want_mac.update(nonce.encode('ascii') + b"\x00bob\x00abc123\x00admin") + want_mac.update( + nonce.encode('ascii') + b"\x00bob\x00abc123\x00admin\x00support" + ) want_mac = want_mac.hexdigest() body = json.dumps( @@ -156,6 +159,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase): "username": "bob", "password": "abc123", "admin": True, + "user_type": UserTypes.SUPPORT, "mac": want_mac, } ) @@ -174,7 +178,9 @@ class UserRegisterTestCase(unittest.HomeserverTestCase): nonce = channel.json_body["nonce"] want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1) - want_mac.update(nonce.encode('ascii') + b"\x00bob\x00abc123\x00admin") + want_mac.update( + nonce.encode('ascii') + b"\x00bob\x00abc123\x00admin" + ) want_mac = want_mac.hexdigest() body = json.dumps( @@ -202,8 +208,8 @@ class UserRegisterTestCase(unittest.HomeserverTestCase): def test_missing_parts(self): """ Synapse will complain if you don't give nonce, username, password, and - mac. Admin is optional. Additional checks are done for length and - type. + mac. Admin and user_types are optional. Additional checks are done for length + and type. """ def nonce(): @@ -260,7 +266,7 @@ class UserRegisterTestCase(unittest.HomeserverTestCase): self.assertEqual('Invalid username', channel.json_body["error"]) # - # Username checks + # Password checks # # Must be present @@ -296,3 +302,20 @@ class UserRegisterTestCase(unittest.HomeserverTestCase): self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual('Invalid password', channel.json_body["error"]) + + # + # user_type check + # + + # Invalid user_type + body = json.dumps({ + "nonce": nonce(), + "username": "a", + "password": "1234", + "user_type": "invalid"} + ) + request, channel = self.make_request("POST", self.url, body.encode('utf8')) + self.render(request) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual('Invalid user type', channel.json_body["error"]) diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 9618d57463..9605301b59 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -16,6 +16,8 @@ from mock import Mock from twisted.internet import defer +from synapse.api.constants import UserTypes + from tests.unittest import HomeserverTestCase FORTY_DAYS = 40 * 24 * 60 * 60 @@ -28,6 +30,7 @@ class MonthlyActiveUsersTestCase(HomeserverTestCase): self.store = hs.get_datastore() hs.config.limit_usage_by_mau = True hs.config.max_mau_value = 50 + # Advance the clock a bit reactor.advance(FORTY_DAYS) @@ -39,14 +42,23 @@ class MonthlyActiveUsersTestCase(HomeserverTestCase): user1_email = "user1@matrix.org" user2 = "@user2:server" user2_email = "user2@matrix.org" + user3 = "@user3:server" + user3_email = "user3@matrix.org" + threepids = [ {'medium': 'email', 'address': user1_email}, {'medium': 'email', 'address': user2_email}, + {'medium': 'email', 'address': user3_email}, ] - user_num = len(threepids) + # -1 because user3 is a support user and does not count + user_num = len(threepids) - 1 self.store.register(user_id=user1, token="123", password_hash=None) self.store.register(user_id=user2, token="456", password_hash=None) + self.store.register( + user_id=user3, token="789", + password_hash=None, user_type=UserTypes.SUPPORT + ) self.pump() now = int(self.hs.get_clock().time_msec()) @@ -60,7 +72,7 @@ class 
MonthlyActiveUsersTestCase(HomeserverTestCase): active_count = self.store.get_monthly_active_count() - # Test total counts + # Test total counts, ensure user3 (support user) is not counted self.assertEquals(self.get_success(active_count), user_num) # Test user is marked as active @@ -221,6 +233,24 @@ class MonthlyActiveUsersTestCase(HomeserverTestCase): count = self.store.get_registered_reserved_users_count() self.assertEquals(self.get_success(count), len(threepids)) + def test_support_user_not_add_to_mau_limits(self): + support_user_id = "@support:test" + count = self.store.get_monthly_active_count() + self.pump() + self.assertEqual(self.get_success(count), 0) + + self.store.register( + user_id=support_user_id, + token="123", + password_hash=None, + user_type=UserTypes.SUPPORT + ) + + self.store.upsert_monthly_active_user(support_user_id) + count = self.store.get_monthly_active_count() + self.pump() + self.assertEqual(self.get_success(count), 0) + def test_track_monthly_users_without_cap(self): self.hs.config.limit_usage_by_mau = False self.hs.config.mau_stats_only = True diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py index 3dfb7b903a..cb3cc4d2e5 100644 --- a/tests/storage/test_registration.py +++ b/tests/storage/test_registration.py @@ -16,6 +16,8 @@ from twisted.internet import defer +from synapse.api.constants import UserTypes + from tests import unittest from tests.utils import setup_test_homeserver @@ -99,6 +101,26 @@ class RegistrationStoreTestCase(unittest.TestCase): user = yield self.store.get_user_by_access_token(self.tokens[0]) self.assertIsNone(user, "access token was not deleted without device_id") + @defer.inlineCallbacks + def test_is_support_user(self): + TEST_USER = "@test:test" + SUPPORT_USER = "@support:test" + + res = yield self.store.is_support_user(None) + self.assertFalse(res) + yield self.store.register(user_id=TEST_USER, token="123", password_hash=None) + res = yield self.store.is_support_user(TEST_USER) + self.assertFalse(res) + + yield self.store.register( + user_id=SUPPORT_USER, + token="456", + password_hash=None, + user_type=UserTypes.SUPPORT + ) + res = yield self.store.is_support_user(SUPPORT_USER) + self.assertTrue(res) + class TokenGenerator: def __init__(self): diff --git a/tests/unittest.py b/tests/unittest.py index 092c930396..78d2f740f9 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -373,6 +373,7 @@ class HomeserverTestCase(TestCase): nonce_str += b"\x00admin" else: nonce_str += b"\x00notadmin" + want_mac.update(nonce.encode('ascii') + b"\x00" + nonce_str) want_mac = want_mac.hexdigest() diff --git a/tests/utils.py b/tests/utils.py index 04796a9b30..38e689983d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -140,7 +140,6 @@ def default_config(name): config.rc_messages_per_second = 10000 config.rc_message_burst_count = 10000 config.saml2_enabled = False - config.use_frozen_dicts = False # we need a sane default_room_version, otherwise attempts to create rooms will -- cgit 1.4.1 From 668e6625b0ec1c9dde0f81713c2c4a4f5a62f5b0 Mon Sep 17 00:00:00 2001 From: Nathan Pennie Date: Wed, 19 Dec 2018 06:53:02 -0500 Subject: Fix the variable names used for account_data --- synapse/handlers/sync.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 09739f2862..6d2b4096ab 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1668,13 +1668,13 @@ class SyncHandler(object): "content": 
content, }) - account_data = sync_config.filter_collection.filter_room_account_data( + account_data_events = sync_config.filter_collection.filter_room_account_data( account_data_events ) ephemeral = sync_config.filter_collection.filter_room_ephemeral(ephemeral) - if not (always_include or batch or account_data or ephemeral or full_state): + if not (always_include or batch or account_data_events or ephemeral or full_state): return state = yield self.compute_state_delta( @@ -1745,7 +1745,7 @@ class SyncHandler(object): room_id=room_id, timeline=batch, state=state, - account_data=account_data, + account_data=account_data_events, ) if room_sync or always_include: sync_result_builder.archived.append(room_sync) -- cgit 1.4.1 From 81b513416e6ff1a9f15622b158c2f0227db80872 Mon Sep 17 00:00:00 2001 From: Nathan Pennie Date: Wed, 19 Dec 2018 14:12:33 -0500 Subject: Fixed line length --- synapse/handlers/sync.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 6d2b4096ab..f7f768f751 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1674,7 +1674,11 @@ class SyncHandler(object): ephemeral = sync_config.filter_collection.filter_room_ephemeral(ephemeral) - if not (always_include or batch or account_data_events or ephemeral or full_state): + if not (always_include + or batch + or account_data_events + or ephemeral + or full_state): return state = yield self.compute_state_delta( -- cgit 1.4.1 From 27128145e67b2c00ff84e7f3d6ca56b03616648b Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Wed, 2 Jan 2019 23:37:39 +0000 Subject: fix NPE in /messages by checking if all events were filtered out (#4330) --- changelog.d/4330.bugfix | 1 + synapse/handlers/pagination.py | 21 +++++++++++---------- 2 files changed, 12 insertions(+), 10 deletions(-) create mode 100644 changelog.d/4330.bugfix (limited to 'synapse/handlers') diff --git a/changelog.d/4330.bugfix b/changelog.d/4330.bugfix new file mode 100644 index 0000000000..666c9022e0 --- /dev/null +++ b/changelog.d/4330.bugfix @@ -0,0 +1 @@ +fix NPE in /messages by checking if all events were filtered out diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 43f81bd607..9d257ecf31 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -235,6 +235,17 @@ class PaginationHandler(object): "room_key", next_key ) + if events: + if event_filter: + events = event_filter.filter(events) + + events = yield filter_events_for_client( + self.store, + user_id, + events, + is_peeking=(member_event_id is None), + ) + if not events: defer.returnValue({ "chunk": [], @@ -242,16 +253,6 @@ class PaginationHandler(object): "end": next_token.to_string(), }) - if event_filter: - events = event_filter.filter(events) - - events = yield filter_events_for_client( - self.store, - user_id, - events, - is_peeking=(member_event_id is None), - ) - state = None if event_filter and event_filter.lazy_load_members(): # TODO: remove redundant members -- cgit 1.4.1 From 05e129664931c114fcaae8bebe0a26685dcd9c6d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 16 Jan 2019 23:14:11 +0000 Subject: don't store more remote device lists if they have more than 1K devices (#4397) --- changelog.d/4397.bugfix | 1 + synapse/handlers/device.py | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+) create mode 100644 changelog.d/4397.bugfix (limited to 
'synapse/handlers') diff --git a/changelog.d/4397.bugfix b/changelog.d/4397.bugfix new file mode 100644 index 0000000000..e7526d4454 --- /dev/null +++ b/changelog.d/4397.bugfix @@ -0,0 +1 @@ +Fix high CPU usage due to remote devicelist updates diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 9e017116a9..8955cde4ed 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -532,6 +532,25 @@ class DeviceListEduUpdater(object): stream_id = result["stream_id"] devices = result["devices"] + + # If the remote server has more than ~1000 devices for this user + # we assume that something is going horribly wrong (e.g. a bot + # that logs in and creates a new device every time it tries to + # send a message). Maintaining lots of devices per user in the + # cache can cause serious performance issues as if this request + # takes more than 60s to complete, internal replication from the + # inbound federation worker to the synapse master may time out + # causing the inbound federation to fail and causing the remote + # server to retry, causing a DoS. So in this scenario we give + # up on storing the total list of devices and only handle the + # delta instead. + if len(devices) > 1000: + logger.warn( + "Ignoring device list snapshot for %s as it has >1K devs (%d)", + user_id, len(devices) + ) + devices = [] + yield self.store.update_remote_device_list_cache( user_id, devices, stream_id, ) -- cgit 1.4.1 From 9feb5d0b71104bea4e366d451d5dddd447e16196 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 17 Jan 2019 12:40:09 +0000 Subject: sign_request -> build_auth_headers (#4408) Just got very confused about the fact that the headers are only an output, not an input. --- changelog.d/4408.misc | 1 + synapse/handlers/identity.py | 9 ++++++--- synapse/http/matrixfederationclient.py | 23 +++++++++++------------ 3 files changed, 18 insertions(+), 15 deletions(-) create mode 100644 changelog.d/4408.misc (limited to 'synapse/handlers') diff --git a/changelog.d/4408.misc b/changelog.d/4408.misc new file mode 100644 index 0000000000..729bafd62e --- /dev/null +++ b/changelog.d/4408.misc @@ -0,0 +1 @@ +Refactor 'sign_request' as 'build_auth_headers' \ No newline at end of file diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 5feb3f22a6..39184f0e22 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -167,18 +167,21 @@ class IdentityHandler(BaseHandler): "mxid": mxid, "threepid": threepid, } - headers = {} + # we abuse the federation http client to sign the request, but we have to send it # using the normal http client since we don't want the SRV lookup and want normal # 'browser-like' HTTPS. 
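# Illustrative sketch, not taken from the patches: the refactor in this commit
# turns "fill in the headers dict I passed you" into "give me the header values
# and I'll attach them myself". The signature below is a placeholder string;
# the real build_auth_headers() signs the canonical-JSON request with the
# server's signing key before formatting the X-Matrix scheme shown further down.

def build_auth_headers_sketch(origin, key_id, sig_b64):
    # One Authorization value per signing key, as a list of bytes.
    header = 'X-Matrix origin=%s,key="%s",sig="%s"' % (origin, key_id, sig_b64)
    return [header.encode("ascii")]

# New calling pattern: the caller owns the headers dict.
headers = {
    b"Authorization": build_auth_headers_sketch(
        "example.org", "ed25519:a_key", "placeholder"
    ),
}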
- self.federation_http_client.sign_request( + auth_headers = self.federation_http_client.build_auth_headers( destination=None, method='POST', url_bytes='/_matrix/identity/api/v1/3pid/unbind'.encode('ascii'), - headers_dict=headers, content=content, destination_is=id_server, ) + headers = { + b"Authorization": auth_headers, + } + try: yield self.http_client.post_json_get_json( url, diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index f2a42f97a6..ea2fc64b99 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -298,9 +298,9 @@ class MatrixFederationHttpClient(object): json = request.get_json() if json: headers_dict[b"Content-Type"] = [b"application/json"] - self.sign_request( + auth_headers = self.build_auth_headers( destination_bytes, method_bytes, url_to_sign_bytes, - headers_dict, json, + json, ) data = encode_canonical_json(json) producer = FileBodyProducer( @@ -309,11 +309,12 @@ class MatrixFederationHttpClient(object): ) else: producer = None - self.sign_request( + auth_headers = self.build_auth_headers( destination_bytes, method_bytes, url_to_sign_bytes, - headers_dict, ) + headers_dict[b"Authorization"] = auth_headers + logger.info( "{%s} [%s] Sending request: %s %s", request.txn_id, request.destination, request.method, @@ -440,24 +441,23 @@ class MatrixFederationHttpClient(object): defer.returnValue(response) - def sign_request(self, destination, method, url_bytes, headers_dict, - content=None, destination_is=None): + def build_auth_headers( + self, destination, method, url_bytes, content=None, destination_is=None, + ): """ - Signs a request by adding an Authorization header to headers_dict + Builds the Authorization headers for a federation request Args: destination (bytes|None): The desination home server of the request. May be None if the destination is an identity server, in which case destination_is must be non-None. 
method (bytes): The HTTP method of the request url_bytes (bytes): The URI path of the request - headers_dict (dict[bytes, list[bytes]]): Dictionary of request headers to - append to content (object): The body of the request destination_is (bytes): As 'destination', but if the destination is an identity server Returns: - None + list[bytes]: a list of headers to be added as "Authorization:" headers """ request = { "method": method, @@ -484,8 +484,7 @@ class MatrixFederationHttpClient(object): self.server_name, key, sig, )).encode('ascii') ) - - headers_dict[b"Authorization"] = auth_headers + return auth_headers @defer.inlineCallbacks def put_json(self, destination, path, args={}, data={}, -- cgit 1.4.1 From df3a661e4adb7676682a5e3c298a2dfda18b08a1 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 18 Jan 2019 10:04:47 +0000 Subject: Search for messages across predecessor rooms Signed-off-by: Andrew Morgan --- synapse/api/filtering.py | 3 ++ synapse/handlers/search.py | 69 ++++++++++++++++++++++++++++++++++++++++++++++ synapse/storage/state.py | 1 + 3 files changed, 73 insertions(+) (limited to 'synapse/handlers') diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index 16ad654864..84000e6422 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -444,6 +444,9 @@ class Filter(object): def include_redundant_members(self): return self.filter_json.get("include_redundant_members", False) + def add_room_ids(self, room_ids): + self.rooms += room_ids + def _matches_wildcard(actual_value, filter_value): if filter_value.endswith("*"): diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index ec936bbb4e..77e7e4e0fb 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -37,6 +37,54 @@ class SearchHandler(BaseHandler): def __init__(self, hs): super(SearchHandler, self).__init__(hs) + @defer.inlineCallbacks + def get_old_rooms_from_upgraded_room(self, room_id): + """Retrieves room IDs of old rooms in the history of an upgraded room. + + We do so by checking the m.room.create event of the room for a + `predecessor` key. If it exists, we add the room ID to our return + list and then check that room for a m.room.create event and so on + until we can no longer find any more previous rooms. + + The full list of all found rooms in then returned. + + Args: + room_id (str): The ID of the room to search through. + + Returns: + dict of past room IDs as strings + """ + + historical_room_ids = [] + + while True: + state_ids = yield self.store.get_current_state_ids(room_id) + create_id = state_ids.get((EventTypes.Create, "")) + + # If we can't find the create event, assume we've hit a dead end + if not create_id: + break + + # Retrieve the room's create event + create_event = yield self.store.get_event(create_id) + + if not create_event: + break + + # Check if a predecessor room is present + predecessor = create_event.content.get("predecessor", None) + if not predecessor: + break + + # Add predecessor's room ID + historical_room_id = predecessor["room_id"] + historical_room_ids.append(historical_room_id) + + # Scan through the old room for further predecessors + room_id = historical_room_id + + defer.returnValue(historical_room_ids) + @defer.inlineCallbacks def search(self, user, content, batch=None): """Performs a full text search for a user. 
@@ -139,6 +187,27 @@ class SearchHandler(BaseHandler): room_ids = search_filter.filter_rooms(room_ids) + # If doing a subset of all rooms seearch, check if any of the rooms + # are from an upgraded room, and search their contents as well + # XXX: There is the possibility that we don't have a create event for + # the room in question, in which case we can't return all the results + # we want to. + # Ideally we would just return the results we can get now, and + # try to get more results from other servers in the background. + if search_filter.rooms: + historical_room_ids = [] + for room_id in room_ids: + # Add any previous rooms to the search if they exist + ids = yield self.get_old_rooms_from_upgraded_room(room_id) + historical_room_ids += ids + + # Add any found rooms to the list to search + for historical_room_id in historical_room_ids: + room_ids.add(historical_room_id) + + # Prevent any historical events from being filtered + search_filter.add_room_ids(historical_room_ids) + if batch_group == "room_id": room_ids.intersection_update({batch_group_key}) diff --git a/synapse/storage/state.py b/synapse/storage/state.py index a134e9b3e8..49b3ff4a71 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -448,6 +448,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): Returns: deferred: dict of (type, state_key) -> event_id """ + def _get_current_state_ids_txn(txn): txn.execute( """SELECT type, state_key, event_id FROM current_state_events -- cgit 1.4.1 From 702c4b750c4db529d3789a899aa9badfa8c9df6e Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Mon, 21 Jan 2019 09:42:59 +0000 Subject: Migrate encryption state on room upgrade (#4411) * Migrate encryption state on room upgrade Signed-off-by: Andrew Morgan * Add changelog file --- changelog.d/4411.bugfix | 1 + synapse/api/constants.py | 1 + synapse/handlers/room.py | 1 + 3 files changed, 3 insertions(+) create mode 100644 changelog.d/4411.bugfix (limited to 'synapse/handlers') diff --git a/changelog.d/4411.bugfix b/changelog.d/4411.bugfix new file mode 100644 index 0000000000..219e98a924 --- /dev/null +++ b/changelog.d/4411.bugfix @@ -0,0 +1 @@ +Ensure encrypted room state is persisted across room upgrades. 
\ No newline at end of file diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 022f772714..46c4b4b9dc 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -68,6 +68,7 @@ class EventTypes(object): Aliases = "m.room.aliases" Redaction = "m.room.redaction" ThirdPartyInvite = "m.room.third_party_invite" + Encryption = "m.room.encryption" RoomHistoryVisibility = "m.room.history_visibility" CanonicalAlias = "m.room.canonical_alias" diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 581e96c743..cb8c5f77dd 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -269,6 +269,7 @@ class RoomCreationHandler(BaseHandler): (EventTypes.RoomHistoryVisibility, ""), (EventTypes.GuestAccess, ""), (EventTypes.RoomAvatar, ""), + (EventTypes.Encryption, ""), ) old_room_state_ids = yield self.store.get_filtered_current_state_ids( -- cgit 1.4.1 From 75942af1dbe1a4aa65fc986e5662e12c1337b7de Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 17 Jan 2019 14:11:24 +0000 Subject: Fix typo --- synapse/handlers/room.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index cb8c5f77dd..8898602eeb 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -125,7 +125,7 @@ class RoomCreationHandler(BaseHandler): ) yield self.auth.check_from_context(tombstone_event, tombstone_context) - yield self.clone_exiting_room( + yield self.clone_existing_room( requester, old_room_id=old_room_id, new_room_id=new_room_id, @@ -230,7 +230,7 @@ class RoomCreationHandler(BaseHandler): ) @defer.inlineCallbacks - def clone_exiting_room( + def clone_existing_room( self, requester, old_room_id, new_room_id, new_room_version, tombstone_event_id, ): -- cgit 1.4.1 From 4ff6d2224543146723e3109681636de73a738f25 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 17 Jan 2019 15:22:03 +0000 Subject: Preserve DM status of a room on upgrade Signed-off-by: Andrew Morgan --- synapse/handlers/room.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 8898602eeb..3bfba6c744 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -260,8 +260,27 @@ class RoomCreationHandler(BaseHandler): } } + # Copy over whether this room is considered a direct message by this + # user or not + user_account_data = yield self.store.get_account_data_for_user( + user_id, + ) + + if user_account_data: + direct_rooms = user_account_data[0]["m.direct"] + # Check if this room was a DM + if old_room_id in direct_rooms[user_id]: + # Add this room ID to the list of direct rooms + direct_rooms[user_id].append(new_room_id) + + # Add this room ID to the list of direct rooms for this user + yield self.store.add_account_data_for_user( + user_id, "m.direct", direct_rooms, + ) + initial_state = dict() + # Replicate relevant room events types_to_copy = ( (EventTypes.JoinRules, ""), (EventTypes.Name, ""), -- cgit 1.4.1 From 887ca93a1b3b01d1da2c80549d5d2b1166e0608b Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 17 Jan 2019 15:46:39 +0000 Subject: Prevent crash on user who doesn't have any direct rooms --- synapse/handlers/room.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 3bfba6c744..626c9f9166 100644 --- a/synapse/handlers/room.py +++ 
b/synapse/handlers/room.py @@ -266,7 +266,7 @@ class RoomCreationHandler(BaseHandler): user_id, ) - if user_account_data: + if user_account_data and "m.direct" in user_account_data[0]: direct_rooms = user_account_data[0]["m.direct"] # Check if this room was a DM if old_room_id in direct_rooms[user_id]: -- cgit 1.4.1 From ea8903fcc904c88027bcec62dc6646377941db41 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 18 Jan 2019 15:12:47 +0000 Subject: Migrating dm and room tags work for migrator --- synapse/handlers/room.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 626c9f9166..07cf5f3f18 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -260,12 +260,16 @@ class RoomCreationHandler(BaseHandler): } } - # Copy over whether this room is considered a direct message by this - # user or not + # Copy over room account data for this user user_account_data = yield self.store.get_account_data_for_user( user_id, ) + room_tags = yield self.store.get_tags_for_room( + user_id, old_room_id, + ) + + # Copy direct message state if applicable if user_account_data and "m.direct" in user_account_data[0]: direct_rooms = user_account_data[0]["m.direct"] # Check if this room was a DM @@ -278,6 +282,13 @@ class RoomCreationHandler(BaseHandler): user_id, "m.direct", direct_rooms, ) + # Copy room tags if applicable + if room_tags: + # Copy each room tag to the new room + for tag in room_tags.keys(): + tag_content = room_tags[tag] + yield self.store.add_tag_to_room(user_id, new_room_id, tag, tag_content) + initial_state = dict() # Replicate relevant room events -- cgit 1.4.1 From 25d64a846ab4974da0d8a51b3a1ff014a10d319d Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 18 Jan 2019 15:27:11 +0000 Subject: Fix typos --- changelog.d/4412.bugfix | 2 +- synapse/handlers/room_member.py | 16 ++++++++-------- synapse/rest/client/v1/room.py | 8 ++++---- synapse/storage/background_updates.py | 2 +- 4 files changed, 14 insertions(+), 14 deletions(-) (limited to 'synapse/handlers') diff --git a/changelog.d/4412.bugfix b/changelog.d/4412.bugfix index 2388ed5b59..007be1b7db 100644 --- a/changelog.d/4412.bugfix +++ b/changelog.d/4412.bugfix @@ -1 +1 @@ -Copy over whether a room is a direct message or not on room upgrade. \ No newline at end of file +Copy over whether a room is a direct message and any associated room tags on room upgrade. \ No newline at end of file diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 07fd3e82fc..1d337ad95c 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -63,7 +63,7 @@ class RoomMemberHandler(object): self.directory_handler = hs.get_handlers().directory_handler self.registration_handler = hs.get_handlers().registration_handler self.profile_handler = hs.get_profile_handler() - self.event_creation_hander = hs.get_event_creation_handler() + self.event_creation_handler = hs.get_event_creation_handler() self.member_linearizer = Linearizer(name="member") @@ -168,7 +168,7 @@ class RoomMemberHandler(object): if requester.is_guest: content["kind"] = "guest" - event, context = yield self.event_creation_hander.create_event( + event, context = yield self.event_creation_handler.create_event( requester, { "type": EventTypes.Member, @@ -186,14 +186,14 @@ class RoomMemberHandler(object): ) # Check if this event matches the previous membership event for the user. 
- duplicate = yield self.event_creation_hander.deduplicate_state_event( + duplicate = yield self.event_creation_handler.deduplicate_state_event( event, context, ) if duplicate is not None: # Discard the new event since this membership change is a no-op. defer.returnValue(duplicate) - yield self.event_creation_hander.handle_new_client_event( + yield self.event_creation_handler.handle_new_client_event( requester, event, context, @@ -493,7 +493,7 @@ class RoomMemberHandler(object): else: requester = synapse.types.create_requester(target_user) - prev_event = yield self.event_creation_hander.deduplicate_state_event( + prev_event = yield self.event_creation_handler.deduplicate_state_event( event, context, ) if prev_event is not None: @@ -513,7 +513,7 @@ class RoomMemberHandler(object): if is_blocked: raise SynapseError(403, "This room has been blocked on this server") - yield self.event_creation_hander.handle_new_client_event( + yield self.event_creation_handler.handle_new_client_event( requester, event, context, @@ -527,7 +527,7 @@ class RoomMemberHandler(object): ) if event.membership == Membership.JOIN: - # Only fire user_joined_room if the user has acutally joined the + # Only fire user_joined_room if the user has actually joined the # room. Don't bother if the user is just changing their profile # info. newly_joined = True @@ -755,7 +755,7 @@ class RoomMemberHandler(object): ) ) - yield self.event_creation_hander.create_and_send_nonmember_event( + yield self.event_creation_handler.create_and_send_nonmember_event( requester, { "type": EventTypes.ThirdPartyInvite, diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index fcfe7857f6..48da4d557f 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -89,7 +89,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet): def __init__(self, hs): super(RoomStateEventRestServlet, self).__init__(hs) self.handlers = hs.get_handlers() - self.event_creation_hander = hs.get_event_creation_handler() + self.event_creation_handler = hs.get_event_creation_handler() self.room_member_handler = hs.get_room_member_handler() self.message_handler = hs.get_message_handler() @@ -172,7 +172,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet): content=content, ) else: - event = yield self.event_creation_hander.create_and_send_nonmember_event( + event = yield self.event_creation_handler.create_and_send_nonmember_event( requester, event_dict, txn_id=txn_id, @@ -189,7 +189,7 @@ class RoomSendEventRestServlet(ClientV1RestServlet): def __init__(self, hs): super(RoomSendEventRestServlet, self).__init__(hs) - self.event_creation_hander = hs.get_event_creation_handler() + self.event_creation_handler = hs.get_event_creation_handler() def register(self, http_server): # /rooms/$roomid/send/$event_type[/$txn_id] @@ -211,7 +211,7 @@ class RoomSendEventRestServlet(ClientV1RestServlet): if b'ts' in request.args and requester.app_service: event_dict['origin_server_ts'] = parse_integer(request, "ts", 0) - event = yield self.event_creation_hander.create_and_send_nonmember_event( + event = yield self.event_creation_handler.create_and_send_nonmember_event( requester, event_dict, txn_id=txn_id, diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index 5fe1ca2de7..60cdc884e6 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -240,7 +240,7 @@ class BackgroundUpdateStore(SQLBaseStore): * An integer count of the number of items to update in this 
batch. The handler should return a deferred integer count of items updated. - The hander is responsible for updating the progress of the update. + The handler is responsible for updating the progress of the update. Args: update_name(str): The name of the update that this code handles. -- cgit 1.4.1 From 8c85f0833d568f90d397dad30bc0ba28cf5d538b Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 18 Jan 2019 17:03:09 +0000 Subject: tags, m.direct copying over correctly --- synapse/handlers/room.py | 22 ++++++++++-------- synapse/handlers/room_member.py | 51 +++++++++++++++++++++++++++++++++++++++-- 2 files changed, 62 insertions(+), 11 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 07cf5f3f18..9ac04eda50 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -272,15 +272,19 @@ class RoomCreationHandler(BaseHandler): # Copy direct message state if applicable if user_account_data and "m.direct" in user_account_data[0]: direct_rooms = user_account_data[0]["m.direct"] - # Check if this room was a DM - if old_room_id in direct_rooms[user_id]: - # Add this room ID to the list of direct rooms - direct_rooms[user_id].append(new_room_id) - - # Add this room ID to the list of direct rooms for this user - yield self.store.add_account_data_for_user( - user_id, "m.direct", direct_rooms, - ) + + # Check which key this room is under + for key, room_id_list in direct_rooms.items(): + for room_id in room_id_list: + if room_id == old_room_id: + # Add new room_id to this key + direct_rooms[key].append(new_room_id) + + # Save back to user's m.direct account data + yield self.store.add_account_data_for_user( + user_id, "m.direct", direct_rooms, + ) + break # Copy room tags if applicable if room_tags: diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 1d337ad95c..35dd448c02 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -161,6 +161,8 @@ class RoomMemberHandler(object): ratelimit=True, content=None, ): + user_id = target.to_string() + if content is None: content = {} @@ -175,7 +177,7 @@ class RoomMemberHandler(object): "content": content, "room_id": room_id, "sender": requester.user.to_string(), - "state_key": target.to_string(), + "state_key": user_id, # For backwards compatibility: "membership": membership, @@ -204,7 +206,7 @@ class RoomMemberHandler(object): prev_state_ids = yield context.get_prev_state_ids(self.store) prev_member_event_id = prev_state_ids.get( - (EventTypes.Member, target.to_string()), + (EventTypes.Member, user_id), None ) @@ -218,6 +220,51 @@ class RoomMemberHandler(object): newly_joined = prev_member_event.membership != Membership.JOIN if newly_joined: yield self._user_joined_room(target, room_id) + + # Copy over direct message status and room tags if this is a join + # on an upgraded room + + # Check if this is an upgraded room + state_ids = yield self.store.get_current_state_ids(room_id) + create_id = state_ids.get((EventTypes.Create, "")) + if not create_id: + return + create_event = yield self.store.get_event(create_id) + + if "predecessor" in create_event["content"]: + old_room_id = create_event["content"]["predecessor"]["room_id"] + + # Copy over room account data from predecessor room to upgraded room + user_account_data = yield self.store.get_account_data_for_user( + user_id, + ) + room_tags = yield self.store.get_tags_for_room( + user_id, old_room_id, + ) + + # Copy direct message state if applicable + if 
user_account_data and "m.direct" in user_account_data[0]: + direct_rooms = user_account_data[0]["m.direct"] + + # Check which key this room is under + for key, room_id_list in direct_rooms.items(): + for rid in room_id_list: + if rid == old_room_id: + # Add new room_id to this key + direct_rooms[key].append(room_id) + + # Save back to user's m.direct account data + yield self.store.add_account_data_for_user( + user_id, "m.direct", direct_rooms, + ) + break + + # Copy room tags if applicable + if room_tags: + # Copy each room tag to the new room + for tag in room_tags.keys(): + tag_content = room_tags[tag] + yield self.store.add_tag_to_room(user_id, room_id, tag, tag_content) elif event.membership == Membership.LEAVE: if prev_member_event_id: prev_member_event = yield self.store.get_event(prev_member_event_id) -- cgit 1.4.1 From 48951f437fb792d5d4da70c95fa8d20467428604 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 22 Jan 2019 10:52:56 +0000 Subject: Join logic covers both room creator and arbitrary users --- synapse/handlers/room.py | 33 --------------------------------- 1 file changed, 33 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 9ac04eda50..388302de09 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -260,39 +260,6 @@ class RoomCreationHandler(BaseHandler): } } - # Copy over room account data for this user - user_account_data = yield self.store.get_account_data_for_user( - user_id, - ) - - room_tags = yield self.store.get_tags_for_room( - user_id, old_room_id, - ) - - # Copy direct message state if applicable - if user_account_data and "m.direct" in user_account_data[0]: - direct_rooms = user_account_data[0]["m.direct"] - - # Check which key this room is under - for key, room_id_list in direct_rooms.items(): - for room_id in room_id_list: - if room_id == old_room_id: - # Add new room_id to this key - direct_rooms[key].append(new_room_id) - - # Save back to user's m.direct account data - yield self.store.add_account_data_for_user( - user_id, "m.direct", direct_rooms, - ) - break - - # Copy room tags if applicable - if room_tags: - # Copy each room tag to the new room - for tag in room_tags.keys(): - tag_content = room_tags[tag] - yield self.store.add_tag_to_room(user_id, new_room_id, tag, tag_content) - initial_state = dict() # Replicate relevant room events -- cgit 1.4.1 From 8086a5c05e5ddf7a24662882cbb090ec826110b4 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 22 Jan 2019 10:56:35 +0000 Subject: Fix comments --- synapse/handlers/room_member.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 35dd448c02..d14da4bc7f 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -211,7 +211,7 @@ class RoomMemberHandler(object): ) if event.membership == Membership.JOIN: - # Only fire user_joined_room if the user has acutally joined the + # Only fire user_joined_room if the user has actually joined the # room. Don't bother if the user is just changing their profile # info. 
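# Standalone sketch (not the handler code itself) of the m.direct copy that the
# join path above performs for upgraded rooms: the m.direct content maps a user
# id to a list of room ids, and every list containing the old room gains the
# new one. Unlike the handler above, this sketch does not stop at the first match.

def copy_direct_rooms(direct_rooms, old_room_id, new_room_id):
    for room_id_list in direct_rooms.values():
        if old_room_id in room_id_list and new_room_id not in room_id_list:
            room_id_list.append(new_room_id)
    return direct_rooms

direct = {"@friend:example.org": ["!oldroom:example.org"]}
copy_direct_rooms(direct, "!oldroom:example.org", "!newroom:example.org")
assert direct["@friend:example.org"] == ["!oldroom:example.org", "!newroom:example.org"]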
newly_joined = True @@ -234,7 +234,7 @@ class RoomMemberHandler(object): if "predecessor" in create_event["content"]: old_room_id = create_event["content"]["predecessor"]["room_id"] - # Copy over room account data from predecessor room to upgraded room + # Retrieve room account data for predecessor room user_account_data = yield self.store.get_account_data_for_user( user_id, ) -- cgit 1.4.1 From c9bfb058d85f6205fada062c78a4d1eca119417c Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 22 Jan 2019 11:12:48 +0000 Subject: Fix a bug with single-room search searching all rooms * Create a new method for getting predecessor rooms * Remove formatting change --- synapse/api/filtering.py | 15 +++++++++++++-- synapse/handlers/search.py | 42 ++++++++++-------------------------------- synapse/storage/state.py | 29 ++++++++++++++++++++++++++++- 3 files changed, 51 insertions(+), 35 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index 84000e6422..0d8957175d 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -444,8 +444,19 @@ class Filter(object): def include_redundant_members(self): return self.filter_json.get("include_redundant_members", False) - def add_room_ids(self, room_ids): - self.rooms += room_ids + def with_room_ids(self, room_ids): + """Returns a new filter with the given room IDs appended. + + Args: + room_ids (list): A list of room_ids. + + Returns: + filter: A new filter including the given rooms and the old + filter's rooms. + """ + newFilter = self + newFilter.rooms += room_ids + return newFilter def _matches_wildcard(actual_value, filter_value): diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 77e7e4e0fb..75c26fe065 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -49,39 +49,26 @@ class SearchHandler(BaseHandler): The full list of all found rooms in then returned. Args: - room_id (str): The ID of the room to search through. + room_id (str): id of the room to search through. Returns: - dict of past room IDs as strings + Deferred[iterable[str]]: predecessor room ids """ historical_room_ids = [] while True: - state_ids = yield self.store.get_current_state_ids(room_id) - create_id = state_ids.get((EventTypes.Create, "")) + predecessor = yield self.store.get_room_predecessor(room_id) - # If we can't find the create event, assume we've hit a dead end - if not create_id: - break - - # Retrieve the room's create event - create_event = yield self.store.get_event(create_id) - - if not create_event: - break - - # Check if a predecessor room is present - predecessor = create_event.content.get("predecessor", None) + # If no predecessor, assume we've hit a dead end if not predecessor: break # Add predecessor's room ID - historical_room_id = predecessor["room_id"] - historical_room_ids.append(historical_room_id) + historical_room_ids.append(predecessor["room_id"]) # Scan through the old room for further predecessors - room_id = historical_room_id + room_id = predecessor["room_id"] defer.returnValue(historical_room_ids) @@ -185,28 +172,19 @@ class SearchHandler(BaseHandler): ) room_ids = set(r.room_id for r in rooms) - room_ids = search_filter.filter_rooms(room_ids) - # If doing a subset of all rooms seearch, check if any of the rooms # are from an upgraded room, and search their contents as well - # XXX: There is the possibility that we don't have a create event for - # the room in question, in which case we can't return all the results - # we want to. 
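# Standalone sketch of the predecessor walk implemented above, written against
# a plain dict of m.room.create contents instead of the real storage calls
# (get_room_predecessor / get_current_state_ids); the room ids are made up.

def predecessor_chain(room_id, create_contents):
    historical_room_ids = []
    while True:
        predecessor = create_contents.get(room_id, {}).get("predecessor")
        if not predecessor:
            break
        room_id = predecessor["room_id"]
        historical_room_ids.append(room_id)
    return historical_room_ids

# A room upgraded twice: !v3 was created from !v2, which was created from !v1.
creates = {
    "!v3:test": {"predecessor": {"room_id": "!v2:test"}},
    "!v2:test": {"predecessor": {"room_id": "!v1:test"}},
    "!v1:test": {},
}
assert predecessor_chain("!v3:test", creates) == ["!v2:test", "!v1:test"]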
- # Ideally we would just return the results we can get now, and - # try to get more results from other servers in the background. if search_filter.rooms: historical_room_ids = [] - for room_id in room_ids: + for room_id in search_filter.rooms: # Add any previous rooms to the search if they exist ids = yield self.get_old_rooms_from_upgraded_room(room_id) historical_room_ids += ids - # Add any found rooms to the list to search - for historical_room_id in historical_room_ids: - room_ids.add(historical_room_id) - # Prevent any historical events from being filtered - search_filter.add_room_ids(historical_room_ids) + search_filter = search_filter.with_room_ids(historical_room_ids) + + room_ids = search_filter.filter_rooms(room_ids) if batch_group == "room_id": room_ids.intersection_update({batch_group_key}) diff --git a/synapse/storage/state.py b/synapse/storage/state.py index 49b3ff4a71..b064671851 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -437,6 +437,34 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): create_event = yield self.get_event(create_id) defer.returnValue(create_event.content.get("room_version", "1")) + @defer.inlineCallbacks + def get_room_predecessor(self, room_id): + """Get the predecessor room of an upgraded room if one exists. + Otherwise return None. + + Args: + room_id (str) + + Returns: + Deferred[str]: predecessor room id + """ + + state_ids = yield self.get_current_state_ids(room_id) + create_id = state_ids.get((EventTypes.Create, "")) + + # If we can't find the create event, assume we've hit a dead end + if not create_id: + return None + + # Retrieve the room's create event + create_event = yield self.get_event(create_id) + + if not create_event: + return None + + # Return predecessor if present + return create_event.content.get("predecessor", None) + @cached(max_entries=100000, iterable=True) def get_current_state_ids(self, room_id): """Get the current state event ids for a room based on the @@ -448,7 +476,6 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): Returns: deferred: dict of (type, state_key) -> event_id """ - def _get_current_state_ids_txn(txn): txn.execute( """SELECT type, state_key, event_id FROM current_state_events -- cgit 1.4.1 From c4875d8c767035036fa1e5c1ee58e9c01ac1e08d Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 22 Jan 2019 12:13:46 +0000 Subject: Prevent duplicate room IDs in m.direct --- synapse/handlers/room_member.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index d14da4bc7f..cdaf65cdef 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -248,16 +248,15 @@ class RoomMemberHandler(object): # Check which key this room is under for key, room_id_list in direct_rooms.items(): - for rid in room_id_list: - if rid == old_room_id: - # Add new room_id to this key - direct_rooms[key].append(room_id) - - # Save back to user's m.direct account data - yield self.store.add_account_data_for_user( - user_id, "m.direct", direct_rooms, - ) - break + if old_room_id in room_id_list and room_id not in room_id_list: + # Add new room_id to this key + direct_rooms[key].append(room_id) + + # Save back to user's m.direct account data + yield self.store.add_account_data_for_user( + user_id, "m.direct", direct_rooms, + ) + break # Copy room tags if applicable if room_tags: -- cgit 1.4.1 From 766a172b9911ff04ae3b314155462dfd40c76fba 
Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 22 Jan 2019 13:23:26 +0000 Subject: lint --- synapse/handlers/room_member.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index cdaf65cdef..eb46f5f4fa 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -263,7 +263,9 @@ class RoomMemberHandler(object): # Copy each room tag to the new room for tag in room_tags.keys(): tag_content = room_tags[tag] - yield self.store.add_tag_to_room(user_id, room_id, tag, tag_content) + yield self.store.add_tag_to_room( + user_id, room_id, tag, tag_content + ) elif event.membership == Membership.LEAVE: if prev_member_event_id: prev_member_event = yield self.store.get_event(prev_member_event_id) -- cgit 1.4.1 From 6129e52f437c2e03b711453434924e170f3d11bf Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 23 Jan 2019 19:39:06 +1100 Subject: Support ACME for certificate provisioning (#4384) --- changelog.d/4384.feature | 1 + scripts-dev/build_debian_packages | 2 +- synapse/app/homeserver.py | 56 ++++++++++++--- synapse/config/_base.py | 4 +- synapse/config/tls.py | 115 ++++++++++++++++++++++------- synapse/handlers/acme.py | 147 ++++++++++++++++++++++++++++++++++++++ synapse/python_dependencies.py | 4 ++ synapse/server.py | 5 ++ 8 files changed, 298 insertions(+), 36 deletions(-) create mode 100644 changelog.d/4384.feature create mode 100644 synapse/handlers/acme.py (limited to 'synapse/handlers') diff --git a/changelog.d/4384.feature b/changelog.d/4384.feature new file mode 100644 index 0000000000..daedcd58c4 --- /dev/null +++ b/changelog.d/4384.feature @@ -0,0 +1 @@ +Synapse can now automatically provision TLS certificates via ACME (the protocol used by CAs like Let's Encrypt). diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages index 577d93e6f6..6b9be99060 100755 --- a/scripts-dev/build_debian_packages +++ b/scripts-dev/build_debian_packages @@ -10,12 +10,12 @@ # can be passed on the commandline for debugging. import argparse -from concurrent.futures import ThreadPoolExecutor import os import signal import subprocess import sys import threading +from concurrent.futures import ThreadPoolExecutor DISTS = ( "debian:stretch", diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index f3ac3d19f0..ffc49d77cc 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -13,10 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ import gc import logging import os import sys +import traceback from six import iteritems @@ -324,17 +326,12 @@ def setup(config_options): events.USE_FROZEN_DICTS = config.use_frozen_dicts - tls_server_context_factory = context_factory.ServerContextFactory(config) - tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) - database_engine = create_engine(config.database_config) config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection hs = SynapseHomeServer( config.server_name, db_config=config.database_config, - tls_server_context_factory=tls_server_context_factory, - tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, @@ -361,12 +358,53 @@ def setup(config_options): logger.info("Database prepared in %s.", config.database_config['name']) hs.setup() - hs.start_listening() + @defer.inlineCallbacks def start(): - hs.get_pusherpool().start() - hs.get_datastore().start_profiling() - hs.get_datastore().start_doing_background_updates() + try: + # Check if the certificate is still valid. + cert_days_remaining = hs.config.is_disk_cert_valid() + + if hs.config.acme_enabled: + # If ACME is enabled, we might need to provision a certificate + # before starting. + acme = hs.get_acme_handler() + + # Start up the webservices which we will respond to ACME + # challenges with. + yield acme.start_listening() + + # We want to reprovision if cert_days_remaining is None (meaning no + # certificate exists), or the days remaining number it returns + # is less than our re-registration threshold. + if (cert_days_remaining is None) or ( + not cert_days_remaining > hs.config.acme_reprovision_threshold + ): + yield acme.provision_certificate() + + # Read the certificate from disk and build the context factories for + # TLS. + hs.config.read_certificate_from_disk() + hs.tls_server_context_factory = context_factory.ServerContextFactory(config) + hs.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( + config + ) + + # It is now safe to start your Synapse. + hs.start_listening() + hs.get_pusherpool().start() + hs.get_datastore().start_profiling() + hs.get_datastore().start_doing_background_updates() + except Exception as e: + # If a DeferredList failed (like in listening on the ACME listener), + # we need to print the subfailure explicitly. + if isinstance(e, defer.FirstError): + e.subFailure.printTraceback(sys.stderr) + sys.exit(1) + + # Something else went wrong when starting. Print it and bail out. 
+ traceback.print_exc(file=sys.stderr) + sys.exit(1) reactor.callWhenRunning(start) diff --git a/synapse/config/_base.py b/synapse/config/_base.py index fd2d6d52ef..5858fb92b4 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -367,7 +367,7 @@ class Config(object): if not keys_directory: keys_directory = os.path.dirname(config_files[-1]) - config_dir_path = os.path.abspath(keys_directory) + self.config_dir_path = os.path.abspath(keys_directory) specified_config = {} for config_file in config_files: @@ -379,7 +379,7 @@ class Config(object): server_name = specified_config["server_name"] config_string = self.generate_config( - config_dir_path=config_dir_path, + config_dir_path=self.config_dir_path, data_dir_path=os.getcwd(), server_name=server_name, generate_secrets=False, diff --git a/synapse/config/tls.py b/synapse/config/tls.py index bb8952c672..a75e233aa0 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -13,60 +13,110 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging import os +from datetime import datetime from hashlib import sha256 from unpaddedbase64 import encode_base64 from OpenSSL import crypto -from ._base import Config +from synapse.config._base import Config + +logger = logging.getLogger() class TlsConfig(Config): def read_config(self, config): - self.tls_certificate = self.read_tls_certificate( - config.get("tls_certificate_path") - ) - self.tls_certificate_file = config.get("tls_certificate_path") + acme_config = config.get("acme", {}) + self.acme_enabled = acme_config.get("enabled", False) + self.acme_url = acme_config.get( + "url", "https://acme-v01.api.letsencrypt.org/directory" + ) + self.acme_port = acme_config.get("port", 8449) + self.acme_bind_addresses = acme_config.get("bind_addresses", ["127.0.0.1"]) + self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30) + + self.tls_certificate_file = os.path.abspath(config.get("tls_certificate_path")) + self.tls_private_key_file = os.path.abspath(config.get("tls_private_key_path")) + self._original_tls_fingerprints = config["tls_fingerprints"] + self.tls_fingerprints = list(self._original_tls_fingerprints) self.no_tls = config.get("no_tls", False) - if self.no_tls: - self.tls_private_key = None - else: - self.tls_private_key = self.read_tls_private_key( - config.get("tls_private_key_path") - ) + # This config option applies to non-federation HTTP clients + # (e.g. for talking to recaptcha, identity servers, and such) + # It should never be used in production, and is intended for + # use only when running tests. + self.use_insecure_ssl_client_just_for_testing_do_not_use = config.get( + "use_insecure_ssl_client_just_for_testing_do_not_use" + ) + + self.tls_certificate = None + self.tls_private_key = None + + def is_disk_cert_valid(self): + """ + Is the certificate we have on disk valid, and if so, for how long? + + Returns: + int: Days remaining of certificate validity. + None: No certificate exists. 
+ """ + if not os.path.exists(self.tls_certificate_file): + return None + + try: + with open(self.tls_certificate_file, 'rb') as f: + cert_pem = f.read() + except Exception: + logger.exception("Failed to read existing certificate off disk!") + raise + + try: + tls_certificate = crypto.load_certificate(crypto.FILETYPE_PEM, cert_pem) + except Exception: + logger.exception("Failed to parse existing certificate off disk!") + raise + + # YYYYMMDDhhmmssZ -- in UTC + expires_on = datetime.strptime( + tls_certificate.get_notAfter().decode('ascii'), "%Y%m%d%H%M%SZ" + ) + now = datetime.utcnow() + days_remaining = (expires_on - now).days + return days_remaining - self.tls_fingerprints = config["tls_fingerprints"] + def read_certificate_from_disk(self): + """ + Read the certificates from disk. + """ + self.tls_certificate = self.read_tls_certificate(self.tls_certificate_file) + + if not self.no_tls: + self.tls_private_key = self.read_tls_private_key(self.tls_private_key_file) + + self.tls_fingerprints = list(self._original_tls_fingerprints) # Check that our own certificate is included in the list of fingerprints # and include it if it is not. x509_certificate_bytes = crypto.dump_certificate( - crypto.FILETYPE_ASN1, - self.tls_certificate + crypto.FILETYPE_ASN1, self.tls_certificate ) sha256_fingerprint = encode_base64(sha256(x509_certificate_bytes).digest()) sha256_fingerprints = set(f["sha256"] for f in self.tls_fingerprints) if sha256_fingerprint not in sha256_fingerprints: self.tls_fingerprints.append({u"sha256": sha256_fingerprint}) - # This config option applies to non-federation HTTP clients - # (e.g. for talking to recaptcha, identity servers, and such) - # It should never be used in production, and is intended for - # use only when running tests. - self.use_insecure_ssl_client_just_for_testing_do_not_use = config.get( - "use_insecure_ssl_client_just_for_testing_do_not_use" - ) - def default_config(self, config_dir_path, server_name, **kwargs): base_key_name = os.path.join(config_dir_path, server_name) tls_certificate_path = base_key_name + ".tls.crt" tls_private_key_path = base_key_name + ".tls.key" - return """\ + return ( + """\ # PEM encoded X509 certificate for TLS. # You can replace the self-signed certificate that synapse # autogenerates on launch with your own SSL certificate + key pair @@ -107,7 +157,24 @@ class TlsConfig(Config): # tls_fingerprints: [] # tls_fingerprints: [{"sha256": ""}] - """ % locals() + + ## Support for ACME certificate auto-provisioning. + # acme: + # enabled: false + ## ACME path. + ## If you only want to test, use the staging url: + ## https://acme-staging.api.letsencrypt.org/directory + # url: 'https://acme-v01.api.letsencrypt.org/directory' + ## Port number (to listen for the HTTP-01 challenge). + ## Using port 80 requires utilising something like authbind, or proxying to it. + # port: 8449 + ## Hosts to bind to. + # bind_addresses: ['127.0.0.1'] + ## How many days remaining on a certificate before it is renewed. + # reprovision_threshold: 30 + """ + % locals() + ) def read_tls_certificate(self, cert_path): cert_pem = self.read_file(cert_path, "tls_certificate") diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py new file mode 100644 index 0000000000..73ea7ed018 --- /dev/null +++ b/synapse/handlers/acme.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +import attr +from zope.interface import implementer + +from twisted.internet import defer +from twisted.internet.endpoints import serverFromString +from twisted.python.filepath import FilePath +from twisted.python.url import URL +from twisted.web import server, static +from twisted.web.resource import Resource + +logger = logging.getLogger(__name__) + +try: + from txacme.interfaces import ICertificateStore + + @attr.s + @implementer(ICertificateStore) + class ErsatzStore(object): + """ + A store that only stores in memory. + """ + + certs = attr.ib(default=attr.Factory(dict)) + + def store(self, server_name, pem_objects): + self.certs[server_name] = [o.as_bytes() for o in pem_objects] + return defer.succeed(None) + + +except ImportError: + # txacme is missing + pass + + +class AcmeHandler(object): + def __init__(self, hs): + self.hs = hs + self.reactor = hs.get_reactor() + + @defer.inlineCallbacks + def start_listening(self): + + # Configure logging for txacme, if you need to debug + # from eliot import add_destinations + # from eliot.twisted import TwistedDestination + # + # add_destinations(TwistedDestination()) + + from txacme.challenges import HTTP01Responder + from txacme.service import AcmeIssuingService + from txacme.endpoint import load_or_create_client_key + from txacme.client import Client + from josepy.jwa import RS256 + + self._store = ErsatzStore() + responder = HTTP01Responder() + + self._issuer = AcmeIssuingService( + cert_store=self._store, + client_creator=( + lambda: Client.from_url( + reactor=self.reactor, + url=URL.from_text(self.hs.config.acme_url), + key=load_or_create_client_key( + FilePath(self.hs.config.config_dir_path) + ), + alg=RS256, + ) + ), + clock=self.reactor, + responders=[responder], + ) + + well_known = Resource() + well_known.putChild(b'acme-challenge', responder.resource) + responder_resource = Resource() + responder_resource.putChild(b'.well-known', well_known) + responder_resource.putChild(b'check', static.Data(b'OK', b'text/plain')) + + srv = server.Site(responder_resource) + + listeners = [] + + for host in self.hs.config.acme_bind_addresses: + logger.info( + "Listening for ACME requests on %s:%s", host, self.hs.config.acme_port + ) + endpoint = serverFromString( + self.reactor, "tcp:%s:interface=%s" % (self.hs.config.acme_port, host) + ) + listeners.append(endpoint.listen(srv)) + + # Make sure we are registered to the ACME server. There's no public API + # for this, it is usually triggered by startService, but since we don't + # want it to control where we save the certificates, we have to reach in + # and trigger the registration machinery ourselves. + self._issuer._registered = False + yield self._issuer._ensure_registered() + + # Return a Deferred that will fire when all the servers have started up. 
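
# --- Illustrative sketch (not part of the patch above): the resource tree the
# ACME listener serves -- an HTTP-01 responder mounted under
# /.well-known/acme-challenge plus a /check endpoint for probing that the
# listener is up. This standalone version swaps the txacme responder for a
# plain static resource so it runs without txacme installed.
from twisted.internet import endpoints, reactor
from twisted.web import server, static
from twisted.web.resource import Resource

challenge_resource = static.Data(b"token-goes-here", b"text/plain")  # stand-in

well_known = Resource()
well_known.putChild(b"acme-challenge", challenge_resource)

root = Resource()
root.putChild(b".well-known", well_known)
root.putChild(b"check", static.Data(b"OK", b"text/plain"))

endpoint = endpoints.serverFromString(reactor, "tcp:8449:interface=127.0.0.1")
endpoint.listen(server.Site(root))
# reactor.run()  # uncomment to actually serve the challenge responder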
+ yield defer.DeferredList(listeners, fireOnOneErrback=True, consumeErrors=True) + + @defer.inlineCallbacks + def provision_certificate(self): + + logger.warning("Reprovisioning %s", self.hs.hostname) + + try: + yield self._issuer.issue_cert(self.hs.hostname) + except Exception: + logger.exception("Fail!") + raise + logger.warning("Reprovisioned %s, saving.", self.hs.hostname) + cert_chain = self._store.certs[self.hs.hostname] + + try: + with open(self.hs.config.tls_private_key_file, "wb") as private_key_file: + for x in cert_chain: + if x.startswith(b"-----BEGIN RSA PRIVATE KEY-----"): + private_key_file.write(x) + + with open(self.hs.config.tls_certificate_file, "wb") as certificate_file: + for x in cert_chain: + if x.startswith(b"-----BEGIN CERTIFICATE-----"): + certificate_file.write(x) + except Exception: + logger.exception("Failed saving!") + raise + + defer.returnValue(True) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 882e844eb1..756721e304 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -79,6 +79,10 @@ CONDITIONAL_REQUIREMENTS = { # ConsentResource uses select_autoescape, which arrived in jinja 2.9 "resources.consent": ["Jinja2>=2.9"], + # ACME support is required to provision TLS certificates from authorities + # that use the protocol, such as Let's Encrypt. + "acme": ["txacme>=0.9.2"], + "saml2": ["pysaml2>=4.5.0"], "url_preview": ["lxml>=3.5.0"], "test": ["mock>=2.0"], diff --git a/synapse/server.py b/synapse/server.py index 9985687b95..c8914302cf 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -46,6 +46,7 @@ from synapse.federation.transport.client import TransportLayerClient from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer from synapse.groups.groups_server import GroupsServerHandler from synapse.handlers import Handlers +from synapse.handlers.acme import AcmeHandler from synapse.handlers.appservice import ApplicationServicesHandler from synapse.handlers.auth import AuthHandler, MacaroonGenerator from synapse.handlers.deactivate_account import DeactivateAccountHandler @@ -129,6 +130,7 @@ class HomeServer(object): 'sync_handler', 'typing_handler', 'room_list_handler', + 'acme_handler', 'auth_handler', 'device_handler', 'e2e_keys_handler', @@ -310,6 +312,9 @@ class HomeServer(object): def build_e2e_room_keys_handler(self): return E2eRoomKeysHandler(self) + def build_acme_handler(self): + return AcmeHandler(self) + def build_application_service_api(self): return ApplicationServiceApi(self) -- cgit 1.4.1 From 82a92ba535b424009ef752add2e0d5e198254e04 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Wed, 23 Jan 2019 15:01:09 +0000 Subject: Add metric for user dir current event stream position --- synapse/handlers/user_directory.py | 6 ++++++ 1 file changed, 6 insertions(+) (limited to 'synapse/handlers') diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index 3c40999338..120815b09b 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -19,6 +19,7 @@ from six import iteritems from twisted.internet import defer +import synapse.metrics from synapse.api.constants import EventTypes, JoinRules, Membership from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.roommember import ProfileInfo @@ -163,6 +164,11 @@ class UserDirectoryHandler(object): yield self._handle_deltas(deltas) self.pos = deltas[-1]["stream_id"] + + # Expose current event processing 
position to prometheus + synapse.metrics.event_processing_positions.labels( + "user_dir").set(self.pos) + yield self.store.update_user_directory_stream_pos(self.pos) @defer.inlineCallbacks -- cgit 1.4.1 From 67cd4dad81ed2932009472da2d13648ca11eab73 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 23 Jan 2019 16:50:06 +0000 Subject: Implement MSC 1813 - Add room version to make APIs We also implement `make_membership_event` converting the returned room version to an event format version. --- synapse/events/__init__.py | 17 ++++++++++++++++- synapse/federation/federation_client.py | 21 ++++++++++++++++----- synapse/federation/federation_server.py | 8 +++++++- synapse/handlers/federation.py | 2 +- 4 files changed, 40 insertions(+), 8 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 9dd6940385..e6f94e68af 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -18,7 +18,7 @@ from distutils.util import strtobool import six -from synapse.api.constants import EventFormatVersions +from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventFormatVersions from synapse.util.caches import intern_dict from synapse.util.frozenutils import freeze @@ -235,3 +235,18 @@ class FrozenEvent(EventBase): self.get("type", None), self.get("state_key", None), ) + + +def room_version_to_event_format(room_version): + """Converts a room version string to the event format + + Args: + room_version (str) + + Returns: + int + """ + if room_version not in KNOWN_ROOM_VERSIONS: + raise + + return EventFormatVersions.V1 diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index d05ed91d64..0757ad12f4 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -25,14 +25,19 @@ from prometheus_client import Counter from twisted.internet import defer -from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventTypes, Membership +from synapse.api.constants import ( + KNOWN_ROOM_VERSIONS, + EventTypes, + Membership, + RoomVersions, +) from synapse.api.errors import ( CodeMessageException, FederationDeniedError, HttpResponseException, SynapseError, ) -from synapse.events import builder +from synapse.events import builder, room_version_to_event_format from synapse.federation.federation_base import FederationBase, event_from_pdu_json from synapse.util import logcontext, unwrapFirstError from synapse.util.caches.expiringcache import ExpiringCache @@ -536,8 +541,9 @@ class FederationClient(FederationBase): params (dict[str, str|Iterable[str]]): Query parameters to include in the request. Return: - Deferred: resolves to a tuple of (origin (str), event (object)) - where origin is the remote homeserver which generated the event. + Deferred[tuple[str, dict, int]]: resolves to a tuple of + `(origin, event, event_format)` where origin is the remote + homeserver which generated the event. Fails with a ``SynapseError`` if the chosen remote server returns a 300/400 code. @@ -557,6 +563,11 @@ class FederationClient(FederationBase): destination, room_id, user_id, membership, params, ) + # Note: If not supplied, the room version may be either v1 or v2, + # however either way the event format version will be v1. 
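
# --- Illustrative sketch (not part of the patches above): what a labelled
# position gauge like `event_processing_positions` boils down to with the
# prometheus_client library (the metric name and documentation string here are
# stand-ins, not necessarily the real ones).
from prometheus_client import Gauge

event_processing_positions = Gauge(
    "synapse_event_processing_positions",
    "Current stream position reached by each event-processing component",
    ["name"],
)

# After handling a batch of deltas, record how far this handler has got:
event_processing_positions.labels("user_dir").set(12345)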
+ room_version = ret.get("room_version", RoomVersions.V1) + event_format = room_version_to_event_format(room_version) + pdu_dict = ret.get("event", None) if not isinstance(pdu_dict, dict): raise InvalidResponseError("Bad 'event' field in response") @@ -574,7 +585,7 @@ class FederationClient(FederationBase): ev = builder.EventBuilder(pdu_dict) defer.returnValue( - (destination, ev) + (destination, ev, event_format) ) return self._try_destination_list( diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 37d29e7027..17eccaaea0 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -400,8 +400,14 @@ class FederationServer(FederationBase): origin_host, _ = parse_server_name(origin) yield self.check_server_matches_acl(origin_host, room_id) pdu = yield self.handler.on_make_leave_request(room_id, user_id) + + room_version = yield self.store.get_room_version(room_id) + time_now = self._clock.time_msec() - defer.returnValue({"event": pdu.get_pdu_json(time_now)}) + defer.returnValue({ + "event": pdu.get_pdu_json(time_now), + "room_version": room_version, + }) @defer.inlineCallbacks def on_send_leave_request(self, origin, content): diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index a3bb864bb2..d1ba1450e9 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1336,7 +1336,7 @@ class FederationHandler(BaseHandler): @defer.inlineCallbacks def _make_and_verify_event(self, target_hosts, room_id, user_id, membership, content={}, params=None): - origin, pdu = yield self.federation_client.make_membership_event( + origin, pdu, _ = yield self.federation_client.make_membership_event( target_hosts, room_id, user_id, -- cgit 1.4.1 From 6a41d2a187ecef484a3aa67518ec9b4b0638c614 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 23 Jan 2019 17:19:58 +0000 Subject: Add room_version param to get_pdu When we add new event format we'll need to know the event format or room version when parsing events. --- synapse/federation/federation_base.py | 11 +++++--- synapse/federation/federation_client.py | 46 ++++++++++++++++++++++++++++----- synapse/federation/federation_server.py | 4 ++- synapse/handlers/federation.py | 12 +++++++-- 4 files changed, 60 insertions(+), 13 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index b7ad729c63..d749bfdd3a 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -43,8 +43,8 @@ class FederationBase(object): self._clock = hs.get_clock() @defer.inlineCallbacks - def _check_sigs_and_hash_and_fetch(self, origin, pdus, outlier=False, - include_none=False): + def _check_sigs_and_hash_and_fetch(self, origin, pdus, room_version, + outlier=False, include_none=False): """Takes a list of PDUs and checks the signatures and hashs of each one. If a PDU fails its signature check then we check if we have it in the database and if not then request if from the originating server of @@ -56,8 +56,12 @@ class FederationBase(object): a new list. Args: + origin (str) pdu (list) - outlier (bool) + room_version (str) + outlier (bool): Whether the events are outliers or not + include_none (str): Whether to include None in the returned list + for events that have failed their checks Returns: Deferred : A list of PDUs that have valid signatures and hashes. 
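
# --- Illustrative sketch (not part of the patches above): the calling pattern
# these changes push towards -- look up the room's version first, then pass it
# down when fetching or checking remote events, so the correct event format can
# be used when parsing. Helper names match the diffs; treat this as a sketch
# rather than a drop-in.
from twisted.internet import defer

@defer.inlineCallbacks
def fetch_remote_event(store, federation_client, destinations, room_id, event_id):
    room_version = yield store.get_room_version(room_id)
    pdu = yield federation_client.get_pdu(
        destinations, event_id, room_version, outlier=True, timeout=10000,
    )
    defer.returnValue(pdu)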
@@ -84,6 +88,7 @@ class FederationBase(object): res = yield self.get_pdu( destinations=[pdu.origin], event_id=pdu.event_id, + room_version=room_version, outlier=outlier, timeout=10000, ) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index d05ed91d64..4e171f9b56 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -25,7 +25,12 @@ from prometheus_client import Counter from twisted.internet import defer -from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventTypes, Membership +from synapse.api.constants import ( + KNOWN_ROOM_VERSIONS, + EventTypes, + Membership, + RoomVersions, +) from synapse.api.errors import ( CodeMessageException, FederationDeniedError, @@ -202,7 +207,8 @@ class FederationClient(FederationBase): @defer.inlineCallbacks @log_function - def get_pdu(self, destinations, event_id, outlier=False, timeout=None): + def get_pdu(self, destinations, event_id, room_version, outlier=False, + timeout=None): """Requests the PDU with given origin and ID from the remote home servers. @@ -212,6 +218,7 @@ class FederationClient(FederationBase): Args: destinations (list): Which home servers to query event_id (str): event to fetch + room_version (str): version of the room outlier (bool): Indicates whether the PDU is an `outlier`, i.e. if it's from an arbitary point in the context as opposed to part of the current block of PDUs. Defaults to `False` @@ -352,10 +359,13 @@ class FederationClient(FederationBase): ev.event_id for ev in itertools.chain(pdus, auth_chain) ]) + room_version = yield self.store.get_room_version(room_id) + signed_pdus = yield self._check_sigs_and_hash_and_fetch( destination, [p for p in pdus if p.event_id not in seen_events], - outlier=True + outlier=True, + room_version=room_version, ) signed_pdus.extend( seen_events[p.event_id] for p in pdus if p.event_id in seen_events @@ -364,7 +374,8 @@ class FederationClient(FederationBase): signed_auth = yield self._check_sigs_and_hash_and_fetch( destination, [p for p in auth_chain if p.event_id not in seen_events], - outlier=True + outlier=True, + room_version=room_version, ) signed_auth.extend( seen_events[p.event_id] for p in auth_chain if p.event_id in seen_events @@ -411,6 +422,8 @@ class FederationClient(FederationBase): random.shuffle(srvs) return srvs + room_version = yield self.store.get_room_version(room_id) + batch_size = 20 missing_events = list(missing_events) for i in range(0, len(missing_events), batch_size): @@ -421,6 +434,7 @@ class FederationClient(FederationBase): self.get_pdu, destinations=random_server_list(), event_id=e_id, + room_version=room_version, ) for e_id in batch ] @@ -450,8 +464,11 @@ class FederationClient(FederationBase): for p in res["auth_chain"] ] + room_version = yield self.store.get_room_version(room_id) + signed_auth = yield self._check_sigs_and_hash_and_fetch( - destination, auth_chain, outlier=True + destination, auth_chain, + outlier=True, room_version=room_version, ) signed_auth.sort(key=lambda e: e.depth) @@ -650,9 +667,20 @@ class FederationClient(FederationBase): for p in itertools.chain(state, auth_chain) } + room_version = None + for e in state: + if (e.type, e.state_key) == (EventTypes.Create, ""): + room_version = e.content.get("room_version", RoomVersions.V1) + break + + if room_version is None: + # We use this error has that is what + raise SynapseError(400, "No create event in state") + valid_pdus = yield self._check_sigs_and_hash_and_fetch( destination, list(pdus.values()), 
outlier=True, + room_version=room_version, ) valid_pdus_map = { @@ -790,8 +818,10 @@ class FederationClient(FederationBase): for e in content["auth_chain"] ] + room_version = yield self.store.get_room_version(room_id) + signed_auth = yield self._check_sigs_and_hash_and_fetch( - destination, auth_chain, outlier=True + destination, auth_chain, outlier=True, room_version=room_version, ) signed_auth.sort(key=lambda e: e.depth) @@ -838,8 +868,10 @@ class FederationClient(FederationBase): for e in content.get("events", []) ] + room_version = yield self.store.get_room_version(room_id) + signed_events = yield self._check_sigs_and_hash_and_fetch( - destination, events, outlier=False + destination, events, outlier=False, room_version=room_version, ) except HttpResponseException as e: if not e.code == 400: diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 37d29e7027..cb729c69ea 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -457,8 +457,10 @@ class FederationServer(FederationBase): for e in content["auth_chain"] ] + room_version = yield self.store.get_room_version(room_id) + signed_auth = yield self._check_sigs_and_hash_and_fetch( - origin, auth_chain, outlier=True + origin, auth_chain, outlier=True, room_version=room_version, ) ret = yield self.handler.on_query_auth( diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index a3bb864bb2..a9dc4a4e4e 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -34,6 +34,7 @@ from synapse.api.constants import ( EventTypes, Membership, RejectedReason, + RoomVersions, ) from synapse.api.errors import ( AuthError, @@ -342,6 +343,8 @@ class FederationHandler(BaseHandler): room_id, event_id, p, ) + room_version = yield self.store.get_room_version(room_id) + with logcontext.nested_logging_context(p): # note that if any of the missing prevs share missing state or # auth events, the requests to fetch those events are deduped @@ -355,7 +358,7 @@ class FederationHandler(BaseHandler): # we want the state *after* p; get_state_for_room returns the # state *before* p. 
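
# --- Illustrative sketch (not part of the patches above): how the room version
# is recovered from a remote room's state when we are not yet in the room --
# find the m.room.create event and read its room_version field, defaulting to
# v1 when absent. EventTypes and RoomVersions are the constants used in the
# diffs above.
from synapse.api.constants import EventTypes, RoomVersions
from synapse.api.errors import SynapseError

def room_version_from_state(state_events):
    for e in state_events:
        if (e.type, e.state_key) == (EventTypes.Create, ""):
            return e.content.get("room_version", RoomVersions.V1)
    # No create event means the room is invalid and would fail auth anyway.
    raise SynapseError(400, "No create event in state")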
remote_event = yield self.federation_client.get_pdu( - [origin], p, outlier=True, + [origin], p, room_version, outlier=True, ) if remote_event is None: @@ -379,7 +382,6 @@ class FederationHandler(BaseHandler): for x in remote_state: event_map[x.event_id] = x - room_version = yield self.store.get_room_version(room_id) state_map = yield resolve_events_with_store( room_version, state_maps, event_map, state_res_store=StateResolutionStore(self.store), @@ -655,6 +657,8 @@ class FederationHandler(BaseHandler): if dest == self.server_name: raise SynapseError(400, "Can't backfill from self.") + room_version = yield self.store.get_room_version(room_id) + events = yield self.federation_client.backfill( dest, room_id, @@ -748,6 +752,7 @@ class FederationHandler(BaseHandler): self.federation_client.get_pdu, [dest], event_id, + room_version=room_version, outlier=True, timeout=10000, ) @@ -1659,6 +1664,8 @@ class FederationHandler(BaseHandler): create_event = e break + room_version = create_event.content.get("room_version", RoomVersions.V1) + missing_auth_events = set() for e in itertools.chain(auth_events, state, [event]): for e_id in e.auth_event_ids(): @@ -1669,6 +1676,7 @@ class FederationHandler(BaseHandler): m_ev = yield self.federation_client.get_pdu( [origin], e_id, + room_version=room_version, outlier=True, timeout=10000, ) -- cgit 1.4.1 From 886e5acc762b879b606773b511ff92345aef14c6 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 16 Jan 2019 15:13:07 +0000 Subject: Store rejected remote invite events as outliers Currently they're stored as non-outliers even though the server isn't in the room, which can be problematic in places where the code assumes it has the state for all non outlier events. In particular, there is an edge case where persisting the leave event triggers a state resolution, which requires looking up the room version from state. Since the server doesn't have the state, this causes an exception to be thrown. --- synapse/federation/federation_client.py | 10 ++++++-- synapse/handlers/federation.py | 44 +++++++++------------------------ synapse/storage/roommember.py | 5 +--- 3 files changed, 21 insertions(+), 38 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index d05ed91d64..8fa726759e 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -32,7 +32,6 @@ from synapse.api.errors import ( HttpResponseException, SynapseError, ) -from synapse.events import builder from synapse.federation.federation_base import FederationBase, event_from_pdu_json from synapse.util import logcontext, unwrapFirstError from synapse.util.caches.expiringcache import ExpiringCache @@ -66,6 +65,8 @@ class FederationClient(FederationBase): self.state = hs.get_state_handler() self.transport_layer = hs.get_federation_transport_client() + self.event_builder_factory = hs.get_event_builder_factory() + self._get_pdu_cache = ExpiringCache( cache_name="get_pdu_cache", clock=self._clock, @@ -571,7 +572,12 @@ class FederationClient(FederationBase): if "prev_state" not in pdu_dict: pdu_dict["prev_state"] = [] - ev = builder.EventBuilder(pdu_dict) + # Strip off the fields that we want to clobber. 
+ pdu_dict.pop("origin", None) + pdu_dict.pop("origin_server_ts", None) + pdu_dict.pop("unsigned", None) + + ev = self.event_builder_factory.new(pdu_dict) defer.returnValue( (destination, ev) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index a3bb864bb2..70be87cd3d 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -43,10 +43,7 @@ from synapse.api.errors import ( StoreError, SynapseError, ) -from synapse.crypto.event_signing import ( - add_hashes_and_signatures, - compute_event_signature, -) +from synapse.crypto.event_signing import compute_event_signature from synapse.events.validator import EventValidator from synapse.replication.http.federation import ( ReplicationCleanRoomRestServlet, @@ -58,7 +55,6 @@ from synapse.types import UserID, get_domain_from_id from synapse.util import logcontext, unwrapFirstError from synapse.util.async_helpers import Linearizer from synapse.util.distributor import user_joined_room -from synapse.util.frozenutils import unfreeze from synapse.util.logutils import log_function from synapse.util.retryutils import NotRetryingDestination from synapse.visibility import filter_events_for_server @@ -1083,7 +1079,9 @@ class FederationHandler(BaseHandler): handled_events = set() try: - event = self._sign_event(event) + self._sign_event(event) + event.internal_metadata.outlier = False + # Try the host we successfully got a response to /make_join/ # request first. try: @@ -1289,13 +1287,7 @@ class FederationHandler(BaseHandler): event.internal_metadata.outlier = True event.internal_metadata.invite_from_remote = True - event.signatures.update( - compute_event_signature( - event, - self.hs.hostname, - self.hs.config.signing_key[0] - ) - ) + self._sign_event(event) context = yield self.state_handler.compute_event_context(event) yield self.persist_events_and_notify([(event, context)]) @@ -1313,7 +1305,7 @@ class FederationHandler(BaseHandler): # Mark as outlier as we don't have any state for this event; we're not # even in the room. event.internal_metadata.outlier = True - event = self._sign_event(event) + self._sign_event(event) # Try the host that we succesfully called /make_leave/ on first for # the /send_leave/ request. @@ -1358,26 +1350,14 @@ class FederationHandler(BaseHandler): defer.returnValue((origin, event)) def _sign_event(self, event): - event.internal_metadata.outlier = False - - builder = self.event_builder_factory.new( - unfreeze(event.get_pdu_json()) - ) - - builder.event_id = self.event_builder_factory.create_event_id() - builder.origin = self.hs.hostname - - if not hasattr(event, "signatures"): - builder.signatures = {} - - add_hashes_and_signatures( - builder, - self.hs.hostname, - self.hs.config.signing_key[0], + event.signatures.update( + compute_event_signature( + event, + self.hs.hostname, + self.hs.config.signing_key[0] + ) ) - return builder.build() - @defer.inlineCallbacks @log_function def on_make_leave_request(self, room_id, user_id): diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 0707f9a86a..c7488f4259 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -591,10 +591,7 @@ class RoomMemberStore(RoomMemberWorkerStore): # i.e., its something that has just happened. # The only current event that can also be an outlier is if its an # invite that has come in across federation. 
- is_new_state = not backfilled and ( - not event.internal_metadata.is_outlier() - or event.internal_metadata.is_invite_from_remote() - ) + is_new_state = not backfilled is_mine = self.hs.is_mine_id(event.state_key) if is_new_state and is_mine: if event.membership == Membership.INVITE: -- cgit 1.4.1 From 07f62da55ac8903f7ea224255b8defd122724ec4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 23 Jan 2019 19:44:37 +0000 Subject: Remove unnecessary '_sign_event' --- synapse/federation/federation_client.py | 9 ++++++++- synapse/handlers/federation.py | 14 -------------- 2 files changed, 8 insertions(+), 15 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 8fa726759e..f4adcb556d 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -32,6 +32,7 @@ from synapse.api.errors import ( HttpResponseException, SynapseError, ) +from synapse.crypto.event_signing import add_hashes_and_signatures from synapse.federation.federation_base import FederationBase, event_from_pdu_json from synapse.util import logcontext, unwrapFirstError from synapse.util.caches.expiringcache import ExpiringCache @@ -577,7 +578,13 @@ class FederationClient(FederationBase): pdu_dict.pop("origin_server_ts", None) pdu_dict.pop("unsigned", None) - ev = self.event_builder_factory.new(pdu_dict) + builder = self.event_builder_factory.new(pdu_dict) + add_hashes_and_signatures( + builder, + self.hs.hostname, + self.hs.config.signing_key[0] + ) + ev = builder.build() defer.returnValue( (destination, ev) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 70be87cd3d..9a14ba4517 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -43,7 +43,6 @@ from synapse.api.errors import ( StoreError, SynapseError, ) -from synapse.crypto.event_signing import compute_event_signature from synapse.events.validator import EventValidator from synapse.replication.http.federation import ( ReplicationCleanRoomRestServlet, @@ -1079,7 +1078,6 @@ class FederationHandler(BaseHandler): handled_events = set() try: - self._sign_event(event) event.internal_metadata.outlier = False # Try the host we successfully got a response to /make_join/ @@ -1287,8 +1285,6 @@ class FederationHandler(BaseHandler): event.internal_metadata.outlier = True event.internal_metadata.invite_from_remote = True - self._sign_event(event) - context = yield self.state_handler.compute_event_context(event) yield self.persist_events_and_notify([(event, context)]) @@ -1305,7 +1301,6 @@ class FederationHandler(BaseHandler): # Mark as outlier as we don't have any state for this event; we're not # even in the room. event.internal_metadata.outlier = True - self._sign_event(event) # Try the host that we succesfully called /make_leave/ on first for # the /send_leave/ request. 
@@ -1349,15 +1344,6 @@ class FederationHandler(BaseHandler): assert(event.room_id == room_id) defer.returnValue((origin, event)) - def _sign_event(self, event): - event.signatures.update( - compute_event_signature( - event, - self.hs.hostname, - self.hs.config.signing_key[0] - ) - ) - @defer.inlineCallbacks @log_function def on_make_leave_request(self, room_id, user_id): -- cgit 1.4.1 From 7c288c22500e2045d36a29c38d2671fad6484e30 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 23 Jan 2019 20:05:44 +0000 Subject: Clarify the invite flows --- synapse/events/__init__.py | 8 ++++++-- synapse/handlers/federation.py | 12 +++++++++++- synapse/storage/roommember.py | 11 +++++++---- 3 files changed, 24 insertions(+), 7 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 84c75495d5..5030636c7e 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -41,8 +41,12 @@ class _EventInternalMetadata(object): def is_outlier(self): return getattr(self, "outlier", False) - def is_invite_from_remote(self): - return getattr(self, "invite_from_remote", False) + def is_new_remote_event(self): + """Whether this is a new remote event, like an invite or an invite + rejection. This is needed as those events are marked as outliers, but + they still need to be processed. + """ + return getattr(self, "new_remote_event", False) def get_send_on_behalf_of(self): """Whether this server should send the event on behalf of another server. diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 9a14ba4517..e017cab777 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -43,6 +43,7 @@ from synapse.api.errors import ( StoreError, SynapseError, ) +from synapse.crypto.event_signing import compute_event_signature from synapse.events.validator import EventValidator from synapse.replication.http.federation import ( ReplicationCleanRoomRestServlet, @@ -1283,7 +1284,15 @@ class FederationHandler(BaseHandler): ) event.internal_metadata.outlier = True - event.internal_metadata.invite_from_remote = True + event.internal_metadata.new_remote_event = True + + event.signatures.update( + compute_event_signature( + event, + self.hs.hostname, + self.hs.config.signing_key[0] + ) + ) context = yield self.state_handler.compute_event_context(event) yield self.persist_events_and_notify([(event, context)]) @@ -1301,6 +1310,7 @@ class FederationHandler(BaseHandler): # Mark as outlier as we don't have any state for this event; we're not # even in the room. event.internal_metadata.outlier = True + event.internal_metadata.new_remote_event = True # Try the host that we succesfully called /make_leave/ on first for # the /send_leave/ request. diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index c7488f4259..40b13de80b 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -588,10 +588,13 @@ class RoomMemberStore(RoomMemberWorkerStore): ) # We update the local_invites table only if the event is "current", - # i.e., its something that has just happened. - # The only current event that can also be an outlier is if its an - # invite that has come in across federation. - is_new_state = not backfilled + # i.e., its something that has just happened. If the event is an + # outlier it is only current if its a "new remote event", like a + # remote invite or a rejection of a remote invite. 
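
# --- Illustrative sketch (not part of the patches above): the membership-update
# rule described in the comment just above, as a standalone predicate. An event
# should touch local_invites only if it is "current": not backfilled, and either
# not an outlier, or an outlier flagged as a "new remote event" (later renamed
# "out of band membership") -- a remote invite or a rejection of one.
def is_current_membership_event(backfilled, is_outlier, is_flagged_out_of_band):
    return not backfilled and (not is_outlier or is_flagged_out_of_band)

# is_current_membership_event(False, True, True)  -> True  (remote invite)
# is_current_membership_event(False, True, False) -> False (ordinary outlier)
# is_current_membership_event(True, False, False) -> False (backfilled event)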
+ is_new_state = not backfilled and ( + not event.internal_metadata.is_outlier() + or event.internal_metadata.is_new_remote_event() + ) is_mine = self.hs.is_mine_id(event.state_key) if is_new_state and is_mine: if event.membership == Membership.INVITE: -- cgit 1.4.1 From 068aa1d22840a1154bb8fbdd445a8c36b290db91 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 24 Jan 2019 12:44:27 +0000 Subject: Time out filtered room dir queries after 60s --- synapse/handlers/room_list.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index dc88620885..ea63fb604c 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -31,6 +31,7 @@ from synapse.util.caches.descriptors import cachedInlineCallbacks from synapse.util.caches.response_cache import ResponseCache from ._base import BaseHandler +from datetime import datetime, timedelta logger = logging.getLogger(__name__) @@ -73,8 +74,13 @@ class RoomListHandler(BaseHandler): # We explicitly don't bother caching searches or requests for # appservice specific lists. logger.info("Bypassing cache as search request.") + + # XXX: Quick hack to stop room directory queries taking too long. + # Timeout request after 60s. Probably want a more fundamental + # solution at some point + timeout = datetime.now() + timedelta(seconds=60) return self._get_public_room_list( - limit, since_token, search_filter, network_tuple=network_tuple, + limit, since_token, search_filter, network_tuple=network_tuple, timeout=timeout, ) key = (limit, since_token, network_tuple) @@ -87,7 +93,8 @@ class RoomListHandler(BaseHandler): @defer.inlineCallbacks def _get_public_room_list(self, limit=None, since_token=None, search_filter=None, - network_tuple=EMPTY_THIRD_PARTY_ID,): + network_tuple=EMPTY_THIRD_PARTY_ID, + timeout=None,): if since_token and since_token != "END": since_token = RoomListNextBatch.from_token(since_token) else: @@ -202,6 +209,9 @@ class RoomListHandler(BaseHandler): chunk = [] for i in range(0, len(rooms_to_scan), step): + if timeout and datetime.now() > timeout: + raise Exception("Timed out searching room directory") + batch = rooms_to_scan[i:i + step] logger.info("Processing %i rooms for result", len(batch)) yield concurrently_execute( -- cgit 1.4.1 From 5541645e80d2907721f17f648717f0b5a2b6f4fe Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 24 Jan 2019 12:45:32 +0000 Subject: lint --- synapse/handlers/room_list.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index ea63fb604c..a99b6e1460 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -80,7 +80,8 @@ class RoomListHandler(BaseHandler): # solution at some point timeout = datetime.now() + timedelta(seconds=60) return self._get_public_room_list( - limit, since_token, search_filter, network_tuple=network_tuple, timeout=timeout, + limit, since_token, search_filter, + network_tuple=network_tuple, timeout=timeout, ) key = (limit, since_token, network_tuple) -- cgit 1.4.1 From a2d85144e54457df2aae2c1a759f1baae910de91 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 24 Jan 2019 14:22:26 +0000 Subject: isort --- synapse/handlers/room_list.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 
a99b6e1460..5f7b33473e 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -15,6 +15,7 @@ import logging from collections import namedtuple +from datetime import datetime, timedelta from six import PY3, iteritems from six.moves import range @@ -31,7 +32,6 @@ from synapse.util.caches.descriptors import cachedInlineCallbacks from synapse.util.caches.response_cache import ResponseCache from ._base import BaseHandler -from datetime import datetime, timedelta logger = logging.getLogger(__name__) -- cgit 1.4.1 From 5b1dc940839b6390562475fe1b033ca7fce33c37 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 24 Jan 2019 14:59:50 +0000 Subject: Use self.clock instead of datetime --- synapse/handlers/room_list.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 5f7b33473e..2af520819e 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -15,7 +15,6 @@ import logging from collections import namedtuple -from datetime import datetime, timedelta from six import PY3, iteritems from six.moves import range @@ -78,7 +77,7 @@ class RoomListHandler(BaseHandler): # XXX: Quick hack to stop room directory queries taking too long. # Timeout request after 60s. Probably want a more fundamental # solution at some point - timeout = datetime.now() + timedelta(seconds=60) + timeout = self.clock.time() + 60 return self._get_public_room_list( limit, since_token, search_filter, network_tuple=network_tuple, timeout=timeout, @@ -95,7 +94,7 @@ class RoomListHandler(BaseHandler): def _get_public_room_list(self, limit=None, since_token=None, search_filter=None, network_tuple=EMPTY_THIRD_PARTY_ID, - timeout=None,): + timeout=0,): if since_token and since_token != "END": since_token = RoomListNextBatch.from_token(since_token) else: @@ -210,7 +209,7 @@ class RoomListHandler(BaseHandler): chunk = [] for i in range(0, len(rooms_to_scan), step): - if timeout and datetime.now() > timeout: + if timeout and self.clock.time() > timeout: raise Exception("Timed out searching room directory") batch = rooms_to_scan[i:i + step] -- cgit 1.4.1 From 075ff3ede9aac0bc31d638b45c63007d664d9eee Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 24 Jan 2019 15:10:22 +0000 Subject: Change default timeout value from 0 to None --- synapse/handlers/room_list.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 2af520819e..13e212d669 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -94,7 +94,7 @@ class RoomListHandler(BaseHandler): def _get_public_room_list(self, limit=None, since_token=None, search_filter=None, network_tuple=EMPTY_THIRD_PARTY_ID, - timeout=0,): + timeout=None,): if since_token and since_token != "END": since_token = RoomListNextBatch.from_token(since_token) else: -- cgit 1.4.1 From 03c85335d1d386c0523af3b6bf992f83bfb905d7 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 24 Jan 2019 17:22:09 +0000 Subject: Apply suggestions from code review Co-Authored-By: anoadragon453 <1342360+anoadragon453@users.noreply.github.com> --- synapse/handlers/search.py | 2 +- synapse/storage/state.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 
75c26fe065..49c439313e 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -52,7 +52,7 @@ class SearchHandler(BaseHandler): room_id (str): id of the room to search through. Returns: - Deferred[iterable[str]]: predecessor room ids + Deferred[iterable[unicode]]: predecessor room ids """ historical_room_ids = [] diff --git a/synapse/storage/state.py b/synapse/storage/state.py index fceb9744aa..0a0691cd00 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -446,7 +446,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): room_id (str) Returns: - Deferred[str]: predecessor room id + Deferred[unicode|None]: predecessor room id """ state_ids = yield self.get_current_state_ids(room_id) create_id = state_ids.get((EventTypes.Create, "")) -- cgit 1.4.1 From b8082a54451bb4db30e3b2a4d19dc8cb23330eb7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 24 Jan 2019 17:33:19 +0000 Subject: Use term 'out of band membership' instead --- synapse/events/__init__.py | 9 +++++---- synapse/handlers/federation.py | 4 ++-- synapse/storage/roommember.py | 6 +++--- 3 files changed, 10 insertions(+), 9 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 5030636c7e..48289cad06 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -41,12 +41,13 @@ class _EventInternalMetadata(object): def is_outlier(self): return getattr(self, "outlier", False) - def is_new_remote_event(self): - """Whether this is a new remote event, like an invite or an invite + def is_out_of_band_membership(self): + """Whether this is an out of band membership, like an invite or an invite rejection. This is needed as those events are marked as outliers, but - they still need to be processed. + they still need to be processed as if they're new events (e.g. updating + invite state in the database, relaying to clients, etc). """ - return getattr(self, "new_remote_event", False) + return getattr(self, "out_of_band_membership", False) def get_send_on_behalf_of(self): """Whether this server should send the event on behalf of another server. diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index e017cab777..242719b7ce 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1284,7 +1284,7 @@ class FederationHandler(BaseHandler): ) event.internal_metadata.outlier = True - event.internal_metadata.new_remote_event = True + event.internal_metadata.out_of_band_membership = True event.signatures.update( compute_event_signature( @@ -1310,7 +1310,7 @@ class FederationHandler(BaseHandler): # Mark as outlier as we don't have any state for this event; we're not # even in the room. event.internal_metadata.outlier = True - event.internal_metadata.new_remote_event = True + event.internal_metadata.out_of_band_membership = True # Try the host that we succesfully called /make_leave/ on first for # the /send_leave/ request. diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 40b13de80b..592c1bcd33 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -589,11 +589,11 @@ class RoomMemberStore(RoomMemberWorkerStore): # We update the local_invites table only if the event is "current", # i.e., its something that has just happened. If the event is an - # outlier it is only current if its a "new remote event", like a - # remote invite or a rejection of a remote invite. 
+ # outlier it is only current if its an "out of band membership", + # like a remote invite or a rejection of a remote invite. is_new_state = not backfilled and ( not event.internal_metadata.is_outlier() - or event.internal_metadata.is_new_remote_event() + or event.internal_metadata.is_out_of_band_membership() ) is_mine = self.hs.is_mine_id(event.state_key) if is_new_state and is_mine: -- cgit 1.4.1 From 9139b87be420fcfce22e70a7c35ba52b2ea32f3a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 24 Jan 2019 18:04:02 +0000 Subject: Remove unecessary setting of outlier bit --- synapse/handlers/federation.py | 2 -- 1 file changed, 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 242719b7ce..d53b716ffb 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1079,8 +1079,6 @@ class FederationHandler(BaseHandler): handled_events = set() try: - event.internal_metadata.outlier = False - # Try the host we successfully got a response to /make_join/ # request first. try: -- cgit 1.4.1 From d148c43050f7a85523a743ff6069683c644a517d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 24 Jan 2019 18:31:23 +0000 Subject: Review comments --- synapse/federation/federation_client.py | 3 ++- synapse/handlers/federation.py | 5 +++++ 2 files changed, 7 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 4e171f9b56..7fb5736142 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -674,7 +674,8 @@ class FederationClient(FederationBase): break if room_version is None: - # We use this error has that is what + # If the state doesn't have a create event then the room is + # invalid, and it would fail auth checks anyway. raise SynapseError(400, "No create event in state") valid_pdus = yield self._check_sigs_and_hash_and_fetch( diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index a9dc4a4e4e..5280d88a50 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1664,6 +1664,11 @@ class FederationHandler(BaseHandler): create_event = e break + if create_event is None: + # If the state doesn't have a create event then the room is + # invalid, and it would fail auth checks anyway. 
+ raise SynapseError(400, "No create event in state") + room_version = create_event.content.get("room_version", RoomVersions.V1) missing_auth_events = set() -- cgit 1.4.1 From a50cf929c13044f25c3776802287458fe5695c37 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 23 Jan 2019 20:21:33 +0000 Subject: Require event format version to parse or create events --- synapse/events/__init__.py | 24 ++++++++++- synapse/events/builder.py | 51 ++++++++++++++++++++++- synapse/federation/federation_base.py | 9 +++-- synapse/federation/federation_client.py | 60 ++++++++++++++++----------- synapse/federation/federation_server.py | 33 +++++++++------ synapse/federation/transport/server.py | 4 +- synapse/handlers/federation.py | 72 ++++++++++++++++++++------------- synapse/handlers/message.py | 10 ++++- 8 files changed, 191 insertions(+), 72 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 38470ad176..3fe52aaa45 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -18,7 +18,11 @@ from distutils.util import strtobool import six -from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventFormatVersions +from synapse.api.constants import ( + KNOWN_EVENT_FORMAT_VERSIONS, + KNOWN_ROOM_VERSIONS, + EventFormatVersions, +) from synapse.util.caches import intern_dict from synapse.util.frozenutils import freeze @@ -256,3 +260,21 @@ def room_version_to_event_format(room_version): raise RuntimeError("Unrecognized room version %s" % (room_version,)) return EventFormatVersions.V1 + + +def event_type_from_format_version(format_version): + """Returns the python type to use to construct an Event object for the + given event format version. + + Args: + format_version (int): The event format version + + Returns: + type: A type that can be initialized as per the initializer of + `FrozenEvent` + """ + if format_version not in KNOWN_EVENT_FORMAT_VERSIONS: + raise Exception( + "No event format %r" % (format_version,) + ) + return FrozenEvent diff --git a/synapse/events/builder.py b/synapse/events/builder.py index e662eaef10..7e63371095 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -15,12 +15,39 @@ import copy +from synapse.api.constants import RoomVersions from synapse.types import EventID from synapse.util.stringutils import random_string from . import EventBase, FrozenEvent, _event_dict_property +def get_event_builder(room_version, key_values={}, internal_metadata_dict={}): + """Generate an event builder appropriate for the given room version + + Args: + room_version (str): Version of the room that we're creating an + event builder for + key_values (dict): Fields used as the basis of the new event + internal_metadata_dict (dict): Used to create the `_EventInternalMetadata` + object. 
+ + Returns: + EventBuilder + """ + if room_version in { + RoomVersions.V1, + RoomVersions.V2, + RoomVersions.VDH_TEST, + RoomVersions.STATE_V2_TEST, + }: + return EventBuilder(key_values, internal_metadata_dict) + else: + raise Exception( + "No event format defined for version %r" % (room_version,) + ) + + class EventBuilder(EventBase): def __init__(self, key_values={}, internal_metadata_dict={}): signatures = copy.deepcopy(key_values.pop("signatures", {})) @@ -58,7 +85,29 @@ class EventBuilderFactory(object): return e_id.to_string() - def new(self, key_values={}): + def new(self, room_version, key_values={}): + """Generate an event builder appropriate for the given room version + + Args: + room_version (str): Version of the room that we're creating an + event builder for + key_values (dict): Fields used as the basis of the new event + + Returns: + EventBuilder + """ + + # There's currently only the one event version defined + if room_version not in { + RoomVersions.V1, + RoomVersions.V2, + RoomVersions.VDH_TEST, + RoomVersions.STATE_V2_TEST, + }: + raise Exception( + "No event format defined for version %r" % (room_version,) + ) + key_values["event_id"] = self.create_event_id() time_now = int(self.clock.time_msec()) diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index d749bfdd3a..5c31e5f85f 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -23,7 +23,7 @@ from twisted.internet.defer import DeferredList from synapse.api.constants import MAX_DEPTH, EventTypes, Membership from synapse.api.errors import Codes, SynapseError from synapse.crypto.event_signing import check_event_content_hash -from synapse.events import FrozenEvent +from synapse.events import event_type_from_format_version from synapse.events.utils import prune_event from synapse.http.servlet import assert_params_in_dict from synapse.types import get_domain_from_id @@ -302,11 +302,12 @@ def _is_invite_via_3pid(event): ) -def event_from_pdu_json(pdu_json, outlier=False): +def event_from_pdu_json(pdu_json, event_format_version, outlier=False): """Construct a FrozenEvent from an event json received over federation Args: pdu_json (object): pdu as received over federation + event_format_version (int): The event format version outlier (bool): True to mark this event as an outlier Returns: @@ -330,8 +331,8 @@ def event_from_pdu_json(pdu_json, outlier=False): elif depth > MAX_DEPTH: raise SynapseError(400, "Depth too large", Codes.BAD_JSON) - event = FrozenEvent( - pdu_json + event = event_type_from_format_version(event_format_version)( + pdu_json, ) event.internal_metadata.outlier = outlier diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 33ecabca29..71809893c5 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -170,13 +170,13 @@ class FederationClient(FederationBase): @defer.inlineCallbacks @log_function - def backfill(self, dest, context, limit, extremities): + def backfill(self, dest, room_id, limit, extremities): """Requests some more historic PDUs for the given context from the given destination server. Args: dest (str): The remote home server to ask. - context (str): The context to backfill. + room_id (str): The room_id to backfill. limit (int): The maximum number of PDUs to return. 
extremities (list): List of PDU id and origins of the first pdus we have seen from the context @@ -191,12 +191,15 @@ class FederationClient(FederationBase): return transaction_data = yield self.transport_layer.backfill( - dest, context, extremities, limit) + dest, room_id, extremities, limit) logger.debug("backfill transaction_data=%s", repr(transaction_data)) + room_version = yield self.store.get_room_version(room_id) + format_ver = room_version_to_event_format(room_version) + pdus = [ - event_from_pdu_json(p, outlier=False) + event_from_pdu_json(p, format_ver, outlier=False) for p in transaction_data["pdus"] ] @@ -240,6 +243,8 @@ class FederationClient(FederationBase): pdu_attempts = self.pdu_destination_tried.setdefault(event_id, {}) + format_ver = room_version_to_event_format(room_version) + signed_pdu = None for destination in destinations: now = self._clock.time_msec() @@ -255,7 +260,7 @@ class FederationClient(FederationBase): logger.debug("transaction_data %r", transaction_data) pdu_list = [ - event_from_pdu_json(p, outlier=outlier) + event_from_pdu_json(p, format_ver, outlier=outlier) for p in transaction_data["pdus"] ] @@ -349,12 +354,16 @@ class FederationClient(FederationBase): destination, room_id, event_id=event_id, ) + room_version = yield self.store.get_room_version(room_id) + format_ver = room_version_to_event_format(room_version) + pdus = [ - event_from_pdu_json(p, outlier=True) for p in result["pdus"] + event_from_pdu_json(p, format_ver, outlier=True) + for p in result["pdus"] ] auth_chain = [ - event_from_pdu_json(p, outlier=True) + event_from_pdu_json(p, format_ver, outlier=True) for p in result.get("auth_chain", []) ] @@ -362,8 +371,6 @@ class FederationClient(FederationBase): ev.event_id for ev in itertools.chain(pdus, auth_chain) ]) - room_version = yield self.store.get_room_version(room_id) - signed_pdus = yield self._check_sigs_and_hash_and_fetch( destination, [p for p in pdus if p.event_id not in seen_events], @@ -462,13 +469,14 @@ class FederationClient(FederationBase): destination, room_id, event_id, ) + room_version = yield self.store.get_room_version(room_id) + format_ver = room_version_to_event_format(room_version) + auth_chain = [ - event_from_pdu_json(p, outlier=True) + event_from_pdu_json(p, format_ver, outlier=True) for p in res["auth_chain"] ] - room_version = yield self.store.get_room_version(room_id) - signed_auth = yield self._check_sigs_and_hash_and_fetch( destination, auth_chain, outlier=True, room_version=room_version, @@ -605,7 +613,7 @@ class FederationClient(FederationBase): pdu_dict.pop("origin_server_ts", None) pdu_dict.pop("unsigned", None) - builder = self.event_builder_factory.new(pdu_dict) + builder = self.event_builder_factory.new(room_version, pdu_dict) add_hashes_and_signatures( builder, self.hs.hostname, @@ -621,7 +629,7 @@ class FederationClient(FederationBase): "make_" + membership, destinations, send_request, ) - def send_join(self, destinations, pdu): + def send_join(self, destinations, pdu, event_format_version): """Sends a join event to one of a list of homeservers. Doing so will cause the remote server to add the event to the graph, @@ -631,6 +639,7 @@ class FederationClient(FederationBase): destinations (str): Candidate homeservers which are probably participating in the room. 
pdu (BaseEvent): event to be sent + event_format_version (int): The event format version Return: Deferred: resolves to a dict with members ``origin`` (a string @@ -676,12 +685,12 @@ class FederationClient(FederationBase): logger.debug("Got content: %s", content) state = [ - event_from_pdu_json(p, outlier=True) + event_from_pdu_json(p, event_format_version, outlier=True) for p in content.get("state", []) ] auth_chain = [ - event_from_pdu_json(p, outlier=True) + event_from_pdu_json(p, event_format_version, outlier=True) for p in content.get("auth_chain", []) ] @@ -759,7 +768,10 @@ class FederationClient(FederationBase): logger.debug("Got response to send_invite: %s", pdu_dict) - pdu = event_from_pdu_json(pdu_dict) + room_version = yield self.store.get_room_version(room_id) + format_ver = room_version_to_event_format(room_version) + + pdu = event_from_pdu_json(pdu_dict, format_ver) # Check signatures are correct. pdu = yield self._check_sigs_and_hash(pdu) @@ -837,13 +849,14 @@ class FederationClient(FederationBase): content=send_content, ) + room_version = yield self.store.get_room_version(room_id) + format_ver = room_version_to_event_format(room_version) + auth_chain = [ - event_from_pdu_json(e) + event_from_pdu_json(e, format_ver) for e in content["auth_chain"] ] - room_version = yield self.store.get_room_version(room_id) - signed_auth = yield self._check_sigs_and_hash_and_fetch( destination, auth_chain, outlier=True, room_version=room_version, ) @@ -887,13 +900,14 @@ class FederationClient(FederationBase): timeout=timeout, ) + room_version = yield self.store.get_room_version(room_id) + format_ver = room_version_to_event_format(room_version) + events = [ - event_from_pdu_json(e) + event_from_pdu_json(e, format_ver) for e in content.get("events", []) ] - room_version = yield self.store.get_room_version(room_id) - signed_events = yield self._check_sigs_and_hash_and_fetch( destination, events, outlier=False, room_version=room_version, ) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index dde166e295..4aa04b9588 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -34,6 +34,7 @@ from synapse.api.errors import ( SynapseError, ) from synapse.crypto.event_signing import compute_event_signature +from synapse.events import room_version_to_event_format from synapse.federation.federation_base import FederationBase, event_from_pdu_json from synapse.federation.persistence import TransactionActions from synapse.federation.units import Edu, Transaction @@ -178,14 +179,13 @@ class FederationServer(FederationBase): continue try: - # In future we will actually use the room version to parse the - # PDU into an event. 
- yield self.store.get_room_version(room_id) + room_version = yield self.store.get_room_version(room_id) + format_ver = room_version_to_event_format(room_version) except NotFoundError: logger.info("Ignoring PDU for unknown room_id: %s", room_id) continue - event = event_from_pdu_json(p) + event = event_from_pdu_json(p, format_ver) pdus_by_room.setdefault(room_id, []).append(event) pdu_results = {} @@ -370,7 +370,9 @@ class FederationServer(FederationBase): @defer.inlineCallbacks def on_invite_request(self, origin, content, room_version): - pdu = event_from_pdu_json(content) + format_ver = room_version_to_event_format(room_version) + + pdu = event_from_pdu_json(content, format_ver) origin_host, _ = parse_server_name(origin) yield self.check_server_matches_acl(origin_host, pdu.room_id) ret_pdu = yield self.handler.on_invite_request(origin, pdu) @@ -378,9 +380,12 @@ class FederationServer(FederationBase): defer.returnValue({"event": ret_pdu.get_pdu_json(time_now)}) @defer.inlineCallbacks - def on_send_join_request(self, origin, content): + def on_send_join_request(self, origin, content, room_id): logger.debug("on_send_join_request: content: %s", content) - pdu = event_from_pdu_json(content) + + room_version = yield self.store.get_room_version(room_id) + format_ver = room_version_to_event_format(room_version) + pdu = event_from_pdu_json(content, format_ver) origin_host, _ = parse_server_name(origin) yield self.check_server_matches_acl(origin_host, pdu.room_id) @@ -410,9 +415,12 @@ class FederationServer(FederationBase): }) @defer.inlineCallbacks - def on_send_leave_request(self, origin, content): + def on_send_leave_request(self, origin, content, room_id): logger.debug("on_send_leave_request: content: %s", content) - pdu = event_from_pdu_json(content) + + room_version = yield self.store.get_room_version(room_id) + format_ver = room_version_to_event_format(room_version) + pdu = event_from_pdu_json(content, format_ver) origin_host, _ = parse_server_name(origin) yield self.check_server_matches_acl(origin_host, pdu.room_id) @@ -458,13 +466,14 @@ class FederationServer(FederationBase): origin_host, _ = parse_server_name(origin) yield self.check_server_matches_acl(origin_host, room_id) + room_version = yield self.store.get_room_version(room_id) + format_ver = room_version_to_event_format(room_version) + auth_chain = [ - event_from_pdu_json(e) + event_from_pdu_json(e, format_ver) for e in content["auth_chain"] ] - room_version = yield self.store.get_room_version(room_id) - signed_auth = yield self._check_sigs_and_hash_and_fetch( origin, auth_chain, outlier=True, room_version=room_version, ) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 4557a9e66e..67ae0212c3 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -469,7 +469,7 @@ class FederationSendLeaveServlet(BaseFederationServlet): @defer.inlineCallbacks def on_PUT(self, origin, content, query, room_id, event_id): - content = yield self.handler.on_send_leave_request(origin, content) + content = yield self.handler.on_send_leave_request(origin, content, room_id) defer.returnValue((200, content)) @@ -487,7 +487,7 @@ class FederationSendJoinServlet(BaseFederationServlet): def on_PUT(self, origin, content, query, context, event_id): # TODO(paul): assert that context/event_id parsed from path actually # match those given in content - content = yield self.handler.on_send_join_request(origin, content) + content = yield 
self.handler.on_send_join_request(origin, content, context) defer.returnValue((200, content)) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index c52dca1b81..a4b771049c 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1061,7 +1061,7 @@ class FederationHandler(BaseHandler): """ logger.debug("Joining %s to %s", joinee, room_id) - origin, event = yield self._make_and_verify_event( + origin, event, event_format_version = yield self._make_and_verify_event( target_hosts, room_id, joinee, @@ -1091,7 +1091,9 @@ class FederationHandler(BaseHandler): target_hosts.insert(0, origin) except ValueError: pass - ret = yield self.federation_client.send_join(target_hosts, event) + ret = yield self.federation_client.send_join( + target_hosts, event, event_format_version, + ) origin = ret["origin"] state = ret["state"] @@ -1164,13 +1166,18 @@ class FederationHandler(BaseHandler): """ event_content = {"membership": Membership.JOIN} - builder = self.event_builder_factory.new({ - "type": EventTypes.Member, - "content": event_content, - "room_id": room_id, - "sender": user_id, - "state_key": user_id, - }) + room_version = yield self.store.get_room_version(room_id) + + builder = self.event_builder_factory.new( + room_version, + { + "type": EventTypes.Member, + "content": event_content, + "room_id": room_id, + "sender": user_id, + "state_key": user_id, + } + ) try: event, context = yield self.event_creation_handler.create_new_client_event( @@ -1304,7 +1311,7 @@ class FederationHandler(BaseHandler): @defer.inlineCallbacks def do_remotely_reject_invite(self, target_hosts, room_id, user_id): - origin, event = yield self._make_and_verify_event( + origin, event, event_format_version = yield self._make_and_verify_event( target_hosts, room_id, user_id, @@ -1336,7 +1343,7 @@ class FederationHandler(BaseHandler): @defer.inlineCallbacks def _make_and_verify_event(self, target_hosts, room_id, user_id, membership, content={}, params=None): - origin, pdu, _ = yield self.federation_client.make_membership_event( + origin, event, format_ver = yield self.federation_client.make_membership_event( target_hosts, room_id, user_id, @@ -1345,9 +1352,7 @@ class FederationHandler(BaseHandler): params=params, ) - logger.debug("Got response to make_%s: %s", membership, pdu) - - event = pdu + logger.debug("Got response to make_%s: %s", membership, event) # We should assert some things. # FIXME: Do this in a nicer way @@ -1355,7 +1360,7 @@ class FederationHandler(BaseHandler): assert(event.user_id == user_id) assert(event.state_key == user_id) assert(event.room_id == room_id) - defer.returnValue((origin, event)) + defer.returnValue((origin, event, format_ver)) @defer.inlineCallbacks @log_function @@ -1364,13 +1369,17 @@ class FederationHandler(BaseHandler): leave event for the room and return that. We do *not* persist or process it until the other server has signed it and sent it back. 
""" - builder = self.event_builder_factory.new({ - "type": EventTypes.Member, - "content": {"membership": Membership.LEAVE}, - "room_id": room_id, - "sender": user_id, - "state_key": user_id, - }) + room_version = yield self.store.get_room_version(room_id) + builder = self.event_builder_factory.new( + room_version, + { + "type": EventTypes.Member, + "content": {"membership": Membership.LEAVE}, + "room_id": room_id, + "sender": user_id, + "state_key": user_id, + } + ) event, context = yield self.event_creation_handler.create_new_client_event( builder=builder, @@ -2266,14 +2275,16 @@ class FederationHandler(BaseHandler): } if (yield self.auth.check_host_in_room(room_id, self.hs.hostname)): - builder = self.event_builder_factory.new(event_dict) + room_version = yield self.store.get_room_version(room_id) + builder = self.event_builder_factory.new(room_version, event_dict) + EventValidator().validate_new(builder) event, context = yield self.event_creation_handler.create_new_client_event( builder=builder ) event, context = yield self.add_display_name_to_third_party_invite( - event_dict, event, context + room_version, event_dict, event, context ) try: @@ -2304,14 +2315,18 @@ class FederationHandler(BaseHandler): Returns: Deferred: resolves (to None) """ - builder = self.event_builder_factory.new(event_dict) + room_version = yield self.store.get_room_version(room_id) + + # NB: event_dict has a particular specced format we might need to fudge + # if we change event formats too much. + builder = self.event_builder_factory.new(room_version, event_dict) event, context = yield self.event_creation_handler.create_new_client_event( builder=builder, ) event, context = yield self.add_display_name_to_third_party_invite( - event_dict, event, context + room_version, event_dict, event, context ) try: @@ -2331,7 +2346,8 @@ class FederationHandler(BaseHandler): yield member_handler.send_membership_event(None, event, context) @defer.inlineCallbacks - def add_display_name_to_third_party_invite(self, event_dict, event, context): + def add_display_name_to_third_party_invite(self, room_version, event_dict, + event, context): key = ( EventTypes.ThirdPartyInvite, event.content["third_party_invite"]["signed"]["token"] @@ -2355,7 +2371,7 @@ class FederationHandler(BaseHandler): # auth checks. If we need the invite and don't have it then the # auth check code will explode appropriately. 
- builder = self.event_builder_factory.new(event_dict) + builder = self.event_builder_factory.new(room_version, event_dict) EventValidator().validate_new(builder) event, context = yield self.event_creation_handler.create_new_client_event( builder=builder, diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index a7cd779b02..7aaa4fba33 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -278,7 +278,15 @@ class EventCreationHandler(object): """ yield self.auth.check_auth_blocking(requester.user.to_string()) - builder = self.event_builder_factory.new(event_dict) + if event_dict["type"] == EventTypes.Create and event_dict["state_key"] == "": + room_version = event_dict["content"]["room_version"] + else: + try: + room_version = yield self.store.get_room_version(event_dict["room_id"]) + except NotFoundError: + raise AuthError(403, "Unknown room") + + builder = self.event_builder_factory.new(room_version, event_dict) self.validator.validate_new(builder) -- cgit 1.4.1 From 0862d35b8e23151174dac3740c37ddb18e78a74a Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 25 Jan 2019 11:09:34 +0000 Subject: Move tag and direct state copying into separate function --- synapse/handlers/room_member.py | 106 ++++++++++++++++++++++++---------------- 1 file changed, 63 insertions(+), 43 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index eb46f5f4fa..a340566bb0 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -223,49 +223,7 @@ class RoomMemberHandler(object): # Copy over direct message status and room tags if this is a join # on an upgraded room - - # Check if this is an upgraded room - state_ids = yield self.store.get_current_state_ids(room_id) - create_id = state_ids.get((EventTypes.Create, "")) - if not create_id: - return - create_event = yield self.store.get_event(create_id) - - if "predecessor" in create_event["content"]: - old_room_id = create_event["content"]["predecessor"]["room_id"] - - # Retrieve room account data for predecessor room - user_account_data = yield self.store.get_account_data_for_user( - user_id, - ) - room_tags = yield self.store.get_tags_for_room( - user_id, old_room_id, - ) - - # Copy direct message state if applicable - if user_account_data and "m.direct" in user_account_data[0]: - direct_rooms = user_account_data[0]["m.direct"] - - # Check which key this room is under - for key, room_id_list in direct_rooms.items(): - if old_room_id in room_id_list and room_id not in room_id_list: - # Add new room_id to this key - direct_rooms[key].append(room_id) - - # Save back to user's m.direct account data - yield self.store.add_account_data_for_user( - user_id, "m.direct", direct_rooms, - ) - break - - # Copy room tags if applicable - if room_tags: - # Copy each room tag to the new room - for tag in room_tags.keys(): - tag_content = room_tags[tag] - yield self.store.add_tag_to_room( - user_id, room_id, tag, tag_content - ) + self.copy_room_tags_and_direct_to_room(old_room_id, room_id, user_id) elif event.membership == Membership.LEAVE: if prev_member_event_id: prev_member_event = yield self.store.get_event(prev_member_event_id) @@ -274,6 +232,68 @@ class RoomMemberHandler(object): defer.returnValue(event) + @defer.inlineCallbacks + def copy_room_tags_and_direct_to_room( + self, + old_room_id, + new_room_id, + user_id, + ): + """Copies the tags and direct room state from one room to another. 
+ + Args: + old_room_id (str) + new_room_id (str) + user_id (str) + + Returns: + Deferred|None + """ + # Check if this is an upgraded room + state_ids = yield self.store.get_filtered_current_state_ids( + new_room_id, StateFilter.from_types(((EventTypes.Create, ""))), + ) + create_id = state_ids.get((EventTypes.Create, "")) + if not create_id: + return + create_event = yield self.store.get_event(create_id) + + if "predecessor" in create_event["content"]: + old_room_id = create_event["content"]["predecessor"]["room_id"] + + # Retrieve room account data for predecessor room + user_account_data = yield self.store.get_account_data_for_user( + user_id, + ) + room_tags = yield self.store.get_tags_for_room( + user_id, old_room_id, + ) + + # Copy direct message state if applicable + if user_account_data and "m.direct" in user_account_data[0]: + direct_rooms = user_account_data[0]["m.direct"] + + # Check which key this room is under + for key, room_id_list in direct_rooms.items(): + if old_room_id in room_id_list and new_room_id not in room_id_list: + # Add new room_id to this key + direct_rooms[key].append(new_room_id) + + # Save back to user's m.direct account data + yield self.store.add_account_data_for_user( + user_id, "m.direct", direct_rooms, + ) + break + + # Copy room tags if applicable + if room_tags: + # Copy each room tag to the new room + for tag in room_tags.keys(): + tag_content = room_tags[tag] + yield self.store.add_tag_to_room( + user_id, room_id, tag, tag_content + ) + @defer.inlineCallbacks def update_membership( self, -- cgit 1.4.1 From 6f3fda79ce50059c314b9e1566007062045624b4 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 25 Jan 2019 11:21:25 +0000 Subject: Move room_tag declaration to be closer to its use --- synapse/handlers/room_member.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index a340566bb0..67c601b696 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -261,13 +261,10 @@ class RoomMemberHandler(object): if "predecessor" in create_event["content"]: old_room_id = create_event["content"]["predecessor"]["room_id"] - # Retrieve room account data for predecessor room + # Retrieve user account data for predecessor room user_account_data = yield self.store.get_account_data_for_user( user_id, ) - room_tags = yield self.store.get_tags_for_room( - user_id, old_room_id, - ) # Copy direct message state if applicable if user_account_data and "m.direct" in user_account_data[0]: @@ -286,6 +283,10 @@ class RoomMemberHandler(object): break # Copy room tags if applicable + room_tags = yield self.store.get_tags_for_room( + user_id, old_room_id, + ) + if room_tags: # Copy each room tag to the new room for tag in room_tags.keys(): -- cgit 1.4.1 From 516456b763c7652b1401ad502bb6b621f822b15a Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 25 Jan 2019 11:22:14 +0000 Subject: Remove unnecessary null check --- synapse/handlers/room_member.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 67c601b696..3d4500fb18 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -287,13 +287,12 @@ class RoomMemberHandler(object): user_id, old_room_id, ) - if room_tags: - # Copy each room tag to the new room - for tag in room_tags.keys(): - tag_content = 
room_tags[tag] - yield self.store.add_tag_to_room( - user_id, room_id, tag, tag_content - ) + # Copy each room tag to the new room + for tag in room_tags.keys(): + tag_content = room_tags[tag] + yield self.store.add_tag_to_room( + user_id, room_id, tag, tag_content + ) @defer.inlineCallbacks def update_membership( -- cgit 1.4.1 From c4cdafa81fdf167961257f16118735b304535f62 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 25 Jan 2019 11:24:28 +0000 Subject: Destructure account data tuple before use --- synapse/handlers/room_member.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 3d4500fb18..bde4798d0c 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -262,13 +262,13 @@ class RoomMemberHandler(object): old_room_id = create_event["content"]["predecessor"]["room_id"] # Retrieve user account data for predecessor room - user_account_data = yield self.store.get_account_data_for_user( + user_account_data, _ = yield self.store.get_account_data_for_user( user_id, ) # Copy direct message state if applicable - if user_account_data and "m.direct" in user_account_data[0]: - direct_rooms = user_account_data[0]["m.direct"] + if user_account_data and "m.direct" in user_account_data: + direct_rooms = user_account_data["m.direct"] # Check which key this room is under for key, room_id_list in direct_rooms.items(): -- cgit 1.4.1 From 8265995498e375cefd728b77930ea3e65306c69a Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 25 Jan 2019 11:26:06 +0000 Subject: Use python magic --- synapse/handlers/room_member.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index bde4798d0c..ee12428fe7 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -288,8 +288,7 @@ class RoomMemberHandler(object): ) # Copy each room tag to the new room - for tag in room_tags.keys(): - tag_content = room_tags[tag] + for tag, tag_content in room_tags.items(): yield self.store.add_tag_to_room( user_id, room_id, tag, tag_content ) -- cgit 1.4.1 From da0d2219d2bdde68165b16d51483357dd234ad1d Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 25 Jan 2019 11:37:12 +0000 Subject: Clean up direct_rooms access --- synapse/handlers/room_member.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index ee12428fe7..b9de146913 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -267,10 +267,10 @@ class RoomMemberHandler(object): ) # Copy direct message state if applicable - if user_account_data and "m.direct" in user_account_data: - direct_rooms = user_account_data["m.direct"] + direct_rooms = user_account_data.get("m.direct", {}) - # Check which key this room is under + # Check which key this room is under + if isinstance(direct_rooms, dict): for key, room_id_list in direct_rooms.items(): if old_room_id in room_id_list and new_room_id not in room_id_list: # Add new room_id to this key -- cgit 1.4.1 From 9244a3089eae772c0fca69372a8230d1b5ab9947 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 25 Jan 2019 11:48:38 +0000 Subject: Fixes --- synapse/handlers/room_member.py | 78 ++++++++++++++++++++--------------------- 1 file changed, 39 insertions(+), 39 deletions(-) 
(limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index b9de146913..aabc488753 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -29,6 +29,7 @@ import synapse.server import synapse.types from synapse.api.constants import EventTypes, Membership from synapse.api.errors import AuthError, Codes, SynapseError +from synapse.storage.state import StateFilter from synapse.types import RoomID, UserID from synapse.util.async_helpers import Linearizer from synapse.util.distributor import user_joined_room, user_left_room @@ -223,7 +224,18 @@ class RoomMemberHandler(object): # Copy over direct message status and room tags if this is a join # on an upgraded room - self.copy_room_tags_and_direct_to_room(old_room_id, room_id, user_id) + # Check if this is an upgraded room + state_ids = yield self.store.get_filtered_current_state_ids( + room_id, StateFilter.from_types([(EventTypes.Create, "")]), + ) + create_id = state_ids.get((EventTypes.Create, "")) + if not create_id: + return + create_event = yield self.store.get_event(create_id) + + if "predecessor" in create_event["content"]: + old_room_id = create_event["content"]["predecessor"]["room_id"] + self.copy_room_tags_and_direct_to_room(old_room_id, room_id, user_id) elif event.membership == Membership.LEAVE: if prev_member_event_id: prev_member_event = yield self.store.get_event(prev_member_event_id) @@ -249,49 +261,37 @@ class RoomMemberHandler(object): Returns: Deferred|None """ - # Check if this is an upgraded room - state_ids = yield self.store.get_filtered_current_state_ids( - new_room_id, StateFilter.from_types(((EventTypes.Create, ""))), + # Retrieve user account data for predecessor room + user_account_data, _ = yield self.store.get_account_data_for_user( + user_id, ) - create_id = state_ids.get((EventTypes.Create, "")) - if not create_id: - return - create_event = yield self.store.get_event(create_id) - if "predecessor" in create_event["content"]: - old_room_id = create_event["content"]["predecessor"]["room_id"] + # Copy direct message state if applicable + direct_rooms = user_account_data.get("m.direct", {}) - # Retrieve user account data for predecessor room - user_account_data, _ = yield self.store.get_account_data_for_user( - user_id, - ) + # Check which key this room is under + if isinstance(direct_rooms, dict): + for key, room_id_list in direct_rooms.items(): + if old_room_id in room_id_list and new_room_id not in room_id_list: + # Add new room_id to this key + direct_rooms[key].append(new_room_id) - # Copy direct message state if applicable - direct_rooms = user_account_data.get("m.direct", {}) - - # Check which key this room is under - if isinstance(direct_rooms, dict): - for key, room_id_list in direct_rooms.items(): - if old_room_id in room_id_list and new_room_id not in room_id_list: - # Add new room_id to this key - direct_rooms[key].append(new_room_id) - - # Save back to user's m.direct account data - yield self.store.add_account_data_for_user( - user_id, "m.direct", direct_rooms, - ) - break - - # Copy room tags if applicable - room_tags = yield self.store.get_tags_for_room( - user_id, old_room_id, - ) + # Save back to user's m.direct account data + yield self.store.add_account_data_for_user( + user_id, "m.direct", direct_rooms, + ) + break - # Copy each room tag to the new room - for tag, tag_content in room_tags.items(): - yield self.store.add_tag_to_room( - user_id, room_id, tag, tag_content - ) + # Copy room tags if applicable + 
room_tags = yield self.store.get_tags_for_room( + user_id, old_room_id, + ) + + # Copy each room tag to the new room + for tag, tag_content in room_tags.items(): + yield self.store.add_tag_to_room( + user_id, new_room_id, tag, tag_content + ) @defer.inlineCallbacks def update_membership( -- cgit 1.4.1 From ae2a957dbacc38f1126e2eca160f17322c710d26 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 25 Jan 2019 18:31:41 +0000 Subject: Pass through room version to event auth --- synapse/api/auth.py | 14 ++++++++++---- synapse/event_auth.py | 3 ++- synapse/handlers/federation.py | 20 ++++++++++++-------- synapse/handlers/message.py | 7 ++++++- synapse/handlers/room.py | 5 ++++- synapse/state/__init__.py | 2 +- synapse/state/v1.py | 14 +++++++++++--- synapse/state/v2.py | 14 +++++++++----- tests/state/test_v2.py | 4 +++- tests/test_event_auth.py | 13 +++++++++++-- 10 files changed, 69 insertions(+), 27 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index e37b807c94..7b213e54c8 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -65,7 +65,7 @@ class Auth(object): register_cache("cache", "token_cache", self.token_cache) @defer.inlineCallbacks - def check_from_context(self, event, context, do_sig_check=True): + def check_from_context(self, room_version, event, context, do_sig_check=True): prev_state_ids = yield context.get_prev_state_ids(self.store) auth_events_ids = yield self.compute_auth_events( event, prev_state_ids, for_verification=True, @@ -74,12 +74,16 @@ class Auth(object): auth_events = { (e.type, e.state_key): e for e in itervalues(auth_events) } - self.check(event, auth_events=auth_events, do_sig_check=do_sig_check) + self.check( + room_version, event, + auth_events=auth_events, do_sig_check=do_sig_check, + ) - def check(self, event, auth_events, do_sig_check=True): + def check(self, room_version, event, auth_events, do_sig_check=True): """ Checks if this event is correctly authed. Args: + room_version (str): version of the room event: the event being checked. auth_events (dict: event-key -> event): the existing room state. @@ -88,7 +92,9 @@ class Auth(object): True if the auth checks pass. """ with Measure(self.clock, "auth.check"): - event_auth.check(event, auth_events, do_sig_check=do_sig_check) + event_auth.check( + room_version, event, auth_events, do_sig_check=do_sig_check + ) @defer.inlineCallbacks def check_joined_room(self, room_id, user_id, current_state=None): diff --git a/synapse/event_auth.py b/synapse/event_auth.py index c81d8e6729..9adedbbb02 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -27,10 +27,11 @@ from synapse.types import UserID, get_domain_from_id logger = logging.getLogger(__name__) -def check(event, auth_events, do_sig_check=True, do_size_check=True): +def check(room_version, event, auth_events, do_sig_check=True, do_size_check=True): """ Checks if this event is correctly authed. Args: + room_version (str): the version of the room event: the event being checked. auth_events (dict: event-key -> event): the existing room state. diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index a4b771049c..5adbe7b538 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1189,7 +1189,9 @@ class FederationHandler(BaseHandler): # The remote hasn't signed it yet, obviously. 
We'll do the full checks # when we get the event back in `on_send_join_request` - yield self.auth.check_from_context(event, context, do_sig_check=False) + yield self.auth.check_from_context( + room_version, event, context, do_sig_check=False, + ) defer.returnValue(event) @@ -1388,7 +1390,9 @@ class FederationHandler(BaseHandler): try: # The remote hasn't signed it yet, obviously. We'll do the full checks # when we get the event back in `on_send_leave_request` - yield self.auth.check_from_context(event, context, do_sig_check=False) + yield self.auth.check_from_context( + room_version, event, context, do_sig_check=False, + ) except AuthError as e: logger.warn("Failed to create new leave %r because %s", event, e) raise e @@ -1683,7 +1687,7 @@ class FederationHandler(BaseHandler): auth_for_e[(EventTypes.Create, "")] = create_event try: - self.auth.check(e, auth_events=auth_for_e) + self.auth.check(room_version, e, auth_events=auth_for_e) except SynapseError as err: # we may get SynapseErrors here as well as AuthErrors. For # instance, there are a couple of (ancient) events in some @@ -1927,6 +1931,8 @@ class FederationHandler(BaseHandler): current_state = set(e.event_id for e in auth_events.values()) different_auth = event_auth_events - current_state + room_version = yield self.store.get_room_version(event.room_id) + if different_auth and not event.internal_metadata.is_outlier(): # Do auth conflict res. logger.info("Different auth: %s", different_auth) @@ -1951,8 +1957,6 @@ class FederationHandler(BaseHandler): (d.type, d.state_key): d for d in different_events if d }) - room_version = yield self.store.get_room_version(event.room_id) - new_state = yield self.state_handler.resolve_events( room_version, [list(local_view.values()), list(remote_view.values())], @@ -2052,7 +2056,7 @@ class FederationHandler(BaseHandler): ) try: - self.auth.check(event, auth_events=auth_events) + self.auth.check(room_version, event, auth_events=auth_events) except AuthError as e: logger.warn("Failed auth resolution for %r because %s", event, e) raise e @@ -2288,7 +2292,7 @@ class FederationHandler(BaseHandler): ) try: - yield self.auth.check_from_context(event, context) + yield self.auth.check_from_context(room_version, event, context) except AuthError as e: logger.warn("Denying new third party invite %r because %s", event, e) raise e @@ -2330,7 +2334,7 @@ class FederationHandler(BaseHandler): ) try: - self.auth.check_from_context(event, context) + self.auth.check_from_context(room_version, event, context) except AuthError as e: logger.warn("Denying third party invite %r because %s", event, e) raise e diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 7aaa4fba33..10a7ed4c5e 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -611,8 +611,13 @@ class EventCreationHandler(object): extra_users (list(UserID)): Any extra users to notify about event """ + if event.is_state() and (event.type, event.state_key) == (EventTypes.Create, ""): + room_version = event.content["room_version"] + else: + room_version = yield self.store.get_room_version(event.room_id) + try: - yield self.auth.check_from_context(event, context) + yield self.auth.check_from_context(room_version, event, context) except AuthError as err: logger.warn("Denying new event %r because %s", event, err) raise err diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index cb8c5f77dd..19b4ee35d2 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -123,7 +123,10 @@ class 
RoomCreationHandler(BaseHandler): token_id=requester.access_token_id, ) ) - yield self.auth.check_from_context(tombstone_event, tombstone_context) + old_room_version = yield self.store.get_room_version(old_room_id) + yield self.auth.check_from_context( + old_room_version, tombstone_event, tombstone_context, + ) yield self.clone_exiting_room( requester, diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index e9ecb00277..2fca51d0b2 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -611,7 +611,7 @@ def resolve_events_with_store(room_version, state_sets, event_map, state_res_sto RoomVersions.VDH_TEST, RoomVersions.STATE_V2_TEST, RoomVersions.V2, ): return v2.resolve_events_with_store( - state_sets, event_map, state_res_store, + room_version, state_sets, event_map, state_res_store, ) else: # This should only happen if we added a version but forgot to add it to diff --git a/synapse/state/v1.py b/synapse/state/v1.py index 19e091ce3b..6d3afcae7c 100644 --- a/synapse/state/v1.py +++ b/synapse/state/v1.py @@ -21,7 +21,7 @@ from six import iteritems, iterkeys, itervalues from twisted.internet import defer from synapse import event_auth -from synapse.api.constants import EventTypes +from synapse.api.constants import EventTypes, RoomVersions from synapse.api.errors import AuthError logger = logging.getLogger(__name__) @@ -274,7 +274,11 @@ def _resolve_auth_events(events, auth_events): auth_events[(prev_event.type, prev_event.state_key)] = prev_event try: # The signatures have already been checked at this point - event_auth.check(event, auth_events, do_sig_check=False, do_size_check=False) + event_auth.check( + RoomVersions.V1, event, auth_events, + do_sig_check=False, + do_size_check=False, + ) prev_event = event except AuthError: return prev_event @@ -286,7 +290,11 @@ def _resolve_normal_events(events, auth_events): for event in _ordered_events(events): try: # The signatures have already been checked at this point - event_auth.check(event, auth_events, do_sig_check=False, do_size_check=False) + event_auth.check( + RoomVersions.V1, event, auth_events, + do_sig_check=False, + do_size_check=False, + ) return event except AuthError: pass diff --git a/synapse/state/v2.py b/synapse/state/v2.py index 3573bb0028..650995c92c 100644 --- a/synapse/state/v2.py +++ b/synapse/state/v2.py @@ -29,10 +29,12 @@ logger = logging.getLogger(__name__) @defer.inlineCallbacks -def resolve_events_with_store(state_sets, event_map, state_res_store): +def resolve_events_with_store(room_version, state_sets, event_map, state_res_store): """Resolves the state using the v2 state resolution algorithm Args: + room_version (str): The room version + state_sets(list): List of dicts of (type, state_key) -> event_id, which are the different state groups to resolve. 
@@ -104,7 +106,7 @@ def resolve_events_with_store(state_sets, event_map, state_res_store): # Now sequentially auth each one resolved_state = yield _iterative_auth_checks( - sorted_power_events, unconflicted_state, event_map, + room_version, sorted_power_events, unconflicted_state, event_map, state_res_store, ) @@ -129,7 +131,7 @@ def resolve_events_with_store(state_sets, event_map, state_res_store): logger.debug("resolving remaining events") resolved_state = yield _iterative_auth_checks( - leftover_events, resolved_state, event_map, + room_version, leftover_events, resolved_state, event_map, state_res_store, ) @@ -350,11 +352,13 @@ def _reverse_topological_power_sort(event_ids, event_map, state_res_store, auth_ @defer.inlineCallbacks -def _iterative_auth_checks(event_ids, base_state, event_map, state_res_store): +def _iterative_auth_checks(room_version, event_ids, base_state, event_map, + state_res_store): """Sequentially apply auth checks to each event in given list, updating the state as it goes along. Args: + room_version (str) event_ids (list[str]): Ordered list of events to apply auth checks to base_state (dict[tuple[str, str], str]): The set of state to start with event_map (dict[str,FrozenEvent]) @@ -385,7 +389,7 @@ def _iterative_auth_checks(event_ids, base_state, event_map, state_res_store): try: event_auth.check( - event, auth_events, + room_version, event, auth_events, do_sig_check=False, do_size_check=False ) diff --git a/tests/state/test_v2.py b/tests/state/test_v2.py index 2e073a3afc..9a5c816927 100644 --- a/tests/state/test_v2.py +++ b/tests/state/test_v2.py @@ -19,7 +19,7 @@ from six.moves import zip import attr -from synapse.api.constants import EventTypes, JoinRules, Membership +from synapse.api.constants import EventTypes, JoinRules, Membership, RoomVersions from synapse.event_auth import auth_types_for_event from synapse.events import FrozenEvent from synapse.state.v2 import lexicographical_topological_sort, resolve_events_with_store @@ -539,6 +539,7 @@ class StateTestCase(unittest.TestCase): state_before = dict(state_at_event[prev_events[0]]) else: state_d = resolve_events_with_store( + RoomVersions.V2, [state_at_event[n] for n in prev_events], event_map=event_map, state_res_store=TestStateResolutionStore(event_map), @@ -685,6 +686,7 @@ class SimpleParamStateTestCase(unittest.TestCase): # Test that we correctly handle passing `None` as the event_map state_d = resolve_events_with_store( + RoomVersions.V2, [self.state_at_bob, self.state_at_charlie], event_map=None, state_res_store=TestStateResolutionStore(self.event_map), diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py index 411b4a9f86..7ee318e4e8 100644 --- a/tests/test_event_auth.py +++ b/tests/test_event_auth.py @@ -16,6 +16,7 @@ import unittest from synapse import event_auth +from synapse.api.constants import RoomVersions from synapse.api.errors import AuthError from synapse.events import FrozenEvent @@ -35,12 +36,16 @@ class EventAuthTestCase(unittest.TestCase): } # creator should be able to send state - event_auth.check(_random_state_event(creator), auth_events, do_sig_check=False) + event_auth.check( + RoomVersions.V1, _random_state_event(creator), auth_events, + do_sig_check=False, + ) # joiner should not be able to send state self.assertRaises( AuthError, event_auth.check, + RoomVersions.V1, _random_state_event(joiner), auth_events, do_sig_check=False, @@ -69,13 +74,17 @@ class EventAuthTestCase(unittest.TestCase): self.assertRaises( AuthError, event_auth.check, + RoomVersions.V1, 
_random_state_event(pleb), auth_events, do_sig_check=False, ), # king should be able to send state - event_auth.check(_random_state_event(king), auth_events, do_sig_check=False) + event_auth.check( + RoomVersions.V1, _random_state_event(king), auth_events, + do_sig_check=False, + ) # helpers for making events -- cgit 1.4.1 From 1ce463963db06158d8e740b1986392ef968df0c0 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Mon, 28 Jan 2019 14:08:18 +0000 Subject: Reuse predecessor method --- synapse/handlers/room_member.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index aabc488753..6163c6bdee 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -29,7 +29,6 @@ import synapse.server import synapse.types from synapse.api.constants import EventTypes, Membership from synapse.api.errors import AuthError, Codes, SynapseError -from synapse.storage.state import StateFilter from synapse.types import RoomID, UserID from synapse.util.async_helpers import Linearizer from synapse.util.distributor import user_joined_room, user_left_room @@ -224,18 +223,15 @@ class RoomMemberHandler(object): # Copy over direct message status and room tags if this is a join # on an upgraded room + # Check if this is an upgraded room - state_ids = yield self.store.get_filtered_current_state_ids( - room_id, StateFilter.from_types([(EventTypes.Create, "")]), - ) - create_id = state_ids.get((EventTypes.Create, "")) - if not create_id: - return - create_event = yield self.store.get_event(create_id) - - if "predecessor" in create_event["content"]: - old_room_id = create_event["content"]["predecessor"]["room_id"] - self.copy_room_tags_and_direct_to_room(old_room_id, room_id, user_id) + predecessor = yield self.store.get_room_predecessor(room_id) + + if predecessor: + # It is an upgraded room. Copy over old tags + self.copy_room_tags_and_direct_to_room( + predecessor["room_id"], room_id, user_id, + ) elif event.membership == Membership.LEAVE: if prev_member_event_id: prev_member_event = yield self.store.get_event(prev_member_event_id) -- cgit 1.4.1 From f0e96ab66a09db8ffec34fddf67d088fe03f8ea6 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Mon, 28 Jan 2019 14:09:45 +0000 Subject: Change return syntax in doc string --- synapse/handlers/room_member.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 6163c6bdee..9ed5a05cca 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -255,7 +255,7 @@ class RoomMemberHandler(object): user_id (str) Returns: - Deferred|None + Deferred[None] """ # Retrieve user account data for predecessor room user_account_data, _ = yield self.store.get_account_data_for_user( -- cgit 1.4.1 From b872c7b1b43431b8933e2afd2f226aa34ad81a0f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 28 Jan 2019 17:00:14 +0000 Subject: Split up event validation between event and builder The validator was being run on the EventBuilder objects, and so the validator only checked a subset of fields. With the upcoming EventBuilder refactor even fewer fields will be there to validate. To get around this we split the validation into those that can be run against an EventBuilder and those run against a fully fledged event. 
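
As a rough illustration of the calling pattern this split implies (the sketch below is editorial and not part of the patch series): validate_builder() is meant to run against the proto-event before it is built, when only fields such as room_id, sender, type and an optional state_key exist, while validate_new() runs against the fully built, signed event and can additionally check event_id, auth_events, prev_events and hashes. The sketch assumes the EventBuilderFactory.new(room_version, ...) and builder.build(prev_event_ids=...) signatures introduced by the EventBuilder refactor later in this series; the wrapper function and its parameter list are hypothetical.

    # Sketch only: the two-stage validation flow, assuming Synapse's
    # EventBuilderFactory, EventValidator and EventBuilder objects are
    # passed in. The wrapper function itself is hypothetical.
    from twisted.internet import defer

    @defer.inlineCallbacks
    def build_and_validate(event_builder_factory, validator, room_version,
                           event_dict, prev_event_ids):
        # Proto-event stage: only builder-level fields (room_id, sender,
        # type, state_key) can be checked at this point.
        builder = event_builder_factory.new(room_version, event_dict)
        validator.validate_builder(builder)

        # Build the complete, signed event ...
        event = yield builder.build(prev_event_ids=prev_event_ids)

        # ... and only now run the checks that need the full event
        # (event_id, auth_events, prev_events, hashes, ...).
        validator.validate_new(event)

        defer.returnValue(event)

The synapse/handlers/message.py hunk in this commit follows the same order: validate_builder() before the event is created, validate_new() on the resulting event.
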
--- synapse/events/validator.py | 73 ++++++++++++++++++++++++++++++------------ synapse/handlers/federation.py | 7 ++-- synapse/handlers/message.py | 4 ++- 3 files changed, 61 insertions(+), 23 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/events/validator.py b/synapse/events/validator.py index cf184748a1..55d44d093d 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -24,14 +24,13 @@ class EventValidator(object): def validate(self, event): EventID.from_string(event.event_id) - RoomID.from_string(event.room_id) required = [ - # "auth_events", + "auth_events", "content", - # "hashes", + "hashes", "origin", - # "prev_events", + "prev_events", "sender", "type", ] @@ -43,33 +42,60 @@ class EventValidator(object): # Check that the following keys have string values strings = [ "origin", - "sender", - "type", ] - if hasattr(event, "state_key"): - strings.append("state_key") - for s in strings: if not isinstance(getattr(event, s), string_types): raise SynapseError(400, "Not '%s' a string type" % (s,)) - if event.type == EventTypes.Member: - if "membership" not in event.content: - raise SynapseError(400, "Content has not membership key") + def validate_new(self, event): + """Validates the event has roughly the right format - if event.content["membership"] not in Membership.LIST: - raise SynapseError(400, "Invalid membership key") + Args: + event (FrozenEvent) + """ + self.validate_builder(event) + self.validate(event) - # Check that the following keys have dictionary values - # TODO + UserID.from_string(event.sender) + + if event.type == EventTypes.Message: + strings = [ + "body", + "msgtype", + ] - # Check that the following keys have the correct format for DAGs - # TODO + self._ensure_strings(event.content, strings) - def validate_new(self, event): - self.validate(event) + elif event.type == EventTypes.Topic: + self._ensure_strings(event.content, ["topic"]) + elif event.type == EventTypes.Name: + self._ensure_strings(event.content, ["name"]) + + def validate_builder(self, event): + """Validates that the builder/event has roughly the right format. 
Only + checks values that we expect a proto event to have, rather than all the + fields an event would have + + Args: + event (EventBuilder|FrozenEvent) + """ + + strings = [ + "room_id", + "sender", + "type", + ] + + if hasattr(event, "state_key"): + strings.append("state_key") + + for s in strings: + if not isinstance(getattr(event, s), string_types): + raise SynapseError(400, "Not '%s' a string type" % (s,)) + + RoomID.from_string(event.room_id) UserID.from_string(event.sender) if event.type == EventTypes.Message: @@ -86,6 +112,13 @@ class EventValidator(object): elif event.type == EventTypes.Name: self._ensure_strings(event.content, ["name"]) + elif event.type == EventTypes.Member: + if "membership" not in event.content: + raise SynapseError(400, "Content has not membership key") + + if event.content["membership"] not in Membership.LIST: + raise SynapseError(400, "Invalid membership key") + def _ensure_strings(self, d, keys): for s in keys: if s not in d: diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index a4b771049c..13333818ae 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -2278,7 +2278,7 @@ class FederationHandler(BaseHandler): room_version = yield self.store.get_room_version(room_id) builder = self.event_builder_factory.new(room_version, event_dict) - EventValidator().validate_new(builder) + EventValidator().validate_builder(builder) event, context = yield self.event_creation_handler.create_new_client_event( builder=builder ) @@ -2287,6 +2287,8 @@ class FederationHandler(BaseHandler): room_version, event_dict, event, context ) + EventValidator().validate_new(event) + try: yield self.auth.check_from_context(event, context) except AuthError as e: @@ -2372,10 +2374,11 @@ class FederationHandler(BaseHandler): # auth check code will explode appropriately. 
builder = self.event_builder_factory.new(room_version, event_dict) - EventValidator().validate_new(builder) + EventValidator().validate_builder(builder) event, context = yield self.event_creation_handler.create_new_client_event( builder=builder, ) + EventValidator().validate_new(event) defer.returnValue((event, context)) @defer.inlineCallbacks diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 7aaa4fba33..d2aab25111 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -288,7 +288,7 @@ class EventCreationHandler(object): builder = self.event_builder_factory.new(room_version, event_dict) - self.validator.validate_new(builder) + self.validator.validate_builder(builder) if builder.type == EventTypes.Member: membership = builder.content.get("membership", None) @@ -326,6 +326,8 @@ class EventCreationHandler(object): prev_events_and_hashes=prev_events_and_hashes, ) + self.validator.validate_new(event) + defer.returnValue((event, context)) def _is_exempt_from_privacy_policy(self, builder, requester): -- cgit 1.4.1 From d758d5310e5a5753f181d0eef447acb1a3dd29cc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 28 Jan 2019 17:26:39 +0000 Subject: Correctly use default room version if none is set --- synapse/handlers/message.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 10a7ed4c5e..67b364a6c3 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -22,7 +22,7 @@ from canonicaljson import encode_canonical_json, json from twisted.internet import defer from twisted.internet.defer import succeed -from synapse.api.constants import MAX_DEPTH, EventTypes, Membership +from synapse.api.constants import MAX_DEPTH, EventTypes, Membership, RoomVersions from synapse.api.errors import ( AuthError, Codes, @@ -612,7 +612,7 @@ class EventCreationHandler(object): """ if event.is_state() and (event.type, event.state_key) == (EventTypes.Create, ""): - room_version = event.content["room_version"] + room_version = event.content.get("room_version", RoomVersions.V1) else: room_version = yield self.store.get_room_version(event.room_id) -- cgit 1.4.1 From be47cfa9c97b4acfd884440f1953ed000225eb37 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 25 Jan 2019 17:19:31 +0000 Subject: Refactor event building into EventBuilder This is so that everything is done in one place, making it easier to change the event format based on room version --- synapse/events/builder.py | 284 ++++++++++++++++++++++++-------- synapse/federation/federation_client.py | 20 +-- synapse/handlers/message.py | 34 +--- synapse/server.py | 5 +- synapse/storage/event_federation.py | 23 +++ 5 files changed, 254 insertions(+), 112 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/events/builder.py b/synapse/events/builder.py index 7e63371095..225b5fd670 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -13,79 +13,156 @@ # See the License for the specific language governing permissions and # limitations under the License. -import copy +import attr -from synapse.api.constants import RoomVersions +from twisted.internet import defer + +from synapse.api.constants import ( + KNOWN_EVENT_FORMAT_VERSIONS, + KNOWN_ROOM_VERSIONS, + MAX_DEPTH, +) +from synapse.crypto.event_signing import add_hashes_and_signatures from synapse.types import EventID from synapse.util.stringutils import random_string -from . 
import EventBase, FrozenEvent, _event_dict_property +from . import ( + _EventInternalMetadata, + event_type_from_format_version, + room_version_to_event_format, +) -def get_event_builder(room_version, key_values={}, internal_metadata_dict={}): - """Generate an event builder appropriate for the given room version +@attr.s(slots=True, cmp=False, frozen=True) +class EventBuilder(object): + """A format independent event builder used to build up the event content + before signing the event. - Args: - room_version (str): Version of the room that we're creating an - event builder for - key_values (dict): Fields used as the basis of the new event - internal_metadata_dict (dict): Used to create the `_EventInternalMetadata` - object. + (Note that while objects of this class are frozen, the + content/unsigned/internal_metadata fields are still mutable) - Returns: - EventBuilder + Attributes: + format_version (int): Event format version + room_id (str) + type (str) + sender (str) + content (dict) + unsigned (dict) + internal_metadata (_EventInternalMetadata) + + _state (StateHandler) + _auth (synapse.api.Auth) + _store (DataStore) + _clock (Clock) + _hostname (str): The hostname of the server creating the event + _signing_key: The signing key to use to sign the event as the server """ - if room_version in { - RoomVersions.V1, - RoomVersions.V2, - RoomVersions.VDH_TEST, - RoomVersions.STATE_V2_TEST, - }: - return EventBuilder(key_values, internal_metadata_dict) - else: - raise Exception( - "No event format defined for version %r" % (room_version,) - ) + _state = attr.ib() + _auth = attr.ib() + _store = attr.ib() + _clock = attr.ib() + _hostname = attr.ib() + _signing_key = attr.ib() + + format_version = attr.ib() + + room_id = attr.ib() + type = attr.ib() + sender = attr.ib() + + content = attr.ib(default=attr.Factory(dict)) + unsigned = attr.ib(default=attr.Factory(dict)) + + # These only exist on a subset of events, so they raise AttributeError if + # someone tries to get them when they don't exist. 
+ _state_key = attr.ib(default=None) + _redacts = attr.ib(default=None) + + internal_metadata = attr.ib(default=attr.Factory(lambda: _EventInternalMetadata({}))) + + @property + def state_key(self): + if self._state_key is not None: + return self._state_key + + raise AttributeError("state_key") + + def is_state(self): + return self._state_key is not None -class EventBuilder(EventBase): - def __init__(self, key_values={}, internal_metadata_dict={}): - signatures = copy.deepcopy(key_values.pop("signatures", {})) - unsigned = copy.deepcopy(key_values.pop("unsigned", {})) + @defer.inlineCallbacks + def build(self, prev_event_ids): + """Transform into a fully signed and hashed event - super(EventBuilder, self).__init__( - key_values, - signatures=signatures, - unsigned=unsigned, - internal_metadata_dict=internal_metadata_dict, + Args: + prev_event_ids (list[str]): The event IDs to use as the prev events + + Returns: + Deferred[FrozenEvent] + """ + + state_ids = yield self._state.get_current_state_ids( + self.room_id, prev_event_ids, + ) + auth_ids = yield self._auth.compute_auth_events( + self, state_ids, ) - event_id = _event_dict_property("event_id") - state_key = _event_dict_property("state_key") - type = _event_dict_property("type") + auth_events = yield self._store.add_event_hashes(auth_ids) + prev_events = yield self._store.add_event_hashes(prev_event_ids) - def build(self): - return FrozenEvent.from_event(self) + old_depth = yield self._store.get_max_depth_of( + prev_event_ids, + ) + depth = old_depth + 1 + # we cap depth of generated events, to ensure that they are not + # rejected by other servers (and so that they can be persisted in + # the db) + depth = min(depth, MAX_DEPTH) -class EventBuilderFactory(object): - def __init__(self, clock, hostname): - self.clock = clock - self.hostname = hostname + event_dict = { + "auth_events": auth_events, + "prev_events": prev_events, + "type": self.type, + "room_id": self.room_id, + "sender": self.sender, + "content": self.content, + "unsigned": self.unsigned, + "depth": depth, + "prev_state": [], + } + + if self.is_state(): + event_dict["state_key"] = self._state_key - self.event_id_count = 0 + if self._redacts is not None: + event_dict["redacts"] = self._redacts - def create_event_id(self): - i = str(self.event_id_count) - self.event_id_count += 1 + defer.returnValue( + create_local_event_from_event_dict( + clock=self._clock, + hostname=self._hostname, + signing_key=self._signing_key, + format_version=self.format_version, + event_dict=event_dict, + internal_metadata_dict=self.internal_metadata.get_dict(), + ) + ) - local_part = str(int(self.clock.time())) + i + random_string(5) - e_id = EventID(local_part, self.hostname) +class EventBuilderFactory(object): + def __init__(self, hs): + self.clock = hs.get_clock() + self.hostname = hs.hostname + self.signing_key = hs.config.signing_key[0] - return e_id.to_string() + self.store = hs.get_datastore() + self.state = hs.get_state_handler() + self.auth = hs.get_auth() - def new(self, room_version, key_values={}): + def new(self, room_version, key_values): """Generate an event builder appropriate for the given room version Args: @@ -98,27 +175,104 @@ class EventBuilderFactory(object): """ # There's currently only the one event version defined - if room_version not in { - RoomVersions.V1, - RoomVersions.V2, - RoomVersions.VDH_TEST, - RoomVersions.STATE_V2_TEST, - }: + if room_version not in KNOWN_ROOM_VERSIONS: raise Exception( "No event format defined for version %r" % (room_version,) ) - 
key_values["event_id"] = self.create_event_id() + key_values["event_id"] = _create_event_id(self.clock, self.hostname) + + return EventBuilder( + store=self.store, + state=self.state, + auth=self.auth, + clock=self.clock, + hostname=self.hostname, + signing_key=self.signing_key, + format_version=room_version_to_event_format(room_version), + type=key_values["type"], + state_key=key_values.get("state_key"), + room_id=key_values["room_id"], + sender=key_values["sender"], + content=key_values.get("content", {}), + unsigned=key_values.get("unsigned", {}), + redacts=key_values.get("redacts", None), + ) + + +def create_local_event_from_event_dict(clock, hostname, signing_key, + format_version, event_dict, + internal_metadata_dict=None): + """Takes a fully formed event dict, ensuring that fields like `origin` + and `origin_server_ts` have correct values for a locally produced event, + then signs and hashes it. + + Args: + clock (Clock) + hostname (str) + signing_key + format_version (int) + event_dict (dict) + internal_metadata_dict (dict|None) + + Returns: + FrozenEvent + """ + + # There's currently only the one event version defined + if format_version not in KNOWN_EVENT_FORMAT_VERSIONS: + raise Exception( + "No event format defined for version %r" % (format_version,) + ) + + if internal_metadata_dict is None: + internal_metadata_dict = {} + + time_now = int(clock.time_msec()) + + event_dict["event_id"] = _create_event_id(clock, hostname) + + event_dict["origin"] = hostname + event_dict["origin_server_ts"] = time_now + + event_dict.setdefault("unsigned", {}) + age = event_dict["unsigned"].pop("age", 0) + event_dict["unsigned"].setdefault("age_ts", time_now - age) + + event_dict.setdefault("signatures", {}) + + add_hashes_and_signatures( + event_dict, + hostname, + signing_key, + ) + return event_type_from_format_version(format_version)( + event_dict, internal_metadata_dict=internal_metadata_dict, + ) + + +# A counter used when generating new event IDs +_event_id_counter = 0 + + +def _create_event_id(clock, hostname): + """Create a new event ID + + Args: + clock (Clock) + hostname (str): The server name for the event ID + + Returns: + str + """ - time_now = int(self.clock.time_msec()) + global _event_id_counter - key_values.setdefault("origin", self.hostname) - key_values.setdefault("origin_server_ts", time_now) + i = str(_event_id_counter) + _event_id_counter += 1 - key_values.setdefault("unsigned", {}) - age = key_values["unsigned"].pop("age", 0) - key_values["unsigned"].setdefault("age_ts", time_now - age) + local_part = str(int(clock.time())) + i + random_string(5) - key_values["signatures"] = {} + e_id = EventID(local_part, hostname) - return EventBuilder(key_values=key_values,) + return e_id.to_string() diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 71809893c5..be3bb59431 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -37,8 +37,7 @@ from synapse.api.errors import ( HttpResponseException, SynapseError, ) -from synapse.crypto.event_signing import add_hashes_and_signatures -from synapse.events import room_version_to_event_format +from synapse.events import builder, room_version_to_event_format from synapse.federation.federation_base import FederationBase, event_from_pdu_json from synapse.util import logcontext, unwrapFirstError from synapse.util.caches.expiringcache import ExpiringCache @@ -72,7 +71,8 @@ class FederationClient(FederationBase): self.state = hs.get_state_handler() 
self.transport_layer = hs.get_federation_transport_client() - self.event_builder_factory = hs.get_event_builder_factory() + self.hostname = hs.hostname + self.signing_key = hs.config.signing_key[0] self._get_pdu_cache = ExpiringCache( cache_name="get_pdu_cache", @@ -608,18 +608,10 @@ class FederationClient(FederationBase): if "prev_state" not in pdu_dict: pdu_dict["prev_state"] = [] - # Strip off the fields that we want to clobber. - pdu_dict.pop("origin", None) - pdu_dict.pop("origin_server_ts", None) - pdu_dict.pop("unsigned", None) - - builder = self.event_builder_factory.new(room_version, pdu_dict) - add_hashes_and_signatures( - builder, - self.hs.hostname, - self.hs.config.signing_key[0] + ev = builder.create_local_event_from_event_dict( + self._clock, self.hostname, self.signing_key, + format_version=event_format, event_dict=pdu_dict, ) - ev = builder.build() defer.returnValue( (destination, ev, event_format) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 05d1370c18..ac6f4fd985 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -22,7 +22,7 @@ from canonicaljson import encode_canonical_json, json from twisted.internet import defer from twisted.internet.defer import succeed -from synapse.api.constants import MAX_DEPTH, EventTypes, Membership, RoomVersions +from synapse.api.constants import EventTypes, Membership, RoomVersions from synapse.api.errors import ( AuthError, Codes, @@ -31,7 +31,6 @@ from synapse.api.errors import ( SynapseError, ) from synapse.api.urls import ConsentURIBuilder -from synapse.crypto.event_signing import add_hashes_and_signatures from synapse.events.utils import serialize_event from synapse.events.validator import EventValidator from synapse.replication.http.send_event import ReplicationSendEventRestServlet @@ -545,40 +544,17 @@ class EventCreationHandler(object): prev_events_and_hashes = \ yield self.store.get_prev_events_for_room(builder.room_id) - if prev_events_and_hashes: - depth = max([d for _, _, d in prev_events_and_hashes]) + 1 - # we cap depth of generated events, to ensure that they are not - # rejected by other servers (and so that they can be persisted in - # the db) - depth = min(depth, MAX_DEPTH) - else: - depth = 1 - prev_events = [ (event_id, prev_hashes) for event_id, prev_hashes, _ in prev_events_and_hashes ] - builder.prev_events = prev_events - builder.depth = depth - - context = yield self.state.compute_event_context(builder) - if requester: - context.app_service = requester.app_service - - if builder.is_state(): - builder.prev_state = yield self.store.add_event_hashes( - context.prev_state_events - ) - - yield self.auth.add_auth_events(builder, context) - - signing_key = self.hs.config.signing_key[0] - add_hashes_and_signatures( - builder, self.server_name, signing_key + event = yield builder.build( + prev_event_ids=[p for p, _ in prev_events], ) + context = yield self.state.compute_event_context(event) - event = builder.build() + self.validator.validate_new(event) logger.debug( "Created event %s", diff --git a/synapse/server.py b/synapse/server.py index c8914302cf..6c52101616 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -355,10 +355,7 @@ class HomeServer(object): return Keyring(self) def build_event_builder_factory(self): - return EventBuilderFactory( - clock=self.get_clock(), - hostname=self.hostname, - ) + return EventBuilderFactory(self) def build_filtering(self): return Filtering(self) diff --git a/synapse/storage/event_federation.py 
b/synapse/storage/event_federation.py index d3b9dea1d6..38809ed0fc 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -125,6 +125,29 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, return dict(txn) + @defer.inlineCallbacks + def get_max_depth_of(self, event_ids): + """Returns the max depth of a set of event IDs + + Args: + event_ids (list[str]) + + Returns + Deferred[int] + """ + rows = yield self._simple_select_many_batch( + table="events", + column="event_id", + iterable=event_ids, + retcols=("depth",), + desc="get_max_depth_of", + ) + + if not rows: + defer.returnValue(0) + else: + defer.returnValue(max(row["depth"] for row in rows)) + def _get_oldest_events_in_room_txn(self, txn, room_id): return self._simple_select_onecol_txn( txn, -- cgit 1.4.1 From 6598992b01629062791cd4d98f062aeabd4f7baf Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 11:28:02 +0000 Subject: Fixup calls to `compute_event_signature` We currently pass FrozenEvent instead of `dict` to `compute_event_signature`, which works by accident due to `dict(event)` producing the correct result. This fixes PR #4493 commit 855a151 --- synapse/federation/federation_server.py | 2 +- synapse/handlers/federation.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 4aa04b9588..6681614232 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -322,7 +322,7 @@ class FederationServer(FederationBase): if self.hs.is_mine_id(event.event_id): event.signatures.update( compute_event_signature( - event, + event.get_pdu_json(), self.hs.hostname, self.hs.config.signing_key[0] ) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index adf59db7a8..fcaf7530b0 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1300,7 +1300,7 @@ class FederationHandler(BaseHandler): event.signatures.update( compute_event_signature( - event, + event.get_pdu_json(), self.hs.hostname, self.hs.config.signing_key[0] ) -- cgit 1.4.1 From 7709d2bd167e27493b134e938410c307f8c10396 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 28 Jan 2019 21:09:45 +0000 Subject: Implement rechecking of redactions --- synapse/api/auth.py | 4 ++-- synapse/event_auth.py | 24 ++++++++++++++++++------ synapse/events/__init__.py | 3 +++ synapse/handlers/message.py | 6 +++++- synapse/storage/events_worker.py | 26 +++++++++++++++++++++++++- 5 files changed, 53 insertions(+), 10 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 7b213e54c8..963e0e7d60 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -627,7 +627,7 @@ class Auth(object): defer.returnValue(auth_ids) - def check_redaction(self, event, auth_events): + def check_redaction(self, room_version, event, auth_events): """Check whether the event sender is allowed to redact the target event. Returns: @@ -640,7 +640,7 @@ class Auth(object): AuthError if the event sender is definitely not allowed to redact the target event.
""" - return event_auth.check_redaction(event, auth_events) + return event_auth.check_redaction(room_version, event, auth_events) @defer.inlineCallbacks def check_can_change_room_list(self, room_id, user): diff --git a/synapse/event_auth.py b/synapse/event_auth.py index 9adedbbb02..a95d142f0c 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -20,7 +20,13 @@ from signedjson.key import decode_verify_key_bytes from signedjson.sign import SignatureVerifyException, verify_signed_json from unpaddedbase64 import decode_base64 -from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventTypes, JoinRules, Membership +from synapse.api.constants import ( + KNOWN_ROOM_VERSIONS, + EventTypes, + JoinRules, + Membership, + RoomVersions, +) from synapse.api.errors import AuthError, EventSizeError, SynapseError from synapse.types import UserID, get_domain_from_id @@ -168,7 +174,7 @@ def check(room_version, event, auth_events, do_sig_check=True, do_size_check=Tru _check_power_levels(event, auth_events) if event.type == EventTypes.Redaction: - check_redaction(event, auth_events) + check_redaction(room_version, event, auth_events) logger.debug("Allowing! %s", event) @@ -422,7 +428,7 @@ def _can_send_event(event, auth_events): return True -def check_redaction(event, auth_events): +def check_redaction(room_version, event, auth_events): """Check whether the event sender is allowed to redact the target event. Returns: @@ -442,10 +448,16 @@ def check_redaction(event, auth_events): if user_level >= redact_level: return False - redacter_domain = get_domain_from_id(event.event_id) - redactee_domain = get_domain_from_id(event.redacts) - if redacter_domain == redactee_domain: + if room_version in (RoomVersions.V1, RoomVersions.V2, RoomVersions.VDH_TEST): + redacter_domain = get_domain_from_id(event.event_id) + redactee_domain = get_domain_from_id(event.redacts) + if redacter_domain == redactee_domain: + return True + elif room_version == RoomVersions.V3: + event.internal_metadata.recheck_redaction = True return True + else: + raise RuntimeError("Unrecognized room version %r" % (room_version,)) raise AuthError( 403, diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 3fe52aaa45..70d3c0fbd9 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -63,6 +63,9 @@ class _EventInternalMetadata(object): """ return getattr(self, "send_on_behalf_of", None) + def need_to_check_redaction(self): + return getattr(self, "recheck_redaction", False) + def _event_dict_property(key): # We want to be able to use hasattr with the event dict properties. diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 05d1370c18..0cfced43d5 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -767,7 +767,8 @@ class EventCreationHandler(object): auth_events = { (e.type, e.state_key): e for e in auth_events.values() } - if self.auth.check_redaction(event, auth_events=auth_events): + room_version = yield self.store.get_room_version(event.room_id) + if self.auth.check_redaction(room_version, event, auth_events=auth_events): original_event = yield self.store.get_event( event.redacts, check_redacted=False, @@ -781,6 +782,9 @@ class EventCreationHandler(object): "You don't have permission to redact events" ) + # We've already checked. 
+ event.internal_metadata.recheck_redaction = False + if event.type == EventTypes.Create: prev_state_ids = yield context.get_prev_state_ids(self.store) if prev_state_ids: diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 0a0ca58fc4..9ce19430e8 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -21,13 +21,14 @@ from canonicaljson import json from twisted.internet import defer -from synapse.api.constants import EventFormatVersions +from synapse.api.constants import EventFormatVersions, EventTypes from synapse.api.errors import NotFoundError from synapse.events import FrozenEvent, event_type_from_format_version # noqa: F401 # these are only included to make the type annotations work from synapse.events.snapshot import EventContext # noqa: F401 from synapse.events.utils import prune_event from synapse.metrics.background_process_metrics import run_as_background_process +from synapse.types import get_domain_from_id from synapse.util.logcontext import ( LoggingContext, PreserveLoggingContext, @@ -174,6 +175,29 @@ class EventsWorkerStore(SQLBaseStore): if not entry: continue + # Some redactions in room version v3 need to be rechecked if we + # didn't have the redacted event at the time, so we recheck on read + # instead. + if not allow_rejected and entry.event.type == EventTypes.Redaction: + if entry.event.internal_metadata.need_to_check_redaction(): + orig = yield self.get_event( + entry.event.redacts, + allow_none=True, + allow_rejected=True, + get_prev_content=False, + ) + expected_domain = get_domain_from_id(entry.event.sender) + if orig and get_domain_from_id(orig.sender) == expected_domain: + # This redaction event is allowed. Mark as not needing a + # recheck. + entry.event.recheck_redaction = False + else: + # We don't have the event that is being redacted, so we + # assume that the event isn't authorized for now. (If we + # later receive the event, then we will always redact + # it anyway, since we have this redaction) + continue + if allow_rejected or not entry.event.rejected_reason: if check_redacted and entry.redacted_event: event = entry.redacted_event -- cgit 1.4.1 From 5891a6edc89039cf4fa76217c3f34384a2aa14eb Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 12:09:10 +0000 Subject: Correctly set context.app_service --- synapse/handlers/message.py | 2 ++ 1 file changed, 2 insertions(+) (limited to 'synapse/handlers') diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index ac6f4fd985..37a7dca794 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -553,6 +553,8 @@ class EventCreationHandler(object): prev_event_ids=[p for p, _ in prev_events], ) context = yield self.state.compute_event_context(event) + if requester: + context.app_service = requester.app_service self.validator.validate_new(event) -- cgit 1.4.1 From b1fffca3458c6ba26da1e61e4e70ac6f7419d839 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 16:11:19 +0000 Subject: Remove event ID usage when checking if new room The event ID is changing, so we can no longer get the domain from it. On the other hand, the check is unnecessary. 
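To make that concrete: v1/v2-style event IDs carry a ":domain" suffix, while the newer hash-based IDs do not, so any check that parses a domain out of an event ID (such as the is_mine_id(create_event_id) call removed below) has nothing left to inspect. The same constraint motivates the later switch to is_mine_id(event.sender) in the transaction queue. A minimal, self-contained sketch follows; the example IDs are invented, and the helper only mimics the old "everything after the first colon" logic rather than Synapse's real is_mine_id:

# Illustrative only: why domain checks on event IDs stop working with the new format.
def domain_or_none(event_id):
    """Mimic the old 'take everything after the first colon' extraction."""
    _, sep, domain = event_id.partition(":")
    return domain if sep else None

V1_STYLE_EVENT_ID = "$143273582443PhrSn:example.org"                  # invented v1/v2-style ID
HASH_STYLE_EVENT_ID = "$acR1l0raoZnm60CBwAVgqbZqoO_mYU81xysh1u7XcJk"  # invented hash-style ID

assert domain_or_none(V1_STYLE_EVENT_ID) == "example.org"  # a server name to compare against
assert domain_or_none(HASH_STYLE_EVENT_ID) is None         # nothing to compare, so the check must go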
--- synapse/handlers/room_member.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 9ed5a05cca..2beffdf41e 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -940,7 +940,8 @@ class RoomMemberHandler(object): # first member event? create_event_id = current_state_ids.get(("m.room.create", "")) if len(current_state_ids) == 1 and create_event_id: - defer.returnValue(self.hs.is_mine_id(create_event_id)) + # We can only get here if we're in the process of creating the room + defer.returnValue(True) for etype, state_key in current_state_ids: if etype != EventTypes.Member or not self.hs.is_mine_id(state_key): -- cgit 1.4.1 From 55d90248356b0068b201c5be7298e0f3ae1c8ace Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 16:15:16 +0000 Subject: Use sender and not event ID domain to check if ours The transaction queue only sends out events that we generate. This was done by checking domain of event ID, but that can no longer be used. Instead, we may as well use the sender field. --- synapse/federation/transaction_queue.py | 2 +- synapse/handlers/federation.py | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index fe787abaeb..1f0b67f5f8 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -175,7 +175,7 @@ class TransactionQueue(object): def handle_event(event): # Only send events for this server. send_on_behalf_of = event.internal_metadata.get_send_on_behalf_of() - is_mine = self.is_mine_id(event.event_id) + is_mine = self.is_mine_id(event.sender) if not is_mine and send_on_behalf_of is None: return diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index fcaf7530b0..f89dabb9eb 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -2293,6 +2293,10 @@ class FederationHandler(BaseHandler): EventValidator().validate_new(event) + # We need to tell the transaction queue to send this out, even + # though the sender isn't a local user. + event.internal_metadata.send_on_behalf_of = self.hs.hostname + try: yield self.auth.check_from_context(room_version, event, context) except AuthError as e: @@ -2342,6 +2346,10 @@ class FederationHandler(BaseHandler): raise e yield self._check_signature(event, context) + # We need to tell the transaction queue to send this out, even + # though the sender isn't a local user. + event.internal_metadata.send_on_behalf_of = get_domain_from_id(event.sender) + # XXX we send the invite here, but send_membership_event also sends it, # so we end up making two requests. I think this is redundant.
returned_invite = yield self.send_invite(origin, event) -- cgit 1.4.1 From 3f189c902ea1146a497512049aa38fe9a0a91169 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 30 Jan 2019 10:53:17 +0000 Subject: Fix flake8 (#4519) --- changelog.d/4519.misc | 1 + synapse/_scripts/register_new_matrix_user.py | 4 +- synapse/handlers/directory.py | 4 +- synapse/handlers/federation.py | 2 +- synapse/push/clientformat.py | 2 +- synapse/storage/__init__.py | 2 +- synapse/storage/events.py | 168 +++++++++++++-------------- synapse/storage/events_worker.py | 2 +- tests/storage/test_background_update.py | 2 +- tests/storage/test_end_to_end_keys.py | 3 - tests/storage/test_keys.py | 3 - tests/storage/test_state.py | 3 - 12 files changed, 94 insertions(+), 102 deletions(-) create mode 100644 changelog.d/4519.misc (limited to 'synapse/handlers') diff --git a/changelog.d/4519.misc b/changelog.d/4519.misc new file mode 100644 index 0000000000..897e783d28 --- /dev/null +++ b/changelog.d/4519.misc @@ -0,0 +1 @@ +Fix code to comply with linting in PyFlakes 3.7.1. diff --git a/synapse/_scripts/register_new_matrix_user.py b/synapse/_scripts/register_new_matrix_user.py index 4c3abf06fe..6e93f5a0c6 100644 --- a/synapse/_scripts/register_new_matrix_user.py +++ b/synapse/_scripts/register_new_matrix_user.py @@ -46,7 +46,7 @@ def request_registration( # Get the nonce r = requests.get(url, verify=False) - if r.status_code is not 200: + if r.status_code != 200: _print("ERROR! Received %d %s" % (r.status_code, r.reason)) if 400 <= r.status_code < 500: try: @@ -84,7 +84,7 @@ def request_registration( _print("Sending registration request...") r = requests.post(url, json=data, verify=False) - if r.status_code is not 200: + if r.status_code != 200: _print("ERROR! Received %d %s" % (r.status_code, r.reason)) if 400 <= r.status_code < 500: try: diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 0699731c13..6bb254f899 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -57,8 +57,8 @@ class DirectoryHandler(BaseHandler): # general association creation for both human users and app services for wchar in string.whitespace: - if wchar in room_alias.localpart: - raise SynapseError(400, "Invalid characters in room alias") + if wchar in room_alias.localpart: + raise SynapseError(400, "Invalid characters in room alias") if not self.hs.is_mine(room_alias): raise SynapseError(400, "Room alias must be local") diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index f89dabb9eb..083f2e0ac3 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -102,7 +102,7 @@ class FederationHandler(BaseHandler): self.hs = hs - self.store = hs.get_datastore() # type: synapse.storage.DataStore + self.store = hs.get_datastore() self.federation_client = hs.get_federation_client() self.state_handler = hs.get_state_handler() self.server_name = hs.hostname diff --git a/synapse/push/clientformat.py b/synapse/push/clientformat.py index ecbf364a5e..8bd96b1178 100644 --- a/synapse/push/clientformat.py +++ b/synapse/push/clientformat.py @@ -84,7 +84,7 @@ def _rule_to_template(rule): templaterule["pattern"] = thecond["pattern"] if unscoped_rule_id: - templaterule['rule_id'] = unscoped_rule_id + templaterule['rule_id'] = unscoped_rule_id if 'default' in rule: templaterule['default'] = rule['default'] return templaterule diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 24329879e5..42cd3c83ad 100644 --- a/synapse/storage/__init__.py 
+++ b/synapse/storage/__init__.py @@ -317,7 +317,7 @@ class DataStore(RoomMemberStore, RoomStore, thirty_days_ago_in_secs)) for row in txn: - if row[0] is 'unknown': + if row[0] == 'unknown': pass results[row[0]] = row[1] diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 3e1915fb87..81b250480d 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -904,106 +904,106 @@ class EventsStore(StateGroupWorkerStore, EventFederationStore, EventsWorkerStore def _update_current_state_txn(self, txn, state_delta_by_room, max_stream_order): for room_id, current_state_tuple in iteritems(state_delta_by_room): - to_delete, to_insert = current_state_tuple - - # First we add entries to the current_state_delta_stream. We - # do this before updating the current_state_events table so - # that we can use it to calculate the `prev_event_id`. (This - # allows us to not have to pull out the existing state - # unnecessarily). - sql = """ - INSERT INTO current_state_delta_stream - (stream_id, room_id, type, state_key, event_id, prev_event_id) - SELECT ?, ?, ?, ?, ?, ( - SELECT event_id FROM current_state_events - WHERE room_id = ? AND type = ? AND state_key = ? - ) - """ - txn.executemany(sql, ( - ( - max_stream_order, room_id, etype, state_key, None, - room_id, etype, state_key, - ) - for etype, state_key in to_delete - # We sanity check that we're deleting rather than updating - if (etype, state_key) not in to_insert - )) - txn.executemany(sql, ( - ( - max_stream_order, room_id, etype, state_key, ev_id, - room_id, etype, state_key, - ) - for (etype, state_key), ev_id in iteritems(to_insert) - )) + to_delete, to_insert = current_state_tuple - # Now we actually update the current_state_events table - - txn.executemany( - "DELETE FROM current_state_events" - " WHERE room_id = ? AND type = ? AND state_key = ?", - ( - (room_id, etype, state_key) - for etype, state_key in itertools.chain(to_delete, to_insert) - ), + # First we add entries to the current_state_delta_stream. We + # do this before updating the current_state_events table so + # that we can use it to calculate the `prev_event_id`. (This + # allows us to not have to pull out the existing state + # unnecessarily). + sql = """ + INSERT INTO current_state_delta_stream + (stream_id, room_id, type, state_key, event_id, prev_event_id) + SELECT ?, ?, ?, ?, ?, ( + SELECT event_id FROM current_state_events + WHERE room_id = ? AND type = ? AND state_key = ? ) - - self._simple_insert_many_txn( - txn, - table="current_state_events", - values=[ - { - "event_id": ev_id, - "room_id": room_id, - "type": key[0], - "state_key": key[1], - } - for key, ev_id in iteritems(to_insert) - ], + """ + txn.executemany(sql, ( + ( + max_stream_order, room_id, etype, state_key, None, + room_id, etype, state_key, ) - - txn.call_after( - self._curr_state_delta_stream_cache.entity_has_changed, - room_id, max_stream_order, + for etype, state_key in to_delete + # We sanity check that we're deleting rather than updating + if (etype, state_key) not in to_insert + )) + txn.executemany(sql, ( + ( + max_stream_order, room_id, etype, state_key, ev_id, + room_id, etype, state_key, ) + for (etype, state_key), ev_id in iteritems(to_insert) + )) - # Invalidate the various caches - - # Figure out the changes of membership to invalidate the - # `get_rooms_for_user` cache. - # We find out which membership events we may have deleted - # and which we have added, then we invlidate the caches for all - # those users. 
- members_changed = set( - state_key - for ev_type, state_key in itertools.chain(to_delete, to_insert) - if ev_type == EventTypes.Member - ) + # Now we actually update the current_state_events table - for member in members_changed: - self._invalidate_cache_and_stream( - txn, self.get_rooms_for_user_with_stream_ordering, (member,) - ) + txn.executemany( + "DELETE FROM current_state_events" + " WHERE room_id = ? AND type = ? AND state_key = ?", + ( + (room_id, etype, state_key) + for etype, state_key in itertools.chain(to_delete, to_insert) + ), + ) - for host in set(get_domain_from_id(u) for u in members_changed): - self._invalidate_cache_and_stream( - txn, self.is_host_joined, (room_id, host) - ) - self._invalidate_cache_and_stream( - txn, self.was_host_joined, (room_id, host) - ) + self._simple_insert_many_txn( + txn, + table="current_state_events", + values=[ + { + "event_id": ev_id, + "room_id": room_id, + "type": key[0], + "state_key": key[1], + } + for key, ev_id in iteritems(to_insert) + ], + ) + + txn.call_after( + self._curr_state_delta_stream_cache.entity_has_changed, + room_id, max_stream_order, + ) + + # Invalidate the various caches + + # Figure out the changes of membership to invalidate the + # `get_rooms_for_user` cache. + # We find out which membership events we may have deleted + # and which we have added, then we invlidate the caches for all + # those users. + members_changed = set( + state_key + for ev_type, state_key in itertools.chain(to_delete, to_insert) + if ev_type == EventTypes.Member + ) + for member in members_changed: self._invalidate_cache_and_stream( - txn, self.get_users_in_room, (room_id,) + txn, self.get_rooms_for_user_with_stream_ordering, (member,) ) + for host in set(get_domain_from_id(u) for u in members_changed): self._invalidate_cache_and_stream( - txn, self.get_room_summary, (room_id,) + txn, self.is_host_joined, (room_id, host) ) - self._invalidate_cache_and_stream( - txn, self.get_current_state_ids, (room_id,) + txn, self.was_host_joined, (room_id, host) ) + self._invalidate_cache_and_stream( + txn, self.get_users_in_room, (room_id,) + ) + + self._invalidate_cache_and_stream( + txn, self.get_room_summary, (room_id,) + ) + + self._invalidate_cache_and_stream( + txn, self.get_current_state_ids, (room_id,) + ) + def _update_forward_extremities_txn(self, txn, new_forward_extremities, max_stream_order): for room_id, new_extrem in iteritems(new_forward_extremities): diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index ebe1429acb..57dae324c7 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -220,7 +220,7 @@ class EventsWorkerStore(SQLBaseStore): defer.returnValue(events) def _invalidate_get_event_cache(self, event_id): - self._get_event_cache.invalidate((event_id,)) + self._get_event_cache.invalidate((event_id,)) def _get_events_from_cache(self, events, allow_rejected, update_metrics=True): """Fetch events from the caches diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py index 81403727c5..5568a607c7 100644 --- a/tests/storage/test_background_update.py +++ b/tests/storage/test_background_update.py @@ -11,7 +11,7 @@ class BackgroundUpdateTestCase(unittest.TestCase): def setUp(self): hs = yield setup_test_homeserver( self.addCleanup - ) # type: synapse.server.HomeServer + ) self.store = hs.get_datastore() self.clock = hs.get_clock() diff --git a/tests/storage/test_end_to_end_keys.py b/tests/storage/test_end_to_end_keys.py index 
b83f7336d3..11fb8c0c19 100644 --- a/tests/storage/test_end_to_end_keys.py +++ b/tests/storage/test_end_to_end_keys.py @@ -20,9 +20,6 @@ import tests.utils class EndToEndKeyStoreTestCase(tests.unittest.TestCase): - def __init__(self, *args, **kwargs): - super(EndToEndKeyStoreTestCase, self).__init__(*args, **kwargs) - self.store = None # type: synapse.storage.DataStore @defer.inlineCallbacks def setUp(self): diff --git a/tests/storage/test_keys.py b/tests/storage/test_keys.py index 47f4a8ceac..0d2dc9f325 100644 --- a/tests/storage/test_keys.py +++ b/tests/storage/test_keys.py @@ -22,9 +22,6 @@ import tests.utils class KeyStoreTestCase(tests.unittest.TestCase): - def __init__(self, *args, **kwargs): - super(KeyStoreTestCase, self).__init__(*args, **kwargs) - self.store = None # type: synapse.storage.keys.KeyStore @defer.inlineCallbacks def setUp(self): diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index a1f99134dc..99cd3e09eb 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -28,9 +28,6 @@ logger = logging.getLogger(__name__) class StateStoreTestCase(tests.unittest.TestCase): - def __init__(self, *args, **kwargs): - super(StateStoreTestCase, self).__init__(*args, **kwargs) - self.store = None # type: synapse.storage.DataStore @defer.inlineCallbacks def setUp(self): -- cgit 1.4.1 From 7615a8ced1385460d73dca45fc6534a2fcb64227 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 30 Jan 2019 14:17:55 +0000 Subject: ACME config cleanups (#4525) * Handle listening for ACME requests on IPv6 addresses the weird url-but-not-actually-a-url-string doesn't handle IPv6 addresses without extra quoting. Building a string which you are about to parse again seems like a weird choice. Let's just use listenTCP, which is consistent with what we do elsewhere. * Clean up the default ACME config make it look a bit more consistent with everything else, and tweak the defaults to listen on port 80. * newsfile --- changelog.d/4525.feature | 1 + synapse/app/__init__.py | 25 +++++++++++- synapse/app/_base.py | 22 +---------- synapse/config/tls.py | 100 +++++++++++++++++++++++++++++++++++------------ synapse/handlers/acme.py | 27 +++++++------ 5 files changed, 115 insertions(+), 60 deletions(-) create mode 100644 changelog.d/4525.feature (limited to 'synapse/handlers') diff --git a/changelog.d/4525.feature b/changelog.d/4525.feature new file mode 100644 index 0000000000..c7f595cec2 --- /dev/null +++ b/changelog.d/4525.feature @@ -0,0 +1 @@ + Synapse can now automatically provision TLS certificates via ACME (the protocol used by CAs like Let's Encrypt). diff --git a/synapse/app/__init__.py b/synapse/app/__init__.py index b45adafdd3..f56f5fcc13 100644 --- a/synapse/app/__init__.py +++ b/synapse/app/__init__.py @@ -12,15 +12,38 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +import logging import sys from synapse import python_dependencies # noqa: E402 sys.dont_write_bytecode = True +logger = logging.getLogger(__name__) + try: python_dependencies.check_requirements() except python_dependencies.DependencyException as e: sys.stderr.writelines(e.message) sys.exit(1) + + +def check_bind_error(e, address, bind_addresses): + """ + This method checks an exception occurred while binding on 0.0.0.0. + If :: is specified in the bind addresses a warning is shown. 
+ The exception is still raised otherwise. + + Binding on both 0.0.0.0 and :: causes an exception on Linux and macOS + because :: binds on both IPv4 and IPv6 (as per RFC 3493). + When binding on 0.0.0.0 after :: this can safely be ignored. + + Args: + e (Exception): Exception that was caught. + address (str): Address on which binding was attempted. + bind_addresses (list): Addresses on which the service listens. + """ + if address == '0.0.0.0' and '::' in bind_addresses: + logger.warn('Failed to listen on 0.0.0.0, continuing because listening on [::]') + else: + raise e diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 3840c663ab..5b97a54d45 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -22,6 +22,7 @@ from daemonize import Daemonize from twisted.internet import error, reactor +from synapse.app import check_bind_error from synapse.util import PreserveLoggingContext from synapse.util.rlimit import change_resource_limit @@ -188,24 +189,3 @@ def listen_ssl( logger.info("Synapse now listening on port %d (TLS)", port) return r - - -def check_bind_error(e, address, bind_addresses): - """ - This method checks an exception occurred while binding on 0.0.0.0. - If :: is specified in the bind addresses a warning is shown. - The exception is still raised otherwise. - - Binding on both 0.0.0.0 and :: causes an exception on Linux and macOS - because :: binds on both IPv4 and IPv6 (as per RFC 3493). - When binding on 0.0.0.0 after :: this can safely be ignored. - - Args: - e (Exception): Exception that was caught. - address (str): Address on which binding was attempted. - bind_addresses (list): Addresses on which the service listens. - """ - if address == '0.0.0.0' and '::' in bind_addresses: - logger.warn('Failed to listen on 0.0.0.0, continuing because listening on [::]') - else: - raise e diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 734f612db7..5f63676d9c 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -31,13 +31,16 @@ logger = logging.getLogger() class TlsConfig(Config): def read_config(self, config): - acme_config = config.get("acme", {}) + acme_config = config.get("acme", None) + if acme_config is None: + acme_config = {} + self.acme_enabled = acme_config.get("enabled", False) self.acme_url = acme_config.get( "url", "https://acme-v01.api.letsencrypt.org/directory" ) - self.acme_port = acme_config.get("port", 8449) - self.acme_bind_addresses = acme_config.get("bind_addresses", ["127.0.0.1"]) + self.acme_port = acme_config.get("port", 80) + self.acme_bind_addresses = acme_config.get("bind_addresses", ['::', '0.0.0.0']) self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30) self.tls_certificate_file = self.abspath(config.get("tls_certificate_path")) @@ -126,21 +129,80 @@ class TlsConfig(Config): tls_certificate_path = base_key_name + ".tls.crt" tls_private_key_path = base_key_name + ".tls.key" + # this is to avoid the max line length. Sorrynotsorry + proxypassline = ( + 'ProxyPass /.well-known/acme-challenge ' + 'http://localhost:8009/.well-known/acme-challenge' + ) + return ( """\ - # PEM encoded X509 certificate for TLS. - # This certificate, as of Synapse 1.0, will need to be a valid - # and verifiable certificate, with a root that is available in - # the root store of other servers you wish to federate to. Any - # required intermediary certificates can be appended after the - # primary certificate in hierarchical order. + # PEM-encoded X509 certificate for TLS. 
+ # This certificate, as of Synapse 1.0, will need to be a valid and verifiable + # certificate, signed by a recognised Certificate Authority. + # + # See 'ACME support' below to enable auto-provisioning this certificate via + # Let's Encrypt. + # tls_certificate_path: "%(tls_certificate_path)s" - # PEM encoded private key for TLS + # PEM-encoded private key for TLS tls_private_key_path: "%(tls_private_key_path)s" - # Don't bind to the https port - no_tls: False + # ACME support: This will configure Synapse to request a valid TLS certificate + # for your configured `server_name` via Let's Encrypt. + # + # Note that provisioning a certificate in this way requires port 80 to be + # routed to Synapse so that it can complete the http-01 ACME challenge. + # By default, if you enable ACME support, Synapse will attempt to listen on + # port 80 for incoming http-01 challenges - however, this will likely fail + # with 'Permission denied' or a similar error. + # + # There are a couple of potential solutions to this: + # + # * If you already have an Apache, Nginx, or similar listening on port 80, + # you can configure Synapse to use an alternate port, and have your web + # server forward the requests. For example, assuming you set 'port: 8009' + # below, on Apache, you would write: + # + # %(proxypassline)s + # + # * Alternatively, you can use something like `authbind` to give Synapse + # permission to listen on port 80. + # + acme: + # ACME support is disabled by default. Uncomment the following line + # to enable it. + # + # enabled: true + + # Endpoint to use to request certificates. If you only want to test, + # use Let's Encrypt's staging url: + # https://acme-staging.api.letsencrypt.org/directory + # + # url: https://acme-v01.api.letsencrypt.org/directory + + # Port number to listen on for the HTTP-01 challenge. Change this if + # you are forwarding connections through Apache/Nginx/etc. + # + # port: 80 + + # Local addresses to listen on for incoming connections. + # Again, you may want to change this if you are forwarding connections + # through Apache/Nginx/etc. + # + # bind_addresses: ['::', '0.0.0.0'] + + # How many days remaining on a certificate before it is renewed. + # + # reprovision_threshold: 30 + + # If your server runs behind a reverse-proxy which terminates TLS connections + # (for both client and federation connections), it may be useful to disable + # All TLS support for incoming connections. Setting no_tls to False will + # do so (and avoid the need to give synapse a TLS private key). + # + # no_tls: False # List of allowed TLS fingerprints for this server to publish along # with the signing keys for this server. Other matrix servers that @@ -170,20 +232,6 @@ class TlsConfig(Config): tls_fingerprints: [] # tls_fingerprints: [{"sha256": ""}] - ## Support for ACME certificate auto-provisioning. - # acme: - # enabled: false - ## ACME path. - ## If you only want to test, use the staging url: - ## https://acme-staging.api.letsencrypt.org/directory - # url: 'https://acme-v01.api.letsencrypt.org/directory' - ## Port number (to listen for the HTTP-01 challenge). - ## Using port 80 requires utilising something like authbind, or proxying to it. - # port: 8449 - ## Hosts to bind to. - # bind_addresses: ['127.0.0.1'] - ## How many days remaining on a certificate before it is renewed. 
- # reprovision_threshold: 30 """ % locals() ) diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index 73ea7ed018..dd0b217965 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -18,13 +18,16 @@ import logging import attr from zope.interface import implementer +import twisted +import twisted.internet.error from twisted.internet import defer -from twisted.internet.endpoints import serverFromString from twisted.python.filepath import FilePath from twisted.python.url import URL from twisted.web import server, static from twisted.web.resource import Resource +from synapse.app import check_bind_error + logger = logging.getLogger(__name__) try: @@ -96,16 +99,19 @@ class AcmeHandler(object): srv = server.Site(responder_resource) - listeners = [] - - for host in self.hs.config.acme_bind_addresses: + bind_addresses = self.hs.config.acme_bind_addresses + for host in bind_addresses: logger.info( - "Listening for ACME requests on %s:%s", host, self.hs.config.acme_port - ) - endpoint = serverFromString( - self.reactor, "tcp:%s:interface=%s" % (self.hs.config.acme_port, host) + "Listening for ACME requests on %s:%i", host, self.hs.config.acme_port, ) - listeners.append(endpoint.listen(srv)) + try: + self.reactor.listenTCP( + self.hs.config.acme_port, + srv, + interface=host, + ) + except twisted.internet.error.CannotListenError as e: + check_bind_error(e, host, bind_addresses) # Make sure we are registered to the ACME server. There's no public API # for this, it is usually triggered by startService, but since we don't @@ -114,9 +120,6 @@ class AcmeHandler(object): self._issuer._registered = False yield self._issuer._ensure_registered() - # Return a Deferred that will fire when all the servers have started up. - yield defer.DeferredList(listeners, fireOnOneErrback=True, consumeErrors=True) - @defer.inlineCallbacks def provision_certificate(self): -- cgit 1.4.1 From ed8c5e4cda75f120db32c313c7b24bd15659c5e3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 30 Jan 2019 15:46:27 +0000 Subject: Fix remote invite rejections not coming down sync This was broken in PR #4405, commit 886e5ac, where we changed remote rejections to be outliers. The fix is to explicitly add the leave event in when we know it's an out of band invite. We can't always add the event, as if the server is/was in the room there might be more events to send down the sync than just the leave. --- synapse/handlers/sync.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index f7f768f751..28857bfc1c 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1473,10 +1473,22 @@ class SyncHandler(object): if since_token and since_token.is_after(leave_token): continue + # If this is an out of band message, like a remote invite + # rejection, we include it in the recents batch. Otherwise, we + # let _load_filtered_recents handle fetching the correct + # batches. + # + # This is all screaming out for a refactor, as the logic here is + # subtle and the moving parts numerous.
+ if leave_event.internal_metadata.is_out_of_band_membership(): + batch_events = [leave_event] + else: + batch_events = None + room_entries.append(RoomSyncResultBuilder( room_id=room_id, rtype="archived", - events=None, + events=batch_events, newly_joined=room_id in newly_joined_rooms, full_state=False, since_token=since_token, -- cgit 1.4.1 From d621c5562ea6bce4fd8282da642b3123ae016d94 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Wed, 30 Jan 2019 16:33:51 +0000 Subject: Copy over non-federatable trait on room upgrade --- synapse/handlers/room.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 13ba9291b0..c04ba3a0c5 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -263,6 +263,20 @@ class RoomCreationHandler(BaseHandler): } } + # Check if old room was non-federatable + + # Get old room's create event + old_room_create_event_ids = yield self.store.get_filtered_current_state_ids( + old_room_id, StateFilter.from_types(((EventTypes.Create, ""),)), + ) + old_room_create_event_dict = yield self.store.get_events(old_room_create_event_ids.values()) + old_room_create_event = list(old_room_create_event_dict.values())[0] + + # Check if the create event specified a non-federatable room + if old_room_create_event.content.get("m.federate", True) == False: + # If so, mark the new room as non-federatable as well + creation_content["m.federate"] = False + initial_state = dict() # Replicate relevant room events -- cgit 1.4.1 From fb50934b8ff605a70b2a325eaaf51148a9651de5 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 31 Jan 2019 11:34:45 +0000 Subject: lint --- synapse/handlers/room.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index c04ba3a0c5..a69441b96f 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -269,11 +269,13 @@ class RoomCreationHandler(BaseHandler): old_room_create_event_ids = yield self.store.get_filtered_current_state_ids( old_room_id, StateFilter.from_types(((EventTypes.Create, ""),)), ) - old_room_create_event_dict = yield self.store.get_events(old_room_create_event_ids.values()) + old_room_create_event_dict = yield self.store.get_events( + old_room_create_event_ids.values(), + ) old_room_create_event = list(old_room_create_event_dict.values())[0] # Check if the create event specified a non-federatable room - if old_room_create_event.content.get("m.federate", True) == False: + if not old_room_create_event.content.get("m.federate", True): # If so, mark the new room as non-federatable as well creation_content["m.federate"] = False -- cgit 1.4.1 From 3ed3cb43394b41e76f4739f22760c1d8ebfed3c7 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 31 Jan 2019 18:21:39 +0000 Subject: New function for getting room's create event --- synapse/handlers/room.py | 8 +------- synapse/storage/state.py | 31 ++++++++++++++++++++----------- 2 files changed, 21 insertions(+), 18 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index a69441b96f..5e40e9ea46 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -266,13 +266,7 @@ class RoomCreationHandler(BaseHandler): # Check if old room was non-federatable # Get old room's create event - old_room_create_event_ids = yield self.store.get_filtered_current_state_ids( - old_room_id, 
StateFilter.from_types(((EventTypes.Create, ""),)), - ) - old_room_create_event_dict = yield self.store.get_events( - old_room_create_event_ids.values(), - ) - old_room_create_event = list(old_room_create_event_dict.values())[0] + old_room_create_event = yield self.store.get_create_event_for_room(old_room_id) # Check if the create event specified a non-federatable room if not old_room_create_event.content.get("m.federate", True): diff --git a/synapse/storage/state.py b/synapse/storage/state.py index c3ab7db7ae..522aaee918 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -24,7 +24,6 @@ import attr from twisted.internet import defer from synapse.api.constants import EventTypes -from synapse.api.errors import NotFoundError from synapse.storage._base import SQLBaseStore from synapse.storage.background_updates import BackgroundUpdateStore from synapse.storage.engines import PostgresEngine @@ -428,13 +427,9 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): """ # for now we do this by looking at the create event. We may want to cache this # more intelligently in future. - state_ids = yield self.get_current_state_ids(room_id) - create_id = state_ids.get((EventTypes.Create, "")) - - if not create_id: - raise NotFoundError("Unknown room %s" % (room_id)) - create_event = yield self.get_event(create_id) + # Retrieve the room's create event + create_event = yield self.get_create_event_for_room(room_id) defer.returnValue(create_event.content.get("room_version", "1")) @defer.inlineCallbacks @@ -448,6 +443,22 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): Returns: Deferred[unicode|None]: predecessor room id """ + # Retrieve the room's create event + create_event = yield self.get_create_event_for_room(room_id) + + # Return predecessor if present + defer.returnValue(create_event.content.get("predecessor", None)) + + @defer.inlineCallbacks + def get_create_event_for_room(self, room_id): + """Get the create state event for a room. + + Args: + room_id (str) + + Returns: + Deferred[EventBase|None]: The room creation event. None if can not be found + """ state_ids = yield self.get_current_state_ids(room_id) create_id = state_ids.get((EventTypes.Create, "")) @@ -455,11 +466,9 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): if not create_id: defer.returnValue(None) - # Retrieve the room's create event + # Retrieve the room's create event and return create_event = yield self.get_event(create_id) - - # Return predecessor if present - defer.returnValue(create_event.content.get("predecessor", None)) + defer.returnValue(create_event) @cached(max_entries=100000, iterable=True) def get_current_state_ids(self, room_id): -- cgit 1.4.1 From 07dfe148de2fbc6eb22be662ed8216a6e11f6811 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 31 Jan 2019 18:30:40 +0000 Subject: Add some debug for membership syncing issues (#4538) I can't figure out what's going on with #4422 and #4436; perhaps this will help. 
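For context, the patch below adds two INFO-level log lines to the sync path. Rendering them with invented values gives roughly the following; this is only a sketch that re-applies the %-style format strings from the diff, the user, tokens, room and event IDs are made up, and in the real code the first argument is a UserID object rather than a plain string, so its %r rendering will differ slightly:

# Sketch only: what the new sync debug lines look like, with invented values.
user, since_token, now_token = "@alice:example.org", "s72594_4483", "s72601_4490"
print("Calculating sync response for %r between %s and %s" % (user, since_token, now_token))
# -> Calculating sync response for '@alice:example.org' between s72594_4483 and s72601_4490

room_id = "!abcdefgh:example.org"
changes = [("$event1:example.org", "join"), ("$event2:example.org", "leave")]
print("Membership changes in %s: [%s]" % (
    room_id,
    ", ".join("%s (%s)" % (event_id, membership) for event_id, membership in changes),
))
# -> Membership changes in !abcdefgh:example.org: [$event1:example.org (join), $event2:example.org (leave)]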
--- changelog.d/4538.misc | 1 + synapse/handlers/sync.py | 13 +++++++++++-- 2 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 changelog.d/4538.misc (limited to 'synapse/handlers') diff --git a/changelog.d/4538.misc b/changelog.d/4538.misc new file mode 100644 index 0000000000..dbc878b09c --- /dev/null +++ b/changelog.d/4538.misc @@ -0,0 +1 @@ +Add some debug for membership syncing issues diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 28857bfc1c..bd97241ab4 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -895,14 +895,17 @@ class SyncHandler(object): Returns: Deferred(SyncResult) """ - logger.info("Calculating sync response for %r", sync_config.user) - # NB: The now_token gets changed by some of the generate_sync_* methods, # this is due to some of the underlying streams not supporting the ability # to query up to a given point. # Always use the `now_token` in `SyncResultBuilder` now_token = yield self.event_sources.get_current_token() + logger.info( + "Calculating sync response for %r between %s and %s", + sync_config.user, since_token, now_token, + ) + user_id = sync_config.user.to_string() app_service = self.store.get_app_service_by_user_id(user_id) if app_service: @@ -1390,6 +1393,12 @@ class SyncHandler(object): room_entries = [] invited = [] for room_id, events in iteritems(mem_change_events_by_room_id): + logger.info( + "Membership changes in %s: [%s]", + room_id, + ", ".join(("%s (%s)" % (e.event_id, e.membership) for e in events)), + ) + non_joins = [e for e in events if e.membership != Membership.JOIN] has_join = len(non_joins) != len(events) -- cgit 1.4.1 From 664c81e8b7525bfaa5c3a7620f2f831ade0754a2 Mon Sep 17 00:00:00 2001 From: Hubert Chathi Date: Wed, 6 Feb 2019 17:47:22 -0500 Subject: return proper error codes for some 404s --- synapse/handlers/e2e_room_keys.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 42b040375f..c5d7bf0c29 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -267,7 +267,7 @@ class E2eRoomKeysHandler(object): version(str): Optional; if None gives the most recent version otherwise a historical one. Raises: - StoreError: code 404 if the requested backup version doesn't exist + NotFoundError: if the requested backup version doesn't exist Returns: A deferred of a info dict that gives the info about the new version. 
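The user-visible effect of raising NotFoundError instead of letting a bare StoreError propagate is the error body a client receives for an unknown backup version: NotFoundError serialises as a standard Matrix error with the M_NOT_FOUND errcode and a 404 status. A rough sketch of the expected response body follows; the error string comes from the handler change below, and the errcode mapping is assumed from how Synapse serialises NotFoundError rather than spelled out in this patch:

# Illustrative only: approximate JSON error body for a request that names an
# unknown backup version, once NotFoundError is raised instead of StoreError.
expected_response = {
    "errcode": "M_NOT_FOUND",
    "error": "Unknown backup version",
}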
@@ -279,7 +279,13 @@ class E2eRoomKeysHandler(object): """ with (yield self._upload_linearizer.queue(user_id)): - res = yield self.store.get_e2e_room_keys_version_info(user_id, version) + try: + res = yield self.store.get_e2e_room_keys_version_info(user_id, version) + except StoreError as e: + if e.code == 404: + raise NotFoundError("Unknown backup version") + else: + raise defer.returnValue(res) @defer.inlineCallbacks @@ -290,8 +296,14 @@ class E2eRoomKeysHandler(object): user_id(str): the user whose current backup version we're deleting version(str): the version id of the backup being deleted Raises: - StoreError: code 404 if this backup version doesn't exist + NotFoundError: if this backup version doesn't exist """ with (yield self._upload_linearizer.queue(user_id)): - yield self.store.delete_e2e_room_keys_version(user_id, version) + try: + yield self.store.delete_e2e_room_keys_version(user_id, version) + except StoreError as e: + if e.code == 404: + raise NotFoundError("Unknown backup version") + else: + raise -- cgit 1.4.1 From 82486371738a130322f3a1829bc4b49f79c1a3e4 Mon Sep 17 00:00:00 2001 From: Hubert Chathi Date: Wed, 6 Feb 2019 17:57:10 -0500 Subject: add new endpoint to update backup versions --- synapse/handlers/e2e_room_keys.py | 34 ++++++++++++++++++++++++++++++- synapse/rest/client/v2_alpha/room_keys.py | 33 ++++++++++++++++++++++++++++++ synapse/storage/e2e_room_keys.py | 21 +++++++++++++++++++ 3 files changed, 87 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index c5d7bf0c29..e0e5ece747 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -19,7 +19,8 @@ from six import iteritems from twisted.internet import defer -from synapse.api.errors import NotFoundError, RoomKeysVersionError, StoreError +from synapse.api.errors import Codes, NotFoundError, RoomKeysVersionError, \ + StoreError, SynapseError from synapse.util.async_helpers import Linearizer logger = logging.getLogger(__name__) @@ -307,3 +308,34 @@ class E2eRoomKeysHandler(object): raise NotFoundError("Unknown backup version") else: raise + + @defer.inlineCallbacks + def update_version(self, user_id, version, version_info): + """Update the info about a given version of the user's backup + + Args: + user_id(str): the user whose current backup version we're updating + version(str): the backup version we're updating + version_info(dict): the new information about the backup + Raises: + NotFoundError: if the requested backup version doesn't exist + Returns: + A deferred of an empty dict. 
+ """ + try: + old_info = yield self.store.get_e2e_room_keys_version_info(user_id, version) + except StoreError as e: + if e.code == 404: + raise NotFoundError("Unknown backup version") + else: + raise + if old_info["algorithm"] != version_info["algorithm"]: + raise SynapseError( + 400, + "Algorithm does not match", + Codes.INVALID_PARAM + ) + + yield self.store.update_e2e_room_keys_version(user_id, version, version_info) + + defer.returnValue({}) diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index ab3f1bd21a..1c39d2af1c 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -380,6 +380,39 @@ class RoomKeysVersionServlet(RestServlet): ) defer.returnValue((200, {})) + @defer.inlineCallbacks + def on_PUT(self, request, version): + """ + Update the information about a given version of the user's room_keys backup. + + POST /room_keys/version/12345 HTTP/1.1 + Content-Type: application/json + { + "algorithm": "m.megolm_backup.v1", + "auth_data": { + "public_key": "abcdefg", + "signatures": { + "ed25519:something": "hijklmnop" + } + } + } + + HTTP/1.1 200 OK + Content-Type: application/json + {} + """ + requester = yield self.auth.get_user_by_req(request, allow_guest=False) + user_id = requester.user.to_string() + info = parse_json_object_from_request(request) + + if version is None: + raise SynapseError(400, "No version specified to update", Codes.MISSING_PARAM) + + yield self.e2e_room_keys_handler.update_version( + user_id, version, info + ) + defer.returnValue((200, {})) + def register_servlets(hs, http_server): RoomKeysServlet(hs).register(http_server) diff --git a/synapse/storage/e2e_room_keys.py b/synapse/storage/e2e_room_keys.py index 45cebe61d1..9a3aec759e 100644 --- a/synapse/storage/e2e_room_keys.py +++ b/synapse/storage/e2e_room_keys.py @@ -298,6 +298,27 @@ class EndToEndRoomKeyStore(SQLBaseStore): "create_e2e_room_keys_version_txn", _create_e2e_room_keys_version_txn ) + def update_e2e_room_keys_version(self, user_id, version, info): + """Update a given backup version + + Args: + user_id(str): the user whose backup version we're updating + version(str): the version ID of the backup version we're updating + info(dict): the new backup version info to store + """ + + return self._simple_update( + table="e2e_room_keys_versions", + keyvalues={ + "user_id": user_id, + "version": version, + }, + updatevalues={ + "auth_data": json.dumps(info["auth_data"]), + }, + desc="update_e2e_room_keys_version" + ) + def delete_e2e_room_keys_version(self, user_id, version=None): """Delete a given backup version of the user's room keys. Doesn't delete their actual key data. 
-- cgit 1.4.1 From 9ff620a518739bfe7417bac20365e4acf9c5906e Mon Sep 17 00:00:00 2001 From: Hubert Chathi Date: Wed, 6 Feb 2019 21:32:52 -0500 Subject: fix import to make isort happy --- synapse/handlers/e2e_room_keys.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index e0e5ece747..6c43c9db76 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -19,8 +19,13 @@ from six import iteritems from twisted.internet import defer -from synapse.api.errors import Codes, NotFoundError, RoomKeysVersionError, \ - StoreError, SynapseError +from synapse.api.errors import ( + Codes, + NotFoundError, + RoomKeysVersionError, + StoreError, + SynapseError +) from synapse.util.async_helpers import Linearizer logger = logging.getLogger(__name__) -- cgit 1.4.1 From d9e424bf649c1d4ba7b9da7fd64db84cda389e11 Mon Sep 17 00:00:00 2001 From: Hubert Chathi Date: Wed, 6 Feb 2019 22:18:41 -0500 Subject: re-try to make isort happy --- synapse/handlers/e2e_room_keys.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 6c43c9db76..546050f8e7 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -24,7 +24,7 @@ from synapse.api.errors import ( NotFoundError, RoomKeysVersionError, StoreError, - SynapseError + SynapseError, ) from synapse.util.async_helpers import Linearizer -- cgit 1.4.1 From afae8442b56d2e2466812916654396341038c38c Mon Sep 17 00:00:00 2001 From: Hubert Chathi Date: Fri, 8 Feb 2019 01:32:45 -0500 Subject: make sure version is in body and wrap in linearizer queue also add tests --- synapse/handlers/e2e_room_keys.py | 37 +++++++++++----- synapse/rest/client/v2_alpha/room_keys.py | 3 +- tests/handlers/test_e2e_room_keys.py | 72 +++++++++++++++++++++++++++++++ 3 files changed, 100 insertions(+), 12 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 546050f8e7..7bc174070e 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -327,20 +327,35 @@ class E2eRoomKeysHandler(object): Returns: A deferred of an empty dict. 
""" - try: - old_info = yield self.store.get_e2e_room_keys_version_info(user_id, version) - except StoreError as e: - if e.code == 404: - raise NotFoundError("Unknown backup version") - else: - raise - if old_info["algorithm"] != version_info["algorithm"]: + if "version" not in version_info: + raise SynapseError( + 400, + "Missing version in body", + Codes.MISSING_PARAM + ) + if version_info["version"] != version: raise SynapseError( 400, - "Algorithm does not match", + "Version in body does not match", Codes.INVALID_PARAM ) + with (yield self._upload_linearizer.queue(user_id)): + try: + old_info = yield self.store.get_e2e_room_keys_version_info( + user_id, version + ) + except StoreError as e: + if e.code == 404: + raise NotFoundError("Unknown backup version") + else: + raise + if old_info["algorithm"] != version_info["algorithm"]: + raise SynapseError( + 400, + "Algorithm does not match", + Codes.INVALID_PARAM + ) - yield self.store.update_e2e_room_keys_version(user_id, version, version_info) + yield self.store.update_e2e_room_keys_version(user_id, version, version_info) - defer.returnValue({}) + defer.returnValue({}) diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index 1c39d2af1c..220a0de30b 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -394,7 +394,8 @@ class RoomKeysVersionServlet(RestServlet): "signatures": { "ed25519:something": "hijklmnop" } - } + }, + "version": "42" } HTTP/1.1 200 OK diff --git a/tests/handlers/test_e2e_room_keys.py b/tests/handlers/test_e2e_room_keys.py index c8994f416e..1c49bbbc3c 100644 --- a/tests/handlers/test_e2e_room_keys.py +++ b/tests/handlers/test_e2e_room_keys.py @@ -125,6 +125,78 @@ class E2eRoomKeysHandlerTestCase(unittest.TestCase): "auth_data": "second_version_auth_data", }) + @defer.inlineCallbacks + def test_update_version(self): + """Check that we can update versions. 
+ """ + version = yield self.handler.create_version(self.local_user, { + "algorithm": "m.megolm_backup.v1", + "auth_data": "first_version_auth_data", + }) + self.assertEqual(version, "1") + + res = yield self.handler.update_version(self.local_user, version, { + "algorithm": "m.megolm_backup.v1", + "auth_data": "revised_first_version_auth_data", + "version": version + }) + self.assertDictEqual(res, {}) + + # check we can retrieve it as the current version + res = yield self.handler.get_version_info(self.local_user) + self.assertDictEqual(res, { + "algorithm": "m.megolm_backup.v1", + "auth_data": "revised_first_version_auth_data", + "version": version + }) + + @defer.inlineCallbacks + def test_update_missing_version(self): + """Check that we get a 404 on updating nonexistent versions + """ + res = None + try: + yield self.handler.update_version(self.local_user, "1", { + "algorithm": "m.megolm_backup.v1", + "auth_data": "revised_first_version_auth_data", + "version": "1" + }) + except errors.SynapseError as e: + res = e.code + self.assertEqual(res, 404) + + @defer.inlineCallbacks + def test_update_bad_version(self): + """Check that we get a 400 if the version in the body is missing or + doesn't match + """ + version = yield self.handler.create_version(self.local_user, { + "algorithm": "m.megolm_backup.v1", + "auth_data": "first_version_auth_data", + }) + self.assertEqual(version, "1") + + res = None + try: + yield self.handler.update_version(self.local_user, version, { + "algorithm": "m.megolm_backup.v1", + "auth_data": "revised_first_version_auth_data" + }) + except errors.SynapseError as e: + res = e.code + self.assertEqual(res, 400) + + res = None + try: + yield self.handler.update_version(self.local_user, version, { + "algorithm": "m.megolm_backup.v1", + "auth_data": "revised_first_version_auth_data", + "version": "incorrect" + }) + except errors.SynapseError as e: + res = e.code + self.assertEqual(res, 400) + @defer.inlineCallbacks def test_delete_missing_version(self): """Check that we get a 404 on deleting nonexistent versions -- cgit 1.4.1 From a126f86eec53cd4807a1300bea885122f8559944 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Mon, 11 Feb 2019 11:30:37 +0000 Subject: Transfer Server ACLs on room upgrade --- synapse/handlers/room.py | 1 + 1 file changed, 1 insertion(+) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 5e40e9ea46..f9af1f0046 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -284,6 +284,7 @@ class RoomCreationHandler(BaseHandler): (EventTypes.GuestAccess, ""), (EventTypes.RoomAvatar, ""), (EventTypes.Encryption, ""), + (EventTypes.ServerACL, ""), ) old_room_state_ids = yield self.store.get_filtered_current_state_ids( -- cgit 1.4.1 From bb4fd8f927807bdd6efb08ee65ec01269e000417 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 13 Feb 2019 23:05:32 +1100 Subject: Run `black` on user directory code (#4635) --- changelog.d/4635.misc | 1 + synapse/handlers/user_directory.py | 94 ++++++++++++----------- synapse/storage/user_directory.py | 153 ++++++++++++++++--------------------- 3 files changed, 117 insertions(+), 131 deletions(-) create mode 100644 changelog.d/4635.misc (limited to 'synapse/handlers') diff --git a/changelog.d/4635.misc b/changelog.d/4635.misc new file mode 100644 index 0000000000..0f45957b84 --- /dev/null +++ b/changelog.d/4635.misc @@ -0,0 +1 @@ +Run `black` to reformat user directory code. 
diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index 120815b09b..283c6c1b81 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -130,7 +130,7 @@ class UserDirectoryHandler(object): # Support users are for diagnostics and should not appear in the user directory. if not is_support: yield self.store.update_profile_in_user_dir( - user_id, profile.display_name, profile.avatar_url, None, + user_id, profile.display_name, profile.avatar_url, None ) @defer.inlineCallbacks @@ -166,8 +166,9 @@ class UserDirectoryHandler(object): self.pos = deltas[-1]["stream_id"] # Expose current event processing position to prometheus - synapse.metrics.event_processing_positions.labels( - "user_dir").set(self.pos) + synapse.metrics.event_processing_positions.labels("user_dir").set( + self.pos + ) yield self.store.update_user_directory_stream_pos(self.pos) @@ -191,21 +192,25 @@ class UserDirectoryHandler(object): logger.info("Handling room %d/%d", num_processed_rooms + 1, len(room_ids)) yield self._handle_initial_room(room_id) num_processed_rooms += 1 - yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.) + yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.0) logger.info("Processed all rooms.") if self.search_all_users: num_processed_users = 0 user_ids = yield self.store.get_all_local_users() - logger.info("Doing initial update of user directory. %d users", len(user_ids)) + logger.info( + "Doing initial update of user directory. %d users", len(user_ids) + ) for user_id in user_ids: # We add profiles for all users even if they don't match the # include pattern, just in case we want to change it in future - logger.info("Handling user %d/%d", num_processed_users + 1, len(user_ids)) + logger.info( + "Handling user %d/%d", num_processed_users + 1, len(user_ids) + ) yield self._handle_local_user(user_id) num_processed_users += 1 - yield self.clock.sleep(self.INITIAL_USER_SLEEP_MS / 1000.) + yield self.clock.sleep(self.INITIAL_USER_SLEEP_MS / 1000.0) logger.info("Processed all users") @@ -224,24 +229,24 @@ class UserDirectoryHandler(object): if not is_in_room: return - is_public = yield self.store.is_room_world_readable_or_publicly_joinable(room_id) + is_public = yield self.store.is_room_world_readable_or_publicly_joinable( + room_id + ) users_with_profile = yield self.state.get_current_user_in_room(room_id) user_ids = set(users_with_profile) unhandled_users = user_ids - self.initially_handled_users yield self.store.add_profiles_to_user_dir( - room_id, { - user_id: users_with_profile[user_id] for user_id in unhandled_users - } + room_id, + {user_id: users_with_profile[user_id] for user_id in unhandled_users}, ) self.initially_handled_users |= unhandled_users if is_public: yield self.store.add_users_to_public_room( - room_id, - user_ids=user_ids - self.initially_handled_users_in_public + room_id, user_ids=user_ids - self.initially_handled_users_in_public ) self.initially_handled_users_in_public |= user_ids @@ -253,7 +258,7 @@ class UserDirectoryHandler(object): count = 0 for user_id in user_ids: if count % self.INITIAL_ROOM_SLEEP_COUNT == 0: - yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.) + yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.0) if not self.is_mine_id(user_id): count += 1 @@ -268,7 +273,7 @@ class UserDirectoryHandler(object): continue if count % self.INITIAL_ROOM_SLEEP_COUNT == 0: - yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.) 
+ yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.0) count += 1 user_set = (user_id, other_user_id) @@ -290,25 +295,23 @@ class UserDirectoryHandler(object): if len(to_insert) > self.INITIAL_ROOM_BATCH_SIZE: yield self.store.add_users_who_share_room( - room_id, not is_public, to_insert, + room_id, not is_public, to_insert ) to_insert.clear() if len(to_update) > self.INITIAL_ROOM_BATCH_SIZE: yield self.store.update_users_who_share_room( - room_id, not is_public, to_update, + room_id, not is_public, to_update ) to_update.clear() if to_insert: - yield self.store.add_users_who_share_room( - room_id, not is_public, to_insert, - ) + yield self.store.add_users_who_share_room(room_id, not is_public, to_insert) to_insert.clear() if to_update: yield self.store.update_users_who_share_room( - room_id, not is_public, to_update, + room_id, not is_public, to_update ) to_update.clear() @@ -329,11 +332,12 @@ class UserDirectoryHandler(object): # may have become public or not and add/remove the users in said room if typ in (EventTypes.RoomHistoryVisibility, EventTypes.JoinRules): yield self._handle_room_publicity_change( - room_id, prev_event_id, event_id, typ, + room_id, prev_event_id, event_id, typ ) elif typ == EventTypes.Member: change = yield self._get_key_change( - prev_event_id, event_id, + prev_event_id, + event_id, key_name="membership", public_value=Membership.JOIN, ) @@ -342,14 +346,16 @@ class UserDirectoryHandler(object): # Need to check if the server left the room entirely, if so # we might need to remove all the users in that room is_in_room = yield self.store.is_host_joined( - room_id, self.server_name, + room_id, self.server_name ) if not is_in_room: logger.info("Server left room: %r", room_id) # Fetch all the users that we marked as being in user # directory due to being in the room and then check if # need to remove those users or not - user_ids = yield self.store.get_users_in_dir_due_to_room(room_id) + user_ids = yield self.store.get_users_in_dir_due_to_room( + room_id + ) for user_id in user_ids: yield self._handle_remove_user(room_id, user_id) return @@ -361,7 +367,7 @@ class UserDirectoryHandler(object): if change is None: # Handle any profile changes yield self._handle_profile_change( - state_key, room_id, prev_event_id, event_id, + state_key, room_id, prev_event_id, event_id ) continue @@ -393,13 +399,15 @@ class UserDirectoryHandler(object): if typ == EventTypes.RoomHistoryVisibility: change = yield self._get_key_change( - prev_event_id, event_id, + prev_event_id, + event_id, key_name="history_visibility", public_value="world_readable", ) elif typ == EventTypes.JoinRules: change = yield self._get_key_change( - prev_event_id, event_id, + prev_event_id, + event_id, key_name="join_rule", public_value=JoinRules.PUBLIC, ) @@ -524,7 +532,7 @@ class UserDirectoryHandler(object): ) if self.is_mine_id(other_user_id) and not is_appservice: shared_is_private = yield self.store.get_if_users_share_a_room( - other_user_id, user_id, + other_user_id, user_id ) if shared_is_private is True: # We've already marked in the database they share a private room @@ -539,13 +547,11 @@ class UserDirectoryHandler(object): to_insert.add((other_user_id, user_id)) if to_insert: - yield self.store.add_users_who_share_room( - room_id, not is_public, to_insert, - ) + yield self.store.add_users_who_share_room(room_id, not is_public, to_insert) if to_update: yield self.store.update_users_who_share_room( - room_id, not is_public, to_update, + room_id, not is_public, to_update ) @defer.inlineCallbacks @@ 
-564,15 +570,15 @@ class UserDirectoryHandler(object): row = yield self.store.get_user_in_public_room(user_id) update_user_in_public = row and row["room_id"] == room_id - if (update_user_in_public or update_user_dir): + if update_user_in_public or update_user_dir: # XXX: Make this faster? rooms = yield self.store.get_rooms_for_user(user_id) for j_room_id in rooms: - if (not update_user_in_public and not update_user_dir): + if not update_user_in_public and not update_user_dir: break is_in_room = yield self.store.is_host_joined( - j_room_id, self.server_name, + j_room_id, self.server_name ) if not is_in_room: @@ -600,19 +606,19 @@ class UserDirectoryHandler(object): # Get a list of user tuples that were in the DB due to this room and # users (this includes tuples where the other user matches `user_id`) user_tuples = yield self.store.get_users_in_share_dir_with_room_id( - user_id, room_id, + user_id, room_id ) for user_id, other_user_id in user_tuples: # For each user tuple get a list of rooms that they still share, # trying to find a private room, and update the entry in the DB - rooms = yield self.store.get_rooms_in_common_for_users(user_id, other_user_id) + rooms = yield self.store.get_rooms_in_common_for_users( + user_id, other_user_id + ) # If they dont share a room anymore, remove the mapping if not rooms: - yield self.store.remove_user_who_share_room( - user_id, other_user_id, - ) + yield self.store.remove_user_who_share_room(user_id, other_user_id) continue found_public_share = None @@ -626,13 +632,13 @@ class UserDirectoryHandler(object): else: found_public_share = None yield self.store.update_users_who_share_room( - room_id, not is_public, [(user_id, other_user_id)], + room_id, not is_public, [(user_id, other_user_id)] ) break if found_public_share: yield self.store.update_users_who_share_room( - room_id, not is_public, [(user_id, other_user_id)], + room_id, not is_public, [(user_id, other_user_id)] ) @defer.inlineCallbacks @@ -660,7 +666,7 @@ class UserDirectoryHandler(object): if prev_name != new_name or prev_avatar != new_avatar: yield self.store.update_profile_in_user_dir( - user_id, new_name, new_avatar, room_id, + user_id, new_name, new_avatar, room_id ) @defer.inlineCallbacks diff --git a/synapse/storage/user_directory.py b/synapse/storage/user_directory.py index e8b574ee5e..fea866c043 100644 --- a/synapse/storage/user_directory.py +++ b/synapse/storage/user_directory.py @@ -44,7 +44,7 @@ class UserDirectoryStore(SQLBaseStore): ) current_state_ids = yield self.get_filtered_current_state_ids( - room_id, StateFilter.from_types(types_to_filter), + room_id, StateFilter.from_types(types_to_filter) ) join_rules_id = current_state_ids.get((EventTypes.JoinRules, "")) @@ -74,14 +74,8 @@ class UserDirectoryStore(SQLBaseStore): """ yield self._simple_insert_many( table="users_in_public_rooms", - values=[ - { - "user_id": user_id, - "room_id": room_id, - } - for user_id in user_ids - ], - desc="add_users_to_public_room" + values=[{"user_id": user_id, "room_id": room_id} for user_id in user_ids], + desc="add_users_to_public_room", ) for user_id in user_ids: self.get_user_in_public_room.invalidate((user_id,)) @@ -107,7 +101,9 @@ class UserDirectoryStore(SQLBaseStore): """ args = ( ( - user_id, get_localpart_from_id(user_id), get_domain_from_id(user_id), + user_id, + get_localpart_from_id(user_id), + get_domain_from_id(user_id), profile.display_name, ) for user_id, profile in iteritems(users_with_profile) @@ -120,7 +116,7 @@ class UserDirectoryStore(SQLBaseStore): args = ( ( user_id, - "%s 
%s" % (user_id, p.display_name,) if p.display_name else user_id + "%s %s" % (user_id, p.display_name) if p.display_name else user_id, ) for user_id, p in iteritems(users_with_profile) ) @@ -141,12 +137,10 @@ class UserDirectoryStore(SQLBaseStore): "avatar_url": profile.avatar_url, } for user_id, profile in iteritems(users_with_profile) - ] + ], ) for user_id in users_with_profile: - txn.call_after( - self.get_user_in_directory.invalidate, (user_id,) - ) + txn.call_after(self.get_user_in_directory.invalidate, (user_id,)) return self.runInteraction( "add_profiles_to_user_dir", _add_profiles_to_user_dir_txn @@ -188,9 +182,11 @@ class UserDirectoryStore(SQLBaseStore): txn.execute( sql, ( - user_id, get_localpart_from_id(user_id), - get_domain_from_id(user_id), display_name, - ) + user_id, + get_localpart_from_id(user_id), + get_domain_from_id(user_id), + display_name, + ), ) else: # TODO: Remove this code after we've bumped the minimum version @@ -208,9 +204,11 @@ class UserDirectoryStore(SQLBaseStore): txn.execute( sql, ( - user_id, get_localpart_from_id(user_id), - get_domain_from_id(user_id), display_name, - ) + user_id, + get_localpart_from_id(user_id), + get_domain_from_id(user_id), + display_name, + ), ) elif new_entry is False: sql = """ @@ -225,15 +223,16 @@ class UserDirectoryStore(SQLBaseStore): ( get_localpart_from_id(user_id), get_domain_from_id(user_id), - display_name, user_id, - ) + display_name, + user_id, + ), ) else: raise RuntimeError( "upsert returned None when 'can_native_upsert' is False" ) elif isinstance(self.database_engine, Sqlite3Engine): - value = "%s %s" % (user_id, display_name,) if display_name else user_id + value = "%s %s" % (user_id, display_name) if display_name else user_id self._simple_upsert_txn( txn, table="user_directory_search", @@ -264,29 +263,18 @@ class UserDirectoryStore(SQLBaseStore): def remove_from_user_dir(self, user_id): def _remove_from_user_dir_txn(txn): self._simple_delete_txn( - txn, - table="user_directory", - keyvalues={"user_id": user_id}, + txn, table="user_directory", keyvalues={"user_id": user_id} ) self._simple_delete_txn( - txn, - table="user_directory_search", - keyvalues={"user_id": user_id}, + txn, table="user_directory_search", keyvalues={"user_id": user_id} ) self._simple_delete_txn( - txn, - table="users_in_public_rooms", - keyvalues={"user_id": user_id}, - ) - txn.call_after( - self.get_user_in_directory.invalidate, (user_id,) - ) - txn.call_after( - self.get_user_in_public_room.invalidate, (user_id,) + txn, table="users_in_public_rooms", keyvalues={"user_id": user_id} ) - return self.runInteraction( - "remove_from_user_dir", _remove_from_user_dir_txn, - ) + txn.call_after(self.get_user_in_directory.invalidate, (user_id,)) + txn.call_after(self.get_user_in_public_room.invalidate, (user_id,)) + + return self.runInteraction("remove_from_user_dir", _remove_from_user_dir_txn) @defer.inlineCallbacks def remove_from_user_in_public_room(self, user_id): @@ -371,6 +359,7 @@ class UserDirectoryStore(SQLBaseStore): share_private (bool): Is the room private user_id_tuples([(str, str)]): iterable of 2-tuple of user IDs. 
""" + def _add_users_who_share_room_txn(txn): self._simple_insert_many_txn( txn, @@ -387,13 +376,12 @@ class UserDirectoryStore(SQLBaseStore): ) for user_id, other_user_id in user_id_tuples: txn.call_after( - self.get_users_who_share_room_from_dir.invalidate, - (user_id,), + self.get_users_who_share_room_from_dir.invalidate, (user_id,) ) txn.call_after( - self.get_if_users_share_a_room.invalidate, - (user_id, other_user_id), + self.get_if_users_share_a_room.invalidate, (user_id, other_user_id) ) + return self.runInteraction( "add_users_who_share_room", _add_users_who_share_room_txn ) @@ -407,6 +395,7 @@ class UserDirectoryStore(SQLBaseStore): share_private (bool): Is the room private user_id_tuples([(str, str)]): iterable of 2-tuple of user IDs. """ + def _update_users_who_share_room_txn(txn): sql = """ UPDATE users_who_share_rooms @@ -414,21 +403,16 @@ class UserDirectoryStore(SQLBaseStore): WHERE user_id = ? AND other_user_id = ? """ txn.executemany( - sql, - ( - (room_id, share_private, uid, oid) - for uid, oid in user_id_sets - ) + sql, ((room_id, share_private, uid, oid) for uid, oid in user_id_sets) ) for user_id, other_user_id in user_id_sets: txn.call_after( - self.get_users_who_share_room_from_dir.invalidate, - (user_id,), + self.get_users_who_share_room_from_dir.invalidate, (user_id,) ) txn.call_after( - self.get_if_users_share_a_room.invalidate, - (user_id, other_user_id), + self.get_if_users_share_a_room.invalidate, (user_id, other_user_id) ) + return self.runInteraction( "update_users_who_share_room", _update_users_who_share_room_txn ) @@ -442,22 +426,18 @@ class UserDirectoryStore(SQLBaseStore): share_private (bool): Is the room private user_id_tuples([(str, str)]): iterable of 2-tuple of user IDs. """ + def _remove_user_who_share_room_txn(txn): self._simple_delete_txn( txn, table="users_who_share_rooms", - keyvalues={ - "user_id": user_id, - "other_user_id": other_user_id, - }, + keyvalues={"user_id": user_id, "other_user_id": other_user_id}, ) txn.call_after( - self.get_users_who_share_room_from_dir.invalidate, - (user_id,), + self.get_users_who_share_room_from_dir.invalidate, (user_id,) ) txn.call_after( - self.get_if_users_share_a_room.invalidate, - (user_id, other_user_id), + self.get_if_users_share_a_room.invalidate, (user_id, other_user_id) ) return self.runInteraction( @@ -478,10 +458,7 @@ class UserDirectoryStore(SQLBaseStore): """ return self._simple_select_one_onecol( table="users_who_share_rooms", - keyvalues={ - "user_id": user_id, - "other_user_id": other_user_id, - }, + keyvalues={"user_id": user_id, "other_user_id": other_user_id}, retcol="share_private", allow_none=True, desc="get_if_users_share_a_room", @@ -499,17 +476,12 @@ class UserDirectoryStore(SQLBaseStore): """ rows = yield self._simple_select_list( table="users_who_share_rooms", - keyvalues={ - "user_id": user_id, - }, - retcols=("other_user_id", "share_private",), + keyvalues={"user_id": user_id}, + retcols=("other_user_id", "share_private"), desc="get_users_who_share_room_with_user", ) - defer.returnValue({ - row["other_user_id"]: row["share_private"] - for row in rows - }) + defer.returnValue({row["other_user_id"]: row["share_private"] for row in rows}) def get_users_in_share_dir_with_room_id(self, user_id, room_id): """Get all user tuples that are in the users_who_share_rooms due to the @@ -556,6 +528,7 @@ class UserDirectoryStore(SQLBaseStore): def delete_all_from_user_dir(self): """Delete the entire user directory """ + def _delete_all_from_user_dir_txn(txn): txn.execute("DELETE FROM 
user_directory") txn.execute("DELETE FROM user_directory_search") @@ -565,6 +538,7 @@ class UserDirectoryStore(SQLBaseStore): txn.call_after(self.get_user_in_public_room.invalidate_all) txn.call_after(self.get_users_who_share_room_from_dir.invalidate_all) txn.call_after(self.get_if_users_share_a_room.invalidate_all) + return self.runInteraction( "delete_all_from_user_dir", _delete_all_from_user_dir_txn ) @@ -574,7 +548,7 @@ class UserDirectoryStore(SQLBaseStore): return self._simple_select_one( table="user_directory", keyvalues={"user_id": user_id}, - retcols=("room_id", "display_name", "avatar_url",), + retcols=("room_id", "display_name", "avatar_url"), allow_none=True, desc="get_user_in_directory", ) @@ -607,7 +581,9 @@ class UserDirectoryStore(SQLBaseStore): def get_current_state_deltas(self, prev_stream_id): prev_stream_id = int(prev_stream_id) - if not self._curr_state_delta_stream_cache.has_any_entity_changed(prev_stream_id): + if not self._curr_state_delta_stream_cache.has_any_entity_changed( + prev_stream_id + ): return [] def get_current_state_deltas_txn(txn): @@ -641,7 +617,7 @@ class UserDirectoryStore(SQLBaseStore): WHERE ? < stream_id AND stream_id <= ? ORDER BY stream_id ASC """ - txn.execute(sql, (prev_stream_id, max_stream_id,)) + txn.execute(sql, (prev_stream_id, max_stream_id)) return self.cursor_to_dict(txn) return self.runInteraction( @@ -731,8 +707,11 @@ class UserDirectoryStore(SQLBaseStore): display_name IS NULL, avatar_url IS NULL LIMIT ? - """ % (join_clause, where_clause) - args = join_args + (full_query, exact_query, prefix_query, limit + 1,) + """ % ( + join_clause, + where_clause, + ) + args = join_args + (full_query, exact_query, prefix_query, limit + 1) elif isinstance(self.database_engine, Sqlite3Engine): search_query = _parse_query_sqlite(search_term) @@ -749,7 +728,10 @@ class UserDirectoryStore(SQLBaseStore): display_name IS NULL, avatar_url IS NULL LIMIT ? - """ % (join_clause, where_clause) + """ % ( + join_clause, + where_clause, + ) args = join_args + (search_query, limit + 1) else: # This should be unreachable. @@ -761,10 +743,7 @@ class UserDirectoryStore(SQLBaseStore): limited = len(results) > limit - defer.returnValue({ - "limited": limited, - "results": results, - }) + defer.returnValue({"limited": limited, "results": results}) def _parse_query_sqlite(search_term): @@ -779,7 +758,7 @@ def _parse_query_sqlite(search_term): # Pull out the individual words, discarding any non-word characters. results = re.findall(r"([\w\-]+)", search_term, re.UNICODE) - return " & ".join("(%s* OR %s)" % (result, result,) for result in results) + return " & ".join("(%s* OR %s)" % (result, result) for result in results) def _parse_query_postgres(search_term): @@ -792,7 +771,7 @@ def _parse_query_postgres(search_term): # Pull out the individual words, discarding any non-word characters. results = re.findall(r"([\w\-]+)", search_term, re.UNICODE) - both = " & ".join("(%s:* | %s)" % (result, result,) for result in results) + both = " & ".join("(%s:* | %s)" % (result, result) for result in results) exact = " & ".join("%s" % (result,) for result in results) prefix = " & ".join("%s:*" % (result,) for result in results) -- cgit 1.4.1 From 7fc1196a362fc56d11e2652ee5c9699b1198cf40 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 14 Feb 2019 13:58:52 +0000 Subject: Correctly handle RequestSendFailed exceptions This mainly reduces the number of exceptions we log. 
--- synapse/crypto/keyring.py | 4 ++-- synapse/groups/attestations.py | 7 ++++++- synapse/handlers/device.py | 4 ++-- synapse/handlers/groups_local.py | 12 +++++++++--- 4 files changed, 19 insertions(+), 8 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 3a96980bed..cce40fdd2d 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -35,7 +35,7 @@ from unpaddedbase64 import decode_base64 from twisted.internet import defer -from synapse.api.errors import Codes, SynapseError +from synapse.api.errors import Codes, RequestSendFailed, SynapseError from synapse.util import logcontext, unwrapFirstError from synapse.util.logcontext import ( LoggingContext, @@ -656,7 +656,7 @@ def _handle_key_deferred(verify_request): try: with PreserveLoggingContext(): _, key_id, verify_key = yield verify_request.deferred - except IOError as e: + except (IOError, RequestSendFailed) as e: logger.warn( "Got IOError when downloading keys for %s: %s %s", server_name, type(e).__name__, str(e), diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index b04f4234ca..786149be65 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -42,7 +42,7 @@ from signedjson.sign import sign_json from twisted.internet import defer -from synapse.api.errors import SynapseError +from synapse.api.errors import RequestSendFailed, SynapseError from synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import get_domain_from_id from synapse.util.logcontext import run_in_background @@ -191,6 +191,11 @@ class GroupAttestionRenewer(object): yield self.store.update_attestation_renewal( group_id, user_id, attestation ) + except RequestSendFailed as e: + logger.warning( + "Failed to renew attestation of %r in %r: %s", + user_id, group_id, e, + ) except Exception: logger.exception("Error renewing attestation of %r in %r", user_id, group_id) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 8955cde4ed..6eddb10e0d 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -20,7 +20,7 @@ from twisted.internet import defer from synapse.api import errors from synapse.api.constants import EventTypes -from synapse.api.errors import FederationDeniedError +from synapse.api.errors import FederationDeniedError, RequestSendFailed from synapse.types import RoomStreamToken, get_domain_from_id from synapse.util import stringutils from synapse.util.async_helpers import Linearizer @@ -504,7 +504,7 @@ class DeviceListEduUpdater(object): origin = get_domain_from_id(user_id) try: result = yield self.federation.query_user_devices(origin, user_id) - except NotRetryingDestination: + except (NotRetryingDestination, RequestSendFailed): # TODO: Remember that we are now out of sync and try again # later logger.warn( diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 173315af6c..02c508acec 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -20,7 +20,7 @@ from six import iteritems from twisted.internet import defer -from synapse.api.errors import HttpResponseException, SynapseError +from synapse.api.errors import HttpResponseException, RequestSendFailed, SynapseError from synapse.types import get_domain_from_id logger = logging.getLogger(__name__) @@ -46,13 +46,19 @@ def _create_rerouter(func_name): # when the remote end responds with things like 403 Not # In Group, we can communicate 
that to the client instead # of a 500. - def h(failure): + def http_response_errback(failure): failure.trap(HttpResponseException) e = failure.value if e.code == 403: raise e.to_synapse_error() return failure - d.addErrback(h) + + def request_failed_errback(failure): + failure.trap(RequestSendFailed) + raise SynapseError(502, "Failed to contact group server") + + d.addErrback(http_response_errback) + d.addErrback(request_failed_errback) return d return f -- cgit 1.4.1 From eaf4d11af9da7d6d9ce71cb83f70424bb38e0703 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 14 Feb 2019 16:02:23 +0000 Subject: Add configurable room list publishing rules This allows specifying who and what is allowed to be published onto the public room list --- synapse/config/room_directory.py | 94 ++++++++++++++++++++++++++++++------- synapse/handlers/directory.py | 29 ++++++++++-- synapse/storage/state.py | 25 ++++++++++ tests/config/test_room_directory.py | 73 ++++++++++++++++++++++++++++ tests/handlers/test_directory.py | 1 + 5 files changed, 200 insertions(+), 22 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py index 9da13ab11b..a0869ed6ab 100644 --- a/synapse/config/room_directory.py +++ b/synapse/config/room_directory.py @@ -23,70 +23,121 @@ class RoomDirectoryConfig(Config): alias_creation_rules = config["alias_creation_rules"] self._alias_creation_rules = [ - _AliasRule(rule) + _RoomDirectoryRule("alias_creation_rules", rule) for rule in alias_creation_rules ] + room_list_publication_rules = config["room_list_publication_rules"] + + self._room_list_publication_rules = [ + _RoomDirectoryRule("room_list_publication_rules", rule) + for rule in room_list_publication_rules + ] + def default_config(self, config_dir_path, server_name, **kwargs): return """ # The `alias_creation` option controls who's allowed to create aliases # on this server. # # The format of this option is a list of rules that contain globs that - # match against user_id and the new alias (fully qualified with server - # name). The action in the first rule that matches is taken, which can - # currently either be "allow" or "deny". + # match against user_id, room_id and the new alias (fully qualified with + # server name). The action in the first rule that matches is taken, + # which can currently either be "allow" or "deny". + # + # Missing user_id/room_id/alias fields default to "*". # # If no rules match the request is denied. alias_creation_rules: - user_id: "*" - alias: "*" + alias: "*" # This matches alias being created + room_id: "*" + action: allow + + # The `room_list_publication_rules` option control who and what can be + # published in the public room list. 
+ # + # The format of this option is the same as that for + # `alias_creation_rules` + room_list_publication_rules: + - user_id: "*" + alias: "*" # This matches any local or canonical alias + # associated with the room + room_id: "*" action: allow """ - def is_alias_creation_allowed(self, user_id, alias): + def is_alias_creation_allowed(self, user_id, room_id, alias): """Checks if the given user is allowed to create the given alias Args: user_id (str) + room_id (str) alias (str) Returns: boolean: True if user is allowed to crate the alias """ for rule in self._alias_creation_rules: - if rule.matches(user_id, alias): + if rule.matches(user_id, room_id, [alias]): + return rule.action == "allow" + + return False + + def is_publishing_room_allowed(self, user_id, room_id, aliases): + """Checks if the given user is allowed to publish the room + + Args: + user_id (str) + room_id (str) + aliases (list[str]): any local aliases associated with the room + + Returns: + boolean: True if user can publish room + """ + for rule in self._room_list_publication_rules: + if rule.matches(user_id, room_id, aliases): return rule.action == "allow" return False -class _AliasRule(object): - def __init__(self, rule): +class _RoomDirectoryRule(object): + """Helper class to test whether a room directory action is allowed, like + creating an alias or publishing a room. + """ + + def __init__(self, option_name, rule): action = rule["action"] - user_id = rule["user_id"] - alias = rule["alias"] + user_id = rule.get("user_id", "*") + room_id = rule.get("room_id", "*") + alias = rule.get("alias", "*") if action in ("allow", "deny"): self.action = action else: raise ConfigError( - "alias_creation_rules rules can only have action of 'allow'" - " or 'deny'" + "%s rules can only have action of 'allow'" + " or 'deny'" % (option_name,) ) + self._alias_matches_all = alias == "*" + try: self._user_id_regex = glob_to_regex(user_id) self._alias_regex = glob_to_regex(alias) + self._room_id_regex = glob_to_regex(room_id) except Exception as e: raise ConfigError("Failed to parse glob into regex: %s", e) - def matches(self, user_id, alias): - """Tests if this rule matches the given user_id and alias. + def matches(self, user_id, room_id, aliases): + """Tests if this rule matches the given user_id, room_id and aliases. Args: user_id (str) - alias (str) + room_id (str) + aliases (list[str]): The associated aliases to the room. Will be a + single element for testing alias creation, and can be empty for + testing room publishing. 
Returns: boolean @@ -96,7 +147,16 @@ class _AliasRule(object): if not self._user_id_regex.match(user_id): return False - if not self._alias_regex.match(alias): + # If we are not given any aliases then this rule only matches if the + # alias glob matches all aliases + if not aliases and not self._alias_matches_all: + return False + + for alias in aliases: + if not self._alias_regex.match(alias): + return False + + if not self._room_id_regex.match(room_id): return False return True diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 6bb254f899..e5319b42a6 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -112,7 +112,9 @@ class DirectoryHandler(BaseHandler): 403, "This user is not permitted to create this alias", ) - if not self.config.is_alias_creation_allowed(user_id, room_alias.to_string()): + if not self.config.is_alias_creation_allowed( + user_id, room_id, room_alias.to_string(), + ): # Lets just return a generic message, as there may be all sorts of # reasons why we said no. TODO: Allow configurable error messages # per alias creation rule? @@ -395,9 +397,9 @@ class DirectoryHandler(BaseHandler): room_id (str) visibility (str): "public" or "private" """ - if not self.spam_checker.user_may_publish_room( - requester.user.to_string(), room_id - ): + user_id = requester.user.to_string() + + if not self.spam_checker.user_may_publish_room(user_id, room_id): raise AuthError( 403, "This user is not permitted to publish rooms to the room list" @@ -415,7 +417,24 @@ class DirectoryHandler(BaseHandler): yield self.auth.check_can_change_room_list(room_id, requester.user) - yield self.store.set_room_is_public(room_id, visibility == "public") + room_aliases = yield self.store.get_aliases_for_room(room_id) + canonical_alias = yield self.store.get_canonical_alias_for_room(room_id) + if canonical_alias: + room_aliases.append(canonical_alias) + + making_public = visibility == "public" + + if making_public and not self.config.is_publishing_room_allowed( + user_id, room_id, room_aliases, + ): + # Lets just return a generic message, as there may be all sorts of + # reasons why we said no. TODO: Allow configurable error messages + # per alias creation rule? 
+ raise SynapseError( + 403, "Not allowed to publish room", + ) + + yield self.store.set_room_is_public(room_id, making_public) @defer.inlineCallbacks def edit_published_appservice_room_list(self, appservice_id, network_id, diff --git a/synapse/storage/state.py b/synapse/storage/state.py index d14a7b2538..6ddc4055d2 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -548,6 +548,31 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): _get_filtered_current_state_ids_txn, ) + @defer.inlineCallbacks + def get_canonical_alias_for_room(self, room_id): + """Get canonical alias for room, if any + + Args: + room_id (str) + + Returns: + Deferred[str|None]: The canonical alias, if any + """ + + state = yield self.get_filtered_current_state_ids(room_id, StateFilter.from_types( + [(EventTypes.CanonicalAlias, "")] + )) + + event_id = state.get((EventTypes.CanonicalAlias, "")) + if not event_id: + return + + event = yield self.get_event(event_id, allow_none=True) + if not event: + return + + defer.returnValue(event.content.get("canonical_alias")) + @cached(max_entries=10000, iterable=True) def get_state_group_delta(self, state_group): """Given a state group try to return a previous group and a delta between diff --git a/tests/config/test_room_directory.py b/tests/config/test_room_directory.py index f37a17d618..1d4ca0055c 100644 --- a/tests/config/test_room_directory.py +++ b/tests/config/test_room_directory.py @@ -36,6 +36,8 @@ class RoomDirectoryConfigTestCase(unittest.TestCase): - user_id: "@gah:example.com" alias: "#goo:example.com" action: "allow" + + room_list_publication_rules: [] """) rd_config = RoomDirectoryConfig() @@ -43,25 +45,96 @@ class RoomDirectoryConfigTestCase(unittest.TestCase): self.assertFalse(rd_config.is_alias_creation_allowed( user_id="@bob:example.com", + room_id="!test", alias="#test:example.com", )) self.assertTrue(rd_config.is_alias_creation_allowed( user_id="@test:example.com", + room_id="!test", alias="#unofficial_st:example.com", )) self.assertTrue(rd_config.is_alias_creation_allowed( user_id="@foobar:example.com", + room_id="!test", alias="#test:example.com", )) self.assertTrue(rd_config.is_alias_creation_allowed( user_id="@gah:example.com", + room_id="!test", alias="#goo:example.com", )) self.assertFalse(rd_config.is_alias_creation_allowed( user_id="@test:example.com", + room_id="!test", alias="#test:example.com", )) + + def test_room_publish_acl(self): + config = yaml.load(""" + alias_creation_rules: [] + + room_list_publication_rules: + - user_id: "*bob*" + alias: "*" + action: "deny" + - user_id: "*" + alias: "#unofficial_*" + action: "allow" + - user_id: "@foo*:example.com" + alias: "*" + action: "allow" + - user_id: "@gah:example.com" + alias: "#goo:example.com" + action: "allow" + - room_id: "!test-deny" + action: "deny" + """) + + rd_config = RoomDirectoryConfig() + rd_config.read_config(config) + + self.assertFalse(rd_config.is_publishing_room_allowed( + user_id="@bob:example.com", + room_id="!test", + aliases=["#test:example.com"], + )) + + self.assertTrue(rd_config.is_publishing_room_allowed( + user_id="@test:example.com", + room_id="!test", + aliases=["#unofficial_st:example.com"], + )) + + self.assertTrue(rd_config.is_publishing_room_allowed( + user_id="@foobar:example.com", + room_id="!test", + aliases=[], + )) + + self.assertTrue(rd_config.is_publishing_room_allowed( + user_id="@gah:example.com", + room_id="!test", + aliases=["#goo:example.com"], + )) + + self.assertFalse(rd_config.is_publishing_room_allowed( + 
user_id="@test:example.com", + room_id="!test", + aliases=["#test:example.com"], + )) + + self.assertTrue(rd_config.is_publishing_room_allowed( + user_id="@foobar:example.com", + room_id="!test-deny", + aliases=[], + )) + + self.assertFalse(rd_config.is_publishing_room_allowed( + user_id="@gah:example.com", + room_id="!test-deny", + aliases=[], + )) diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index 8ae6556c0a..9bf395e923 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -121,6 +121,7 @@ class TestCreateAliasACL(unittest.HomeserverTestCase): "action": "allow", } ] + config["room_list_publication_rules"] = [] rd_config = RoomDirectoryConfig() rd_config.read_config(config) -- cgit 1.4.1 From f61b2068e633b8ea11453992749f696d0e35e7d5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 14 Feb 2019 18:08:09 +0000 Subject: Only fetch aliases when publishing rooms --- synapse/handlers/directory.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index e5319b42a6..8b113307d2 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -417,22 +417,22 @@ class DirectoryHandler(BaseHandler): yield self.auth.check_can_change_room_list(room_id, requester.user) - room_aliases = yield self.store.get_aliases_for_room(room_id) - canonical_alias = yield self.store.get_canonical_alias_for_room(room_id) - if canonical_alias: - room_aliases.append(canonical_alias) - making_public = visibility == "public" - - if making_public and not self.config.is_publishing_room_allowed( - user_id, room_id, room_aliases, - ): - # Lets just return a generic message, as there may be all sorts of - # reasons why we said no. TODO: Allow configurable error messages - # per alias creation rule? - raise SynapseError( - 403, "Not allowed to publish room", - ) + if making_public: + room_aliases = yield self.store.get_aliases_for_room(room_id) + canonical_alias = yield self.store.get_canonical_alias_for_room(room_id) + if canonical_alias: + room_aliases.append(canonical_alias) + + if not self.config.is_publishing_room_allowed( + user_id, room_id, room_aliases, + ): + # Lets just return a generic message, as there may be all sorts of + # reasons why we said no. TODO: Allow configurable error messages + # per alias creation rule? 
+ raise SynapseError( + 403, "Not allowed to publish room", + ) yield self.store.set_room_is_public(room_id, making_public) -- cgit 1.4.1 From 1895d14e12b901ed4928950e6cc3b1e2e6fd89cd Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 15 Feb 2019 12:05:08 +0000 Subject: Support .well-known delegation when issuing certificates through ACME --- changelog.d/4652.feature | 1 + synapse/handlers/acme.py | 27 +++++++++++++++++++++++---- 2 files changed, 24 insertions(+), 4 deletions(-) create mode 100644 changelog.d/4652.feature (limited to 'synapse/handlers') diff --git a/changelog.d/4652.feature b/changelog.d/4652.feature new file mode 100644 index 0000000000..48d9bb08a0 --- /dev/null +++ b/changelog.d/4652.feature @@ -0,0 +1 @@ +Support .well-known delegation when issuing certificates through ACME diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index dd0b217965..9d1b1a1c29 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -25,8 +25,11 @@ from twisted.python.filepath import FilePath from twisted.python.url import URL from twisted.web import server, static from twisted.web.resource import Resource +from twisted.web.client import URI from synapse.app import check_bind_error +from synapse.crypto.context_factory import ClientTLSOptionsFactory +from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent logger = logging.getLogger(__name__) @@ -123,15 +126,31 @@ class AcmeHandler(object): @defer.inlineCallbacks def provision_certificate(self): - logger.warning("Reprovisioning %s", self.hs.hostname) + # Retrieve .well-known if it's in use. We do so through the federation + # agent, because that's where the .well-known logic lives. + agent = MatrixFederationAgent( + tls_client_options_factory=ClientTLSOptionsFactory(None), + reactor=self.reactor, + ) + delegated = yield agent._get_well_known(bytes(self.hs.hostname,"ascii")) + + # If .well-known is in use, use the delegated hostname instead of the + # homeserver's server_name. 
+ if delegated: + cert_name = delegated.decode("ascii") + logger.info(".well-known is in use, provisionning %s instead of %s", cert_name, self.hs.hostname) + else: + cert_name = self.hs.hostname + + logger.warning("Reprovisioning %s", cert_name) try: - yield self._issuer.issue_cert(self.hs.hostname) + yield self._issuer.issue_cert(cert_name) except Exception: logger.exception("Fail!") raise - logger.warning("Reprovisioned %s, saving.", self.hs.hostname) - cert_chain = self._store.certs[self.hs.hostname] + logger.warning("Reprovisioned %s, saving.", cert_name) + cert_chain = self._store.certs[cert_name] try: with open(self.hs.config.tls_private_key_file, "wb") as private_key_file: -- cgit 1.4.1 From af8a2f679b38d0e3594e172b3b4f7a7c4468193e Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 15 Feb 2019 12:27:43 +0000 Subject: Remove unused import --- synapse/handlers/acme.py | 1 - 1 file changed, 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index 9d1b1a1c29..93a6a36e6a 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -25,7 +25,6 @@ from twisted.python.filepath import FilePath from twisted.python.url import URL from twisted.web import server, static from twisted.web.resource import Resource -from twisted.web.client import URI from synapse.app import check_bind_error from synapse.crypto.context_factory import ClientTLSOptionsFactory -- cgit 1.4.1 From f86b695cbd6a39492946fcddbfcc241ff836e767 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 15 Feb 2019 12:29:34 +0000 Subject: Various cosmetics to make TravisCI happy --- synapse/handlers/acme.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index 93a6a36e6a..a56a9cd287 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -131,13 +131,16 @@ class AcmeHandler(object): tls_client_options_factory=ClientTLSOptionsFactory(None), reactor=self.reactor, ) - delegated = yield agent._get_well_known(bytes(self.hs.hostname,"ascii")) + delegated = yield agent._get_well_known(bytes(self.hs.hostname, "ascii")) # If .well-known is in use, use the delegated hostname instead of the # homeserver's server_name. 
if delegated: cert_name = delegated.decode("ascii") - logger.info(".well-known is in use, provisionning %s instead of %s", cert_name, self.hs.hostname) + logger.info( + ".well-known is in use, provisionning %s instead of %s", + cert_name, self.hs.hostname, + ) else: cert_name = self.hs.hostname -- cgit 1.4.1 From 6d02a13d81f7d99cb92081631b188398eea0c4d7 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Mon, 18 Feb 2019 11:36:34 +0000 Subject: Typo in info log Co-Authored-By: babolivier --- synapse/handlers/acme.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index a56a9cd287..ca5b7257d3 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -138,7 +138,7 @@ class AcmeHandler(object): if delegated: cert_name = delegated.decode("ascii") logger.info( - ".well-known is in use, provisionning %s instead of %s", + ".well-known is in use, provisioning %s instead of %s", cert_name, self.hs.hostname, ) else: -- cgit 1.4.1 From eb2b8523ae1ddd38bf1dd19ee37e44e7f4a3ee68 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Feb 2019 12:12:57 +0000 Subject: Split out registration to worker This allows registration to be handled by a worker, though the actual write to the database still happens on master. Note: due to the in-memory session map all registration requests must be handled by the same worker. --- synapse/app/client_reader.py | 2 + synapse/handlers/register.py | 63 ++++++++- synapse/replication/http/__init__.py | 4 +- synapse/replication/http/login.py | 85 ++++++++++++ synapse/replication/http/register.py | 91 ++++++++++++ synapse/rest/client/v2_alpha/register.py | 73 ++++++---- synapse/storage/registration.py | 230 +++++++++++++++---------------- 7 files changed, 401 insertions(+), 147 deletions(-) create mode 100644 synapse/replication/http/login.py create mode 100644 synapse/replication/http/register.py (limited to 'synapse/handlers') diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index a9d2147022..9250b6c239 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -47,6 +47,7 @@ from synapse.rest.client.v1.room import ( RoomMemberListRestServlet, RoomStateRestServlet, ) +from synapse.rest.client.v2_alpha.register import RegisterRestServlet from synapse.server import HomeServer from synapse.storage.engines import create_engine from synapse.util.httpresourcetree import create_resource_tree @@ -92,6 +93,7 @@ class ClientReaderServer(HomeServer): JoinedRoomMemberListRestServlet(self).register(resource) RoomStateRestServlet(self).register(resource) RoomEventContextServlet(self).register(resource) + RegisterRestServlet(self).register(resource) resources.update({ "/_matrix/client/r0": resource, diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 21c17c59a0..8ea557a003 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -27,6 +27,7 @@ from synapse.api.errors import ( SynapseError, ) from synapse.http.client import CaptchaServerHttpClient +from synapse.replication.http.register import ReplicationRegisterServlet from synapse.types import RoomAlias, RoomID, UserID, create_requester from synapse.util.async_helpers import Linearizer from synapse.util.threepids import check_3pid_allowed @@ -61,6 +62,9 @@ class RegistrationHandler(BaseHandler): ) self._server_notices_mxid = hs.config.server_notices_mxid + if hs.config.worker_app: 
+ self._register_client = ReplicationRegisterServlet.make_client(hs) + @defer.inlineCallbacks def check_username(self, localpart, guest_access_token=None, assigned_user_id=None): @@ -185,7 +189,7 @@ class RegistrationHandler(BaseHandler): token = None if generate_token: token = self.macaroon_gen.generate_access_token(user_id) - yield self.store.register( + yield self._register_with_store( user_id=user_id, token=token, password_hash=password_hash, @@ -217,7 +221,7 @@ class RegistrationHandler(BaseHandler): if default_display_name is None: default_display_name = localpart try: - yield self.store.register( + yield self._register_with_store( user_id=user_id, token=token, password_hash=password_hash, @@ -316,7 +320,7 @@ class RegistrationHandler(BaseHandler): user_id, allowed_appservice=service ) - yield self.store.register( + yield self._register_with_store( user_id=user_id, password_hash="", appservice_id=service_id, @@ -494,7 +498,7 @@ class RegistrationHandler(BaseHandler): token = self.macaroon_gen.generate_access_token(user_id) if need_register: - yield self.store.register( + yield self._register_with_store( user_id=user_id, token=token, password_hash=password_hash, @@ -573,3 +577,54 @@ class RegistrationHandler(BaseHandler): action="join", ratelimit=False, ) + + def _register_with_store(self, user_id, token=None, password_hash=None, + was_guest=False, make_guest=False, appservice_id=None, + create_profile_with_displayname=None, admin=False, + user_type=None): + """Register user in the datastore. + + Args: + user_id (str): The desired user ID to register. + token (str): The desired access token to use for this user. If this + is not None, the given access token is associated with the user + id. + password_hash (str|None): Optional. The password hash for this user. + was_guest (bool): Optional. Whether this is a guest account being + upgraded to a non-guest account. + make_guest (boolean): True if the the new user should be guest, + false to add a regular user account. + appservice_id (str|None): The ID of the appservice registering the user. + create_profile_with_displayname (unicode|None): Optionally create a + profile for the user, setting their displayname to the given value + admin (boolean): is an admin user? + user_type (str|None): type of user. One of the values from + api.constants.UserTypes, or None for a normal user. + + Returns: + Deferred + """ + if self.hs.config.worker_app: + return self._register_client( + user_id=user_id, + token=token, + password_hash=password_hash, + was_guest=was_guest, + make_guest=make_guest, + appservice_id=appservice_id, + create_profile_with_displayname=create_profile_with_displayname, + admin=admin, + user_type=user_type, + ) + else: + return self.store.register( + user_id=user_id, + token=token, + password_hash=password_hash, + was_guest=was_guest, + make_guest=make_guest, + appservice_id=appservice_id, + create_profile_with_displayname=create_profile_with_displayname, + admin=admin, + user_type=user_type, + ) diff --git a/synapse/replication/http/__init__.py b/synapse/replication/http/__init__.py index 19f214281e..81b85352b1 100644 --- a/synapse/replication/http/__init__.py +++ b/synapse/replication/http/__init__.py @@ -14,7 +14,7 @@ # limitations under the License. 
from synapse.http.server import JsonResource -from synapse.replication.http import federation, membership, send_event +from synapse.replication.http import federation, login, membership, register, send_event REPLICATION_PREFIX = "/_synapse/replication" @@ -28,3 +28,5 @@ class ReplicationRestResource(JsonResource): send_event.register_servlets(hs, self) membership.register_servlets(hs, self) federation.register_servlets(hs, self) + login.register_servlets(hs, self) + register.register_servlets(hs, self) diff --git a/synapse/replication/http/login.py b/synapse/replication/http/login.py new file mode 100644 index 0000000000..797f6aabd1 --- /dev/null +++ b/synapse/replication/http/login.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from twisted.internet import defer + +from synapse.http.servlet import parse_json_object_from_request +from synapse.replication.http._base import ReplicationEndpoint + +logger = logging.getLogger(__name__) + + +class RegisterDeviceReplicationServlet(ReplicationEndpoint): + """Ensure a device is registered, generating a new access token for the + device. + + Used during registration and login. + """ + + NAME = "device_check_registered" + PATH_ARGS = ("user_id",) + + def __init__(self, hs): + super(RegisterDeviceReplicationServlet, self).__init__(hs) + self.auth_handler = hs.get_auth_handler() + self.device_handler = hs.get_device_handler() + self.macaroon_gen = hs.get_macaroon_generator() + + @staticmethod + def _serialize_payload(user_id, device_id, initial_display_name, is_guest): + """ + Args: + device_id (str|None): Device ID to use, if None a new one is + generated. 
+ initial_display_name (str|None) + is_guest (bool) + """ + return { + "device_id": device_id, + "initial_display_name": initial_display_name, + "is_guest": is_guest, + } + + @defer.inlineCallbacks + def _handle_request(self, request, user_id): + content = parse_json_object_from_request(request) + + device_id = content["device_id"] + initial_display_name = content["initial_display_name"] + is_guest = content["is_guest"] + + device_id = yield self.device_handler.check_device_registered( + user_id, device_id, initial_display_name, + ) + + if is_guest: + access_token = self.macaroon_gen.generate_access_token( + user_id, ["guest = true"] + ) + else: + access_token = yield self.auth_handler.get_access_token_for_user_id( + user_id, device_id=device_id, + ) + + defer.returnValue((200, { + "device_id": device_id, + "access_token": access_token, + })) + + +def register_servlets(hs, http_server): + RegisterDeviceReplicationServlet(hs).register(http_server) diff --git a/synapse/replication/http/register.py b/synapse/replication/http/register.py new file mode 100644 index 0000000000..bdaf37396c --- /dev/null +++ b/synapse/replication/http/register.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from twisted.internet import defer + +from synapse.http.servlet import parse_json_object_from_request +from synapse.replication.http._base import ReplicationEndpoint + +logger = logging.getLogger(__name__) + + +class ReplicationRegisterServlet(ReplicationEndpoint): + """Register a new user + """ + + NAME = "register_user" + PATH_ARGS = ("user_id",) + + def __init__(self, hs): + super(ReplicationRegisterServlet, self).__init__(hs) + self.store = hs.get_datastore() + + @staticmethod + def _serialize_payload( + user_id, token, password_hash, was_guest, make_guest, appservice_id, + create_profile_with_displayname, admin, user_type, + ): + """ + Args: + user_id (str): The desired user ID to register. + token (str): The desired access token to use for this user. If this + is not None, the given access token is associated with the user + id. + password_hash (str|None): Optional. The password hash for this user. + was_guest (bool): Optional. Whether this is a guest account being + upgraded to a non-guest account. + make_guest (boolean): True if the the new user should be guest, + false to add a regular user account. + appservice_id (str|None): The ID of the appservice registering the user. + create_profile_with_displayname (unicode|None): Optionally create a + profile for the user, setting their displayname to the given value + admin (boolean): is an admin user? + user_type (str|None): type of user. One of the values from + api.constants.UserTypes, or None for a normal user. 
+ """ + return { + "token": token, + "password_hash": password_hash, + "was_guest": was_guest, + "make_guest": make_guest, + "appservice_id": appservice_id, + "create_profile_with_displayname": create_profile_with_displayname, + "admin": admin, + "user_type": user_type, + } + + @defer.inlineCallbacks + def _handle_request(self, request, user_id): + content = parse_json_object_from_request(request) + + yield self.store.register( + user_id=user_id, + token=content["token"], + password_hash=content["password_hash"], + was_guest=content["was_guest"], + make_guest=content["make_guest"], + appservice_id=content["appservice_id"], + create_profile_with_displayname=content["create_profile_with_displayname"], + admin=content["admin"], + user_type=content["user_type"], + ) + + defer.returnValue((200, {})) + + +def register_servlets(hs, http_server): + ReplicationRegisterServlet(hs).register(http_server) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 7f812b8209..d78da50787 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -33,6 +33,7 @@ from synapse.http.servlet import ( parse_json_object_from_request, parse_string, ) +from synapse.replication.http.login import RegisterDeviceReplicationServlet from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.ratelimitutils import FederationRateLimiter from synapse.util.threepids import check_3pid_allowed @@ -190,9 +191,15 @@ class RegisterRestServlet(RestServlet): self.registration_handler = hs.get_handlers().registration_handler self.identity_handler = hs.get_handlers().identity_handler self.room_member_handler = hs.get_room_member_handler() - self.device_handler = hs.get_device_handler() self.macaroon_gen = hs.get_macaroon_generator() + if self.hs.config.worker_app: + self._register_device_client = ( + RegisterDeviceReplicationServlet.make_client(hs) + ) + else: + self.device_handler = hs.get_device_handler() + @interactive_auth_handler @defer.inlineCallbacks def on_POST(self, request): @@ -633,12 +640,10 @@ class RegisterRestServlet(RestServlet): "home_server": self.hs.hostname, } if not params.get("inhibit_login", False): - device_id = yield self._register_device(user_id, params) - - access_token = ( - yield self.auth_handler.get_access_token_for_user_id( - user_id, device_id=device_id, - ) + device_id = params.get("device_id") + initial_display_name = params.get("initial_device_display_name") + device_id, access_token = yield self._register_device( + user_id, device_id, initial_display_name, is_guest=False, ) result.update({ @@ -647,25 +652,42 @@ class RegisterRestServlet(RestServlet): }) defer.returnValue(result) - def _register_device(self, user_id, params): - """Register a device for a user. - - This is called after the user's credentials have been validated, but - before the access token has been issued. + @defer.inlineCallbacks + def _register_device(self, user_id, device_id, initial_display_name, + is_guest): + """Register a device for a user and generate an access token. Args: - (str) user_id: full canonical @user:id - (object) params: registration parameters, from which we pull - device_id and initial_device_name + user_id (str): full canonical @user:id + device_id (str|None): The device ID to check, or None to generate + a new one. + initial_display_name (str|None): An optional display name for the + device. 
+ is_guest (bool): Whether this is a guest account Returns: - defer.Deferred: (str) device_id + defer.Deferred[(str, str)]: Tuple of device ID and access token """ - # register the user's device - device_id = params.get("device_id") - initial_display_name = params.get("initial_device_display_name") - return self.device_handler.check_device_registered( - user_id, device_id, initial_display_name - ) + if self.hs.config.worker_app: + r = yield self._register_device_client( + user_id=user_id, + device_id=device_id, + initial_display_name=initial_display_name, + is_guest=is_guest, + ) + defer.returnValue((r["device_id"], r["access_token"])) + else: + device_id = yield self.device_handler.check_device_registered( + user_id, device_id, initial_display_name + ) + if is_guest: + access_token = self.macaroon_gen.generate_access_token( + user_id, ["guest = true"] + ) + else: + access_token = yield self.auth_handler.get_access_token_for_user_id( + user_id, device_id=device_id, + ) + defer.returnValue((device_id, access_token)) @defer.inlineCallbacks def _do_guest_registration(self, params): @@ -680,13 +702,10 @@ class RegisterRestServlet(RestServlet): # we have nowhere to store it. device_id = synapse.api.auth.GUEST_DEVICE_ID initial_display_name = params.get("initial_device_display_name") - yield self.device_handler.check_device_registered( - user_id, device_id, initial_display_name + device_id, access_token = yield self._register_device( + user_id, device_id, initial_display_name, is_guest=True, ) - access_token = self.macaroon_gen.generate_access_token( - user_id, ["guest = true"] - ) defer.returnValue((200, { "user_id": user_id, "device_id": device_id, diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index c9e11c3135..3bc5def48e 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -139,6 +139,121 @@ class RegistrationWorkerStore(SQLBaseStore): ) return True if res == UserTypes.SUPPORT else False + def get_users_by_id_case_insensitive(self, user_id): + """Gets users that match user_id case insensitively. + Returns a mapping of user_id -> password_hash. + """ + def f(txn): + sql = ( + "SELECT name, password_hash FROM users" + " WHERE lower(name) = lower(?)" + ) + txn.execute(sql, (user_id,)) + return dict(txn) + + return self.runInteraction("get_users_by_id_case_insensitive", f) + + @defer.inlineCallbacks + def count_all_users(self): + """Counts all users registered on the homeserver.""" + def _count_users(txn): + txn.execute("SELECT COUNT(*) AS users FROM users") + rows = self.cursor_to_dict(txn) + if rows: + return rows[0]["users"] + return 0 + + ret = yield self.runInteraction("count_users", _count_users) + defer.returnValue(ret) + + def count_daily_user_type(self): + """ + Counts 1) native non guest users + 2) native guests users + 3) bridged users + who registered on the homeserver in the past 24 hours + """ + def _count_daily_user_type(txn): + yesterday = int(self._clock.time()) - (60 * 60 * 24) + + sql = """ + SELECT user_type, COALESCE(count(*), 0) AS count FROM ( + SELECT + CASE + WHEN is_guest=0 AND appservice_id IS NULL THEN 'native' + WHEN is_guest=1 AND appservice_id IS NULL THEN 'guest' + WHEN is_guest=0 AND appservice_id IS NOT NULL THEN 'bridged' + END AS user_type + FROM users + WHERE creation_ts > ? 
+ ) AS t GROUP BY user_type + """ + results = {'native': 0, 'guest': 0, 'bridged': 0} + txn.execute(sql, (yesterday,)) + for row in txn: + results[row[0]] = row[1] + return results + return self.runInteraction("count_daily_user_type", _count_daily_user_type) + + @defer.inlineCallbacks + def count_nonbridged_users(self): + def _count_users(txn): + txn.execute(""" + SELECT COALESCE(COUNT(*), 0) FROM users + WHERE appservice_id IS NULL + """) + count, = txn.fetchone() + return count + + ret = yield self.runInteraction("count_users", _count_users) + defer.returnValue(ret) + + @defer.inlineCallbacks + def find_next_generated_user_id_localpart(self): + """ + Gets the localpart of the next generated user ID. + + Generated user IDs are integers, and we aim for them to be as small as + we can. Unfortunately, it's possible some of them are already taken by + existing users, and there may be gaps in the already taken range. This + function returns the start of the first allocatable gap. This is to + avoid the case of ID 10000000 being pre-allocated, so us wasting the + first (and shortest) many generated user IDs. + """ + def _find_next_generated_user_id(txn): + txn.execute("SELECT name FROM users") + + regex = re.compile(r"^@(\d+):") + + found = set() + + for user_id, in txn: + match = regex.search(user_id) + if match: + found.add(int(match.group(1))) + for i in range(len(found) + 1): + if i not in found: + return i + + defer.returnValue((yield self.runInteraction( + "find_next_generated_user_id", + _find_next_generated_user_id + ))) + + @defer.inlineCallbacks + def get_3pid_guest_access_token(self, medium, address): + ret = yield self._simple_select_one( + "threepid_guest_access_tokens", + { + "medium": medium, + "address": address + }, + ["guest_access_token"], True, 'get_3pid_guest_access_token' + ) + if ret: + defer.returnValue(ret["guest_access_token"]) + defer.returnValue(None) + class RegistrationStore(RegistrationWorkerStore, background_updates.BackgroundUpdateStore): @@ -326,20 +441,6 @@ class RegistrationStore(RegistrationWorkerStore, ) txn.call_after(self.is_guest.invalidate, (user_id,)) - def get_users_by_id_case_insensitive(self, user_id): - """Gets users that match user_id case insensitively. - Returns a mapping of user_id -> password_hash. - """ - def f(txn): - sql = ( - "SELECT name, password_hash FROM users" - " WHERE lower(name) = lower(?)" - ) - txn.execute(sql, (user_id,)) - return dict(txn) - - return self.runInteraction("get_users_by_id_case_insensitive", f) - def user_set_password_hash(self, user_id, password_hash): """ NB. 
This does *not* evict any cache because the one use for this @@ -564,107 +665,6 @@ class RegistrationStore(RegistrationWorkerStore, desc="user_delete_threepids", ) - @defer.inlineCallbacks - def count_all_users(self): - """Counts all users registered on the homeserver.""" - def _count_users(txn): - txn.execute("SELECT COUNT(*) AS users FROM users") - rows = self.cursor_to_dict(txn) - if rows: - return rows[0]["users"] - return 0 - - ret = yield self.runInteraction("count_users", _count_users) - defer.returnValue(ret) - - def count_daily_user_type(self): - """ - Counts 1) native non guest users - 2) native guests users - 3) bridged users - who registered on the homeserver in the past 24 hours - """ - def _count_daily_user_type(txn): - yesterday = int(self._clock.time()) - (60 * 60 * 24) - - sql = """ - SELECT user_type, COALESCE(count(*), 0) AS count FROM ( - SELECT - CASE - WHEN is_guest=0 AND appservice_id IS NULL THEN 'native' - WHEN is_guest=1 AND appservice_id IS NULL THEN 'guest' - WHEN is_guest=0 AND appservice_id IS NOT NULL THEN 'bridged' - END AS user_type - FROM users - WHERE creation_ts > ? - ) AS t GROUP BY user_type - """ - results = {'native': 0, 'guest': 0, 'bridged': 0} - txn.execute(sql, (yesterday,)) - for row in txn: - results[row[0]] = row[1] - return results - return self.runInteraction("count_daily_user_type", _count_daily_user_type) - - @defer.inlineCallbacks - def count_nonbridged_users(self): - def _count_users(txn): - txn.execute(""" - SELECT COALESCE(COUNT(*), 0) FROM users - WHERE appservice_id IS NULL - """) - count, = txn.fetchone() - return count - - ret = yield self.runInteraction("count_users", _count_users) - defer.returnValue(ret) - - @defer.inlineCallbacks - def find_next_generated_user_id_localpart(self): - """ - Gets the localpart of the next generated user ID. - - Generated user IDs are integers, and we aim for them to be as small as - we can. Unfortunately, it's possible some of them are already taken by - existing users, and there may be gaps in the already taken range. This - function returns the start of the first allocatable gap. This is to - avoid the case of ID 10000000 being pre-allocated, so us wasting the - first (and shortest) many generated user IDs. 
- """ - def _find_next_generated_user_id(txn): - txn.execute("SELECT name FROM users") - - regex = re.compile(r"^@(\d+):") - - found = set() - - for user_id, in txn: - match = regex.search(user_id) - if match: - found.add(int(match.group(1))) - for i in range(len(found) + 1): - if i not in found: - return i - - defer.returnValue((yield self.runInteraction( - "find_next_generated_user_id", - _find_next_generated_user_id - ))) - - @defer.inlineCallbacks - def get_3pid_guest_access_token(self, medium, address): - ret = yield self._simple_select_one( - "threepid_guest_access_tokens", - { - "medium": medium, - "address": address - }, - ["guest_access_token"], True, 'get_3pid_guest_access_token' - ) - if ret: - defer.returnValue(ret["guest_access_token"]) - defer.returnValue(None) - @defer.inlineCallbacks def save_or_get_3pid_guest_access_token( self, medium, address, access_token, inviter_user_id -- cgit 1.4.1 From 9caab0c364e4c79c76cc0816a598ad6174da364b Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 14 Feb 2019 13:43:50 +0000 Subject: Transfer bans on room upgrade --- synapse/handlers/room.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index f9af1f0046..924880d522 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -285,6 +285,7 @@ class RoomCreationHandler(BaseHandler): (EventTypes.RoomAvatar, ""), (EventTypes.Encryption, ""), (EventTypes.ServerACL, ""), + (EventTypes.Member, None), ) old_room_state_ids = yield self.store.get_filtered_current_state_ids( @@ -296,6 +297,19 @@ class RoomCreationHandler(BaseHandler): for k, old_event_id in iteritems(old_room_state_ids): old_event = old_room_state_events.get(old_event_id) if old_event: + + # Only transfer ban membership events + if ("membership" in old_event.content and + old_event.content["membership"] == "ban"): + yield self.room_member_handler.update_membership( + requester, + UserID.from_string(old_event['state_key']), + room_id, + "ban", + ratelimit=False, + content=old_event.content, + ) + initial_state[k] = old_event.content yield self._send_events_for_new_room( -- cgit 1.4.1 From 915421065b6738c78779d40994cb2dd33d618b9b Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Mon, 18 Feb 2019 14:02:09 +0000 Subject: Membership events are done later --- synapse/handlers/room.py | 32 ++++++++++++++++++++------------ 1 file changed, 20 insertions(+), 12 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 924880d522..2d24c115b6 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -294,21 +294,14 @@ class RoomCreationHandler(BaseHandler): # map from event_id to BaseEvent old_room_state_events = yield self.store.get_events(old_room_state_ids.values()) + member_events = [] for k, old_event_id in iteritems(old_room_state_ids): old_event = old_room_state_events.get(old_event_id) if old_event: - - # Only transfer ban membership events - if ("membership" in old_event.content and - old_event.content["membership"] == "ban"): - yield self.room_member_handler.update_membership( - requester, - UserID.from_string(old_event['state_key']), - room_id, - "ban", - ratelimit=False, - content=old_event.content, - ) + # Do membership events later + if ("membership" in old_event.content): + member_events.append(old_event) + continue initial_state[k] = old_event.content @@ -325,6 +318,21 @@ class RoomCreationHandler(BaseHandler): 
creation_content=creation_content, ) + # Transfer membership events + for old_event in member_events: + # Only transfer ban events + logger.info("Event type: " + str(old_event.content)) + if ("membership" in old_event.content and + old_event.content["membership"] == "ban"): + yield self.room_member_handler.update_membership( + requester, + UserID.from_string(old_event['state_key']), + new_room_id, + "ban", + ratelimit=False, + content=old_event.content, + ) + # XXX invites/joins # XXX 3pid invites -- cgit 1.4.1 From 32e54b472a6c6c12d0b92ac46c8733c300589f19 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Feb 2019 14:08:13 +0000 Subject: Fix kicking guest users in worker mode When guest_access changes from allowed to forbidden all local guest users should be kicked from the room. This did not happen when revocation was received from federation on a worker. Presumably broken in #4141 --- synapse/app/federation_reader.py | 2 ++ synapse/handlers/_base.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 6ee2b76dcd..b116c17669 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -40,6 +40,7 @@ from synapse.replication.slave.storage.profile import SlavedProfileStore from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore from synapse.replication.slave.storage.pushers import SlavedPusherStore from synapse.replication.slave.storage.receipts import SlavedReceiptsStore +from synapse.replication.slave.storage.registration import SlavedRegistrationStore from synapse.replication.slave.storage.room import RoomStore from synapse.replication.slave.storage.transactions import SlavedTransactionStore from synapse.replication.tcp.client import ReplicationClientHandler @@ -62,6 +63,7 @@ class FederationReaderSlavedStore( SlavedReceiptsStore, SlavedEventStore, SlavedKeyStore, + SlavedRegistrationStore, RoomStore, DirectoryStore, SlavedTransactionStore, diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 704181d2d3..594754cfd8 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -167,4 +167,4 @@ class BaseHandler(object): ratelimit=False, ) except Exception as e: - logger.warn("Error kicking guest user: %s" % (e,)) + logger.exception("Error kicking guest user: %s" % (e,)) -- cgit 1.4.1 From e07cc31cb800c6ec99a23960f78a2e7f968c255b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Feb 2019 14:52:48 +0000 Subject: Correctly handle HttpResponseException --- synapse/handlers/device.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 6eddb10e0d..d9d65347bc 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -504,13 +504,13 @@ class DeviceListEduUpdater(object): origin = get_domain_from_id(user_id) try: result = yield self.federation.query_user_devices(origin, user_id) - except (NotRetryingDestination, RequestSendFailed): + except ( + NotRetryingDestination, RequestSendFailed, HttpResponseException, + ): # TODO: Remember that we are now out of sync and try again # later logger.warn( - "Failed to handle device list update for %s," - " we're not retrying the remote", - user_id, + "Failed to handle device list update for %s", user_id, ) # We abort on exceptions rather than accepting the update # as otherwise synapse will 'forget' that its device 
list -- cgit 1.4.1 From 94960cef037cac1361f42a4ab709b0693ba903ae Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Feb 2019 15:24:13 +0000 Subject: pep8 --- synapse/handlers/device.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index d9d65347bc..c708c35d4d 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -20,7 +20,11 @@ from twisted.internet import defer from synapse.api import errors from synapse.api.constants import EventTypes -from synapse.api.errors import FederationDeniedError, RequestSendFailed +from synapse.api.errors import ( + FederationDeniedError, + HttpResponseException, + RequestSendFailed, +) from synapse.types import RoomStreamToken, get_domain_from_id from synapse.util import stringutils from synapse.util.async_helpers import Linearizer -- cgit 1.4.1 From 45bb55c6de8b50fdd00893a6ef86623d2f34b864 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 18 Feb 2019 15:46:23 +0000 Subject: Use a configuration parameter to give the domain to generate a certificate for --- synapse/config/tls.py | 7 +++++++ synapse/handlers/acme.py | 29 ++++------------------------- 2 files changed, 11 insertions(+), 25 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 5fb3486db1..a3a5ece681 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -42,6 +42,7 @@ class TlsConfig(Config): self.acme_port = acme_config.get("port", 80) self.acme_bind_addresses = acme_config.get("bind_addresses", ['::', '0.0.0.0']) self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30) + self.acme_domain = acme_config.get("domain", config.get("server_name")) self.tls_certificate_file = self.abspath(config.get("tls_certificate_path")) self.tls_private_key_file = self.abspath(config.get("tls_private_key_path")) @@ -229,6 +230,12 @@ class TlsConfig(Config): # # reprovision_threshold: 30 + # What domain the certificate should be for. Only useful if + # delegation via a /.well-known/matrix/server file is being used. + # Defaults to the server_name configuration parameter. + # + # domain: matrix.example.com + # List of allowed TLS fingerprints for this server to publish along # with the signing keys for this server. Other matrix servers that # make HTTPS requests to this server will check that the TLS diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index ca5b7257d3..f8a786a4da 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -27,8 +27,6 @@ from twisted.web import server, static from twisted.web.resource import Resource from synapse.app import check_bind_error -from synapse.crypto.context_factory import ClientTLSOptionsFactory -from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent logger = logging.getLogger(__name__) @@ -125,34 +123,15 @@ class AcmeHandler(object): @defer.inlineCallbacks def provision_certificate(self): - # Retrieve .well-known if it's in use. We do so through the federation - # agent, because that's where the .well-known logic lives. - agent = MatrixFederationAgent( - tls_client_options_factory=ClientTLSOptionsFactory(None), - reactor=self.reactor, - ) - delegated = yield agent._get_well_known(bytes(self.hs.hostname, "ascii")) - - # If .well-known is in use, use the delegated hostname instead of the - # homeserver's server_name. 
- if delegated: - cert_name = delegated.decode("ascii") - logger.info( - ".well-known is in use, provisioning %s instead of %s", - cert_name, self.hs.hostname, - ) - else: - cert_name = self.hs.hostname - - logger.warning("Reprovisioning %s", cert_name) + logger.warning("Reprovisioning %s", self.hs.config.acme_domain) try: - yield self._issuer.issue_cert(cert_name) + yield self._issuer.issue_cert(self.hs.config.acme_domain) except Exception: logger.exception("Fail!") raise - logger.warning("Reprovisioned %s, saving.", cert_name) - cert_chain = self._store.certs[cert_name] + logger.warning("Reprovisioned %s, saving.", self.hs.config.acme_domain) + cert_chain = self._store.certs[self.hs.config.acme_domain] try: with open(self.hs.config.tls_private_key_file, "wb") as private_key_file: -- cgit 1.4.1 From af691e415c3247b912137227a06a68d4c4356586 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Feb 2019 16:49:38 +0000 Subject: Move register_device into handler --- synapse/handlers/register.py | 51 ++++++++++++++-- synapse/replication/http/login.py | 17 +----- synapse/rest/client/v1/login.py | 59 +++++++----------- synapse/rest/client/v2_alpha/register.py | 49 +-------------- tests/rest/client/v2_alpha/test_register.py | 93 +++++++---------------------- 5 files changed, 97 insertions(+), 172 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 8ea557a003..f92ab4d525 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -27,6 +27,7 @@ from synapse.api.errors import ( SynapseError, ) from synapse.http.client import CaptchaServerHttpClient +from synapse.replication.http.login import RegisterDeviceReplicationServlet from synapse.replication.http.register import ReplicationRegisterServlet from synapse.types import RoomAlias, RoomID, UserID, create_requester from synapse.util.async_helpers import Linearizer @@ -64,6 +65,11 @@ class RegistrationHandler(BaseHandler): if hs.config.worker_app: self._register_client = ReplicationRegisterServlet.make_client(hs) + self._register_device_client = ( + RegisterDeviceReplicationServlet.make_client(hs) + ) + else: + self.device_handler = hs.get_device_handler() @defer.inlineCallbacks def check_username(self, localpart, guest_access_token=None, @@ -159,7 +165,7 @@ class RegistrationHandler(BaseHandler): yield self.auth.check_auth_blocking(threepid=threepid) password_hash = None if password: - password_hash = yield self.auth_handler().hash(password) + password_hash = yield self._auth_handler.hash(password) if localpart: yield self.check_username(localpart, guest_access_token=guest_access_token) @@ -516,9 +522,6 @@ class RegistrationHandler(BaseHandler): defer.returnValue((user_id, token)) - def auth_handler(self): - return self.hs.get_auth_handler() - @defer.inlineCallbacks def get_or_register_3pid_guest(self, medium, address, inviter_user_id): """Get a guest access token for a 3PID, creating a guest account if @@ -628,3 +631,43 @@ class RegistrationHandler(BaseHandler): admin=admin, user_type=user_type, ) + + @defer.inlineCallbacks + def register_device(self, user_id, device_id, initial_display_name, + is_guest=False): + """Register a device for a user and generate an access token. + + Args: + user_id (str): full canonical @user:id + device_id (str|None): The device ID to check, or None to generate + a new one. + initial_display_name (str|None): An optional display name for the + device. 
+ is_guest (bool): Whether this is a guest account + + Returns: + defer.Deferred[tuple[str, str]]: Tuple of device ID and access token + """ + + if self.hs.config.worker_app: + r = yield self._register_device_client( + user_id=user_id, + device_id=device_id, + initial_display_name=initial_display_name, + is_guest=is_guest, + ) + defer.returnValue((r["device_id"], r["access_token"])) + else: + device_id = yield self.device_handler.check_device_registered( + user_id, device_id, initial_display_name + ) + if is_guest: + access_token = self.macaroon_gen.generate_access_token( + user_id, ["guest = true"] + ) + else: + access_token = yield self._auth_handler.get_access_token_for_user_id( + user_id, device_id=device_id, + ) + + defer.returnValue((device_id, access_token)) diff --git a/synapse/replication/http/login.py b/synapse/replication/http/login.py index 797f6aabd1..1590eca317 100644 --- a/synapse/replication/http/login.py +++ b/synapse/replication/http/login.py @@ -35,9 +35,7 @@ class RegisterDeviceReplicationServlet(ReplicationEndpoint): def __init__(self, hs): super(RegisterDeviceReplicationServlet, self).__init__(hs) - self.auth_handler = hs.get_auth_handler() - self.device_handler = hs.get_device_handler() - self.macaroon_gen = hs.get_macaroon_generator() + self.registration_handler = hs.get_handlers().registration_handler @staticmethod def _serialize_payload(user_id, device_id, initial_display_name, is_guest): @@ -62,19 +60,10 @@ class RegisterDeviceReplicationServlet(ReplicationEndpoint): initial_display_name = content["initial_display_name"] is_guest = content["is_guest"] - device_id = yield self.device_handler.check_device_registered( - user_id, device_id, initial_display_name, + device_id, access_token = yield self.registration_handler.register_device( + user_id, device_id, initial_display_name, is_guest, ) - if is_guest: - access_token = self.macaroon_gen.generate_access_token( - user_id, ["guest = true"] - ) - else: - access_token = yield self.auth_handler.get_access_token_for_user_id( - user_id, device_id=device_id, - ) - defer.returnValue((200, { "device_id": device_id, "access_token": access_token, diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 942e4d3816..4a5775083f 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -94,7 +94,7 @@ class LoginRestServlet(ClientV1RestServlet): self.jwt_algorithm = hs.config.jwt_algorithm self.cas_enabled = hs.config.cas_enabled self.auth_handler = self.hs.get_auth_handler() - self.device_handler = self.hs.get_device_handler() + self.registration_handler = hs.get_handlers().registration_handler self.handlers = hs.get_handlers() self._well_known_builder = WellKnownBuilder(hs) @@ -220,11 +220,10 @@ class LoginRestServlet(ClientV1RestServlet): login_submission, ) - device_id = yield self._register_device( - canonical_user_id, login_submission, - ) - access_token = yield auth_handler.get_access_token_for_user_id( - canonical_user_id, device_id, + device_id = login_submission.get("device_id") + initial_display_name = login_submission.get("initial_device_display_name") + device_id, access_token = yield self.registration_handler.register_device( + canonical_user_id, device_id, initial_display_name, ) result = { @@ -246,10 +245,13 @@ class LoginRestServlet(ClientV1RestServlet): user_id = ( yield auth_handler.validate_short_term_login_token_and_get_user_id(token) ) - device_id = yield self._register_device(user_id, login_submission) - access_token = yield 
auth_handler.get_access_token_for_user_id( - user_id, device_id, + + device_id = login_submission.get("device_id") + initial_display_name = login_submission.get("initial_device_display_name") + device_id, access_token = yield self.registration_handler.register_device( + user_id, device_id, initial_display_name, ) + result = { "user_id": user_id, # may have changed "access_token": access_token, @@ -286,11 +288,10 @@ class LoginRestServlet(ClientV1RestServlet): auth_handler = self.auth_handler registered_user_id = yield auth_handler.check_user_exists(user_id) if registered_user_id: - device_id = yield self._register_device( - registered_user_id, login_submission - ) - access_token = yield auth_handler.get_access_token_for_user_id( - registered_user_id, device_id, + device_id = login_submission.get("device_id") + initial_display_name = login_submission.get("initial_device_display_name") + device_id, access_token = yield self.registration_handler.register_device( + registered_user_id, device_id, initial_display_name, ) result = { @@ -299,12 +300,16 @@ class LoginRestServlet(ClientV1RestServlet): "home_server": self.hs.hostname, } else: - # TODO: we should probably check that the register isn't going - # to fonx/change our user_id before registering the device - device_id = yield self._register_device(user_id, login_submission) user_id, access_token = ( yield self.handlers.registration_handler.register(localpart=user) ) + + device_id = login_submission.get("device_id") + initial_display_name = login_submission.get("initial_device_display_name") + device_id, access_token = yield self.registration_handler.register_device( + registered_user_id, device_id, initial_display_name, + ) + result = { "user_id": user_id, # may have changed "access_token": access_token, @@ -313,26 +318,6 @@ class LoginRestServlet(ClientV1RestServlet): defer.returnValue(result) - def _register_device(self, user_id, login_submission): - """Register a device for a user. - - This is called after the user's credentials have been validated, but - before the access token has been issued. 
- - Args: - (str) user_id: full canonical @user:id - (object) login_submission: dictionary supplied to /login call, from - which we pull device_id and initial_device_name - Returns: - defer.Deferred: (str) device_id - """ - device_id = login_submission.get("device_id") - initial_display_name = login_submission.get( - "initial_device_display_name") - return self.device_handler.check_device_registered( - user_id, device_id, initial_display_name - ) - class CasRedirectServlet(RestServlet): PATTERNS = client_path_patterns("/login/(cas|sso)/redirect") diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index c52280c50c..c1cdb8f9c8 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -33,7 +33,6 @@ from synapse.http.servlet import ( parse_json_object_from_request, parse_string, ) -from synapse.replication.http.login import RegisterDeviceReplicationServlet from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.ratelimitutils import FederationRateLimiter from synapse.util.threepids import check_3pid_allowed @@ -193,13 +192,6 @@ class RegisterRestServlet(RestServlet): self.room_member_handler = hs.get_room_member_handler() self.macaroon_gen = hs.get_macaroon_generator() - if self.hs.config.worker_app: - self._register_device_client = ( - RegisterDeviceReplicationServlet.make_client(hs) - ) - else: - self.device_handler = hs.get_device_handler() - @interactive_auth_handler @defer.inlineCallbacks def on_POST(self, request): @@ -642,7 +634,7 @@ class RegisterRestServlet(RestServlet): if not params.get("inhibit_login", False): device_id = params.get("device_id") initial_display_name = params.get("initial_device_display_name") - device_id, access_token = yield self._register_device( + device_id, access_token = yield self.registration_handler.register_device( user_id, device_id, initial_display_name, is_guest=False, ) @@ -652,43 +644,6 @@ class RegisterRestServlet(RestServlet): }) defer.returnValue(result) - @defer.inlineCallbacks - def _register_device(self, user_id, device_id, initial_display_name, - is_guest): - """Register a device for a user and generate an access token. - - Args: - user_id (str): full canonical @user:id - device_id (str|None): The device ID to check, or None to generate - a new one. - initial_display_name (str|None): An optional display name for the - device. - is_guest (bool): Whether this is a guest account - Returns: - defer.Deferred[tuple[str, str]]: Tuple of device ID and access token - """ - if self.hs.config.worker_app: - r = yield self._register_device_client( - user_id=user_id, - device_id=device_id, - initial_display_name=initial_display_name, - is_guest=is_guest, - ) - defer.returnValue((r["device_id"], r["access_token"])) - else: - device_id = yield self.device_handler.check_device_registered( - user_id, device_id, initial_display_name - ) - if is_guest: - access_token = self.macaroon_gen.generate_access_token( - user_id, ["guest = true"] - ) - else: - access_token = yield self.auth_handler.get_access_token_for_user_id( - user_id, device_id=device_id, - ) - defer.returnValue((device_id, access_token)) - @defer.inlineCallbacks def _do_guest_registration(self, params): if not self.hs.config.allow_guest_access: @@ -702,7 +657,7 @@ class RegisterRestServlet(RestServlet): # we have nowhere to store it. 
device_id = synapse.api.auth.GUEST_DEVICE_ID initial_display_name = params.get("initial_device_display_name") - device_id, access_token = yield self._register_device( + device_id, access_token = yield self.registration_handler.register_device( user_id, device_id, initial_display_name, is_guest=True, ) diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index 18080ebfd6..906b348d3e 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -1,10 +1,7 @@ import json -from mock import Mock - -from twisted.python import failure - -from synapse.api.errors import InteractiveAuthIncompleteError +from synapse.api.constants import LoginType +from synapse.appservice import ApplicationService from synapse.rest.client.v2_alpha.register import register_servlets from tests import unittest @@ -18,61 +15,28 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): self.url = b"/_matrix/client/r0/register" - self.appservice = None - self.auth = Mock( - get_appservice_by_req=Mock(side_effect=lambda x: self.appservice) - ) - - self.auth_result = failure.Failure(InteractiveAuthIncompleteError(None)) - self.auth_handler = Mock( - check_auth=Mock(side_effect=lambda x, y, z: self.auth_result), - get_session_data=Mock(return_value=None), - ) - self.registration_handler = Mock() - self.identity_handler = Mock() - self.login_handler = Mock() - self.device_handler = Mock() - - def check_device_registered(user_id, device_id, initial_display_name): - # Just echo back the given device ID, or return a new "FAKE" device - # ID - if device_id: - return device_id - else: - return "FAKE" - - self.device_handler.check_device_registered = Mock( - side_effect=check_device_registered, - ) - - self.datastore = Mock(return_value=Mock()) - self.datastore.get_current_state_deltas = Mock(return_value=[]) - - # do the dance to hook it up to the hs global - self.handlers = Mock( - registration_handler=self.registration_handler, - identity_handler=self.identity_handler, - login_handler=self.login_handler, - ) self.hs = self.setup_test_homeserver() - self.hs.get_auth = Mock(return_value=self.auth) - self.hs.get_handlers = Mock(return_value=self.handlers) - self.hs.get_auth_handler = Mock(return_value=self.auth_handler) - self.hs.get_device_handler = Mock(return_value=self.device_handler) - self.hs.get_datastore = Mock(return_value=self.datastore) self.hs.config.enable_registration = True self.hs.config.registrations_require_3pid = [] self.hs.config.auto_join_rooms = [] + self.hs.config.enable_registration_captcha = False return self.hs def test_POST_appservice_registration_valid(self): - user_id = "@kermit:muppet" - token = "kermits_access_token" - self.appservice = {"id": "1234"} - self.registration_handler.appservice_register = Mock(return_value=user_id) - self.auth_handler.get_access_token_for_user_id = Mock(return_value=token) - request_data = json.dumps({"username": "kermit"}) + user_id = "@as_user_kermit:test" + as_token = "i_am_an_app_service" + + appservice = ApplicationService( + as_token, self.hs.config.hostname, + id="1234", + namespaces={ + "users": [{"regex": r"@as_user.*", "exclusive": True}], + }, + ) + + self.hs.get_datastore().services_cache.append(appservice) + request_data = json.dumps({"username": "as_user_kermit"}) request, channel = self.make_request( b"POST", self.url + b"?access_token=i_am_an_app_service", request_data @@ -82,7 +46,6 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): 
self.assertEquals(channel.result["code"], b"200", channel.result) det_data = { "user_id": user_id, - "access_token": token, "home_server": self.hs.hostname, } self.assertDictContainsSubset(det_data, channel.json_body) @@ -114,37 +77,30 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): self.assertEquals(channel.json_body["error"], "Invalid username") def test_POST_user_valid(self): - user_id = "@kermit:muppet" - token = "kermits_access_token" + user_id = "@kermit:test" device_id = "frogfone" - params = {"username": "kermit", "password": "monkey", "device_id": device_id} + params = { + "username": "kermit", + "password": "monkey", + "device_id": device_id, + "auth": {"type": LoginType.DUMMY}, + } request_data = json.dumps(params) - self.registration_handler.check_username = Mock(return_value=True) - self.auth_result = (None, params, None) - self.registration_handler.register = Mock(return_value=(user_id, None)) - self.auth_handler.get_access_token_for_user_id = Mock(return_value=token) - request, channel = self.make_request(b"POST", self.url, request_data) self.render(request) det_data = { "user_id": user_id, - "access_token": token, "home_server": self.hs.hostname, "device_id": device_id, } self.assertEquals(channel.result["code"], b"200", channel.result) self.assertDictContainsSubset(det_data, channel.json_body) - self.auth_handler.get_login_tuple_for_user_id( - user_id, device_id=device_id, initial_device_display_name=None - ) def test_POST_disabled_registration(self): self.hs.config.enable_registration = False request_data = json.dumps({"username": "kermit", "password": "monkey"}) - self.registration_handler.check_username = Mock(return_value=True) self.auth_result = (None, {"username": "kermit", "password": "monkey"}, None) - self.registration_handler.register = Mock(return_value=("@user:id", "t")) request, channel = self.make_request(b"POST", self.url, request_data) self.render(request) @@ -153,16 +109,13 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): self.assertEquals(channel.json_body["error"], "Registration has been disabled") def test_POST_guest_registration(self): - user_id = "a@b" self.hs.config.macaroon_secret_key = "test" self.hs.config.allow_guest_access = True - self.registration_handler.register = Mock(return_value=(user_id, None)) request, channel = self.make_request(b"POST", self.url + b"?kind=guest", b"{}") self.render(request) det_data = { - "user_id": user_id, "home_server": self.hs.hostname, "device_id": "guest_device", } -- cgit 1.4.1 From f8b9ca53cedced14c5687581d981b5473bb7054d Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Mon, 18 Feb 2019 16:56:34 +0000 Subject: Move member event processing and changelog fix --- changelog.d/4642.bugfix | 1 - changelog.d/4642.feature | 1 + synapse/handlers/room.py | 16 +++++++--------- 3 files changed, 8 insertions(+), 10 deletions(-) delete mode 100644 changelog.d/4642.bugfix create mode 100644 changelog.d/4642.feature (limited to 'synapse/handlers') diff --git a/changelog.d/4642.bugfix b/changelog.d/4642.bugfix deleted file mode 100644 index bfbf95bcbb..0000000000 --- a/changelog.d/4642.bugfix +++ /dev/null @@ -1 +0,0 @@ -Transfer bans on room upgrade. \ No newline at end of file diff --git a/changelog.d/4642.feature b/changelog.d/4642.feature new file mode 100644 index 0000000000..bfbf95bcbb --- /dev/null +++ b/changelog.d/4642.feature @@ -0,0 +1 @@ +Transfer bans on room upgrade. 
\ No newline at end of file diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 2d24c115b6..0676e7f626 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -285,7 +285,6 @@ class RoomCreationHandler(BaseHandler): (EventTypes.RoomAvatar, ""), (EventTypes.Encryption, ""), (EventTypes.ServerACL, ""), - (EventTypes.Member, None), ) old_room_state_ids = yield self.store.get_filtered_current_state_ids( @@ -294,15 +293,9 @@ class RoomCreationHandler(BaseHandler): # map from event_id to BaseEvent old_room_state_events = yield self.store.get_events(old_room_state_ids.values()) - member_events = [] for k, old_event_id in iteritems(old_room_state_ids): old_event = old_room_state_events.get(old_event_id) if old_event: - # Do membership events later - if ("membership" in old_event.content): - member_events.append(old_event) - continue - initial_state[k] = old_event.content yield self._send_events_for_new_room( @@ -319,9 +312,14 @@ class RoomCreationHandler(BaseHandler): ) # Transfer membership events - for old_event in member_events: + old_room_member_state_ids = yield self.store.get_filtered_current_state_ids( + old_room_id, StateFilter.from_types([(EventTypes.Member, None)]), + ) + + # map from event_id to BaseEvent + old_room_member_state_events = yield self.store.get_events(old_room_member_state_ids.values()) + for k, old_event in iteritems(old_room_member_state_events): # Only transfer ban events - logger.info("Event type: " + str(old_event.content)) if ("membership" in old_event.content and old_event.content["membership"] == "ban"): yield self.room_member_handler.update_membership( -- cgit 1.4.1 From 34ac75ce2c576f32a58fb7070fd52279cba70515 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Mon, 18 Feb 2019 18:23:37 +0000 Subject: lint --- synapse/handlers/room.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 0676e7f626..67b15697fd 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -317,7 +317,9 @@ class RoomCreationHandler(BaseHandler): ) # map from event_id to BaseEvent - old_room_member_state_events = yield self.store.get_events(old_room_member_state_ids.values()) + old_room_member_state_events = yield self.store.get_events( + old_room_member_state_ids.values(), + ) for k, old_event in iteritems(old_room_member_state_events): # Only transfer ban events if ("membership" in old_event.content and -- cgit 1.4.1 From a8626901cd384f263c8ae578466f95f0c3cceb95 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 19 Feb 2019 10:54:33 +0000 Subject: Fetch ACME domain into an instance member --- synapse/handlers/acme.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index f8a786a4da..813777bf18 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -56,6 +56,7 @@ class AcmeHandler(object): def __init__(self, hs): self.hs = hs self.reactor = hs.get_reactor() + self._acme_domain = hs.config.acme_domain @defer.inlineCallbacks def start_listening(self): @@ -123,15 +124,15 @@ class AcmeHandler(object): @defer.inlineCallbacks def provision_certificate(self): - logger.warning("Reprovisioning %s", self.hs.config.acme_domain) + logger.warning("Reprovisioning %s", self._acme_domain) try: - yield self._issuer.issue_cert(self.hs.config.acme_domain) + yield self._issuer.issue_cert(self._acme_domain) except 
Exception: logger.exception("Fail!") raise - logger.warning("Reprovisioned %s, saving.", self.hs.config.acme_domain) - cert_chain = self._store.certs[self.hs.config.acme_domain] + logger.warning("Reprovisioned %s, saving.", self._acme_domain) + cert_chain = self._store.certs[self._acme_domain] try: with open(self.hs.config.tls_private_key_file, "wb") as private_key_file: -- cgit 1.4.1 From dbdc565dfdca0e689f816b7614dea6530cd54843 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 20 Feb 2019 07:47:31 +0000 Subject: Fix registration on workers (#4682) * Move RegistrationHandler init to HomeServer * Move post registration actions to RegistrationHandler * Add post regisration replication endpoint * Newsfile --- changelog.d/4682.feature | 1 + synapse/handlers/__init__.py | 2 - synapse/handlers/register.py | 200 ++++++++++++++++++++++++++++++- synapse/handlers/room_member.py | 2 +- synapse/module_api/__init__.py | 2 +- synapse/replication/http/login.py | 2 +- synapse/replication/http/membership.py | 4 +- synapse/replication/http/register.py | 55 +++++++++ synapse/rest/client/v1/login.py | 4 +- synapse/rest/client/v2_alpha/auth.py | 2 +- synapse/rest/client/v2_alpha/register.py | 145 ++-------------------- synapse/rest/consent/consent_resource.py | 2 +- synapse/server.py | 5 + tests/handlers/test_register.py | 2 +- 14 files changed, 277 insertions(+), 151 deletions(-) create mode 100644 changelog.d/4682.feature (limited to 'synapse/handlers') diff --git a/changelog.d/4682.feature b/changelog.d/4682.feature new file mode 100644 index 0000000000..b3a3915eb0 --- /dev/null +++ b/changelog.d/4682.feature @@ -0,0 +1 @@ +Allow registration and login to be handled by a worker instance. diff --git a/synapse/handlers/__init__.py b/synapse/handlers/__init__.py index 413425fed1..2dd183018a 100644 --- a/synapse/handlers/__init__.py +++ b/synapse/handlers/__init__.py @@ -17,7 +17,6 @@ from .admin import AdminHandler from .directory import DirectoryHandler from .federation import FederationHandler from .identity import IdentityHandler -from .register import RegistrationHandler from .search import SearchHandler @@ -41,7 +40,6 @@ class Handlers(object): """ def __init__(self, hs): - self.registration_handler = RegistrationHandler(hs) self.federation_handler = FederationHandler(hs) self.directory_handler = DirectoryHandler(hs) self.admin_handler = AdminHandler(hs) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index f92ab4d525..24a4cb5a83 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -19,6 +19,7 @@ import logging from twisted.internet import defer from synapse import types +from synapse.api.constants import LoginType from synapse.api.errors import ( AuthError, Codes, @@ -26,9 +27,14 @@ from synapse.api.errors import ( RegistrationError, SynapseError, ) +from synapse.config.server import is_threepid_reserved from synapse.http.client import CaptchaServerHttpClient +from synapse.http.servlet import assert_params_in_dict from synapse.replication.http.login import RegisterDeviceReplicationServlet -from synapse.replication.http.register import ReplicationRegisterServlet +from synapse.replication.http.register import ( + ReplicationPostRegisterActionsServlet, + ReplicationRegisterServlet, +) from synapse.types import RoomAlias, RoomID, UserID, create_requester from synapse.util.async_helpers import Linearizer from synapse.util.threepids import check_3pid_allowed @@ -53,6 +59,7 @@ class RegistrationHandler(BaseHandler): self.profile_handler = 
hs.get_profile_handler() self.user_directory_handler = hs.get_user_directory_handler() self.captcha_client = CaptchaServerHttpClient(hs) + self.identity_handler = self.hs.get_handlers().identity_handler self._next_generated_user_id = None @@ -68,8 +75,12 @@ class RegistrationHandler(BaseHandler): self._register_device_client = ( RegisterDeviceReplicationServlet.make_client(hs) ) + self._post_registration_client = ( + ReplicationPostRegisterActionsServlet.make_client(hs) + ) else: self.device_handler = hs.get_device_handler() + self.pusher_pool = hs.get_pusherpool() @defer.inlineCallbacks def check_username(self, localpart, guest_access_token=None, @@ -369,8 +380,7 @@ class RegistrationHandler(BaseHandler): logger.info("validating threepidcred sid %s on id server %s", c['sid'], c['idServer']) try: - identity_handler = self.hs.get_handlers().identity_handler - threepid = yield identity_handler.threepid_from_creds(c) + threepid = yield self.identity_handler.threepid_from_creds(c) except Exception: logger.exception("Couldn't validate 3pid") raise RegistrationError(400, "Couldn't validate 3pid") @@ -394,9 +404,8 @@ class RegistrationHandler(BaseHandler): # Now we have a matrix ID, bind it to the threepids we were given for c in threepidCreds: - identity_handler = self.hs.get_handlers().identity_handler # XXX: This should be a deferred list, shouldn't it? - yield identity_handler.bind_threepid(c, user_id) + yield self.identity_handler.bind_threepid(c, user_id) def check_user_id_not_appservice_exclusive(self, user_id, allowed_appservice=None): # don't allow people to register the server notices mxid @@ -671,3 +680,184 @@ class RegistrationHandler(BaseHandler): ) defer.returnValue((device_id, access_token)) + + @defer.inlineCallbacks + def post_registration_actions(self, user_id, auth_result, access_token, + bind_email, bind_msisdn): + """A user has completed registration + + Args: + user_id (str): The user ID that consented + auth_result (dict): The authenticated credentials of the newly + registered user. + access_token (str|None): The access token of the newly logged in + device, or None if `inhibit_login` enabled. + bind_email (bool): Whether to bind the email with the identity + server + bind_msisdn (bool): Whether to bind the msisdn with the identity + server + """ + if self.hs.config.worker_app: + yield self._post_registration_client( + user_id=user_id, + auth_result=auth_result, + access_token=access_token, + bind_email=bind_email, + bind_msisdn=bind_msisdn, + ) + return + + if auth_result and LoginType.EMAIL_IDENTITY in auth_result: + threepid = auth_result[LoginType.EMAIL_IDENTITY] + # Necessary due to auth checks prior to the threepid being + # written to the db + if is_threepid_reserved( + self.hs.config.mau_limits_reserved_threepids, threepid + ): + yield self.store.upsert_monthly_active_user(user_id) + + yield self._register_email_threepid( + user_id, threepid, access_token, + bind_email, + ) + + if auth_result and LoginType.MSISDN in auth_result: + threepid = auth_result[LoginType.MSISDN] + yield self._register_msisdn_threepid( + user_id, threepid, bind_msisdn, + ) + + if auth_result and LoginType.TERMS in auth_result: + yield self._on_user_consented( + user_id, self.hs.config.user_consent_version, + ) + + @defer.inlineCallbacks + def _on_user_consented(self, user_id, consent_version): + """A user consented to the terms on registration + + Args: + user_id (str): The user ID that consented + consent_version (str): version of the policy the user has + consented to. 
+ """ + logger.info("%s has consented to the privacy policy", user_id) + yield self.store.user_set_consent_version( + user_id, consent_version, + ) + yield self.post_consent_actions(user_id) + + @defer.inlineCallbacks + def _register_email_threepid(self, user_id, threepid, token, bind_email): + """Add an email address as a 3pid identifier + + Also adds an email pusher for the email address, if configured in the + HS config + + Also optionally binds emails to the given user_id on the identity server + + Must be called on master. + + Args: + user_id (str): id of user + threepid (object): m.login.email.identity auth response + token (str|None): access_token for the user, or None if not logged + in. + bind_email (bool): true if the client requested the email to be + bound at the identity server + Returns: + defer.Deferred: + """ + reqd = ('medium', 'address', 'validated_at') + if any(x not in threepid for x in reqd): + # This will only happen if the ID server returns a malformed response + logger.info("Can't add incomplete 3pid") + return + + yield self._auth_handler.add_threepid( + user_id, + threepid['medium'], + threepid['address'], + threepid['validated_at'], + ) + + # And we add an email pusher for them by default, but only + # if email notifications are enabled (so people don't start + # getting mail spam where they weren't before if email + # notifs are set up on a home server) + if (self.hs.config.email_enable_notifs and + self.hs.config.email_notif_for_new_users + and token): + # Pull the ID of the access token back out of the db + # It would really make more sense for this to be passed + # up when the access token is saved, but that's quite an + # invasive change I'd rather do separately. + user_tuple = yield self.store.get_user_by_access_token( + token + ) + token_id = user_tuple["token_id"] + + yield self.pusher_pool.add_pusher( + user_id=user_id, + access_token=token_id, + kind="email", + app_id="m.email", + app_display_name="Email Notifications", + device_display_name=threepid["address"], + pushkey=threepid["address"], + lang=None, # We don't know a user's language here + data={}, + ) + + if bind_email: + logger.info("bind_email specified: binding") + logger.debug("Binding emails %s to %s" % ( + threepid, user_id + )) + yield self.identity_handler.bind_threepid( + threepid['threepid_creds'], user_id + ) + else: + logger.info("bind_email not specified: not binding email") + + @defer.inlineCallbacks + def _register_msisdn_threepid(self, user_id, threepid, bind_msisdn): + """Add a phone number as a 3pid identifier + + Also optionally binds msisdn to the given user_id on the identity server + + Must be called on master. 
+ + Args: + user_id (str): id of user + threepid (object): m.login.msisdn auth response + token (str): access_token for the user + bind_email (bool): true if the client requested the email to be + bound at the identity server + Returns: + defer.Deferred: + """ + try: + assert_params_in_dict(threepid, ['medium', 'address', 'validated_at']) + except SynapseError as ex: + if ex.errcode == Codes.MISSING_PARAM: + # This will only happen if the ID server returns a malformed response + logger.info("Can't add incomplete 3pid") + defer.returnValue(None) + raise + + yield self._auth_handler.add_threepid( + user_id, + threepid['medium'], + threepid['address'], + threepid['validated_at'], + ) + + if bind_msisdn: + logger.info("bind_msisdn specified: binding") + logger.debug("Binding msisdn %s to %s", threepid, user_id) + yield self.identity_handler.bind_threepid( + threepid['threepid_creds'], user_id + ) + else: + logger.info("bind_msisdn not specified: not binding msisdn") diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 2beffdf41e..190ea2c7b1 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -61,7 +61,7 @@ class RoomMemberHandler(object): self.federation_handler = hs.get_handlers().federation_handler self.directory_handler = hs.get_handlers().directory_handler - self.registration_handler = hs.get_handlers().registration_handler + self.registration_handler = hs.get_registration_handler() self.profile_handler = hs.get_profile_handler() self.event_creation_handler = hs.get_event_creation_handler() diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 097c844d31..fc9a20ff59 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -79,7 +79,7 @@ class ModuleApi(object): Returns: Deferred: a 2-tuple of (user_id, access_token) """ - reg = self.hs.get_handlers().registration_handler + reg = self.hs.get_registration_handler() return reg.register(localpart=localpart) @defer.inlineCallbacks diff --git a/synapse/replication/http/login.py b/synapse/replication/http/login.py index 1590eca317..63bc0405ea 100644 --- a/synapse/replication/http/login.py +++ b/synapse/replication/http/login.py @@ -35,7 +35,7 @@ class RegisterDeviceReplicationServlet(ReplicationEndpoint): def __init__(self, hs): super(RegisterDeviceReplicationServlet, self).__init__(hs) - self.registration_handler = hs.get_handlers().registration_handler + self.registration_handler = hs.get_registration_handler() @staticmethod def _serialize_payload(user_id, device_id, initial_display_name, is_guest): diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py index e58bebf12a..81a2b204c7 100644 --- a/synapse/replication/http/membership.py +++ b/synapse/replication/http/membership.py @@ -191,7 +191,7 @@ class ReplicationRegister3PIDGuestRestServlet(ReplicationEndpoint): def __init__(self, hs): super(ReplicationRegister3PIDGuestRestServlet, self).__init__(hs) - self.registeration_handler = hs.get_handlers().registration_handler + self.registeration_handler = hs.get_registration_handler() self.store = hs.get_datastore() self.clock = hs.get_clock() @@ -251,7 +251,7 @@ class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint): def __init__(self, hs): super(ReplicationUserJoinedLeftRoomRestServlet, self).__init__(hs) - self.registeration_handler = hs.get_handlers().registration_handler + self.registeration_handler = hs.get_registration_handler() self.store = hs.get_datastore() self.clock = 
hs.get_clock() self.distributor = hs.get_distributor() diff --git a/synapse/replication/http/register.py b/synapse/replication/http/register.py index bdaf37396c..1d27c9221f 100644 --- a/synapse/replication/http/register.py +++ b/synapse/replication/http/register.py @@ -87,5 +87,60 @@ class ReplicationRegisterServlet(ReplicationEndpoint): defer.returnValue((200, {})) +class ReplicationPostRegisterActionsServlet(ReplicationEndpoint): + """Run any post registration actions + """ + + NAME = "post_register" + PATH_ARGS = ("user_id",) + + def __init__(self, hs): + super(ReplicationPostRegisterActionsServlet, self).__init__(hs) + self.store = hs.get_datastore() + self.registration_handler = hs.get_registration_handler() + + @staticmethod + def _serialize_payload(user_id, auth_result, access_token, bind_email, + bind_msisdn): + """ + Args: + user_id (str): The user ID that consented + auth_result (dict): The authenticated credentials of the newly + registered user. + access_token (str|None): The access token of the newly logged in + device, or None if `inhibit_login` enabled. + bind_email (bool): Whether to bind the email with the identity + server + bind_msisdn (bool): Whether to bind the msisdn with the identity + server + """ + return { + "auth_result": auth_result, + "access_token": access_token, + "bind_email": bind_email, + "bind_msisdn": bind_msisdn, + } + + @defer.inlineCallbacks + def _handle_request(self, request, user_id): + content = parse_json_object_from_request(request) + + auth_result = content["auth_result"] + access_token = content["access_token"] + bind_email = content["bind_email"] + bind_msisdn = content["bind_msisdn"] + + yield self.registration_handler.post_registration_actions( + user_id=user_id, + auth_result=auth_result, + access_token=access_token, + bind_email=bind_email, + bind_msisdn=bind_msisdn, + ) + + defer.returnValue((200, {})) + + def register_servlets(hs, http_server): ReplicationRegisterServlet(hs).register(http_server) + ReplicationPostRegisterActionsServlet(hs).register(http_server) diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 4a5775083f..6121c5b6df 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -94,7 +94,7 @@ class LoginRestServlet(ClientV1RestServlet): self.jwt_algorithm = hs.config.jwt_algorithm self.cas_enabled = hs.config.cas_enabled self.auth_handler = self.hs.get_auth_handler() - self.registration_handler = hs.get_handlers().registration_handler + self.registration_handler = hs.get_registration_handler() self.handlers = hs.get_handlers() self._well_known_builder = WellKnownBuilder(hs) @@ -434,7 +434,7 @@ class SSOAuthHandler(object): def __init__(self, hs): self._hostname = hs.hostname self._auth_handler = hs.get_auth_handler() - self._registration_handler = hs.get_handlers().registration_handler + self._registration_handler = hs.get_registration_handler() self._macaroon_gen = hs.get_macaroon_generator() @defer.inlineCallbacks diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py index fa73bdf3a1..f7bb710642 100644 --- a/synapse/rest/client/v2_alpha/auth.py +++ b/synapse/rest/client/v2_alpha/auth.py @@ -129,7 +129,7 @@ class AuthRestServlet(RestServlet): self.hs = hs self.auth = hs.get_auth() self.auth_handler = hs.get_auth_handler() - self.registration_handler = hs.get_handlers().registration_handler + self.registration_handler = hs.get_registration_handler() def on_GET(self, request, stagetype): session = parse_string(request, "session") 
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index c1cdb8f9c8..94cbba4303 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -145,7 +145,7 @@ class UsernameAvailabilityRestServlet(RestServlet): """ super(UsernameAvailabilityRestServlet, self).__init__() self.hs = hs - self.registration_handler = hs.get_handlers().registration_handler + self.registration_handler = hs.get_registration_handler() self.ratelimiter = FederationRateLimiter( hs.get_clock(), # Time window of 2s @@ -187,7 +187,7 @@ class RegisterRestServlet(RestServlet): self.auth = hs.get_auth() self.store = hs.get_datastore() self.auth_handler = hs.get_auth_handler() - self.registration_handler = hs.get_handlers().registration_handler + self.registration_handler = hs.get_registration_handler() self.identity_handler = hs.get_handlers().identity_handler self.room_member_handler = hs.get_room_member_handler() self.macaroon_gen = hs.get_macaroon_generator() @@ -389,8 +389,7 @@ class RegisterRestServlet(RestServlet): registered_user_id ) # don't re-register the threepids - add_email = False - add_msisdn = False + registered = False else: # NB: This may be from the auth handler and NOT from the POST assert_params_in_dict(params, ["password"]) @@ -427,34 +426,21 @@ class RegisterRestServlet(RestServlet): session_id, "registered_user_id", registered_user_id ) - add_email = True - add_msisdn = True + registered = True return_dict = yield self._create_registration_details( registered_user_id, params ) - if add_email and auth_result and LoginType.EMAIL_IDENTITY in auth_result: - threepid = auth_result[LoginType.EMAIL_IDENTITY] - yield self._register_email_threepid( - registered_user_id, threepid, return_dict["access_token"], - params.get("bind_email") + if registered: + yield self.registration_handler.post_registration_actions( + user_id=registered_user_id, + auth_result=auth_result, + access_token=return_dict.get("access_token"), + bind_email=params.get("bind_email"), + bind_msisdn=params.get("bind_msisdn"), ) - if add_msisdn and auth_result and LoginType.MSISDN in auth_result: - threepid = auth_result[LoginType.MSISDN] - yield self._register_msisdn_threepid( - registered_user_id, threepid, return_dict["access_token"], - params.get("bind_msisdn") - ) - - if auth_result and LoginType.TERMS in auth_result: - logger.info("%s has consented to the privacy policy" % registered_user_id) - yield self.store.user_set_consent_version( - registered_user_id, self.hs.config.user_consent_version, - ) - yield self.registration_handler.post_consent_actions(registered_user_id) - defer.returnValue((200, return_dict)) def on_OPTIONS(self, _): @@ -505,115 +491,6 @@ class RegisterRestServlet(RestServlet): result = yield self._create_registration_details(user_id, body) defer.returnValue(result) - @defer.inlineCallbacks - def _register_email_threepid(self, user_id, threepid, token, bind_email): - """Add an email address as a 3pid identifier - - Also adds an email pusher for the email address, if configured in the - HS config - - Also optionally binds emails to the given user_id on the identity server - - Args: - user_id (str): id of user - threepid (object): m.login.email.identity auth response - token (str): access_token for the user - bind_email (bool): true if the client requested the email to be - bound at the identity server - Returns: - defer.Deferred: - """ - reqd = ('medium', 'address', 'validated_at') - if any(x not in threepid for x in reqd): - # This 
will only happen if the ID server returns a malformed response - logger.info("Can't add incomplete 3pid") - return - - yield self.auth_handler.add_threepid( - user_id, - threepid['medium'], - threepid['address'], - threepid['validated_at'], - ) - - # And we add an email pusher for them by default, but only - # if email notifications are enabled (so people don't start - # getting mail spam where they weren't before if email - # notifs are set up on a home server) - if (self.hs.config.email_enable_notifs and - self.hs.config.email_notif_for_new_users): - # Pull the ID of the access token back out of the db - # It would really make more sense for this to be passed - # up when the access token is saved, but that's quite an - # invasive change I'd rather do separately. - user_tuple = yield self.store.get_user_by_access_token( - token - ) - token_id = user_tuple["token_id"] - - yield self.hs.get_pusherpool().add_pusher( - user_id=user_id, - access_token=token_id, - kind="email", - app_id="m.email", - app_display_name="Email Notifications", - device_display_name=threepid["address"], - pushkey=threepid["address"], - lang=None, # We don't know a user's language here - data={}, - ) - - if bind_email: - logger.info("bind_email specified: binding") - logger.debug("Binding emails %s to %s" % ( - threepid, user_id - )) - yield self.identity_handler.bind_threepid( - threepid['threepid_creds'], user_id - ) - else: - logger.info("bind_email not specified: not binding email") - - @defer.inlineCallbacks - def _register_msisdn_threepid(self, user_id, threepid, token, bind_msisdn): - """Add a phone number as a 3pid identifier - - Also optionally binds msisdn to the given user_id on the identity server - - Args: - user_id (str): id of user - threepid (object): m.login.msisdn auth response - token (str): access_token for the user - bind_email (bool): true if the client requested the email to be - bound at the identity server - Returns: - defer.Deferred: - """ - try: - assert_params_in_dict(threepid, ['medium', 'address', 'validated_at']) - except SynapseError as ex: - if ex.errcode == Codes.MISSING_PARAM: - # This will only happen if the ID server returns a malformed response - logger.info("Can't add incomplete 3pid") - defer.returnValue(None) - raise - - yield self.auth_handler.add_threepid( - user_id, - threepid['medium'], - threepid['address'], - threepid['validated_at'], - ) - - if bind_msisdn: - logger.info("bind_msisdn specified: binding") - logger.debug("Binding msisdn %s to %s", threepid, user_id) - yield self.identity_handler.bind_threepid( - threepid['threepid_creds'], user_id - ) - else: - logger.info("bind_msisdn not specified: not binding msisdn") - @defer.inlineCallbacks def _create_registration_details(self, user_id, params): """Complete registration of newly-registered user diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index 008d4edae5..6b371bfa2f 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -89,7 +89,7 @@ class ConsentResource(Resource): self.hs = hs self.store = hs.get_datastore() - self.registration_handler = hs.get_handlers().registration_handler + self.registration_handler = hs.get_registration_handler() # this is required by the request_handler wrapper self.clock = hs.get_clock() diff --git a/synapse/server.py b/synapse/server.py index 8615b67ad4..4d364fccce 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -64,6 +64,7 @@ from synapse.handlers.presence import PresenceHandler 
from synapse.handlers.profile import BaseProfileHandler, MasterProfileHandler from synapse.handlers.read_marker import ReadMarkerHandler from synapse.handlers.receipts import ReceiptsHandler +from synapse.handlers.register import RegistrationHandler from synapse.handlers.room import RoomContextHandler, RoomCreationHandler from synapse.handlers.room_list import RoomListHandler from synapse.handlers.room_member import RoomMemberMasterHandler @@ -181,6 +182,7 @@ class HomeServer(object): 'pagination_handler', 'room_context_handler', 'sendmail', + 'registration_handler', ] # This is overridden in derived application classes @@ -481,6 +483,9 @@ class HomeServer(object): def build_room_context_handler(self): return RoomContextHandler(self) + def build_registration_handler(self): + return RegistrationHandler(self) + def remove_pusher(self, app_id, push_key, user_id): return self.get_pusherpool().remove_pusher(app_id, push_key, user_id) diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index eb70e1daa6..c9c1506273 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -48,7 +48,7 @@ class RegistrationTestCase(unittest.TestCase): generate_access_token=Mock(return_value='secret') ) self.hs.get_macaroon_generator = Mock(return_value=self.macaroon_generator) - self.handler = self.hs.get_handlers().registration_handler + self.handler = self.hs.get_registration_handler() self.store = self.hs.get_datastore() self.hs.config.max_mau_value = 50 self.lots_of_users = 100 -- cgit 1.4.1 From 6d65659b62d1e338987a071d2f053cc3447e7ff5 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 21 Feb 2019 17:50:30 +0000 Subject: Run push_receipts_to_remotes as background job (#4707) I suspect the CPU usage metrics for this are going to /dev/null at the moment. --- changelog.d/4707.misc | 1 + synapse/handlers/receipts.py | 68 ++++++++++++++++++++++---------------------- 2 files changed, 35 insertions(+), 34 deletions(-) create mode 100644 changelog.d/4707.misc (limited to 'synapse/handlers') diff --git a/changelog.d/4707.misc b/changelog.d/4707.misc new file mode 100644 index 0000000000..ef0772b9af --- /dev/null +++ b/changelog.d/4707.misc @@ -0,0 +1 @@ +Run push_receipts_to_remotes as background job. diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py index 4c2690ba26..696469732c 100644 --- a/synapse/handlers/receipts.py +++ b/synapse/handlers/receipts.py @@ -16,8 +16,8 @@ import logging from twisted.internet import defer +from synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import get_domain_from_id -from synapse.util import logcontext from ._base import BaseHandler @@ -59,7 +59,9 @@ class ReceiptsHandler(BaseHandler): if is_new: # fire off a process in the background to send the receipt to # remote servers - self._push_remotes([receipt]) + run_as_background_process( + 'push_receipts_to_remotes', self._push_remotes, receipt + ) @defer.inlineCallbacks def _received_remote_receipt(self, origin, content): @@ -125,44 +127,42 @@ class ReceiptsHandler(BaseHandler): defer.returnValue(True) - @logcontext.preserve_fn # caller should not yield on this @defer.inlineCallbacks - def _push_remotes(self, receipts): - """Given a list of receipts, works out which remote servers should be + def _push_remotes(self, receipt): + """Given a receipt, works out which remote servers should be poked and pokes them. 
""" try: - # TODO: Some of this stuff should be coallesced. - for receipt in receipts: - room_id = receipt["room_id"] - receipt_type = receipt["receipt_type"] - user_id = receipt["user_id"] - event_ids = receipt["event_ids"] - data = receipt["data"] - - users = yield self.state.get_current_user_in_room(room_id) - remotedomains = set(get_domain_from_id(u) for u in users) - remotedomains = remotedomains.copy() - remotedomains.discard(self.server_name) - - logger.debug("Sending receipt to: %r", remotedomains) - - for domain in remotedomains: - self.federation.send_edu( - destination=domain, - edu_type="m.receipt", - content={ - room_id: { - receipt_type: { - user_id: { - "event_ids": event_ids, - "data": data, - } + # TODO: optimise this to move some of the work to the workers. + room_id = receipt["room_id"] + receipt_type = receipt["receipt_type"] + user_id = receipt["user_id"] + event_ids = receipt["event_ids"] + data = receipt["data"] + + users = yield self.state.get_current_user_in_room(room_id) + remotedomains = set(get_domain_from_id(u) for u in users) + remotedomains = remotedomains.copy() + remotedomains.discard(self.server_name) + + logger.debug("Sending receipt to: %r", remotedomains) + + for domain in remotedomains: + self.federation.send_edu( + destination=domain, + edu_type="m.receipt", + content={ + room_id: { + receipt_type: { + user_id: { + "event_ids": event_ids, + "data": data, } - }, + } }, - key=(room_id, receipt_type, user_id), - ) + }, + key=(room_id, receipt_type, user_id), + ) except Exception: logger.exception("Error pushing receipts to remote servers") -- cgit 1.4.1 From 7b288826b76cae63b553fced10d3779355b592ca Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 22 Feb 2019 11:33:51 +0000 Subject: Fix backfill storing incorrect state for events --- synapse/handlers/federation.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 083f2e0ac3..40655bf92c 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -772,8 +772,11 @@ class FederationHandler(BaseHandler): ev_infos = [] for a in auth_events.values(): - if a.event_id in seen_events: + # We only want to persist auth events as outliers that we haven't + # seen and aren't about to persist as part of the backfilled chunk. + if a.event_id in seen_events or a.event_id in event_map: continue + a.internal_metadata.outlier = True ev_infos.append({ "event": a, -- cgit 1.4.1 From 9342cc6ab167836ca3965923989c33f5230c27e9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Feb 2019 10:02:12 +0000 Subject: Add comments and paranoia --- synapse/handlers/federation.py | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 40655bf92c..fbf044b407 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -770,6 +770,18 @@ class FederationHandler(BaseHandler): set(auth_events.keys()) | set(state_events.keys()) ) + # We now have a chunk of events plus associated state and auth chain to + # persist. We do the persistence in two steps: + # 1. Auth events and state get persisted as outliers, plus the + # backward extremities get persisted (as non-outliers). + # 2. The rest of the events in the chunk get persisted one by one, as + # each one depends on the previous event for its state. 
+ # + # The important thing is that events in the chunk get persisted as + # non-outliers, including when those events are also in the state or + # auth chain. Caution must therefore be taken to ensure that they are + # not accidentally marked as outliers. + ev_infos = [] for a in auth_events.values(): # We only want to persist auth events as outliers that we haven't @@ -789,13 +801,18 @@ class FederationHandler(BaseHandler): }) for e_id in events_to_state: + # For paranoia we ensure that these events are marked as + # non-outliers + ev = event_map[e_id] + ev.internal_metadata.outlier = False + ev_infos.append({ - "event": event_map[e_id], + "event": ev, "state": events_to_state[e_id], "auth_events": { (auth_events[a_id].type, auth_events[a_id].state_key): auth_events[a_id] - for a_id in event_map[e_id].auth_event_ids() + for a_id in ev.auth_event_ids() if a_id in auth_events } }) @@ -811,6 +828,10 @@ class FederationHandler(BaseHandler): if event in events_to_state: continue + # For paranoia we ensure that these events are marked as + # non-outliers + event.internal_metadata.outlier = False + # We store these one at a time since each event depends on the # previous to work out the state. # TODO: We can probably do something more clever here. -- cgit 1.4.1 From 890cb048fdbb05bcd8b80d3038f4811a1fdfe9f0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Feb 2019 14:42:11 +0000 Subject: Assert rather than clobber the values --- synapse/handlers/federation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index fbf044b407..8e6d0a3bbc 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -804,7 +804,7 @@ class FederationHandler(BaseHandler): # For paranoia we ensure that these events are marked as # non-outliers ev = event_map[e_id] - ev.internal_metadata.outlier = False + assert(not event.internal_metadata.is_outlier()) ev_infos.append({ "event": ev, @@ -830,7 +830,7 @@ class FederationHandler(BaseHandler): # For paranoia we ensure that these events are marked as # non-outliers - event.internal_metadata.outlier = False + assert(not event.internal_metadata.is_outlier()) # We store these one at a time since each event depends on the # previous to work out the state. -- cgit 1.4.1 From d730c2c22b53bef52afb6b26a878acc186e4ecae Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Feb 2019 14:45:02 +0000 Subject: More comments --- synapse/handlers/federation.py | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'synapse/handlers') diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 8e6d0a3bbc..ca9b281be1 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -782,6 +782,7 @@ class FederationHandler(BaseHandler): # auth chain. Caution must therefore be taken to ensure that they are # not accidentally marked as outliers. + # Step 1a: persist auth events that *don't* appear in the chunk ev_infos = [] for a in auth_events.values(): # We only want to persist auth events as outliers that we haven't @@ -800,6 +801,8 @@ class FederationHandler(BaseHandler): } }) + # Step 1b: persist the events in the chunk we fetched state for (i.e. + # the backwards extremities) as non-outliers. 
for e_id in events_to_state: # For paranoia we ensure that these events are marked as # non-outliers @@ -822,6 +825,7 @@ class FederationHandler(BaseHandler): backfilled=True, ) + # Step 2: Persist the rest of the events in the chunk one by one events.sort(key=lambda e: e.depth) for event in events: -- cgit 1.4.1 From e4b078a6004ea34684c27056825346178c22aa0f Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 22 Feb 2019 12:11:43 +0000 Subject: Config option to prevent showing non-fed rooms in fed /publicRooms --- synapse/config/room_directory.py | 11 +++++++++++ synapse/handlers/room_list.py | 17 +++++++++++++++-- 2 files changed, 26 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py index 9da13ab11b..08d1d26e58 100644 --- a/synapse/config/room_directory.py +++ b/synapse/config/room_directory.py @@ -27,6 +27,10 @@ class RoomDirectoryConfig(Config): for rule in alias_creation_rules ] + self.allow_non_federated_in_public_rooms = config.get( + "allow_non_federated_in_public_rooms", True, + ) + def default_config(self, config_dir_path, server_name, **kwargs): return """ # The `alias_creation` option controls who's allowed to create aliases @@ -42,6 +46,13 @@ class RoomDirectoryConfig(Config): - user_id: "*" alias: "*" action: allow + + # Specify whether rooms that only allow local users to join should be + # shown in the federation public room directory. + # + # Note that this does not affect the room directory shown to users on + # this homeserver, only those on other homeservers. + #allow_non_federated_in_public_rooms: True """ def is_alias_creation_allowed(self, user_id, alias): diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index dc88620885..47aeeb1d88 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -47,6 +47,7 @@ class RoomListHandler(BaseHandler): self.response_cache = ResponseCache(hs, "room_list") self.remote_response_cache = ResponseCache(hs, "remote_room_list", timeout_ms=30 * 1000) + self.config = hs.get_config() def get_local_public_room_list(self, limit=None, since_token=None, search_filter=None, @@ -286,14 +287,16 @@ class RoomListHandler(BaseHandler): # We've already got enough, so lets just drop it. return - result = yield self.generate_room_entry(room_id, num_joined_users) + result = yield self.generate_room_entry(room_id, num_joined_users, + allow_federated=self.config.allow_non_federated_in_public_rooms) if result and _matches_room_entry(result, search_filter): chunk.append(result) @cachedInlineCallbacks(num_args=1, cache_context=True) def generate_room_entry(self, room_id, num_joined_users, cache_context, - with_alias=True, allow_private=False): + with_alias=True, allow_private=False, + allow_federated=True): """Returns the entry for a room """ result = { @@ -308,6 +311,7 @@ class RoomListHandler(BaseHandler): event_map = yield self.store.get_events([ event_id for key, event_id in iteritems(current_state_ids) if key[0] in ( + EventTypes.Create, EventTypes.JoinRules, EventTypes.Name, EventTypes.Topic, @@ -324,12 +328,21 @@ class RoomListHandler(BaseHandler): } # Double check that this is actually a public room. 
+ join_rules_event = current_state.get((EventTypes.JoinRules, "")) if join_rules_event: join_rule = join_rules_event.content.get("join_rule", None) if not allow_private and join_rule and join_rule != JoinRules.PUBLIC: defer.returnValue(None) + if not allow_federated: + # Disallow non-federated from appearing + create_event = current_state.get((EventTypes.Create, "")) + if create_event: + federate = create_event.content.get("m.federate", True) + if federate == False: + defer.returnValue(None) + if with_alias: aliases = yield self.store.get_aliases_for_room( room_id, on_invalidate=cache_context.invalidate -- cgit 1.4.1 From bd398b874eb375e92939a087a64258a4feb91f49 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 22 Feb 2019 15:43:11 +0000 Subject: Don't restrict non-fed rooms over client APIs --- synapse/federation/transport/server.py | 3 ++- synapse/groups/groups_server.py | 4 ++-- synapse/handlers/room_list.py | 29 ++++++++++++++++++----------- 3 files changed, 22 insertions(+), 14 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 7288d49074..6d4a26f595 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -697,7 +697,8 @@ class PublicRoomList(BaseFederationServlet): data = yield self.handler.get_local_public_room_list( limit, since_token, - network_tuple=network_tuple + network_tuple=network_tuple, + from_federation=True, ) defer.returnValue((200, data)) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 633c865ed8..691752a30c 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -113,7 +113,7 @@ class GroupsServerHandler(object): room_id = room_entry["room_id"] joined_users = yield self.store.get_users_in_room(room_id) entry = yield self.room_list_handler.generate_room_entry( - room_id, len(joined_users), + room_id, True, len(joined_users), with_alias=False, allow_private=True, ) entry = dict(entry) # so we don't change whats cached @@ -544,7 +544,7 @@ class GroupsServerHandler(object): joined_users = yield self.store.get_users_in_room(room_id) entry = yield self.room_list_handler.generate_room_entry( - room_id, len(joined_users), + room_id, True, len(joined_users), with_alias=False, allow_private=True, ) diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 47aeeb1d88..bde9ec744f 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -51,7 +51,8 @@ class RoomListHandler(BaseHandler): def get_local_public_room_list(self, limit=None, since_token=None, search_filter=None, - network_tuple=EMPTY_THIRD_PARTY_ID,): + network_tuple=EMPTY_THIRD_PARTY_ID, + from_federation=False): """Generate a local public room list. 
There are multiple different lists: the main one plus one per third @@ -82,13 +83,15 @@ class RoomListHandler(BaseHandler): return self.response_cache.wrap( key, self._get_public_room_list, - limit, since_token, network_tuple=network_tuple, + limit, since_token, + network_tuple=network_tuple, from_federation=from_federation, ) @defer.inlineCallbacks def _get_public_room_list(self, limit=None, since_token=None, search_filter=None, - network_tuple=EMPTY_THIRD_PARTY_ID,): + network_tuple=EMPTY_THIRD_PARTY_ID, + from_federation=False,): if since_token and since_token != "END": since_token = RoomListNextBatch.from_token(since_token) else: @@ -208,7 +211,8 @@ class RoomListHandler(BaseHandler): yield concurrently_execute( lambda r: self._append_room_entry_to_chunk( r, rooms_to_num_joined[r], - chunk, limit, search_filter + chunk, limit, search_filter, + from_federation=from_federation, ), batch, 5, ) @@ -279,7 +283,7 @@ class RoomListHandler(BaseHandler): @defer.inlineCallbacks def _append_room_entry_to_chunk(self, room_id, num_joined_users, chunk, limit, - search_filter): + search_filter, from_federation=False): """Generate the entry for a room in the public room list and append it to the `chunk` if it matches the search filter """ @@ -287,16 +291,19 @@ class RoomListHandler(BaseHandler): # We've already got enough, so lets just drop it. return - result = yield self.generate_room_entry(room_id, num_joined_users, - allow_federated=self.config.allow_non_federated_in_public_rooms) + if from_federation: + result = yield self.generate_room_entry(room_id, + self.config.allow_non_federated_in_public_rooms, + num_joined_users) + else: + result = yield self.generate_room_entry(room_id, True, num_joined_users) if result and _matches_room_entry(result, search_filter): chunk.append(result) - @cachedInlineCallbacks(num_args=1, cache_context=True) - def generate_room_entry(self, room_id, num_joined_users, cache_context, - with_alias=True, allow_private=False, - allow_federated=True): + @cachedInlineCallbacks(num_args=2, cache_context=True) + def generate_room_entry(self, room_id, allow_federated, num_joined_users, + cache_context, with_alias=True, allow_private=False): """Returns the entry for a room """ result = { -- cgit 1.4.1 From 07493607a84665be006ec52f897eb904d923721c Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Mon, 25 Feb 2019 12:42:30 +0000 Subject: Docs and arg name clarification --- synapse/handlers/room_list.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index bde9ec744f..0636e5b8eb 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -302,9 +302,22 @@ class RoomListHandler(BaseHandler): chunk.append(result) @cachedInlineCallbacks(num_args=2, cache_context=True) - def generate_room_entry(self, room_id, allow_federated, num_joined_users, + def generate_room_entry(self, room_id, allow_non_federated, num_joined_users, cache_context, with_alias=True, allow_private=False): """Returns the entry for a room + + Args: + room_id (str): The room's ID. + allow_non_federated (bool): Whether rooms with federation + disabled should be shown. + num_joined_users (int): Number of users in the room. + cache_context: Information for cached responses. + with_alias (bool): Whether to return the room's aliases in the result. + allow_private (bool): Whether invite-only rooms should be shown. 
+ + Returns: + Deferred[dict|None]: Returns a room entry as a dictionary, or None if this + room was determined not to be shown publicly. """ result = { "room_id": room_id, @@ -342,7 +355,7 @@ class RoomListHandler(BaseHandler): if not allow_private and join_rule and join_rule != JoinRules.PUBLIC: defer.returnValue(None) - if not allow_federated: + if not allow_non_federated: # Disallow non-federated from appearing create_event = current_state.get((EventTypes.Create, "")) if create_event: -- cgit 1.4.1 From 84c0a20dfeb043f1113a6e213bc7488ef21a0432 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Mon, 25 Feb 2019 15:23:27 +0000 Subject: Simplify call to generate_room_entry --- synapse/handlers/room_list.py | 30 +++++++++++++----------------- 1 file changed, 13 insertions(+), 17 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 810c624c2b..69b0bc3dd1 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -300,24 +300,24 @@ class RoomListHandler(BaseHandler): # We've already got enough, so lets just drop it. return - if from_federation: - result = yield self.generate_room_entry(room_id, - self.config.allow_non_federated_in_public_rooms, - num_joined_users) - else: - result = yield self.generate_room_entry(room_id, True, num_joined_users) - - if result and _matches_room_entry(result, search_filter): + result = yield self.generate_room_entry(room_id, + num_joined_users) + + if from_federation and not self.config.allow_non_federated_in_public_rooms: + if result["m.federate"] = False: + # This is a non-federating room and the config has chosen not + # to show these rooms to other servers + chunk.append(None) + else if result and _matches_room_entry(result, search_filter): chunk.append(result) @cachedInlineCallbacks(num_args=2, cache_context=True) - def generate_room_entry(self, room_id, allow_non_federated, num_joined_users, + def generate_room_entry(self, room_id, num_joined_users, cache_context, with_alias=True, allow_private=False): """Returns the entry for a room Args: room_id (str): The room's ID. - allow_non_federated (bool): Whether rooms with federation disabled should be shown. num_joined_users (int): Number of users in the room. cache_context: Information for cached responses. 
@@ -364,13 +364,9 @@ class RoomListHandler(BaseHandler): if not allow_private and join_rule and join_rule != JoinRules.PUBLIC: defer.returnValue(None) - if not allow_non_federated: - # Disallow non-federated from appearing - create_event = current_state.get((EventTypes.Create, "")) - if create_event: - federate = create_event.content.get("m.federate", True) - if federate == False: - defer.returnValue(None) + # Return whether this room is open to federation users or not + create_event = current_state.get((EventTypes.Create, "")) + result["m.federate"] = create_event.content.get("m.federate", True) if with_alias: aliases = yield self.store.get_aliases_for_room( -- cgit 1.4.1 From 8aaf7ffc4415e9f7da2d34448b29fb2d0cb80574 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Mon, 25 Feb 2019 15:27:17 +0000 Subject: syntax derp --- synapse/handlers/room_list.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 69b0bc3dd1..b7f450e719 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -304,7 +304,7 @@ class RoomListHandler(BaseHandler): num_joined_users) if from_federation and not self.config.allow_non_federated_in_public_rooms: - if result["m.federate"] = False: + if result["m.federate"] == False: # This is a non-federating room and the config has chosen not # to show these rooms to other servers chunk.append(None) -- cgit 1.4.1 From 9c598dddcbd75f60d422164215f3b078def604b2 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Feb 2019 16:32:02 +0000 Subject: Fix typo --- synapse/handlers/federation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index ca9b281be1..f80486102a 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -807,7 +807,7 @@ class FederationHandler(BaseHandler): # For paranoia we ensure that these events are marked as # non-outliers ev = event_map[e_id] - assert(not event.internal_metadata.is_outlier()) + assert(not ev.internal_metadata.is_outlier()) ev_infos.append({ "event": ev, -- cgit 1.4.1 From c7b333c545adf8958ec97771e6816ceb5e83f524 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Feb 2019 16:56:41 +0000 Subject: Log tracebacks correctly --- synapse/federation/federation_server.py | 5 +++-- synapse/handlers/pagination.py | 6 +++++- synapse/http/server.py | 8 ++++---- 3 files changed, 12 insertions(+), 7 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 3da86d4ba6..8f45c2badf 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -239,8 +239,9 @@ class FederationServer(FederationBase): f = failure.Failure() pdu_results[event_id] = {"error": str(e)} logger.error( - "Failed to handle PDU %s: %s", - event_id, f.getTraceback().rstrip(), + "Failed to handle PDU %s", + event_id, + exc_info=(f.type, f.value, f.getTracebackObject()), ) yield concurrently_execute( diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 084c1503da..e4fdae9266 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -136,7 +136,11 @@ class PaginationHandler(object): logger.info("[purge] complete") self._purges_by_id[purge_id].status = PurgeStatus.STATUS_COMPLETE except Exception: - logger.error("[purge] 
failed: %s", Failure().getTraceback().rstrip()) + f = Failure() + logger.error( + "[purge] failed", + exc_info=(f.type, f.value, f.getTracebackObject()), + ) self._purges_by_id[purge_id].status = PurgeStatus.STATUS_FAILED finally: self._purges_in_progress_by_room.discard(room_id) diff --git a/synapse/http/server.py b/synapse/http/server.py index 6c67a25a11..16fb7935da 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -169,18 +169,18 @@ def _return_html_error(f, request): ) else: logger.error( - "Failed handle request %r: %s", + "Failed handle request %r", request, - f.getTraceback().rstrip(), + exc_info=(f.type, f.value, f.getTracebackObject()), ) else: code = http_client.INTERNAL_SERVER_ERROR msg = "Internal server error" logger.error( - "Failed handle request %r: %s", + "Failed handle request %r", request, - f.getTraceback().rstrip(), + exc_info=(f.type, f.value, f.getTracebackObject()), ) body = HTML_ERROR_TEMPLATE.format( -- cgit 1.4.1 From 1330aa4a8fdff5f6d1ff4bc5d27de674dfcd67e7 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Mon, 25 Feb 2019 17:28:19 +0000 Subject: elif not else if --- synapse/handlers/room_list.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index b7f450e719..94cabc2a2e 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -308,7 +308,7 @@ class RoomListHandler(BaseHandler): # This is a non-federating room and the config has chosen not # to show these rooms to other servers chunk.append(None) - else if result and _matches_room_entry(result, search_filter): + elif result and _matches_room_entry(result, search_filter): chunk.append(result) @cachedInlineCallbacks(num_args=2, cache_context=True) -- cgit 1.4.1 From 70ea2f4e1df41458532c7964f4f707e04810e619 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Mon, 25 Feb 2019 19:15:36 +0000 Subject: switch from google.com to recaptcha.net for reCAPTCHA (#4731) * add trivial clarification about jemalloc * switch from google.com to recaptcha.net because https://developers.google.com/recaptcha/docs/faq#can-i-use-recaptcha-globally --- README.rst | 2 ++ synapse/config/captcha.py | 2 +- synapse/handlers/register.py | 4 ++-- synapse/rest/client/v2_alpha/auth.py | 2 +- synapse/static/client/register/index.html | 2 +- 5 files changed, 7 insertions(+), 5 deletions(-) (limited to 'synapse/handlers') diff --git a/README.rst b/README.rst index 9a7c04b55e..8e22109973 100644 --- a/README.rst +++ b/README.rst @@ -199,6 +199,8 @@ by installing the ``libjemalloc1`` package and adding this line to LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1 +This can make a significant difference on Python 2.7 - it's unclear how +much of an improvement it provides on Python 3.x. Upgrading an existing Synapse ============================= diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py index 4064891ffb..d25196be08 100644 --- a/synapse/config/captcha.py +++ b/synapse/config/captcha.py @@ -47,5 +47,5 @@ class CaptchaConfig(Config): #captcha_bypass_secret: "YOUR_SECRET_HERE" # The API endpoint to use for verifying m.login.recaptcha responses. 
- recaptcha_siteverify_api: "https://www.google.com/recaptcha/api/siteverify" + recaptcha_siteverify_api: "https://www.recaptcha.net/recaptcha/api/siteverify" """ diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 24a4cb5a83..c0e06929bd 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -460,7 +460,7 @@ class RegistrationHandler(BaseHandler): lines = response.split('\n') json = { "valid": lines[0] == 'true', - "error_url": "http://www.google.com/recaptcha/api/challenge?" + + "error_url": "http://www.recaptcha.net/recaptcha/api/challenge?" + "error=%s" % lines[1] } defer.returnValue(json) @@ -471,7 +471,7 @@ class RegistrationHandler(BaseHandler): Used only by c/s api v1 """ data = yield self.captcha_client.post_urlencoded_get_raw( - "http://www.google.com:80/recaptcha/api/verify", + "http://www.recaptcha.net:80/recaptcha/api/verify", args={ 'privatekey': private_key, 'remoteip': ip_addr, diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py index f7bb710642..ac035c7735 100644 --- a/synapse/rest/client/v2_alpha/auth.py +++ b/synapse/rest/client/v2_alpha/auth.py @@ -33,7 +33,7 @@ RECAPTCHA_TEMPLATE = """ Authentication - diff --git a/synapse/static/client/register/index.html b/synapse/static/client/register/index.html index 886f2edd1f..6edc4deb03 100644 --- a/synapse/static/client/register/index.html +++ b/synapse/static/client/register/index.html @@ -4,7 +4,7 @@ - + -- cgit 1.4.1 From 71669a0fba524e4d8cdb9e0076dca5d2770ef788 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 26 Feb 2019 11:25:00 +0000 Subject: Address rich comments --- synapse/handlers/room_list.py | 36 ++++++++++++++++++++++++++++++------ synapse/server.pyi | 2 +- 2 files changed, 31 insertions(+), 7 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 94cabc2a2e..a9725ade37 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -97,7 +97,23 @@ class RoomListHandler(BaseHandler): def _get_public_room_list(self, limit=None, since_token=None, search_filter=None, network_tuple=EMPTY_THIRD_PARTY_ID, - from_federation=False, timeout=None,): + from_federation=False, + timeout=None,): + """Generate a public room list. + + Args: + limit (int): Maximum amount of rooms to return. + since_token (str) + search_filter (dict): Dictionary to filter rooms by. + network_tuple (ThirdPartyInstanceID): Which public list to use. + This can be (None, None) to indicate the main list, or a particular + appservice and network id to use an appservice specific one. + Setting to None returns all public rooms across all lists. + from_federation (bool): Whether this request originated from a + federating server or a client. Used for room filtering. + timeout (int): Amount of seconds to wait for a response before + timing out. + """ if since_token and since_token != "END": since_token = RoomListNextBatch.from_token(since_token) else: @@ -295,19 +311,28 @@ class RoomListHandler(BaseHandler): search_filter, from_federation=False): """Generate the entry for a room in the public room list and append it to the `chunk` if it matches the search filter + + Args: + room_id (str): The ID of the room. + num_joined_users (int): The number of joined users in the room. + chunk (list) + limit (int): Maximum amount of rooms to display. Function will + return if length of chunk is greater than limit + 1. 
+ search_filter (dict) + from_federation (bool): Whether this request originated from a + federating server or a client. Used for room filtering. """ if limit and len(chunk) > limit + 1: # We've already got enough, so lets just drop it. return - result = yield self.generate_room_entry(room_id, - num_joined_users) + result = yield self.generate_room_entry(room_id, num_joined_users) if from_federation and not self.config.allow_non_federated_in_public_rooms: - if result["m.federate"] == False: + if result["m.federate"] is False: # This is a non-federating room and the config has chosen not # to show these rooms to other servers - chunk.append(None) + return elif result and _matches_room_entry(result, search_filter): chunk.append(result) @@ -318,7 +343,6 @@ class RoomListHandler(BaseHandler): Args: room_id (str): The room's ID. - disabled should be shown. num_joined_users (int): Number of users in the room. cache_context: Information for cached responses. with_alias (bool): Whether to return the room's aliases in the result. diff --git a/synapse/server.pyi b/synapse/server.pyi index 06cd083a74..fb8df56cd5 100644 --- a/synapse/server.pyi +++ b/synapse/server.pyi @@ -7,9 +7,9 @@ import synapse.handlers.auth import synapse.handlers.deactivate_account import synapse.handlers.device import synapse.handlers.e2e_keys +import synapse.handlers.message import synapse.handlers.room import synapse.handlers.room_member -import synapse.handlers.message import synapse.handlers.set_password import synapse.rest.media.v1.media_repository import synapse.server_notices.server_notices_manager -- cgit 1.4.1 From 6946c20111cb7c460deb340b61236dce9f4fd878 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 26 Feb 2019 11:27:19 +0000 Subject: Result may be None --- synapse/handlers/room_list.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index a9725ade37..7c21f142a0 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -329,7 +329,7 @@ class RoomListHandler(BaseHandler): result = yield self.generate_room_entry(room_id, num_joined_users) if from_federation and not self.config.allow_non_federated_in_public_rooms: - if result["m.federate"] is False: + if not result or result["m.federate"] is False: # This is a non-federating room and the config has chosen not # to show these rooms to other servers return -- cgit 1.4.1 From 6728bf39405c52e4f2473c1e19a7648134361c15 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 26 Feb 2019 11:52:52 +0000 Subject: Make not showing non-federated rooms the default --- synapse/config/room_directory.py | 12 ------------ synapse/handlers/room_list.py | 2 +- 2 files changed, 1 insertion(+), 13 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py index 3322cf2eea..9b897abe3c 100644 --- a/synapse/config/room_directory.py +++ b/synapse/config/room_directory.py @@ -52,10 +52,6 @@ class RoomDirectoryConfig(Config): ) ] - self.allow_non_federated_in_public_rooms = config.get( - "allow_non_federated_in_public_rooms", True, - ) - def default_config(self, config_dir_path, server_name, **kwargs): return """ # The `alias_creation` option controls who's allowed to create aliases @@ -114,14 +110,6 @@ class RoomDirectoryConfig(Config): # alias: "*" # room_id: "*" # action: allow - - # Specify whether rooms that only allow local users to join should be - # shown in the federation public 
room directory. - # - # Note that this does not affect the room directory shown to users on - # this homeserver, only those on other homeservers. - # - #allow_non_federated_in_public_rooms: True """ def is_alias_creation_allowed(self, user_id, room_id, alias): diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 7c21f142a0..f1a51a7ca9 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -328,7 +328,7 @@ class RoomListHandler(BaseHandler): result = yield self.generate_room_entry(room_id, num_joined_users) - if from_federation and not self.config.allow_non_federated_in_public_rooms: + if from_federation: if not result or result["m.federate"] is False: # This is a non-federating room and the config has chosen not # to show these rooms to other servers -- cgit 1.4.1 From 40c2271680501c5ca2c29349d62df42d80b953db Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 26 Feb 2019 12:04:34 +0000 Subject: Clean up room chunk logic --- synapse/handlers/room_list.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index f1a51a7ca9..e6038a1069 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -327,13 +327,15 @@ class RoomListHandler(BaseHandler): return result = yield self.generate_room_entry(room_id, num_joined_users) + if not result: + return - if from_federation: - if not result or result["m.federate"] is False: - # This is a non-federating room and the config has chosen not - # to show these rooms to other servers - return - elif result and _matches_room_entry(result, search_filter): + if from_federation and result["m.federate"] is False: + # This is a room that other servers cannot join. Do not show them + # this room. + return + + if _matches_room_entry(result, search_filter): chunk.append(result) @cachedInlineCallbacks(num_args=2, cache_context=True) -- cgit 1.4.1 From 16565e67dbc81ddf5fc16d39f82accbd508ac13f Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 26 Feb 2019 12:12:48 +0000 Subject: Correct docstring types and chunk logic --- synapse/handlers/room_list.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index e6038a1069..7ab8476680 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -59,9 +59,9 @@ class RoomListHandler(BaseHandler): party network. A client can ask for a specific list or to return all. Args: - limit (int) - since_token (str) - search_filter (dict) + limit (int|None) + since_token (str|None) + search_filter (dict|None) network_tuple (ThirdPartyInstanceID): Which public list to use. This can be (None, None) to indicate the main list, or a particular appservice and network id to use an appservice specific one. @@ -102,16 +102,16 @@ class RoomListHandler(BaseHandler): """Generate a public room list. Args: - limit (int): Maximum amount of rooms to return. - since_token (str) - search_filter (dict): Dictionary to filter rooms by. + limit (int|None): Maximum amount of rooms to return. + since_token (str|None) + search_filter (dict|None): Dictionary to filter rooms by. network_tuple (ThirdPartyInstanceID): Which public list to use. This can be (None, None) to indicate the main list, or a particular appservice and network id to use an appservice specific one. 
Setting to None returns all public rooms across all lists. from_federation (bool): Whether this request originated from a federating server or a client. Used for room filtering. - timeout (int): Amount of seconds to wait for a response before + timeout (int|None): Amount of seconds to wait for a response before timing out. """ if since_token and since_token != "END": @@ -316,9 +316,9 @@ class RoomListHandler(BaseHandler): room_id (str): The ID of the room. num_joined_users (int): The number of joined users in the room. chunk (list) - limit (int): Maximum amount of rooms to display. Function will + limit (int|None): Maximum amount of rooms to display. Function will return if length of chunk is greater than limit + 1. - search_filter (dict) + search_filter (dict|None) from_federation (bool): Whether this request originated from a federating server or a client. Used for room filtering. """ @@ -330,7 +330,8 @@ class RoomListHandler(BaseHandler): if not result: return - if from_federation and result["m.federate"] is False: + if from_federation: + if "m.federate" in result and not result["m.federate"]: # This is a room that other servers cannot join. Do not show them # this room. return -- cgit 1.4.1 From a712aa3a9c5c58eca6ecfa0cb23636035fbbf449 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 26 Feb 2019 12:13:55 +0000 Subject: Correct indent --- synapse/handlers/room_list.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 7ab8476680..0ce274e9ae 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -332,9 +332,9 @@ class RoomListHandler(BaseHandler): if from_federation: if "m.federate" in result and not result["m.federate"]: - # This is a room that other servers cannot join. Do not show them - # this room. - return + # This is a room that other servers cannot join. Do not show them + # this room. + return if _matches_room_entry(result, search_filter): chunk.append(result) -- cgit 1.4.1 From c4414768af5e2213b1ad2b16bc8e1b8062fd1e49 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 26 Feb 2019 12:22:34 +0000 Subject: Cleaner chunk logic --- synapse/handlers/room_list.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 0ce274e9ae..c02fedcfe4 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -330,11 +330,10 @@ class RoomListHandler(BaseHandler): if not result: return - if from_federation: - if "m.federate" in result and not result["m.federate"]: - # This is a room that other servers cannot join. Do not show them - # this room. - return + if from_federation and not result.get("m.federate", True): + # This is a room that other servers cannot join. Do not show them + # this room. 
+ return if _matches_room_entry(result, search_filter): chunk.append(result) -- cgit 1.4.1 From c74624a633ae883474a5ec3137bddd009165144c Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Tue, 26 Feb 2019 13:20:38 +0000 Subject: Revert "Prevent showing non-fed rooms in fed /publicRooms" --- synapse/federation/transport/server.py | 3 +- synapse/groups/groups_server.py | 6 ++- synapse/handlers/room_list.py | 76 +++++----------------------------- synapse/server.pyi | 2 +- 4 files changed, 17 insertions(+), 70 deletions(-) (limited to 'synapse/handlers') diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 5ba94be2ec..a2396ab466 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -736,8 +736,7 @@ class PublicRoomList(BaseFederationServlet): data = yield self.handler.get_local_public_room_list( limit, since_token, - network_tuple=network_tuple, - from_federation=True, + network_tuple=network_tuple ) defer.returnValue((200, data)) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index a7eaead56b..633c865ed8 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -113,7 +113,8 @@ class GroupsServerHandler(object): room_id = room_entry["room_id"] joined_users = yield self.store.get_users_in_room(room_id) entry = yield self.room_list_handler.generate_room_entry( - room_id, len(joined_users), with_alias=False, allow_private=True, + room_id, len(joined_users), + with_alias=False, allow_private=True, ) entry = dict(entry) # so we don't change whats cached entry.pop("room_id", None) @@ -543,7 +544,8 @@ class GroupsServerHandler(object): joined_users = yield self.store.get_users_in_room(room_id) entry = yield self.room_list_handler.generate_room_entry( - room_id, len(joined_users), with_alias=False, allow_private=True, + room_id, len(joined_users), + with_alias=False, allow_private=True, ) if not entry: diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index c02fedcfe4..13e212d669 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -47,21 +47,19 @@ class RoomListHandler(BaseHandler): self.response_cache = ResponseCache(hs, "room_list") self.remote_response_cache = ResponseCache(hs, "remote_room_list", timeout_ms=30 * 1000) - self.config = hs.get_config() def get_local_public_room_list(self, limit=None, since_token=None, search_filter=None, - network_tuple=EMPTY_THIRD_PARTY_ID, - from_federation=False): + network_tuple=EMPTY_THIRD_PARTY_ID,): """Generate a local public room list. There are multiple different lists: the main one plus one per third party network. A client can ask for a specific list or to return all. Args: - limit (int|None) - since_token (str|None) - search_filter (dict|None) + limit (int) + since_token (str) + search_filter (dict) network_tuple (ThirdPartyInstanceID): Which public list to use. This can be (None, None) to indicate the main list, or a particular appservice and network id to use an appservice specific one. 
@@ -89,31 +87,14 @@ class RoomListHandler(BaseHandler): return self.response_cache.wrap( key, self._get_public_room_list, - limit, since_token, - network_tuple=network_tuple, from_federation=from_federation, + limit, since_token, network_tuple=network_tuple, ) @defer.inlineCallbacks def _get_public_room_list(self, limit=None, since_token=None, search_filter=None, network_tuple=EMPTY_THIRD_PARTY_ID, - from_federation=False, timeout=None,): - """Generate a public room list. - - Args: - limit (int|None): Maximum amount of rooms to return. - since_token (str|None) - search_filter (dict|None): Dictionary to filter rooms by. - network_tuple (ThirdPartyInstanceID): Which public list to use. - This can be (None, None) to indicate the main list, or a particular - appservice and network id to use an appservice specific one. - Setting to None returns all public rooms across all lists. - from_federation (bool): Whether this request originated from a - federating server or a client. Used for room filtering. - timeout (int|None): Amount of seconds to wait for a response before - timing out. - """ if since_token and since_token != "END": since_token = RoomListNextBatch.from_token(since_token) else: @@ -236,8 +217,7 @@ class RoomListHandler(BaseHandler): yield concurrently_execute( lambda r: self._append_room_entry_to_chunk( r, rooms_to_num_joined[r], - chunk, limit, search_filter, - from_federation=from_federation, + chunk, limit, search_filter ), batch, 5, ) @@ -308,51 +288,23 @@ class RoomListHandler(BaseHandler): @defer.inlineCallbacks def _append_room_entry_to_chunk(self, room_id, num_joined_users, chunk, limit, - search_filter, from_federation=False): + search_filter): """Generate the entry for a room in the public room list and append it to the `chunk` if it matches the search filter - - Args: - room_id (str): The ID of the room. - num_joined_users (int): The number of joined users in the room. - chunk (list) - limit (int|None): Maximum amount of rooms to display. Function will - return if length of chunk is greater than limit + 1. - search_filter (dict|None) - from_federation (bool): Whether this request originated from a - federating server or a client. Used for room filtering. """ if limit and len(chunk) > limit + 1: # We've already got enough, so lets just drop it. return result = yield self.generate_room_entry(room_id, num_joined_users) - if not result: - return - - if from_federation and not result.get("m.federate", True): - # This is a room that other servers cannot join. Do not show them - # this room. - return - if _matches_room_entry(result, search_filter): + if result and _matches_room_entry(result, search_filter): chunk.append(result) - @cachedInlineCallbacks(num_args=2, cache_context=True) - def generate_room_entry(self, room_id, num_joined_users, - cache_context, with_alias=True, allow_private=False): + @cachedInlineCallbacks(num_args=1, cache_context=True) + def generate_room_entry(self, room_id, num_joined_users, cache_context, + with_alias=True, allow_private=False): """Returns the entry for a room - - Args: - room_id (str): The room's ID. - num_joined_users (int): Number of users in the room. - cache_context: Information for cached responses. - with_alias (bool): Whether to return the room's aliases in the result. - allow_private (bool): Whether invite-only rooms should be shown. - - Returns: - Deferred[dict|None]: Returns a room entry as a dictionary, or None if this - room was determined not to be shown publicly. 
""" result = { "room_id": room_id, @@ -366,7 +318,6 @@ class RoomListHandler(BaseHandler): event_map = yield self.store.get_events([ event_id for key, event_id in iteritems(current_state_ids) if key[0] in ( - EventTypes.Create, EventTypes.JoinRules, EventTypes.Name, EventTypes.Topic, @@ -383,17 +334,12 @@ class RoomListHandler(BaseHandler): } # Double check that this is actually a public room. - join_rules_event = current_state.get((EventTypes.JoinRules, "")) if join_rules_event: join_rule = join_rules_event.content.get("join_rule", None) if not allow_private and join_rule and join_rule != JoinRules.PUBLIC: defer.returnValue(None) - # Return whether this room is open to federation users or not - create_event = current_state.get((EventTypes.Create, "")) - result["m.federate"] = create_event.content.get("m.federate", True) - if with_alias: aliases = yield self.store.get_aliases_for_room( room_id, on_invalidate=cache_context.invalidate diff --git a/synapse/server.pyi b/synapse/server.pyi index fb8df56cd5..06cd083a74 100644 --- a/synapse/server.pyi +++ b/synapse/server.pyi @@ -7,9 +7,9 @@ import synapse.handlers.auth import synapse.handlers.deactivate_account import synapse.handlers.device import synapse.handlers.e2e_keys -import synapse.handlers.message import synapse.handlers.room import synapse.handlers.room_member +import synapse.handlers.message import synapse.handlers.set_password import synapse.rest.media.v1.media_repository import synapse.server_notices.server_notices_manager -- cgit 1.4.1