From eddc6d8855b0aa50fe85f85d40bd3ffc24678238 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 5 Sep 2019 16:25:22 +0100 Subject: Forbid changing the name, avatar or topic of a direct room --- synapse/third_party_rules/access_rules.py | 50 +++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/synapse/third_party_rules/access_rules.py b/synapse/third_party_rules/access_rules.py index 1a295ea7ce..41862f6d0b 100644 --- a/synapse/third_party_rules/access_rules.py +++ b/synapse/third_party_rules/access_rules.py @@ -237,6 +237,15 @@ class RoomAccessRules(object): if event.type == EventTypes.JoinRules: return self._on_join_rule_change(event, rule) + if event.type == EventTypes.RoomAvatar: + return self._on_room_avatar_change(event, rule) + + if event.type == EventTypes.Name: + return self._on_room_name_change(event, rule) + + if event.type == EventTypes.Topic: + return self._on_room_topic_change(event, rule) + return True def _on_rules_change(self, event, state_events): @@ -461,6 +470,47 @@ class RoomAccessRules(object): return True + def _on_room_avatar_change(self, event, rule): + """Check whether a change of room avatar is allowed. + The current rule is to forbid such a change in direct chats but allow it + everywhere else. + + Args: + event (synapse.events.EventBase): The event to check. + rule (str): The name of the rule to apply. + Returns: + bool, True if the event can be allowed, False otherwise. + """ + return rule != ACCESS_RULE_DIRECT + + + def _on_room_name_change(self, event, rule): + """Check whether a change of room name is allowed. + The current rule is to forbid such a change in direct chats but allow it + everywhere else. + + Args: + event (synapse.events.EventBase): The event to check. + rule (str): The name of the rule to apply. + Returns: + bool, True if the event can be allowed, False otherwise. + """ + return rule != ACCESS_RULE_DIRECT + + + def _on_room_topic_change(self, event, rule): + """Check whether a change of room topic is allowed. + The current rule is to forbid such a change in direct chats but allow it + everywhere else. + + Args: + event (synapse.events.EventBase): The event to check. + rule (str): The name of the rule to apply. + Returns: + bool, True if the event can be allowed, False otherwise. + """ + return rule != ACCESS_RULE_DIRECT + @staticmethod def _get_rule_from_state(state_events): """Extract the rule to be applied from the given set of state events. -- cgit 1.5.1 From 2477f1f7aa8db44a8a74d622dcea31ba9ae85a37 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 5 Sep 2019 16:25:37 +0100 Subject: Add tests --- tests/rest/client/test_room_access_rules.py | 105 ++++++++++++++++++++++++++++ 1 file changed, 105 insertions(+) diff --git a/tests/rest/client/test_room_access_rules.py b/tests/rest/client/test_room_access_rules.py index 7e23add6b7..28e07b3928 100644 --- a/tests/rest/client/test_room_access_rules.py +++ b/tests/rest/client/test_room_access_rules.py @@ -483,6 +483,111 @@ class RoomAccessTestCase(unittest.HomeserverTestCase): expected_code=403, ) + def test_change_room_avatar(self): + """Tests that changing the room avatar is always allowed unless the room is a + direct chat, in which case it's forbidden. 
+ """ + + avatar_content = { + "info": { + "h": 398, + "mimetype": "image/jpeg", + "size": 31037, + "w": 394 + }, + "url": "mxc://example.org/JWEIFJgwEIhweiWJE", + } + + self.helper.send_state( + room_id=self.restricted_room, + event_type=EventTypes.RoomAvatar, + body=avatar_content, + tok=self.tok, + expect_code=200, + ) + + self.helper.send_state( + room_id=self.unrestricted_room, + event_type=EventTypes.RoomAvatar, + body=avatar_content, + tok=self.tok, + expect_code=200, + ) + + self.helper.send_state( + room_id=self.direct_rooms[0], + event_type=EventTypes.RoomAvatar, + body=avatar_content, + tok=self.tok, + expect_code=403, + ) + + def test_change_room_name(self): + """Tests that changing the room name is always allowed unless the room is a direct + chat, in which case it's forbidden. + """ + + name_content = { + "name": "My super room", + } + + self.helper.send_state( + room_id=self.restricted_room, + event_type=EventTypes.Name, + body=name_content, + tok=self.tok, + expect_code=200, + ) + + self.helper.send_state( + room_id=self.unrestricted_room, + event_type=EventTypes.Name, + body=name_content, + tok=self.tok, + expect_code=200, + ) + + self.helper.send_state( + room_id=self.direct_rooms[0], + event_type=EventTypes.Name, + body=name_content, + tok=self.tok, + expect_code=403, + ) + + def test_change_room_topic(self): + """Tests that changing the room topic is always allowed unless the room is a + direct chat, in which case it's forbidden. + """ + + topic_content = { + "topic": "Welcome to this room", + } + + self.helper.send_state( + room_id=self.restricted_room, + event_type=EventTypes.Topic, + body=topic_content, + tok=self.tok, + expect_code=200, + ) + + self.helper.send_state( + room_id=self.unrestricted_room, + event_type=EventTypes.Topic, + body=topic_content, + tok=self.tok, + expect_code=200, + ) + + self.helper.send_state( + room_id=self.direct_rooms[0], + event_type=EventTypes.Topic, + body=topic_content, + tok=self.tok, + expect_code=403, + ) + def create_room( self, direct=False, rule=None, preset=RoomCreationPreset.TRUSTED_PRIVATE_CHAT, initial_state=None, expected_code=200, -- cgit 1.5.1 From 9ef4e90be73ef61212a548f61c738882c7878462 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 5 Sep 2019 16:35:00 +0100 Subject: Changelog --- changelog.d/1.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/1.feature diff --git a/changelog.d/1.feature b/changelog.d/1.feature new file mode 100644 index 0000000000..845642e445 --- /dev/null +++ b/changelog.d/1.feature @@ -0,0 +1 @@ +Forbid changing the name, avatar or topic of a direct room. -- cgit 1.5.1 From d1d464388acff70b75b61bd1ec6b9d33999c5924 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 5 Sep 2019 16:35:13 +0100 Subject: Lint --- synapse/third_party_rules/access_rules.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/synapse/third_party_rules/access_rules.py b/synapse/third_party_rules/access_rules.py index 41862f6d0b..f564c0484c 100644 --- a/synapse/third_party_rules/access_rules.py +++ b/synapse/third_party_rules/access_rules.py @@ -483,7 +483,6 @@ class RoomAccessRules(object): """ return rule != ACCESS_RULE_DIRECT - def _on_room_name_change(self, event, rule): """Check whether a change of room name is allowed. The current rule is to forbid such a change in direct chats but allow it @@ -497,7 +496,6 @@ class RoomAccessRules(object): """ return rule != ACCESS_RULE_DIRECT - def _on_room_topic_change(self, event, rule): """Check whether a change of room topic is allowed. 
The current rule is to forbid such a change in direct chats but allow it -- cgit 1.5.1 From 6cf60da6e96f3dfbd58435b0390da91db04901c0 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 5 Sep 2019 16:38:20 +0100 Subject: Fix CI --- .circleci/merge_base_branch.sh | 2 +- scripts-dev/check-newsfragment | 6 +++--- tox.ini | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.circleci/merge_base_branch.sh b/.circleci/merge_base_branch.sh index 56895284ba..3a6476a96c 100755 --- a/.circleci/merge_base_branch.sh +++ b/.circleci/merge_base_branch.sh @@ -17,7 +17,7 @@ then GITBASE="dinsic" else # Get the reference, using the GitHub API - GITBASE=`wget -O- https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'` + GITBASE=`wget -O- https://api.github.com/repos/matrix-org/synapse-dinsic/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'` fi # Show what we are before diff --git a/scripts-dev/check-newsfragment b/scripts-dev/check-newsfragment index 0ec5075e79..b8a85abe18 100755 --- a/scripts-dev/check-newsfragment +++ b/scripts-dev/check-newsfragment @@ -5,9 +5,9 @@ set -e -# make sure that origin/develop is up to date -git remote set-branches --add origin develop -git fetch origin develop +# make sure that origin/dinsic is up to date +git remote set-branches --add origin dinsic +git fetch origin dinsic # if there are changes in the debian directory, check that the debian changelog # has been updated diff --git a/tox.ini b/tox.ini index 543b232ae7..b702cf670b 100644 --- a/tox.ini +++ b/tox.ini @@ -128,7 +128,7 @@ commands = /bin/sh -c "isort -c -df -sp setup.cfg -rc synapse tests" skip_install = True deps = towncrier>=18.6.0rc1 commands = - python -m towncrier.check --compare-with=origin/develop + python -m towncrier.check --compare-with=origin/dinsic basepython = python3.6 [testenv:check-sampleconfig] -- cgit 1.5.1 From b15557cd466c30281c92436c2d5a05b9680c1786 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 6 Sep 2019 15:32:11 +0100 Subject: Don't treat 3PID revokation as a new 3PID invite --- synapse/third_party_rules/access_rules.py | 26 ++++++++++---- tests/rest/client/test_room_access_rules.py | 55 +++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+), 7 deletions(-) diff --git a/synapse/third_party_rules/access_rules.py b/synapse/third_party_rules/access_rules.py index 1a295ea7ce..5698e3e062 100644 --- a/synapse/third_party_rules/access_rules.py +++ b/synapse/third_party_rules/access_rules.py @@ -265,7 +265,7 @@ class RoomAccessRules(object): # Make sure we don't apply "direct" if the room has more than two members. if new_rule == ACCESS_RULE_DIRECT: existing_members, threepid_tokens = self._get_members_and_tokens_from_state( - state_events, + state_events, event ) if len(existing_members) > 2 or len(threepid_tokens) > 1: @@ -356,7 +356,7 @@ class RoomAccessRules(object): """ # Get the room memberships and 3PID invite tokens from the room's state. existing_members, threepid_tokens = self._get_members_and_tokens_from_state( - state_events, + state_events, event ) # There should never be more than one 3PID invite in the room state: if the second @@ -494,13 +494,14 @@ class RoomAccessRules(object): return join_rule_event.content.get("join_rule") @staticmethod - def _get_members_and_tokens_from_state(state_events): + def _get_members_and_tokens_from_state(state_events, event): """Retrieves from a list of state events the list of users that have a m.room.member event in the room, and the tokens of 3PID invites in the room. 
Args: state_events (dict[tuple[event type, state key], EventBase]): The set of state events. + event (EventBase): The event being checked. Returns: existing_members (list[str]): List of targets of the m.room.member events in the state. @@ -509,13 +510,24 @@ class RoomAccessRules(object): """ existing_members = [] threepid_invite_tokens = [] - for key, event in state_events.items(): + for key, state_event in state_events.items(): if key[0] == EventTypes.Member: - existing_members.append(event.state_key) + existing_members.append(state_event.state_key) if key[0] == EventTypes.ThirdPartyInvite: - threepid_invite_tokens.append(event.state_key) + threepid_invite_tokens.append(state_event.state_key) - return existing_members, threepid_invite_tokens + # If the event is a state event, there already is an event with the same state key + # in the room's state, then the event is updating an existing event from the + # room's state, in which case we need to remove the entry from the list in order + # to avoid conflicts. + if event.is_state(): + def filter_out_event(state_key): + return event.state_key != state_key + + existing_members = filter(filter_out_event, existing_members) + threepid_invite_tokens = filter(filter_out_event, threepid_invite_tokens) + + return list(existing_members), list(threepid_invite_tokens) @staticmethod def _is_invite_from_threepid(invite, threepid_invite_token): diff --git a/tests/rest/client/test_room_access_rules.py b/tests/rest/client/test_room_access_rules.py index 7e23add6b7..bb164c1e5e 100644 --- a/tests/rest/client/test_room_access_rules.py +++ b/tests/rest/client/test_room_access_rules.py @@ -483,6 +483,44 @@ class RoomAccessTestCase(unittest.HomeserverTestCase): expected_code=403, ) + def test_revoke_3pid_invite_direct(self): + """Tests that revoking a 3PID invite doesn't cause the room access rules module to + confuse the revokation as a new 3PID invite. 
+ """ + invite_token = "sometoken" + + invite_body = { + "display_name": "ker...@exa...", + "public_keys": [ + { + "key_validity_url": "https://validity_url", + "public_key": "ta8IQ0u1sp44HVpxYi7dFOdS/bfwDjcy4xLFlfY5KOA" + }, + { + "key_validity_url": "https://validity_url", + "public_key": "4_9nzEeDwR5N9s51jPodBiLnqH43A2_g2InVT137t9I" + } + ], + "key_validity_url": "https://validity_url", + "public_key": "ta8IQ0u1sp44HVpxYi7dFOdS/bfwDjcy4xLFlfY5KOA" + } + + self.send_state_with_state_key( + room_id=self.direct_rooms[1], + event_type=EventTypes.ThirdPartyInvite, + state_key=invite_token, + body=invite_body, + tok=self.tok, + ) + + self.send_state_with_state_key( + room_id=self.direct_rooms[1], + event_type=EventTypes.ThirdPartyInvite, + state_key=invite_token, + body={}, + tok=self.tok, + ) + def create_room( self, direct=False, rule=None, preset=RoomCreationPreset.TRUSTED_PRIVATE_CHAT, initial_state=None, expected_code=200, @@ -574,3 +612,20 @@ class RoomAccessTestCase(unittest.HomeserverTestCase): ) self.render(request) self.assertEqual(channel.code, expected_code, channel.result) + + def send_state_with_state_key( + self, room_id, event_type, state_key, body, tok, expect_code=200 + ): + path = "/_matrix/client/r0/rooms/%s/state/%s/%s" % ( + room_id, event_type, state_key + ) + + request, channel = self.make_request( + "PUT", path, json.dumps(body), access_token=tok + ) + self.render(request) + + self.assertEqual(channel.code, expect_code, channel.result) + + return channel.json_body + -- cgit 1.5.1 From b2ec4467c942975dea8d69a0bec76d88e344f54d Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 6 Sep 2019 15:36:43 +0100 Subject: Don't process revoked/redacted events as part of the room's membership info --- synapse/third_party_rules/access_rules.py | 4 ++-- tests/rest/client/test_room_access_rules.py | 10 ++++++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/synapse/third_party_rules/access_rules.py b/synapse/third_party_rules/access_rules.py index 5698e3e062..ac09807738 100644 --- a/synapse/third_party_rules/access_rules.py +++ b/synapse/third_party_rules/access_rules.py @@ -511,9 +511,9 @@ class RoomAccessRules(object): existing_members = [] threepid_invite_tokens = [] for key, state_event in state_events.items(): - if key[0] == EventTypes.Member: + if key[0] == EventTypes.Member and state_event.content: existing_members.append(state_event.state_key) - if key[0] == EventTypes.ThirdPartyInvite: + if key[0] == EventTypes.ThirdPartyInvite and state_event.content: threepid_invite_tokens.append(state_event.state_key) # If the event is a state event, there already is an event with the same state key diff --git a/tests/rest/client/test_room_access_rules.py b/tests/rest/client/test_room_access_rules.py index bb164c1e5e..8fa828fa15 100644 --- a/tests/rest/client/test_room_access_rules.py +++ b/tests/rest/client/test_room_access_rules.py @@ -521,6 +521,16 @@ class RoomAccessTestCase(unittest.HomeserverTestCase): tok=self.tok, ) + invite_token = "someothertoken" + + self.send_state_with_state_key( + room_id=self.direct_rooms[1], + event_type=EventTypes.ThirdPartyInvite, + state_key=invite_token, + body=invite_body, + tok=self.tok, + ) + def create_room( self, direct=False, rule=None, preset=RoomCreationPreset.TRUSTED_PRIVATE_CHAT, initial_state=None, expected_code=200, -- cgit 1.5.1 From e35c30ed4bb132cb20132c31cdabaebfce1b69bb Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 6 Sep 2019 15:44:48 +0100 Subject: Fix bogus conflict resolution --- 
 tests/rest/client/test_room_access_rules.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/rest/client/test_room_access_rules.py b/tests/rest/client/test_room_access_rules.py
index 8da56c29dd..ab76e0f25a 100644
--- a/tests/rest/client/test_room_access_rules.py
+++ b/tests/rest/client/test_room_access_rules.py
@@ -586,6 +586,7 @@ class RoomAccessTestCase(unittest.HomeserverTestCase):
             body=topic_content,
             tok=self.tok,
             expect_code=403,
+        )

     def test_revoke_3pid_invite_direct(self):
         """Tests that revoking a 3PID invite doesn't cause the room access rules module to
--
cgit 1.5.1


From e1c4d2c8bacaa85712eb3a26d6b7dd2fb34585d4 Mon Sep 17 00:00:00 2001
From: Brendan Abolivier
Date: Fri, 6 Sep 2019 15:48:02 +0100
Subject: Only filter on 3PID invite tokens

---
 synapse/third_party_rules/access_rules.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/synapse/third_party_rules/access_rules.py b/synapse/third_party_rules/access_rules.py
index 0fa36ba200..2635200989 100644
--- a/synapse/third_party_rules/access_rules.py
+++ b/synapse/third_party_rules/access_rules.py
@@ -569,13 +569,13 @@
         # room's state, in which case we need to remove the entry from the list in order
         # to avoid conflicts.
         if event.is_state():
-            def filter_out_event(state_key):
-                return event.state_key != state_key
-
-            existing_members = filter(filter_out_event, existing_members)
-            threepid_invite_tokens = filter(filter_out_event, threepid_invite_tokens)
+            existing_members = existing_members
+            threepid_invite_tokens = filter(
+                lambda sk: event.state_key != sk,
+                threepid_invite_tokens,
+            )

-        return list(existing_members), list(threepid_invite_tokens)
+        return existing_members, len(threepid_invite_tokens)
--
cgit 1.5.1


From 160a0c767d66a39eb29fb3044e02a73147970d38 Mon Sep 17 00:00:00 2001
From: Brendan Abolivier
Date: Fri, 6 Sep 2019 15:51:30 +0100
Subject: Changelog

---
 changelog.d/2.bugfix | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/2.bugfix

diff --git a/changelog.d/2.bugfix b/changelog.d/2.bugfix
new file mode 100644
index 0000000000..317d180496
--- /dev/null
+++ b/changelog.d/2.bugfix
@@ -0,0 +1 @@
+Don't treat 3PID revokation as a new 3PID invite.
-- cgit 1.5.1 From 6a78a0ce9b843ba779042a3a262283bd6d7938a0 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 6 Sep 2019 15:51:52 +0100 Subject: Lint --- tests/rest/client/test_room_access_rules.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/rest/client/test_room_access_rules.py b/tests/rest/client/test_room_access_rules.py index ab76e0f25a..7d3ba0ee2a 100644 --- a/tests/rest/client/test_room_access_rules.py +++ b/tests/rest/client/test_room_access_rules.py @@ -743,4 +743,3 @@ class RoomAccessTestCase(unittest.HomeserverTestCase): self.assertEqual(channel.code, expect_code, channel.result) return channel.json_body - -- cgit 1.5.1 From 5076c2ebf6c967590c0fc8c51ef13154ac356ee6 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 6 Sep 2019 17:48:42 +0100 Subject: Typo --- synapse/third_party_rules/access_rules.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/third_party_rules/access_rules.py b/synapse/third_party_rules/access_rules.py index 2635200989..33496519c3 100644 --- a/synapse/third_party_rules/access_rules.py +++ b/synapse/third_party_rules/access_rules.py @@ -575,7 +575,7 @@ class RoomAccessRules(object): threepid_invite_tokens, ) - return existing_members, len(threepid_invite_tokens) + return existing_members, list(threepid_invite_tokens) @staticmethod def _is_invite_from_threepid(invite, threepid_invite_token): -- cgit 1.5.1 From 665dd9f7f8db2f8d38f454d4d5b96efcf163a5db Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 9 Sep 2019 11:35:56 +0100 Subject: Ensure the password reset template is correctly converted to binary Regardless of the Python version --- synapse/rest/client/v2_alpha/account.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 32770a4a95..20e1e01592 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -17,6 +17,7 @@ import logging import re +from six import ensure_binary from six.moves import http_client import jinja2 @@ -295,7 +296,7 @@ class PasswordResetSubmitTokenServlet(RestServlet): ) request.setResponseCode(e.code) - request.write(html.encode('utf-8')) + request.write(ensure_binary(html)) finish_request(request) defer.returnValue(None) -- cgit 1.5.1 From a8c7c26e7d76fac9a7749a0b13c47cd32802b298 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 9 Sep 2019 11:49:42 +0100 Subject: Changelog --- changelog.d/3.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3.bugfix diff --git a/changelog.d/3.bugfix b/changelog.d/3.bugfix new file mode 100644 index 0000000000..cc4bcefa80 --- /dev/null +++ b/changelog.d/3.bugfix @@ -0,0 +1 @@ +Fix encoding on password reset HTML responses in Python 2. -- cgit 1.5.1 From 6ceaf90e13c019fc55db05f79f47922106d55d63 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 9 Sep 2019 16:33:09 +0100 Subject: Revert "Ensure the password reset template is correctly converted to binary" This reverts commit 665dd9f7f8db2f8d38f454d4d5b96efcf163a5db. 
--- synapse/rest/client/v2_alpha/account.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 20e1e01592..32770a4a95 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -17,7 +17,6 @@ import logging import re -from six import ensure_binary from six.moves import http_client import jinja2 @@ -296,7 +295,7 @@ class PasswordResetSubmitTokenServlet(RestServlet): ) request.setResponseCode(e.code) - request.write(ensure_binary(html)) + request.write(html.encode('utf-8')) finish_request(request) defer.returnValue(None) -- cgit 1.5.1 From 21a6f0b12c3e0db147b73b26fa465460f46a3bd8 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 9 Sep 2019 16:34:47 +0100 Subject: Read all files as UTF-8 --- synapse/config/_base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/config/_base.py b/synapse/config/_base.py index f7d7f153bb..ae05667a40 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -15,6 +15,7 @@ import argparse import errno +from io import open import os from textwrap import dedent @@ -131,7 +132,7 @@ class Config(object): @classmethod def read_file(cls, file_path, config_name): cls.check_file(file_path, config_name) - with open(file_path) as file_stream: + with open(file_path, encoding="utf-8") as file_stream: return file_stream.read() @staticmethod -- cgit 1.5.1 From 76f70779f18a130d6ec4194dba8ae42eeaa5afaa Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 9 Sep 2019 16:36:00 +0100 Subject: Revert "Merge pull request #5932 from matrix-org/babolivier/account_validity_template_encode" This reverts commit 84e695f506faf54982b9e19dceb9c02acffad95f, reversing changes made to 99eec6d2d5cc76e645c3fd7ca6cda85b2bab6feb. 
--- synapse/python_dependencies.py | 2 +- synapse/rest/client/v2_alpha/account_validity.py | 4 +--- tests/rest/client/v2_alpha/test_register.py | 5 ++--- 3 files changed, 4 insertions(+), 7 deletions(-) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 9692f0bf75..7dfa78dadb 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -67,7 +67,7 @@ REQUIREMENTS = [ "pymacaroons>=0.13.0", "msgpack>=0.5.0", "phonenumbers>=8.2.0", - "six>=1.12", + "six>=1.10", # prometheus_client 0.4.0 changed the format of counter metrics # (cf https://github.com/matrix-org/synapse/issues/4001) "prometheus_client>=0.0.18,<0.4.0", diff --git a/synapse/rest/client/v2_alpha/account_validity.py b/synapse/rest/client/v2_alpha/account_validity.py index 98f7c60016..8091b78285 100644 --- a/synapse/rest/client/v2_alpha/account_validity.py +++ b/synapse/rest/client/v2_alpha/account_validity.py @@ -15,8 +15,6 @@ import logging -from six import ensure_binary - from twisted.internet import defer from synapse.api.errors import AuthError, SynapseError @@ -65,7 +63,7 @@ class AccountValidityRenewServlet(RestServlet): request.setResponseCode(status_code) request.setHeader(b"Content-Type", b"text/html; charset=utf-8") request.setHeader(b"Content-Length", b"%d" % (len(response),)) - request.write(ensure_binary(response)) + request.write(response.encode("utf8")) finish_request(request) defer.returnValue(None) diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index b28de3663c..af1e600591 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -20,7 +20,6 @@ import json import os from mock import Mock -from six import ensure_binary import pkg_resources @@ -438,7 +437,7 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): # Check that the HTML we're getting is the one we expect on a successful renewal. expected_html = self.hs.config.account_validity.account_renewed_html_content self.assertEqual( - channel.result["body"], ensure_binary(expected_html), channel.result + channel.result["body"], expected_html.encode("utf8"), channel.result ) # Move 3 days forward. If the renewal failed, every authed request with @@ -468,7 +467,7 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): # invalid/unknown token. 
expected_html = self.hs.config.account_validity.invalid_token_html_content self.assertEqual( - channel.result["body"], ensure_binary(expected_html), channel.result + channel.result["body"], expected_html.encode("utf8"), channel.result ) def test_manual_email_send(self): -- cgit 1.5.1 From b0eec085bd518e76cf96b84f98e90860dca1a5c7 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 9 Sep 2019 16:41:46 +0100 Subject: Lint --- synapse/config/_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/config/_base.py b/synapse/config/_base.py index ae05667a40..71a81c1cc9 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -15,8 +15,8 @@ import argparse import errno -from io import open import os +from io import open from textwrap import dedent from six import integer_types -- cgit 1.5.1 From c8f03a8fb0160956c5c593f63641e9a522ee2b8e Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 9 Sep 2019 17:13:37 +0100 Subject: Rename io.open import to limite side-effects --- synapse/config/_base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/config/_base.py b/synapse/config/_base.py index 71a81c1cc9..bf039e5823 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -16,7 +16,7 @@ import argparse import errno import os -from io import open +from io import open as io_open from textwrap import dedent from six import integer_types @@ -132,7 +132,7 @@ class Config(object): @classmethod def read_file(cls, file_path, config_name): cls.check_file(file_path, config_name) - with open(file_path, encoding="utf-8") as file_stream: + with io_open(file_path, encoding="utf-8") as file_stream: return file_stream.read() @staticmethod -- cgit 1.5.1 From 66be293c79cb240f37f585af2451dd814d8894e2 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 9 Sep 2019 17:52:41 +0100 Subject: Process revocations in _on_membership_or_invite_direct --- synapse/third_party_rules/access_rules.py | 29 +++++++++++------------------ 1 file changed, 11 insertions(+), 18 deletions(-) diff --git a/synapse/third_party_rules/access_rules.py b/synapse/third_party_rules/access_rules.py index 33496519c3..55b59fba8d 100644 --- a/synapse/third_party_rules/access_rules.py +++ b/synapse/third_party_rules/access_rules.py @@ -274,7 +274,7 @@ class RoomAccessRules(object): # Make sure we don't apply "direct" if the room has more than two members. if new_rule == ACCESS_RULE_DIRECT: existing_members, threepid_tokens = self._get_members_and_tokens_from_state( - state_events, event + state_events ) if len(existing_members) > 2 or len(threepid_tokens) > 1: @@ -365,7 +365,7 @@ class RoomAccessRules(object): """ # Get the room memberships and 3PID invite tokens from the room's state. existing_members, threepid_tokens = self._get_members_and_tokens_from_state( - state_events, event + state_events ) # There should never be more than one 3PID invite in the room state: if the second @@ -374,8 +374,12 @@ class RoomAccessRules(object): # join the first time), Synapse will successfully look it up before attempting to # store an invite on the IS. if len(threepid_tokens) == 1 and event.type == EventTypes.ThirdPartyInvite: - # If we already have a 3PID invite in flight, don't accept another one. - return False + # If we already have a 3PID invite in flight, don't accept another one, unless + # the new one has the same invite token as its state key. 
This is because 3PID + # invite revocations must be allowed, and a revocation is basically a new 3PID + # invite event with an empty content and the same token as the invite it + # revokes. + return event.state_key in threepid_tokens if len(existing_members) == 2: # If the user was within the two initial user of the room, Synapse would have @@ -542,14 +546,13 @@ class RoomAccessRules(object): return join_rule_event.content.get("join_rule") @staticmethod - def _get_members_and_tokens_from_state(state_events, event): + def _get_members_and_tokens_from_state(state_events): """Retrieves from a list of state events the list of users that have a m.room.member event in the room, and the tokens of 3PID invites in the room. Args: state_events (dict[tuple[event type, state key], EventBase]): The set of state events. - event (EventBase): The event being checked. Returns: existing_members (list[str]): List of targets of the m.room.member events in the state. @@ -562,20 +565,10 @@ class RoomAccessRules(object): if key[0] == EventTypes.Member and state_event.content: existing_members.append(state_event.state_key) if key[0] == EventTypes.ThirdPartyInvite and state_event.content: + # Don't include revoked invites. threepid_invite_tokens.append(state_event.state_key) - # If the event is a state event, there already is an event with the same state key - # in the room's state, then the event is updating an existing event from the - # room's state, in which case we need to remove the entry from the list in order - # to avoid conflicts. - if event.is_state(): - existing_members = existing_members - threepid_invite_tokens = filter( - lambda sk: event.state_key != sk, - threepid_invite_tokens, - ) - - return existing_members, list(threepid_invite_tokens) + return existing_members, threepid_invite_tokens @staticmethod def _is_invite_from_threepid(invite, threepid_invite_token): -- cgit 1.5.1 From 0b993427e1f02b8e400da95a8d3c1bf2de20c16b Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 10 Sep 2019 10:18:37 +0100 Subject: Update changelog.d/2.bugfix Co-Authored-By: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> --- changelog.d/2.bugfix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.d/2.bugfix b/changelog.d/2.bugfix index 317d180496..4fe5691468 100644 --- a/changelog.d/2.bugfix +++ b/changelog.d/2.bugfix @@ -1 +1 @@ -Don't treat 3PID revokation as a new 3PID invite. +Don't treat 3PID revocation as a new 3PID invite. -- cgit 1.5.1 From ae036ed63605cbe0bb62010565eece6c7b1a9249 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 19 Sep 2019 11:58:06 +0100 Subject: Add unit tests for strip_invalid_mxid_characters --- tests/test_types.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/tests/test_types.py b/tests/test_types.py index d83c36559f..73d3b2cda2 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -12,9 +12,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from six import string_types from synapse.api.errors import SynapseError -from synapse.types import GroupID, RoomAlias, UserID, map_username_to_mxid_localpart +from synapse.types import ( + GroupID, + RoomAlias, + UserID, + map_username_to_mxid_localpart, + strip_invalid_mxid_characters, +) from tests import unittest from tests.utils import TestHomeServer @@ -106,3 +113,16 @@ class MapUsernameTestCase(unittest.TestCase): self.assertEqual( map_username_to_mxid_localpart(u'têst'.encode('utf-8')), "t=c3=aast" ) + + +class StripInvalidMxidCharactersTestCase(unittest.TestCase): + def test_return_type(self): + unstripped = strip_invalid_mxid_characters("test") + stripped = strip_invalid_mxid_characters("test@") + + self.assertTrue(isinstance(unstripped, string_types), type(unstripped)) + self.assertTrue(isinstance(stripped, string_types), type(stripped)) + + def test_strip(self): + stripped = strip_invalid_mxid_characters("test@") + self.assertEqual(stripped, "test", stripped) -- cgit 1.5.1 From 30c085fbc33e7ad47c31f28debd23bacaf41b599 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 19 Sep 2019 12:03:10 +0100 Subject: Use six.moves.filter when filtering out from MXID Python 2's filter() function and Python 3's don't return the same type when processing a string (respectively str and filter), therefore use six's compatibility mapping (which resolves to itertools.ifilter() if using Python2), then generate a string from the filtered list, in order to ensure consistent behaviour between Python 2 and Python 3. --- synapse/types.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/types.py b/synapse/types.py index eebe29d1f0..842275e0fa 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -17,6 +17,7 @@ import string from collections import namedtuple import attr +from six.moves import filter from synapse.api.errors import SynapseError @@ -240,7 +241,8 @@ def strip_invalid_mxid_characters(localpart): Returns: localpart (basestring): the localpart having been stripped """ - return filter(lambda c: c in mxid_localpart_allowed_characters, localpart) + filtered = filter(lambda c: c in mxid_localpart_allowed_characters, localpart) + return "".join(list(filtered)) UPPER_CASE_PATTERN = re.compile(b"[A-Z_]") -- cgit 1.5.1 From 8bc39401fec09c4338c9cc2f1dfe139cf5461945 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 19 Sep 2019 13:01:05 +0100 Subject: Lint --- synapse/types.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/types.py b/synapse/types.py index 842275e0fa..6d20ef3641 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -16,9 +16,10 @@ import re import string from collections import namedtuple -import attr from six.moves import filter +import attr + from synapse.api.errors import SynapseError -- cgit 1.5.1 From 6f364634eed95c99cbfa9fed610701bf0c9ebd62 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 19 Sep 2019 13:02:23 +0100 Subject: Changelog --- changelog.d/4.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/4.bugfix diff --git a/changelog.d/4.bugfix b/changelog.d/4.bugfix new file mode 100644 index 0000000000..fe717920a6 --- /dev/null +++ b/changelog.d/4.bugfix @@ -0,0 +1 @@ +Fix handling of filtered strings in Python 3. 
-- cgit 1.5.1 From 736394d46b0b7c35ba3f394fc1e74448a07c22f5 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 20 Sep 2019 10:07:55 +0100 Subject: Remove unnecessary cast to list --- synapse/types.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/types.py b/synapse/types.py index 6d20ef3641..e6afc05cee 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -243,7 +243,7 @@ def strip_invalid_mxid_characters(localpart): localpart (basestring): the localpart having been stripped """ filtered = filter(lambda c: c in mxid_localpart_allowed_characters, localpart) - return "".join(list(filtered)) + return "".join(filtered) UPPER_CASE_PATTERN = re.compile(b"[A-Z_]") -- cgit 1.5.1 From af597b1eb6a675979aaa7a6347fe3b988c3f1eca Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 24 Sep 2019 17:05:12 +0100 Subject: Move get_retention_policy_for_room to RoomWorkerStore --- synapse/storage/room.py | 110 ++++++++++++++++++++++++------------------------ 1 file changed, 55 insertions(+), 55 deletions(-) diff --git a/synapse/storage/room.py b/synapse/storage/room.py index c61dfa527f..eba4acd758 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -223,6 +223,61 @@ class RoomWorkerStore(SQLBaseStore): else: defer.returnValue(None) + @cachedInlineCallbacks() + def get_retention_policy_for_room(self, room_id): + """Get the retention policy for a given room. + + If no retention policy has been found for this room, returns a policy defined + by the configured default policy (which has None as both the 'min_lifetime' and + the 'max_lifetime' if no default policy has been defined in the server's + configuration). + + Args: + room_id (str): The ID of the room to get the retention policy of. + + Returns: + dict[int, int]: "min_lifetime" and "max_lifetime" for this room. + """ + + def get_retention_policy_for_room_txn(txn): + txn.execute( + """ + SELECT min_lifetime, max_lifetime FROM room_retention + INNER JOIN current_state_events USING (event_id, room_id) + WHERE room_id = ?; + """, + (room_id,) + ) + + return self.cursor_to_dict(txn) + + ret = yield self.runInteraction( + "get_retention_policy_for_room", + get_retention_policy_for_room_txn, + ) + + # If we don't know this room ID, ret will be None, in this case return the default + # policy. + if not ret: + defer.returnValue({ + "min_lifetime": self.config.retention_default_min_lifetime, + "max_lifetime": self.config.retention_default_max_lifetime, + }) + + row = ret[0] + + # If one of the room's policy's attributes isn't defined, use the matching + # attribute from the default policy. + # The default values will be None if no default policy has been defined, or if one + # of the attributes is missing from the default policy. + if row["min_lifetime"] is None: + row["min_lifetime"] = self.config.retention_default_min_lifetime + + if row["max_lifetime"] is None: + row["max_lifetime"] = self.config.retention_default_max_lifetime + + defer.returnValue(row) + class RoomStore(RoomWorkerStore, SearchStore): def __init__(self, db_conn, hs): @@ -835,58 +890,3 @@ class RoomStore(RoomWorkerStore, SearchStore): ) defer.returnValue(rooms) - - @cachedInlineCallbacks() - def get_retention_policy_for_room(self, room_id): - """Get the retention policy for a given room. 
- - If no retention policy has been found for this room, returns a policy defined - by the configured default policy (which has None as both the 'min_lifetime' and - the 'max_lifetime' if no default policy has been defined in the server's - configuration). - - Args: - room_id (str): The ID of the room to get the retention policy of. - - Returns: - dict[int, int]: "min_lifetime" and "max_lifetime" for this room. - """ - - def get_retention_policy_for_room_txn(txn): - txn.execute( - """ - SELECT min_lifetime, max_lifetime FROM room_retention - INNER JOIN current_state_events USING (event_id, room_id) - WHERE room_id = ?; - """, - (room_id,) - ) - - return self.cursor_to_dict(txn) - - ret = yield self.runInteraction( - "get_retention_policy_for_room", - get_retention_policy_for_room_txn, - ) - - # If we don't know this room ID, ret will be None, in this case return the default - # policy. - if not ret: - defer.returnValue({ - "min_lifetime": self.config.retention_default_min_lifetime, - "max_lifetime": self.config.retention_default_max_lifetime, - }) - - row = ret[0] - - # If one of the room's policy's attributes isn't defined, use the matching - # attribute from the default policy. - # The default values will be None if no default policy has been defined, or if one - # of the attributes is missing from the default policy. - if row["min_lifetime"] is None: - row["min_lifetime"] = self.config.retention_default_min_lifetime - - if row["max_lifetime"] is None: - row["max_lifetime"] = self.config.retention_default_max_lifetime - - defer.returnValue(row) -- cgit 1.5.1 From 32bc69d0f52bcf6ebfa7d1daeafaafb535e668bc Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 24 Sep 2019 17:06:20 +0100 Subject: Changelog --- changelog.d/5.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5.bugfix diff --git a/changelog.d/5.bugfix b/changelog.d/5.bugfix new file mode 100644 index 0000000000..4c94daedce --- /dev/null +++ b/changelog.d/5.bugfix @@ -0,0 +1 @@ +Fix room retention policy management for in worker mode. -- cgit 1.5.1 From 25815841b197964703312c3f96fac728e8b1f4a2 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 24 Sep 2019 17:24:28 +0100 Subject: Consider every room as having no retention policy if the feature is disabled --- synapse/storage/room.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/synapse/storage/room.py b/synapse/storage/room.py index eba4acd758..db3d052d33 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -238,6 +238,14 @@ class RoomWorkerStore(SQLBaseStore): Returns: dict[int, int]: "min_lifetime" and "max_lifetime" for this room. """ + # If the room retention feature is disabled, return a policy with no minimum nor + # maximum, in order not to filter out events we should filter out when sending to + # the client. + if not self.config.retention_enabled: + defer.returnValue({ + "min_lifetime": None, + "max_lifetime": None, + }) def get_retention_policy_for_room_txn(txn): txn.execute( -- cgit 1.5.1 From f3dfbc82d5b7c98040b4d95238b533b2ca95cd57 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 24 Sep 2019 17:25:13 +0100 Subject: Typo --- changelog.d/5.bugfix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.d/5.bugfix b/changelog.d/5.bugfix index 4c94daedce..53f57f46ca 100644 --- a/changelog.d/5.bugfix +++ b/changelog.d/5.bugfix @@ -1 +1 @@ -Fix room retention policy management for in worker mode. +Fix room retention policy management in worker mode. 
-- cgit 1.5.1 From e6a7f8964f6f6f87493a6f81002d2149e177d639 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 26 Sep 2019 11:12:21 +0100 Subject: Allow membership events which membership isn't join or invite in restricted rooms --- synapse/third_party_rules/access_rules.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/synapse/third_party_rules/access_rules.py b/synapse/third_party_rules/access_rules.py index 55b59fba8d..bd79de845f 100644 --- a/synapse/third_party_rules/access_rules.py +++ b/synapse/third_party_rules/access_rules.py @@ -17,7 +17,7 @@ import email.utils from twisted.internet import defer -from synapse.api.constants import EventTypes, JoinRules, RoomCreationPreset +from synapse.api.constants import EventTypes, JoinRules, Membership, RoomCreationPreset from synapse.api.errors import SynapseError from synapse.config._base import ConfigError from synapse.types import get_domain_from_id @@ -336,6 +336,14 @@ class RoomAccessRules(object): # called before check_event_allowed. if event.type == EventTypes.ThirdPartyInvite: return True + + # We only need to process "join" and "invite" memberships, in order to be backward + # compatible, e.g. if a user from a blacklisted server joined a restricted room + # before the rules started being enforced on the server, that user must be able to + # leave it. + if event.membership not in [Membership.JOIN, Membership.INVITE]: + return True + invitee_domain = get_domain_from_id(event.state_key) return invitee_domain not in self.domains_forbidden_when_restricted -- cgit 1.5.1 From 0d069e440749dd18b25a9ad5f77afd0fa3e6e668 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 26 Sep 2019 11:15:49 +0100 Subject: Changelog --- changelog.d/6.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/6.bugfix diff --git a/changelog.d/6.bugfix b/changelog.d/6.bugfix new file mode 100644 index 0000000000..43ab65cc95 --- /dev/null +++ b/changelog.d/6.bugfix @@ -0,0 +1 @@ +Don't forbid membership events which membership isn't 'join' or 'invite' in restricted rooms, so that users who got into these rooms before the access rules started to be enforced can leave them. -- cgit 1.5.1 From acf6b2388e9b5d1a03b53d4c3078f550caf75d8b Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 27 Sep 2019 15:21:03 +0100 Subject: Lint --- synapse/handlers/deactivate_account.py | 35 +++++++++++++++++++- tests/rest/client/v2_alpha/test_account.py | 51 ++++++++++++++++++++++++++++++ 2 files changed, 85 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index b8cf6ab57c..432fed1d04 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -128,7 +128,40 @@ class DeactivateAccountHandler(BaseHandler): # Mark the user as deactivated. yield self.store.set_user_deactivated_status(user_id, True) - defer.returnValue(identity_server_supports_unbinding) + return identity_server_supports_unbinding + + @defer.inlineCallbacks + def _reject_pending_invites_for_user(self, user_id): + """Reject pending invites addressed to a given user ID. + + Args: + user_id (str): The user ID to reject pending invites for. 
+ """ + user = UserID.from_string(user_id) + pending_invites = yield self.store.get_invited_rooms_for_user(user_id) + + logger.info(pending_invites) + + for room in pending_invites: + try: + yield self._room_member_handler.update_membership( + create_requester(user), + user, + room.room_id, + "leave", + ratelimit=False, + require_consent=False, + ) + logger.info( + "Rejected invite for user %r in room %r", user_id, room.room_id + ) + except Exception: + logger.exception( + "Failed to reject invite for user %r in room %r:" + " ignoring and continuing", + user_id, + room.room_id, + ) def _start_user_parting(self): """ diff --git a/tests/rest/client/v2_alpha/test_account.py b/tests/rest/client/v2_alpha/test_account.py index a60a4a3b87..f2b0c37d19 100644 --- a/tests/rest/client/v2_alpha/test_account.py +++ b/tests/rest/client/v2_alpha/test_account.py @@ -284,3 +284,54 @@ class DeactivateTestCase(unittest.HomeserverTestCase): request, channel = self.make_request("GET", "account/whoami") self.render(request) self.assertEqual(request.code, 401) + + @unittest.INFO + def test_pending_invites(self): + """Tests that deactivating a user rejects every pending invite for them.""" + store = self.hs.get_datastore() + + inviter_id = self.register_user("inviter", "test") + inviter_tok = self.login("inviter", "test") + + invitee_id = self.register_user("invitee", "test") + invitee_tok = self.login("invitee", "test") + + # Make @inviter:test invite @invitee:test in a new room. + room_id = self.helper.create_room_as(inviter_id, tok=inviter_tok) + self.helper.invite(room=room_id, src=inviter_id, targ=invitee_id, tok=inviter_tok) + + # Make sure the invite is here. + pending_invites = self.get_success(store.get_invited_rooms_for_user(invitee_id)) + self.assertEqual(len(pending_invites), 1, pending_invites) + self.assertEqual(pending_invites[0].room_id, room_id, pending_invites) + + # Deactivate @invitee:test. + self.deactivate(invitee_id, invitee_tok) + + # Check that the invite isn't there anymore. + pending_invites = self.get_success(store.get_invited_rooms_for_user(invitee_id)) + self.assertEqual(len(pending_invites), 0, pending_invites) + + # Check that the membership of @invitee:test in the room is now "leave". + memberships = self.get_success( + store.get_rooms_for_user_where_membership_is(invitee_id, [Membership.LEAVE]) + ) + self.assertEqual(len(memberships), 1, memberships) + self.assertEqual(memberships[0].room_id, room_id, memberships) + + def deactivate(self, user_id, tok): + request_data = json.dumps( + { + "auth": { + "type": "m.login.password", + "user": user_id, + "password": "test", + }, + "erase": False, + } + ) + request, channel = self.make_request( + "POST", "account/deactivate", request_data, access_token=tok + ) + self.render(request) + self.assertEqual(request.code, 200) -- cgit 1.5.1 From 97672c03e2e1355206eead17748912c59be49fb8 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 27 Sep 2019 15:23:07 +0100 Subject: ok --- tests/rest/client/v2_alpha/test_account.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/rest/client/v2_alpha/test_account.py b/tests/rest/client/v2_alpha/test_account.py index f2b0c37d19..da68e4539d 100644 --- a/tests/rest/client/v2_alpha/test_account.py +++ b/tests/rest/client/v2_alpha/test_account.py @@ -298,7 +298,9 @@ class DeactivateTestCase(unittest.HomeserverTestCase): # Make @inviter:test invite @invitee:test in a new room. 
         room_id = self.helper.create_room_as(inviter_id, tok=inviter_tok)
-        self.helper.invite(room=room_id, src=inviter_id, targ=invitee_id, tok=inviter_tok)
+        self.helper.invite(
+            room=room_id, src=inviter_id, targ=invitee_id, tok=inviter_tok
+        )

         # Make sure the invite is here.
         pending_invites = self.get_success(store.get_invited_rooms_for_user(invitee_id))
--
cgit 1.5.1


From 318fed18da5af69615601e2ce09a994b24c619aa Mon Sep 17 00:00:00 2001
From: Brendan Abolivier
Date: Fri, 27 Sep 2019 16:10:24 +0100
Subject: Update changelog.d/6125.feature

Co-Authored-By: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
---
 changelog.d/6125.feature | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/6125.feature

diff --git a/changelog.d/6125.feature b/changelog.d/6125.feature
new file mode 100644
index 0000000000..cbe5f8d3c8
--- /dev/null
+++ b/changelog.d/6125.feature
@@ -0,0 +1 @@
+Reject all pending invites for a user during deactivation.
--
cgit 1.5.1


From 0ae6c8efc13ab2a0c89e1099996c5fe768aa8350 Mon Sep 17 00:00:00 2001
From: Brendan Abolivier
Date: Fri, 27 Sep 2019 16:10:36 +0100
Subject: Update synapse/handlers/deactivate_account.py

Co-Authored-By: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
---
 synapse/handlers/deactivate_account.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py
index 432fed1d04..1acf6fc572 100644
--- a/synapse/handlers/deactivate_account.py
+++ b/synapse/handlers/deactivate_account.py
@@ -121,6 +121,10 @@ class DeactivateAccountHandler(BaseHandler):
         # parts users from rooms (if it isn't already running)
         self._start_user_parting()

+        # Reject all pending invites for the user, so that they do not show up in the
+        # invitees list of rooms.
+        yield self._reject_pending_invites_for_user(user_id)
+
         # Remove all information on the user from the account_validity table.
if self._account_validity_enabled: yield self.store.delete_account_validity_for_user(user_id) -- cgit 1.5.1 From 5a207c1113ff34c1f7a69a1fa36da4ede39e0b8e Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 27 Sep 2019 16:12:15 +0100 Subject: Update synapse/handlers/deactivate_account.py Co-Authored-By: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> --- synapse/handlers/deactivate_account.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index 1acf6fc572..2ae6808a34 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -157,7 +157,9 @@ class DeactivateAccountHandler(BaseHandler): require_consent=False, ) logger.info( - "Rejected invite for user %r in room %r", user_id, room.room_id + "Rejected invite for deactivated user %r in room %r", + user_id, + room.room_id, ) except Exception: logger.exception( -- cgit 1.5.1 From 42409b3022fb491a10d0e43810dad427fbfc3473 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 27 Sep 2019 16:15:01 +0100 Subject: Incorporate review --- synapse/handlers/deactivate_account.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index 2ae6808a34..cd6f19ec4c 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -121,8 +121,8 @@ class DeactivateAccountHandler(BaseHandler): # parts users from rooms (if it isn't already running) self._start_user_parting() - # Reject all pending invites for the user, so that they do not show up in the - # invitees list of rooms. + # Reject all pending invites for the user, so that the user doesn't show up in the + # "invited" section of rooms' members list. yield self._reject_pending_invites_for_user(user_id) # Remove all information on the user from the account_validity table. @@ -144,8 +144,6 @@ class DeactivateAccountHandler(BaseHandler): user = UserID.from_string(user_id) pending_invites = yield self.store.get_invited_rooms_for_user(user_id) - logger.info(pending_invites) - for room in pending_invites: try: yield self._room_member_handler.update_membership( -- cgit 1.5.1 From 04b779a6ac59a54abfcb5c74922ae22d09ba7646 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 27 Sep 2019 17:06:12 +0100 Subject: s/return/defer.returnValue/ --- synapse/handlers/deactivate_account.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index cd6f19ec4c..32e004e53e 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -132,7 +132,7 @@ class DeactivateAccountHandler(BaseHandler): # Mark the user as deactivated. 
yield self.store.set_user_deactivated_status(user_id, True) - return identity_server_supports_unbinding + defer.returnValue(identity_server_supports_unbinding) @defer.inlineCallbacks def _reject_pending_invites_for_user(self, user_id): -- cgit 1.5.1 From ce2448efbe470f6c652a5c74bfd77aca80f0b4ec Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 27 Sep 2019 17:08:50 +0100 Subject: Fix git messing up --- tests/rest/client/v2_alpha/test_account.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/rest/client/v2_alpha/test_account.py b/tests/rest/client/v2_alpha/test_account.py index da68e4539d..32adf88c35 100644 --- a/tests/rest/client/v2_alpha/test_account.py +++ b/tests/rest/client/v2_alpha/test_account.py @@ -23,8 +23,8 @@ from email.parser import Parser import pkg_resources import synapse.rest.admin -from synapse.api.constants import LoginType -from synapse.rest.client.v1 import login +from synapse.api.constants import LoginType, Membership +from synapse.rest.client.v1 import login, room from synapse.rest.client.v2_alpha import account, register from tests import unittest @@ -248,6 +248,7 @@ class DeactivateTestCase(unittest.HomeserverTestCase): synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, account.register_servlets, + room.register_servlets, ] def make_homeserver(self, reactor, clock): -- cgit 1.5.1 From dfcf4ba406f37f3e6b021c95f1bae2a2bfd57dec Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 2 Oct 2019 11:16:38 +0100 Subject: Don't 500 code when trying to exchange a revoked 3PID invite While this is not documented in the spec (but should be), Riot (and other clients) revoke 3PID invites by sending a m.room.third_party_invite event with an empty ({}) content to the room's state. When the invited 3PID gets associated with a MXID, the identity server (which doesn't know about revocations) sends down to the MXID's homeserver all of the undelivered invites it has for this 3PID. The homeserver then tries to talk to the inviting homeserver in order to exchange these invite for m.room.member events. When one of the invite is revoked, the inviting homeserver responds with a 500 error because it tries to extract a 'display_name' property from the content, which is empty. This might cause the invited server to consider that the server is down and not try to exchange other, valid invites (or at least delay it). This fix handles the case of revoked invites by avoiding trying to fetch a 'display_name' from the original invite's content, and letting the m.room.member event fail the auth rules (because, since the original invite's content is empty, it doesn't have public keys), which results in sending a 403 with the correct error message to the invited server. --- synapse/handlers/federation.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index e96edb8bbf..9a8683df37 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -2699,8 +2699,19 @@ class FederationHandler(BaseHandler): original_invite_id, allow_none=True ) if original_invite: - display_name = original_invite.content["display_name"] - event_dict["content"]["third_party_invite"]["display_name"] = display_name + # If the m.room.third_party_invite event's content is empty, it means the + # invite has been revoked. 
+ if original_invite.content: + display_name = original_invite.content["display_name"] + event_dict["content"]["third_party_invite"]["display_name"] = display_name + else: + # Don't discard or raise an error here because that's not the right place + # to do auth checks. The auth check will fail on this invite because we + # won't be able to fetch public keys from the m.room.third_party_invite + # event's content (because it's empty). + logger.info( + "Found invite event for third_party_invite but it has been revoked" + ) else: logger.info( "Could not find invite event for third_party_invite: %r", -- cgit 1.5.1 From 78d9b4a6e6359ceac8f441aebb5253d396924ea9 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 2 Oct 2019 12:17:46 +0100 Subject: Lint --- synapse/handlers/federation.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 9a8683df37..80733679c6 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -2703,7 +2703,9 @@ class FederationHandler(BaseHandler): # invite has been revoked. if original_invite.content: display_name = original_invite.content["display_name"] - event_dict["content"]["third_party_invite"]["display_name"] = display_name + event_dict["content"]["third_party_invite"][ + "display_name" + ] = display_name else: # Don't discard or raise an error here because that's not the right place # to do auth checks. The auth check will fail on this invite because we -- cgit 1.5.1 From 1a01ca07740aa187ded23a91cde4ee996806410f Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 2 Oct 2019 12:20:03 +0100 Subject: Changelog --- changelog.d/6147.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/6147.bugfix diff --git a/changelog.d/6147.bugfix b/changelog.d/6147.bugfix new file mode 100644 index 0000000000..b0f936d280 --- /dev/null +++ b/changelog.d/6147.bugfix @@ -0,0 +1 @@ +Don't 500 when trying to exchange a revoked 3PID invite. -- cgit 1.5.1 From a6a55039a4d5daddbcd1ea2100dff0694d3f4fcb Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 2 Oct 2019 14:44:58 +0100 Subject: Add test case --- synapse/handlers/federation.py | 2 +- tests/handlers/test_federation.py | 83 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 tests/handlers/test_federation.py diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 80733679c6..403bebbf59 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -2671,7 +2671,7 @@ class FederationHandler(BaseHandler): ) try: - self.auth.check_from_context(room_version, event, context) + yield self.auth.check_from_context(room_version, event, context) except AuthError as e: logger.warn("Denying third party invite %r because %s", event, e) raise e diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py new file mode 100644 index 0000000000..20416a0142 --- /dev/null +++ b/tests/handlers/test_federation.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from synapse.api.constants import EventTypes +from synapse.api.errors import AuthError, Codes +from synapse.rest import admin +from synapse.rest.client.v1 import login, room + +from tests import unittest + + +class FederationTestCase(unittest.HomeserverTestCase): + servlets = [ + admin.register_servlets, + login.register_servlets, + room.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + hs = self.setup_test_homeserver(http_client=None) + self.handler = hs.get_handlers().federation_handler + self.store = hs.get_datastore() + return hs + + def test_exchange_revoked_invite(self): + user_id = self.register_user("kermit", "test") + tok = self.login("kermit", "test") + + room_id = self.helper.create_room_as( + room_creator=user_id, tok=tok + ) + + # Send a 3PID invite event with an empty body so it's considered as a revoked one. + invite_token = "sometoken" + self.helper.send_state( + room_id=room_id, + event_type=EventTypes.ThirdPartyInvite, + state_key=invite_token, + body={}, + tok=tok, + ) + + d = self.handler.on_exchange_third_party_invite_request( + room_id=room_id, + event_dict={ + "type": EventTypes.Member, + "room_id": room_id, + "sender": user_id, + "state_key": "@someone:example.org", + "content": { + "membership": "invite", + "third_party_invite": { + "display_name": "alice", + "signed": { + "mxid": "@alice:localhost", + "token": invite_token, + "signatures": { + "magic.forest": { + "ed25519:3": "fQpGIW1Snz+pwLZu6sTy2aHy/DYWWTspTJRPyNp0PKkymfIsNffysMl6ObMMFdIJhk6g6pwlIqZ54rxo8SLmAg" + } + } + } + } + } + } + ) + + failure = self.get_failure(d, AuthError).value + + self.assertEqual(failure.code, 403, failure) + self.assertEqual(failure.errcode, Codes.FORBIDDEN, failure) + self.assertEqual(failure.msg, "You are not invited to this room.") -- cgit 1.5.1 From 06159a0ee7346d290a77e3a2833e45ce3cc15afd Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 3 Oct 2019 11:29:07 +0100 Subject: Lint --- tests/handlers/test_federation.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index 20416a0142..a18dfc0e96 100644 --- a/tests/handlers/test_federation.py +++ b/tests/handlers/test_federation.py @@ -69,11 +69,11 @@ class FederationTestCase(unittest.HomeserverTestCase): "magic.forest": { "ed25519:3": "fQpGIW1Snz+pwLZu6sTy2aHy/DYWWTspTJRPyNp0PKkymfIsNffysMl6ObMMFdIJhk6g6pwlIqZ54rxo8SLmAg" } - } - } - } - } - } + }, + }, + }, + }, + }, ) failure = self.get_failure(d, AuthError).value -- cgit 1.5.1 From d6945464537fed3a601879aff58d88ad654a4f86 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 3 Oct 2019 11:30:43 +0100 Subject: Lint (again) --- tests/handlers/test_federation.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index a18dfc0e96..d56220f403 100644 --- a/tests/handlers/test_federation.py +++ b/tests/handlers/test_federation.py @@ -37,9 +37,7 @@ class FederationTestCase(unittest.HomeserverTestCase): user_id = self.register_user("kermit", "test") tok = 
self.login("kermit", "test") - room_id = self.helper.create_room_as( - room_creator=user_id, tok=tok - ) + room_id = self.helper.create_room_as(room_creator=user_id, tok=tok) # Send a 3PID invite event with an empty body so it's considered as a revoked one. invite_token = "sometoken" -- cgit 1.5.1 From eaec1d4ce71a8bc1b327bb4e05163674476406c7 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 4 Oct 2019 11:16:19 +0100 Subject: Incorporate review --- synapse/handlers/federation.py | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 403bebbf59..c5dc3f2a27 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -2700,20 +2700,14 @@ class FederationHandler(BaseHandler): ) if original_invite: # If the m.room.third_party_invite event's content is empty, it means the - # invite has been revoked. - if original_invite.content: - display_name = original_invite.content["display_name"] - event_dict["content"]["third_party_invite"][ - "display_name" - ] = display_name - else: - # Don't discard or raise an error here because that's not the right place - # to do auth checks. The auth check will fail on this invite because we - # won't be able to fetch public keys from the m.room.third_party_invite - # event's content (because it's empty). - logger.info( - "Found invite event for third_party_invite but it has been revoked" - ) + # invite has been revoked. In this case, we don't have to raise an error here + # because the auth check will fail on the invite (because it's not able to + # fetch public keys from the m.room.third_party_invite event's content, which + # is empty. + display_name = original_invite.content.get("display_name") + event_dict["content"]["third_party_invite"][ + "display_name" + ] = display_name else: logger.info( "Could not find invite event for third_party_invite: %r", -- cgit 1.5.1 From b0a350ef48a970ab9026dde3e6cf8b79beebd805 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 4 Oct 2019 11:18:28 +0100 Subject: Lint --- synapse/handlers/federation.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index c5dc3f2a27..3feb95d53f 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -2705,9 +2705,7 @@ class FederationHandler(BaseHandler): # fetch public keys from the m.room.third_party_invite event's content, which # is empty. display_name = original_invite.content.get("display_name") - event_dict["content"]["third_party_invite"][ - "display_name" - ] = display_name + event_dict["content"]["third_party_invite"]["display_name"] = display_name else: logger.info( "Could not find invite event for third_party_invite: %r", -- cgit 1.5.1 From 04d4fff806c27b90f9c84ea37288fd36d7678e7a Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 4 Oct 2019 11:21:24 +0100 Subject: Typo --- synapse/handlers/federation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 3feb95d53f..35528eb48a 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -2703,7 +2703,7 @@ class FederationHandler(BaseHandler): # invite has been revoked. 
In this case, we don't have to raise an error here # because the auth check will fail on the invite (because it's not able to # fetch public keys from the m.room.third_party_invite event's content, which - # is empty. + # is empty). display_name = original_invite.content.get("display_name") event_dict["content"]["third_party_invite"]["display_name"] = display_name else: -- cgit 1.5.1 From c49ba3677c635ff2c0b19b9005967df619b4c748 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 4 Oct 2019 12:21:33 +0100 Subject: Fixup tests --- tests/handlers/test_federation.py | 6 +++++- tests/rest/client/v1/utils.py | 10 +++++++--- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index d56220f403..b1ae15a2bd 100644 --- a/tests/handlers/test_federation.py +++ b/tests/handlers/test_federation.py @@ -50,6 +50,7 @@ class FederationTestCase(unittest.HomeserverTestCase): ) d = self.handler.on_exchange_third_party_invite_request( + origin="example.com", room_id=room_id, event_dict={ "type": EventTypes.Member, @@ -65,7 +66,10 @@ class FederationTestCase(unittest.HomeserverTestCase): "token": invite_token, "signatures": { "magic.forest": { - "ed25519:3": "fQpGIW1Snz+pwLZu6sTy2aHy/DYWWTspTJRPyNp0PKkymfIsNffysMl6ObMMFdIJhk6g6pwlIqZ54rxo8SLmAg" + "ed25519:3": ( + "fQpGIW1Snz+pwLZu6sTy2aHy/DYWWTspTJRPyNp0PKkymfIs" + "NffysMl6ObMMFdIJhk6g6pwlIqZ54rxo8SLmAg" + ) } }, }, diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py index f7133fc12e..449a69183f 100644 --- a/tests/rest/client/v1/utils.py +++ b/tests/rest/client/v1/utils.py @@ -128,13 +128,17 @@ class RestHelper(object): return channel.json_body - def send_state(self, room_id, event_type, body, tok, expect_code=200): - path = "/_matrix/client/r0/rooms/%s/state/%s" % (room_id, event_type) + def send_state(self, room_id, event_type, body, tok, expect_code=200, state_key=""): + path = "/_matrix/client/r0/rooms/%s/state/%s/%s" % ( + room_id, + event_type, + state_key, + ) if tok: path = path + "?access_token=%s" % tok request, channel = make_request( - self.hs.get_reactor(), "PUT", path, json.dumps(body).encode('utf8') + self.hs.get_reactor(), "PUT", path, json.dumps(body).encode("utf8") ) render(request, self.resource, self.hs.get_reactor()) -- cgit 1.5.1 From 9e1e5f8ed5aa6047d4d0d0b5cc527d9a5dd681f5 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 7 Oct 2019 12:52:02 +0100 Subject: First attempt at running SyTest in buildkite --- .buildkite/merge_base_branch.sh | 35 +++++++++++++++++++++++++++++++++++ .buildkite/pipeline.yml | 25 +++++++++++++++++++++++++ 2 files changed, 60 insertions(+) create mode 100755 .buildkite/merge_base_branch.sh diff --git a/.buildkite/merge_base_branch.sh b/.buildkite/merge_base_branch.sh new file mode 100755 index 0000000000..3a6476a96c --- /dev/null +++ b/.buildkite/merge_base_branch.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +set -e + +# CircleCI doesn't give CIRCLE_PR_NUMBER in the environment for non-forked PRs. Wonderful. +# In this case, we just need to do some ~shell magic~ to strip it out of the PULL_REQUEST URL. +echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> $BASH_ENV +source $BASH_ENV + +if [[ -z "${CIRCLE_PR_NUMBER}" ]] +then + echo "Can't figure out what the PR number is! Assuming merge target is dinsic." + + # It probably hasn't had a PR opened yet. Since all PRs for dinsic land on + # dinsic, we can probably assume it's based on it and will be merged into + # it. 
+ GITBASE="dinsic" +else + # Get the reference, using the GitHub API + GITBASE=`wget -O- https://api.github.com/repos/matrix-org/synapse-dinsic/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'` +fi + +# Show what we are before +git --no-pager show -s + +# Set up username so it can do a merge +git config --global user.email bot@matrix.org +git config --global user.name "A robot" + +# Fetch and merge. If it doesn't work, it will raise due to set -e. +git fetch -u origin $GITBASE +git merge --no-edit origin/$GITBASE + +# Show what we are after. +git --no-pager show -s diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 719f22b4e1..e875fa61dc 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -232,3 +232,28 @@ steps: limit: 2 - exit_status: 2 limit: 2 + + - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Monolith" + agents: + queue: "medium" + env: + POSTGRES: "1" + command: + - "bash .buildkite/merge_base_branch.sh" + - "bash /synapse_sytest.sh" + plugins: + - docker#v3.0.1: + image: "matrixdotorg/sytest-synapse:dinsic" + propagate-environment: true + always-pull: true + workdir: "/src" + entrypoint: "/bin/sh" + init: false + shell: ["-x", "-c"] + mount-buildkite-agent: false + volumes: ["./logs:/logs"] + - artifacts#v1.2.0: + upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ] + - matrix-org/annotate: + path: "logs/annotate.md" + style: "error" \ No newline at end of file -- cgit 1.5.1 From 90f1eb3ee5bead0bece960fec815edb37b83e22b Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 7 Oct 2019 12:54:20 +0100 Subject: Changelog --- changelog.d/9.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/9.misc diff --git a/changelog.d/9.misc b/changelog.d/9.misc new file mode 100644 index 0000000000..24fd12c978 --- /dev/null +++ b/changelog.d/9.misc @@ -0,0 +1 @@ +Add SyTest to the BuildKite CI. -- cgit 1.5.1 From f38ad87384f45af3db0c6dbf071aef30a16bc81e Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 7 Oct 2019 13:03:08 +0100 Subject: Use mainline's merge_base_branch.sh --- .buildkite/merge_base_branch.sh | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/.buildkite/merge_base_branch.sh b/.buildkite/merge_base_branch.sh index 3a6476a96c..2dc18c9cf6 100755 --- a/.buildkite/merge_base_branch.sh +++ b/.buildkite/merge_base_branch.sh @@ -1,15 +1,14 @@ #!/usr/bin/env bash -set -e +set -ex -# CircleCI doesn't give CIRCLE_PR_NUMBER in the environment for non-forked PRs. Wonderful. -# In this case, we just need to do some ~shell magic~ to strip it out of the PULL_REQUEST URL. -echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> $BASH_ENV -source $BASH_ENV +if [[ "$BUILDKITE_BRANCH" == "dinsic" ]]; then + echo "Not merging forward, as this is a release branch" + exit 0 +fi -if [[ -z "${CIRCLE_PR_NUMBER}" ]] -then - echo "Can't figure out what the PR number is! Assuming merge target is dinsic." +if [[ -z $BUILDKITE_PULL_REQUEST_BASE_BRANCH ]]; then + echo "Can't figure out what the PR number is! Assuming merge target is dinsic." # It probably hasn't had a PR opened yet. 
Since all PRs for dinsic land on # dinsic, we can probably assume it's based on it and will be merged into @@ -17,7 +16,7 @@ then GITBASE="dinsic" else # Get the reference, using the GitHub API - GITBASE=`wget -O- https://api.github.com/repos/matrix-org/synapse-dinsic/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'` + GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH fi # Show what we are before @@ -29,7 +28,7 @@ git config --global user.name "A robot" # Fetch and merge. If it doesn't work, it will raise due to set -e. git fetch -u origin $GITBASE -git merge --no-edit origin/$GITBASE +git merge --no-edit --no-commit origin/$GITBASE # Show what we are after. -git --no-pager show -s +git --no-pager show -s \ No newline at end of file -- cgit 1.5.1 From c5eb8342b278d0a0268547c1e3a4f8c942321278 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 7 Oct 2019 15:34:35 +0100 Subject: Try adding workers to CircleCI instead --- .circleci/config.yml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 341395765d..ba316f7233 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -13,12 +13,30 @@ jobs: destination: logs - store_test_results: path: /logs + sytestpy2postgresworkersmerged: + docker: + - image: matrixdotorg/sytest-synapse:dinsic + working_directory: /src + steps: + - checkout + - run: bash .circleci/merge_base_branch.sh + - run: POSTGRES=1 WORKERS=1 /synapse_sytest.sh + - store_artifacts: + path: /logs + destination: logs + - store_test_results: + path: /logs + workflows: version: 2 build: jobs: - sytestpy2postgresmerged: + filters: + branches: + ignore: /develop|master|release-.*/ + - sytestpy2postgresworkersmerged: filters: branches: ignore: /develop|master|release-.*/ \ No newline at end of file -- cgit 1.5.1 From 078c0638e3fc75f36cdb77e5d1f5cdcc6cfff971 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 8 Oct 2019 16:33:17 +0100 Subject: peek --- .buildkite/pipeline.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index e875fa61dc..2df0a07b39 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -240,6 +240,7 @@ steps: POSTGRES: "1" command: - "bash .buildkite/merge_base_branch.sh" + - "ls" - "bash /synapse_sytest.sh" plugins: - docker#v3.0.1: -- cgit 1.5.1 From 9ec8072a87b54f6e45d5db42c6ea96dbfff7f306 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 8 Oct 2019 16:35:08 +0100 Subject: Temporarily move the sytest job before the wait --- .buildkite/pipeline.yml | 53 +++++++++++++++++++++++++------------------------ 1 file changed, 27 insertions(+), 26 deletions(-) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 2df0a07b39..d3e8a90297 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -44,6 +44,33 @@ steps: - docker#v3.0.1: image: "python:3.6" + + - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Monolith" + agents: + queue: "medium" + env: + POSTGRES: "1" + command: + - "bash .buildkite/merge_base_branch.sh" + - "ls" + - "bash /synapse_sytest.sh" + plugins: + - docker#v3.0.1: + image: "matrixdotorg/sytest-synapse:dinsic" + propagate-environment: true + always-pull: true + workdir: "/src" + entrypoint: "/bin/sh" + init: false + shell: ["-x", "-c"] + mount-buildkite-agent: false + volumes: ["./logs:/logs"] + - artifacts#v1.2.0: + upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ] + - matrix-org/annotate: + path: "logs/annotate.md" + style: "error" + - wait - command: @@ 
-232,29 +259,3 @@ steps: limit: 2 - exit_status: 2 limit: 2 - - - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Monolith" - agents: - queue: "medium" - env: - POSTGRES: "1" - command: - - "bash .buildkite/merge_base_branch.sh" - - "ls" - - "bash /synapse_sytest.sh" - plugins: - - docker#v3.0.1: - image: "matrixdotorg/sytest-synapse:dinsic" - propagate-environment: true - always-pull: true - workdir: "/src" - entrypoint: "/bin/sh" - init: false - shell: ["-x", "-c"] - mount-buildkite-agent: false - volumes: ["./logs:/logs"] - - artifacts#v1.2.0: - upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ] - - matrix-org/annotate: - path: "logs/annotate.md" - style: "error" \ No newline at end of file -- cgit 1.5.1 From a093ac6d86d08eec874c45bf4f504950c233bc02 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 8 Oct 2019 16:40:15 +0100 Subject: peek --- .buildkite/pipeline.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index d3e8a90297..3b95ab1053 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -52,7 +52,9 @@ steps: POSTGRES: "1" command: - "bash .buildkite/merge_base_branch.sh" + - "pwd" - "ls" + - "ls /" - "bash /synapse_sytest.sh" plugins: - docker#v3.0.1: -- cgit 1.5.1 From 41505087597c953d56e86d5b3ccf22693e02a3de Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 8 Oct 2019 16:45:04 +0100 Subject: peek --- .circleci/config.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index ba316f7233..162407e539 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -7,6 +7,8 @@ jobs: steps: - checkout - run: bash .circleci/merge_base_branch.sh + - run: pwd + - run: ls - run: POSTGRES=1 /synapse_sytest.sh - store_artifacts: path: /logs -- cgit 1.5.1 From 6bf5dbc5f23032021e948e71c8595979c6459d5f Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 8 Oct 2019 16:47:18 +0100 Subject: peek --- .buildkite/pipeline.yml | 1 + .circleci/config.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 3b95ab1053..1650eb42bb 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -55,6 +55,7 @@ steps: - "pwd" - "ls" - "ls /" + - "ls jenkins" - "bash /synapse_sytest.sh" plugins: - docker#v3.0.1: diff --git a/.circleci/config.yml b/.circleci/config.yml index 162407e539..82a2fc6d80 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -9,6 +9,7 @@ jobs: - run: bash .circleci/merge_base_branch.sh - run: pwd - run: ls + - run: ls jenkins - run: POSTGRES=1 /synapse_sytest.sh - store_artifacts: path: /logs -- cgit 1.5.1 From 60b458540394e90401629eac11ff6575b24e01fb Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 8 Oct 2019 16:51:18 +0100 Subject: peek --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 82a2fc6d80..027b82e1cf 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -9,6 +9,7 @@ jobs: - run: bash .circleci/merge_base_branch.sh - run: pwd - run: ls + - run: ls / - run: ls jenkins - run: POSTGRES=1 /synapse_sytest.sh - store_artifacts: -- cgit 1.5.1 From b77d92514e5bc71aa9c51908435ecc2e48051983 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 8 Oct 2019 17:07:58 +0100 Subject: peek --- .buildkite/pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 1650eb42bb..2fb3682a91 
100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -53,7 +53,7 @@ steps: command: - "bash .buildkite/merge_base_branch.sh" - "pwd" - - "ls" + - "ls -a" - "ls /" - "ls jenkins" - "bash /synapse_sytest.sh" -- cgit 1.5.1 From 4d52ccf41f37a52f7cb7d84c6999a1e33558da72 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 8 Oct 2019 17:09:14 +0100 Subject: peek --- .buildkite/pipeline.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 2fb3682a91..72678e82c8 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -53,6 +53,7 @@ steps: command: - "bash .buildkite/merge_base_branch.sh" - "pwd" + - "git --git-dir=/src/.git symbolic-ref HEAD" - "ls -a" - "ls /" - "ls jenkins" -- cgit 1.5.1 From 9398f55982d7acf3d3267084f791465a10f5dd51 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 8 Oct 2019 17:37:29 +0100 Subject: Try to fix CircleCI --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 027b82e1cf..6d56ca703c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,7 +19,7 @@ jobs: path: /logs sytestpy2postgresworkersmerged: docker: - - image: matrixdotorg/sytest-synapse:dinsic + - image: matrixdotorg/sytest-synapse:dinsic-test working_directory: /src steps: - checkout -- cgit 1.5.1 From 7123f50c44bd8adf169aa2cadfd64c8a3b9bd504 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 8 Oct 2019 18:15:47 +0100 Subject: fix --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6d56ca703c..027b82e1cf 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,7 +19,7 @@ jobs: path: /logs sytestpy2postgresworkersmerged: docker: - - image: matrixdotorg/sytest-synapse:dinsic-test + - image: matrixdotorg/sytest-synapse:dinsic working_directory: /src steps: - checkout -- cgit 1.5.1 From c29182ce8ac162e5b3584c6174a5b9915bd1ace1 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 9 Oct 2019 11:06:29 +0100 Subject: Add python3 jobs --- .circleci/config.yml | 38 ++++++++++++++++++++++++++++++++++---- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 027b82e1cf..86f9fa9372 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -7,10 +7,6 @@ jobs: steps: - checkout - run: bash .circleci/merge_base_branch.sh - - run: pwd - - run: ls - - run: ls / - - run: ls jenkins - run: POSTGRES=1 /synapse_sytest.sh - store_artifacts: path: /logs @@ -30,6 +26,32 @@ jobs: destination: logs - store_test_results: path: /logs + sytestpy3postgresmerged: + docker: + - image: matrixdotorg/sytest-synapse:dinsic + working_directory: /src + steps: + - checkout + - run: bash .circleci/merge_base_branch.sh + - run: POSTGRES=1 /synapse_sytest.sh + - store_artifacts: + path: /logs + destination: logs + - store_test_results: + path: /logs + sytestpy3postgresworkersmerged: + docker: + - image: matrixdotorg/sytest-synapse:dinsic + working_directory: /src + steps: + - checkout + - run: bash .circleci/merge_base_branch.sh + - run: POSTGRES=1 WORKERS=1 /synapse_sytest.sh + - store_artifacts: + path: /logs + destination: logs + - store_test_results: + path: /logs workflows: @@ -41,6 +63,14 @@ workflows: branches: ignore: /develop|master|release-.*/ - sytestpy2postgresworkersmerged: + filters: + branches: + ignore: /develop|master|release-.*/ + - sytestpy3postgresmerged: 
+ filters: + branches: + ignore: /develop|master|release-.*/ + - sytestpy3postgresworkersmerged: filters: branches: ignore: /develop|master|release-.*/ \ No newline at end of file -- cgit 1.5.1 From d6371916cc999a964867f1fc80d3d0d5e8b2d6f0 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 9 Oct 2019 11:08:30 +0100 Subject: Actually use the right image --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 86f9fa9372..ad6d839f35 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -28,7 +28,7 @@ jobs: path: /logs sytestpy3postgresmerged: docker: - - image: matrixdotorg/sytest-synapse:dinsic + - image: matrixdotorg/sytest-synapse:dinsic-py3 working_directory: /src steps: - checkout @@ -41,7 +41,7 @@ jobs: path: /logs sytestpy3postgresworkersmerged: docker: - - image: matrixdotorg/sytest-synapse:dinsic + - image: matrixdotorg/sytest-synapse:dinsic-py3 working_directory: /src steps: - checkout -- cgit 1.5.1 From 1a58f6196f2f601b85daf997c3749aff9eb98e70 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 9 Oct 2019 11:17:36 +0100 Subject: Try to run stuff on buildkite --- .buildkite/pipeline.yml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 72678e82c8..d890e4a9c0 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -52,15 +52,10 @@ steps: POSTGRES: "1" command: - "bash .buildkite/merge_base_branch.sh" - - "pwd" - - "git --git-dir=/src/.git symbolic-ref HEAD" - - "ls -a" - - "ls /" - - "ls jenkins" - "bash /synapse_sytest.sh" plugins: - docker#v3.0.1: - image: "matrixdotorg/sytest-synapse:dinsic" + image: "matrixdotorg/sytest-synapse:dinsic-test" propagate-environment: true always-pull: true workdir: "/src" -- cgit 1.5.1 From 7a50b07bb425cfd65705602dad426697d6d0011e Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 9 Oct 2019 11:34:20 +0100 Subject: Add workers to buildkite --- .buildkite/pipeline.yml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index d890e4a9c0..f16682824d 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -44,6 +44,31 @@ steps: - docker#v3.0.1: image: "python:3.6" + - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Workers" + agents: + queue: "medium" + env: + POSTGRES: "1" + WORKERS: "1" + command: + - "bash .buildkite/merge_base_branch.sh" + - "bash /synapse_sytest.sh" + plugins: + - docker#v3.0.1: + image: "matrixdotorg/sytest-synapse:dinsic-test" + propagate-environment: true + always-pull: true + workdir: "/src" + entrypoint: "/bin/sh" + init: false + shell: ["-x", "-c"] + mount-buildkite-agent: false + volumes: ["./logs:/logs"] + - artifacts#v1.2.0: + upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ] + - matrix-org/annotate: + path: "logs/annotate.md" + style: "error" - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Monolith" agents: -- cgit 1.5.1 From c3c1add9f30102b0c2b2f47abcd64fe705de7c2d Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 18 Oct 2019 14:52:57 +0100 Subject: Add TAP formatting script --- .buildkite/format_tap.py | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 .buildkite/format_tap.py diff --git a/.buildkite/format_tap.py b/.buildkite/format_tap.py new file mode 100644 index 0000000000..f153ffb661 --- /dev/null +++ b/.buildkite/format_tap.py @@ 
-0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys +from tap.parser import Parser +from tap.line import Result, Unknown, Diagnostic + +out = ["### TAP Output for " + sys.argv[2]] + +p = Parser() + +in_error = False + +for line in p.parse_file(sys.argv[1]): + if isinstance(line, Result): + if in_error: + out.append("") + out.append("") + out.append("") + out.append("----") + out.append("") + in_error = False + + if not line.ok and not line.todo: + in_error = True + + out.append("FAILURE Test #%d: ``%s``" % (line.number, line.description)) + out.append("") + out.append("
<details><summary>Show log</summary>")
+
+    elif isinstance(line, Diagnostic) and in_error:
+        out.append(line.text)
+
+if out:
+    for line in out[:-3]:
+        print(line)
\ No newline at end of file
-- 
cgit 1.5.1
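
The script above is not invoked anywhere in this patch; a plausible invocation, consistent
with the logs/results.tap artifact and logs/annotate.md annotation paths used in the
Buildkite pipeline, would be (the exact paths and the label argument are assumptions, not
part of the patch):

    # Summarise sytest failures from a TAP file as Markdown for a Buildkite annotation
    # (hypothetical paths; argv[1] is the TAP results file, argv[2] a free-form label).
    python .buildkite/format_tap.py logs/results.tap "SyTest Postgres Monolith" > logs/annotate.md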


From 73147f44fcbaea6ee1e5869a04196a1da0a26e1d Mon Sep 17 00:00:00 2001
From: Brendan Abolivier 
Date: Fri, 18 Oct 2019 15:25:45 +0100
Subject: Add py3 jobs on BuildKite

---
 .buildkite/pipeline.yml | 55 +++++++++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 53 insertions(+), 2 deletions(-)

diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
index f16682824d..7bbd2c47fa 100644
--- a/.buildkite/pipeline.yml
+++ b/.buildkite/pipeline.yml
@@ -44,6 +44,31 @@ steps:
       - docker#v3.0.1:
           image: "python:3.6"
 
+  - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Monolith"
+    agents:
+      queue: "medium"
+    env:
+      POSTGRES: "1"
+    command:
+      - "bash .buildkite/merge_base_branch.sh"
+      - "bash /synapse_sytest.sh"
+    plugins:
+      - docker#v3.0.1:
+          image: "matrixdotorg/sytest-synapse:dinsic-test"
+          propagate-environment: true
+          always-pull: true
+          workdir: "/src"
+          entrypoint: "/bin/sh"
+          init: false
+          shell: ["-x", "-c"]
+          mount-buildkite-agent: false
+          volumes: ["./logs:/logs"]
+      - artifacts#v1.2.0:
+          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ]
+      - matrix-org/annotate:
+          path: "logs/annotate.md"
+          style: "error"
+
   - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Workers"
     agents:
       queue: "medium"
@@ -70,7 +95,7 @@ steps:
           path: "logs/annotate.md"
           style: "error"
 
-  - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Monolith"
+  - label: "SyTest - :python: 3 / :postgres: 9.6 / Monolith"
     agents:
       queue: "medium"
     env:
@@ -80,7 +105,33 @@ steps:
       - "bash /synapse_sytest.sh"
     plugins:
       - docker#v3.0.1:
-          image: "matrixdotorg/sytest-synapse:dinsic-test"
+          image: "matrixdotorg/sytest-synapse:dinsic-test-py3"
+          propagate-environment: true
+          always-pull: true
+          workdir: "/src"
+          entrypoint: "/bin/sh"
+          init: false
+          shell: ["-x", "-c"]
+          mount-buildkite-agent: false
+          volumes: ["./logs:/logs"]
+      - artifacts#v1.2.0:
+          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ]
+      - matrix-org/annotate:
+          path: "logs/annotate.md"
+          style: "error"
+
+  - label: "SyTest - :python: 3 / :postgres: 9.6 / Workers"
+    agents:
+      queue: "medium"
+    env:
+      POSTGRES: "1"
+      WORKERS: "1"
+    command:
+      - "bash .buildkite/merge_base_branch.sh"
+      - "bash /synapse_sytest.sh"
+    plugins:
+      - docker#v3.0.1:
+          image: "matrixdotorg/sytest-synapse:dinsic-test-py3"
           propagate-environment: true
           always-pull: true
           workdir: "/src"
-- 
cgit 1.5.1


From a4e4a9c93b14fcf0d6ee3f31adbc071a7075d169 Mon Sep 17 00:00:00 2001
From: Brendan Abolivier 
Date: Mon, 21 Oct 2019 17:35:16 +0100
Subject: Try running the workers job on bigger agents

---
 .buildkite/pipeline.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
index 7bbd2c47fa..32fe64092a 100644
--- a/.buildkite/pipeline.yml
+++ b/.buildkite/pipeline.yml
@@ -71,7 +71,7 @@ steps:
 
   - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Workers"
     agents:
-      queue: "medium"
+      queue: "xlarge"
     env:
       POSTGRES: "1"
       WORKERS: "1"
@@ -122,7 +122,7 @@ steps:
 
   - label: "SyTest - :python: 3 / :postgres: 9.6 / Workers"
     agents:
-      queue: "medium"
+      queue: "xlarge"
     env:
       POSTGRES: "1"
       WORKERS: "1"
-- 
cgit 1.5.1


From 0263c044ff34b21d4a9346ed39c5aa07470a1313 Mon Sep 17 00:00:00 2001
From: Brendan Abolivier 
Date: Wed, 23 Oct 2019 17:37:28 +0100
Subject: Move sytest jobs to the right location
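
The job definitions being moved below can be approximated locally as follows (a sketch
only: in CI the Buildkite docker plugin manages the checkout mount and entrypoint, so the
explicit /src bind-mount and working directory here are assumptions):

    # Rough local equivalent of the ":python: 3 / :postgres: 9.6 / Monolith" job
    # (hypothetical invocation; add -e WORKERS=1 to mirror the worker-mode jobs).
    docker run --rm -e POSTGRES=1 \
        -v "$(pwd):/src" -v "$(pwd)/logs:/logs" -w /src \
        matrixdotorg/sytest-synapse:dinsic-py3 \
        bash /synapse_sytest.sh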

---
 .buildkite/pipeline.yml | 205 ++++++++++++++++++++++++------------------------
 1 file changed, 103 insertions(+), 102 deletions(-)

diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
index 32fe64092a..1f035a6ab1 100644
--- a/.buildkite/pipeline.yml
+++ b/.buildkite/pipeline.yml
@@ -44,108 +44,6 @@ steps:
       - docker#v3.0.1:
           image: "python:3.6"
 
-  - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Monolith"
-    agents:
-      queue: "medium"
-    env:
-      POSTGRES: "1"
-    command:
-      - "bash .buildkite/merge_base_branch.sh"
-      - "bash /synapse_sytest.sh"
-    plugins:
-      - docker#v3.0.1:
-          image: "matrixdotorg/sytest-synapse:dinsic-test"
-          propagate-environment: true
-          always-pull: true
-          workdir: "/src"
-          entrypoint: "/bin/sh"
-          init: false
-          shell: ["-x", "-c"]
-          mount-buildkite-agent: false
-          volumes: ["./logs:/logs"]
-      - artifacts#v1.2.0:
-          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ]
-      - matrix-org/annotate:
-          path: "logs/annotate.md"
-          style: "error"
-
-  - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Workers"
-    agents:
-      queue: "xlarge"
-    env:
-      POSTGRES: "1"
-      WORKERS: "1"
-    command:
-      - "bash .buildkite/merge_base_branch.sh"
-      - "bash /synapse_sytest.sh"
-    plugins:
-      - docker#v3.0.1:
-          image: "matrixdotorg/sytest-synapse:dinsic-test"
-          propagate-environment: true
-          always-pull: true
-          workdir: "/src"
-          entrypoint: "/bin/sh"
-          init: false
-          shell: ["-x", "-c"]
-          mount-buildkite-agent: false
-          volumes: ["./logs:/logs"]
-      - artifacts#v1.2.0:
-          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ]
-      - matrix-org/annotate:
-          path: "logs/annotate.md"
-          style: "error"
-
-  - label: "SyTest - :python: 3 / :postgres: 9.6 / Monolith"
-    agents:
-      queue: "medium"
-    env:
-      POSTGRES: "1"
-    command:
-      - "bash .buildkite/merge_base_branch.sh"
-      - "bash /synapse_sytest.sh"
-    plugins:
-      - docker#v3.0.1:
-          image: "matrixdotorg/sytest-synapse:dinsic-test-py3"
-          propagate-environment: true
-          always-pull: true
-          workdir: "/src"
-          entrypoint: "/bin/sh"
-          init: false
-          shell: ["-x", "-c"]
-          mount-buildkite-agent: false
-          volumes: ["./logs:/logs"]
-      - artifacts#v1.2.0:
-          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ]
-      - matrix-org/annotate:
-          path: "logs/annotate.md"
-          style: "error"
-
-  - label: "SyTest - :python: 3 / :postgres: 9.6 / Workers"
-    agents:
-      queue: "xlarge"
-    env:
-      POSTGRES: "1"
-      WORKERS: "1"
-    command:
-      - "bash .buildkite/merge_base_branch.sh"
-      - "bash /synapse_sytest.sh"
-    plugins:
-      - docker#v3.0.1:
-          image: "matrixdotorg/sytest-synapse:dinsic-test-py3"
-          propagate-environment: true
-          always-pull: true
-          workdir: "/src"
-          entrypoint: "/bin/sh"
-          init: false
-          shell: ["-x", "-c"]
-          mount-buildkite-agent: false
-          volumes: ["./logs:/logs"]
-      - artifacts#v1.2.0:
-          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ]
-      - matrix-org/annotate:
-          path: "logs/annotate.md"
-          style: "error"
-
   - wait
 
   - command:
@@ -334,3 +232,106 @@ steps:
           limit: 2
         - exit_status: 2
           limit: 2
+
+  - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Monolith"
+    agents:
+      queue: "medium"
+    env:
+      POSTGRES: "1"
+    command:
+      - "bash .buildkite/merge_base_branch.sh"
+      - "bash /synapse_sytest.sh"
+    plugins:
+      - docker#v3.0.1:
+          image: "matrixdotorg/sytest-synapse:dinsic-test"
+          propagate-environment: true
+          always-pull: true
+          workdir: "/src"
+          entrypoint: "/bin/sh"
+          init: false
+          shell: ["-x", "-c"]
+          mount-buildkite-agent: false
+          volumes: ["./logs:/logs"]
+      - artifacts#v1.2.0:
+          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ]
+      - matrix-org/annotate:
+          path: "logs/annotate.md"
+          style: "error"
+
+  - label: "SyTest - :python: 2.7 / :postgres: 9.6 / Workers"
+    agents:
+      queue: "xlarge"
+    env:
+      POSTGRES: "1"
+      WORKERS: "1"
+    command:
+      - "bash .buildkite/merge_base_branch.sh"
+      - "bash /synapse_sytest.sh"
+    plugins:
+      - docker#v3.0.1:
+          image: "matrixdotorg/sytest-synapse:dinsic-test"
+          propagate-environment: true
+          always-pull: true
+          workdir: "/src"
+          entrypoint: "/bin/sh"
+          init: false
+          shell: ["-x", "-c"]
+          mount-buildkite-agent: false
+          volumes: ["./logs:/logs"]
+      - artifacts#v1.2.0:
+          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ]
+      - matrix-org/annotate:
+          path: "logs/annotate.md"
+          style: "error"
+
+  - label: "SyTest - :python: 3 / :postgres: 9.6 / Monolith"
+    agents:
+      queue: "medium"
+    env:
+      POSTGRES: "1"
+    command:
+      - "bash .buildkite/merge_base_branch.sh"
+      - "bash /synapse_sytest.sh"
+    plugins:
+      - docker#v3.0.1:
+          image: "matrixdotorg/sytest-synapse:dinsic-py3"
+          propagate-environment: true
+          always-pull: true
+          workdir: "/src"
+          entrypoint: "/bin/sh"
+          init: false
+          shell: ["-x", "-c"]
+          mount-buildkite-agent: false
+          volumes: ["./logs:/logs"]
+      - artifacts#v1.2.0:
+          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ]
+      - matrix-org/annotate:
+          path: "logs/annotate.md"
+          style: "error"
+
+  - label: "SyTest - :python: 3 / :postgres: 9.6 / Workers"
+    agents:
+      queue: "xlarge"
+    env:
+      POSTGRES: "1"
+      WORKERS: "1"
+    command:
+      - "bash .buildkite/merge_base_branch.sh"
+      - "bash /synapse_sytest.sh"
+    plugins:
+      - docker#v3.0.1:
+          image: "matrixdotorg/sytest-synapse:dinsic-py3"
+          propagate-environment: true
+          always-pull: true
+          workdir: "/src"
+          entrypoint: "/bin/sh"
+          init: false
+          shell: ["-x", "-c"]
+          mount-buildkite-agent: false
+          volumes: ["./logs:/logs"]
+      - artifacts#v1.2.0:
+          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/results.tap" ]
+      - matrix-org/annotate:
+          path: "logs/annotate.md"
+          style: "error"
+
-- 
cgit 1.5.1


From 7467a8090c51ac862d60cb70c79f11be7137492c Mon Sep 17 00:00:00 2001
From: Brendan Abolivier 
Date: Wed, 23 Oct 2019 17:37:50 +0100
Subject: Remove CircleCI configuration

---
 .circleci/config.yml           | 76 ------------------------------------------
 .circleci/merge_base_branch.sh | 35 -------------------
 2 files changed, 111 deletions(-)
 delete mode 100644 .circleci/config.yml
 delete mode 100755 .circleci/merge_base_branch.sh

diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index ad6d839f35..0000000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,76 +0,0 @@
-version: 2
-jobs:
-  sytestpy2postgresmerged:
-    docker:
-      - image: matrixdotorg/sytest-synapse:dinsic
-    working_directory: /src
-    steps:
-      - checkout
-      - run: bash .circleci/merge_base_branch.sh
-      - run: POSTGRES=1 /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
-  sytestpy2postgresworkersmerged:
-    docker:
-      - image: matrixdotorg/sytest-synapse:dinsic
-    working_directory: /src
-    steps:
-      - checkout
-      - run: bash .circleci/merge_base_branch.sh
-      - run: POSTGRES=1 WORKERS=1 /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
-  sytestpy3postgresmerged:
-      docker:
-        - image: matrixdotorg/sytest-synapse:dinsic-py3
-      working_directory: /src
-      steps:
-        - checkout
-        - run: bash .circleci/merge_base_branch.sh
-        - run: POSTGRES=1 /synapse_sytest.sh
-        - store_artifacts:
-            path: /logs
-            destination: logs
-        - store_test_results:
-            path: /logs
-  sytestpy3postgresworkersmerged:
-      docker:
-        - image: matrixdotorg/sytest-synapse:dinsic-py3
-      working_directory: /src
-      steps:
-        - checkout
-        - run: bash .circleci/merge_base_branch.sh
-        - run: POSTGRES=1 WORKERS=1 /synapse_sytest.sh
-        - store_artifacts:
-            path: /logs
-            destination: logs
-        - store_test_results:
-            path: /logs
-
-
-workflows:
-  version: 2
-  build:
-    jobs:
-      - sytestpy2postgresmerged:
-          filters:
-            branches:
-              ignore: /develop|master|release-.*/
-      - sytestpy2postgresworkersmerged:
-          filters:
-            branches:
-              ignore: /develop|master|release-.*/
-      - sytestpy3postgresmerged:
-          filters:
-            branches:
-              ignore: /develop|master|release-.*/
-      - sytestpy3postgresworkersmerged:
-          filters:
-            branches:
-              ignore: /develop|master|release-.*/
\ No newline at end of file
diff --git a/.circleci/merge_base_branch.sh b/.circleci/merge_base_branch.sh
deleted file mode 100755
index 3a6476a96c..0000000000
--- a/.circleci/merge_base_branch.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-# CircleCI doesn't give CIRCLE_PR_NUMBER in the environment for non-forked PRs. Wonderful.
-# In this case, we just need to do some ~shell magic~ to strip it out of the PULL_REQUEST URL.
-echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> $BASH_ENV
-source $BASH_ENV
-
-if [[ -z "${CIRCLE_PR_NUMBER}" ]]
-then
-    echo "Can't figure out what the PR number is! Assuming merge target is dinsic."
-
-    # It probably hasn't had a PR opened yet. Since all PRs for dinsic land on
-    # dinsic, we can probably assume it's based on it and will be merged into
-    # it.
-    GITBASE="dinsic"
-else
-    # Get the reference, using the GitHub API
-    GITBASE=`wget -O- https://api.github.com/repos/matrix-org/synapse-dinsic/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'`
-fi
-
-# Show what we are before
-git --no-pager show -s
-
-# Set up username so it can do a merge
-git config --global user.email bot@matrix.org
-git config --global user.name "A robot"
-
-# Fetch and merge. If it doesn't work, it will raise due to set -e.
-git fetch -u origin $GITBASE
-git merge --no-edit origin/$GITBASE
-
-# Show what we are after.
-git --no-pager show -s
-- 
cgit 1.5.1


From 0559c87007972ef76cf26b9dc7e0a150afac69ce Mon Sep 17 00:00:00 2001
From: Brendan Abolivier 
Date: Thu, 31 Oct 2019 15:49:15 +0000
Subject: Don't use test image

---
 .buildkite/pipeline.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
index 1f035a6ab1..75d0eb7f32 100644
--- a/.buildkite/pipeline.yml
+++ b/.buildkite/pipeline.yml
@@ -243,7 +243,7 @@ steps:
       - "bash /synapse_sytest.sh"
     plugins:
       - docker#v3.0.1:
-          image: "matrixdotorg/sytest-synapse:dinsic-test"
+          image: "matrixdotorg/sytest-synapse:dinsic"
           propagate-environment: true
           always-pull: true
           workdir: "/src"
@@ -269,7 +269,7 @@ steps:
       - "bash /synapse_sytest.sh"
     plugins:
       - docker#v3.0.1:
-          image: "matrixdotorg/sytest-synapse:dinsic-test"
+          image: "matrixdotorg/sytest-synapse:dinsic"
           propagate-environment: true
           always-pull: true
           workdir: "/src"
-- 
cgit 1.5.1


From e6f0536de17c625d627a5288581ee404400a533a Mon Sep 17 00:00:00 2001
From: Brendan Abolivier 
Date: Thu, 31 Oct 2019 16:16:25 +0000
Subject: Remove python TAP script

---
 .buildkite/format_tap.py | 48 ------------------------------------------------
 1 file changed, 48 deletions(-)
 delete mode 100644 .buildkite/format_tap.py

diff --git a/.buildkite/format_tap.py b/.buildkite/format_tap.py
deleted file mode 100644
index f153ffb661..0000000000
--- a/.buildkite/format_tap.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Matrix.org Foundation C.I.C.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-from tap.parser import Parser
-from tap.line import Result, Unknown, Diagnostic
-
-out = ["### TAP Output for " + sys.argv[2]]
-
-p = Parser()
-
-in_error = False
-
-for line in p.parse_file(sys.argv[1]):
-    if isinstance(line, Result):
-        if in_error:
-            out.append("")
-            out.append("</details>")
-            out.append("")
-            out.append("----")
-            out.append("")
-        in_error = False
-
-        if not line.ok and not line.todo:
-            in_error = True
-
-            out.append("FAILURE Test #%d: ``%s``" % (line.number, line.description))
-            out.append("")
-            out.append("<details><summary>Show log</summary>")
-
-    elif isinstance(line, Diagnostic) and in_error:
-        out.append(line.text)
-
-if out:
-    for line in out[:-3]:
-        print(line)
\ No newline at end of file
-- 
cgit 1.5.1


From 85f15ac59246b3fdbb914ca47b9f7c47d76a3b6b Mon Sep 17 00:00:00 2001
From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com>
Date: Fri, 1 Nov 2019 14:07:44 +0000
Subject: Support for routing outbound HTTP requests via a proxy (#6239)

The `http_proxy` and `HTTPS_PROXY` env vars can be set to a `host[:port]` value which should point to a proxy.

The address of the proxy should be excluded from IP blacklists such as the `url_preview_ip_range_blacklist`.

The proxy will then be used for
 * push
 * url previews
 * phone-home stats
 * recaptcha validation
 * CAS auth validation

It will *not* be used for:
 * Application Services
 * Identity servers
 * Outbound federation
 * In worker configurations, connections from workers to masters

Fixes #4198.
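
As an illustration of the intended usage (the proxy address below is a placeholder and is
not configured by this patch), the variables can simply be set in the homeserver's
environment:

    # Route Synapse's outbound client traffic through a proxy at 10.1.2.3:8888
    # (placeholder address); federation and identity-server traffic is unaffected.
    http_proxy=10.1.2.3:8888 HTTPS_PROXY=10.1.2.3:8888 \
        python -m synapse.app.homeserver --config-path homeserver.yaml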
---
 changelog.d/6238.feature                           |   1 +
 synapse/app/homeserver.py                          |   5 +-
 synapse/handlers/auth.py                           |   2 +-
 synapse/http/client.py                             |  17 +-
 synapse/http/connectproxyclient.py                 | 195 ++++++++++++
 synapse/http/proxyagent.py                         | 195 ++++++++++++
 synapse/push/httppusher.py                         |   2 +-
 synapse/rest/client/v1/login.py                    |   2 +-
 synapse/rest/media/v1/preview_url_resource.py      |   2 +
 synapse/server.py                                  |   9 +
 synapse/server.pyi                                 |  10 +-
 tests/http/__init__.py                             |  17 ++
 .../federation/test_matrix_federation_agent.py     |  11 +-
 tests/http/test_proxyagent.py                      | 334 +++++++++++++++++++++
 tests/push/test_http.py                            |   2 +-
 tests/server.py                                    |  24 +-
 16 files changed, 813 insertions(+), 15 deletions(-)
 create mode 100644 changelog.d/6238.feature
 create mode 100644 synapse/http/connectproxyclient.py
 create mode 100644 synapse/http/proxyagent.py
 create mode 100644 tests/http/test_proxyagent.py

diff --git a/changelog.d/6238.feature b/changelog.d/6238.feature
new file mode 100644
index 0000000000..d225ac33b6
--- /dev/null
+++ b/changelog.d/6238.feature
@@ -0,0 +1 @@
+Add support for outbound http proxying via http_proxy/HTTPS_PROXY env vars.
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 1045d28949..5a46b11bc0 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -563,9 +563,8 @@ def run(hs):
         stats["database_server_version"] = hs.get_datastore().get_server_version()
         logger.info("Reporting stats to matrix.org: %s" % (stats,))
         try:
-            yield hs.get_simple_http_client().put_json(
-                "https://matrix.org/report-usage-stats/push",
-                stats
+            yield hs.get_proxied_http_client().put_json(
+                "https://matrix.org/report-usage-stats/push", stats
             )
         except Exception as e:
             logger.warn("Error reporting stats: %s", e)
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index a0cf37a9f9..9a2ff177a6 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -420,7 +420,7 @@ class AuthHandler(BaseHandler):
         # TODO: get this from the homeserver rather than creating a new one for
         # each request
         try:
-            client = self.hs.get_simple_http_client()
+            client = self.hs.get_proxied_http_client()
             resp_body = yield client.post_urlencoded_get_json(
                 self.hs.config.recaptcha_siteverify_api,
                 args={
diff --git a/synapse/http/client.py b/synapse/http/client.py
index 77fe68818b..bf8afa703e 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -45,6 +45,7 @@ from synapse.http import (
     cancelled_to_request_timed_out_error,
     redact_uri,
 )
+from synapse.http.proxyagent import ProxyAgent
 from synapse.util.async_helpers import timeout_deferred
 from synapse.util.caches import CACHE_SIZE_FACTOR
 from synapse.util.logcontext import make_deferred_yieldable
@@ -185,7 +186,15 @@ class SimpleHttpClient(object):
     using HTTP in Matrix
     """
 
-    def __init__(self, hs, treq_args={}, ip_whitelist=None, ip_blacklist=None):
+    def __init__(
+        self,
+        hs,
+        treq_args={},
+        ip_whitelist=None,
+        ip_blacklist=None,
+        http_proxy=None,
+        https_proxy=None,
+    ):
         """
         Args:
             hs (synapse.server.HomeServer)
@@ -194,6 +203,8 @@ class SimpleHttpClient(object):
                 we may not request.
             ip_whitelist (netaddr.IPSet): The whitelisted IP addresses, that we can
                request if it were otherwise caught in a blacklist.
+            http_proxy (bytes): proxy server to use for http connections. host[:port]
+            https_proxy (bytes): proxy server to use for https connections. host[:port]
         """
         self.hs = hs
 
@@ -238,11 +249,13 @@ class SimpleHttpClient(object):
         # The default context factory in Twisted 14.0.0 (which we require) is
         # BrowserLikePolicyForHTTPS which will do regular cert validation
         # 'like a browser'
-        self.agent = Agent(
+        self.agent = ProxyAgent(
             self.reactor,
             connectTimeout=15,
             contextFactory=self.hs.get_http_client_context_factory(),
             pool=pool,
+            http_proxy=http_proxy,
+            https_proxy=https_proxy,
         )
 
         if self._ip_blacklist:
diff --git a/synapse/http/connectproxyclient.py b/synapse/http/connectproxyclient.py
new file mode 100644
index 0000000000..be7b2ceb8e
--- /dev/null
+++ b/synapse/http/connectproxyclient.py
@@ -0,0 +1,195 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from zope.interface import implementer
+
+from twisted.internet import defer, protocol
+from twisted.internet.error import ConnectError
+from twisted.internet.interfaces import IStreamClientEndpoint
+from twisted.internet.protocol import connectionDone
+from twisted.web import http
+
+logger = logging.getLogger(__name__)
+
+
+class ProxyConnectError(ConnectError):
+    pass
+
+
+@implementer(IStreamClientEndpoint)
+class HTTPConnectProxyEndpoint(object):
+    """An Endpoint implementation which will send a CONNECT request to an http proxy
+
+    Wraps an existing HostnameEndpoint for the proxy.
+
+    When we get the connect() request from the connection pool (via the TLS wrapper),
+    we'll first connect to the proxy endpoint with a ProtocolFactory which will make the
+    CONNECT request. Once that completes, we invoke the protocolFactory which was passed
+    in.
+
+    Args:
+        reactor: the Twisted reactor to use for the connection
+        proxy_endpoint (IStreamClientEndpoint): the endpoint to use to connect to the
+            proxy
+        host (bytes): hostname that we want to CONNECT to
+        port (int): port that we want to connect to
+    """
+
+    def __init__(self, reactor, proxy_endpoint, host, port):
+        self._reactor = reactor
+        self._proxy_endpoint = proxy_endpoint
+        self._host = host
+        self._port = port
+
+    def __repr__(self):
+        return "<HTTPConnectProxyEndpoint %s>" % (self._proxy_endpoint,)
+
+    def connect(self, protocolFactory):
+        f = HTTPProxiedClientFactory(self._host, self._port, protocolFactory)
+        d = self._proxy_endpoint.connect(f)
+        # once the tcp socket connects successfully, we need to wait for the
+        # CONNECT to complete.
+        d.addCallback(lambda conn: f.on_connection)
+        return d
+
+
+class HTTPProxiedClientFactory(protocol.ClientFactory):
+    """ClientFactory wrapper that triggers an HTTP proxy CONNECT on connect.
+
+    Once the CONNECT completes, invokes the original ClientFactory to build the
+    HTTP Protocol object and run the rest of the connection.
+
+    Args:
+        dst_host (bytes): hostname that we want to CONNECT to
+        dst_port (int): port that we want to connect to
+        wrapped_factory (protocol.ClientFactory): The original Factory
+    """
+
+    def __init__(self, dst_host, dst_port, wrapped_factory):
+        self.dst_host = dst_host
+        self.dst_port = dst_port
+        self.wrapped_factory = wrapped_factory
+        self.on_connection = defer.Deferred()
+
+    def startedConnecting(self, connector):
+        return self.wrapped_factory.startedConnecting(connector)
+
+    def buildProtocol(self, addr):
+        wrapped_protocol = self.wrapped_factory.buildProtocol(addr)
+
+        return HTTPConnectProtocol(
+            self.dst_host, self.dst_port, wrapped_protocol, self.on_connection
+        )
+
+    def clientConnectionFailed(self, connector, reason):
+        logger.debug("Connection to proxy failed: %s", reason)
+        if not self.on_connection.called:
+            self.on_connection.errback(reason)
+        return self.wrapped_factory.clientConnectionFailed(connector, reason)
+
+    def clientConnectionLost(self, connector, reason):
+        logger.debug("Connection to proxy lost: %s", reason)
+        if not self.on_connection.called:
+            self.on_connection.errback(reason)
+        return self.wrapped_factory.clientConnectionLost(connector, reason)
+
+
+class HTTPConnectProtocol(protocol.Protocol):
+    """Protocol that wraps an existing Protocol to do a CONNECT handshake at connect
+
+    Args:
+        host (bytes): The original HTTP(s) hostname or IPv4 or IPv6 address literal
+            to put in the CONNECT request
+
+        port (int): The original HTTP(s) port to put in the CONNECT request
+
+        wrapped_protocol (interfaces.IProtocol): the original protocol (probably
+            HTTPChannel or TLSMemoryBIOProtocol, but could be anything really)
+
+        connected_deferred (Deferred): a Deferred which will be callbacked with
+            wrapped_protocol when the CONNECT completes
+    """
+
+    def __init__(self, host, port, wrapped_protocol, connected_deferred):
+        self.host = host
+        self.port = port
+        self.wrapped_protocol = wrapped_protocol
+        self.connected_deferred = connected_deferred
+        self.http_setup_client = HTTPConnectSetupClient(self.host, self.port)
+        self.http_setup_client.on_connected.addCallback(self.proxyConnected)
+
+    def connectionMade(self):
+        self.http_setup_client.makeConnection(self.transport)
+
+    def connectionLost(self, reason=connectionDone):
+        if self.wrapped_protocol.connected:
+            self.wrapped_protocol.connectionLost(reason)
+
+        self.http_setup_client.connectionLost(reason)
+
+        if not self.connected_deferred.called:
+            self.connected_deferred.errback(reason)
+
+    def proxyConnected(self, _):
+        self.wrapped_protocol.makeConnection(self.transport)
+
+        self.connected_deferred.callback(self.wrapped_protocol)
+
+        # Get any pending data from the http buf and forward it to the original protocol
+        buf = self.http_setup_client.clearLineBuffer()
+        if buf:
+            self.wrapped_protocol.dataReceived(buf)
+
+    def dataReceived(self, data):
+        # if we've set up the HTTP protocol, we can send the data there
+        if self.wrapped_protocol.connected:
+            return self.wrapped_protocol.dataReceived(data)
+
+        # otherwise, we must still be setting up the connection: send the data to the
+        # setup client
+        return self.http_setup_client.dataReceived(data)
+
+
+class HTTPConnectSetupClient(http.HTTPClient):
+    """HTTPClient protocol to send a CONNECT message for proxies and read the response.
+
+    Args:
+        host (bytes): The hostname to send in the CONNECT message
+        port (int): The port to send in the CONNECT message
+    """
+
+    def __init__(self, host, port):
+        self.host = host
+        self.port = port
+        self.on_connected = defer.Deferred()
+
+    def connectionMade(self):
+        logger.debug("Connected to proxy, sending CONNECT")
+        self.sendCommand(b"CONNECT", b"%s:%d" % (self.host, self.port))
+        self.endHeaders()
+
+    def handleStatus(self, version, status, message):
+        logger.debug("Got Status: %s %s %s", status, message, version)
+        if status != b"200":
+            raise ProxyConnectError("Unexpected status on CONNECT: %s" % status)
+
+    def handleEndHeaders(self):
+        logger.debug("End Headers")
+        self.on_connected.callback(None)
+
+    def handleResponse(self, body):
+        pass
diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py
new file mode 100644
index 0000000000..332da02a8d
--- /dev/null
+++ b/synapse/http/proxyagent.py
@@ -0,0 +1,195 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import re
+
+from zope.interface import implementer
+
+from twisted.internet import defer
+from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
+from twisted.python.failure import Failure
+from twisted.web.client import URI, BrowserLikePolicyForHTTPS, _AgentBase
+from twisted.web.error import SchemeNotSupported
+from twisted.web.iweb import IAgent
+
+from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint
+
+logger = logging.getLogger(__name__)
+
+_VALID_URI = re.compile(br"\A[\x21-\x7e]+\Z")
+
+
+@implementer(IAgent)
+class ProxyAgent(_AgentBase):
+    """An Agent implementation which will use an HTTP proxy if one was requested
+
+    Args:
+        reactor: twisted reactor to use for outgoing
+            connections.
+
+        contextFactory (IPolicyForHTTPS): A factory for TLS contexts, to control the
+            verification parameters of OpenSSL.  The default is to use a
+            `BrowserLikePolicyForHTTPS`, so unless you have special
+            requirements you can leave this as-is.
+
+        connectTimeout (float): The amount of time that this Agent will wait
+            for the peer to accept a connection.
+
+        bindAddress (bytes): The local address for client sockets to bind to.
+
+        pool (HTTPConnectionPool|None): connection pool to be used. If None, a
+            non-persistent pool instance will be created.
+    """
+
+    def __init__(
+        self,
+        reactor,
+        contextFactory=BrowserLikePolicyForHTTPS(),
+        connectTimeout=None,
+        bindAddress=None,
+        pool=None,
+        http_proxy=None,
+        https_proxy=None,
+    ):
+        _AgentBase.__init__(self, reactor, pool)
+
+        self._endpoint_kwargs = {}
+        if connectTimeout is not None:
+            self._endpoint_kwargs["timeout"] = connectTimeout
+        if bindAddress is not None:
+            self._endpoint_kwargs["bindAddress"] = bindAddress
+
+        self.http_proxy_endpoint = _http_proxy_endpoint(
+            http_proxy, reactor, **self._endpoint_kwargs
+        )
+
+        self.https_proxy_endpoint = _http_proxy_endpoint(
+            https_proxy, reactor, **self._endpoint_kwargs
+        )
+
+        self._policy_for_https = contextFactory
+        self._reactor = reactor
+
+    def request(self, method, uri, headers=None, bodyProducer=None):
+        """
+        Issue a request to the server indicated by the given uri.
+
+        Supports `http` and `https` schemes.
+
+        An existing connection from the connection pool may be used or a new one may be
+        created.
+
+        See also: twisted.web.iweb.IAgent.request
+
+        Args:
+            method (bytes): The request method to use, such as `GET`, `POST`, etc
+
+            uri (bytes): The location of the resource to request.
+
+            headers (Headers|None): Extra headers to send with the request
+
+            bodyProducer (IBodyProducer|None): An object which can generate bytes to
+                make up the body of this request (for example, the properly encoded
+                contents of a file for a file upload). Or, None if the request is to
+                have no body.
+
+        Returns:
+            Deferred[IResponse]: completes when the header of the response has
+                 been received (regardless of the response status code).
+        """
+        uri = uri.strip()
+        if not _VALID_URI.match(uri):
+            raise ValueError("Invalid URI {!r}".format(uri))
+
+        parsed_uri = URI.fromBytes(uri)
+        pool_key = (parsed_uri.scheme, parsed_uri.host, parsed_uri.port)
+        request_path = parsed_uri.originForm
+
+        if parsed_uri.scheme == b"http" and self.http_proxy_endpoint:
+            # Cache *all* connections under the same key, since we are only
+            # connecting to a single destination, the proxy:
+            pool_key = ("http-proxy", self.http_proxy_endpoint)
+            endpoint = self.http_proxy_endpoint
+            request_path = uri
+        elif parsed_uri.scheme == b"https" and self.https_proxy_endpoint:
+            endpoint = HTTPConnectProxyEndpoint(
+                self._reactor,
+                self.https_proxy_endpoint,
+                parsed_uri.host,
+                parsed_uri.port,
+            )
+        else:
+            # not using a proxy
+            endpoint = HostnameEndpoint(
+                self._reactor, parsed_uri.host, parsed_uri.port, **self._endpoint_kwargs
+            )
+
+        logger.debug("Requesting %s via %s", uri, endpoint)
+
+        if parsed_uri.scheme == b"https":
+            tls_connection_creator = self._policy_for_https.creatorForNetloc(
+                parsed_uri.host, parsed_uri.port
+            )
+            endpoint = wrapClientTLS(tls_connection_creator, endpoint)
+        elif parsed_uri.scheme == b"http":
+            pass
+        else:
+            return defer.fail(
+                Failure(
+                    SchemeNotSupported("Unsupported scheme: %r" % (parsed_uri.scheme,))
+                )
+            )
+
+        return self._requestWithEndpoint(
+            pool_key, endpoint, method, parsed_uri, headers, bodyProducer, request_path
+        )
+
+
+def _http_proxy_endpoint(proxy, reactor, **kwargs):
+    """Parses an http proxy setting and returns an endpoint for the proxy
+
+    Args:
+        proxy (bytes|None):  the proxy setting
+        reactor: reactor to be used to connect to the proxy
+        kwargs: other args to be passed to HostnameEndpoint
+
+    Returns:
+        interfaces.IStreamClientEndpoint|None: endpoint to use to connect to the proxy,
+            or None
+    """
+    if proxy is None:
+        return None
+
+    # currently we only support hostname:port. Some apps also support
+    # protocol://<host>[:port], which allows a way of requiring a TLS connection to the
+    # proxy.
+
+    host, port = parse_host_port(proxy, default_port=1080)
+    return HostnameEndpoint(reactor, host, port, **kwargs)
+
+
+def parse_host_port(hostport, default_port=None):
+    # could have sworn we had one of these somewhere else...
+    if b":" in hostport:
+        host, port = hostport.rsplit(b":", 1)
+        try:
+            port = int(port)
+            return host, port
+        except ValueError:
+            # the thing after the : wasn't a valid port; presumably this is an
+            # IPv6 address.
+            pass
+
+    return hostport, default_port
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
index fac05aa44c..a21b164266 100644
--- a/synapse/push/httppusher.py
+++ b/synapse/push/httppusher.py
@@ -107,7 +107,7 @@ class HttpPusher(object):
                 "'url' required in data for HTTP pusher"
             )
         self.url = self.data['url']
-        self.http_client = hs.get_simple_http_client()
+        self.http_client = hs.get_proxied_http_client()
         self.data_minus_url = {}
         self.data_minus_url.update(self.data)
         del self.data_minus_url['url']
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index 3b60728628..7c86b88f30 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -403,7 +403,7 @@ class CasTicketServlet(RestServlet):
         self.cas_service_url = hs.config.cas_service_url
         self.cas_required_attributes = hs.config.cas_required_attributes
         self._sso_auth_handler = SSOAuthHandler(hs)
-        self._http_client = hs.get_simple_http_client()
+        self._http_client = hs.get_proxied_http_client()
 
     @defer.inlineCallbacks
     def on_GET(self, request):
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index acf87709f2..85a7c61a24 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -75,6 +75,8 @@ class PreviewUrlResource(Resource):
             treq_args={"browser_like_redirects": True},
             ip_whitelist=hs.config.url_preview_ip_range_whitelist,
             ip_blacklist=hs.config.url_preview_ip_range_blacklist,
+            http_proxy=os.getenv("http_proxy"),
+            https_proxy=os.getenv("HTTPS_PROXY"),
         )
         self.media_repo = media_repo
         self.primary_base_path = media_repo.primary_base_path
diff --git a/synapse/server.py b/synapse/server.py
index 9d5600afa9..3f3c79498a 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -23,6 +23,7 @@
 # Imports required for the default HomeServer() implementation
 import abc
 import logging
+import os
 
 from twisted.enterprise import adbapi
 from twisted.mail.smtp import sendmail
@@ -165,6 +166,7 @@ class HomeServer(object):
         'event_builder_factory',
         'filtering',
         'http_client_context_factory',
+        "proxied_http_client",
         'simple_http_client',
         'media_repository',
         'media_repository_resource',
@@ -306,6 +308,13 @@ class HomeServer(object):
     def build_simple_http_client(self):
         return SimpleHttpClient(self)
 
+    def build_proxied_http_client(self):
+        return SimpleHttpClient(
+            self,
+            http_proxy=os.getenv("http_proxy"),
+            https_proxy=os.getenv("HTTPS_PROXY"),
+        )
+
     def build_room_creation_handler(self):
         return RoomCreationHandler(self)
 
diff --git a/synapse/server.pyi b/synapse/server.pyi
index 9583e82d52..69263458db 100644
--- a/synapse/server.pyi
+++ b/synapse/server.pyi
@@ -12,6 +12,7 @@ import synapse.handlers.message
 import synapse.handlers.room
 import synapse.handlers.room_member
 import synapse.handlers.set_password
+import synapse.http.client
 import synapse.rest.media.v1.media_repository
 import synapse.server_notices.server_notices_manager
 import synapse.server_notices.server_notices_sender
@@ -46,7 +47,14 @@ class HomeServer(object):
 
     def get_state_resolution_handler(self) -> synapse.state.StateResolutionHandler:
         pass
-
+    def get_simple_http_client(self) -> synapse.http.client.SimpleHttpClient:
+        """Fetch an HTTP client implementation which doesn't do any blacklisting
+        or support any HTTP_PROXY settings"""
+        pass
+    def get_proxied_http_client(self) -> synapse.http.client.SimpleHttpClient:
+        """Fetch an HTTP client implementation which doesn't do any blacklisting
+        but does support HTTP_PROXY settings"""
+        pass
     def get_deactivate_account_handler(self) -> synapse.handlers.deactivate_account.DeactivateAccountHandler:
         pass
 
diff --git a/tests/http/__init__.py b/tests/http/__init__.py
index 2d5dba6464..2096ba3c91 100644
--- a/tests/http/__init__.py
+++ b/tests/http/__init__.py
@@ -20,6 +20,23 @@ from zope.interface import implementer
 from OpenSSL import SSL
 from OpenSSL.SSL import Connection
 from twisted.internet.interfaces import IOpenSSLServerConnectionCreator
+from twisted.internet.ssl import Certificate, trustRootFromCertificates
+from twisted.web.client import BrowserLikePolicyForHTTPS  # noqa: F401
+from twisted.web.iweb import IPolicyForHTTPS  # noqa: F401
+
+
+def get_test_https_policy():
+    """Get a test IPolicyForHTTPS which trusts the test CA cert
+
+    Returns:
+        IPolicyForHTTPS
+    """
+    ca_file = get_test_ca_cert_file()
+    with open(ca_file) as stream:
+        content = stream.read()
+    cert = Certificate.loadPEM(content)
+    trust_root = trustRootFromCertificates([cert])
+    return BrowserLikePolicyForHTTPS(trustRoot=trust_root)
 
 
 def get_test_ca_cert_file():
diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py
index ecce473b01..6609d6b366 100644
--- a/tests/http/federation/test_matrix_federation_agent.py
+++ b/tests/http/federation/test_matrix_federation_agent.py
@@ -113,19 +113,24 @@ class MatrixFederationAgentTests(TestCase):
             FakeTransport(client_protocol, self.reactor, server_tls_protocol)
         )
 
+        # grab a hold of the TLS connection, in case it gets torn down
+        server_tls_connection = server_tls_protocol._tlsConnection
+
+        # fish the test server back out of the server-side TLS protocol.
+        http_protocol = server_tls_protocol.wrappedProtocol
+
         # give the reactor a pump to get the TLS juices flowing.
         self.reactor.pump((0.1,))
 
         # check the SNI
-        server_name = server_tls_protocol._tlsConnection.get_servername()
+        server_name = server_tls_connection.get_servername()
         self.assertEqual(
             server_name,
             expected_sni,
             "Expected SNI %s but got %s" % (expected_sni, server_name),
         )
 
-        # fish the test server back out of the server-side TLS protocol.
-        return server_tls_protocol.wrappedProtocol
+        return http_protocol
 
     @defer.inlineCallbacks
     def _make_get_request(self, uri):
diff --git a/tests/http/test_proxyagent.py b/tests/http/test_proxyagent.py
new file mode 100644
index 0000000000..22abf76515
--- /dev/null
+++ b/tests/http/test_proxyagent.py
@@ -0,0 +1,334 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+import treq
+
+from twisted.internet import interfaces  # noqa: F401
+from twisted.internet.protocol import Factory
+from twisted.protocols.tls import TLSMemoryBIOFactory
+from twisted.web.http import HTTPChannel
+
+from synapse.http.proxyagent import ProxyAgent
+
+from tests.http import TestServerTLSConnectionFactory, get_test_https_policy
+from tests.server import FakeTransport, ThreadedMemoryReactorClock
+from tests.unittest import TestCase
+
+logger = logging.getLogger(__name__)
+
+HTTPFactory = Factory.forProtocol(HTTPChannel)
+
+
+class MatrixFederationAgentTests(TestCase):
+    def setUp(self):
+        self.reactor = ThreadedMemoryReactorClock()
+
+    def _make_connection(
+        self, client_factory, server_factory, ssl=False, expected_sni=None
+    ):
+        """Builds a test server, and completes the outgoing client connection
+
+        Args:
+            client_factory (interfaces.IProtocolFactory): the factory that the
+                application is trying to use to make the outbound connection. We will
+                invoke it to build the client Protocol
+
+            server_factory (interfaces.IProtocolFactory): a factory to build the
+                server-side protocol
+
+            ssl (bool): If true, we will expect an ssl connection and wrap
+                server_factory with a TLSMemoryBIOFactory
+
+            expected_sni (bytes|None): the expected SNI value
+
+        Returns:
+            IProtocol: the server Protocol returned by server_factory
+        """
+        if ssl:
+            server_factory = _wrap_server_factory_for_tls(server_factory)
+
+        server_protocol = server_factory.buildProtocol(None)
+
+        # now, tell the client protocol factory to build the client protocol,
+        # and wire the output of said protocol up to the server via
+        # a FakeTransport.
+        #
+        # Normally this would be done by the TCP socket code in Twisted, but we are
+        # stubbing that out here.
+        client_protocol = client_factory.buildProtocol(None)
+        client_protocol.makeConnection(
+            FakeTransport(server_protocol, self.reactor, client_protocol)
+        )
+
+        # tell the server protocol to send its stuff back to the client, too
+        server_protocol.makeConnection(
+            FakeTransport(client_protocol, self.reactor, server_protocol)
+        )
+
+        if ssl:
+            http_protocol = server_protocol.wrappedProtocol
+            tls_connection = server_protocol._tlsConnection
+        else:
+            http_protocol = server_protocol
+            tls_connection = None
+
+        # give the reactor a pump to get the TLS juices flowing (if needed)
+        self.reactor.advance(0)
+
+        if expected_sni is not None:
+            server_name = tls_connection.get_servername()
+            self.assertEqual(
+                server_name,
+                expected_sni,
+                "Expected SNI %s but got %s" % (expected_sni, server_name),
+            )
+
+        return http_protocol
+
+    def test_http_request(self):
+        agent = ProxyAgent(self.reactor)
+
+        self.reactor.lookups["test.com"] = "1.2.3.4"
+        d = agent.request(b"GET", b"http://test.com")
+
+        # there should be a pending TCP connection
+        clients = self.reactor.tcpClients
+        self.assertEqual(len(clients), 1)
+        (host, port, client_factory, _timeout, _bindAddress) = clients[0]
+        self.assertEqual(host, "1.2.3.4")
+        self.assertEqual(port, 80)
+
+        # make a test server, and wire up the client
+        http_server = self._make_connection(
+            client_factory, _get_test_protocol_factory()
+        )
+
+        # the FakeTransport is async, so we need to pump the reactor
+        self.reactor.advance(0)
+
+        # now there should be a pending request
+        self.assertEqual(len(http_server.requests), 1)
+
+        request = http_server.requests[0]
+        self.assertEqual(request.method, b"GET")
+        self.assertEqual(request.path, b"/")
+        self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
+        request.write(b"result")
+        request.finish()
+
+        self.reactor.advance(0)
+
+        resp = self.successResultOf(d)
+        body = self.successResultOf(treq.content(resp))
+        self.assertEqual(body, b"result")
+
+    def test_https_request(self):
+        agent = ProxyAgent(self.reactor, contextFactory=get_test_https_policy())
+
+        self.reactor.lookups["test.com"] = "1.2.3.4"
+        d = agent.request(b"GET", b"https://test.com/abc")
+
+        # there should be a pending TCP connection
+        clients = self.reactor.tcpClients
+        self.assertEqual(len(clients), 1)
+        (host, port, client_factory, _timeout, _bindAddress) = clients[0]
+        self.assertEqual(host, "1.2.3.4")
+        self.assertEqual(port, 443)
+
+        # make a test server, and wire up the client
+        http_server = self._make_connection(
+            client_factory,
+            _get_test_protocol_factory(),
+            ssl=True,
+            expected_sni=b"test.com",
+        )
+
+        # the FakeTransport is async, so we need to pump the reactor
+        self.reactor.advance(0)
+
+        # now there should be a pending request
+        self.assertEqual(len(http_server.requests), 1)
+
+        request = http_server.requests[0]
+        self.assertEqual(request.method, b"GET")
+        self.assertEqual(request.path, b"/abc")
+        self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
+        request.write(b"result")
+        request.finish()
+
+        self.reactor.advance(0)
+
+        resp = self.successResultOf(d)
+        body = self.successResultOf(treq.content(resp))
+        self.assertEqual(body, b"result")
+
+    def test_http_request_via_proxy(self):
+        agent = ProxyAgent(self.reactor, http_proxy=b"proxy.com:8888")
+
+        self.reactor.lookups["proxy.com"] = "1.2.3.5"
+        d = agent.request(b"GET", b"http://test.com")
+
+        # there should be a pending TCP connection
+        clients = self.reactor.tcpClients
+        self.assertEqual(len(clients), 1)
+        (host, port, client_factory, _timeout, _bindAddress) = clients[0]
+        self.assertEqual(host, "1.2.3.5")
+        self.assertEqual(port, 8888)
+
+        # make a test server, and wire up the client
+        http_server = self._make_connection(
+            client_factory, _get_test_protocol_factory()
+        )
+
+        # the FakeTransport is async, so we need to pump the reactor
+        self.reactor.advance(0)
+
+        # now there should be a pending request
+        self.assertEqual(len(http_server.requests), 1)
+
+        request = http_server.requests[0]
+        self.assertEqual(request.method, b"GET")
+        self.assertEqual(request.path, b"http://test.com")
+        self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
+        request.write(b"result")
+        request.finish()
+
+        self.reactor.advance(0)
+
+        resp = self.successResultOf(d)
+        body = self.successResultOf(treq.content(resp))
+        self.assertEqual(body, b"result")
+
+    def test_https_request_via_proxy(self):
+        agent = ProxyAgent(
+            self.reactor,
+            contextFactory=get_test_https_policy(),
+            https_proxy=b"proxy.com",
+        )
+
+        self.reactor.lookups["proxy.com"] = "1.2.3.5"
+        d = agent.request(b"GET", b"https://test.com/abc")
+
+        # there should be a pending TCP connection
+        clients = self.reactor.tcpClients
+        self.assertEqual(len(clients), 1)
+        (host, port, client_factory, _timeout, _bindAddress) = clients[0]
+        self.assertEqual(host, "1.2.3.5")
+        self.assertEqual(port, 1080)
+
+        # make a test HTTP server, and wire up the client
+        proxy_server = self._make_connection(
+            client_factory, _get_test_protocol_factory()
+        )
+
+        # fish the transports back out so that we can do the old switcheroo
+        s2c_transport = proxy_server.transport
+        client_protocol = s2c_transport.other
+        c2s_transport = client_protocol.transport
+
+        # the FakeTransport is async, so we need to pump the reactor
+        self.reactor.advance(0)
+
+        # now there should be a pending CONNECT request
+        self.assertEqual(len(proxy_server.requests), 1)
+
+        request = proxy_server.requests[0]
+        self.assertEqual(request.method, b"CONNECT")
+        self.assertEqual(request.path, b"test.com:443")
+
+        # tell the proxy server not to close the connection
+        proxy_server.persistent = True
+
+        # this just stops the http Request trying to do a chunked response
+        # request.setHeader(b"Content-Length", b"0")
+        request.finish()
+
+        # now we can replace the proxy channel with a new, SSL-wrapped HTTP channel
+        ssl_factory = _wrap_server_factory_for_tls(_get_test_protocol_factory())
+        ssl_protocol = ssl_factory.buildProtocol(None)
+        http_server = ssl_protocol.wrappedProtocol
+
+        ssl_protocol.makeConnection(
+            FakeTransport(client_protocol, self.reactor, ssl_protocol)
+        )
+        c2s_transport.other = ssl_protocol
+
+        self.reactor.advance(0)
+
+        server_name = ssl_protocol._tlsConnection.get_servername()
+        expected_sni = b"test.com"
+        self.assertEqual(
+            server_name,
+            expected_sni,
+            "Expected SNI %s but got %s" % (expected_sni, server_name),
+        )
+
+        # now there should be a pending request
+        self.assertEqual(len(http_server.requests), 1)
+
+        request = http_server.requests[0]
+        self.assertEqual(request.method, b"GET")
+        self.assertEqual(request.path, b"/abc")
+        self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
+        request.write(b"result")
+        request.finish()
+
+        self.reactor.advance(0)
+
+        resp = self.successResultOf(d)
+        body = self.successResultOf(treq.content(resp))
+        self.assertEqual(body, b"result")
+
+
+def _wrap_server_factory_for_tls(factory, sanlist=None):
+    """Wrap an existing Protocol Factory with a test TLSMemoryBIOFactory
+
+    The resultant factory will create a TLS server which presents a certificate
+    signed by our test CA, valid for the domains in `sanlist`
+
+    Args:
+        factory (interfaces.IProtocolFactory): protocol factory to wrap
+        sanlist (iterable[bytes]): list of domains the cert should be valid for
+
+    Returns:
+        interfaces.IProtocolFactory
+    """
+    if sanlist is None:
+        sanlist = [b"DNS:test.com"]
+
+    connection_creator = TestServerTLSConnectionFactory(sanlist=sanlist)
+    return TLSMemoryBIOFactory(
+        connection_creator, isClient=False, wrappedFactory=factory
+    )
+
+
+def _get_test_protocol_factory():
+    """Get a protocol Factory which will build an HTTPChannel
+
+    Returns:
+        interfaces.IProtocolFactory
+    """
+    server_factory = Factory.forProtocol(HTTPChannel)
+
+    # Request.finish expects the factory to have a 'log' method.
+    server_factory.log = _log_request
+
+    return server_factory
+
+
+def _log_request(request):
+    """Implements Factory.log, which is expected by Request.finish"""
+    logger.info("Completed request %s", request)
diff --git a/tests/push/test_http.py b/tests/push/test_http.py
index 22c3f73ef3..92ce32d448 100644
--- a/tests/push/test_http.py
+++ b/tests/push/test_http.py
@@ -50,7 +50,7 @@ class HTTPPusherTests(HomeserverTestCase):
         config = self.default_config()
         config["start_pushers"] = True
 
-        hs = self.setup_test_homeserver(config=config, simple_http_client=m)
+        hs = self.setup_test_homeserver(config=config, proxied_http_client=m)
 
         return hs
 
diff --git a/tests/server.py b/tests/server.py
index c15a47f2a4..aa0958f6e9 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -387,11 +387,24 @@ class FakeTransport(object):
             self.disconnecting = True
             if self._protocol:
                 self._protocol.connectionLost(reason)
-            self.disconnected = True
+
+            # if we still have data to write, delay until that is done
+            if self.buffer:
+                logger.info(
+                    "FakeTransport: Delaying disconnect until buffer is flushed"
+                )
+            else:
+                self.disconnected = True
 
     def abortConnection(self):
         logger.info("FakeTransport: abortConnection()")
-        self.loseConnection()
+
+        if not self.disconnecting:
+            self.disconnecting = True
+            if self._protocol:
+                self._protocol.connectionLost(None)
+
+        self.disconnected = True
 
     def pauseProducing(self):
         if not self.producer:
@@ -422,6 +435,9 @@ class FakeTransport(object):
             self._reactor.callLater(0.0, _produce)
 
     def write(self, byt):
+        if self.disconnecting:
+            raise Exception("Writing to disconnecting FakeTransport")
+
         self.buffer = self.buffer + byt
 
         # always actually do the write asynchronously. Some protocols (notably the
@@ -465,3 +481,7 @@ class FakeTransport(object):
         self.buffer = self.buffer[len(to_write) :]
         if self.buffer and self.autoflush:
             self._reactor.callLater(0.0, self.flush)
+
+        if not self.buffer and self.disconnecting:
+            logger.info("FakeTransport: Buffer now empty, completing disconnect")
+            self.disconnected = True
-- 
cgit 1.5.1
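
On the wire, the handshake driven by HTTPConnectSetupClient above looks roughly
like the exchange below; the destination host, port and the proxy's reason
phrase are illustrative, and the client only checks that the status code is
200 before handing the socket over to the wrapped protocol:

    CONNECT matrix.org:443 HTTP/1.0

    HTTP/1.0 200 Connection established

Once the 200 response has been seen, any bytes left in the line buffer are
forwarded to the wrapped protocol, which then performs its own TLS handshake
with the destination through the tunnel.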


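As a rough sketch of how the pieces fit together (mirroring
HomeServer.build_proxied_http_client() and the new tests; the proxy address
and target URL are made-up values, not part of the patch):

    from twisted.internet import reactor
    from twisted.web.client import readBody

    from synapse.http.proxyagent import ProxyAgent

    # Plain http URIs are sent to the proxy in absolute form; https URIs are
    # tunnelled through a CONNECT request and then TLS-wrapped as usual.
    agent = ProxyAgent(
        reactor,
        http_proxy=b"proxy.example.com:8888",   # host[:port]; port defaults to 1080
        https_proxy=b"proxy.example.com:8888",
    )

    def fetch():
        d = agent.request(b"GET", b"https://example.com/some/path")
        d.addCallback(readBody)
        return d

If no proxy is configured, ProxyAgent falls back to a plain HostnameEndpoint,
so behaviour is unchanged from the previous Agent-based client.
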
From dfa60504af03d841b8dbf02b4a1420b3b5203618 Mon Sep 17 00:00:00 2001
From: Brendan Abolivier 
Date: Wed, 6 Nov 2019 15:47:40 +0000
Subject: Don't apply retention policy based filtering on state events

As per MSC1763, 'Retention is only considered for non-state events', so don't filter out state events based on the room's retention policy.
---
 synapse/visibility.py               | 15 +++++++++------
 tests/rest/client/test_retention.py | 10 ++++++++++
 2 files changed, 19 insertions(+), 6 deletions(-)

diff --git a/synapse/visibility.py b/synapse/visibility.py
index 5b6562b481..79aa2ee1e2 100644
--- a/synapse/visibility.py
+++ b/synapse/visibility.py
@@ -115,14 +115,17 @@ def filter_events_for_client(store, user_id, events, is_peeking=False,
         if not event.is_state() and event.sender in ignore_list:
             return None
 
-        retention_policy = retention_policies[event.room_id]
-        max_lifetime = retention_policy.get("max_lifetime")
+        # Don't try to apply the room's retention policy if the event is a state event, as
+        # MSC1763 states that retention is only considered for non-state events.
+        if not event.is_state():
+            retention_policy = retention_policies[event.room_id]
+            max_lifetime = retention_policy.get("max_lifetime")
 
-        if max_lifetime is not None:
-            oldest_allowed_ts = store.clock.time_msec() - max_lifetime
+            if max_lifetime is not None:
+                oldest_allowed_ts = store.clock.time_msec() - max_lifetime
 
-            if event.origin_server_ts < oldest_allowed_ts:
-                return None
+                if event.origin_server_ts < oldest_allowed_ts:
+                    return None
 
         if event.event_id in always_include_ids:
             return event
diff --git a/tests/rest/client/test_retention.py b/tests/rest/client/test_retention.py
index a040433994..d0deff5a3b 100644
--- a/tests/rest/client/test_retention.py
+++ b/tests/rest/client/test_retention.py
@@ -163,6 +163,12 @@ class RetentionTestCase(unittest.HomeserverTestCase):
         self.assertEqual(filtered_events[0].event_id, valid_event_id, filtered_events)
 
     def _test_retention_event_purged(self, room_id, increment):
+        # Get the create event to, later, check that we can still access it.
+        message_handler = self.hs.get_message_handler()
+        create_event = self.get_success(
+            message_handler.get_room_data(self.user_id, room_id, EventTypes.Create)
+        )
+
         # Send a first event to the room. This is the event we'll want to be purged at the
         # end of the test.
         resp = self.helper.send(
@@ -201,6 +207,10 @@ class RetentionTestCase(unittest.HomeserverTestCase):
         valid_event = self.get_event(room_id, valid_event_id)
         self.assertEqual(valid_event.get("content", {}).get("body"), "2", valid_event)
 
+        # Check that we can still access state events that were sent before the event that
+        # has been purged.
+        self.get_event(room_id, create_event.event_id)
+
     def get_event(self, room_id, event_id, expected_code=200):
         url = "/_matrix/client/r0/rooms/%s/event/%s" % (room_id, event_id)
 
-- 
cgit 1.5.1
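
The effect of the change above can be restated as a single predicate (a
paraphrase for readability; the function and argument names are illustrative,
not code from the patch):

    def retention_allows(event, retention_policy, now_ms):
        # MSC1763: retention is only considered for non-state events, so state
        # events always pass this check.
        if event.is_state():
            return True

        max_lifetime = retention_policy.get("max_lifetime")
        if max_lifetime is None:
            return True

        # Non-state events older than max_lifetime are filtered out.
        return event.origin_server_ts >= now_ms - max_lifetime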


From f93e4b6a39c111d3e593e7fa05dca0caf368bb60 Mon Sep 17 00:00:00 2001
From: Brendan Abolivier 
Date: Wed, 6 Nov 2019 15:53:31 +0000
Subject: Changelog

---
 changelog.d/10.bugfix | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/10.bugfix

diff --git a/changelog.d/10.bugfix b/changelog.d/10.bugfix
new file mode 100644
index 0000000000..51f89f46dd
--- /dev/null
+++ b/changelog.d/10.bugfix
@@ -0,0 +1 @@
+Don't apply retention policy based filtering on state events.
-- 
cgit 1.5.1


From d6b7606e07a83c9bcea4e1bff49fae4b1a25de35 Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Tue, 12 Nov 2019 11:38:35 +0000
Subject: Create configurable ratelimiter for 3pid invites (#11)

---
 changelog.d/11.feature                      |  1 +
 docs/sample_config.yaml                     |  6 ++++++
 synapse/config/ratelimiting.py              |  9 +++++++++
 synapse/handlers/room_member.py             | 16 ++++++++--------
 tests/rest/client/test_room_access_rules.py |  9 +++++++++
 5 files changed, 33 insertions(+), 8 deletions(-)
 create mode 100644 changelog.d/11.feature

diff --git a/changelog.d/11.feature b/changelog.d/11.feature
new file mode 100644
index 0000000000..362e4b1efd
--- /dev/null
+++ b/changelog.d/11.feature
@@ -0,0 +1 @@
+Allow server admins to configure custom global rate-limiting for third-party invites.
\ No newline at end of file
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index 63051dd56f..b4713b687e 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -568,6 +568,8 @@ log_config: "CONFDIR/SERVERNAME.log.config"
 #   - one for login that ratelimits login requests based on the account the
 #     client is attempting to log into, based on the amount of failed login
 #     attempts for this account.
+#   - one that ratelimits third-party invite requests based on the account
+#     that's making the requests.
 #
 # The defaults are as shown below.
 #
@@ -589,6 +591,10 @@ log_config: "CONFDIR/SERVERNAME.log.config"
 #  failed_attempts:
 #    per_second: 0.17
 #    burst_count: 3
+#
+#rc_third_party_invite:
+#  per_second: 0.2
+#  burst_count: 10
 
 
 # Ratelimiting settings for incoming federation
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 5a9adac480..2a4fe43406 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -68,6 +68,9 @@ class RatelimitConfig(Config):
             )
 
         self.rc_registration = RateLimitConfig(config.get("rc_registration", {}))
+        self.rc_third_party_invite = RateLimitConfig(
+            config.get("rc_third_party_invite", {})
+        )
 
         rc_login_config = config.get("rc_login", {})
         self.rc_login_address = RateLimitConfig(rc_login_config.get("address", {}))
@@ -102,6 +105,8 @@ class RatelimitConfig(Config):
         #   - one for login that ratelimits login requests based on the account the
         #     client is attempting to log into, based on the amount of failed login
         #     attempts for this account.
+        #   - one that ratelimits third-party invite requests based on the account
+        #     that's making the requests.
         #
         # The defaults are as shown below.
         #
@@ -123,6 +128,10 @@ class RatelimitConfig(Config):
         #  failed_attempts:
         #    per_second: 0.17
         #    burst_count: 3
+        #
+        #rc_third_party_invite:
+        #  per_second: 0.2
+        #  burst_count: 10
 
 
         # Ratelimiting settings for incoming federation
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index e940e4183b..790aeba9f5 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -26,12 +26,11 @@ import synapse.server
 import synapse.types
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, Codes, ProxiedRequestError, SynapseError
+from synapse.api.ratelimiting import Ratelimiter
 from synapse.types import RoomID, UserID
 from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room, user_left_room
 
-from ._base import BaseHandler
-
 logger = logging.getLogger(__name__)
 
 id_server_scheme = "https://"
@@ -74,11 +73,7 @@ class RoomMemberHandler(object):
         self.rewrite_identity_server_urls = self.config.rewrite_identity_server_urls
         self._enable_lookup = hs.config.enable_3pid_lookup
         self.allow_per_room_profiles = self.config.allow_per_room_profiles
-
-        # This is only used to get at ratelimit function, and
-        # maybe_kick_guest_users. It's fine there are multiple of these as
-        # it doesn't store state.
-        self.base_handler = BaseHandler(hs)
+        self.ratelimiter = Ratelimiter()
 
     @abc.abstractmethod
     def _remote_join(self, requester, remote_room_hosts, room_id, user, content):
@@ -773,7 +768,12 @@ class RoomMemberHandler(object):
 
         # We need to rate limit *before* we send out any 3PID invites, so we
         # can't just rely on the standard ratelimiting of events.
-        yield self.base_handler.ratelimit(requester)
+        self.ratelimiter.ratelimit(
+            requester.user.to_string(), time_now_s=self.hs.clock.time(),
+            rate_hz=self.hs.config.rc_third_party_invite.per_second,
+            burst_count=self.hs.config.rc_third_party_invite.burst_count,
+            update=True,
+        )
 
         can_invite = yield self.third_party_event_rules.check_threepid_can_be_invited(
             medium, address, room_id,
diff --git a/tests/rest/client/test_room_access_rules.py b/tests/rest/client/test_room_access_rules.py
index 7d3ba0ee2a..13caea3b01 100644
--- a/tests/rest/client/test_room_access_rules.py
+++ b/tests/rest/client/test_room_access_rules.py
@@ -326,6 +326,12 @@ class RoomAccessTestCase(unittest.HomeserverTestCase):
             expect_code=200,
         )
 
+        # Disable the 3pid invite ratelimiter
+        burst = self.hs.config.rc_third_party_invite.burst_count
+        per_second = self.hs.config.rc_third_party_invite.per_second
+        self.hs.config.rc_third_party_invite.burst_count = 10
+        self.hs.config.rc_third_party_invite.per_second = 0.1
+
         # We can't send a 3PID invite to a room that already has two members.
         self.send_threepid_invite(
             address="test@allowed_domain",
@@ -354,6 +360,9 @@ class RoomAccessTestCase(unittest.HomeserverTestCase):
             expected_code=403,
         )
 
+        self.hs.config.rc_third_party_invite.burst_count = burst
+        self.hs.config.rc_third_party_invite.per_second = per_second
+
     def test_unrestricted(self):
         """Tests that, in unrestricted mode, we can invite whoever we want, but we can
         only change the power level of users that wouldn't be forbidden in restricted
-- 
cgit 1.5.1
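
With the defaults shown in the sample config (per_second: 0.2, burst_count: 10),
the new check behaves roughly as in the sketch below; only Ratelimiter.ratelimit()
and the config attributes come from the patch, the surrounding helper is
illustrative:

    from synapse.api.ratelimiting import Ratelimiter

    limiter = Ratelimiter()

    def check_third_party_invite(user_id, clock, config):
        # Raises a rate-limit error once the sender has spent their burst
        # allowance of invites; the allowance then refills at `per_second`.
        limiter.ratelimit(
            user_id,
            time_now_s=clock.time(),
            rate_hz=config.rc_third_party_invite.per_second,
            burst_count=config.rc_third_party_invite.burst_count,
            update=True,
        )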


From 28578e75683c7f75c4be71c530a10efe84f2671f Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Thu, 14 Nov 2019 14:22:58 +0000
Subject: Add a /user/:user_id/info servlet to give user deactivated/expired
 information (#12)

---
 changelog.d/12.feature                         |  1 +
 synapse/rest/client/v2_alpha/user_directory.py | 89 ++++++++++++++++++++++++++
 synapse/storage/_base.py                       |  9 +--
 3 files changed, 93 insertions(+), 6 deletions(-)
 create mode 100644 changelog.d/12.feature

diff --git a/changelog.d/12.feature b/changelog.d/12.feature
new file mode 100644
index 0000000000..8e6e7a28af
--- /dev/null
+++ b/changelog.d/12.feature
@@ -0,0 +1 @@
+Add `/user/:user_id/info` CS servlet to give user deactivated/expired information.
\ No newline at end of file
diff --git a/synapse/rest/client/v2_alpha/user_directory.py b/synapse/rest/client/v2_alpha/user_directory.py
index e3603f2998..b6f4d8b3f4 100644
--- a/synapse/rest/client/v2_alpha/user_directory.py
+++ b/synapse/rest/client/v2_alpha/user_directory.py
@@ -21,6 +21,7 @@ from twisted.internet import defer
 
 from synapse.api.errors import SynapseError
 from synapse.http.servlet import RestServlet, parse_json_object_from_request
+from synapse.types import UserID
 
 from ._base import client_patterns
 
@@ -93,5 +94,93 @@ class UserDirectorySearchRestServlet(RestServlet):
         defer.returnValue((200, results))
 
 
+class UserInfoServlet(RestServlet):
+    """
+    GET /user/{user_id}/info HTTP/1.1
+    """
+    PATTERNS = client_patterns(
+        "/user/(?P<user_id>[^/]*)/info$"
+    )
+
+    def __init__(self, hs):
+        super(UserInfoServlet, self).__init__()
+        self.hs = hs
+        self.auth = hs.get_auth()
+        self.store = hs.get_datastore()
+        self.notifier = hs.get_notifier()
+        self.clock = hs.get_clock()
+        self.transport_layer = hs.get_federation_transport_client()
+        registry = hs.get_federation_registry()
+
+        if not registry.query_handlers.get("user_info"):
+            registry.register_query_handler(
+                "user_info", self._on_federation_query
+            )
+
+    @defer.inlineCallbacks
+    def on_GET(self, request, user_id):
+        # Ensure the user is authenticated
+        yield self.auth.get_user_by_req(request, allow_guest=False)
+
+        user = UserID.from_string(user_id)
+        if not self.hs.is_mine(user):
+            # Attempt to make a federation request to the server that owns this user
+            args = {"user_id": user_id}
+            res = yield self.transport_layer.make_query(
+                user.domain, "user_info", args, retry_on_dns_fail=True,
+            )
+            defer.returnValue((200, res))
+
+        res = yield self._get_user_info(user_id)
+        defer.returnValue((200, res))
+
+    @defer.inlineCallbacks
+    def _on_federation_query(self, args):
+        """Called when a request for user information appears over federation
+
+        Args:
+            args (dict): Dictionary of query arguments provided by the request
+
+        Returns:
+            Deferred[dict]: Deactivation and expiration information for a given user
+        """
+        user_id = args.get("user_id")
+        if not user_id:
+            raise SynapseError(400, "user_id not provided")
+
+        user = UserID.from_string(user_id)
+        if not self.hs.is_mine(user):
+            raise SynapseError(400, "User is not hosted on this homeserver")
+
+        res = yield self._get_user_info(user_id)
+        defer.returnValue(res)
+
+    @defer.inlineCallbacks
+    def _get_user_info(self, user_id):
+        """Retrieve information about a given user
+
+        Args:
+            user_id (str): The User ID of a given user on this homeserver
+
+        Returns:
+            Deferred[dict]: Deactivation and expiration information for a given user
+        """
+        # Check whether user is deactivated
+        is_deactivated = yield self.store.get_user_deactivated_status(user_id)
+
+        # Check whether user is expired
+        expiration_ts = yield self.store.get_expiration_ts_for_user(user_id)
+        is_expired = (
+            expiration_ts is not None and self.clock.time_msec() >= expiration_ts
+        )
+
+        res = {
+            "expired": is_expired,
+            "deactivated": is_deactivated,
+        }
+        defer.returnValue(res)
+
+
 def register_servlets(hs, http_server):
     UserDirectorySearchRestServlet(hs).register(http_server)
+    UserInfoServlet(hs).register(http_server)
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 941c07fce5..537696547c 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -341,14 +341,11 @@ class SQLBaseStore(object):
                 expiration_ts,
             )
 
-        self._simple_insert_txn(
+        self._simple_upsert_txn(
             txn,
             "account_validity",
-            values={
-                "user_id": user_id,
-                "expiration_ts_ms": expiration_ts,
-                "email_sent": False,
-            },
+            keyvalues={"user_id": user_id, },
+            values={"expiration_ts_ms": expiration_ts, "email_sent": False, },
         )
 
     def start_profiling(self):
-- 
cgit 1.5.1
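
For a local user the new servlet answers the lookup directly; for a remote user
it relays the same "user_info" query over federation. A request/response pair
might look like the following (the URL prefix depends on client_patterns() and
may differ; the body shape matches _get_user_info() above):

    GET /_matrix/client/unstable/user/@kermit:example.com/info HTTP/1.1
    Authorization: Bearer <access_token>

    HTTP/1.1 200 OK
    Content-Type: application/json

    {"expired": false, "deactivated": false}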


From c446f59047136ec3f5c4afda4acaa2871443dba0 Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Thu, 14 Nov 2019 18:42:55 +0000
Subject: Hide expired users from user directory, optionally show on renewal
 (#13)

---
 changelog.d/13.feature                      |   1 +
 synapse/handlers/account_validity.py        |  32 +++++++
 synapse/storage/registration.py             |  23 +++++
 tests/rest/client/v2_alpha/test_register.py | 135 ++++++++++++++++++++++++++++
 4 files changed, 191 insertions(+)
 create mode 100644 changelog.d/13.feature

diff --git a/changelog.d/13.feature b/changelog.d/13.feature
new file mode 100644
index 0000000000..c2d2e93abf
--- /dev/null
+++ b/changelog.d/13.feature
@@ -0,0 +1 @@
+Hide expired users from the user directory, and optionally re-add them on renewal.
\ No newline at end of file
diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py
index 396f0059f7..947237d7da 100644
--- a/synapse/handlers/account_validity.py
+++ b/synapse/handlers/account_validity.py
@@ -42,6 +42,8 @@ class AccountValidityHandler(object):
         self.clock = self.hs.get_clock()
 
         self._account_validity = self.hs.config.account_validity
+        self._show_users_in_user_directory = self.hs.config.show_users_in_user_directory
+        self.profile_handler = self.hs.get_profile_handler()
 
         if self._account_validity.renew_by_email_enabled and load_jinja2_templates:
             # Don't do email-specific configuration if renewal by email is disabled.
@@ -74,6 +76,12 @@ class AccountValidityHandler(object):
                 30 * 60 * 1000,
             )
 
+        # Check every hour to remove expired users from the user directory
+        self.clock.looping_call(
+            self._mark_expired_users_as_inactive,
+            60 * 60 * 1000,
+        )
+
     @defer.inlineCallbacks
     def send_renewal_emails(self):
         """Gets the list of users whose account is expiring in the amount of time
@@ -261,4 +269,28 @@ class AccountValidityHandler(object):
             email_sent=email_sent,
         )
 
+        # Check if renewed users should be reintroduced to the user directory
+        if self._show_users_in_user_directory:
+            # Show the user in the directory again by setting them to active
+            yield self.profile_handler.set_active(UserID.from_string(user_id), True, True)
+
         defer.returnValue(expiration_ts)
+
+    @defer.inlineCallbacks
+    def _mark_expired_users_as_inactive(self):
+        """Iterate over expired users. Mark them as inactive in order to hide them from the
+        user directory.
+
+        Returns:
+            Deferred
+        """
+        # Get expired users
+        expired_user_ids = yield self.store.get_expired_users()
+        expired_users = [
+            UserID.from_string(user_id)
+            for user_id in expired_user_ids
+        ]
+
+        # Mark each one as non-active
+        for user in expired_users:
+            yield self.profile_handler.set_active(user, False, True)
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index 0b3c656e90..028848cf89 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -151,6 +151,29 @@ class RegistrationWorkerStore(SQLBaseStore):
             set_account_validity_for_user_txn,
         )
 
+    @defer.inlineCallbacks
+    def get_expired_users(self):
+        """Get IDs of all expired users
+
+        Returns:
+            Deferred[list[str]]: List of expired user IDs
+        """
+        def get_expired_users_txn(txn, now_ms):
+            sql = """
+                SELECT user_id from account_validity
+                WHERE expiration_ts_ms <= ?
+            """
+            txn.execute(sql, (now_ms,))
+            rows = txn.fetchall()
+            return [row[0] for row in rows]
+
+        res = yield self.runInteraction(
+            "get_expired_users",
+            get_expired_users_txn,
+            self.clock.time_msec(),
+        )
+        defer.returnValue(res)
+
     @defer.inlineCallbacks
     def set_renewal_token_for_user(self, user_id, renewal_token):
         """Defines a renewal token for a given user.
diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py
index af1e600591..a5c7aaa9c0 100644
--- a/tests/rest/client/v2_alpha/test_register.py
+++ b/tests/rest/client/v2_alpha/test_register.py
@@ -352,6 +352,141 @@ class AccountValidityTestCase(unittest.HomeserverTestCase):
         )
 
 
+class AccountValidityUserDirectoryTestCase(unittest.HomeserverTestCase):
+
+    servlets = [
+        synapse.rest.client.v1.profile.register_servlets,
+        synapse.rest.client.v1.room.register_servlets,
+        synapse.rest.client.v2_alpha.user_directory.register_servlets,
+        login.register_servlets,
+        register.register_servlets,
+        synapse.rest.admin.register_servlets_for_client_rest_resource,
+        account_validity.register_servlets,
+    ]
+
+    def make_homeserver(self, reactor, clock):
+        config = self.default_config()
+
+        # Set accounts to expire after a week
+        config["enable_registration"] = True
+        config["account_validity"] = {
+            "enabled": True,
+            "period": 604800000,  # Time in ms for 1 week
+        }
+        config["replicate_user_profiles_to"] = "test.is"
+
+        # Mock homeserver requests to an identity server
+        mock_http_client = Mock(spec=[
+            "post_json_get_json",
+        ])
+        mock_http_client.post_json_get_json.return_value = defer.succeed((200, "{}"))
+
+        self.hs = self.setup_test_homeserver(
+            config=config,
+            simple_http_client=mock_http_client,
+        )
+
+        return self.hs
+
+    def test_expired_user_in_directory(self):
+        """Test that an expired user is hidden in the user directory"""
+        # Create an admin user to search the user directory
+        admin_id = self.register_user("admin", "adminpassword", admin=True)
+        admin_tok = self.login("admin", "adminpassword")
+
+        # Ensure the admin never expires
+        url = "/_matrix/client/unstable/admin/account_validity/validity"
+        params = {
+            "user_id": admin_id,
+            "expiration_ts": 999999999999,
+            "enable_renewal_emails": False,
+        }
+        request_data = json.dumps(params)
+        request, channel = self.make_request(
+            b"POST", url, request_data, access_token=admin_tok
+        )
+        self.render(request)
+        self.assertEquals(channel.result["code"], b"200", channel.result)
+
+        # Create a user to expire
+        username = "kermit"
+        user_id = self.register_user(username, "monkey")
+        self.login(username, "monkey")
+
+        self.pump(1000)
+        self.reactor.advance(1000)
+        self.pump()
+
+        # Expire the user
+        url = "/_matrix/client/unstable/admin/account_validity/validity"
+        params = {
+            "user_id": user_id,
+            "expiration_ts": 0,
+            "enable_renewal_emails": False,
+        }
+        request_data = json.dumps(params)
+        request, channel = self.make_request(
+            b"POST", url, request_data, access_token=admin_tok
+        )
+        self.render(request)
+        self.assertEquals(channel.result["code"], b"200", channel.result)
+
+        # Wait for the background job to run which hides expired users in the directory
+        self.pump(60 * 60 * 1000)
+
+        # Mock the homeserver's HTTP client
+        post_json = self.hs.get_simple_http_client().post_json_get_json
+
+        # Check if the homeserver has replicated the user's profile to the identity server
+        self.assertNotEquals(post_json.call_args, None, post_json.call_args)
+        payload = post_json.call_args[0][1]
+        batch = payload.get("batch")
+        self.assertNotEquals(batch, None, batch)
+        self.assertEquals(len(batch), 1, batch)
+        replicated_user_id = list(batch.keys())[0]
+        self.assertEquals(replicated_user_id, user_id, replicated_user_id)
+
+        # There was replicated information about our user
+        # Check that it's None, signifying that the user should be removed from the user
+        # directory because they were expired
+        replicated_content = batch[user_id]
+        self.assertIsNone(replicated_content)
+
+        # Now renew the user, and check they get replicated again to the identity server
+        url = "/_matrix/client/unstable/admin/account_validity/validity"
+        params = {
+            "user_id": user_id,
+            "expiration_ts": 99999999999,
+            "enable_renewal_emails": False,
+        }
+        request_data = json.dumps(params)
+        request, channel = self.make_request(
+            b"POST", url, request_data, access_token=admin_tok
+        )
+        self.render(request)
+        self.assertEquals(channel.result["code"], b"200", channel.result)
+
+        self.pump(10)
+        self.reactor.advance(10)
+        self.pump()
+
+        # Check if the homeserver has replicated the user's profile to the identity server
+        post_json = self.hs.get_simple_http_client().post_json_get_json
+        self.assertNotEquals(post_json.call_args, None, post_json.call_args)
+        payload = post_json.call_args[0][1]
+        batch = payload.get("batch")
+        self.assertNotEquals(batch, None, batch)
+        self.assertEquals(len(batch), 1, batch)
+        replicated_user_id = list(batch.keys())[0]
+        self.assertEquals(replicated_user_id, user_id, replicated_user_id)
+
+        # There was replicated information about our user
+        # Check that it's not None, signifying that the user is back in the user
+        # directory
+        replicated_content = batch[user_id]
+        self.assertIsNotNone(replicated_content)
+
+
 class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase):
 
     servlets = [
-- 
cgit 1.5.1
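
For illustration, the expiry check above could be wired up as a periodic job on
the main process. The method name, the hook point and the hourly interval in
this sketch are assumptions (the test above exercises the job by pumping one
hour); only get_expired_users() and set_active() come from this patch:

    # Hypothetical wiring; everything except get_expired_users/set_active is assumed.
    from twisted.internet import defer

    from synapse.types import UserID

    EXPIRED_USERS_CHECK_MS = 60 * 60 * 1000  # hourly

    class AccountValidityExpiryJob(object):
        def __init__(self, hs):
            self.store = hs.get_datastore()
            self.profile_handler = hs.get_profile_handler()
            hs.get_clock().looping_call(
                self._mark_expired_users_as_inactive, EXPIRED_USERS_CHECK_MS,
            )

        @defer.inlineCallbacks
        def _mark_expired_users_as_inactive(self):
            # Fetch the IDs of expired users and hide each of them from the
            # user directory, exactly as the handler code above does.
            expired_user_ids = yield self.store.get_expired_users()
            for user_id in expired_user_ids:
                user = UserID.from_string(user_id)
                yield self.profile_handler.set_active(user, False, True)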


From f7b12c955b67d0701dc693a47ac8402b344d3d2c Mon Sep 17 00:00:00 2001
From: Brendan Abolivier 
Date: Thu, 28 Nov 2019 19:26:13 +0000
Subject: Discard retention policies when retrieving state

Purge jobs don't delete the latest event in a room, in order to keep the forward extremity and not break the room. On the other hand, get_state_events, when given an at_token argument, calls filter_events_for_client to know whether the user can see the event that matches that (sync) token. That function uses the retention policies of the events it's given to filter out those that are too old from a client's view.

Some clients, such as Riot, when loading a room, request the list of members for the latest sync token they know about, and get confused to the point of refusing to send any message if the server tells them that it can't get that information. This can happen very easily with the message retention feature turned on in a room with low activity, where the last event sent becomes too old according to the room's retention policy.

An easy and clean fix for that issue is to discard the room's retention policies when retrieving state.
---
 synapse/handlers/message.py |  2 +-
 synapse/visibility.py       | 20 ++++++++++++++------
 2 files changed, 15 insertions(+), 7 deletions(-)

diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index d75fb2a078..eb750d65d8 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -130,7 +130,7 @@ class MessageHandler(object):
                 raise NotFoundError("Can't find event for token %s" % (at_token, ))
 
             visible_events = yield filter_events_for_client(
-                self.store, user_id, last_events,
+                self.store, user_id, last_events, apply_retention_policies=False
             )
 
             event = last_events[0]
diff --git a/synapse/visibility.py b/synapse/visibility.py
index 79aa2ee1e2..09d8334b26 100644
--- a/synapse/visibility.py
+++ b/synapse/visibility.py
@@ -48,7 +48,8 @@ MEMBERSHIP_PRIORITY = (
 
 @defer.inlineCallbacks
 def filter_events_for_client(store, user_id, events, is_peeking=False,
-                             always_include_ids=frozenset()):
+                             always_include_ids=frozenset(),
+                             apply_retention_policies=True):
     """
     Check which events a user is allowed to see
 
@@ -63,6 +64,10 @@ def filter_events_for_client(store, user_id, events, is_peeking=False,
             events
         always_include_ids (set(event_id)): set of event ids to specifically
             include (unless sender is ignored)
+        apply_retention_policies (bool): Whether to filter out events that are older than
+            allowed by the room's retention policy. Useful when this function is called
+            to e.g. check whether a user should be allowed to see the state at a given
+            event rather than to know if it should send an event to a user's client(s).
 
     Returns:
         Deferred[list[synapse.events.EventBase]]
@@ -92,11 +97,14 @@ def filter_events_for_client(store, user_id, events, is_peeking=False,
 
     erased_senders = yield store.are_users_erased((e.sender for e in events))
 
-    room_ids = set(e.room_id for e in events)
-    retention_policies = {}
+    if apply_retention_policies:
+        room_ids = set(e.room_id for e in events)
+        retention_policies = {}
 
-    for room_id in room_ids:
-        retention_policies[room_id] = yield store.get_retention_policy_for_room(room_id)
+        for room_id in room_ids:
+            retention_policies[room_id] = (
+                yield store.get_retention_policy_for_room(room_id)
+            )
 
     def allowed(event):
         """
@@ -117,7 +125,7 @@ def filter_events_for_client(store, user_id, events, is_peeking=False,
 
         # Don't try to apply the room's retention policy if the event is a state event, as
         # MSC1763 states that retention is only considered for non-state events.
-        if not event.is_state():
+        if apply_retention_policies and not event.is_state():
             retention_policy = retention_policies[event.room_id]
             max_lifetime = retention_policy.get("max_lifetime")
 
-- 
cgit 1.5.1
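
A minimal sketch of a state-retrieval caller using the new keyword argument
(the surrounding function is illustrative; only apply_retention_policies comes
from this patch):

    from twisted.internet import defer

    from synapse.visibility import filter_events_for_client

    @defer.inlineCallbacks
    def visible_state_anchor_events(store, user_id, last_events):
        # Skip retention policies here: an old forward extremity kept by the
        # purge job should not hide the room's state from the requesting client.
        visible_events = yield filter_events_for_client(
            store, user_id, last_events, apply_retention_policies=False,
        )
        defer.returnValue(visible_events)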


From 9cd241d0702e5059a8b9ba88bddd37618a87d4ea Mon Sep 17 00:00:00 2001
From: Brendan Abolivier 
Date: Thu, 28 Nov 2019 19:32:49 +0000
Subject: Changelog

---
 changelog.d/6436.bugfix | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/6436.bugfix

diff --git a/changelog.d/6436.bugfix b/changelog.d/6436.bugfix
new file mode 100644
index 0000000000..954a4e1d84
--- /dev/null
+++ b/changelog.d/6436.bugfix
@@ -0,0 +1 @@
+Fix a bug where a room could become unusable with a low retention policy and low activity.
-- 
cgit 1.5.1


From b69732705d4b2bf4cb467da7df7f283905c7d7eb Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Mon, 2 Dec 2019 15:10:23 +0000
Subject: Capitalise letters after a - in new user displaynames (#14)

---
 changelog.d/14.feature                   |  1 +
 synapse/rest/client/v2_alpha/register.py | 74 +++++++++++++++++++++++++-------
 tests/handlers/test_register.py          | 28 ++++++++++++
 3 files changed, 87 insertions(+), 16 deletions(-)
 create mode 100644 changelog.d/14.feature

diff --git a/changelog.d/14.feature b/changelog.d/14.feature
new file mode 100644
index 0000000000..020d0bac1e
--- /dev/null
+++ b/changelog.d/14.feature
@@ -0,0 +1 @@
+User displaynames now have capitalised letters after - symbols.
\ No newline at end of file
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index 0958a7fc58..dade6530f4 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -19,7 +19,6 @@ import hmac
 import logging
 import re
 from hashlib import sha1
-from string import capwords
 
 from six import string_types
 
@@ -486,21 +485,8 @@ class RegisterRestServlet(RestServlet):
                     if self.hs.config.register_just_use_email_for_display_name:
                         desired_display_name = address
                     else:
-                        # XXX: a nasty heuristic to turn an email address into
-                        # a displayname, as part of register_mxid_from_3pid
-                        parts = address.replace('.', ' ').split('@')
-                        org_parts = parts[1].split(' ')
-
-                        if org_parts[-2] == "matrix" and org_parts[-1] == "org":
-                            org = "Tchap Admin"
-                        elif org_parts[-2] == "gouv" and org_parts[-1] == "fr":
-                            org = org_parts[-3] if len(org_parts) > 2 else org_parts[-2]
-                        else:
-                            org = org_parts[-2]
-
-                        desired_display_name = (
-                            capwords(parts[0]) + " [" + capwords(org) + "]"
-                        )
+                        # Custom mapping between email address and display name
+                        desired_display_name = self._map_email_to_displayname(address)
                 elif (
                     self.hs.config.register_mxid_from_3pid == 'msisdn' and
                     LoginType.MSISDN in auth_result
@@ -743,6 +729,62 @@ class RegisterRestServlet(RestServlet):
         }))
 
 
+def cap(name):
+    """Capitalise parts of a name containing different words, including those
+    separated by hyphens.
+    For example, 'john-doe smith' becomes 'John-Doe Smith'.
+
+    Args:
+        name (str): The name to parse
+    """
+    if not name:
+        return name
+
+    # Split the name by whitespace then hyphens, capitalizing each part then
+    # joining it back together.
+    capitalized_name = " ".join(
+        "-".join(part.capitalize() for part in space_part.split("-"))
+        for space_part in name.split()
+    )
+    return capitalized_name
+
+
+def _map_email_to_displayname(address):
+    """Custom mapping from an email address to a user displayname
+
+    Args:
+        address (str): The email address to process
+    Returns:
+        str: The new displayname
+    """
+    # Split the part before and after the @ in the email.
+    # Replace all . with spaces in the first part
+    parts = address.replace('.', ' ').split('@')
+
+    # Figure out which org this email address belongs to
+    org_parts = parts[1].split(' ')
+
+    # If this is a ...matrix.org email, mark them as an Admin
+    if org_parts[-2] == "matrix" and org_parts[-1] == "org":
+        org = "Tchap Admin"
+
+    # If this is a ...gouv.fr address, set the org to whatever is before
+    # gouv.fr. If there isn't anything (a @gouv.fr email) simply mark their
+    # org as "gouv"
+    elif org_parts[-2] == "gouv" and org_parts[-1] == "fr":
+        org = org_parts[-3] if len(org_parts) > 2 else org_parts[-2]
+
+    # Otherwise, mark their org as the email's second-level domain name
+    else:
+        org = org_parts[-2]
+
+    desired_display_name = (
+        cap(parts[0]) + " [" + cap(org) + "]"
+    )
+
+    return desired_display_name
+
+
 def register_servlets(hs, http_server):
     EmailRegisterRequestTokenRestServlet(hs).register(http_server)
     MsisdnRegisterRequestTokenRestServlet(hs).register(http_server)
diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py
index 5ffba2ca7a..b2aa5c2478 100644
--- a/tests/handlers/test_register.py
+++ b/tests/handlers/test_register.py
@@ -20,6 +20,7 @@ from twisted.internet import defer
 from synapse.api.constants import UserTypes
 from synapse.api.errors import ResourceLimitError, SynapseError
 from synapse.handlers.register import RegistrationHandler
+from synapse.rest.client.v2_alpha.register import _map_email_to_displayname
 from synapse.types import RoomAlias, UserID, create_requester
 
 from .. import unittest
@@ -235,3 +236,30 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
             self.handler.register(localpart=invalid_user_id),
             SynapseError
         )
+
+    def test_email_to_displayname_mapping(self):
+        """Test that custom emails are mapped to new user displaynames correctly"""
+        self._check_mapping(
+            "jack-phillips.rivers@big-org.com",
+            "Jack-Phillips Rivers [Big-Org]",
+        )
+
+        self._check_mapping(
+            "bob.jones@matrix.org",
+            "Bob Jones [Tchap Admin]",
+        )
+
+        self._check_mapping(
+            "bob-jones.blabla@gouv.fr",
+            "Bob-Jones Blabla [Gouv]",
+        )
+
+        # Multibyte unicode characters
+        self._check_mapping(
+            u"j\u030a\u0065an-poppy.seed@example.com",
+            u"J\u030a\u0065an-Poppy Seed [Example]",
+        )
+
+    def _check_mapping(self, i, expected):
+        result = _map_email_to_displayname(i)
+        self.assertEqual(result, expected)
-- 
cgit 1.5.1
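
The helpers added above behave as follows; the inputs and expected outputs are
taken from the tests in this commit:

    cap("jack-phillips rivers")
    # -> "Jack-Phillips Rivers"

    _map_email_to_displayname("jack-phillips.rivers@big-org.com")
    # -> "Jack-Phillips Rivers [Big-Org]"

    _map_email_to_displayname("bob.jones@matrix.org")
    # -> "Bob Jones [Tchap Admin]"    (matrix.org addresses are marked as admins)

    _map_email_to_displayname("bob-jones.blabla@gouv.fr")
    # -> "Bob-Jones Blabla [Gouv]"    (a bare @gouv.fr address falls back to "gouv")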


From 32b7e20d4143cf9a0b0263eb04d610ee6e3a1883 Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Tue, 3 Dec 2019 10:49:29 +0000
Subject: Fix scripts/generate_signing_key.py import statement (#15)

---
 changelog.d/15.misc             | 1 +
 scripts/generate_signing_key.py | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 changelog.d/15.misc

diff --git a/changelog.d/15.misc b/changelog.d/15.misc
new file mode 100644
index 0000000000..4cc4a5175f
--- /dev/null
+++ b/changelog.d/15.misc
@@ -0,0 +1 @@
+Fix the import ordering in `scripts/generate_signing_key.py`.
diff --git a/scripts/generate_signing_key.py b/scripts/generate_signing_key.py
index ba3ba97395..36e9140b50 100755
--- a/scripts/generate_signing_key.py
+++ b/scripts/generate_signing_key.py
@@ -16,7 +16,7 @@
 import argparse
 import sys
 
-from signedjson.key import write_signing_keys, generate_signing_key
+from signedjson.key import generate_signing_key, write_signing_keys
 
 from synapse.util.stringutils import random_string
 
-- 
cgit 1.5.1


From d49933470d5aa74dcb3620c40db1563c9cba27e5 Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Thu, 5 Dec 2019 11:55:12 +0000
Subject: Add limit_profile_requests_to_known_users option (#18)

---
 changelog.d/18.feature               |  1 +
 docs/sample_config.yaml              |  7 +++++++
 synapse/config/server.py             | 13 +++++++++++++
 synapse/handlers/profile.py          |  4 ++--
 tests/rest/client/v1/test_profile.py |  1 +
 5 files changed, 24 insertions(+), 2 deletions(-)
 create mode 100644 changelog.d/18.feature

diff --git a/changelog.d/18.feature b/changelog.d/18.feature
new file mode 100644
index 0000000000..f5aa29a6e8
--- /dev/null
+++ b/changelog.d/18.feature
@@ -0,0 +1 @@
+Add option `limit_profile_requests_to_known_users` to control whether requesting a user's profile over the Client-Server API requires the requester to share a room with that user, rather than tying this restriction to `require_auth_for_profile_requests`.
\ No newline at end of file
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index b4713b687e..7531e3aef8 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -77,6 +77,13 @@ pid_file: DATADIR/homeserver.pid
 #
 #require_auth_for_profile_requests: true
 
+# Whether to require a user to share a room with another user in order
+# to retrieve their profile information. Only checked on Client-Server
+# requests. Profile requests from other servers should be checked by the
+# requesting server. Defaults to 'false'.
+#
+# limit_profile_requests_to_known_users: true
+
 # If set to 'false', requires authentication to access the server's public rooms
 # directory through the client API. Defaults to 'true'.
 #
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 4729b30b36..2ef1d940c4 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -84,6 +84,12 @@ class ServerConfig(Config):
             "require_auth_for_profile_requests", False,
         )
 
+        # Whether to require sharing a room with a user to retrieve their
+        # profile data
+        self.limit_profile_requests_to_known_users = config.get(
+            "limit_profile_requests_to_known_users", False,
+        )
+
         if "restrict_public_rooms_to_local_users" in config and (
             "allow_public_rooms_without_auth" in config
             or "allow_public_rooms_over_federation" in config
@@ -536,6 +542,13 @@ class ServerConfig(Config):
         #
         #require_auth_for_profile_requests: true
 
+        # Whether to require a user to share a room with another user in order
+        # to retrieve their profile information. Only checked on Client-Server
+        # requests. Profile requests from other servers should be checked by the
+        # requesting server. Defaults to 'false'.
+        #
+        # limit_profile_requests_to_known_users: true
+
         # If set to 'false', requires authentication to access the server's public rooms
         # directory through the client API. Defaults to 'true'.
         #
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 5c493b8d63..5e92c65492 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -441,7 +441,7 @@ class BaseProfileHandler(BaseHandler):
     @defer.inlineCallbacks
     def check_profile_query_allowed(self, target_user, requester=None):
         """Checks whether a profile query is allowed. If the
-        'require_auth_for_profile_requests' config flag is set to True and a
+        'limit_profile_requests_to_known_users' config flag is set to True and a
         'requester' is provided, the query is only allowed if the two users
         share a room.
 
@@ -459,7 +459,7 @@ class BaseProfileHandler(BaseHandler):
         # be None when this function is called outside of a profile query, e.g.
         # when building a membership event. In this case, we must allow the
         # lookup.
-        if not self.hs.config.require_auth_for_profile_requests or not requester:
+        if not self.hs.config.limit_profile_requests_to_known_users or not requester:
             return
 
         # Always allow the user to query their own profile.
diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py
index d932dd3c06..6958430608 100644
--- a/tests/rest/client/v1/test_profile.py
+++ b/tests/rest/client/v1/test_profile.py
@@ -230,6 +230,7 @@ class ProfilesRestrictedTestCase(unittest.HomeserverTestCase):
 
         config = self.default_config()
         config["require_auth_for_profile_requests"] = True
+        config["limit_profile_requests_to_known_users"] = True
         self.hs = self.setup_test_homeserver(config=config)
 
         return self.hs
-- 
cgit 1.5.1
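
As a quick usage sketch, a homeserver test can turn on the new restriction
alongside the existing auth requirement, mirroring the change to
ProfilesRestrictedTestCase above:

    # Inside HomeserverTestCase.make_homeserver(); both keys are the real
    # config options added/used by this patch.
    config = self.default_config()
    config["require_auth_for_profile_requests"] = True
    config["limit_profile_requests_to_known_users"] = True
    self.hs = self.setup_test_homeserver(config=config)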


From c7e206b69b60566bcf740f19a8a3c695df75dcc0 Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Mon, 9 Dec 2019 15:17:07 +0000
Subject: Add some flaky sytests to a sytest-blacklist (#17)

---
 MANIFEST.in         |  1 +
 changelog.d/17.misc |  1 +
 sytest-blacklist    | 14 ++++++++++++++
 3 files changed, 16 insertions(+)
 create mode 100644 changelog.d/17.misc
 create mode 100644 sytest-blacklist

diff --git a/MANIFEST.in b/MANIFEST.in
index 07cc3c3be3..737d9d3710 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,5 @@
 include synctl
+include sytest-blacklist
 include LICENSE
 include VERSION
 include *.rst
diff --git a/changelog.d/17.misc b/changelog.d/17.misc
new file mode 100644
index 0000000000..58120ab5c7
--- /dev/null
+++ b/changelog.d/17.misc
@@ -0,0 +1 @@
+Blacklist some flaky sytests until they're fixed.
\ No newline at end of file
diff --git a/sytest-blacklist b/sytest-blacklist
new file mode 100644
index 0000000000..9b56c653e7
--- /dev/null
+++ b/sytest-blacklist
@@ -0,0 +1,14 @@
+# flaky test
+If remote user leaves room we no longer receive device updates
+
+# flaky test
+Can re-join room if re-invited
+
+# flaky test
+Forgotten room messages cannot be paginated
+
+# flaky test
+Local device key changes get to remote servers
+
+# flaky test
+Old leaves are present in gapped incremental syncs
-- 
cgit 1.5.1


From f4d1ab0027968621c6e6f1c8cc8dfc3ab8cd4b59 Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Thu, 12 Dec 2019 13:51:25 +0000
Subject: Add the ability to restrict max avatar filesize and content-type
 (#19)

---
 changelog.d/19.feature            |  1 +
 docs/sample_config.yaml           | 24 ++++++++++++++++++++
 synapse/config/repository.py      | 30 +++++++++++++++++++++++++
 synapse/handlers/profile.py       | 46 +++++++++++++++++++++++++++++++++++++++
 synapse/rest/client/v1/profile.py |  5 +++--
 5 files changed, 104 insertions(+), 2 deletions(-)
 create mode 100644 changelog.d/19.feature

diff --git a/changelog.d/19.feature b/changelog.d/19.feature
new file mode 100644
index 0000000000..95a44a4a89
--- /dev/null
+++ b/changelog.d/19.feature
@@ -0,0 +1 @@
+Add `max_avatar_size` and `allowed_avatar_mimetypes` to restrict the size of user avatars and their file type respectively.
\ No newline at end of file
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index 7531e3aef8..37c53e4f69 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -663,6 +663,30 @@ uploads_path: "DATADIR/uploads"
 #
 #max_upload_size: 10M
 
+# The largest allowed size for a user avatar. If not defined, no
+# restriction will be imposed.
+#
+# Note that this only applies when an avatar is changed globally.
+# Per-room avatar changes are not affected. See allow_per_room_profiles
+# for disabling that functionality.
+#
+# Note that user avatar changes will not work if this is set without
+# using Synapse's local media repo.
+#
+#max_avatar_size: 10M
+
+# Allowed mimetypes for a user avatar. If not defined, no restriction will
+# be imposed.
+#
+# Note that this only applies when an avatar is changed globally.
+# Per-room avatar changes are not affected. See allow_per_room_profiles
+# for disabling that functionality.
+#
+# Note that user avatar changes will not work if this is set without
+# using Synapse's local media repo.
+#
+#allowed_avatar_mimetypes: ["image/png", "image/jpeg", "image/gif"]
+
 # Maximum number of pixels that will be thumbnailed
 #
 #max_image_pixels: 32M
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index fbfcecc240..2abede409a 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -111,6 +111,12 @@ class ContentRepositoryConfig(Config):
         self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M"))
         self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M"))
 
+        self.max_avatar_size = config.get("max_avatar_size")
+        if self.max_avatar_size:
+            self.max_avatar_size = self.parse_size(self.max_avatar_size)
+
+        self.allowed_avatar_mimetypes = config.get("allowed_avatar_mimetypes", [])
+
         self.media_store_path = self.ensure_directory(config["media_store_path"])
 
         backup_media_store_path = config.get("backup_media_store_path")
@@ -247,6 +253,30 @@ class ContentRepositoryConfig(Config):
         #
         #max_upload_size: 10M
 
+        # The largest allowed size for a user avatar. If not defined, no
+        # restriction will be imposed.
+        #
+        # Note that this only applies when an avatar is changed globally.
+        # Per-room avatar changes are not affected. See allow_per_room_profiles
+        # for disabling that functionality.
+        #
+        # Note that user avatar changes will not work if this is set without
+        # using Synapse's local media repo.
+        #
+        #max_avatar_size: 10M
+
+        # Allowed mimetypes for a user avatar. If not defined, no restriction will
+        # be imposed.
+        #
+        # Note that this only applies when an avatar is changed globally.
+        # Per-room avatar changes are not affected. See allow_per_room_profiles
+        # for disabling that functionality.
+        #
+        # Note that user avatar changes will not work if this is set without
+        # using Synapse's local media repo.
+        #
+        #allowed_avatar_mimetypes: ["image/png", "image/jpeg", "image/gif"]
+
         # Maximum number of pixels that will be thumbnailed
         #
         #max_image_pixels: 32M
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 5e92c65492..584f804986 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -63,6 +63,9 @@ class BaseProfileHandler(BaseHandler):
 
         self.http_client = hs.get_simple_http_client()
 
+        self.max_avatar_size = hs.config.max_avatar_size
+        self.allowed_avatar_mimetypes = hs.config.allowed_avatar_mimetypes
+
         if hs.config.worker_app is None:
             self.clock.looping_call(
                 self._start_update_remote_profile_cache, self.PROFILE_UPDATE_MS,
@@ -368,6 +371,35 @@ class BaseProfileHandler(BaseHandler):
                 400, "Avatar URL is too long (max %i)" % (MAX_AVATAR_URL_LEN, ),
             )
 
+        # Enforce a max avatar size if one is defined
+        if self.max_avatar_size or self.allowed_avatar_mimetypes:
+            media_id = self._validate_and_parse_media_id_from_avatar_url(new_avatar_url)
+
+            # Check that this media exists locally
+            media_info = yield self.store.get_local_media(media_id)
+            if not media_info:
+                raise SynapseError(
+                    400, "Unknown media id supplied", errcode=Codes.NOT_FOUND
+                )
+
+            # Ensure avatar does not exceed max allowed avatar size
+            media_size = media_info["media_length"]
+            if self.max_avatar_size and media_size > self.max_avatar_size:
+                raise SynapseError(
+                    400, "Avatars must be less than %s bytes in size" %
+                    (self.max_avatar_size,), errcode=Codes.TOO_LARGE,
+                )
+
+            # Ensure the avatar's file type is allowed
+            if (
+                self.allowed_avatar_mimetypes
+                and media_info["media_type"] not in self.allowed_avatar_mimetypes
+            ):
+                raise SynapseError(
+                    400, "Avatar file type '%s' not allowed" %
+                    media_info["media_type"],
+                )
+
         yield self.store.set_profile_avatar_url(
             target_user.localpart, new_avatar_url, new_batchnum,
         )
@@ -383,6 +415,20 @@ class BaseProfileHandler(BaseHandler):
         # start a profile replication push
         run_in_background(self._replicate_profiles)
 
+    def _validate_and_parse_media_id_from_avatar_url(self, mxc):
+        """Validate and parse a provided avatar url and return the local media id
+
+        Args:
+            mxc (str): A mxc URL
+
+        Returns:
+            str: The ID of the media
+        """
+        avatar_pieces = mxc.split("/")
+        if len(avatar_pieces) != 4 or avatar_pieces[0] != "mxc:":
+            raise SynapseError(400, "Invalid avatar URL '%s' supplied" % mxc)
+        return avatar_pieces[-1]
+
     @defer.inlineCallbacks
     def on_profile_query(self, args):
         user = UserID.from_string(args["user_id"])
diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py
index 064bcddaeb..34361697df 100644
--- a/synapse/rest/client/v1/profile.py
+++ b/synapse/rest/client/v1/profile.py
@@ -134,12 +134,13 @@ class ProfileAvatarURLRestServlet(RestServlet):
 
         content = parse_json_object_from_request(request)
         try:
-            new_name = content["avatar_url"]
+            new_avatar_url = content["avatar_url"]
         except Exception:
             defer.returnValue((400, "Unable to parse name"))
 
         yield self.profile_handler.set_avatar_url(
-            user, requester, new_name, is_admin)
+            user, requester, new_avatar_url, is_admin
+        )
 
         if self.hs.config.shadow_server:
             shadow_user = UserID(
-- 
cgit 1.5.1
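
To illustrate the media id extraction that the new size and mimetype checks
rely on, an mxc URL splits into exactly four "/"-separated pieces, the last of
which is the local media id. A standalone sketch of the helper above (raising
ValueError here rather than SynapseError to keep the example self-contained):

    def media_id_from_mxc(mxc):
        # "mxc://example.org/JWEIFJgwEIhweiWJE".split("/")
        # -> ["mxc:", "", "example.org", "JWEIFJgwEIhweiWJE"]
        pieces = mxc.split("/")
        if len(pieces) != 4 or pieces[0] != "mxc:":
            raise ValueError("Invalid avatar URL %r" % (mxc,))
        return pieces[-1]

    media_id_from_mxc("mxc://example.org/JWEIFJgwEIhweiWJE")  # -> "JWEIFJgwEIhweiWJE"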


From 0cc2594966b05dff1594d993b38c6a5d1ca0d2ce Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Wed, 22 Jan 2020 15:52:46 +0000
Subject: Validate client_secret parameter according to spec (#20)

---
 changelog.d/20.bugfix                    |  1 +
 synapse/rest/client/v2_alpha/account.py  | 15 ++++++++++++++-
 synapse/rest/client/v2_alpha/register.py |  5 +++++
 synapse/util/stringutils.py              | 14 ++++++++++++++
 4 files changed, 34 insertions(+), 1 deletion(-)
 create mode 100644 changelog.d/20.bugfix

diff --git a/changelog.d/20.bugfix b/changelog.d/20.bugfix
new file mode 100644
index 0000000000..8ba53c28f9
--- /dev/null
+++ b/changelog.d/20.bugfix
@@ -0,0 +1 @@
+Validate `client_secret` parameter against the regex provided by the C-S spec.
\ No newline at end of file
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index 32770a4a95..9bf4afac66 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -34,7 +34,7 @@ from synapse.http.servlet import (
 )
 from synapse.types import UserID
 from synapse.util.msisdn import phone_number_to_msisdn
-from synapse.util.stringutils import random_string
+from synapse.util.stringutils import assert_valid_client_secret, random_string
 from synapse.util.threepids import check_3pid_allowed
 
 from ._base import client_patterns, interactive_auth_handler
@@ -79,6 +79,8 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
 
         # Extract params from body
         client_secret = body["client_secret"]
+        assert_valid_client_secret(client_secret)
+
         email = body["email"]
         send_attempt = body["send_attempt"]
         next_link = body.get("next_link")  # Optional param
@@ -216,6 +218,8 @@ class MsisdnPasswordRequestTokenRestServlet(RestServlet):
                 Codes.THREEPID_DENIED,
             )
 
+        assert_valid_client_secret(body["client_secret"])
+
         existingUid = yield self.datastore.get_user_id_by_threepid(
             'msisdn', msisdn
         )
@@ -257,6 +261,9 @@ class PasswordResetSubmitTokenServlet(RestServlet):
 
         sid = parse_string(request, "sid")
         client_secret = parse_string(request, "client_secret")
+
+        assert_valid_client_secret(client_secret)
+
         token = parse_string(request, "token")
 
         # Attempt to validate a 3PID sesssion
@@ -330,6 +337,8 @@ class PasswordResetSubmitTokenServlet(RestServlet):
             'sid', 'client_secret', 'token',
         ])
 
+        assert_valid_client_secret(body["client_secret"])
+
         valid, _ = yield self.datastore.validate_threepid_validation_token(
             body['sid'],
             body['client_secret'],
@@ -510,6 +519,8 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
                 Codes.THREEPID_DENIED,
             )
 
+        assert_valid_client_secret(body["client_secret"])
+
         existingUid = yield self.datastore.get_user_id_by_threepid(
             'email', body['email']
         )
@@ -547,6 +558,8 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
                 Codes.THREEPID_DENIED,
             )
 
+        assert_valid_client_secret(body["client_secret"])
+
         existingUid = yield self.datastore.get_user_id_by_threepid(
             'msisdn', msisdn
         )
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index dade6530f4..3d5a198278 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -43,6 +43,7 @@ from synapse.http.servlet import (
 )
 from synapse.util.msisdn import phone_number_to_msisdn
 from synapse.util.ratelimitutils import FederationRateLimiter
+from synapse.util.stringutils import assert_valid_client_secret
 from synapse.util.threepids import check_3pid_allowed
 
 from ._base import client_patterns, interactive_auth_handler
@@ -88,6 +89,8 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
                 Codes.THREEPID_DENIED,
             )
 
+        assert_valid_client_secret(body["client_secret"])
+
         existingUid = yield self.hs.get_datastore().get_user_id_by_threepid(
             'email', body['email']
         )
@@ -123,6 +126,8 @@ class MsisdnRegisterRequestTokenRestServlet(RestServlet):
 
         msisdn = phone_number_to_msisdn(body['country'], body['phone_number'])
 
+        assert_valid_client_secret(body["client_secret"])
+
         if not (yield check_3pid_allowed(self.hs, "msisdn", msisdn)):
             raise SynapseError(
                 403,
diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py
index 69dffd8244..5fb18ee1f8 100644
--- a/synapse/util/stringutils.py
+++ b/synapse/util/stringutils.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2020 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,12 +15,15 @@
 # limitations under the License.
 
 import random
+import re
 import string
 
 import six
 from six import PY2, PY3
 from six.moves import range
 
+from synapse.api.errors import Codes, SynapseError
+
 _string_with_symbols = (
     string.digits + string.ascii_letters + ".,;:^&*-_+=#~@"
 )
@@ -29,6 +33,8 @@ _string_with_symbols = (
 # we get cryptographically-secure randoms.
 rand = random.SystemRandom()
 
+client_secret_regex = re.compile(r"^[0-9a-zA-Z.=_-]+$")
+
 
 def random_string(length):
     return ''.join(rand.choice(string.ascii_letters) for _ in range(length))
@@ -113,3 +119,11 @@ def exception_to_unicode(e):
         return msg.decode('utf-8', errors='replace')
     else:
         return msg
+
+
+def assert_valid_client_secret(client_secret):
+    """Validate that a given string matches the client_secret regex defined by the spec"""
+    if client_secret_regex.match(client_secret) is None:
+        raise SynapseError(
+            400, "Invalid client_secret parameter", errcode=Codes.INVALID_PARAM
+        )
-- 
cgit 1.5.1
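
For reference, the pattern added to stringutils.py accepts only the characters
the client-server spec allows for client_secret; a quick sanity check (the
pattern is copied verbatim from the patch above):

    import re

    client_secret_regex = re.compile(r"^[0-9a-zA-Z.=_-]+$")

    assert client_secret_regex.match("monkeys_are_GREAT.=_-") is not None
    assert client_secret_regex.match("has spaces and punctuation!") is None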


From cafeb5e0e7af12f0e6d6ce3337ac0d1f7f8c12a2 Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Fri, 24 Jan 2020 15:23:31 +0000
Subject: Fix resetting password via a phone number (#21)

---
 changelog.d/21.bugfix                   | 1 +
 synapse/rest/client/v2_alpha/account.py | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 changelog.d/21.bugfix

diff --git a/changelog.d/21.bugfix b/changelog.d/21.bugfix
new file mode 100644
index 0000000000..630d7812f7
--- /dev/null
+++ b/changelog.d/21.bugfix
@@ -0,0 +1 @@
+Fix resetting user passwords via a phone number.
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index 9bf4afac66..974465e90c 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -339,7 +339,7 @@ class PasswordResetSubmitTokenServlet(RestServlet):
 
         assert_valid_client_secret(body["client_secret"])
 
-        valid, _ = yield self.datastore.validate_threepid_validation_token(
+        valid, _ = yield self.datastore.validate_threepid_session(
             body['sid'],
             body['client_secret'],
             body['token'],
-- 
cgit 1.5.1