From f79f45448527f22f3813e38233521a5e13e9223e Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 31 Oct 2018 22:29:02 +1100 Subject: Remove deprecated v1 key exchange endpoint (#4119) --- synapse/app/homeserver.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 593e1e75db..415374a2ce 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -37,7 +37,6 @@ from synapse.api.urls import ( FEDERATION_PREFIX, LEGACY_MEDIA_PREFIX, MEDIA_PREFIX, - SERVER_KEY_PREFIX, SERVER_KEY_V2_PREFIX, STATIC_PREFIX, WEB_CLIENT_PREFIX, @@ -59,7 +58,6 @@ from synapse.python_dependencies import CONDITIONAL_REQUIREMENTS, check_requirem from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory from synapse.rest import ClientRestResource -from synapse.rest.key.v1.server_key_resource import LocalKey from synapse.rest.key.v2 import KeyApiV2Resource from synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.server import HomeServer @@ -236,10 +234,7 @@ class SynapseHomeServer(HomeServer): ) if name in ["keys", "federation"]: - resources.update({ - SERVER_KEY_PREFIX: LocalKey(self), - SERVER_KEY_V2_PREFIX: KeyApiV2Resource(self), - }) + resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self) if name == "webclient": resources[WEB_CLIENT_PREFIX] = build_resource_for_web_client(self) -- cgit 1.5.1 From cb7a6b2379e0e0a4ba8043da98e376b45d05b977 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Sat, 3 Nov 2018 00:19:23 +1100 Subject: Fix typing being reset causing infinite syncs (#4127) --- changelog.d/4127.bugfix | 1 + synapse/app/synchrotron.py | 14 ++++ synapse/handlers/typing.py | 14 ++-- tests/rest/client/v2_alpha/test_sync.py | 123 ++++++++++++++++++++++++++++++++ tests/server.py | 8 ++- 5 files changed, 155 insertions(+), 5 deletions(-) create mode 100644 changelog.d/4127.bugfix (limited to 'synapse/app') diff --git a/changelog.d/4127.bugfix b/changelog.d/4127.bugfix new file mode 100644 index 0000000000..0701d2ceaa --- /dev/null +++ b/changelog.d/4127.bugfix @@ -0,0 +1 @@ +If the typing stream ID goes backwards (as on a worker when the master restarts), the worker's typing handler will no longer erroneously report rooms containing new typing events. diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index 3926c7f263..0354e82bf8 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -226,7 +226,15 @@ class SynchrotronPresence(object): class SynchrotronTyping(object): def __init__(self, hs): self._latest_room_serial = 0 + self._reset() + + def _reset(self): + """ + Reset the typing handler's data caches. + """ + # map room IDs to serial numbers self._room_serials = {} + # map room IDs to sets of users currently typing self._room_typing = {} def stream_positions(self): @@ -236,6 +244,12 @@ class SynchrotronTyping(object): return {"typing": self._latest_room_serial} def process_replication_rows(self, token, rows): + if self._latest_room_serial > token: + # The master has gone backwards. To prevent inconsistent data, just + # clear everything. + self._reset() + + # Set the latest serial token to whatever the server gave us. 
self._latest_room_serial = token for row in rows: diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index c610933dd4..a61bbf9392 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -63,11 +63,8 @@ class TypingHandler(object): self._member_typing_until = {} # clock time we expect to stop self._member_last_federation_poke = {} - # map room IDs to serial numbers - self._room_serials = {} self._latest_room_serial = 0 - # map room IDs to sets of users currently typing - self._room_typing = {} + self._reset() # caches which room_ids changed at which serials self._typing_stream_change_cache = StreamChangeCache( @@ -79,6 +76,15 @@ class TypingHandler(object): 5000, ) + def _reset(self): + """ + Reset the typing handler's data caches. + """ + # map room IDs to serial numbers + self._room_serials = {} + # map room IDs to sets of users currently typing + self._room_typing = {} + def _handle_timeouts(self): logger.info("Checking for typing timeouts") diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py index 4c30c5f258..99b716f00a 100644 --- a/tests/rest/client/v2_alpha/test_sync.py +++ b/tests/rest/client/v2_alpha/test_sync.py @@ -15,9 +15,11 @@ from mock import Mock +from synapse.rest.client.v1 import admin, login, room from synapse.rest.client.v2_alpha import sync from tests import unittest +from tests.server import TimedOutException class FilterTestCase(unittest.HomeserverTestCase): @@ -65,3 +67,124 @@ class FilterTestCase(unittest.HomeserverTestCase): ["next_batch", "rooms", "account_data", "to_device", "device_lists"] ).issubset(set(channel.json_body.keys())) ) + + +class SyncTypingTests(unittest.HomeserverTestCase): + + servlets = [ + admin.register_servlets, + room.register_servlets, + login.register_servlets, + sync.register_servlets, + ] + user_id = True + hijack_auth = False + + def test_sync_backwards_typing(self): + """ + If the typing serial goes backwards and the typing handler is then reset + (such as when the master restarts and sets the typing serial to 0), we + do not incorrectly return typing information that had a serial greater + than the now-reset serial. + """ + typing_url = "/rooms/%s/typing/%s?access_token=%s" + sync_url = "/sync?timeout=3000000&access_token=%s&since=%s" + + # Register the user who gets notified + user_id = self.register_user("user", "pass") + access_token = self.login("user", "pass") + + # Register the user who sends the message + other_user_id = self.register_user("otheruser", "pass") + other_access_token = self.login("otheruser", "pass") + + # Create a room + room = self.helper.create_room_as(user_id, tok=access_token) + + # Invite the other person + self.helper.invite(room=room, src=user_id, tok=access_token, targ=other_user_id) + + # The other user joins + self.helper.join(room=room, user=other_user_id, tok=other_access_token) + + # The other user sends some messages + self.helper.send(room, body="Hi!", tok=other_access_token) + self.helper.send(room, body="There!", tok=other_access_token) + + # Start typing. + request, channel = self.make_request( + "PUT", + typing_url % (room, other_user_id, other_access_token), + b'{"typing": true, "timeout": 30000}', + ) + self.render(request) + self.assertEquals(200, channel.code) + + request, channel = self.make_request( + "GET", "/sync?access_token=%s" % (access_token,) + ) + self.render(request) + self.assertEquals(200, channel.code) + next_batch = channel.json_body["next_batch"] + + # Stop typing. 
+ request, channel = self.make_request( + "PUT", + typing_url % (room, other_user_id, other_access_token), + b'{"typing": false}', + ) + self.render(request) + self.assertEquals(200, channel.code) + + # Start typing. + request, channel = self.make_request( + "PUT", + typing_url % (room, other_user_id, other_access_token), + b'{"typing": true, "timeout": 30000}', + ) + self.render(request) + self.assertEquals(200, channel.code) + + # Should return immediately + request, channel = self.make_request( + "GET", sync_url % (access_token, next_batch) + ) + self.render(request) + self.assertEquals(200, channel.code) + next_batch = channel.json_body["next_batch"] + + # Reset typing serial back to 0, as if the master had. + typing = self.hs.get_typing_handler() + typing._latest_room_serial = 0 + + # Since it checks the state token, we need some state to update to + # invalidate the stream token. + self.helper.send(room, body="There!", tok=other_access_token) + + request, channel = self.make_request( + "GET", sync_url % (access_token, next_batch) + ) + self.render(request) + self.assertEquals(200, channel.code) + next_batch = channel.json_body["next_batch"] + + # This should time out! But it does not, because our stream token is + # ahead, and therefore it's saying the typing (that we've actually + # already seen) is new, since it's got a token above our new, now-reset + # stream token. + request, channel = self.make_request( + "GET", sync_url % (access_token, next_batch) + ) + self.render(request) + self.assertEquals(200, channel.code) + next_batch = channel.json_body["next_batch"] + + # Clear the typing information, so that it doesn't think everything is + # in the future. + typing._reset() + + # Now it SHOULD fail as it never completes! + request, channel = self.make_request( + "GET", sync_url % (access_token, next_batch) + ) + self.assertRaises(TimedOutException, self.render, request) diff --git a/tests/server.py b/tests/server.py index 819c854448..cc6dbe04ac 100644 --- a/tests/server.py +++ b/tests/server.py @@ -21,6 +21,12 @@ from synapse.util import Clock from tests.utils import setup_test_homeserver as _sth +class TimedOutException(Exception): + """ + A web query timed out. 
+ """ + + @attr.s class FakeChannel(object): """ @@ -153,7 +159,7 @@ def wait_until_result(clock, request, timeout=100): x += 1 if x > timeout: - raise Exception("Timed out waiting for request to finish.") + raise TimedOutException("Timed out waiting for request to finish.") clock.advance(0.1) -- cgit 1.5.1 From 835779f7fb8e8b84c3f8e371528d6ea08d0c373f Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Thu, 15 Nov 2018 11:08:27 -0700 Subject: Add option to track MAU stats (but not limit people) (#3830) --- changelog.d/3830.feature | 1 + synapse/app/homeserver.py | 2 +- synapse/config/server.py | 6 +++ synapse/storage/monthly_active_users.py | 74 ++++++++++++++++-------------- tests/storage/test_monthly_active_users.py | 25 ++++++++++ tests/test_mau.py | 18 ++++++++ tests/utils.py | 1 + 7 files changed, 92 insertions(+), 35 deletions(-) create mode 100644 changelog.d/3830.feature (limited to 'synapse/app') diff --git a/changelog.d/3830.feature b/changelog.d/3830.feature new file mode 100644 index 0000000000..af472cf763 --- /dev/null +++ b/changelog.d/3830.feature @@ -0,0 +1 @@ +Add option to track MAU stats (but not limit people) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 415374a2ce..3e4dea2f19 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -535,7 +535,7 @@ def run(hs): current_mau_count = 0 reserved_count = 0 store = hs.get_datastore() - if hs.config.limit_usage_by_mau: + if hs.config.limit_usage_by_mau or hs.config.mau_stats_only: current_mau_count = yield store.get_monthly_active_count() reserved_count = yield store.get_registered_reserved_users_count() current_mau_gauge.set(float(current_mau_count)) diff --git a/synapse/config/server.py b/synapse/config/server.py index c1c7c0105e..5ff9ac288d 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -77,6 +77,7 @@ class ServerConfig(Config): self.max_mau_value = config.get( "max_mau_value", 0, ) + self.mau_stats_only = config.get("mau_stats_only", False) self.mau_limits_reserved_threepids = config.get( "mau_limit_reserved_threepids", [] @@ -372,6 +373,11 @@ class ServerConfig(Config): # max_mau_value: 50 # mau_trial_days: 2 # + # If enabled, the metrics for the number of monthly active users will + # be populated, however no one will be limited. If limit_usage_by_mau + # is true, this is implied to be true. + # mau_stats_only: False + # # Sometimes the server admin will want to ensure certain accounts are # never blocked by mau checking. These accounts are specified here. # diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index cf4104dc2e..c353b11c9a 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -96,37 +96,38 @@ class MonthlyActiveUsersStore(SQLBaseStore): txn.execute(sql, query_args) - # If MAU user count still exceeds the MAU threshold, then delete on - # a least recently active basis. - # Note it is not possible to write this query using OFFSET due to - # incompatibilities in how sqlite and postgres support the feature. - # sqlite requires 'LIMIT -1 OFFSET ?', the LIMIT must be present - # While Postgres does not require 'LIMIT', but also does not support - # negative LIMIT values. 
So there is no way to write it that both can - # support - safe_guard = self.hs.config.max_mau_value - len(self.reserved_users) - # Must be greater than zero for postgres - safe_guard = safe_guard if safe_guard > 0 else 0 - query_args = [safe_guard] - - base_sql = """ - DELETE FROM monthly_active_users - WHERE user_id NOT IN ( - SELECT user_id FROM monthly_active_users - ORDER BY timestamp DESC - LIMIT ? + if self.hs.config.limit_usage_by_mau: + # If MAU user count still exceeds the MAU threshold, then delete on + # a least recently active basis. + # Note it is not possible to write this query using OFFSET due to + # incompatibilities in how sqlite and postgres support the feature. + # sqlite requires 'LIMIT -1 OFFSET ?', the LIMIT must be present + # While Postgres does not require 'LIMIT', but also does not support + # negative LIMIT values. So there is no way to write it that both can + # support + safe_guard = self.hs.config.max_mau_value - len(self.reserved_users) + # Must be greater than zero for postgres + safe_guard = safe_guard if safe_guard > 0 else 0 + query_args = [safe_guard] + + base_sql = """ + DELETE FROM monthly_active_users + WHERE user_id NOT IN ( + SELECT user_id FROM monthly_active_users + ORDER BY timestamp DESC + LIMIT ? + ) + """ + # Need if/else since 'AND user_id NOT IN ({})' fails on Postgres + # when len(reserved_users) == 0. Works fine on sqlite. + if len(self.reserved_users) > 0: + query_args.extend(self.reserved_users) + sql = base_sql + """ AND user_id NOT IN ({})""".format( + ','.join(questionmarks) ) - """ - # Need if/else since 'AND user_id NOT IN ({})' fails on Postgres - # when len(reserved_users) == 0. Works fine on sqlite. - if len(self.reserved_users) > 0: - query_args.extend(self.reserved_users) - sql = base_sql + """ AND user_id NOT IN ({})""".format( - ','.join(questionmarks) - ) - else: - sql = base_sql - txn.execute(sql, query_args) + else: + sql = base_sql + txn.execute(sql, query_args) yield self.runInteraction("reap_monthly_active_users", _reap_users) # It seems poor to invalidate the whole cache, Postgres supports @@ -252,8 +253,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): Args: user_id(str): the user_id to query """ - - if self.hs.config.limit_usage_by_mau: + if self.hs.config.limit_usage_by_mau or self.hs.config.mau_stats_only: # Trial users and guests should not be included as part of MAU group is_guest = yield self.is_guest(user_id) if is_guest: @@ -271,8 +271,14 @@ class MonthlyActiveUsersStore(SQLBaseStore): # but only update if we have not previously seen the user for # LAST_SEEN_GRANULARITY ms if last_seen_timestamp is None: - count = yield self.get_monthly_active_count() - if count < self.hs.config.max_mau_value: + # In the case where mau_stats_only is True and limit_usage_by_mau is + # False, there is no point in checking get_monthly_active_count - it + # adds no value and will break the logic if max_mau_value is exceeded. 
+ if not self.hs.config.limit_usage_by_mau: yield self.upsert_monthly_active_user(user_id) + else: + count = yield self.get_monthly_active_count() + if count < self.hs.config.max_mau_value: + yield self.upsert_monthly_active_user(user_id) elif now - last_seen_timestamp > LAST_SEEN_GRANULARITY: yield self.upsert_monthly_active_user(user_id) diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 832e379a83..8664bc3d54 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -220,3 +220,28 @@ class MonthlyActiveUsersTestCase(HomeserverTestCase): self.store.user_add_threepid(user2, "email", user2_email, now, now) count = self.store.get_registered_reserved_users_count() self.assertEquals(self.get_success(count), len(threepids)) + + def test_track_monthly_users_without_cap(self): + self.hs.config.limit_usage_by_mau = False + self.hs.config.mau_stats_only = True + self.hs.config.max_mau_value = 1 # should not matter + + count = self.store.get_monthly_active_count() + self.assertEqual(0, self.get_success(count)) + + self.store.upsert_monthly_active_user("@user1:server") + self.store.upsert_monthly_active_user("@user2:server") + self.pump() + + count = self.store.get_monthly_active_count() + self.assertEqual(2, self.get_success(count)) + + def test_no_users_when_not_tracking(self): + self.hs.config.limit_usage_by_mau = False + self.hs.config.mau_stats_only = False + self.store.upsert_monthly_active_user = Mock() + + self.store.populate_monthly_active_users("@user:sever") + self.pump() + + self.store.upsert_monthly_active_user.assert_not_called() diff --git a/tests/test_mau.py b/tests/test_mau.py index 0afdeb0818..04f95c942f 100644 --- a/tests/test_mau.py +++ b/tests/test_mau.py @@ -171,6 +171,24 @@ class TestMauLimit(unittest.HomeserverTestCase): self.assertEqual(e.code, 403) self.assertEqual(e.errcode, Codes.RESOURCE_LIMIT_EXCEEDED) + def test_tracked_but_not_limited(self): + self.hs.config.max_mau_value = 1 # should not matter + self.hs.config.limit_usage_by_mau = False + self.hs.config.mau_stats_only = True + + # Simply being able to create 2 users indicates that the + # limit was not reached. + token1 = self.create_user("kermit1") + self.do_sync_for_user(token1) + token2 = self.create_user("kermit2") + self.do_sync_for_user(token2) + + # We do want to verify that the number of tracked users + # matches what we want though + count = self.store.get_monthly_active_count() + self.reactor.advance(100) + self.assertEqual(2, self.successResultOf(count)) + def create_user(self, localpart): request_data = json.dumps( { diff --git a/tests/utils.py b/tests/utils.py index 67ab916f30..52ab762010 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -134,6 +134,7 @@ def default_config(name): config.hs_disabled_limit_type = "" config.max_mau_value = 50 config.mau_trial_days = 0 + config.mau_stats_only = False config.mau_limits_reserved_threepids = [] config.admin_contact = None config.rc_messages_per_second = 10000 -- cgit 1.5.1 From e8d98466b0d29abd3604470af9f10134f3373506 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 5 Dec 2018 14:38:58 +0100 Subject: Implement .well-known handling (#4262) Sometimes it's useful for synapse to generate its own .well-known file. 
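As a rough, hypothetical illustration (not part of this patch): the resource added below serves a small JSON document at /.well-known/matrix/client, which a client could fetch to discover its homeserver. The server name in this sketch is a placeholder, and the endpoint only returns data when public_baseurl is set in the homeserver config.

    import json
    from urllib.request import urlopen

    # Placeholder server name; the resource returns a 404 unless
    # public_baseurl is configured on the homeserver.
    with urlopen("https://example.com/.well-known/matrix/client") as resp:
        well_known = json.loads(resp.read().decode("utf-8"))

    # Shape produced by the WellKnownBuilder in this patch, e.g.:
    #   {"m.homeserver": {"base_url": "https://example.com/"},
    #    "m.identity_server": {"base_url": "https://id.example.com"}}
    print(well_known["m.homeserver"]["base_url"])
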
--- changelog.d/4262.feature | 1 + synapse/app/homeserver.py | 2 ++ synapse/config/registration.py | 9 ++++++ synapse/rest/well_known.py | 70 ++++++++++++++++++++++++++++++++++++++++++ tests/rest/test_well_known.py | 58 ++++++++++++++++++++++++++++++++++ 5 files changed, 140 insertions(+) create mode 100644 changelog.d/4262.feature create mode 100644 synapse/rest/well_known.py create mode 100644 tests/rest/test_well_known.py (limited to 'synapse/app') diff --git a/changelog.d/4262.feature b/changelog.d/4262.feature new file mode 100644 index 0000000000..89cfdcab15 --- /dev/null +++ b/changelog.d/4262.feature @@ -0,0 +1 @@ +Support for serving .well-known files diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3e4dea2f19..a03a3e4b8a 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -60,6 +60,7 @@ from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory from synapse.rest import ClientRestResource from synapse.rest.key.v2 import KeyApiV2Resource from synapse.rest.media.v0.content_repository import ContentRepoResource +from synapse.rest.well_known import WellKnownResource from synapse.server import HomeServer from synapse.storage import DataStore, are_all_users_on_domain from synapse.storage.engines import IncorrectDatabaseSetup, create_engine @@ -195,6 +196,7 @@ class SynapseHomeServer(HomeServer): "/_matrix/client/unstable": client_resource, "/_matrix/client/v2_alpha": client_resource, "/_matrix/client/versions": client_resource, + "/.well-known/matrix/client": WellKnownResource(self), }) if name == "consent": diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 717bbfec61..e365f0c30b 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -37,6 +37,7 @@ class RegistrationConfig(Config): self.bcrypt_rounds = config.get("bcrypt_rounds", 12) self.trusted_third_party_id_servers = config["trusted_third_party_id_servers"] + self.default_identity_server = config.get("default_identity_server") self.allow_guest_access = config.get("allow_guest_access", False) self.invite_3pid_guest = ( @@ -91,6 +92,14 @@ class RegistrationConfig(Config): # accessible to anonymous users. allow_guest_access: False + # The identity server which we suggest that clients should use when users log + # in on this server. + # + # (By default, no suggestion is made, so it is left up to the client. + # This setting is ignored unless public_baseurl is also set.) + # + # default_identity_server: https://matrix.org + # The list of identity servers trusted to verify third party # identifiers by this server. # diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py new file mode 100644 index 0000000000..6e043d6162 --- /dev/null +++ b/synapse/rest/well_known.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +import logging + +from twisted.web.resource import Resource + +logger = logging.getLogger(__name__) + + +class WellKnownBuilder(object): + """Utility to construct the well-known response + + Args: + hs (synapse.server.HomeServer): + """ + def __init__(self, hs): + self._config = hs.config + + def get_well_known(self): + # if we don't have a public_base_url, we can't help much here. + if self._config.public_baseurl is None: + return None + + result = { + "m.homeserver": { + "base_url": self._config.public_baseurl, + }, + } + + if self._config.default_identity_server: + result["m.identity_server"] = { + "base_url": self._config.default_identity_server, + } + + return result + + +class WellKnownResource(Resource): + """A Twisted web resource which renders the .well-known file""" + + isLeaf = 1 + + def __init__(self, hs): + Resource.__init__(self) + self._well_known_builder = WellKnownBuilder(hs) + + def render_GET(self, request): + r = self._well_known_builder.get_well_known() + if not r: + request.setResponseCode(404) + request.setHeader(b"Content-Type", b"text/plain") + return b'.well-known not available' + + logger.error("returning: %s", r) + request.setHeader(b"Content-Type", b"application/json") + return json.dumps(r).encode("utf-8") diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py new file mode 100644 index 0000000000..8d8f03e005 --- /dev/null +++ b/tests/rest/test_well_known.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from synapse.rest.well_known import WellKnownResource + +from tests import unittest + + +class WellKnownTests(unittest.HomeserverTestCase): + def setUp(self): + super(WellKnownTests, self).setUp() + + # replace the JsonResource with a WellKnownResource + self.resource = WellKnownResource(self.hs) + + def test_well_known(self): + self.hs.config.public_baseurl = "https://tesths" + self.hs.config.default_identity_server = "https://testis" + + request, channel = self.make_request( + "GET", + "/.well-known/matrix/client", + shorthand=False, + ) + self.render(request) + + self.assertEqual(request.code, 200) + self.assertEqual( + channel.json_body, { + "m.homeserver": {"base_url": "https://tesths"}, + "m.identity_server": {"base_url": "https://testis"}, + } + ) + + def test_well_known_no_public_baseurl(self): + self.hs.config.public_baseurl = None + + request, channel = self.make_request( + "GET", + "/.well-known/matrix/client", + shorthand=False, + ) + self.render(request) + + self.assertEqual(request.code, 404) -- cgit 1.5.1 From c7401a697f1ee3410b860afd8686f8bb012a8dce Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 7 Dec 2018 13:11:11 +0100 Subject: Implement SAML2 authentication (#4267) This implements both a SAML2 metadata endpoint (at `/_matrix/saml2/metadata.xml`), and a SAML2 response receiver (at `/_matrix/saml2/authn_response`). 
If the SAML2 response matches what's been configured, we complete the SSO login flow by redirecting to the client url (aka `RelayState` in SAML2 jargon) with a login token. What we don't yet have is anything to build a SAML2 request and redirect the user to the identity provider. That is left as an exercise for the reader. --- changelog.d/4267.feature | 1 + synapse/app/homeserver.py | 4 ++ synapse/config/homeserver.py | 3 +- synapse/config/saml2_config.py | 110 ++++++++++++++++++++++++++++++++ synapse/python_dependencies.py | 5 +- synapse/rest/saml2/__init__.py | 29 +++++++++ synapse/rest/saml2/metadata_resource.py | 36 +++++++++++ synapse/rest/saml2/response_resource.py | 71 +++++++++++++++++++++ tests/utils.py | 1 + 9 files changed, 258 insertions(+), 2 deletions(-) create mode 100644 changelog.d/4267.feature create mode 100644 synapse/config/saml2_config.py create mode 100644 synapse/rest/saml2/__init__.py create mode 100644 synapse/rest/saml2/metadata_resource.py create mode 100644 synapse/rest/saml2/response_resource.py (limited to 'synapse/app') diff --git a/changelog.d/4267.feature b/changelog.d/4267.feature new file mode 100644 index 0000000000..da36986e2b --- /dev/null +++ b/changelog.d/4267.feature @@ -0,0 +1 @@ +Rework SAML2 authentication diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index a03a3e4b8a..1e495a38b9 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -199,6 +199,10 @@ class SynapseHomeServer(HomeServer): "/.well-known/matrix/client": WellKnownResource(self), }) + if self.get_config().saml2_enabled: + from synapse.rest.saml2 import SAML2Resource + resources["/_matrix/saml2"] = SAML2Resource(self) + if name == "consent": from synapse.rest.consent.consent_resource import ConsentResource consent_resource = ConsentResource(self) diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py index 36182267c2..9d740c7a71 100644 --- a/synapse/config/homeserver.py +++ b/synapse/config/homeserver.py @@ -32,6 +32,7 @@ from .ratelimiting import RatelimitConfig from .registration import RegistrationConfig from .repository import ContentRepositoryConfig from .room_directory import RoomDirectoryConfig +from .saml2_config import SAML2Config from .server import ServerConfig from .server_notices_config import ServerNoticesConfig from .spam_checker import SpamCheckerConfig @@ -44,7 +45,7 @@ from .workers import WorkerConfig class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig, RatelimitConfig, ContentRepositoryConfig, CaptchaConfig, VoipConfig, RegistrationConfig, MetricsConfig, ApiConfig, - AppServiceConfig, KeyConfig, CasConfig, + AppServiceConfig, KeyConfig, SAML2Config, CasConfig, JWTConfig, PasswordConfig, EmailConfig, WorkerConfig, PasswordAuthProviderConfig, PushConfig, SpamCheckerConfig, GroupsConfig, UserDirectoryConfig, diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py new file mode 100644 index 0000000000..86ffe334f5 --- /dev/null +++ b/synapse/config/saml2_config.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ._base import Config, ConfigError + + +class SAML2Config(Config): + def read_config(self, config): + self.saml2_enabled = False + + saml2_config = config.get("saml2_config") + + if not saml2_config or not saml2_config.get("enabled", True): + return + + self.saml2_enabled = True + + import saml2.config + self.saml2_sp_config = saml2.config.SPConfig() + self.saml2_sp_config.load(self._default_saml_config_dict()) + self.saml2_sp_config.load(saml2_config.get("sp_config", {})) + + config_path = saml2_config.get("config_path", None) + if config_path is not None: + self.saml2_sp_config.load_file(config_path) + + def _default_saml_config_dict(self): + import saml2 + + public_baseurl = self.public_baseurl + if public_baseurl is None: + raise ConfigError( + "saml2_config requires a public_baseurl to be set" + ) + + metadata_url = public_baseurl + "_matrix/saml2/metadata.xml" + response_url = public_baseurl + "_matrix/saml2/authn_response" + return { + "entityid": metadata_url, + + "service": { + "sp": { + "endpoints": { + "assertion_consumer_service": [ + (response_url, saml2.BINDING_HTTP_POST), + ], + }, + "required_attributes": ["uid"], + "optional_attributes": ["mail", "surname", "givenname"], + }, + } + } + + def default_config(self, config_dir_path, server_name, **kwargs): + return """ + # Enable SAML2 for registration and login. Uses pysaml2. + # + # saml2_config: + # + # # The following is the configuration for the pysaml2 Service Provider. + # # See pysaml2 docs for format of config. + # # + # # Default values will be used for the 'entityid' and 'service' settings, + # # so it is not normally necessary to specify them unless you need to + # # override them. + # + # sp_config: + # # point this to the IdP's metadata. You can use either a local file or + # # (preferably) a URL. + # metadata: + # # local: ["saml2/idp.xml"] + # remote: + # - url: https://our_idp/metadata.xml + # + # # The following is just used to generate our metadata xml, and you + # # may well not need it, depending on your setup. Alternatively you + # # may need a whole lot more detail - see the pysaml2 docs! 
+ # + # description: ["My awesome SP", "en"] + # name: ["Test SP", "en"] + # + # organization: + # name: Example com + # display_name: + # - ["Example co", "en"] + # url: "http://example.com" + # + # contact_person: + # - given_name: Bob + # sur_name: "the Sysadmin" + # email_address": ["admin@example.com"] + # contact_type": technical + # + # # Instead of putting the config inline as above, you can specify a + # # separate pysaml2 configuration file: + # # + # # config_path: "%(config_dir_path)s/sp_conf.py" + """ % {"config_dir_path": config_dir_path} diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 75ba9947cc..db631e0c6c 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -80,7 +80,10 @@ CONDITIONAL_REQUIREMENTS = { }, "postgres": { "psycopg2>=2.6": ["psycopg2"] - } + }, + "saml2": { + "pysaml2>=4.5.0": ["saml2"], + }, } diff --git a/synapse/rest/saml2/__init__.py b/synapse/rest/saml2/__init__.py new file mode 100644 index 0000000000..68da37ca6a --- /dev/null +++ b/synapse/rest/saml2/__init__.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import logging + +from twisted.web.resource import Resource + +from synapse.rest.saml2.metadata_resource import SAML2MetadataResource +from synapse.rest.saml2.response_resource import SAML2ResponseResource + +logger = logging.getLogger(__name__) + + +class SAML2Resource(Resource): + def __init__(self, hs): + Resource.__init__(self) + self.putChild(b"metadata.xml", SAML2MetadataResource(hs)) + self.putChild(b"authn_response", SAML2ResponseResource(hs)) diff --git a/synapse/rest/saml2/metadata_resource.py b/synapse/rest/saml2/metadata_resource.py new file mode 100644 index 0000000000..e8c680aeb4 --- /dev/null +++ b/synapse/rest/saml2/metadata_resource.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import saml2.metadata + +from twisted.web.resource import Resource + + +class SAML2MetadataResource(Resource): + """A Twisted web resource which renders the SAML metadata""" + + isLeaf = 1 + + def __init__(self, hs): + Resource.__init__(self) + self.sp_config = hs.config.saml2_sp_config + + def render_GET(self, request): + metadata_xml = saml2.metadata.create_metadata_string( + configfile=None, config=self.sp_config, + ) + request.setHeader(b"Content-Type", b"text/xml; charset=utf-8") + return metadata_xml diff --git a/synapse/rest/saml2/response_resource.py b/synapse/rest/saml2/response_resource.py new file mode 100644 index 0000000000..ad2ed157b5 --- /dev/null +++ b/synapse/rest/saml2/response_resource.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import logging + +import saml2 +from saml2.client import Saml2Client + +from twisted.web.resource import Resource +from twisted.web.server import NOT_DONE_YET + +from synapse.api.errors import CodeMessageException +from synapse.http.server import wrap_html_request_handler +from synapse.http.servlet import parse_string +from synapse.rest.client.v1.login import SSOAuthHandler + +logger = logging.getLogger(__name__) + + +class SAML2ResponseResource(Resource): + """A Twisted web resource which handles the SAML response""" + + isLeaf = 1 + + def __init__(self, hs): + Resource.__init__(self) + + self._saml_client = Saml2Client(hs.config.saml2_sp_config) + self._sso_auth_handler = SSOAuthHandler(hs) + + def render_POST(self, request): + self._async_render_POST(request) + return NOT_DONE_YET + + @wrap_html_request_handler + def _async_render_POST(self, request): + resp_bytes = parse_string(request, 'SAMLResponse', required=True) + relay_state = parse_string(request, 'RelayState', required=True) + + try: + saml2_auth = self._saml_client.parse_authn_request_response( + resp_bytes, saml2.BINDING_HTTP_POST, + ) + except Exception as e: + logger.warning("Exception parsing SAML2 response", exc_info=1) + raise CodeMessageException( + 400, "Unable to parse SAML2 response: %s" % (e,), + ) + + if saml2_auth.not_signed: + raise CodeMessageException(400, "SAML2 response was not signed") + + if "uid" not in saml2_auth.ava: + raise CodeMessageException(400, "uid not in SAML2 response") + + username = saml2_auth.ava["uid"][0] + return self._sso_auth_handler.on_successful_auth( + username, request, relay_state, + ) diff --git a/tests/utils.py b/tests/utils.py index 52ab762010..04796a9b30 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -139,6 +139,7 @@ def default_config(name): config.admin_contact = None config.rc_messages_per_second = 10000 config.rc_message_burst_count = 10000 + config.saml2_enabled = False config.use_frozen_dicts = False -- cgit 1.5.1 From df96177ca7f6f646baf96e779e0bf0ef38cc5168 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 11 Dec 2018 13:18:48 +0000 Subject: Stop installing Matrix Console by default This is based on the work done by @krombel 
in #2601. --- README.rst | 9 +-------- UPGRADE.rst | 4 ++++ changelog.d/4290.feature | 1 + synapse/app/homeserver.py | 43 +++++++++++------------------------------- synapse/config/server.py | 42 ++++++++++++++++++++++++++--------------- synapse/python_dependencies.py | 3 --- 6 files changed, 44 insertions(+), 58 deletions(-) create mode 100644 changelog.d/4290.feature (limited to 'synapse/app') diff --git a/README.rst b/README.rst index 664a0ea03f..02b3db1bc6 100644 --- a/README.rst +++ b/README.rst @@ -289,10 +289,6 @@ go back in your web client and proceed further. If all goes well you should at least be able to log in, create a room, and start sending messages. -(The homeserver runs a web client by default at https://localhost:8448/, though -as of the time of writing it is somewhat outdated and not really recommended - -https://github.com/matrix-org/synapse/issues/1527). - .. _`client-user-reg`: Registering a new user from a client @@ -372,10 +368,7 @@ ArchLinux The quickest way to get up and running with ArchLinux is probably with the community package https://www.archlinux.org/packages/community/any/matrix-synapse/, which should pull in most of -the necessary dependencies. If the default web client is to be served (enabled by default in -the generated config), -https://www.archlinux.org/packages/community/any/python2-matrix-angular-sdk/ will also need to -be installed. +the necessary dependencies. pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 ):: diff --git a/UPGRADE.rst b/UPGRADE.rst index 9d68a64058..95f4e33300 100644 --- a/UPGRADE.rst +++ b/UPGRADE.rst @@ -115,6 +115,10 @@ Upgrading to v0.34.0 case a hypothetical future identity server was put there. If you don't remove it, users may be unable to deactivate their accounts. +3. This release no longer installs the (unmaintained) Matrix Console web client + as part of the default installation. It is possible to re-enable it by + installing it separately and setting the ``web_client_location`` config + option, but please consider switching to another client. 
Upgrading to v0.33.7 ==================== diff --git a/changelog.d/4290.feature b/changelog.d/4290.feature new file mode 100644 index 0000000000..df5114bae9 --- /dev/null +++ b/changelog.d/4290.feature @@ -0,0 +1 @@ +Remove Matrix console from the default distribution diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3e4dea2f19..a6af553f79 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -54,7 +54,7 @@ from synapse.metrics import RegistryProxy from synapse.metrics.background_process_metrics import run_as_background_process from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.module_api import ModuleApi -from synapse.python_dependencies import CONDITIONAL_REQUIREMENTS, check_requirements +from synapse.python_dependencies import check_requirements from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory from synapse.rest import ClientRestResource @@ -79,36 +79,6 @@ def gz_wrap(r): return EncodingResourceWrapper(r, [GzipEncoderFactory()]) -def build_resource_for_web_client(hs): - webclient_path = hs.get_config().web_client_location - if not webclient_path: - try: - import syweb - except ImportError: - quit_with_error( - "Could not find a webclient.\n\n" - "Please either install the matrix-angular-sdk or configure\n" - "the location of the source to serve via the configuration\n" - "option `web_client_location`\n\n" - "To install the `matrix-angular-sdk` via pip, run:\n\n" - " pip install '%(dep)s'\n" - "\n" - "You can also disable hosting of the webclient via the\n" - "configuration option `web_client`\n" - % {"dep": CONDITIONAL_REQUIREMENTS["web_client"].keys()[0]} - ) - syweb_path = os.path.dirname(syweb.__file__) - webclient_path = os.path.join(syweb_path, "webclient") - # GZip is disabled here due to - # https://twistedmatrix.com/trac/ticket/7678 - # (It can stay enabled for the API resources: they call - # write() with the whole body and then finish() straight - # after and so do not trigger the bug. - # GzipFile was removed in commit 184ba09 - # return GzipFile(webclient_path) # TODO configurable? - return File(webclient_path) # TODO configurable? - - class SynapseHomeServer(HomeServer): DATASTORE_CLASS = DataStore @@ -237,7 +207,16 @@ class SynapseHomeServer(HomeServer): resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self) if name == "webclient": - resources[WEB_CLIENT_PREFIX] = build_resource_for_web_client(self) + webclient_path = self.get_config().web_client_location + + if webclient_path is None: + logger.warning( + "Not enabling webclient resource, as web_client_location is unset." 
+ ) + else: + # GZip is disabled here due to + # https://twistedmatrix.com/trac/ticket/7678 + resources[WEB_CLIENT_PREFIX] = File(webclient_path) if name == "metrics" and self.get_config().enable_metrics: resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) diff --git a/synapse/config/server.py b/synapse/config/server.py index 4a5b902f8e..a9154ad462 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -34,7 +34,6 @@ class ServerConfig(Config): raise ConfigError(str(e)) self.pid_file = self.abspath(config.get("pid_file")) - self.web_client = config["web_client"] self.web_client_location = config.get("web_client_location", None) self.soft_file_limit = config["soft_file_limit"] self.daemonize = config.get("daemonize") @@ -128,6 +127,9 @@ class ServerConfig(Config): elif not bind_addresses: bind_addresses.append('') + if not self.web_client_location: + _warn_if_webclient_configured(self.listeners) + self.gc_thresholds = read_gc_thresholds(config.get("gc_thresholds", None)) bind_port = config.get("bind_port") @@ -136,8 +138,6 @@ class ServerConfig(Config): bind_host = config.get("bind_host", "") gzip_responses = config.get("gzip_responses", True) - names = ["client", "webclient"] if self.web_client else ["client"] - self.listeners.append({ "port": bind_port, "bind_addresses": [bind_host], @@ -145,7 +145,7 @@ class ServerConfig(Config): "type": "http", "resources": [ { - "names": names, + "names": ["client"], "compress": gzip_responses, }, { @@ -164,7 +164,7 @@ class ServerConfig(Config): "type": "http", "resources": [ { - "names": names, + "names": ["client"], "compress": gzip_responses, }, { @@ -247,13 +247,9 @@ class ServerConfig(Config): # # cpu_affinity: 0xFFFFFFFF - # Whether to serve a web client from the HTTP/HTTPS root resource. - web_client: True - - # The root directory to server for the above web client. - # If left undefined, synapse will serve the matrix-angular-sdk web client. - # Make sure matrix-angular-sdk is installed with pip if web_client is True - # and web_client_location is undefined + # The path to the web client which will be served at /_matrix/client/ + # if 'webclient' is configured under the 'listeners' configuration. + # # web_client_location: "/path/to/web/root" # The public-facing base URL for the client API (not including _matrix/...) @@ -320,8 +316,8 @@ class ServerConfig(Config): - # List of resources to host on this listener. names: - - client # The client-server APIs, both v1 and v2 - - webclient # The bundled webclient. + - client # The client-server APIs, both v1 and v2 + # - webclient # A web client. Requires web_client_location to be set. # Should synapse compress HTTP responses to clients that support it? # This should be disabled if running synapse behind a load balancer @@ -348,7 +344,7 @@ class ServerConfig(Config): x_forwarded: false resources: - - names: [client, webclient] + - names: [client] compress: true - names: [federation] compress: false @@ -452,3 +448,19 @@ def read_gc_thresholds(thresholds): raise ConfigError( "Value of `gc_threshold` must be a list of three integers if set" ) + + +NO_MORE_WEB_CLIENT_WARNING = """ +Synapse no longer includes a web client. To enable a web client, configure +web_client_location. To remove this warning, remove 'webclient' from the 'listeners' +configuration. 
+""" + + +def _warn_if_webclient_configured(listeners): + for listener in listeners: + for res in listener.get("resources", []): + for name in res.get("names", []): + if name == 'webclient': + logger.warning(NO_MORE_WEB_CLIENT_WARNING) + return diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index ca62ee7637..92422c6ffc 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -69,9 +69,6 @@ REQUIREMENTS = { } CONDITIONAL_REQUIREMENTS = { - "web_client": { - "matrix_angular_sdk>=0.6.8": ["syweb>=0.6.8"], - }, "email.enable_notifs": { "Jinja2>=2.8": ["Jinja2>=2.8"], "bleach>=1.4.2": ["bleach>=1.4.2"], -- cgit 1.5.1 From f537432ef96baf07703805c43d16df45ea765044 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 11 Dec 2018 12:18:19 +0000 Subject: Add a welcome page to the static resources This is largely a precursor for the removal of the bundled webclient. The idea is to present a page at / which reassures people that something is working, and to give them some links for next steps. The welcome page lives at `/_matrix/static/`, so is enabled alongside the other `static` resources (which, in practice, means the client API is enabled). We'll redirect to it from `/` if we have nothing better to display there. It would be nice to have a way to disable it (in the same way that you might disable the nginx welcome page), but I can't really think of a good way to do that without a load of ickiness. It's based on the work done by @krombel for #2601. --- changelog.d/4289.feature | 1 + synapse/app/homeserver.py | 3 +++ synapse/static/index.html | 26 ++++++++++++++++++++++++++ 3 files changed, 30 insertions(+) create mode 100644 changelog.d/4289.feature create mode 100644 synapse/static/index.html (limited to 'synapse/app') diff --git a/changelog.d/4289.feature b/changelog.d/4289.feature new file mode 100644 index 0000000000..4d53bd22c3 --- /dev/null +++ b/changelog.d/4289.feature @@ -0,0 +1 @@ +Add a welcome page for the client API port. Credit to @krombel! \ No newline at end of file diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3e4dea2f19..e433c66558 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -137,8 +137,11 @@ class SynapseHomeServer(HomeServer): handler = handler_cls(config, module_api) resources[path] = AdditionalResource(self, handler.handle_request) + # try to find something useful to redirect '/' to if WEB_CLIENT_PREFIX in resources: root_resource = RootRedirect(WEB_CLIENT_PREFIX) + elif STATIC_PREFIX in resources: + root_resource = RootRedirect(STATIC_PREFIX) else: root_resource = NoResource() diff --git a/synapse/static/index.html b/synapse/static/index.html new file mode 100644 index 0000000000..d664239983 --- /dev/null +++ b/synapse/static/index.html @@ -0,0 +1,26 @@ + + + Synapse is running + + + +
+  Synapse is running
+
+  Congratulations!
+
+  Your Synapse server is listening on this port and is ready for messages.
+
+  To use this server you'll need a client - e.g. one of
+  this list of Matrix clients.
+
+  You can find (federated) rooms that might be of interest to you on
+  view.matrix.org.
+
+  Or you just start creating your own rooms with your friends.
+
+  Welcome to the Matrix universe :)
+ + -- cgit 1.5.1 From 7e22cd90f53ce55af2f46b23293e0dcc869c2976 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 18 Dec 2018 14:36:11 +0000 Subject: ensure can report mau stats when hs.config.mau_stats_only is set (#4305) * ensure can report mau stats when hs.config.mau_stats_only is set --- changelog.d/4305.bugfix | 1 + synapse/app/homeserver.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/4305.bugfix (limited to 'synapse/app') diff --git a/changelog.d/4305.bugfix b/changelog.d/4305.bugfix new file mode 100644 index 0000000000..499fb82077 --- /dev/null +++ b/changelog.d/4305.bugfix @@ -0,0 +1 @@ +The metric synapse_admin_mau:current previously did not update when config.mau_stats_only was set to True diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 023e32fed2..f2064f9d0c 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -537,7 +537,7 @@ def run(hs): ) start_generate_monthly_active_users() - if hs.config.limit_usage_by_mau: + if hs.config.limit_usage_by_mau or hs.config.mau_stats_only: clock.looping_call(start_generate_monthly_active_users, 5 * 60 * 1000) # End of monthly active user settings -- cgit 1.5.1 From c26f49a6645a84730811ea7bc5158d826bc43484 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Sat, 22 Dec 2018 01:37:26 +1100 Subject: Make the dependencies more like a standard Python project and hook up the optional dependencies to setuptools (#4298) --- README.rst | 11 +- changelog.d/4298.feature | 1 + setup.py | 16 ++- synapse/app/__init__.py | 6 +- synapse/app/homeserver.py | 3 - synapse/python_dependencies.py | 228 ++++++++++++++++------------------------- tox.ini | 4 +- 7 files changed, 111 insertions(+), 158 deletions(-) create mode 100644 changelog.d/4298.feature (limited to 'synapse/app') diff --git a/README.rst b/README.rst index 02b3db1bc6..c4df779ac7 100644 --- a/README.rst +++ b/README.rst @@ -86,7 +86,7 @@ Synapse is the reference Python/Twisted Matrix homeserver implementation. System requirements: - POSIX-compliant system (tested on Linux & OS X) -- Python 3.5, 3.6, or 2.7 +- Python 3.5, 3.6, 3.7, or 2.7 - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org Installing from source @@ -148,7 +148,7 @@ To install the Synapse homeserver run:: source ~/synapse/env/bin/activate pip install --upgrade pip pip install --upgrade setuptools - pip install matrix-synapse + pip install matrix-synapse[all] This installs Synapse, along with the libraries it uses, into a virtual environment under ``~/synapse/env``. Feel free to pick a different directory @@ -158,7 +158,7 @@ This Synapse installation can then be later upgraded by using pip again with the update flag:: source ~/synapse/env/bin/activate - pip install -U matrix-synapse + pip install -U matrix-synapse[all] In case of problems, please see the _`Troubleshooting` section below. @@ -826,8 +826,7 @@ to install using pip and a virtualenv:: virtualenv -p python2.7 env source env/bin/activate - python -m synapse.python_dependencies | xargs pip install - pip install lxml mock + python -m pip install -e .[all] This will run a process of downloading and installing all the needed dependencies into a virtual env. @@ -835,7 +834,7 @@ dependencies into a virtual env. Once this is done, you may wish to run Synapse's unit tests, to check that everything is installed as it should be:: - PYTHONPATH="." 
trial tests + python -m twisted.trial tests This should end with a 'PASSED' result:: diff --git a/changelog.d/4298.feature b/changelog.d/4298.feature new file mode 100644 index 0000000000..05ad70fe72 --- /dev/null +++ b/changelog.d/4298.feature @@ -0,0 +1 @@ +Synapse can now have its conditional/extra dependencies installed by pip. This functionality can be used by using `pip install matrix-synapse[feature]`, where feature is a comma separated list with the possible values "email.enable_notifs", "ldap3", "postgres", "saml2", "url_preview", and "test". If you want to install all optional dependencies, you can use "all" instead. diff --git a/setup.py b/setup.py index 00b69c43f5..55b1b10a77 100755 --- a/setup.py +++ b/setup.py @@ -84,13 +84,25 @@ version = exec_file(("synapse", "__init__.py"))["__version__"] dependencies = exec_file(("synapse", "python_dependencies.py")) long_description = read_file(("README.rst",)) +REQUIREMENTS = dependencies['REQUIREMENTS'] +CONDITIONAL_REQUIREMENTS = dependencies['CONDITIONAL_REQUIREMENTS'] + +# Make `pip install matrix-synapse[all]` install all the optional dependencies. +ALL_OPTIONAL_REQUIREMENTS = set() + +for optional_deps in CONDITIONAL_REQUIREMENTS.values(): + ALL_OPTIONAL_REQUIREMENTS = set(optional_deps) | ALL_OPTIONAL_REQUIREMENTS + +CONDITIONAL_REQUIREMENTS["all"] = list(ALL_OPTIONAL_REQUIREMENTS) + + setup( name="matrix-synapse", version=version, packages=find_packages(exclude=["tests", "tests.*"]), description="Reference homeserver for the Matrix decentralised comms protocol", - install_requires=dependencies['requirements'](include_conditional=True).keys(), - dependency_links=dependencies["DEPENDENCY_LINKS"].values(), + install_requires=REQUIREMENTS, + extras_require=CONDITIONAL_REQUIREMENTS, include_package_data=True, zip_safe=False, long_description=long_description, diff --git a/synapse/app/__init__.py b/synapse/app/__init__.py index c3afcc573b..233bf43fc8 100644 --- a/synapse/app/__init__.py +++ b/synapse/app/__init__.py @@ -22,11 +22,11 @@ sys.dont_write_bytecode = True try: python_dependencies.check_requirements() -except python_dependencies.MissingRequirementError as e: +except python_dependencies.DependencyException as e: message = "\n".join([ - "Missing Requirement: %s" % (str(e),), + "Missing Requirements: %s" % (", ".join(e.dependencies),), "To install run:", - " pip install --upgrade --force \"%s\"" % (e.dependency,), + " pip install --upgrade --force %s" % (" ".join(e.dependencies),), "", ]) sys.stderr.writelines(message) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index f2064f9d0c..f3ac3d19f0 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -322,9 +322,6 @@ def setup(config_options): synapse.config.logger.setup_logging(config, use_worker_options=False) - # check any extra requirements we have now we have a config - check_requirements(config) - events.USE_FROZEN_DICTS = config.use_frozen_dicts tls_server_context_factory = context_factory.ServerContextFactory(config) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 72a92cc462..2c65ef5856 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -15,175 +15,121 @@ # limitations under the License. import logging -from distutils.version import LooseVersion + +from pkg_resources import DistributionNotFound, VersionConflict, get_distribution logger = logging.getLogger(__name__) -# this dict maps from python package name to a list of modules we expect it to -# provide. 
-# -# the key is a "requirement specifier", as used as a parameter to `pip -# install`[1], or an `install_requires` argument to `setuptools.setup` [2]. + +# REQUIREMENTS is a simple list of requirement specifiers[1], and must be +# installed. It is passed to setup() as install_requires in setup.py. # -# the value is a sequence of strings; each entry should be the name of the -# python module, optionally followed by a version assertion which can be either -# ">=" or "==". +# CONDITIONAL_REQUIREMENTS is the optional dependencies, represented as a dict +# of lists. The dict key is the optional dependency name and can be passed to +# pip when installing. The list is a series of requirement specifiers[1] to be +# installed when that optional dependency requirement is specified. It is passed +# to setup() as extras_require in setup.py # # [1] https://pip.pypa.io/en/stable/reference/pip_install/#requirement-specifiers. -# [2] https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-dependencies -REQUIREMENTS = { - "jsonschema>=2.5.1": ["jsonschema>=2.5.1"], - "frozendict>=1": ["frozendict"], - "unpaddedbase64>=1.1.0": ["unpaddedbase64>=1.1.0"], - "canonicaljson>=1.1.3": ["canonicaljson>=1.1.3"], - "signedjson>=1.0.0": ["signedjson>=1.0.0"], - "pynacl>=1.2.1": ["nacl>=1.2.1", "nacl.bindings"], - "service_identity>=16.0.0": ["service_identity>=16.0.0"], - "Twisted>=17.1.0": ["twisted>=17.1.0"], - "treq>=15.1": ["treq>=15.1"], +REQUIREMENTS = [ + "jsonschema>=2.5.1", + "frozendict>=1", + "unpaddedbase64>=1.1.0", + "canonicaljson>=1.1.3", + "signedjson>=1.0.0", + "pynacl>=1.2.1", + "service_identity>=16.0.0", + "Twisted>=17.1.0", + "treq>=15.1", # Twisted has required pyopenssl 16.0 since about Twisted 16.6. - "pyopenssl>=16.0.0": ["OpenSSL>=16.0.0"], - - "pyyaml>=3.11": ["yaml"], - "pyasn1>=0.1.9": ["pyasn1"], - "pyasn1-modules>=0.0.7": ["pyasn1_modules"], - "daemonize>=2.3.1": ["daemonize"], - "bcrypt>=3.1.0": ["bcrypt>=3.1.0"], - "pillow>=3.1.2": ["PIL"], - "sortedcontainers>=1.4.4": ["sortedcontainers"], - "psutil>=2.0.0": ["psutil>=2.0.0"], - "pymacaroons-pynacl>=0.9.3": ["pymacaroons"], - "msgpack-python>=0.4.2": ["msgpack"], - "phonenumbers>=8.2.0": ["phonenumbers"], - "six>=1.10": ["six"], - + "pyopenssl>=16.0.0", + "pyyaml>=3.11", + "pyasn1>=0.1.9", + "pyasn1-modules>=0.0.7", + "daemonize>=2.3.1", + "bcrypt>=3.1.0", + "pillow>=3.1.2", + "sortedcontainers>=1.4.4", + "psutil>=2.0.0", + "pymacaroons-pynacl>=0.9.3", + "msgpack-python>=0.4.2", + "phonenumbers>=8.2.0", + "six>=1.10", # prometheus_client 0.4.0 changed the format of counter metrics # (cf https://github.com/matrix-org/synapse/issues/4001) - "prometheus_client>=0.0.18,<0.4.0": ["prometheus_client"], - + "prometheus_client>=0.0.18,<0.4.0", # we use attr.s(slots), which arrived in 16.0.0 - "attrs>=16.0.0": ["attr>=16.0.0"], - "netaddr>=0.7.18": ["netaddr"], -} + "attrs>=16.0.0", + "netaddr>=0.7.18", +] CONDITIONAL_REQUIREMENTS = { - "email.enable_notifs": { - "Jinja2>=2.8": ["Jinja2>=2.8"], - "bleach>=1.4.2": ["bleach>=1.4.2"], - }, - "matrix-synapse-ldap3": { - "matrix-synapse-ldap3>=0.1": ["ldap_auth_provider"], - }, - "postgres": { - "psycopg2>=2.6": ["psycopg2"] - }, - "saml2": { - "pysaml2>=4.5.0": ["saml2"], - }, + "email.enable_notifs": ["Jinja2>=2.8", "bleach>=1.4.2"], + "matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"], + "postgres": ["psycopg2>=2.6"], + "saml2": ["pysaml2>=4.5.0"], + "url_preview": ["lxml>=3.5.0"], + "test": ["mock>=2.0"], } -def requirements(config=None, include_conditional=False): - reqs = 
REQUIREMENTS.copy() - if include_conditional: - for _, req in CONDITIONAL_REQUIREMENTS.items(): - reqs.update(req) - return reqs +def list_requirements(): + deps = set(REQUIREMENTS) + for opt in CONDITIONAL_REQUIREMENTS.values(): + deps = set(opt) | deps + return list(deps) -def github_link(project, version, egg): - return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg) +class DependencyException(Exception): + @property + def dependencies(self): + for i in self.args[0]: + yield '"' + i + '"' -DEPENDENCY_LINKS = { -} +def check_requirements(_get_distribution=get_distribution): + + deps_needed = [] + errors = [] -class MissingRequirementError(Exception): - def __init__(self, message, module_name, dependency): - super(MissingRequirementError, self).__init__(message) - self.module_name = module_name - self.dependency = dependency - - -def check_requirements(config=None): - """Checks that all the modules needed by synapse have been correctly - installed and are at the correct version""" - for dependency, module_requirements in ( - requirements(config, include_conditional=False).items()): - for module_requirement in module_requirements: - if ">=" in module_requirement: - module_name, required_version = module_requirement.split(">=") - version_test = ">=" - elif "==" in module_requirement: - module_name, required_version = module_requirement.split("==") - version_test = "==" - else: - module_name = module_requirement - version_test = None - - try: - module = __import__(module_name) - except ImportError: - logging.exception( - "Can't import %r which is part of %r", - module_name, dependency - ) - raise MissingRequirementError( - "Can't import %r which is part of %r" - % (module_name, dependency), module_name, dependency - ) - version = getattr(module, "__version__", None) - file_path = getattr(module, "__file__", None) - logger.info( - "Using %r version %r from %r to satisfy %r", - module_name, version, file_path, dependency + # Check the base dependencies exist -- they all must be installed. + for dependency in REQUIREMENTS: + try: + _get_distribution(dependency) + except VersionConflict as e: + deps_needed.append(dependency) + errors.append( + "Needed %s, got %s==%s" + % (dependency, e.dist.project_name, e.dist.version) ) + except DistributionNotFound: + deps_needed.append(dependency) + errors.append("Needed %s but it was not installed" % (dependency,)) - if version_test == ">=": - if version is None: - raise MissingRequirementError( - "Version of %r isn't set as __version__ of module %r" - % (dependency, module_name), module_name, dependency - ) - if LooseVersion(version) < LooseVersion(required_version): - raise MissingRequirementError( - "Version of %r in %r is too old. %r < %r" - % (dependency, file_path, version, required_version), - module_name, dependency - ) - elif version_test == "==": - if version is None: - raise MissingRequirementError( - "Version of %r isn't set as __version__ of module %r" - % (dependency, module_name), module_name, dependency - ) - if LooseVersion(version) != LooseVersion(required_version): - raise MissingRequirementError( - "Unexpected version of %r in %r. %r != %r" - % (dependency, file_path, version, required_version), - module_name, dependency - ) + # Check the optional dependencies are up to date. We allow them to not be + # installed. 
+ OPTS = sum(CONDITIONAL_REQUIREMENTS.values(), []) + for dependency in OPTS: + try: + _get_distribution(dependency) + except VersionConflict: + deps_needed.append(dependency) + errors.append("Needed %s but it was not installed" % (dependency,)) + except DistributionNotFound: + # If it's not found, we don't care + pass -def list_requirements(): - result = [] - linked = [] - for link in DEPENDENCY_LINKS.values(): - egg = link.split("#egg=")[1] - linked.append(egg.split('-')[0]) - result.append(link) - for requirement in requirements(include_conditional=True): - is_linked = False - for link in linked: - if requirement.replace('-', '_').startswith(link): - is_linked = True - if not is_linked: - result.append(requirement) - return result + if deps_needed: + for e in errors: + logging.exception(e) + + raise DependencyException(deps_needed) if __name__ == "__main__": import sys + sys.stdout.writelines(req + "\n" for req in list_requirements()) diff --git a/tox.ini b/tox.ini index f5f07cb690..a0f5486829 100644 --- a/tox.ini +++ b/tox.ini @@ -9,9 +9,6 @@ deps = junitxml coverage - # needed by some of the tests - lxml - # cyptography 2.2 requires setuptools >= 18.5 # # older versions of virtualenv (?) give us a virtualenv with the same @@ -33,6 +30,7 @@ setenv = [testenv] deps = {[base]deps} +extras = all whitelist_externals = sh -- cgit 1.5.1 From b7c021881293c60ad0874f6d15375a9b7a40c22c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 7 Jan 2019 10:14:31 +0000 Subject: Check jinja version for consent resource (#4327) * Raise a ConfigError if an invalid resource is specified * Require Jinja 2.9 for the consent resource * changelog --- changelog.d/4327.bugfix | 1 + synapse/app/__init__.py | 9 +------ synapse/config/server.py | 38 ++++++++++++++++++++++++++++- synapse/python_dependencies.py | 55 +++++++++++++++++++++++++++--------------- 4 files changed, 75 insertions(+), 28 deletions(-) create mode 100644 changelog.d/4327.bugfix (limited to 'synapse/app') diff --git a/changelog.d/4327.bugfix b/changelog.d/4327.bugfix new file mode 100644 index 0000000000..03bf05e05c --- /dev/null +++ b/changelog.d/4327.bugfix @@ -0,0 +1 @@ +Check jinja version for consent resource diff --git a/synapse/app/__init__.py b/synapse/app/__init__.py index 233bf43fc8..b45adafdd3 100644 --- a/synapse/app/__init__.py +++ b/synapse/app/__init__.py @@ -19,15 +19,8 @@ from synapse import python_dependencies # noqa: E402 sys.dont_write_bytecode = True - try: python_dependencies.check_requirements() except python_dependencies.DependencyException as e: - message = "\n".join([ - "Missing Requirements: %s" % (", ".join(e.dependencies),), - "To install run:", - " pip install --upgrade --force %s" % (" ".join(e.dependencies),), - "", - ]) - sys.stderr.writelines(message) + sys.stderr.writelines(e.message) sys.exit(1) diff --git a/synapse/config/server.py b/synapse/config/server.py index 120c2b81fc..fb57791098 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd -# Copyright 2017 New Vector Ltd +# Copyright 2017-2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -18,6 +18,7 @@ import logging import os.path from synapse.http.endpoint import parse_and_validate_server_name +from synapse.python_dependencies import DependencyException, check_requirements from ._base import Config, ConfigError @@ -204,6 +205,8 @@ class ServerConfig(Config): ] }) + _check_resource_config(self.listeners) + def default_config(self, server_name, data_dir_path, **kwargs): _, bind_port = parse_and_validate_server_name(server_name) if bind_port is not None: @@ -465,3 +468,36 @@ def _warn_if_webclient_configured(listeners): if name == 'webclient': logger.warning(NO_MORE_WEB_CLIENT_WARNING) return + + +KNOWN_RESOURCES = ( + 'client', + 'consent', + 'federation', + 'keys', + 'media', + 'metrics', + 'replication', + 'static', + 'webclient', +) + + +def _check_resource_config(listeners): + resource_names = set( + res_name + for listener in listeners + for res in listener.get("resources", []) + for res_name in res.get("names", []) + ) + + for resource in resource_names: + if resource not in KNOWN_RESOURCES: + raise ConfigError( + "Unknown listener resource '%s'" % (resource, ) + ) + if resource == "consent": + try: + check_requirements('resources.consent') + except DependencyException as e: + raise ConfigError(e.message) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 2c65ef5856..69c5f9fe2e 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -65,9 +65,13 @@ REQUIREMENTS = [ ] CONDITIONAL_REQUIREMENTS = { - "email.enable_notifs": ["Jinja2>=2.8", "bleach>=1.4.2"], + "email.enable_notifs": ["Jinja2>=2.9", "bleach>=1.4.2"], "matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"], "postgres": ["psycopg2>=2.6"], + + # ConsentResource uses select_autoescape, which arrived in jinja 2.9 + "resources.consent": ["Jinja2>=2.9"], + "saml2": ["pysaml2>=4.5.0"], "url_preview": ["lxml>=3.5.0"], "test": ["mock>=2.0"], @@ -83,19 +87,31 @@ def list_requirements(): class DependencyException(Exception): + @property + def message(self): + return "\n".join([ + "Missing Requirements: %s" % (", ".join(self.dependencies),), + "To install run:", + " pip install --upgrade --force %s" % (" ".join(self.dependencies),), + "", + ]) + @property def dependencies(self): for i in self.args[0]: yield '"' + i + '"' -def check_requirements(_get_distribution=get_distribution): - +def check_requirements(for_feature=None, _get_distribution=get_distribution): deps_needed = [] errors = [] - # Check the base dependencies exist -- they all must be installed. - for dependency in REQUIREMENTS: + if for_feature: + reqs = CONDITIONAL_REQUIREMENTS[for_feature] + else: + reqs = REQUIREMENTS + + for dependency in reqs: try: _get_distribution(dependency) except VersionConflict as e: @@ -108,23 +124,24 @@ def check_requirements(_get_distribution=get_distribution): deps_needed.append(dependency) errors.append("Needed %s but it was not installed" % (dependency,)) - # Check the optional dependencies are up to date. We allow them to not be - # installed. - OPTS = sum(CONDITIONAL_REQUIREMENTS.values(), []) - - for dependency in OPTS: - try: - _get_distribution(dependency) - except VersionConflict: - deps_needed.append(dependency) - errors.append("Needed %s but it was not installed" % (dependency,)) - except DistributionNotFound: - # If it's not found, we don't care - pass + if not for_feature: + # Check the optional dependencies are up to date. We allow them to not be + # installed. 
+ OPTS = sum(CONDITIONAL_REQUIREMENTS.values(), []) + + for dependency in OPTS: + try: + _get_distribution(dependency) + except VersionConflict: + deps_needed.append(dependency) + errors.append("Needed %s but it was not installed" % (dependency,)) + except DistributionNotFound: + # If it's not found, we don't care + pass if deps_needed: for e in errors: - logging.exception(e) + logging.error(e) raise DependencyException(deps_needed) -- cgit 1.5.1 From 4f8f41c824dc326903863625ce79e5386ad0f8e1 Mon Sep 17 00:00:00 2001 From: Jason Robinson Date: Thu, 17 Jan 2019 15:58:27 +0200 Subject: Make FederationReaderServer _http_listen use self.get_reactor() For all the homeserver classes, only the FrontendProxyServer passes its reactor when doing the http listen. Looking at previous PR's looks like this was introduced to make it possible to write a test, otherwise when you try to run a test with the test homeserver it tries to do a real bind to a port. Passing the reactor that the homeserver is instantiated with should probably be the right thing to do anyway? Signed-off-by: Jason Robinson --- synapse/app/federation_reader.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'synapse/app') diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 228a297fb8..ea594f0f1a 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -99,7 +99,8 @@ class FederationReaderServer(HomeServer): listener_config, root_resource, self.version_string, - ) + ), + reactor=self.get_reactor() ) logger.info("Synapse federation reader now listening on port %d", port) -- cgit 1.5.1 From a17bac171f301e54d6bd3d4c769f4f005c38f112 Mon Sep 17 00:00:00 2001 From: Jason Robinson Date: Fri, 18 Jan 2019 10:02:08 +0200 Subject: Make SynapseHomeServer _http_listener use self.get_reactor() For all the homeserver classes, only the FrontendProxyServer passes its reactor when doing the http listen. Looking at previous PR's looks like this was introduced to make it possible to write a test, otherwise when you try to run a test with the test homeserver it tries to do a real bind to a port. Passing the reactor that the homeserver is instantiated with should probably be the right thing to do anyway? Signed-off-by: Jason Robinson --- synapse/app/homeserver.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index f3ac3d19f0..5652c6201c 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -130,6 +130,7 @@ class SynapseHomeServer(HomeServer): self.version_string, ), self.tls_server_context_factory, + reactor=self.get_reactor(), ) else: @@ -142,7 +143,8 @@ class SynapseHomeServer(HomeServer): listener_config, root_resource, self.version_string, - ) + ), + reactor=self.get_reactor(), ) logger.info("Synapse now listening on port %d", port) -- cgit 1.5.1 From 82e13662c03a41c085e784a594b423711e0caffa Mon Sep 17 00:00:00 2001 From: Jason Robinson Date: Mon, 21 Jan 2019 01:54:43 +0200 Subject: Split federation OpenID userinfo endpoint out of the federation resource This allows the OpenID userinfo endpoint to be active even if the federation resource is not active. The OpenID userinfo endpoint is called by integration managers to verify user actions using the client API OpenID access token. Without this verification, the integration manager cannot know that the access token is valid. 
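For illustration, that check boils down to a single GET against the federation-side userinfo endpoint. A rough sketch of how an integration manager might perform it — the helper name, homeserver URL and error handling are assumptions for the example; only the endpoint path (the one requested by the tests later in this series) comes from this change::

    # Hypothetical verification helper; everything except the endpoint path is
    # illustrative.
    import json
    from urllib.parse import quote
    from urllib.request import urlopen

    def verify_openid_token(federation_base_url, access_token):
        # e.g. federation_base_url = "https://matrix.example.com:8448"
        url = "%s/_matrix/federation/v1/openid/userinfo?access_token=%s" % (
            federation_base_url,
            quote(access_token),
        )
        with urlopen(url) as response:
            body = json.load(response)
        # A valid token yields the Matrix ID it was issued for, e.g.
        # {"sub": "@alice:example.com"}; an invalid or missing token is
        # rejected with a 401, which urlopen surfaces as an HTTPError.
        return body["sub"]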
The OpenID userinfo endpoint will be loaded in the case that either "federation" or "openid" resource is defined. The new "openid" resource is defaulted to active in default configuration. Signed-off-by: Jason Robinson --- synapse/app/federation_reader.py | 7 ++ synapse/app/homeserver.py | 9 +++ synapse/config/server.py | 9 +-- synapse/federation/transport/server.py | 114 +++++++++++++++++++++------------ 4 files changed, 93 insertions(+), 46 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index ea594f0f1a..99e8a4cf6a 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -87,6 +87,13 @@ class FederationReaderServer(HomeServer): resources.update({ FEDERATION_PREFIX: TransportLayerServer(self), }) + if name == "openid" and "federation" not in res["names"]: + # Only load the openid resource separately if federation resource + # is not specified since federation resource includes openid + # resource. + resources.update({ + FEDERATION_PREFIX: TransportLayerServer(self, servlet_groups=["openid"]), + }) root_resource = create_resource_tree(resources, NoResource()) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 5652c6201c..0a924d7a80 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -95,6 +95,10 @@ class SynapseHomeServer(HomeServer): resources = {} for res in listener_config["resources"]: for name in res["names"]: + if name == "openid" and "federation" in res["names"]: + # Skip loading openid resource if federation is defined + # since federation resource will include openid + continue resources.update(self._configure_named_resource( name, res.get("compress", False), )) @@ -192,6 +196,11 @@ class SynapseHomeServer(HomeServer): FEDERATION_PREFIX: TransportLayerServer(self), }) + if name == "openid": + resources.update({ + FEDERATION_PREFIX: TransportLayerServer(self, servlet_groups=["openid"]), + }) + if name in ["static", "client"]: resources.update({ STATIC_PREFIX: File( diff --git a/synapse/config/server.py b/synapse/config/server.py index fb57791098..556f1efee5 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -151,7 +151,7 @@ class ServerConfig(Config): "compress": gzip_responses, }, { - "names": ["federation"], + "names": ["federation", "openid"], "compress": False, } ] @@ -170,7 +170,7 @@ class ServerConfig(Config): "compress": gzip_responses, }, { - "names": ["federation"], + "names": ["federation", "openid"], "compress": False, } ] @@ -328,7 +328,7 @@ class ServerConfig(Config): # that can do automatic compression. 
compress: true - - names: [federation] # Federation APIs + - names: [federation, openid] # Federation APIs compress: false # optional list of additional endpoints which can be loaded via @@ -350,7 +350,7 @@ class ServerConfig(Config): resources: - names: [client] compress: true - - names: [federation] + - names: [federation, openid] compress: false # Turn on the twisted ssh manhole service on localhost on the given @@ -477,6 +477,7 @@ KNOWN_RESOURCES = ( 'keys', 'media', 'metrics', + 'openid', 'replication', 'static', 'webclient', diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 4557a9e66e..49b80beecb 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -43,9 +43,10 @@ logger = logging.getLogger(__name__) class TransportLayerServer(JsonResource): """Handles incoming federation HTTP requests""" - def __init__(self, hs): + def __init__(self, hs, servlet_groups=None): self.hs = hs self.clock = hs.get_clock() + self.servlet_groups = servlet_groups super(TransportLayerServer, self).__init__(hs, canonical_json=False) @@ -67,6 +68,7 @@ class TransportLayerServer(JsonResource): resource=self, ratelimiter=self.ratelimiter, authenticator=self.authenticator, + servlet_groups=self.servlet_groups, ) @@ -1308,10 +1310,12 @@ FEDERATION_SERVLET_CLASSES = ( FederationClientKeysClaimServlet, FederationThirdPartyInviteExchangeServlet, On3pidBindServlet, - OpenIdUserInfo, FederationVersionServlet, ) +OPENID_SERVLET_CLASSES = ( + OpenIdUserInfo, +) ROOM_LIST_CLASSES = ( PublicRoomList, @@ -1350,44 +1354,70 @@ GROUP_ATTESTATION_SERVLET_CLASSES = ( FederationGroupsRenewAttestaionServlet, ) +DEFAULT_SERVLET_GROUPS = ( + "federation", + "room_list", + "group_server", + "group_local", + "group_attestation", + "openid", +) + -def register_servlets(hs, resource, authenticator, ratelimiter): - for servletclass in FEDERATION_SERVLET_CLASSES: - servletclass( - handler=hs.get_federation_server(), - authenticator=authenticator, - ratelimiter=ratelimiter, - server_name=hs.hostname, - ).register(resource) - - for servletclass in ROOM_LIST_CLASSES: - servletclass( - handler=hs.get_room_list_handler(), - authenticator=authenticator, - ratelimiter=ratelimiter, - server_name=hs.hostname, - ).register(resource) - - for servletclass in GROUP_SERVER_SERVLET_CLASSES: - servletclass( - handler=hs.get_groups_server_handler(), - authenticator=authenticator, - ratelimiter=ratelimiter, - server_name=hs.hostname, - ).register(resource) - - for servletclass in GROUP_LOCAL_SERVLET_CLASSES: - servletclass( - handler=hs.get_groups_local_handler(), - authenticator=authenticator, - ratelimiter=ratelimiter, - server_name=hs.hostname, - ).register(resource) - - for servletclass in GROUP_ATTESTATION_SERVLET_CLASSES: - servletclass( - handler=hs.get_groups_attestation_renewer(), - authenticator=authenticator, - ratelimiter=ratelimiter, - server_name=hs.hostname, - ).register(resource) +def register_servlets(hs, resource, authenticator, ratelimiter, servlet_groups=None): + if not servlet_groups: + servlet_groups = DEFAULT_SERVLET_GROUPS + + if "federation" in servlet_groups: + for servletclass in FEDERATION_SERVLET_CLASSES: + servletclass( + handler=hs.get_federation_server(), + authenticator=authenticator, + ratelimiter=ratelimiter, + server_name=hs.hostname, + ).register(resource) + + if "openid" in servlet_groups: + for servletclass in OPENID_SERVLET_CLASSES: + servletclass( + handler=hs.get_federation_server(), + authenticator=authenticator, + 
ratelimiter=ratelimiter, + server_name=hs.hostname, + ).register(resource) + + if "room_list" in servlet_groups: + for servletclass in ROOM_LIST_CLASSES: + servletclass( + handler=hs.get_room_list_handler(), + authenticator=authenticator, + ratelimiter=ratelimiter, + server_name=hs.hostname, + ).register(resource) + + if "group_server" in servlet_groups: + for servletclass in GROUP_SERVER_SERVLET_CLASSES: + servletclass( + handler=hs.get_groups_server_handler(), + authenticator=authenticator, + ratelimiter=ratelimiter, + server_name=hs.hostname, + ).register(resource) + + if "group_local" in servlet_groups: + for servletclass in GROUP_LOCAL_SERVLET_CLASSES: + servletclass( + handler=hs.get_groups_local_handler(), + authenticator=authenticator, + ratelimiter=ratelimiter, + server_name=hs.hostname, + ).register(resource) + + if "group_attestation" in servlet_groups: + for servletclass in GROUP_ATTESTATION_SERVLET_CLASSES: + servletclass( + handler=hs.get_groups_attestation_renewer(), + authenticator=authenticator, + ratelimiter=ratelimiter, + server_name=hs.hostname, + ).register(resource) -- cgit 1.5.1 From 6129e52f437c2e03b711453434924e170f3d11bf Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 23 Jan 2019 19:39:06 +1100 Subject: Support ACME for certificate provisioning (#4384) --- changelog.d/4384.feature | 1 + scripts-dev/build_debian_packages | 2 +- synapse/app/homeserver.py | 56 ++++++++++++--- synapse/config/_base.py | 4 +- synapse/config/tls.py | 115 ++++++++++++++++++++++------- synapse/handlers/acme.py | 147 ++++++++++++++++++++++++++++++++++++++ synapse/python_dependencies.py | 4 ++ synapse/server.py | 5 ++ 8 files changed, 298 insertions(+), 36 deletions(-) create mode 100644 changelog.d/4384.feature create mode 100644 synapse/handlers/acme.py (limited to 'synapse/app') diff --git a/changelog.d/4384.feature b/changelog.d/4384.feature new file mode 100644 index 0000000000..daedcd58c4 --- /dev/null +++ b/changelog.d/4384.feature @@ -0,0 +1 @@ +Synapse can now automatically provision TLS certificates via ACME (the protocol used by CAs like Let's Encrypt). diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages index 577d93e6f6..6b9be99060 100755 --- a/scripts-dev/build_debian_packages +++ b/scripts-dev/build_debian_packages @@ -10,12 +10,12 @@ # can be passed on the commandline for debugging. import argparse -from concurrent.futures import ThreadPoolExecutor import os import signal import subprocess import sys import threading +from concurrent.futures import ThreadPoolExecutor DISTS = ( "debian:stretch", diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index f3ac3d19f0..ffc49d77cc 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -13,10 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ import gc import logging import os import sys +import traceback from six import iteritems @@ -324,17 +326,12 @@ def setup(config_options): events.USE_FROZEN_DICTS = config.use_frozen_dicts - tls_server_context_factory = context_factory.ServerContextFactory(config) - tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) - database_engine = create_engine(config.database_config) config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection hs = SynapseHomeServer( config.server_name, db_config=config.database_config, - tls_server_context_factory=tls_server_context_factory, - tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, @@ -361,12 +358,53 @@ def setup(config_options): logger.info("Database prepared in %s.", config.database_config['name']) hs.setup() - hs.start_listening() + @defer.inlineCallbacks def start(): - hs.get_pusherpool().start() - hs.get_datastore().start_profiling() - hs.get_datastore().start_doing_background_updates() + try: + # Check if the certificate is still valid. + cert_days_remaining = hs.config.is_disk_cert_valid() + + if hs.config.acme_enabled: + # If ACME is enabled, we might need to provision a certificate + # before starting. + acme = hs.get_acme_handler() + + # Start up the webservices which we will respond to ACME + # challenges with. + yield acme.start_listening() + + # We want to reprovision if cert_days_remaining is None (meaning no + # certificate exists), or the days remaining number it returns + # is less than our re-registration threshold. + if (cert_days_remaining is None) or ( + not cert_days_remaining > hs.config.acme_reprovision_threshold + ): + yield acme.provision_certificate() + + # Read the certificate from disk and build the context factories for + # TLS. + hs.config.read_certificate_from_disk() + hs.tls_server_context_factory = context_factory.ServerContextFactory(config) + hs.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( + config + ) + + # It is now safe to start your Synapse. + hs.start_listening() + hs.get_pusherpool().start() + hs.get_datastore().start_profiling() + hs.get_datastore().start_doing_background_updates() + except Exception as e: + # If a DeferredList failed (like in listening on the ACME listener), + # we need to print the subfailure explicitly. + if isinstance(e, defer.FirstError): + e.subFailure.printTraceback(sys.stderr) + sys.exit(1) + + # Something else went wrong when starting. Print it and bail out. 
+ traceback.print_exc(file=sys.stderr) + sys.exit(1) reactor.callWhenRunning(start) diff --git a/synapse/config/_base.py b/synapse/config/_base.py index fd2d6d52ef..5858fb92b4 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -367,7 +367,7 @@ class Config(object): if not keys_directory: keys_directory = os.path.dirname(config_files[-1]) - config_dir_path = os.path.abspath(keys_directory) + self.config_dir_path = os.path.abspath(keys_directory) specified_config = {} for config_file in config_files: @@ -379,7 +379,7 @@ class Config(object): server_name = specified_config["server_name"] config_string = self.generate_config( - config_dir_path=config_dir_path, + config_dir_path=self.config_dir_path, data_dir_path=os.getcwd(), server_name=server_name, generate_secrets=False, diff --git a/synapse/config/tls.py b/synapse/config/tls.py index bb8952c672..a75e233aa0 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -13,60 +13,110 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging import os +from datetime import datetime from hashlib import sha256 from unpaddedbase64 import encode_base64 from OpenSSL import crypto -from ._base import Config +from synapse.config._base import Config + +logger = logging.getLogger() class TlsConfig(Config): def read_config(self, config): - self.tls_certificate = self.read_tls_certificate( - config.get("tls_certificate_path") - ) - self.tls_certificate_file = config.get("tls_certificate_path") + acme_config = config.get("acme", {}) + self.acme_enabled = acme_config.get("enabled", False) + self.acme_url = acme_config.get( + "url", "https://acme-v01.api.letsencrypt.org/directory" + ) + self.acme_port = acme_config.get("port", 8449) + self.acme_bind_addresses = acme_config.get("bind_addresses", ["127.0.0.1"]) + self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30) + + self.tls_certificate_file = os.path.abspath(config.get("tls_certificate_path")) + self.tls_private_key_file = os.path.abspath(config.get("tls_private_key_path")) + self._original_tls_fingerprints = config["tls_fingerprints"] + self.tls_fingerprints = list(self._original_tls_fingerprints) self.no_tls = config.get("no_tls", False) - if self.no_tls: - self.tls_private_key = None - else: - self.tls_private_key = self.read_tls_private_key( - config.get("tls_private_key_path") - ) + # This config option applies to non-federation HTTP clients + # (e.g. for talking to recaptcha, identity servers, and such) + # It should never be used in production, and is intended for + # use only when running tests. + self.use_insecure_ssl_client_just_for_testing_do_not_use = config.get( + "use_insecure_ssl_client_just_for_testing_do_not_use" + ) + + self.tls_certificate = None + self.tls_private_key = None + + def is_disk_cert_valid(self): + """ + Is the certificate we have on disk valid, and if so, for how long? + + Returns: + int: Days remaining of certificate validity. + None: No certificate exists. 
+ """ + if not os.path.exists(self.tls_certificate_file): + return None + + try: + with open(self.tls_certificate_file, 'rb') as f: + cert_pem = f.read() + except Exception: + logger.exception("Failed to read existing certificate off disk!") + raise + + try: + tls_certificate = crypto.load_certificate(crypto.FILETYPE_PEM, cert_pem) + except Exception: + logger.exception("Failed to parse existing certificate off disk!") + raise + + # YYYYMMDDhhmmssZ -- in UTC + expires_on = datetime.strptime( + tls_certificate.get_notAfter().decode('ascii'), "%Y%m%d%H%M%SZ" + ) + now = datetime.utcnow() + days_remaining = (expires_on - now).days + return days_remaining - self.tls_fingerprints = config["tls_fingerprints"] + def read_certificate_from_disk(self): + """ + Read the certificates from disk. + """ + self.tls_certificate = self.read_tls_certificate(self.tls_certificate_file) + + if not self.no_tls: + self.tls_private_key = self.read_tls_private_key(self.tls_private_key_file) + + self.tls_fingerprints = list(self._original_tls_fingerprints) # Check that our own certificate is included in the list of fingerprints # and include it if it is not. x509_certificate_bytes = crypto.dump_certificate( - crypto.FILETYPE_ASN1, - self.tls_certificate + crypto.FILETYPE_ASN1, self.tls_certificate ) sha256_fingerprint = encode_base64(sha256(x509_certificate_bytes).digest()) sha256_fingerprints = set(f["sha256"] for f in self.tls_fingerprints) if sha256_fingerprint not in sha256_fingerprints: self.tls_fingerprints.append({u"sha256": sha256_fingerprint}) - # This config option applies to non-federation HTTP clients - # (e.g. for talking to recaptcha, identity servers, and such) - # It should never be used in production, and is intended for - # use only when running tests. - self.use_insecure_ssl_client_just_for_testing_do_not_use = config.get( - "use_insecure_ssl_client_just_for_testing_do_not_use" - ) - def default_config(self, config_dir_path, server_name, **kwargs): base_key_name = os.path.join(config_dir_path, server_name) tls_certificate_path = base_key_name + ".tls.crt" tls_private_key_path = base_key_name + ".tls.key" - return """\ + return ( + """\ # PEM encoded X509 certificate for TLS. # You can replace the self-signed certificate that synapse # autogenerates on launch with your own SSL certificate + key pair @@ -107,7 +157,24 @@ class TlsConfig(Config): # tls_fingerprints: [] # tls_fingerprints: [{"sha256": ""}] - """ % locals() + + ## Support for ACME certificate auto-provisioning. + # acme: + # enabled: false + ## ACME path. + ## If you only want to test, use the staging url: + ## https://acme-staging.api.letsencrypt.org/directory + # url: 'https://acme-v01.api.letsencrypt.org/directory' + ## Port number (to listen for the HTTP-01 challenge). + ## Using port 80 requires utilising something like authbind, or proxying to it. + # port: 8449 + ## Hosts to bind to. + # bind_addresses: ['127.0.0.1'] + ## How many days remaining on a certificate before it is renewed. + # reprovision_threshold: 30 + """ + % locals() + ) def read_tls_certificate(self, cert_path): cert_pem = self.read_file(cert_path, "tls_certificate") diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py new file mode 100644 index 0000000000..73ea7ed018 --- /dev/null +++ b/synapse/handlers/acme.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +import attr +from zope.interface import implementer + +from twisted.internet import defer +from twisted.internet.endpoints import serverFromString +from twisted.python.filepath import FilePath +from twisted.python.url import URL +from twisted.web import server, static +from twisted.web.resource import Resource + +logger = logging.getLogger(__name__) + +try: + from txacme.interfaces import ICertificateStore + + @attr.s + @implementer(ICertificateStore) + class ErsatzStore(object): + """ + A store that only stores in memory. + """ + + certs = attr.ib(default=attr.Factory(dict)) + + def store(self, server_name, pem_objects): + self.certs[server_name] = [o.as_bytes() for o in pem_objects] + return defer.succeed(None) + + +except ImportError: + # txacme is missing + pass + + +class AcmeHandler(object): + def __init__(self, hs): + self.hs = hs + self.reactor = hs.get_reactor() + + @defer.inlineCallbacks + def start_listening(self): + + # Configure logging for txacme, if you need to debug + # from eliot import add_destinations + # from eliot.twisted import TwistedDestination + # + # add_destinations(TwistedDestination()) + + from txacme.challenges import HTTP01Responder + from txacme.service import AcmeIssuingService + from txacme.endpoint import load_or_create_client_key + from txacme.client import Client + from josepy.jwa import RS256 + + self._store = ErsatzStore() + responder = HTTP01Responder() + + self._issuer = AcmeIssuingService( + cert_store=self._store, + client_creator=( + lambda: Client.from_url( + reactor=self.reactor, + url=URL.from_text(self.hs.config.acme_url), + key=load_or_create_client_key( + FilePath(self.hs.config.config_dir_path) + ), + alg=RS256, + ) + ), + clock=self.reactor, + responders=[responder], + ) + + well_known = Resource() + well_known.putChild(b'acme-challenge', responder.resource) + responder_resource = Resource() + responder_resource.putChild(b'.well-known', well_known) + responder_resource.putChild(b'check', static.Data(b'OK', b'text/plain')) + + srv = server.Site(responder_resource) + + listeners = [] + + for host in self.hs.config.acme_bind_addresses: + logger.info( + "Listening for ACME requests on %s:%s", host, self.hs.config.acme_port + ) + endpoint = serverFromString( + self.reactor, "tcp:%s:interface=%s" % (self.hs.config.acme_port, host) + ) + listeners.append(endpoint.listen(srv)) + + # Make sure we are registered to the ACME server. There's no public API + # for this, it is usually triggered by startService, but since we don't + # want it to control where we save the certificates, we have to reach in + # and trigger the registration machinery ourselves. + self._issuer._registered = False + yield self._issuer._ensure_registered() + + # Return a Deferred that will fire when all the servers have started up. 
+ yield defer.DeferredList(listeners, fireOnOneErrback=True, consumeErrors=True) + + @defer.inlineCallbacks + def provision_certificate(self): + + logger.warning("Reprovisioning %s", self.hs.hostname) + + try: + yield self._issuer.issue_cert(self.hs.hostname) + except Exception: + logger.exception("Fail!") + raise + logger.warning("Reprovisioned %s, saving.", self.hs.hostname) + cert_chain = self._store.certs[self.hs.hostname] + + try: + with open(self.hs.config.tls_private_key_file, "wb") as private_key_file: + for x in cert_chain: + if x.startswith(b"-----BEGIN RSA PRIVATE KEY-----"): + private_key_file.write(x) + + with open(self.hs.config.tls_certificate_file, "wb") as certificate_file: + for x in cert_chain: + if x.startswith(b"-----BEGIN CERTIFICATE-----"): + certificate_file.write(x) + except Exception: + logger.exception("Failed saving!") + raise + + defer.returnValue(True) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 882e844eb1..756721e304 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -79,6 +79,10 @@ CONDITIONAL_REQUIREMENTS = { # ConsentResource uses select_autoescape, which arrived in jinja 2.9 "resources.consent": ["Jinja2>=2.9"], + # ACME support is required to provision TLS certificates from authorities + # that use the protocol, such as Let's Encrypt. + "acme": ["txacme>=0.9.2"], + "saml2": ["pysaml2>=4.5.0"], "url_preview": ["lxml>=3.5.0"], "test": ["mock>=2.0"], diff --git a/synapse/server.py b/synapse/server.py index 9985687b95..c8914302cf 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -46,6 +46,7 @@ from synapse.federation.transport.client import TransportLayerClient from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer from synapse.groups.groups_server import GroupsServerHandler from synapse.handlers import Handlers +from synapse.handlers.acme import AcmeHandler from synapse.handlers.appservice import ApplicationServicesHandler from synapse.handlers.auth import AuthHandler, MacaroonGenerator from synapse.handlers.deactivate_account import DeactivateAccountHandler @@ -129,6 +130,7 @@ class HomeServer(object): 'sync_handler', 'typing_handler', 'room_list_handler', + 'acme_handler', 'auth_handler', 'device_handler', 'e2e_keys_handler', @@ -310,6 +312,9 @@ class HomeServer(object): def build_e2e_room_keys_handler(self): return E2eRoomKeysHandler(self) + def build_acme_handler(self): + return AcmeHandler(self) + def build_application_service_api(self): return ApplicationServiceApi(self) -- cgit 1.5.1 From 6f680241bd7f53c3a3f912c257d2bfa437dfd486 Mon Sep 17 00:00:00 2001 From: Jason Robinson Date: Wed, 23 Jan 2019 10:53:48 +0200 Subject: Fix flake8 issues Signed-off-by: Jason Robinson --- synapse/app/federation_reader.py | 5 ++++- synapse/config/server.py | 2 +- tests/app/test_openid_listener.py | 10 ++++++++-- 3 files changed, 13 insertions(+), 4 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 99e8a4cf6a..2c99ce8c64 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -92,7 +92,10 @@ class FederationReaderServer(HomeServer): # is not specified since federation resource includes openid # resource. 
resources.update({ - FEDERATION_PREFIX: TransportLayerServer(self, servlet_groups=["openid"]), + FEDERATION_PREFIX: TransportLayerServer( + self, + servlet_groups=["openid"], + ), }) root_resource = create_resource_tree(resources, NoResource()) diff --git a/synapse/config/server.py b/synapse/config/server.py index eebbfccafe..4eefd06f4a 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -330,7 +330,7 @@ class ServerConfig(Config): - names: [federation] # Federation APIs compress: false - + # # If federation is disabled synapse can still expose the open ID endpoint # # to allow integrations to authenticate users # - names: [openid] diff --git a/tests/app/test_openid_listener.py b/tests/app/test_openid_listener.py index 46a3b61a3c..590abc1e92 100644 --- a/tests/app/test_openid_listener.py +++ b/tests/app/test_openid_listener.py @@ -61,7 +61,10 @@ class FederationReaderOpenIDListenerTests(HomeserverTestCase): return raise - request, channel = self.make_request("GET", "/_matrix/federation/v1/openid/userinfo") + request, channel = self.make_request( + "GET", + "/_matrix/federation/v1/openid/userinfo", + ) self.render(request) self.assertEqual(channel.code, 401) @@ -107,7 +110,10 @@ class SynapseHomeserverOpenIDListenerTests(HomeserverTestCase): return raise - request, channel = self.make_request("GET", "/_matrix/federation/v1/openid/userinfo") + request, channel = self.make_request( + "GET", + "/_matrix/federation/v1/openid/userinfo", + ) self.render(request) self.assertEqual(channel.code, 401) -- cgit 1.5.1 From 5d976c0c7ce210916ec28b2112fe4c17e42694e8 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 28 Jan 2019 17:18:33 +0000 Subject: Fix worker TLS (#4492) * load cert * changelog * fix --- changelog.d/4492.feature | 1 + synapse/app/client_reader.py | 12 ++++++------ synapse/app/event_creator.py | 12 ++++++------ synapse/app/federation_reader.py | 12 ++++++------ synapse/app/federation_sender.py | 18 +++++++++--------- synapse/app/frontend_proxy.py | 12 ++++++------ synapse/app/media_repository.py | 12 ++++++------ synapse/app/user_dir.py | 18 +++++++++--------- 8 files changed, 49 insertions(+), 48 deletions(-) create mode 100644 changelog.d/4492.feature (limited to 'synapse/app') diff --git a/changelog.d/4492.feature b/changelog.d/4492.feature new file mode 100644 index 0000000000..c7f595cec2 --- /dev/null +++ b/changelog.d/4492.feature @@ -0,0 +1 @@ + Synapse can now automatically provision TLS certificates via ACME (the protocol used by CAs like Let's Encrypt). 
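The per-worker hunks that follow all make the same change, so a condensed, hedged sketch may help when reading them: certificate loading and TLS context-factory construction move out of module start-up and into a callback that only runs once the reactor is up. The worker instance and helper name here are stand-ins, not code from any single file below::

    # Simplified composite of the worker diffs that follow; `ss` is the
    # worker's HomeServer instance and `config` its parsed configuration.
    from twisted.internet import reactor

    from synapse.crypto import context_factory

    def start_worker(ss, config):
        ss.setup()

        def start():
            # Only read the certificate and build the TLS context factories
            # once we are about to run, so they match what is on disk.
            ss.config.read_certificate_from_disk()
            ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
            ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
                config
            )
            ss.start_listening(config.worker_listeners)
            ss.get_datastore().start_profiling()

        reactor.callWhenRunning(start)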
diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index 76aed8c60a..f8a417cb60 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -164,23 +164,23 @@ def start(config_options): database_engine = create_engine(config.database_config) - tls_server_context_factory = context_factory.ServerContextFactory(config) - tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) - ss = ClientReaderServer( config.server_name, db_config=config.database_config, - tls_server_context_factory=tls_server_context_factory, - tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, ) ss.setup() - ss.start_listening(config.worker_listeners) def start(): + ss.config.read_certificate_from_disk() + ss.tls_server_context_factory = context_factory.ServerContextFactory(config) + ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( + config + ) + ss.start_listening(config.worker_listeners) ss.get_datastore().start_profiling() reactor.callWhenRunning(start) diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py index e4a68715aa..656e0edc0f 100644 --- a/synapse/app/event_creator.py +++ b/synapse/app/event_creator.py @@ -185,23 +185,23 @@ def start(config_options): database_engine = create_engine(config.database_config) - tls_server_context_factory = context_factory.ServerContextFactory(config) - tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) - ss = EventCreatorServer( config.server_name, db_config=config.database_config, - tls_server_context_factory=tls_server_context_factory, - tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, ) ss.setup() - ss.start_listening(config.worker_listeners) def start(): + ss.config.read_certificate_from_disk() + ss.tls_server_context_factory = context_factory.ServerContextFactory(config) + ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( + config + ) + ss.start_listening(config.worker_listeners) ss.get_datastore().start_profiling() reactor.callWhenRunning(start) diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 228a297fb8..3de2715132 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -151,23 +151,23 @@ def start(config_options): database_engine = create_engine(config.database_config) - tls_server_context_factory = context_factory.ServerContextFactory(config) - tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) - ss = FederationReaderServer( config.server_name, db_config=config.database_config, - tls_server_context_factory=tls_server_context_factory, - tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, ) ss.setup() - ss.start_listening(config.worker_listeners) def start(): + ss.config.read_certificate_from_disk() + ss.tls_server_context_factory = context_factory.ServerContextFactory(config) + ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( + config + ) + ss.start_listening(config.worker_listeners) ss.get_datastore().start_profiling() reactor.callWhenRunning(start) diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index e9a99d76e1..d944e0517f 100644 --- 
a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -183,24 +183,24 @@ def start(config_options): # Force the pushers to start since they will be disabled in the main config config.send_federation = True - tls_server_context_factory = context_factory.ServerContextFactory(config) - tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) - - ps = FederationSenderServer( + ss = FederationSenderServer( config.server_name, db_config=config.database_config, - tls_server_context_factory=tls_server_context_factory, - tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, ) - ps.setup() - ps.start_listening(config.worker_listeners) + ss.setup() def start(): - ps.get_datastore().start_profiling() + ss.config.read_certificate_from_disk() + ss.tls_server_context_factory = context_factory.ServerContextFactory(config) + ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( + config + ) + ss.start_listening(config.worker_listeners) + ss.get_datastore().start_profiling() reactor.callWhenRunning(start) _base.start_worker_reactor("synapse-federation-sender", config) diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index f5c61dec5b..d9ef6edc3c 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -241,23 +241,23 @@ def start(config_options): database_engine = create_engine(config.database_config) - tls_server_context_factory = context_factory.ServerContextFactory(config) - tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) - ss = FrontendProxyServer( config.server_name, db_config=config.database_config, - tls_server_context_factory=tls_server_context_factory, - tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, ) ss.setup() - ss.start_listening(config.worker_listeners) def start(): + ss.config.read_certificate_from_disk() + ss.tls_server_context_factory = context_factory.ServerContextFactory(config) + ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( + config + ) + ss.start_listening(config.worker_listeners) ss.get_datastore().start_profiling() reactor.callWhenRunning(start) diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py index acc0487adc..4ecf64031b 100644 --- a/synapse/app/media_repository.py +++ b/synapse/app/media_repository.py @@ -151,23 +151,23 @@ def start(config_options): database_engine = create_engine(config.database_config) - tls_server_context_factory = context_factory.ServerContextFactory(config) - tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) - ss = MediaRepositoryServer( config.server_name, db_config=config.database_config, - tls_server_context_factory=tls_server_context_factory, - tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, ) ss.setup() - ss.start_listening(config.worker_listeners) def start(): + ss.config.read_certificate_from_disk() + ss.tls_server_context_factory = context_factory.ServerContextFactory(config) + ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( + config + ) + ss.start_listening(config.worker_listeners) ss.get_datastore().start_profiling() reactor.callWhenRunning(start) diff --git 
a/synapse/app/user_dir.py b/synapse/app/user_dir.py index 0a5f62b509..176d55a783 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -211,24 +211,24 @@ def start(config_options): # Force the pushers to start since they will be disabled in the main config config.update_user_directory = True - tls_server_context_factory = context_factory.ServerContextFactory(config) - tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) - - ps = UserDirectoryServer( + ss = UserDirectoryServer( config.server_name, db_config=config.database_config, - tls_server_context_factory=tls_server_context_factory, - tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, ) - ps.setup() - ps.start_listening(config.worker_listeners) + ss.setup() def start(): - ps.get_datastore().start_profiling() + ss.config.read_certificate_from_disk() + ss.tls_server_context_factory = context_factory.ServerContextFactory(config) + ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( + config + ) + ss.start_listening(config.worker_listeners) + ss.get_datastore().start_profiling() reactor.callWhenRunning(start) -- cgit 1.5.1 From f6813919e8bc8245e5353700bfd4bef338ecbce6 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 30 Jan 2019 11:00:02 +0000 Subject: SIGHUP for TLS cert reloading (#4495) --- .gitignore | 1 + changelog.d/4495.feature | 1 + synapse/app/_base.py | 29 ++++++++++++++++++++------- synapse/app/homeserver.py | 51 ++++++++++++++++++++++++++++++++++++++++++----- synapse/config/logger.py | 19 ++++++++++-------- 5 files changed, 81 insertions(+), 20 deletions(-) create mode 100644 changelog.d/4495.feature (limited to 'synapse/app') diff --git a/.gitignore b/.gitignore index 1033124f1d..37f0028d66 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ dbs/ dist/ docs/build/ *.egg-info +pip-wheel-metadata/ cmdclient_config.json homeserver*.db diff --git a/changelog.d/4495.feature b/changelog.d/4495.feature new file mode 100644 index 0000000000..fc2b5daf63 --- /dev/null +++ b/changelog.d/4495.feature @@ -0,0 +1 @@ +Synapse will now reload TLS certificates from disk upon SIGHUP. 
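Before the hunks below, a minimal sketch of the fan-out pattern they introduce: callers append callbacks to a list, and a single process-wide SIGHUP handler invokes them all. The certificate-refresh callback named here is a placeholder for the real refresh_certificate() added to homeserver.py::

    # Stand-alone illustration of the SIGHUP plumbing; reload_certificates is
    # a stand-in for the refresh_certificate() hook in the diff below.
    import signal

    sighup_callbacks = []

    def handle_sighup(*args, **kwargs):
        for callback in sighup_callbacks:
            callback(*args, **kwargs)

    # Not every platform provides SIGHUP (Windows does not), hence the guard.
    if hasattr(signal, "SIGHUP"):
        signal.signal(signal.SIGHUP, handle_sighup)

    def reload_certificates(*args, **kwargs):
        print("re-reading TLS certificates from disk")

    sighup_callbacks.append(reload_certificates)

With this wiring in place, `kill -HUP <pid>` makes the running process re-read its TLS certificates without a restart, which is what the changelog entry above promises.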
diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 18584226e9..3840c663ab 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -143,6 +143,9 @@ def listen_metrics(bind_addresses, port): def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50): """ Create a TCP socket for a port and several addresses + + Returns: + list (empty) """ for address in bind_addresses: try: @@ -155,25 +158,37 @@ def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50): except error.CannotListenError as e: check_bind_error(e, address, bind_addresses) + logger.info("Synapse now listening on TCP port %d", port) + return [] + def listen_ssl( bind_addresses, port, factory, context_factory, reactor=reactor, backlog=50 ): """ - Create an SSL socket for a port and several addresses + Create an TLS-over-TCP socket for a port and several addresses + + Returns: + list of twisted.internet.tcp.Port listening for TLS connections """ + r = [] for address in bind_addresses: try: - reactor.listenSSL( - port, - factory, - context_factory, - backlog, - address + r.append( + reactor.listenSSL( + port, + factory, + context_factory, + backlog, + address + ) ) except error.CannotListenError as e: check_bind_error(e, address, bind_addresses) + logger.info("Synapse now listening on port %d (TLS)", port) + return r + def check_bind_error(e, address, bind_addresses): """ diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index ffc49d77cc..019b91576f 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -17,6 +17,7 @@ import gc import logging import os +import signal import sys import traceback @@ -27,6 +28,7 @@ from prometheus_client import Gauge from twisted.application import service from twisted.internet import defer, reactor +from twisted.protocols.tls import TLSMemoryBIOFactory from twisted.web.resource import EncodingResourceWrapper, NoResource from twisted.web.server import GzipEncoderFactory from twisted.web.static import File @@ -84,6 +86,7 @@ def gz_wrap(r): class SynapseHomeServer(HomeServer): DATASTORE_CLASS = DataStore + _listening_services = [] def _listener_http(self, config, listener_config): port = listener_config["port"] @@ -121,7 +124,7 @@ class SynapseHomeServer(HomeServer): root_resource = create_resource_tree(resources, root_resource) if tls: - listen_ssl( + return listen_ssl( bind_addresses, port, SynapseSite( @@ -135,7 +138,7 @@ class SynapseHomeServer(HomeServer): ) else: - listen_tcp( + return listen_tcp( bind_addresses, port, SynapseSite( @@ -146,7 +149,6 @@ class SynapseHomeServer(HomeServer): self.version_string, ) ) - logger.info("Synapse now listening on port %d", port) def _configure_named_resource(self, name, compress=False): """Build a resource map for a named resource @@ -242,7 +244,9 @@ class SynapseHomeServer(HomeServer): for listener in config.listeners: if listener["type"] == "http": - self._listener_http(config, listener) + self._listening_services.extend( + self._listener_http(config, listener) + ) elif listener["type"] == "manhole": listen_tcp( listener["bind_addresses"], @@ -322,7 +326,19 @@ def setup(config_options): # generating config files and shouldn't try to continue. 
sys.exit(0) - synapse.config.logger.setup_logging(config, use_worker_options=False) + sighup_callbacks = [] + synapse.config.logger.setup_logging( + config, + use_worker_options=False, + register_sighup=sighup_callbacks.append + ) + + def handle_sighup(*args, **kwargs): + for i in sighup_callbacks: + i(*args, **kwargs) + + if hasattr(signal, "SIGHUP"): + signal.signal(signal.SIGHUP, handle_sighup) events.USE_FROZEN_DICTS = config.use_frozen_dicts @@ -359,6 +375,31 @@ def setup(config_options): hs.setup() + def refresh_certificate(*args): + """ + Refresh the TLS certificates that Synapse is using by re-reading them + from disk and updating the TLS context factories to use them. + """ + logging.info("Reloading certificate from disk...") + hs.config.read_certificate_from_disk() + hs.tls_server_context_factory = context_factory.ServerContextFactory(config) + hs.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( + config + ) + logging.info("Certificate reloaded.") + + logging.info("Updating context factories...") + for i in hs._listening_services: + if isinstance(i.factory, TLSMemoryBIOFactory): + i.factory = TLSMemoryBIOFactory( + hs.tls_server_context_factory, + False, + i.factory.wrappedFactory + ) + logging.info("Context factories updated.") + + sighup_callbacks.append(refresh_certificate) + @defer.inlineCallbacks def start(): try: diff --git a/synapse/config/logger.py b/synapse/config/logger.py index f87efecbf8..a795e39b1a 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -127,7 +127,7 @@ class LoggingConfig(Config): ) -def setup_logging(config, use_worker_options=False): +def setup_logging(config, use_worker_options=False, register_sighup=None): """ Set up python logging Args: @@ -136,7 +136,16 @@ def setup_logging(config, use_worker_options=False): use_worker_options (bool): True to use 'worker_log_config' and 'worker_log_file' options instead of 'log_config' and 'log_file'. + + register_sighup (func | None): Function to call to register a + sighup handler. """ + if not register_sighup: + if getattr(signal, "SIGHUP"): + register_sighup = lambda x: signal.signal(signal.SIGHUP, x) + else: + register_sighup = lambda x: None + log_config = (config.worker_log_config if use_worker_options else config.log_config) log_file = (config.worker_log_file if use_worker_options @@ -198,13 +207,7 @@ def setup_logging(config, use_worker_options=False): load_log_config() - # TODO(paul): obviously this is a terrible mechanism for - # stealing SIGHUP, because it means no other part of synapse - # can use it instead. If we want to catch SIGHUP anywhere - # else as well, I'd suggest we find a nicer way to broadcast - # it around. 
- if getattr(signal, "SIGHUP"): - signal.signal(signal.SIGHUP, sighup) + register_sighup(sighup) # make sure that the first thing we log is a thing we can grep backwards # for -- cgit 1.5.1 From 270f212a2a89712e78ff2ff4922c2f99285d65dc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 30 Jan 2019 12:14:50 +0000 Subject: _listener_http should return a list --- synapse/app/homeserver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 019b91576f..d5f5ff9599 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -95,7 +95,7 @@ class SynapseHomeServer(HomeServer): site_tag = listener_config.get("tag", port) if tls and config.no_tls: - return + return [] resources = {} for res in listener_config["resources"]: -- cgit 1.5.1 From e87d7a4b0f946ec6723138e48f4bd7aae8389a5d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 30 Jan 2019 12:48:09 +0000 Subject: Raise ConfigError instead --- synapse/app/homeserver.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index d5f5ff9599..250a17cef8 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -95,7 +95,9 @@ class SynapseHomeServer(HomeServer): site_tag = listener_config.get("tag", port) if tls and config.no_tls: - return [] + raise ConfigError( + "Listener on port %i has TLS enabled, but no_tls is set" % (port,), + ) resources = {} for res in listener_config["resources"]: -- cgit 1.5.1 From 7615a8ced1385460d73dca45fc6534a2fcb64227 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 30 Jan 2019 14:17:55 +0000 Subject: ACME config cleanups (#4525) * Handle listening for ACME requests on IPv6 addresses the weird url-but-not-actually-a-url-string doesn't handle IPv6 addresses without extra quoting. Building a string which you are about to parse again seems like a weird choice. Let's just use listenTCP, which is consistent with what we do elsewhere. * Clean up the default ACME config make it look a bit more consistent with everything else, and tweak the defaults to listen on port 80. * newsfile --- changelog.d/4525.feature | 1 + synapse/app/__init__.py | 25 +++++++++++- synapse/app/_base.py | 22 +---------- synapse/config/tls.py | 100 +++++++++++++++++++++++++++++++++++------------ synapse/handlers/acme.py | 27 +++++++------ 5 files changed, 115 insertions(+), 60 deletions(-) create mode 100644 changelog.d/4525.feature (limited to 'synapse/app') diff --git a/changelog.d/4525.feature b/changelog.d/4525.feature new file mode 100644 index 0000000000..c7f595cec2 --- /dev/null +++ b/changelog.d/4525.feature @@ -0,0 +1 @@ + Synapse can now automatically provision TLS certificates via ACME (the protocol used by CAs like Let's Encrypt). diff --git a/synapse/app/__init__.py b/synapse/app/__init__.py index b45adafdd3..f56f5fcc13 100644 --- a/synapse/app/__init__.py +++ b/synapse/app/__init__.py @@ -12,15 +12,38 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- +import logging import sys from synapse import python_dependencies # noqa: E402 sys.dont_write_bytecode = True +logger = logging.getLogger(__name__) + try: python_dependencies.check_requirements() except python_dependencies.DependencyException as e: sys.stderr.writelines(e.message) sys.exit(1) + + +def check_bind_error(e, address, bind_addresses): + """ + This method checks an exception occurred while binding on 0.0.0.0. + If :: is specified in the bind addresses a warning is shown. + The exception is still raised otherwise. + + Binding on both 0.0.0.0 and :: causes an exception on Linux and macOS + because :: binds on both IPv4 and IPv6 (as per RFC 3493). + When binding on 0.0.0.0 after :: this can safely be ignored. + + Args: + e (Exception): Exception that was caught. + address (str): Address on which binding was attempted. + bind_addresses (list): Addresses on which the service listens. + """ + if address == '0.0.0.0' and '::' in bind_addresses: + logger.warn('Failed to listen on 0.0.0.0, continuing because listening on [::]') + else: + raise e diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 3840c663ab..5b97a54d45 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -22,6 +22,7 @@ from daemonize import Daemonize from twisted.internet import error, reactor +from synapse.app import check_bind_error from synapse.util import PreserveLoggingContext from synapse.util.rlimit import change_resource_limit @@ -188,24 +189,3 @@ def listen_ssl( logger.info("Synapse now listening on port %d (TLS)", port) return r - - -def check_bind_error(e, address, bind_addresses): - """ - This method checks an exception occurred while binding on 0.0.0.0. - If :: is specified in the bind addresses a warning is shown. - The exception is still raised otherwise. - - Binding on both 0.0.0.0 and :: causes an exception on Linux and macOS - because :: binds on both IPv4 and IPv6 (as per RFC 3493). - When binding on 0.0.0.0 after :: this can safely be ignored. - - Args: - e (Exception): Exception that was caught. - address (str): Address on which binding was attempted. - bind_addresses (list): Addresses on which the service listens. - """ - if address == '0.0.0.0' and '::' in bind_addresses: - logger.warn('Failed to listen on 0.0.0.0, continuing because listening on [::]') - else: - raise e diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 734f612db7..5f63676d9c 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -31,13 +31,16 @@ logger = logging.getLogger() class TlsConfig(Config): def read_config(self, config): - acme_config = config.get("acme", {}) + acme_config = config.get("acme", None) + if acme_config is None: + acme_config = {} + self.acme_enabled = acme_config.get("enabled", False) self.acme_url = acme_config.get( "url", "https://acme-v01.api.letsencrypt.org/directory" ) - self.acme_port = acme_config.get("port", 8449) - self.acme_bind_addresses = acme_config.get("bind_addresses", ["127.0.0.1"]) + self.acme_port = acme_config.get("port", 80) + self.acme_bind_addresses = acme_config.get("bind_addresses", ['::', '0.0.0.0']) self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30) self.tls_certificate_file = self.abspath(config.get("tls_certificate_path")) @@ -126,21 +129,80 @@ class TlsConfig(Config): tls_certificate_path = base_key_name + ".tls.crt" tls_private_key_path = base_key_name + ".tls.key" + # this is to avoid the max line length. 
Sorrynotsorry + proxypassline = ( + 'ProxyPass /.well-known/acme-challenge ' + 'http://localhost:8009/.well-known/acme-challenge' + ) + return ( """\ - # PEM encoded X509 certificate for TLS. - # This certificate, as of Synapse 1.0, will need to be a valid - # and verifiable certificate, with a root that is available in - # the root store of other servers you wish to federate to. Any - # required intermediary certificates can be appended after the - # primary certificate in hierarchical order. + # PEM-encoded X509 certificate for TLS. + # This certificate, as of Synapse 1.0, will need to be a valid and verifiable + # certificate, signed by a recognised Certificate Authority. + # + # See 'ACME support' below to enable auto-provisioning this certificate via + # Let's Encrypt. + # tls_certificate_path: "%(tls_certificate_path)s" - # PEM encoded private key for TLS + # PEM-encoded private key for TLS tls_private_key_path: "%(tls_private_key_path)s" - # Don't bind to the https port - no_tls: False + # ACME support: This will configure Synapse to request a valid TLS certificate + # for your configured `server_name` via Let's Encrypt. + # + # Note that provisioning a certificate in this way requires port 80 to be + # routed to Synapse so that it can complete the http-01 ACME challenge. + # By default, if you enable ACME support, Synapse will attempt to listen on + # port 80 for incoming http-01 challenges - however, this will likely fail + # with 'Permission denied' or a similar error. + # + # There are a couple of potential solutions to this: + # + # * If you already have an Apache, Nginx, or similar listening on port 80, + # you can configure Synapse to use an alternate port, and have your web + # server forward the requests. For example, assuming you set 'port: 8009' + # below, on Apache, you would write: + # + # %(proxypassline)s + # + # * Alternatively, you can use something like `authbind` to give Synapse + # permission to listen on port 80. + # + acme: + # ACME support is disabled by default. Uncomment the following line + # to enable it. + # + # enabled: true + + # Endpoint to use to request certificates. If you only want to test, + # use Let's Encrypt's staging url: + # https://acme-staging.api.letsencrypt.org/directory + # + # url: https://acme-v01.api.letsencrypt.org/directory + + # Port number to listen on for the HTTP-01 challenge. Change this if + # you are forwarding connections through Apache/Nginx/etc. + # + # port: 80 + + # Local addresses to listen on for incoming connections. + # Again, you may want to change this if you are forwarding connections + # through Apache/Nginx/etc. + # + # bind_addresses: ['::', '0.0.0.0'] + + # How many days remaining on a certificate before it is renewed. + # + # reprovision_threshold: 30 + + # If your server runs behind a reverse-proxy which terminates TLS connections + # (for both client and federation connections), it may be useful to disable + # All TLS support for incoming connections. Setting no_tls to False will + # do so (and avoid the need to give synapse a TLS private key). + # + # no_tls: False # List of allowed TLS fingerprints for this server to publish along # with the signing keys for this server. Other matrix servers that @@ -170,20 +232,6 @@ class TlsConfig(Config): tls_fingerprints: [] # tls_fingerprints: [{"sha256": ""}] - ## Support for ACME certificate auto-provisioning. - # acme: - # enabled: false - ## ACME path. 
- ## If you only want to test, use the staging url: - ## https://acme-staging.api.letsencrypt.org/directory - # url: 'https://acme-v01.api.letsencrypt.org/directory' - ## Port number (to listen for the HTTP-01 challenge). - ## Using port 80 requires utilising something like authbind, or proxying to it. - # port: 8449 - ## Hosts to bind to. - # bind_addresses: ['127.0.0.1'] - ## How many days remaining on a certificate before it is renewed. - # reprovision_threshold: 30 """ % locals() ) diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index 73ea7ed018..dd0b217965 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -18,13 +18,16 @@ import logging import attr from zope.interface import implementer +import twisted +import twisted.internet.error from twisted.internet import defer -from twisted.internet.endpoints import serverFromString from twisted.python.filepath import FilePath from twisted.python.url import URL from twisted.web import server, static from twisted.web.resource import Resource +from synapse.app import check_bind_error + logger = logging.getLogger(__name__) try: @@ -96,16 +99,19 @@ class AcmeHandler(object): srv = server.Site(responder_resource) - listeners = [] - - for host in self.hs.config.acme_bind_addresses: + bind_addresses = self.hs.config.acme_bind_addresses + for host in bind_addresses: logger.info( - "Listening for ACME requests on %s:%s", host, self.hs.config.acme_port - ) - endpoint = serverFromString( - self.reactor, "tcp:%s:interface=%s" % (self.hs.config.acme_port, host) + "Listening for ACME requests on %s:%i", host, self.hs.config.acme_port, ) - listeners.append(endpoint.listen(srv)) + try: + self.reactor.listenTCP( + self.hs.config.acme_port, + srv, + interface=host, + ) + except twisted.internet.error.CannotListenError as e: + check_bind_error(e, host, bind_addresses) # Make sure we are registered to the ACME server. There's no public API # for this, it is usually triggered by startService, but since we don't @@ -114,9 +120,6 @@ class AcmeHandler(object): self._issuer._registered = False yield self._issuer._ensure_registered() - # Return a Deferred that will fire when all the servers have started up. - yield defer.DeferredList(listeners, fireOnOneErrback=True, consumeErrors=True) - @defer.inlineCallbacks def provision_certificate(self): -- cgit 1.5.1 From 9cd33d2f4bc14a165f693e2d3dfe231179e968e8 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 8 Feb 2019 17:25:57 +0000 Subject: Deduplicate some code in synapse.app (#4567) --- changelog.d/4567.misc | 1 + synapse/app/_base.py | 63 ++++++++++++++++++++++++++++++++++++++++ synapse/app/appservice.py | 7 +---- synapse/app/client_reader.py | 13 +-------- synapse/app/event_creator.py | 13 +-------- synapse/app/federation_reader.py | 13 +-------- synapse/app/federation_sender.py | 12 +------- synapse/app/frontend_proxy.py | 13 +-------- synapse/app/homeserver.py | 54 +++------------------------------- synapse/app/media_repository.py | 13 +-------- synapse/app/pusher.py | 3 +- synapse/app/synchrotron.py | 7 +---- synapse/app/user_dir.py | 13 +-------- synapse/config/logger.py | 16 ++++------ 14 files changed, 83 insertions(+), 158 deletions(-) create mode 100644 changelog.d/4567.misc (limited to 'synapse/app') diff --git a/changelog.d/4567.misc b/changelog.d/4567.misc new file mode 100644 index 0000000000..96a2e0aefc --- /dev/null +++ b/changelog.d/4567.misc @@ -0,0 +1 @@ +Reduce duplication of ``synapse.app`` code. 
diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 5b97a54d45..3cbb003035 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -15,7 +15,9 @@ import gc import logging +import signal import sys +import traceback import psutil from daemonize import Daemonize @@ -23,11 +25,25 @@ from daemonize import Daemonize from twisted.internet import error, reactor from synapse.app import check_bind_error +from synapse.crypto import context_factory from synapse.util import PreserveLoggingContext from synapse.util.rlimit import change_resource_limit logger = logging.getLogger(__name__) +_sighup_callbacks = [] + + +def register_sighup(func): + """ + Register a function to be called when a SIGHUP occurs. + + Args: + func (function): Function to be called when sent a SIGHUP signal. + Will be called with a single argument, the homeserver. + """ + _sighup_callbacks.append(func) + def start_worker_reactor(appname, config): """ Run the reactor in the main process @@ -189,3 +205,50 @@ def listen_ssl( logger.info("Synapse now listening on port %d (TLS)", port) return r + + +def refresh_certificate(hs): + """ + Refresh the TLS certificates that Synapse is using by re-reading them from + disk and updating the TLS context factories to use them. + """ + logging.info("Loading certificate from disk...") + hs.config.read_certificate_from_disk() + hs.tls_server_context_factory = context_factory.ServerContextFactory(hs.config) + hs.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( + hs.config + ) + logging.info("Certificate loaded.") + + +def start(hs, listeners=None): + """ + Start a Synapse server or worker. + + Args: + hs (synapse.server.HomeServer) + listeners (list[dict]): Listener configuration ('listeners' in homeserver.yaml) + """ + try: + # Set up the SIGHUP machinery. + if hasattr(signal, "SIGHUP"): + def handle_sighup(*args, **kwargs): + for i in _sighup_callbacks: + i(hs) + + signal.signal(signal.SIGHUP, handle_sighup) + + register_sighup(refresh_certificate) + + # Load the certificate from disk. + refresh_certificate(hs) + + # It is now safe to start your Synapse. 
+ hs.start_listening(listeners) + hs.get_datastore().start_profiling() + except Exception: + traceback.print_exc(file=sys.stderr) + reactor = hs.get_reactor() + if reactor.running: + reactor.stop() + sys.exit(1) diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py index 8559e141af..33107f56d1 100644 --- a/synapse/app/appservice.py +++ b/synapse/app/appservice.py @@ -168,12 +168,7 @@ def start(config_options): ) ps.setup() - ps.start_listening(config.worker_listeners) - - def start(): - ps.get_datastore().start_profiling() - - reactor.callWhenRunning(start) + reactor.callWhenRunning(_base.start, ps, config.worker_listeners) _base.start_worker_reactor("synapse-appservice", config) diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index f8a417cb60..a9d2147022 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -25,7 +25,6 @@ from synapse.app import _base from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging -from synapse.crypto import context_factory from synapse.http.server import JsonResource from synapse.http.site import SynapseSite from synapse.metrics import RegistryProxy @@ -173,17 +172,7 @@ def start(config_options): ) ss.setup() - - def start(): - ss.config.read_certificate_from_disk() - ss.tls_server_context_factory = context_factory.ServerContextFactory(config) - ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( - config - ) - ss.start_listening(config.worker_listeners) - ss.get_datastore().start_profiling() - - reactor.callWhenRunning(start) + reactor.callWhenRunning(_base.start, ss, config.worker_listeners) _base.start_worker_reactor("synapse-client-reader", config) diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py index 656e0edc0f..b8e5196152 100644 --- a/synapse/app/event_creator.py +++ b/synapse/app/event_creator.py @@ -25,7 +25,6 @@ from synapse.app import _base from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging -from synapse.crypto import context_factory from synapse.http.server import JsonResource from synapse.http.site import SynapseSite from synapse.metrics import RegistryProxy @@ -194,17 +193,7 @@ def start(config_options): ) ss.setup() - - def start(): - ss.config.read_certificate_from_disk() - ss.tls_server_context_factory = context_factory.ServerContextFactory(config) - ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( - config - ) - ss.start_listening(config.worker_listeners) - ss.get_datastore().start_profiling() - - reactor.callWhenRunning(start) + reactor.callWhenRunning(_base.start, ss, config.worker_listeners) _base.start_worker_reactor("synapse-event-creator", config) diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 3de2715132..42886dbfc1 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -26,7 +26,6 @@ from synapse.app import _base from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging -from synapse.crypto import context_factory from synapse.federation.transport.server import TransportLayerServer from synapse.http.site import SynapseSite from synapse.metrics import RegistryProxy @@ -160,17 +159,7 @@ def start(config_options): ) ss.setup() - - def start(): - 
ss.config.read_certificate_from_disk() - ss.tls_server_context_factory = context_factory.ServerContextFactory(config) - ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( - config - ) - ss.start_listening(config.worker_listeners) - ss.get_datastore().start_profiling() - - reactor.callWhenRunning(start) + reactor.callWhenRunning(_base.start, ss, config.worker_listeners) _base.start_worker_reactor("synapse-federation-reader", config) diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index d944e0517f..a461442fdc 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -25,7 +25,6 @@ from synapse.app import _base from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging -from synapse.crypto import context_factory from synapse.federation import send_queue from synapse.http.site import SynapseSite from synapse.metrics import RegistryProxy @@ -192,17 +191,8 @@ def start(config_options): ) ss.setup() + reactor.callWhenRunning(_base.start, ss, config.worker_listeners) - def start(): - ss.config.read_certificate_from_disk() - ss.tls_server_context_factory = context_factory.ServerContextFactory(config) - ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( - config - ) - ss.start_listening(config.worker_listeners) - ss.get_datastore().start_profiling() - - reactor.callWhenRunning(start) _base.start_worker_reactor("synapse-federation-sender", config) diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index d9ef6edc3c..d5b954361d 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -26,7 +26,6 @@ from synapse.app import _base from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging -from synapse.crypto import context_factory from synapse.http.server import JsonResource from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.http.site import SynapseSite @@ -250,17 +249,7 @@ def start(config_options): ) ss.setup() - - def start(): - ss.config.read_certificate_from_disk() - ss.tls_server_context_factory = context_factory.ServerContextFactory(config) - ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( - config - ) - ss.start_listening(config.worker_listeners) - ss.get_datastore().start_profiling() - - reactor.callWhenRunning(start) + reactor.callWhenRunning(_base.start, ss, config.worker_listeners) _base.start_worker_reactor("synapse-frontend-proxy", config) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 250a17cef8..1a341568ac 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -17,7 +17,6 @@ import gc import logging import os -import signal import sys import traceback @@ -28,7 +27,6 @@ from prometheus_client import Gauge from twisted.application import service from twisted.internet import defer, reactor -from twisted.protocols.tls import TLSMemoryBIOFactory from twisted.web.resource import EncodingResourceWrapper, NoResource from twisted.web.server import GzipEncoderFactory from twisted.web.static import File @@ -49,7 +47,6 @@ from synapse.app import _base from synapse.app._base import listen_ssl, listen_tcp, quit_with_error from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig -from synapse.crypto import context_factory from 
synapse.federation.transport.server import TransportLayerServer from synapse.http.additional_resource import AdditionalResource from synapse.http.server import RootRedirect @@ -241,10 +238,10 @@ class SynapseHomeServer(HomeServer): return resources - def start_listening(self): + def start_listening(self, listeners): config = self.get_config() - for listener in config.listeners: + for listener in listeners: if listener["type"] == "http": self._listening_services.extend( self._listener_http(config, listener) @@ -328,20 +325,11 @@ def setup(config_options): # generating config files and shouldn't try to continue. sys.exit(0) - sighup_callbacks = [] synapse.config.logger.setup_logging( config, - use_worker_options=False, - register_sighup=sighup_callbacks.append + use_worker_options=False ) - def handle_sighup(*args, **kwargs): - for i in sighup_callbacks: - i(*args, **kwargs) - - if hasattr(signal, "SIGHUP"): - signal.signal(signal.SIGHUP, handle_sighup) - events.USE_FROZEN_DICTS = config.use_frozen_dicts database_engine = create_engine(config.database_config) @@ -377,31 +365,6 @@ def setup(config_options): hs.setup() - def refresh_certificate(*args): - """ - Refresh the TLS certificates that Synapse is using by re-reading them - from disk and updating the TLS context factories to use them. - """ - logging.info("Reloading certificate from disk...") - hs.config.read_certificate_from_disk() - hs.tls_server_context_factory = context_factory.ServerContextFactory(config) - hs.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( - config - ) - logging.info("Certificate reloaded.") - - logging.info("Updating context factories...") - for i in hs._listening_services: - if isinstance(i.factory, TLSMemoryBIOFactory): - i.factory = TLSMemoryBIOFactory( - hs.tls_server_context_factory, - False, - i.factory.wrappedFactory - ) - logging.info("Context factories updated.") - - sighup_callbacks.append(refresh_certificate) - @defer.inlineCallbacks def start(): try: @@ -425,18 +388,9 @@ def setup(config_options): ): yield acme.provision_certificate() - # Read the certificate from disk and build the context factories for - # TLS. - hs.config.read_certificate_from_disk() - hs.tls_server_context_factory = context_factory.ServerContextFactory(config) - hs.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( - config - ) + _base.start(hs, config.listeners) - # It is now safe to start your Synapse. 
- hs.start_listening() hs.get_pusherpool().start() - hs.get_datastore().start_profiling() hs.get_datastore().start_doing_background_updates() except Exception as e: # If a DeferredList failed (like in listening on the ACME listener), diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py index 4ecf64031b..d4cc4e9443 100644 --- a/synapse/app/media_repository.py +++ b/synapse/app/media_repository.py @@ -26,7 +26,6 @@ from synapse.app import _base from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging -from synapse.crypto import context_factory from synapse.http.site import SynapseSite from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource @@ -160,17 +159,7 @@ def start(config_options): ) ss.setup() - - def start(): - ss.config.read_certificate_from_disk() - ss.tls_server_context_factory = context_factory.ServerContextFactory(config) - ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( - config - ) - ss.start_listening(config.worker_listeners) - ss.get_datastore().start_profiling() - - reactor.callWhenRunning(start) + reactor.callWhenRunning(_base.start, ss, config.worker_listeners) _base.start_worker_reactor("synapse-media-repository", config) diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py index 83b0863f00..cbf0d67f51 100644 --- a/synapse/app/pusher.py +++ b/synapse/app/pusher.py @@ -224,11 +224,10 @@ def start(config_options): ) ps.setup() - ps.start_listening(config.worker_listeners) def start(): + _base.start(ps, config.worker_listeners) ps.get_pusherpool().start() - ps.get_datastore().start_profiling() reactor.callWhenRunning(start) diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index 0354e82bf8..9163b56d86 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -445,12 +445,7 @@ def start(config_options): ) ss.setup() - ss.start_listening(config.worker_listeners) - - def start(): - ss.get_datastore().start_profiling() - - reactor.callWhenRunning(start) + reactor.callWhenRunning(_base.start, ss, config.worker_listeners) _base.start_worker_reactor("synapse-synchrotron", config) diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py index 176d55a783..d1ab9512cd 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -26,7 +26,6 @@ from synapse.app import _base from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging -from synapse.crypto import context_factory from synapse.http.server import JsonResource from synapse.http.site import SynapseSite from synapse.metrics import RegistryProxy @@ -220,17 +219,7 @@ def start(config_options): ) ss.setup() - - def start(): - ss.config.read_certificate_from_disk() - ss.tls_server_context_factory = context_factory.ServerContextFactory(config) - ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( - config - ) - ss.start_listening(config.worker_listeners) - ss.get_datastore().start_profiling() - - reactor.callWhenRunning(start) + reactor.callWhenRunning(_base.start, ss, config.worker_listeners) _base.start_worker_reactor("synapse-user-dir", config) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index a795e39b1a..4b938053fb 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -15,7 +15,6 @@ import logging import logging.config import os 
-import signal import sys from string import Template @@ -24,6 +23,7 @@ import yaml from twisted.logger import STDLibLogObserver, globalLogBeginner import synapse +from synapse.app import _base as appbase from synapse.util.logcontext import LoggingContextFilter from synapse.util.versionstring import get_version_string @@ -127,7 +127,7 @@ class LoggingConfig(Config): ) -def setup_logging(config, use_worker_options=False, register_sighup=None): +def setup_logging(config, use_worker_options=False): """ Set up python logging Args: @@ -140,12 +140,6 @@ def setup_logging(config, use_worker_options=False, register_sighup=None): register_sighup (func | None): Function to call to register a sighup handler. """ - if not register_sighup: - if getattr(signal, "SIGHUP"): - register_sighup = lambda x: signal.signal(signal.SIGHUP, x) - else: - register_sighup = lambda x: None - log_config = (config.worker_log_config if use_worker_options else config.log_config) log_file = (config.worker_log_file if use_worker_options @@ -187,7 +181,7 @@ def setup_logging(config, use_worker_options=False, register_sighup=None): else: handler = logging.StreamHandler() - def sighup(signum, stack): + def sighup(*args): pass handler.setFormatter(formatter) @@ -200,14 +194,14 @@ def setup_logging(config, use_worker_options=False, register_sighup=None): with open(log_config, 'r') as f: logging.config.dictConfig(yaml.load(f)) - def sighup(signum, stack): + def sighup(*args): # it might be better to use a file watcher or something for this. load_log_config() logging.info("Reloaded log config from %s due to SIGHUP", log_config) load_log_config() - register_sighup(sighup) + appbase.register_sighup(sighup) # make sure that the first thing we log is a thing we can grep backwards # for -- cgit 1.5.1 From 6e2a5aa050fc132a7dee6b3e33a7a368207d7e5a Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 11 Feb 2019 21:36:26 +1100 Subject: ACME Reprovisioning (#4522) --- changelog.d/4522.feature | 1 + synapse/app/_base.py | 19 ++++++++++++ synapse/app/homeserver.py | 79 +++++++++++++++++++++++++++++++++-------------- synapse/config/tls.py | 12 ++++++- synapse/server.py | 3 ++ 5 files changed, 89 insertions(+), 25 deletions(-) create mode 100644 changelog.d/4522.feature (limited to 'synapse/app') diff --git a/changelog.d/4522.feature b/changelog.d/4522.feature new file mode 100644 index 0000000000..ef18daf60b --- /dev/null +++ b/changelog.d/4522.feature @@ -0,0 +1 @@ +Synapse's ACME support will now correctly reprovision a certificate that approaches its expiry while Synapse is running. diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 3cbb003035..62c633146f 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -23,6 +23,7 @@ import psutil from daemonize import Daemonize from twisted.internet import error, reactor +from twisted.protocols.tls import TLSMemoryBIOFactory from synapse.app import check_bind_error from synapse.crypto import context_factory @@ -220,6 +221,24 @@ def refresh_certificate(hs): ) logging.info("Certificate loaded.") + if hs._listening_services: + logging.info("Updating context factories...") + for i in hs._listening_services: + # When you listenSSL, it doesn't make an SSL port but a TCP one with + # a TLS wrapping factory around the factory you actually want to get + # requests. This factory attribute is public but missing from + # Twisted's documentation. + if isinstance(i.factory, TLSMemoryBIOFactory): + # We want to replace TLS factories with a new one, with the new + # TLS configuration. 
We do this by reaching in and pulling out
+                # the wrappedFactory, and then re-wrapping it.
+                i.factory = TLSMemoryBIOFactory(
+                    hs.tls_server_context_factory,
+                    False,
+                    i.factory.wrappedFactory
+                )
+        logging.info("Context factories updated.")
+
 
 def start(hs, listeners=None):
     """
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index d1cab07bb6..b4476bf16e 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -83,7 +83,6 @@ def gz_wrap(r):
 
 class SynapseHomeServer(HomeServer):
     DATASTORE_CLASS = DataStore
-    _listening_services = []
 
     def _listener_http(self, config, listener_config):
         port = listener_config["port"]
@@ -376,42 +375,73 @@ def setup(config_options):
 
     hs.setup()
 
+    @defer.inlineCallbacks
+    def do_acme():
+        """
+        Reprovision an ACME certificate, if it's required.
+
+        Returns:
+            Deferred[bool]: Whether the cert has been updated.
+        """
+        acme = hs.get_acme_handler()
+
+        # Check how long the certificate is active for.
+        cert_days_remaining = hs.config.is_disk_cert_valid(
+            allow_self_signed=False
+        )
+
+        # We want to reprovision if cert_days_remaining is None (meaning no
+        # certificate exists), or the days remaining number it returns
+        # is less than our re-registration threshold.
+        provision = False
+
+        if (
+            cert_days_remaining is None or
+            cert_days_remaining < hs.config.acme_reprovision_threshold
+        ):
+            provision = True
+
+        if provision:
+            yield acme.provision_certificate()
+
+        defer.returnValue(provision)
+
+    @defer.inlineCallbacks
+    def reprovision_acme():
+        """
+        Provision a certificate from ACME, if required, and reload the TLS
+        certificate if it's renewed.
+        """
+        reprovisioned = yield do_acme()
+        if reprovisioned:
+            _base.refresh_certificate(hs)
+
     @defer.inlineCallbacks
     def start():
         try:
-            # Check if the certificate is still valid.
-            cert_days_remaining = hs.config.is_disk_cert_valid()
-
+            # Run the ACME provisioning code, if it's enabled.
             if hs.config.acme_enabled:
-                # If ACME is enabled, we might need to provision a certificate
-                # before starting.
                 acme = hs.get_acme_handler()
-
                 # Start up the webservices which we will respond to ACME
-                # challenges with.
+                # challenges with, and then provision.
                 yield acme.start_listening()
+                yield do_acme()
 
-                # We want to reprovision if cert_days_remaining is None (meaning no
-                # certificate exists), or the days remaining number it returns
-                # is less than our re-registration threshold.
-                if (cert_days_remaining is None) or (
-                    not cert_days_remaining > hs.config.acme_reprovision_threshold
-                ):
-                    yield acme.provision_certificate()
+                # Check if it needs to be reprovisioned every day.
+                hs.get_clock().looping_call(
+                    reprovision_acme,
+                    24 * 60 * 60 * 1000
+                )
 
             _base.start(hs, config.listeners)
 
             hs.get_pusherpool().start()
             hs.get_datastore().start_doing_background_updates()
-        except Exception as e:
-            # If a DeferredList failed (like in listening on the ACME listener),
-            # we need to print the subfailure explicitly.
-            if isinstance(e, defer.FirstError):
-                e.subFailure.printTraceback(sys.stderr)
-                sys.exit(1)
-
-            # Something else went wrong when starting. Print it and bail out.
+        except Exception:
+            # Print the exception and bail out.
             traceback.print_exc(file=sys.stderr)
+            if reactor.running:
+                reactor.stop()
             sys.exit(1)
 
     reactor.callWhenRunning(start)
@@ -420,7 +450,8 @@ def setup(config_options):
 
 
 class SynapseService(service.Service):
-    """A twisted Service class that will start synapse. Used to run synapse
+    """
+    A twisted Service class that will start synapse.
Used to run synapse via twistd and a .tac. """ def __init__(self, config): diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 81b3a659fe..9fcc79816d 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -64,10 +64,14 @@ class TlsConfig(Config): self.tls_certificate = None self.tls_private_key = None - def is_disk_cert_valid(self): + def is_disk_cert_valid(self, allow_self_signed=True): """ Is the certificate we have on disk valid, and if so, for how long? + Args: + allow_self_signed (bool): Should we allow the certificate we + read to be self signed? + Returns: int: Days remaining of certificate validity. None: No certificate exists. @@ -88,6 +92,12 @@ class TlsConfig(Config): logger.exception("Failed to parse existing certificate off disk!") raise + if not allow_self_signed: + if tls_certificate.get_subject() == tls_certificate.get_issuer(): + raise ValueError( + "TLS Certificate is self signed, and this is not permitted" + ) + # YYYYMMDDhhmmssZ -- in UTC expires_on = datetime.strptime( tls_certificate.get_notAfter().decode('ascii'), "%Y%m%d%H%M%SZ" diff --git a/synapse/server.py b/synapse/server.py index 6c52101616..a2cf8a91cd 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -112,6 +112,8 @@ class HomeServer(object): Attributes: config (synapse.config.homeserver.HomeserverConfig): + _listening_services (list[twisted.internet.tcp.Port]): TCP ports that + we are listening on to provide HTTP services. """ __metaclass__ = abc.ABCMeta @@ -196,6 +198,7 @@ class HomeServer(object): self._reactor = reactor self.hostname = hostname self._building = {} + self._listening_services = [] self.clock = Clock(reactor) self.distributor = Distributor() -- cgit 1.5.1 From 5d27730a73d01bd3ff7eb42a21f2f024493c5b89 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 11 Feb 2019 18:03:30 +0000 Subject: Move ClientTLSOptionsFactory init out of refresh_certificates (#4611) It's nothing to do with refreshing the certificates. No idea why it was here. --- changelog.d/4611.misc | 1 + synapse/app/_base.py | 3 --- synapse/http/matrixfederationclient.py | 4 ++-- synapse/server.py | 6 +++++- tests/http/test_fedclient.py | 4 +--- 5 files changed, 9 insertions(+), 9 deletions(-) create mode 100644 changelog.d/4611.misc (limited to 'synapse/app') diff --git a/changelog.d/4611.misc b/changelog.d/4611.misc new file mode 100644 index 0000000000..d2e0a05daa --- /dev/null +++ b/changelog.d/4611.misc @@ -0,0 +1 @@ +Move ClientTLSOptionsFactory init out of refresh_certificates diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 62c633146f..e1fc1afd5b 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -216,9 +216,6 @@ def refresh_certificate(hs): logging.info("Loading certificate from disk...") hs.config.read_certificate_from_disk() hs.tls_server_context_factory = context_factory.ServerContextFactory(hs.config) - hs.tls_client_options_factory = context_factory.ClientTLSOptionsFactory( - hs.config - ) logging.info("Certificate loaded.") if hs._listening_services: diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 5ee4d528d2..3c24bf3805 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -168,7 +168,7 @@ class MatrixFederationHttpClient(object): requests. 
""" - def __init__(self, hs): + def __init__(self, hs, tls_client_options_factory): self.hs = hs self.signing_key = hs.config.signing_key[0] self.server_name = hs.hostname @@ -176,7 +176,7 @@ class MatrixFederationHttpClient(object): self.agent = MatrixFederationAgent( hs.get_reactor(), - hs.tls_client_options_factory, + tls_client_options_factory, ) self.clock = hs.get_clock() self._store = hs.get_datastore() diff --git a/synapse/server.py b/synapse/server.py index a2cf8a91cd..8615b67ad4 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -31,6 +31,7 @@ from synapse.api.filtering import Filtering from synapse.api.ratelimiting import Ratelimiter from synapse.appservice.api import ApplicationServiceApi from synapse.appservice.scheduler import ApplicationServiceScheduler +from synapse.crypto import context_factory from synapse.crypto.keyring import Keyring from synapse.events.builder import EventBuilderFactory from synapse.events.spamcheck import SpamChecker @@ -367,7 +368,10 @@ class HomeServer(object): return PusherPool(self) def build_http_client(self): - return MatrixFederationHttpClient(self) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory( + self.config + ) + return MatrixFederationHttpClient(self, tls_client_options_factory) def build_db_pool(self): name = self.db_config["name"] diff --git a/tests/http/test_fedclient.py b/tests/http/test_fedclient.py index 018c77ebcd..b03b37affe 100644 --- a/tests/http/test_fedclient.py +++ b/tests/http/test_fedclient.py @@ -43,13 +43,11 @@ def check_logcontext(context): class FederationClientTests(HomeserverTestCase): def make_homeserver(self, reactor, clock): - hs = self.setup_test_homeserver(reactor=reactor, clock=clock) - hs.tls_client_options_factory = None return hs def prepare(self, reactor, clock, homeserver): - self.cl = MatrixFederationHttpClient(self.hs) + self.cl = MatrixFederationHttpClient(self.hs, None) self.reactor.lookups["testserv"] = "1.2.3.4" def test_client_get(self): -- cgit 1.5.1 From 086f6f27d409520e71556cad4707cb2f70476e20 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 11 Feb 2019 21:00:41 +0000 Subject: Logging improvements around TLS certs Log which file we're reading keys and certs from, and refactor the code a bit in preparation for other work --- changelog.d/4615.misc | 1 + synapse/app/_base.py | 6 ++---- synapse/config/tls.py | 54 ++++++++++++++++++++++++++++++++++----------------- 3 files changed, 39 insertions(+), 22 deletions(-) create mode 100644 changelog.d/4615.misc (limited to 'synapse/app') diff --git a/changelog.d/4615.misc b/changelog.d/4615.misc new file mode 100644 index 0000000000..c7266fcfc7 --- /dev/null +++ b/changelog.d/4615.misc @@ -0,0 +1 @@ +Logging improvements around TLS certs diff --git a/synapse/app/_base.py b/synapse/app/_base.py index e1fc1afd5b..6d72de1daa 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -213,13 +213,11 @@ def refresh_certificate(hs): Refresh the TLS certificates that Synapse is using by re-reading them from disk and updating the TLS context factories to use them. 
""" - logging.info("Loading certificate from disk...") hs.config.read_certificate_from_disk() hs.tls_server_context_factory = context_factory.ServerContextFactory(hs.config) - logging.info("Certificate loaded.") if hs._listening_services: - logging.info("Updating context factories...") + logger.info("Updating context factories...") for i in hs._listening_services: # When you listenSSL, it doesn't make an SSL port but a TCP one with # a TLS wrapping factory around the factory you actually want to get @@ -234,7 +232,7 @@ def refresh_certificate(hs): False, i.factory.wrappedFactory ) - logging.info("Context factories updated.") + logger.info("Context factories updated.") def start(hs, listeners=None): diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 9fcc79816d..76d2add4fe 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -25,7 +25,7 @@ from OpenSSL import crypto from synapse.config._base import Config -logger = logging.getLogger() +logger = logging.getLogger(__name__) class TlsConfig(Config): @@ -110,20 +110,10 @@ class TlsConfig(Config): """ Read the certificates from disk. """ - self.tls_certificate = self.read_tls_certificate(self.tls_certificate_file) - - # Check if it is self-signed, and issue a warning if so. - if self.tls_certificate.get_issuer() == self.tls_certificate.get_subject(): - warnings.warn( - ( - "Self-signed TLS certificates will not be accepted by Synapse 1.0. " - "Please either provide a valid certificate, or use Synapse's ACME " - "support to provision one." - ) - ) + self.tls_certificate = self.read_tls_certificate() if not self.no_tls: - self.tls_private_key = self.read_tls_private_key(self.tls_private_key_file) + self.tls_private_key = self.read_tls_private_key() self.tls_fingerprints = list(self._original_tls_fingerprints) @@ -250,10 +240,38 @@ class TlsConfig(Config): % locals() ) - def read_tls_certificate(self, cert_path): - cert_pem = self.read_file(cert_path, "tls_certificate") - return crypto.load_certificate(crypto.FILETYPE_PEM, cert_pem) + def read_tls_certificate(self): + """Reads the TLS certificate from the configured file, and returns it + + Also checks if it is self-signed, and warns if so + + Returns: + OpenSSL.crypto.X509: the certificate + """ + cert_path = self.tls_certificate_file + logger.info("Loading TLS certificate from %s", cert_path) + cert_pem = self.read_file(cert_path, "tls_certificate_path") + cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_pem) + + # Check if it is self-signed, and issue a warning if so. + if cert.get_issuer() == cert.get_subject(): + warnings.warn( + ( + "Self-signed TLS certificates will not be accepted by Synapse 1.0. " + "Please either provide a valid certificate, or use Synapse's ACME " + "support to provision one." 
+ ) + ) + + return cert - def read_tls_private_key(self, private_key_path): - private_key_pem = self.read_file(private_key_path, "tls_private_key") + def read_tls_private_key(self): + """Reads the TLS private key from the configured file, and returns it + + Returns: + OpenSSL.crypto.PKey: the private key + """ + private_key_path = self.tls_private_key_file + logger.info("Loading TLS key from %s", private_key_path) + private_key_pem = self.read_file(private_key_path, "tls_private_key_path") return crypto.load_privatekey(crypto.FILETYPE_PEM, private_key_pem) -- cgit 1.5.1 From 9645728619828fda050fa08aaa25628f5db5d775 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 11 Feb 2019 21:30:59 +0000 Subject: Don't create server contexts when TLS is disabled we aren't going to use them anyway. --- changelog.d/4617.misc | 1 + synapse/app/_base.py | 5 +++++ synapse/crypto/context_factory.py | 4 +--- 3 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 changelog.d/4617.misc (limited to 'synapse/app') diff --git a/changelog.d/4617.misc b/changelog.d/4617.misc new file mode 100644 index 0000000000..6d751865c9 --- /dev/null +++ b/changelog.d/4617.misc @@ -0,0 +1 @@ +Don't create server contexts when TLS is disabled diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 6d72de1daa..6b3cb61ae9 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -214,6 +214,11 @@ def refresh_certificate(hs): disk and updating the TLS context factories to use them. """ hs.config.read_certificate_from_disk() + + if hs.config.no_tls: + # nothing else to do here + return + hs.tls_server_context_factory = context_factory.ServerContextFactory(hs.config) if hs._listening_services: diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 286ad80100..85f2848fb1 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -43,9 +43,7 @@ class ServerContextFactory(ContextFactory): logger.exception("Failed to enable elliptic curve for TLS") context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3) context.use_certificate_chain_file(config.tls_certificate_file) - - if not config.no_tls: - context.use_privatekey(config.tls_private_key) + context.use_privatekey(config.tls_private_key) # https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ context.set_cipher_list( -- cgit 1.5.1 From 4fddf8fc77496d9bb3b5fa8835f0e5ba9a5a9926 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 11 Feb 2019 17:57:58 +0000 Subject: Infer no_tls from presence of TLS listeners Rather than have to specify `no_tls` explicitly, infer whether we need to load the TLS keys etc from whether we have any TLS-enabled listeners. 
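Roughly, the inference amounts to a one-line check over the parsed listener configuration. The sketch below is illustrative only and not part of the patch; `listeners` stands for the parsed `listeners` section of homeserver.yaml, and `tls_material_needed` is a made-up name rather than anything the patch adds.

    def tls_material_needed(listeners):
        """Return True if any configured listener has TLS enabled."""
        # Each listener is a dict; a missing "tls" key means plain HTTP.
        return any(listener.get("tls", False) for listener in listeners)

    # Example: the listener on 8448 is TLS-enabled, so the certificate and key
    # must be loaded; a purely plain-HTTP deployment would skip them.
    listeners = [
        {"port": 8448, "type": "http", "tls": True},
        {"port": 8008, "type": "http", "tls": False},
    ]
    assert tls_material_needed(listeners)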
--- changelog.d/4613.feature | 1 + changelog.d/4615.feature | 1 + changelog.d/4615.misc | 1 - changelog.d/4617.feature | 1 + changelog.d/4617.misc | 1 - synapse/app/_base.py | 2 +- synapse/app/homeserver.py | 5 ----- synapse/config/homeserver.py | 2 +- synapse/config/server.py | 23 ++++++++++++++++++++--- synapse/config/tls.py | 10 ++-------- 10 files changed, 27 insertions(+), 20 deletions(-) create mode 100644 changelog.d/4613.feature create mode 100644 changelog.d/4615.feature delete mode 100644 changelog.d/4615.misc create mode 100644 changelog.d/4617.feature delete mode 100644 changelog.d/4617.misc (limited to 'synapse/app') diff --git a/changelog.d/4613.feature b/changelog.d/4613.feature new file mode 100644 index 0000000000..098f906af2 --- /dev/null +++ b/changelog.d/4613.feature @@ -0,0 +1 @@ +There is no longer any need to specify `no_tls`: it is inferred from the absence of TLS listeners diff --git a/changelog.d/4615.feature b/changelog.d/4615.feature new file mode 100644 index 0000000000..098f906af2 --- /dev/null +++ b/changelog.d/4615.feature @@ -0,0 +1 @@ +There is no longer any need to specify `no_tls`: it is inferred from the absence of TLS listeners diff --git a/changelog.d/4615.misc b/changelog.d/4615.misc deleted file mode 100644 index c7266fcfc7..0000000000 --- a/changelog.d/4615.misc +++ /dev/null @@ -1 +0,0 @@ -Logging improvements around TLS certs diff --git a/changelog.d/4617.feature b/changelog.d/4617.feature new file mode 100644 index 0000000000..098f906af2 --- /dev/null +++ b/changelog.d/4617.feature @@ -0,0 +1 @@ +There is no longer any need to specify `no_tls`: it is inferred from the absence of TLS listeners diff --git a/changelog.d/4617.misc b/changelog.d/4617.misc deleted file mode 100644 index 6d751865c9..0000000000 --- a/changelog.d/4617.misc +++ /dev/null @@ -1 +0,0 @@ -Don't create server contexts when TLS is disabled diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 6b3cb61ae9..50fd17c0be 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -215,7 +215,7 @@ def refresh_certificate(hs): """ hs.config.read_certificate_from_disk() - if hs.config.no_tls: + if not hs.config.has_tls_listener(): # nothing else to do here return diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index b4476bf16e..dbd98d394f 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -90,11 +90,6 @@ class SynapseHomeServer(HomeServer): tls = listener_config.get("tls", False) site_tag = listener_config.get("tag", port) - if tls and config.no_tls: - raise ConfigError( - "Listener on port %i has TLS enabled, but no_tls is set" % (port,), - ) - resources = {} for res in listener_config["resources"]: for name in res["names"]: diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py index 5aad062c36..727fdc54d8 100644 --- a/synapse/config/homeserver.py +++ b/synapse/config/homeserver.py @@ -42,7 +42,7 @@ from .voip import VoipConfig from .workers import WorkerConfig -class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig, +class HomeServerConfig(ServerConfig, TlsConfig, DatabaseConfig, LoggingConfig, RatelimitConfig, ContentRepositoryConfig, CaptchaConfig, VoipConfig, RegistrationConfig, MetricsConfig, ApiConfig, AppServiceConfig, KeyConfig, SAML2Config, CasConfig, diff --git a/synapse/config/server.py b/synapse/config/server.py index eed9d7c81e..767897c419 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -126,14 +126,22 @@ class ServerConfig(Config): self.public_baseurl 
+= '/' self.start_pushers = config.get("start_pushers", True) - self.listeners = config.get("listeners", []) - - for listener in self.listeners: + self.listeners = [] + for listener in config.get("listeners", []): if not isinstance(listener.get("port", None), int): raise ConfigError( "Listener configuration is lacking a valid 'port' option" ) + if listener.setdefault("tls", False): + # no_tls is not really supported any more, but let's grandfather it in + # here. + if config.get("no_tls", False): + logger.info( + "Ignoring TLS-enabled listener on port %i due to no_tls" + ) + continue + bind_address = listener.pop("bind_address", None) bind_addresses = listener.setdefault("bind_addresses", []) @@ -145,6 +153,8 @@ class ServerConfig(Config): if not bind_addresses: bind_addresses.extend(DEFAULT_BIND_ADDRESSES) + self.listeners.append(listener) + if not self.web_client_location: _warn_if_webclient_configured(self.listeners) @@ -152,6 +162,9 @@ class ServerConfig(Config): bind_port = config.get("bind_port") if bind_port: + if config.get("no_tls", False): + raise ConfigError("no_tls is incompatible with bind_port") + self.listeners = [] bind_host = config.get("bind_host", "") gzip_responses = config.get("gzip_responses", True) @@ -198,6 +211,7 @@ class ServerConfig(Config): "port": manhole, "bind_addresses": ["127.0.0.1"], "type": "manhole", + "tls": False, }) metrics_port = config.get("metrics_port") @@ -223,6 +237,9 @@ class ServerConfig(Config): _check_resource_config(self.listeners) + def has_tls_listener(self): + return any(l["tls"] for l in self.listeners) + def default_config(self, server_name, data_dir_path, **kwargs): _, bind_port = parse_and_validate_server_name(server_name) if bind_port is not None: diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 76d2add4fe..e37a41eff4 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -51,7 +51,6 @@ class TlsConfig(Config): self._original_tls_fingerprints = [] self.tls_fingerprints = list(self._original_tls_fingerprints) - self.no_tls = config.get("no_tls", False) # This config option applies to non-federation HTTP clients # (e.g. for talking to recaptcha, identity servers, and such) @@ -141,6 +140,8 @@ class TlsConfig(Config): return ( """\ + ## TLS ## + # PEM-encoded X509 certificate for TLS. # This certificate, as of Synapse 1.0, will need to be a valid and verifiable # certificate, signed by a recognised Certificate Authority. @@ -201,13 +202,6 @@ class TlsConfig(Config): # # reprovision_threshold: 30 - # If your server runs behind a reverse-proxy which terminates TLS connections - # (for both client and federation connections), it may be useful to disable - # All TLS support for incoming connections. Setting no_tls to True will - # do so (and avoid the need to give synapse a TLS private key). - # - # no_tls: True - # List of allowed TLS fingerprints for this server to publish along # with the signing keys for this server. Other matrix servers that # make HTTPS requests to this server will check that the TLS -- cgit 1.5.1 From 32b781bfe20034052d71558c0ed9b0df511a1f77 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 12 Feb 2019 10:51:31 +0000 Subject: Fix error when loading cert if tls is disabled (#4618) If TLS is disabled, it should not be an error if no cert is given. Fixes #4554. 
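In outline, the intended behaviour is that a missing certificate or key is only fatal when a TLS-enabled listener actually needs them; otherwise the certificate is loaded on a best-effort basis, purely for the published tls_fingerprints. A rough sketch of that control flow, with `read_cert` and `read_key` standing in for the real file-reading helpers (they are not Synapse functions):

    import logging

    logger = logging.getLogger(__name__)


    def load_tls_material(cert_path, key_path, tls_listeners_configured,
                          read_cert, read_key):
        """Load TLS material, treating it as optional when TLS is unused."""
        cert = key = None
        if tls_listeners_configured:
            # TLS listeners exist, so both files must be present and readable.
            key = read_key(key_path)
            cert = read_cert(cert_path)
        elif cert_path:
            # No TLS listeners: the certificate is only wanted for the
            # tls_fingerprints, so a failure to read it is logged and ignored.
            try:
                cert = read_cert(cert_path)
            except Exception as e:
                logger.info("Unable to read TLS certificate (%s), ignoring", e)
        return cert, key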
--- changelog.d/4618.bugfix | 1 + synapse/app/_base.py | 5 +++-- synapse/config/tls.py | 57 +++++++++++++++++++++++++++++++++++------------- tests/config/test_tls.py | 2 +- 4 files changed, 47 insertions(+), 18 deletions(-) create mode 100644 changelog.d/4618.bugfix (limited to 'synapse/app') diff --git a/changelog.d/4618.bugfix b/changelog.d/4618.bugfix new file mode 100644 index 0000000000..22115a020e --- /dev/null +++ b/changelog.d/4618.bugfix @@ -0,0 +1 @@ +Fix failure to start when not TLS certificate was given even if TLS was disabled. diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 50fd17c0be..5b0ca312e2 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -213,12 +213,13 @@ def refresh_certificate(hs): Refresh the TLS certificates that Synapse is using by re-reading them from disk and updating the TLS context factories to use them. """ - hs.config.read_certificate_from_disk() if not hs.config.has_tls_listener(): - # nothing else to do here + # attempt to reload the certs for the good of the tls_fingerprints + hs.config.read_certificate_from_disk(require_cert_and_key=False) return + hs.config.read_certificate_from_disk(require_cert_and_key=True) hs.tls_server_context_factory = context_factory.ServerContextFactory(hs.config) if hs._listening_services: diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 86e6eb80db..57f117a14d 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -23,7 +23,7 @@ from unpaddedbase64 import encode_base64 from OpenSSL import crypto -from synapse.config._base import Config +from synapse.config._base import Config, ConfigError logger = logging.getLogger(__name__) @@ -45,6 +45,19 @@ class TlsConfig(Config): self.tls_certificate_file = self.abspath(config.get("tls_certificate_path")) self.tls_private_key_file = self.abspath(config.get("tls_private_key_path")) + + if self.has_tls_listener(): + if not self.tls_certificate_file: + raise ConfigError( + "tls_certificate_path must be specified if TLS-enabled listeners are " + "configured." + ) + if not self.tls_private_key_file: + raise ConfigError( + "tls_certificate_path must be specified if TLS-enabled listeners are " + "configured." + ) + self._original_tls_fingerprints = config.get("tls_fingerprints", []) if self._original_tls_fingerprints is None: @@ -105,26 +118,40 @@ class TlsConfig(Config): days_remaining = (expires_on - now).days return days_remaining - def read_certificate_from_disk(self): - """ - Read the certificates from disk. + def read_certificate_from_disk(self, require_cert_and_key): """ - self.tls_certificate = self.read_tls_certificate() + Read the certificates and private key from disk. - if self.has_tls_listener(): + Args: + require_cert_and_key (bool): set to True to throw an error if the certificate + and key file are not given + """ + if require_cert_and_key: self.tls_private_key = self.read_tls_private_key() + self.tls_certificate = self.read_tls_certificate() + elif self.tls_certificate_file: + # we only need the certificate for the tls_fingerprints. Reload it if we + # can, but it's not a fatal error if we can't. + try: + self.tls_certificate = self.read_tls_certificate() + except Exception as e: + logger.info( + "Unable to read TLS certificate (%s). Ignoring as no " + "tls listeners enabled.", e, + ) self.tls_fingerprints = list(self._original_tls_fingerprints) - # Check that our own certificate is included in the list of fingerprints - # and include it if it is not. 
- x509_certificate_bytes = crypto.dump_certificate( - crypto.FILETYPE_ASN1, self.tls_certificate - ) - sha256_fingerprint = encode_base64(sha256(x509_certificate_bytes).digest()) - sha256_fingerprints = set(f["sha256"] for f in self.tls_fingerprints) - if sha256_fingerprint not in sha256_fingerprints: - self.tls_fingerprints.append({u"sha256": sha256_fingerprint}) + if self.tls_certificate: + # Check that our own certificate is included in the list of fingerprints + # and include it if it is not. + x509_certificate_bytes = crypto.dump_certificate( + crypto.FILETYPE_ASN1, self.tls_certificate + ) + sha256_fingerprint = encode_base64(sha256(x509_certificate_bytes).digest()) + sha256_fingerprints = set(f["sha256"] for f in self.tls_fingerprints) + if sha256_fingerprint not in sha256_fingerprints: + self.tls_fingerprints.append({u"sha256": sha256_fingerprint}) def default_config(self, config_dir_path, server_name, **kwargs): base_key_name = os.path.join(config_dir_path, server_name) diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py index d8fd18a9cb..c260d3359f 100644 --- a/tests/config/test_tls.py +++ b/tests/config/test_tls.py @@ -65,7 +65,7 @@ s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg= t = TestConfig() t.read_config(config) - t.read_certificate_from_disk() + t.read_certificate_from_disk(require_cert_and_key=False) warnings = self.flushWarnings() self.assertEqual(len(warnings), 1) -- cgit 1.5.1 From ef2228c890b44963c65f086eb1246c27ef43d256 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 12 Feb 2019 13:55:58 +0000 Subject: Basic sentry integration --- synapse/app/_base.py | 22 ++++++++++++++++++++++ synapse/config/metrics.py | 8 ++++++++ synapse/python_dependencies.py | 1 + 3 files changed, 31 insertions(+) (limited to 'synapse/app') diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 5b0ca312e2..d681ff5245 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -25,10 +25,12 @@ from daemonize import Daemonize from twisted.internet import error, reactor from twisted.protocols.tls import TLSMemoryBIOFactory +import synapse from synapse.app import check_bind_error from synapse.crypto import context_factory from synapse.util import PreserveLoggingContext from synapse.util.rlimit import change_resource_limit +from synapse.util.versionstring import get_version_string logger = logging.getLogger(__name__) @@ -266,9 +268,29 @@ def start(hs, listeners=None): # It is now safe to start your Synapse. 
hs.start_listening(listeners) hs.get_datastore().start_profiling() + + setup_sentry_io(hs) except Exception: traceback.print_exc(file=sys.stderr) reactor = hs.get_reactor() if reactor.running: reactor.stop() sys.exit(1) + + +def setup_sentry_io(hs): + if not hs.config.sentry_enabled: + return + + import sentry_sdk + sentry_sdk.init( + dsn=hs.config.sentry_dsn, + release=get_version_string(synapse), + ) + with sentry_sdk.configure_scope() as scope: + scope.set_tag("matrix_server_name", hs.config.server_name) + + app = hs.config.worker_app if hs.config.worker_app else "synapse.app.homeserver" + name = hs.config.worker_name if hs.config.worker_name else "master" + scope.set_tag("worker_app", app) + scope.set_tag("worker_name", name) diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py index 718c43ae03..f312010a9b 100644 --- a/synapse/config/metrics.py +++ b/synapse/config/metrics.py @@ -23,12 +23,20 @@ class MetricsConfig(Config): self.metrics_port = config.get("metrics_port") self.metrics_bind_host = config.get("metrics_bind_host", "127.0.0.1") + self.sentry_enabled = "sentry" in config + if self.sentry_enabled: + self.sentry_dsn = config["sentry"]["dsn"] + def default_config(self, report_stats=None, **kwargs): res = """\ ## Metrics ### # Enable collection and rendering of performance metrics enable_metrics: False + + # Enable sentry.io integration + #sentry: + # dsn: "..." """ if report_stats is None: diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 5d087ee26b..444fc2a979 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -86,6 +86,7 @@ CONDITIONAL_REQUIREMENTS = { "saml2": ["pysaml2>=4.5.0"], "url_preview": ["lxml>=3.5.0"], "test": ["mock>=2.0", "parameterized"], + "sentry": ["sentry-sdk>=0.7.2"], } -- cgit 1.5.1 From 93f7d2df3e8bb44e4f9fb6c5ce3fc23a86c30c1a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 12 Feb 2019 16:03:40 +0000 Subject: Comments --- synapse/app/_base.py | 8 ++++++++ 1 file changed, 8 insertions(+) (limited to 'synapse/app') diff --git a/synapse/app/_base.py b/synapse/app/_base.py index d681ff5245..482f0e22ea 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -279,6 +279,12 @@ def start(hs, listeners=None): def setup_sentry_io(hs): + """Enable sentry.io integration, if enabled in configuration + + Args: + hs (synapse.server.HomeServer) + """ + if not hs.config.sentry_enabled: return @@ -287,6 +293,8 @@ def setup_sentry_io(hs): dsn=hs.config.sentry_dsn, release=get_version_string(synapse), ) + + # We set some default tags that give some context to this instance with sentry_sdk.configure_scope() as scope: scope.set_tag("matrix_server_name", hs.config.server_name) -- cgit 1.5.1 From e3a0300431e6f641297cb90e936cb2d35fb51850 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 13 Feb 2019 11:48:56 +0000 Subject: Special-case the default bind_addresses for metrics listener turns out it doesn't really support ipv6, so let's hack around that by only listening on ipv4 by default. 
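
For context, the fallback this patch introduces can be sketched as a small stand-alone function; the authoritative change is the synapse/config/server.py hunk below, and the exact value of DEFAULT_BIND_ADDRESSES is an assumption here, not taken from this patch.

    DEFAULT_BIND_ADDRESSES = ['::', '0.0.0.0']  # assumed default; not shown in this patch

    def default_bind_addresses(listener):
        """Pick bind addresses for a listener that has none configured."""
        bind_addresses = list(listener.get('bind_addresses', []))
        if not bind_addresses:
            if listener['type'] == 'metrics':
                # the metrics listener (prometheus_client's start_http_server)
                # doesn't really support IPv6, so fall back to IPv4 only
                bind_addresses.append('0.0.0.0')
            else:
                bind_addresses.extend(DEFAULT_BIND_ADDRESSES)
        return bind_addresses

    # e.g. default_bind_addresses({'type': 'metrics', 'port': 9090}) == ['0.0.0.0']
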
--- synapse/app/_base.py | 5 ++--- synapse/config/server.py | 6 +++++- 2 files changed, 7 insertions(+), 4 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 5b0ca312e2..1d2c3339ff 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -153,9 +153,8 @@ def listen_metrics(bind_addresses, port): from prometheus_client import start_http_server for host in bind_addresses: - reactor.callInThread(start_http_server, int(port), - addr=host, registry=RegistryProxy) - logger.info("Metrics now reporting on %s:%d", host, port) + logger.info("Starting metrics listener on %s:%d", host, port) + start_http_server(port, addr=host, registry=RegistryProxy) def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50): diff --git a/synapse/config/server.py b/synapse/config/server.py index c5c3aac8ed..93a30e4cfa 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -151,7 +151,11 @@ class ServerConfig(Config): # if we still have an empty list of addresses, use the default list if not bind_addresses: - bind_addresses.extend(DEFAULT_BIND_ADDRESSES) + if listener['type'] == 'metrics': + # the metrics listener doesn't support IPv6 + bind_addresses.append('0.0.0.0') + else: + bind_addresses.extend(DEFAULT_BIND_ADDRESSES) self.listeners.append(listener) -- cgit 1.5.1 From 2a5a15aff8f8d3146df52d5421a6c818637eb629 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 13 Feb 2019 11:53:43 +0000 Subject: Improve logging around listening services I wanted to bring listen_tcp into line with listen_ssl in terms of returning a list of ports, and wanted to check that was a safe thing to do - hence the logging in `refresh_certificate`. Also, pull the 'Synapse now listening' message up to homeserver.py, because it was being duplicated everywhere else. --- synapse/app/_base.py | 23 ++++++++++++++--------- synapse/app/homeserver.py | 8 ++++++-- 2 files changed, 20 insertions(+), 11 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 1d2c3339ff..c53b644932 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -162,21 +162,23 @@ def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50): Create a TCP socket for a port and several addresses Returns: - list (empty) + list of twisted.internet.tcp.Port listening for TLS connections """ + r = [] for address in bind_addresses: try: - reactor.listenTCP( - port, - factory, - backlog, - address + r.append( + reactor.listenTCP( + port, + factory, + backlog, + address + ) ) except error.CannotListenError as e: check_bind_error(e, address, bind_addresses) - logger.info("Synapse now listening on TCP port %d", port) - return [] + return r def listen_ssl( @@ -203,7 +205,6 @@ def listen_ssl( except error.CannotListenError as e: check_bind_error(e, address, bind_addresses) - logger.info("Synapse now listening on port %d (TLS)", port) return r @@ -229,6 +230,10 @@ def refresh_certificate(hs): # requests. This factory attribute is public but missing from # Twisted's documentation. if isinstance(i.factory, TLSMemoryBIOFactory): + addr = i.getHost() + logger.info( + "Replacing TLS context factory on [%s]:%i", addr.host, addr.port, + ) # We want to replace TLS factories with a new one, with the new # TLS configuration. We do this by reaching in and pulling out # the wrappedFactory, and then re-wrapping it. 
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index dbd98d394f..2b008e3402 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -121,7 +121,7 @@ class SynapseHomeServer(HomeServer): root_resource = create_resource_tree(resources, root_resource) if tls: - return listen_ssl( + ports = listen_ssl( bind_addresses, port, SynapseSite( @@ -134,9 +134,10 @@ class SynapseHomeServer(HomeServer): self.tls_server_context_factory, reactor=self.get_reactor(), ) + logger.info("Synapse now listening on TCP port %d (TLS)", port) else: - return listen_tcp( + ports = listen_tcp( bind_addresses, port, SynapseSite( @@ -148,6 +149,9 @@ class SynapseHomeServer(HomeServer): ), reactor=self.get_reactor(), ) + logger.info("Synapse now listening on TCP port %d", port) + + return ports def _configure_named_resource(self, name, compress=False): """Build a resource map for a named resource -- cgit 1.5.1 From 767686af48ecbf5fe999830e0b0ce13b7fd46b1b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 13 Feb 2019 11:59:04 +0000 Subject: Use `listen_tcp` for the replication listener Fixes the "can't listen on 0.0.0.0" error. Also makes it more consistent with what we do elsewhere. --- synapse/app/homeserver.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 2b008e3402..dbd9a83877 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -266,14 +266,14 @@ class SynapseHomeServer(HomeServer): ) ) elif listener["type"] == "replication": - bind_addresses = listener["bind_addresses"] - for address in bind_addresses: - factory = ReplicationStreamProtocolFactory(self) - server_listener = reactor.listenTCP( - listener["port"], factory, interface=address - ) + services = listen_tcp( + listener["bind_addresses"], + listener["port"], + ReplicationStreamProtocolFactory(self), + ) + for s in services: reactor.addSystemEventTrigger( - "before", "shutdown", server_listener.stopListening, + "before", "shutdown", s.stopListening, ) elif listener["type"] == "metrics": if not self.get_config().enable_metrics: -- cgit 1.5.1 From 309f3bb322ea407b29651bd31c12183ea81b05b1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 13 Feb 2019 13:24:27 +0000 Subject: Update synapse/app/_base.py Co-Authored-By: richvdh <1389908+richvdh@users.noreply.github.com> --- synapse/app/_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/app') diff --git a/synapse/app/_base.py b/synapse/app/_base.py index c53b644932..73ca52bd8c 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -162,7 +162,7 @@ def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50): Create a TCP socket for a port and several addresses Returns: - list of twisted.internet.tcp.Port listening for TLS connections + list[twisted.internet.tcp.Port]: listening for TCP connections """ r = [] for address in bind_addresses: -- cgit 1.5.1 From 6cb415b63fd58ab253f8519f725a581a0e15044e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 13 Feb 2019 16:14:37 +0000 Subject: Fixup comments and add warning --- changelog.d/4632.feature | 2 +- synapse/app/_base.py | 6 +++--- synapse/config/metrics.py | 9 +++++++-- 3 files changed, 11 insertions(+), 6 deletions(-) (limited to 'synapse/app') diff --git a/changelog.d/4632.feature b/changelog.d/4632.feature index 0bdeb3738a..d053ab5a25 100644 --- a/changelog.d/4632.feature +++ b/changelog.d/4632.feature @@ -1 +1 
@@ -Add basic optional sentry.io integration +Add basic optional sentry integration diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 482f0e22ea..0284151d0f 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -269,7 +269,7 @@ def start(hs, listeners=None): hs.start_listening(listeners) hs.get_datastore().start_profiling() - setup_sentry_io(hs) + setup_sentry(hs) except Exception: traceback.print_exc(file=sys.stderr) reactor = hs.get_reactor() @@ -278,8 +278,8 @@ def start(hs, listeners=None): sys.exit(1) -def setup_sentry_io(hs): - """Enable sentry.io integration, if enabled in configuration +def setup_sentry(hs): + """Enable sentry integration, if enabled in configuration Args: hs (synapse.server.HomeServer) diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py index 7bab8b0b2b..185b895a24 100644 --- a/synapse/config/metrics.py +++ b/synapse/config/metrics.py @@ -16,7 +16,7 @@ from ._base import Config, ConfigError MISSING_SENTRY = ( - """Missing sentry_sdk library. This is required for enable sentry.io + """Missing sentry_sdk library. This is required for enable sentry integration. Install by running: @@ -48,7 +48,12 @@ class MetricsConfig(Config): # Enable collection and rendering of performance metrics enable_metrics: False - # Enable sentry.io integration + # Enable sentry integration + # NOTE: While attempts are made to ensure that the logs don't contain + # any sensitive information, this cannot be guaranteed. By enabling + # this option the sentry server may therefore receive sensitive + # information, and it in turn may then diseminate sensitive information + # through insecure notification channels if so configured. #sentry: # dsn: "..." """ -- cgit 1.5.1 From f31101882391aa37bc6fdeb71d1559d5dcdfd4e7 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 14 Feb 2019 17:10:36 +0000 Subject: Fix errors in acme provisioning (#4648) * Better logging for errors on startup * Fix "TypeError: '>' not supported" when starting without an existing certificate * Fix a bug where an existing certificate would be reprovisoned every day --- changelog.d/4648.bugfix | 2 ++ synapse/app/homeserver.py | 19 +++++++++++++------ synapse/config/logger.py | 2 ++ 3 files changed, 17 insertions(+), 6 deletions(-) create mode 100644 changelog.d/4648.bugfix (limited to 'synapse/app') diff --git a/changelog.d/4648.bugfix b/changelog.d/4648.bugfix new file mode 100644 index 0000000000..bd117e8664 --- /dev/null +++ b/changelog.d/4648.bugfix @@ -0,0 +1,2 @@ +Fix "TypeError: '>' not supported" when starting without an existing certificate. +Fix a bug where an existing certificate would be reprovisoned every day. diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index dbd9a83877..05a97979ec 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -1,6 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,11 +15,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import print_function + import gc import logging import os import sys -import traceback from six import iteritems @@ -27,6 +29,7 @@ from prometheus_client import Gauge from twisted.application import service from twisted.internet import defer, reactor +from twisted.python.failure import Failure from twisted.web.resource import EncodingResourceWrapper, NoResource from twisted.web.server import GzipEncoderFactory from twisted.web.static import File @@ -394,10 +397,10 @@ def setup(config_options): # is less than our re-registration threshold. provision = False - if (cert_days_remaining is None): - provision = True - - if cert_days_remaining > hs.config.acme_reprovision_threshold: + if ( + cert_days_remaining is None or + cert_days_remaining < hs.config.acme_reprovision_threshold + ): provision = True if provision: @@ -438,7 +441,11 @@ def setup(config_options): hs.get_datastore().start_doing_background_updates() except Exception: # Print the exception and bail out. - traceback.print_exc(file=sys.stderr) + print("Error during startup:", file=sys.stderr) + + # this gives better tracebacks than traceback.print_exc() + Failure().printTraceback(file=sys.stderr) + if reactor.running: reactor.stop() sys.exit(1) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 4b938053fb..9b5994d55e 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -242,3 +242,5 @@ def setup_logging(config, use_worker_options=False): [_log], redirectStandardIO=not config.no_redirect_stdio, ) + if not config.no_redirect_stdio: + print("Redirected stdout/stderr to logs") -- cgit 1.5.1 From eb2b8523ae1ddd38bf1dd19ee37e44e7f4a3ee68 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Feb 2019 12:12:57 +0000 Subject: Split out registration to worker This allows registration to be handled by a worker, though the actual write to the database still happens on master. Note: due to the in-memory session map all registration requests must be handled by the same worker. 
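
The dispatch pattern this commit introduces can be condensed from the synapse/handlers/register.py hunk below; the keyword-argument passthrough is a simplification of the full parameter list, not the literal method body.

    # Condensed sketch of RegistrationHandler._register_with_store as added below.
    def _register_with_store(self, user_id, **params):
        if self.hs.config.worker_app:
            # Running on a worker: forward the write to the master over the
            # replication HTTP API (ReplicationRegisterServlet).
            return self._register_client(user_id=user_id, **params)
        # Running on the master: write to the database directly.
        return self.store.register(user_id=user_id, **params)
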
--- synapse/app/client_reader.py | 2 + synapse/handlers/register.py | 63 ++++++++- synapse/replication/http/__init__.py | 4 +- synapse/replication/http/login.py | 85 ++++++++++++ synapse/replication/http/register.py | 91 ++++++++++++ synapse/rest/client/v2_alpha/register.py | 73 ++++++---- synapse/storage/registration.py | 230 +++++++++++++++---------------- 7 files changed, 401 insertions(+), 147 deletions(-) create mode 100644 synapse/replication/http/login.py create mode 100644 synapse/replication/http/register.py (limited to 'synapse/app') diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index a9d2147022..9250b6c239 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -47,6 +47,7 @@ from synapse.rest.client.v1.room import ( RoomMemberListRestServlet, RoomStateRestServlet, ) +from synapse.rest.client.v2_alpha.register import RegisterRestServlet from synapse.server import HomeServer from synapse.storage.engines import create_engine from synapse.util.httpresourcetree import create_resource_tree @@ -92,6 +93,7 @@ class ClientReaderServer(HomeServer): JoinedRoomMemberListRestServlet(self).register(resource) RoomStateRestServlet(self).register(resource) RoomEventContextServlet(self).register(resource) + RegisterRestServlet(self).register(resource) resources.update({ "/_matrix/client/r0": resource, diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 21c17c59a0..8ea557a003 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -27,6 +27,7 @@ from synapse.api.errors import ( SynapseError, ) from synapse.http.client import CaptchaServerHttpClient +from synapse.replication.http.register import ReplicationRegisterServlet from synapse.types import RoomAlias, RoomID, UserID, create_requester from synapse.util.async_helpers import Linearizer from synapse.util.threepids import check_3pid_allowed @@ -61,6 +62,9 @@ class RegistrationHandler(BaseHandler): ) self._server_notices_mxid = hs.config.server_notices_mxid + if hs.config.worker_app: + self._register_client = ReplicationRegisterServlet.make_client(hs) + @defer.inlineCallbacks def check_username(self, localpart, guest_access_token=None, assigned_user_id=None): @@ -185,7 +189,7 @@ class RegistrationHandler(BaseHandler): token = None if generate_token: token = self.macaroon_gen.generate_access_token(user_id) - yield self.store.register( + yield self._register_with_store( user_id=user_id, token=token, password_hash=password_hash, @@ -217,7 +221,7 @@ class RegistrationHandler(BaseHandler): if default_display_name is None: default_display_name = localpart try: - yield self.store.register( + yield self._register_with_store( user_id=user_id, token=token, password_hash=password_hash, @@ -316,7 +320,7 @@ class RegistrationHandler(BaseHandler): user_id, allowed_appservice=service ) - yield self.store.register( + yield self._register_with_store( user_id=user_id, password_hash="", appservice_id=service_id, @@ -494,7 +498,7 @@ class RegistrationHandler(BaseHandler): token = self.macaroon_gen.generate_access_token(user_id) if need_register: - yield self.store.register( + yield self._register_with_store( user_id=user_id, token=token, password_hash=password_hash, @@ -573,3 +577,54 @@ class RegistrationHandler(BaseHandler): action="join", ratelimit=False, ) + + def _register_with_store(self, user_id, token=None, password_hash=None, + was_guest=False, make_guest=False, appservice_id=None, + create_profile_with_displayname=None, admin=False, + user_type=None): + 
"""Register user in the datastore. + + Args: + user_id (str): The desired user ID to register. + token (str): The desired access token to use for this user. If this + is not None, the given access token is associated with the user + id. + password_hash (str|None): Optional. The password hash for this user. + was_guest (bool): Optional. Whether this is a guest account being + upgraded to a non-guest account. + make_guest (boolean): True if the the new user should be guest, + false to add a regular user account. + appservice_id (str|None): The ID of the appservice registering the user. + create_profile_with_displayname (unicode|None): Optionally create a + profile for the user, setting their displayname to the given value + admin (boolean): is an admin user? + user_type (str|None): type of user. One of the values from + api.constants.UserTypes, or None for a normal user. + + Returns: + Deferred + """ + if self.hs.config.worker_app: + return self._register_client( + user_id=user_id, + token=token, + password_hash=password_hash, + was_guest=was_guest, + make_guest=make_guest, + appservice_id=appservice_id, + create_profile_with_displayname=create_profile_with_displayname, + admin=admin, + user_type=user_type, + ) + else: + return self.store.register( + user_id=user_id, + token=token, + password_hash=password_hash, + was_guest=was_guest, + make_guest=make_guest, + appservice_id=appservice_id, + create_profile_with_displayname=create_profile_with_displayname, + admin=admin, + user_type=user_type, + ) diff --git a/synapse/replication/http/__init__.py b/synapse/replication/http/__init__.py index 19f214281e..81b85352b1 100644 --- a/synapse/replication/http/__init__.py +++ b/synapse/replication/http/__init__.py @@ -14,7 +14,7 @@ # limitations under the License. from synapse.http.server import JsonResource -from synapse.replication.http import federation, membership, send_event +from synapse.replication.http import federation, login, membership, register, send_event REPLICATION_PREFIX = "/_synapse/replication" @@ -28,3 +28,5 @@ class ReplicationRestResource(JsonResource): send_event.register_servlets(hs, self) membership.register_servlets(hs, self) federation.register_servlets(hs, self) + login.register_servlets(hs, self) + register.register_servlets(hs, self) diff --git a/synapse/replication/http/login.py b/synapse/replication/http/login.py new file mode 100644 index 0000000000..797f6aabd1 --- /dev/null +++ b/synapse/replication/http/login.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from twisted.internet import defer + +from synapse.http.servlet import parse_json_object_from_request +from synapse.replication.http._base import ReplicationEndpoint + +logger = logging.getLogger(__name__) + + +class RegisterDeviceReplicationServlet(ReplicationEndpoint): + """Ensure a device is registered, generating a new access token for the + device. + + Used during registration and login. 
+ """ + + NAME = "device_check_registered" + PATH_ARGS = ("user_id",) + + def __init__(self, hs): + super(RegisterDeviceReplicationServlet, self).__init__(hs) + self.auth_handler = hs.get_auth_handler() + self.device_handler = hs.get_device_handler() + self.macaroon_gen = hs.get_macaroon_generator() + + @staticmethod + def _serialize_payload(user_id, device_id, initial_display_name, is_guest): + """ + Args: + device_id (str|None): Device ID to use, if None a new one is + generated. + initial_display_name (str|None) + is_guest (bool) + """ + return { + "device_id": device_id, + "initial_display_name": initial_display_name, + "is_guest": is_guest, + } + + @defer.inlineCallbacks + def _handle_request(self, request, user_id): + content = parse_json_object_from_request(request) + + device_id = content["device_id"] + initial_display_name = content["initial_display_name"] + is_guest = content["is_guest"] + + device_id = yield self.device_handler.check_device_registered( + user_id, device_id, initial_display_name, + ) + + if is_guest: + access_token = self.macaroon_gen.generate_access_token( + user_id, ["guest = true"] + ) + else: + access_token = yield self.auth_handler.get_access_token_for_user_id( + user_id, device_id=device_id, + ) + + defer.returnValue((200, { + "device_id": device_id, + "access_token": access_token, + })) + + +def register_servlets(hs, http_server): + RegisterDeviceReplicationServlet(hs).register(http_server) diff --git a/synapse/replication/http/register.py b/synapse/replication/http/register.py new file mode 100644 index 0000000000..bdaf37396c --- /dev/null +++ b/synapse/replication/http/register.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from twisted.internet import defer + +from synapse.http.servlet import parse_json_object_from_request +from synapse.replication.http._base import ReplicationEndpoint + +logger = logging.getLogger(__name__) + + +class ReplicationRegisterServlet(ReplicationEndpoint): + """Register a new user + """ + + NAME = "register_user" + PATH_ARGS = ("user_id",) + + def __init__(self, hs): + super(ReplicationRegisterServlet, self).__init__(hs) + self.store = hs.get_datastore() + + @staticmethod + def _serialize_payload( + user_id, token, password_hash, was_guest, make_guest, appservice_id, + create_profile_with_displayname, admin, user_type, + ): + """ + Args: + user_id (str): The desired user ID to register. + token (str): The desired access token to use for this user. If this + is not None, the given access token is associated with the user + id. + password_hash (str|None): Optional. The password hash for this user. + was_guest (bool): Optional. Whether this is a guest account being + upgraded to a non-guest account. + make_guest (boolean): True if the the new user should be guest, + false to add a regular user account. + appservice_id (str|None): The ID of the appservice registering the user. 
+ create_profile_with_displayname (unicode|None): Optionally create a + profile for the user, setting their displayname to the given value + admin (boolean): is an admin user? + user_type (str|None): type of user. One of the values from + api.constants.UserTypes, or None for a normal user. + """ + return { + "token": token, + "password_hash": password_hash, + "was_guest": was_guest, + "make_guest": make_guest, + "appservice_id": appservice_id, + "create_profile_with_displayname": create_profile_with_displayname, + "admin": admin, + "user_type": user_type, + } + + @defer.inlineCallbacks + def _handle_request(self, request, user_id): + content = parse_json_object_from_request(request) + + yield self.store.register( + user_id=user_id, + token=content["token"], + password_hash=content["password_hash"], + was_guest=content["was_guest"], + make_guest=content["make_guest"], + appservice_id=content["appservice_id"], + create_profile_with_displayname=content["create_profile_with_displayname"], + admin=content["admin"], + user_type=content["user_type"], + ) + + defer.returnValue((200, {})) + + +def register_servlets(hs, http_server): + ReplicationRegisterServlet(hs).register(http_server) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 7f812b8209..d78da50787 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -33,6 +33,7 @@ from synapse.http.servlet import ( parse_json_object_from_request, parse_string, ) +from synapse.replication.http.login import RegisterDeviceReplicationServlet from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.ratelimitutils import FederationRateLimiter from synapse.util.threepids import check_3pid_allowed @@ -190,9 +191,15 @@ class RegisterRestServlet(RestServlet): self.registration_handler = hs.get_handlers().registration_handler self.identity_handler = hs.get_handlers().identity_handler self.room_member_handler = hs.get_room_member_handler() - self.device_handler = hs.get_device_handler() self.macaroon_gen = hs.get_macaroon_generator() + if self.hs.config.worker_app: + self._register_device_client = ( + RegisterDeviceReplicationServlet.make_client(hs) + ) + else: + self.device_handler = hs.get_device_handler() + @interactive_auth_handler @defer.inlineCallbacks def on_POST(self, request): @@ -633,12 +640,10 @@ class RegisterRestServlet(RestServlet): "home_server": self.hs.hostname, } if not params.get("inhibit_login", False): - device_id = yield self._register_device(user_id, params) - - access_token = ( - yield self.auth_handler.get_access_token_for_user_id( - user_id, device_id=device_id, - ) + device_id = params.get("device_id") + initial_display_name = params.get("initial_device_display_name") + device_id, access_token = yield self._register_device( + user_id, device_id, initial_display_name, is_guest=False, ) result.update({ @@ -647,25 +652,42 @@ class RegisterRestServlet(RestServlet): }) defer.returnValue(result) - def _register_device(self, user_id, params): - """Register a device for a user. - - This is called after the user's credentials have been validated, but - before the access token has been issued. + @defer.inlineCallbacks + def _register_device(self, user_id, device_id, initial_display_name, + is_guest): + """Register a device for a user and generate an access token. 
Args: - (str) user_id: full canonical @user:id - (object) params: registration parameters, from which we pull - device_id and initial_device_name + user_id (str): full canonical @user:id + device_id (str|None): The device ID to check, or None to generate + a new one. + initial_display_name (str|None): An optional display name for the + device. + is_guest (bool): Whether this is a guest account Returns: - defer.Deferred: (str) device_id + defer.Deferred[(str, str)]: Tuple of device ID and access token """ - # register the user's device - device_id = params.get("device_id") - initial_display_name = params.get("initial_device_display_name") - return self.device_handler.check_device_registered( - user_id, device_id, initial_display_name - ) + if self.hs.config.worker_app: + r = yield self._register_device_client( + user_id=user_id, + device_id=device_id, + initial_display_name=initial_display_name, + is_guest=is_guest, + ) + defer.returnValue((r["device_id"], r["access_token"])) + else: + device_id = yield self.device_handler.check_device_registered( + user_id, device_id, initial_display_name + ) + if is_guest: + access_token = self.macaroon_gen.generate_access_token( + user_id, ["guest = true"] + ) + else: + access_token = yield self.auth_handler.get_access_token_for_user_id( + user_id, device_id=device_id, + ) + defer.returnValue((device_id, access_token)) @defer.inlineCallbacks def _do_guest_registration(self, params): @@ -680,13 +702,10 @@ class RegisterRestServlet(RestServlet): # we have nowhere to store it. device_id = synapse.api.auth.GUEST_DEVICE_ID initial_display_name = params.get("initial_device_display_name") - yield self.device_handler.check_device_registered( - user_id, device_id, initial_display_name + device_id, access_token = yield self._register_device( + user_id, device_id, initial_display_name, is_guest=True, ) - access_token = self.macaroon_gen.generate_access_token( - user_id, ["guest = true"] - ) defer.returnValue((200, { "user_id": user_id, "device_id": device_id, diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index c9e11c3135..3bc5def48e 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -139,6 +139,121 @@ class RegistrationWorkerStore(SQLBaseStore): ) return True if res == UserTypes.SUPPORT else False + def get_users_by_id_case_insensitive(self, user_id): + """Gets users that match user_id case insensitively. + Returns a mapping of user_id -> password_hash. 
+ """ + def f(txn): + sql = ( + "SELECT name, password_hash FROM users" + " WHERE lower(name) = lower(?)" + ) + txn.execute(sql, (user_id,)) + return dict(txn) + + return self.runInteraction("get_users_by_id_case_insensitive", f) + + @defer.inlineCallbacks + def count_all_users(self): + """Counts all users registered on the homeserver.""" + def _count_users(txn): + txn.execute("SELECT COUNT(*) AS users FROM users") + rows = self.cursor_to_dict(txn) + if rows: + return rows[0]["users"] + return 0 + + ret = yield self.runInteraction("count_users", _count_users) + defer.returnValue(ret) + + def count_daily_user_type(self): + """ + Counts 1) native non guest users + 2) native guests users + 3) bridged users + who registered on the homeserver in the past 24 hours + """ + def _count_daily_user_type(txn): + yesterday = int(self._clock.time()) - (60 * 60 * 24) + + sql = """ + SELECT user_type, COALESCE(count(*), 0) AS count FROM ( + SELECT + CASE + WHEN is_guest=0 AND appservice_id IS NULL THEN 'native' + WHEN is_guest=1 AND appservice_id IS NULL THEN 'guest' + WHEN is_guest=0 AND appservice_id IS NOT NULL THEN 'bridged' + END AS user_type + FROM users + WHERE creation_ts > ? + ) AS t GROUP BY user_type + """ + results = {'native': 0, 'guest': 0, 'bridged': 0} + txn.execute(sql, (yesterday,)) + for row in txn: + results[row[0]] = row[1] + return results + return self.runInteraction("count_daily_user_type", _count_daily_user_type) + + @defer.inlineCallbacks + def count_nonbridged_users(self): + def _count_users(txn): + txn.execute(""" + SELECT COALESCE(COUNT(*), 0) FROM users + WHERE appservice_id IS NULL + """) + count, = txn.fetchone() + return count + + ret = yield self.runInteraction("count_users", _count_users) + defer.returnValue(ret) + + @defer.inlineCallbacks + def find_next_generated_user_id_localpart(self): + """ + Gets the localpart of the next generated user ID. + + Generated user IDs are integers, and we aim for them to be as small as + we can. Unfortunately, it's possible some of them are already taken by + existing users, and there may be gaps in the already taken range. This + function returns the start of the first allocatable gap. This is to + avoid the case of ID 10000000 being pre-allocated, so us wasting the + first (and shortest) many generated user IDs. + """ + def _find_next_generated_user_id(txn): + txn.execute("SELECT name FROM users") + + regex = re.compile(r"^@(\d+):") + + found = set() + + for user_id, in txn: + match = regex.search(user_id) + if match: + found.add(int(match.group(1))) + for i in range(len(found) + 1): + if i not in found: + return i + + defer.returnValue((yield self.runInteraction( + "find_next_generated_user_id", + _find_next_generated_user_id + ))) + + @defer.inlineCallbacks + def get_3pid_guest_access_token(self, medium, address): + ret = yield self._simple_select_one( + "threepid_guest_access_tokens", + { + "medium": medium, + "address": address + }, + ["guest_access_token"], True, 'get_3pid_guest_access_token' + ) + if ret: + defer.returnValue(ret["guest_access_token"]) + defer.returnValue(None) + class RegistrationStore(RegistrationWorkerStore, background_updates.BackgroundUpdateStore): @@ -326,20 +441,6 @@ class RegistrationStore(RegistrationWorkerStore, ) txn.call_after(self.is_guest.invalidate, (user_id,)) - def get_users_by_id_case_insensitive(self, user_id): - """Gets users that match user_id case insensitively. - Returns a mapping of user_id -> password_hash. 
- """ - def f(txn): - sql = ( - "SELECT name, password_hash FROM users" - " WHERE lower(name) = lower(?)" - ) - txn.execute(sql, (user_id,)) - return dict(txn) - - return self.runInteraction("get_users_by_id_case_insensitive", f) - def user_set_password_hash(self, user_id, password_hash): """ NB. This does *not* evict any cache because the one use for this @@ -564,107 +665,6 @@ class RegistrationStore(RegistrationWorkerStore, desc="user_delete_threepids", ) - @defer.inlineCallbacks - def count_all_users(self): - """Counts all users registered on the homeserver.""" - def _count_users(txn): - txn.execute("SELECT COUNT(*) AS users FROM users") - rows = self.cursor_to_dict(txn) - if rows: - return rows[0]["users"] - return 0 - - ret = yield self.runInteraction("count_users", _count_users) - defer.returnValue(ret) - - def count_daily_user_type(self): - """ - Counts 1) native non guest users - 2) native guests users - 3) bridged users - who registered on the homeserver in the past 24 hours - """ - def _count_daily_user_type(txn): - yesterday = int(self._clock.time()) - (60 * 60 * 24) - - sql = """ - SELECT user_type, COALESCE(count(*), 0) AS count FROM ( - SELECT - CASE - WHEN is_guest=0 AND appservice_id IS NULL THEN 'native' - WHEN is_guest=1 AND appservice_id IS NULL THEN 'guest' - WHEN is_guest=0 AND appservice_id IS NOT NULL THEN 'bridged' - END AS user_type - FROM users - WHERE creation_ts > ? - ) AS t GROUP BY user_type - """ - results = {'native': 0, 'guest': 0, 'bridged': 0} - txn.execute(sql, (yesterday,)) - for row in txn: - results[row[0]] = row[1] - return results - return self.runInteraction("count_daily_user_type", _count_daily_user_type) - - @defer.inlineCallbacks - def count_nonbridged_users(self): - def _count_users(txn): - txn.execute(""" - SELECT COALESCE(COUNT(*), 0) FROM users - WHERE appservice_id IS NULL - """) - count, = txn.fetchone() - return count - - ret = yield self.runInteraction("count_users", _count_users) - defer.returnValue(ret) - - @defer.inlineCallbacks - def find_next_generated_user_id_localpart(self): - """ - Gets the localpart of the next generated user ID. - - Generated user IDs are integers, and we aim for them to be as small as - we can. Unfortunately, it's possible some of them are already taken by - existing users, and there may be gaps in the already taken range. This - function returns the start of the first allocatable gap. This is to - avoid the case of ID 10000000 being pre-allocated, so us wasting the - first (and shortest) many generated user IDs. 
- """ - def _find_next_generated_user_id(txn): - txn.execute("SELECT name FROM users") - - regex = re.compile(r"^@(\d+):") - - found = set() - - for user_id, in txn: - match = regex.search(user_id) - if match: - found.add(int(match.group(1))) - for i in range(len(found) + 1): - if i not in found: - return i - - defer.returnValue((yield self.runInteraction( - "find_next_generated_user_id", - _find_next_generated_user_id - ))) - - @defer.inlineCallbacks - def get_3pid_guest_access_token(self, medium, address): - ret = yield self._simple_select_one( - "threepid_guest_access_tokens", - { - "medium": medium, - "address": address - }, - ["guest_access_token"], True, 'get_3pid_guest_access_token' - ) - if ret: - defer.returnValue(ret["guest_access_token"]) - defer.returnValue(None) - @defer.inlineCallbacks def save_or_get_3pid_guest_access_token( self, medium, address, access_token, inviter_user_id -- cgit 1.5.1 From 32e54b472a6c6c12d0b92ac46c8733c300589f19 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Feb 2019 14:08:13 +0000 Subject: Fix kicking guest users in worker mode When guest_access changes from allowed to forbidden all local guest users should be kicked from the room. This did not happen when revocation was received from federation on a worker. Presumably broken in #4141 --- synapse/app/federation_reader.py | 2 ++ synapse/handlers/_base.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) (limited to 'synapse/app') diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 6ee2b76dcd..b116c17669 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -40,6 +40,7 @@ from synapse.replication.slave.storage.profile import SlavedProfileStore from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore from synapse.replication.slave.storage.pushers import SlavedPusherStore from synapse.replication.slave.storage.receipts import SlavedReceiptsStore +from synapse.replication.slave.storage.registration import SlavedRegistrationStore from synapse.replication.slave.storage.room import RoomStore from synapse.replication.slave.storage.transactions import SlavedTransactionStore from synapse.replication.tcp.client import ReplicationClientHandler @@ -62,6 +63,7 @@ class FederationReaderSlavedStore( SlavedReceiptsStore, SlavedEventStore, SlavedKeyStore, + SlavedRegistrationStore, RoomStore, DirectoryStore, SlavedTransactionStore, diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 704181d2d3..594754cfd8 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -167,4 +167,4 @@ class BaseHandler(object): ratelimit=False, ) except Exception as e: - logger.warn("Error kicking guest user: %s" % (e,)) + logger.exception("Error kicking guest user: %s" % (e,)) -- cgit 1.5.1 From 4cc4400b4deabed113548b296656d0e6e404dbde Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Feb 2019 17:19:01 +0000 Subject: Split /login into client_reader --- synapse/app/client_reader.py | 2 + synapse/storage/registration.py | 82 ++++++++++++++++++++--------------------- 2 files changed, 43 insertions(+), 41 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index 9250b6c239..043b48f8f3 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -40,6 +40,7 @@ from synapse.replication.slave.storage.registration import SlavedRegistrationSto from synapse.replication.slave.storage.room import RoomStore from 
synapse.replication.slave.storage.transactions import SlavedTransactionStore from synapse.replication.tcp.client import ReplicationClientHandler +from synapse.rest.client.v1.login import LoginRestServlet from synapse.rest.client.v1.room import ( JoinedRoomMemberListRestServlet, PublicRoomListRestServlet, @@ -94,6 +95,7 @@ class ClientReaderServer(HomeServer): RoomStateRestServlet(self).register(resource) RoomEventContextServlet(self).register(resource) RegisterRestServlet(self).register(resource) + LoginRestServlet(self).register(resource) resources.update({ "/_matrix/client/r0": resource, diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 3bc5def48e..9b9572890b 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -254,6 +254,47 @@ class RegistrationWorkerStore(SQLBaseStore): defer.returnValue(ret["guest_access_token"]) defer.returnValue(None) + @defer.inlineCallbacks + def get_user_id_by_threepid(self, medium, address): + """Returns user id from threepid + + Args: + medium (str): threepid medium e.g. email + address (str): threepid address e.g. me@example.com + + Returns: + Deferred[str|None]: user id or None if no user id/threepid mapping exists + """ + user_id = yield self.runInteraction( + "get_user_id_by_threepid", self.get_user_id_by_threepid_txn, + medium, address + ) + defer.returnValue(user_id) + + def get_user_id_by_threepid_txn(self, txn, medium, address): + """Returns user id from threepid + + Args: + txn (cursor): + medium (str): threepid medium e.g. email + address (str): threepid address e.g. me@example.com + + Returns: + str|None: user id or None if no user id/threepid mapping exists + """ + ret = self._simple_select_one_txn( + txn, + "user_threepids", + { + "medium": medium, + "address": address + }, + ['user_id'], True + ) + if ret: + return ret['user_id'] + return None + class RegistrationStore(RegistrationWorkerStore, background_updates.BackgroundUpdateStore): @@ -613,47 +654,6 @@ class RegistrationStore(RegistrationWorkerStore, ) defer.returnValue(ret) - @defer.inlineCallbacks - def get_user_id_by_threepid(self, medium, address): - """Returns user id from threepid - - Args: - medium (str): threepid medium e.g. email - address (str): threepid address e.g. me@example.com - - Returns: - Deferred[str|None]: user id or None if no user id/threepid mapping exists - """ - user_id = yield self.runInteraction( - "get_user_id_by_threepid", self.get_user_id_by_threepid_txn, - medium, address - ) - defer.returnValue(user_id) - - def get_user_id_by_threepid_txn(self, txn, medium, address): - """Returns user id from threepid - - Args: - txn (cursor): - medium (str): threepid medium e.g. email - address (str): threepid address e.g. 
me@example.com - - Returns: - str|None: user id or None if no user id/threepid mapping exists - """ - ret = self._simple_select_one_txn( - txn, - "user_threepids", - { - "medium": medium, - "address": address - }, - ['user_id'], True - ) - if ret: - return ret['user_id'] - return None - def user_delete_threepid(self, user_id, medium, address): return self._simple_delete( "user_threepids", -- cgit 1.5.1 From 47a7e3928d103b88ede86fb742cd05d092f5ee62 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Sat, 23 Feb 2019 15:17:57 +0000 Subject: Correctly proxy exception in frontend_proxy worker --- synapse/app/frontend_proxy.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index d5b954361d..8479fee738 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -21,7 +21,7 @@ from twisted.web.resource import NoResource import synapse from synapse import events -from synapse.api.errors import SynapseError +from synapse.api.errors import HttpResponseException, SynapseError from synapse.app import _base from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig @@ -66,10 +66,15 @@ class PresenceStatusStubServlet(ClientV1RestServlet): headers = { "Authorization": auth_headers, } - result = yield self.http_client.get_json( - self.main_uri + request.uri.decode('ascii'), - headers=headers, - ) + + try: + result = yield self.http_client.get_json( + self.main_uri + request.uri.decode('ascii'), + headers=headers, + ) + except HttpResponseException as e: + raise e.to_synapse_error() + defer.returnValue((200, result)) @defer.inlineCallbacks -- cgit 1.5.1
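
The error-proxying idiom in the final frontend_proxy patch above can be sketched as a stand-alone helper; the proxy_to_main name and its call site are illustrative only, while HttpResponseException.to_synapse_error() and get_json() are the calls shown in the diff.

    from twisted.internet import defer

    from synapse.api.errors import HttpResponseException


    @defer.inlineCallbacks
    def proxy_to_main(http_client, main_uri, path, headers):
        """Proxy a GET to the main process, relaying structured errors back."""
        try:
            result = yield http_client.get_json(main_uri + path, headers=headers)
        except HttpResponseException as e:
            # Convert the remote error back into a SynapseError so the client
            # sees the original code/message instead of a generic 500.
            raise e.to_synapse_error()
        defer.returnValue((200, result))
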