author     Mark Haines <mark.haines@matrix.org>    2015-04-24 10:35:49 +0100
committer  Mark Haines <mark.haines@matrix.org>    2015-04-24 10:35:49 +0100
commit     eede182df75d0c0b6064af016a06c2758ae217d0 (patch)
tree       4d4eb13dd3ab020032394f8282c85f4e3c2782e8
parent     Move the key related config parser into a separate file (diff)
parent     Copyright notices (diff)
download   synapse-eede182df75d0c0b6064af016a06c2758ae217d0.tar.xz
Merge branch 'develop' into key_distribution
-rw-r--r--  UPGRADE.rst                                              |  34
-rw-r--r--  docs/metrics-howto.rst                                   |  50
-rw-r--r--  synapse/api/auth.py                                      |  88
-rw-r--r--  synapse/api/errors.py                                    |   4
-rw-r--r--  synapse/handlers/presence.py                             |  51
-rw-r--r--  synapse/push/baserules.py                                |  14
-rw-r--r--  synapse/push/rulekinds.py                                |  14
-rw-r--r--  synapse/python_dependencies.py                           |  14
-rw-r--r--  synapse/rest/media/v1/identicon_resource.py              |  14
-rw-r--r--  synapse/storage/schema/delta/14/upgrade_appservice_db.py |  14
-rw-r--r--  synapse/storage/schema/delta/14/v14.sql                  |  14
-rw-r--r--  tests/handlers/test_presence.py                          |  65

12 files changed, 309 insertions, 67 deletions
diff --git a/UPGRADE.rst b/UPGRADE.rst
index 87dd6e04a8..ab327a8136 100644
--- a/UPGRADE.rst
+++ b/UPGRADE.rst
@@ -1,3 +1,37 @@
+Upgrading to v0.x.x
+===================
+
+Application services have had a breaking API change in this version.
+
+They can no longer register themselves with a home server using the AS HTTP API. This
+decision was made because a compromised application service with free rein to register
+any regex in effect grants full read/write access to the home server if a regex of ``.*``
+is used. An attack where a compromised AS re-registers itself with ``.*`` was deemed too
+big a security risk to ignore, and so the ability to register with the HS remotely has
+been removed.
+
+It has been replaced by specifying a list of application service registrations in
+``homeserver.yaml``::
+
+  app_service_config_files: ["registration-01.yaml", "registration-02.yaml"]
+
+Where ``registration-01.yaml`` looks like::
+
+  url: <String>  # e.g. "https://my.application.service.com"
+  as_token: <String>
+  hs_token: <String>
+  sender_localpart: <String>  # This is a new field which denotes the user_id localpart when using the AS token
+  namespaces:
+    users:
+    - exclusive: <Boolean>
+      regex: <String>  # e.g. "@prefix_.*"
+    aliases:
+    - exclusive: <Boolean>
+      regex: <String>
+    rooms:
+    - exclusive: <Boolean>
+      regex: <String>
+
 Upgrading to v0.8.0
 ===================
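For orientation, a filled-in ``registration-01.yaml`` following the template above might look like the sketch below. Every value is an invented placeholder (none of it comes from this commit), and only the namespaces this hypothetical bridge claims are shown; field semantics are as described in the template::

    url: "https://bridge.example.org"          # where the home server can reach the AS
    as_token: "wfghWEGh3wgWHEf3478sHFWE"       # token the AS presents when calling the HS
    hs_token: "ugw8243igya57aaABGFfgeyu"       # token the HS presents when calling the AS
    sender_localpart: "ircbridge"              # the AS itself acts as @ircbridge:<your server>
    namespaces:
      users:
      - exclusive: true
        regex: "@irc_.*"                       # user IDs this AS owns
      aliases:
      - exclusive: true
        regex: "#irc_.*"                       # room aliases this AS owns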
diff --git a/docs/metrics-howto.rst b/docs/metrics-howto.rst
new file mode 100644
index 0000000000..c1f5ae2174
--- /dev/null
+++ b/docs/metrics-howto.rst
@@ -0,0 +1,50 @@
+How to monitor Synapse metrics using Prometheus
+===============================================
+
+1: Install prometheus:
+   Follow instructions at http://prometheus.io/docs/introduction/install/
+
+2: Enable synapse metrics:
+   Simply setting a (local) port number will enable it. Pick a port.
+   prometheus itself defaults to 9090, so starting just above that for
+   locally monitored services seems reasonable. E.g. 9092:
+
+   Add to homeserver.yaml
+
+     metrics_port: 9092
+
+   Restart synapse
+
+3: Check out synapse-prometheus-config
+   https://github.com/matrix-org/synapse-prometheus-config
+
+4: Add ``synapse.html`` and ``synapse.rules``
+   The ``.html`` file needs to appear in prometheus's ``consoles`` directory,
+   and the ``.rules`` file needs to be invoked somewhere in the main config
+   file. A symlink to each from the git checkout into the prometheus directory
+   might be easiest to ensure ``git pull`` keeps it updated.
+
+5: Add a prometheus target for synapse
+   This is easiest if prometheus runs on the same machine as synapse, as it can
+   then just use localhost::
+
+     global: {
+       rule_file: "synapse.rules"
+     }
+
+     job: {
+       name: "synapse"
+
+       target_group: {
+         target: "http://localhost:9092/"
+       }
+     }
+
+6: Start prometheus::
+
+     ./prometheus -config.file=prometheus.conf
+
+7: Wait a few seconds for it to start and perform the first scrape,
+   then visit the console:
+
+     http://server-where-prometheus-runs:9090/consoles/synapse.html
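Before pointing prometheus at synapse, it may be worth checking that the metrics listener from step 2 is actually responding. Fetching the same URL that step 5 uses as the scrape target, for example::

    curl http://localhost:9092/

should return Prometheus-format counter text once synapse has been restarted; if your synapse version exposes metrics on a different path, adjust the URL accordingly (the exact path is not spelled out in this commit).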
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index e159e4503f..a21120b313 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -183,18 +183,10 @@ class Auth(object):
         else:
             join_rule = JoinRules.INVITE

-        user_level = self._get_power_level_from_event_state(
-            event,
-            event.user_id,
-            auth_events,
-        )
+        user_level = self._get_user_power_level(event.user_id, auth_events)

-        ban_level, kick_level, redact_level = (
-            self._get_ops_level_from_event_state(
-                event,
-                auth_events,
-            )
-        )
+        # FIXME (erikj): What should we do here as the default?
+        ban_level = self._get_named_level(auth_events, "ban", 50)

         logger.debug(
             "is_membership_change_allowed: %s",
@@ -210,11 +202,6 @@
             }
         )

-        if ban_level:
-            ban_level = int(ban_level)
-        else:
-            ban_level = 50  # FIXME (erikj): What should we do here?
-
         if Membership.JOIN != membership:
             # JOIN is the only action you can perform if you're not in the room
             if not caller_in_room:  # caller isn't joined
@@ -259,10 +246,7 @@
                     403, "You cannot unban user &s." % (target_user_id,)
                 )
             elif target_user_id != event.user_id:
-                if kick_level:
-                    kick_level = int(kick_level)
-                else:
-                    kick_level = 50  # FIXME (erikj): What should we do here?
+                kick_level = self._get_named_level(auth_events, "kick", 50)

                 if user_level < kick_level:
                     raise AuthError(
@@ -276,34 +260,42 @@
         return True

-    def _get_power_level_from_event_state(self, event, user_id, auth_events):
+    def _get_power_level_event(self, auth_events):
         key = (EventTypes.PowerLevels, "", )
-        power_level_event = auth_events.get(key)
-        level = None
+        return auth_events.get(key)
+
+    def _get_user_power_level(self, user_id, auth_events):
+        power_level_event = self._get_power_level_event(auth_events)
+
         if power_level_event:
             level = power_level_event.content.get("users", {}).get(user_id)
             if not level:
                 level = power_level_event.content.get("users_default", 0)
+
+            if level is None:
+                return 0
+            else:
+                return int(level)
         else:
             key = (EventTypes.Create, "", )
             create_event = auth_events.get(key)
             if (create_event is not None and
                     create_event.content["creator"] == user_id):
                 return 100
+            else:
+                return 0

-        return level
+    def _get_named_level(self, auth_events, name, default):
+        power_level_event = self._get_power_level_event(auth_events)

-    def _get_ops_level_from_event_state(self, event, auth_events):
-        key = (EventTypes.PowerLevels, "", )
-        power_level_event = auth_events.get(key)
+        if not power_level_event:
+            return default

-        if power_level_event:
-            return (
-                power_level_event.content.get("ban", 50),
-                power_level_event.content.get("kick", 50),
-                power_level_event.content.get("redact", 50),
-            )
-        return None, None, None,
+        level = power_level_event.content.get(name, None)
+        if level is not None:
+            return int(level)
+        else:
+            return default

     @defer.inlineCallbacks
     def get_user_by_req(self, request):
@@ -498,16 +490,7 @@ class Auth(object):
         else:
             send_level = 0

-        user_level = self._get_power_level_from_event_state(
-            event,
-            event.user_id,
-            auth_events,
-        )
-
-        if user_level:
-            user_level = int(user_level)
-        else:
-            user_level = 0
+        user_level = self._get_user_power_level(event.user_id, auth_events)

         if user_level < send_level:
             raise AuthError(
@@ -539,16 +522,9 @@
         return True

     def _check_redaction(self, event, auth_events):
-        user_level = self._get_power_level_from_event_state(
-            event,
-            event.user_id,
-            auth_events,
-        )
+        user_level = self._get_user_power_level(event.user_id, auth_events)

-        _, _, redact_level = self._get_ops_level_from_event_state(
-            event,
-            auth_events,
-        )
+        redact_level = self._get_named_level(auth_events, "redact", 50)

         if user_level < redact_level:
             raise AuthError(
@@ -576,11 +552,7 @@
         if not current_state:
             return

-        user_level = self._get_power_level_from_event_state(
-            event,
-            event.user_id,
-            auth_events,
-        )
+        user_level = self._get_user_power_level(event.user_id, auth_events)

         # Check other levels:
         levels_to_check = [
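The auth.py change collapses the two old lookup helpers into ``_get_power_level_event``, ``_get_user_power_level`` and ``_get_named_level``, so every caller gets the same defaulting behaviour. The following condensed, standalone sketch shows the logic those helpers implement; it uses plain dicts in place of Synapse's event objects and module-level functions instead of methods, and those simplifications (and the hard-coded event-type tuples) are assumptions made to keep the example self-contained::

    # Sketch of the consolidated power-level lookups, not the actual Auth class.
    POWER_KEY = ("m.room.power_levels", "")
    CREATE_KEY = ("m.room.create", "")


    def get_user_power_level(user_id, auth_events):
        """Effective power level of user_id, with the defaults the new helper uses."""
        power_event = auth_events.get(POWER_KEY)
        if power_event:
            content = power_event["content"]
            level = content.get("users", {}).get(user_id)
            if not level:
                level = content.get("users_default", 0)
            return int(level) if level is not None else 0

        # No m.room.power_levels event: the room creator gets 100, everyone else 0.
        create_event = auth_events.get(CREATE_KEY)
        if create_event is not None and create_event["content"]["creator"] == user_id:
            return 100
        return 0


    def get_named_level(auth_events, name, default):
        """Look up a named level ("ban", "kick", "redact", ...) with a default."""
        power_event = auth_events.get(POWER_KEY)
        if not power_event:
            return default
        level = power_event["content"].get(name)
        return int(level) if level is not None else default


    # Example room state: alice is an op, kicking requires level 50.
    auth_events = {
        POWER_KEY: {"content": {"users": {"@alice:example.com": 100}, "kick": 50}},
    }
    assert get_user_power_level("@alice:example.com", auth_events) == 100
    assert get_user_power_level("@bob:example.com", auth_events) == 0   # users_default
    assert get_named_level(auth_events, "kick", 50) == 50
    assert get_named_level(auth_events, "ban", 50) == 50                # absent -> default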
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index eddd889778..72d2bd5b4c 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -35,8 +35,8 @@ class Codes(object):
     LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED"
     CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED"
     CAPTCHA_INVALID = "M_CAPTCHA_INVALID"
-    MISSING_PARAM = "M_MISSING_PARAM",
-    TOO_LARGE = "M_TOO_LARGE",
+    MISSING_PARAM = "M_MISSING_PARAM"
+    TOO_LARGE = "M_TOO_LARGE"
     EXCLUSIVE = "M_EXCLUSIVE"
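The two-character change in ``errors.py`` is behavioural rather than cosmetic: a trailing comma after a string literal turns the constant into a one-element tuple, so comparisons against the plain error-code string (and its JSON serialisation) go wrong. A minimal illustration::

    # With the stray comma (old code), the "constant" is actually a tuple.
    MISSING_PARAM_OLD = "M_MISSING_PARAM",
    MISSING_PARAM_NEW = "M_MISSING_PARAM"

    assert MISSING_PARAM_OLD == ("M_MISSING_PARAM",)
    assert MISSING_PARAM_OLD != "M_MISSING_PARAM"   # string comparison silently fails
    assert MISSING_PARAM_NEW == "M_MISSING_PARAM"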
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index bbc7a0f200..571eacd343 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -36,6 +36,9 @@ metrics = synapse.metrics.get_metrics_for(__name__)
 # Don't bother bumping "last active" time if it differs by less than 60 seconds
 LAST_ACTIVE_GRANULARITY = 60*1000

+# Keep no more than this number of offline serial revisions
+MAX_OFFLINE_SERIALS = 1000
+
 # TODO(paul): Maybe there's one of these I can steal from somewhere
 def partition(l, func):
@@ -135,6 +138,9 @@ class PresenceHandler(BaseHandler):
         self._remote_sendmap = {}
         # map remote users to sets of local users who're interested in them
         self._remote_recvmap = {}
+        # list of (serial, set of(userids)) tuples, ordered by serial, latest
+        # first
+        self._remote_offline_serials = []

         # map any user to a UserPresenceCache
         self._user_cachemap = {}
@@ -714,8 +720,24 @@
                 statuscache=statuscache,
             )

+            user_id = user.to_string()
+
             if state["presence"] == PresenceState.OFFLINE:
+                self._remote_offline_serials.insert(
+                    0,
+                    (self._user_cachemap_latest_serial, set([user_id]))
+                )
+                while len(self._remote_offline_serials) > MAX_OFFLINE_SERIALS:
+                    self._remote_offline_serials.pop()  # remove the oldest
                 del self._user_cachemap[user]
+            else:
+                # Remove the user from remote_offline_serials now that they're
+                # no longer offline
+                for idx, elem in enumerate(self._remote_offline_serials):
+                    (_, user_ids) = elem
+                    user_ids.discard(user_id)
+                    if not user_ids:
+                        self._remote_offline_serials.pop(idx)

         for poll in content.get("poll", []):
             user = UserID.from_string(poll)
@@ -836,6 +858,8 @@
         presence = self.hs.get_handlers().presence_handler
         cachemap = presence._user_cachemap

+        clock = self.clock
+        latest_serial = None
+
         updates = []
         # TODO(paul): use a DeferredList ? How to limit concurrency.
@@ -845,18 +869,31 @@
             if cached.serial <= from_key:
                 continue

-            if (yield self.is_visible(observer_user, observed_user)):
-                updates.append((observed_user, cached))
+            if not (yield self.is_visible(observer_user, observed_user)):
+                continue
+
+            if latest_serial is None or cached.serial > latest_serial:
+                latest_serial = cached.serial
+            updates.append(cached.make_event(user=observed_user, clock=clock))

         # TODO(paul): limit

-        if updates:
-            clock = self.clock
+        for serial, user_ids in presence._remote_offline_serials:
+            if serial < from_key:
+                break

-            latest_serial = max([x[1].serial for x in updates])
-            data = [x[1].make_event(user=x[0], clock=clock) for x in updates]
+            for u in user_ids:
+                updates.append({
+                    "type": "m.presence",
+                    "content": {"user_id": u, "presence": PresenceState.OFFLINE},
+                })
+
+        # TODO(paul): For the v2 API we want to tell the client their from_key
+        # is too old if we fell off the end of the _remote_offline_serials
+        # list, and get them to invalidate+resync. In v1 we have no such
+        # concept so this is a best-effort result.

-            defer.returnValue((data, latest_serial))
+        if updates:
+            defer.returnValue((updates, latest_serial))
         else:
             defer.returnValue(([], presence._user_cachemap_latest_serial))
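The presence change keeps a bounded list of "recently went offline" revisions so the event source can still tell clients about remote users who have since been evicted from the cache. The following standalone sketch mirrors that bookkeeping; the names follow the diff, but the module-level functions (instead of PresenceHandler methods) and the snapshot iteration in the second function are simplifications assumed here to keep the example self-contained and safe to run::

    # Sketch of the bounded offline-serial bookkeeping, not the handler itself.
    MAX_OFFLINE_SERIALS = 1000


    def record_offline(remote_offline_serials, serial, user_id):
        # Newest revisions first; cap the list so it cannot grow without bound.
        remote_offline_serials.insert(0, (serial, set([user_id])))
        while len(remote_offline_serials) > MAX_OFFLINE_SERIALS:
            remote_offline_serials.pop()  # drop the oldest entry


    def record_back_online(remote_offline_serials, user_id):
        # Forget the user from every remembered offline revision and drop
        # entries that become empty (iterating over a copy while mutating).
        for entry in list(remote_offline_serials):
            _, user_ids = entry
            user_ids.discard(user_id)
            if not user_ids:
                remote_offline_serials.remove(entry)


    serials = []
    record_offline(serials, serial=1, user_id="@potato:remote")
    assert serials == [(1, {"@potato:remote"})]

    record_back_online(serials, "@potato:remote")
    assert serials == []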
diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py
index 60fd35fbfb..f8408d6596 100644
--- a/synapse/push/baserules.py
+++ b/synapse/push/baserules.py
@@ -1,3 +1,17 @@
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP

diff --git a/synapse/push/rulekinds.py b/synapse/push/rulekinds.py
index 660aa4e10e..4c591aa638 100644
--- a/synapse/push/rulekinds.py
+++ b/synapse/push/rulekinds.py
@@ -1,3 +1,17 @@
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 PRIORITY_CLASS_MAP = {
     'underride': 1,
     'sender': 2,

diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index ee72f774b3..8b457419cf 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -1,3 +1,17 @@
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import logging
 from distutils.version import LooseVersion

diff --git a/synapse/rest/media/v1/identicon_resource.py b/synapse/rest/media/v1/identicon_resource.py
index 912856386a..603859d5d4 100644
--- a/synapse/rest/media/v1/identicon_resource.py
+++ b/synapse/rest/media/v1/identicon_resource.py
@@ -1,3 +1,17 @@
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from pydenticon import Generator
 from twisted.web.resource import Resource

diff --git a/synapse/storage/schema/delta/14/upgrade_appservice_db.py b/synapse/storage/schema/delta/14/upgrade_appservice_db.py
index 847b1c5b89..9f3a4dd4c5 100644
--- a/synapse/storage/schema/delta/14/upgrade_appservice_db.py
+++ b/synapse/storage/schema/delta/14/upgrade_appservice_db.py
@@ -1,3 +1,17 @@
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import json
 import logging

diff --git a/synapse/storage/schema/delta/14/v14.sql b/synapse/storage/schema/delta/14/v14.sql
index 0212726448..1d09ad7a15 100644
--- a/synapse/storage/schema/delta/14/v14.sql
+++ b/synapse/storage/schema/delta/14/v14.sql
@@ -1,3 +1,17 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 CREATE TABLE IF NOT EXISTS push_rules_enable (
   id INTEGER PRIMARY KEY AUTOINCREMENT,
   user_name TEXT NOT NULL,

diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index 04eba4289e..9f5580c096 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -879,6 +879,71 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
         )

     @defer.inlineCallbacks
+    def test_recv_remote_offline(self):
+        """ Various tests relating to SYN-261 """
+        potato_set = self.handler._remote_recvmap.setdefault(self.u_potato,
+                                                             set())
+        potato_set.add(self.u_apple)
+
+        self.room_members = [self.u_banana, self.u_potato]
+
+        self.assertEquals(self.event_source.get_current_key(), 0)
+
+        yield self.mock_federation_resource.trigger("PUT",
+            "/_matrix/federation/v1/send/1000000/",
+            _make_edu_json("elsewhere", "m.presence",
+                content={
+                    "push": [
+                        {"user_id": "@potato:remote",
+                         "presence": "offline"},
+                    ],
+                }
+            )
+        )
+
+        self.assertEquals(self.event_source.get_current_key(), 1)
+
+        (events, _) = yield self.event_source.get_new_events_for_user(
+            self.u_apple, 0, None
+        )
+        self.assertEquals(events,
+            [
+                {"type": "m.presence",
+                 "content": {
+                     "user_id": "@potato:remote",
+                     "presence": OFFLINE,
+                }}
+            ]
+        )
+
+        yield self.mock_federation_resource.trigger("PUT",
+            "/_matrix/federation/v1/send/1000001/",
+            _make_edu_json("elsewhere", "m.presence",
+                content={
+                    "push": [
+                        {"user_id": "@potato:remote",
+                         "presence": "online"},
+                    ],
+                }
+            )
+        )
+
+        self.assertEquals(self.event_source.get_current_key(), 2)
+
+        (events, _) = yield self.event_source.get_new_events_for_user(
+            self.u_apple, 0, None
+        )
+        self.assertEquals(events,
+            [
+                {"type": "m.presence",
+                 "content": {
+                     "user_id": "@potato:remote",
+                     "presence": ONLINE,
+                }}
+            ]
+        )
+
+    @defer.inlineCallbacks
     def test_join_room_local(self):
         self.room_members = [self.u_apple, self.u_banana]