-rw-r--r-- | CHANGES.rst                            |  17
-rwxr-xr-x | demo/start.sh                          |   8
-rw-r--r-- | synapse/__init__.py                    |   2
-rw-r--r-- | synapse/api/filtering.py               | 189
-rw-r--r-- | synapse/appservice/scheduler.py        |   4
-rw-r--r-- | synapse/config/cas.py                  |   3
-rw-r--r-- | synapse/config/homeserver.py           |   4
-rw-r--r-- | synapse/config/password.py             |  32
-rw-r--r-- | synapse/config/saml2.py                |   3
-rw-r--r-- | synapse/handlers/receipts.py           |   8
-rw-r--r-- | synapse/handlers/search.py             |  15
-rw-r--r-- | synapse/handlers/sync.py               | 185
-rw-r--r-- | synapse/rest/client/v1/login.py        |   8
-rw-r--r-- | synapse/rest/client/v2_alpha/sync.py   |  33
-rw-r--r-- | synapse/storage/prepare_database.py    |   2
-rw-r--r-- | synapse/storage/roommember.py          |  13
-rw-r--r-- | synapse/storage/schema/delta/25/fts.py (renamed from synapse/storage/schema/delta/24/fts.py) | 5
-rw-r--r-- | synapse/storage/search.py              |  35
-rw-r--r-- | tests/api/test_filtering.py            |  57
-rw-r--r-- | tests/events/__init__.py               |   0
-rw-r--r-- | tests/events/test_utils.py             | 115
21 files changed, 561 insertions, 177 deletions
diff --git a/CHANGES.rst b/CHANGES.rst index f1d2c7a765..da118b7ceb 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,20 @@ +Changes in synapse v0.10.1-rc1 (2015-10-15) +=========================================== + +* Add support for CAS, thanks to Steven Hammerton (PR #295, #296) +* Add support for using macaroons for ``access_token`` (PR #256, #229) +* Add support for ``m.room.canonical_alias`` (PR #287) +* Add support for viewing the history of rooms that they have left. (PR #276, + #294) +* Add support for refresh tokens (PR #240) +* Add flag on creation which disables federation of the room (PR #279) +* Add some room state to invites. (PR #275) +* Atomically persist events when joining a room over federation (PR #283) +* Change default history visibility for private rooms (PR #271) +* Allow users to redact their own sent events (PR #262) +* Use tox for tests (PR #247) +* Split up syutil into separate libraries (PR #243) + Changes in synapse v0.10.0-r2 (2015-09-16) ========================================== diff --git a/demo/start.sh b/demo/start.sh index d90115ec97..dcc4d6f4fa 100755 --- a/demo/start.sh +++ b/demo/start.sh @@ -38,8 +38,12 @@ for port in 8080 8081 8082; do perl -p -i -e 's/^enable_registration:.*/enable_registration: true/g' $DIR/etc/$port.config - echo "full_twisted_stacktraces: true" >> $DIR/etc/$port.config - echo "report_stats: false" >> $DIR/etc/$port.config + if ! grep -F "full_twisted_stacktraces" -q $DIR/etc/$port.config; then + echo "full_twisted_stacktraces: true" >> $DIR/etc/$port.config + fi + if ! grep -F "report_stats" -q $DIR/etc/$port.config ; then + echo "report_stats: false" >> $DIR/etc/$port.config + fi python -m synapse.app.homeserver \ --config-path "$DIR/etc/$port.config" \ diff --git a/synapse/__init__.py b/synapse/__init__.py index d62294e6bb..e9ce0412ed 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -16,4 +16,4 @@ """ This is a reference implementation of a Matrix home server. 
""" -__version__ = "0.10.0-r2" +__version__ = "0.10.1-rc1" diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index e79e91e7eb..eb15d8c54a 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -24,7 +24,7 @@ class Filtering(object): def get_user_filter(self, user_localpart, filter_id): result = self.store.get_user_filter(user_localpart, filter_id) - result.addCallback(Filter) + result.addCallback(FilterCollection) return result def add_user_filter(self, user_localpart, user_filter): @@ -131,125 +131,126 @@ class Filtering(object): raise SynapseError(400, "Bad bundle_updates: expected bool.") -class Filter(object): +class FilterCollection(object): def __init__(self, filter_json): self.filter_json = filter_json + self.room_timeline_filter = Filter( + self.filter_json.get("room", {}).get("timeline", {}) + ) + + self.room_state_filter = Filter( + self.filter_json.get("room", {}).get("state", {}) + ) + + self.room_ephemeral_filter = Filter( + self.filter_json.get("room", {}).get("ephemeral", {}) + ) + + self.presence_filter = Filter( + self.filter_json.get("presence", {}) + ) + def timeline_limit(self): - return self.filter_json.get("room", {}).get("timeline", {}).get("limit", 10) + return self.room_timeline_filter.limit() def presence_limit(self): - return self.filter_json.get("presence", {}).get("limit", 10) + return self.presence_filter.limit() def ephemeral_limit(self): - return self.filter_json.get("room", {}).get("ephemeral", {}).get("limit", 10) + return self.room_ephemeral_filter.limit() def filter_presence(self, events): - return self._filter_on_key(events, ["presence"]) + return self.presence_filter.filter(events) def filter_room_state(self, events): - return self._filter_on_key(events, ["room", "state"]) + return self.room_state_filter.filter(events) def filter_room_timeline(self, events): - return self._filter_on_key(events, ["room", "timeline"]) + return self.room_timeline_filter.filter(events) def filter_room_ephemeral(self, events): - return self._filter_on_key(events, ["room", "ephemeral"]) - - def _filter_on_key(self, events, keys): - filter_json = self.filter_json - if not filter_json: - return events - - try: - # extract the right definition from the filter - definition = filter_json - for key in keys: - definition = definition[key] - return self._filter_with_definition(events, definition) - except KeyError: - # return all events if definition isn't specified. - return events - - def _filter_with_definition(self, events, definition): - return [e for e in events if self._passes_definition(definition, e)] - - def _passes_definition(self, definition, event): - """Check if the event passes the filter definition - Args: - definition(dict): The filter definition to check against - event(dict or Event): The event to check + return self.room_ephemeral_filter.filter(events) + + +class Filter(object): + def __init__(self, filter_json): + self.filter_json = filter_json + + def check(self, event): + """Checks whether the filter matches the given event. 
+ Returns: - True if the event passes the filter in the definition + bool: True if the event matches """ - if type(event) is dict: - room_id = event.get("room_id") - sender = event.get("sender") - event_type = event["type"] + if isinstance(event, dict): + return self.check_fields( + event.get("room_id", None), + event.get("sender", None), + event.get("type", None), + ) else: - room_id = getattr(event, "room_id", None) - sender = getattr(event, "sender", None) - event_type = event.type - return self._event_passes_definition( - definition, room_id, sender, event_type - ) + return self.check_fields( + getattr(event, "room_id", None), + getattr(event, "sender", None), + event.type, + ) - def _event_passes_definition(self, definition, room_id, sender, - event_type): - """Check if the event passes through the given definition. + def check_fields(self, room_id, sender, event_type): + """Checks whether the filter matches the given event fields. - Args: - definition(dict): The definition to check against. - room_id(str): The id of the room this event is in or None. - sender(str): The sender of the event - event_type(str): The type of the event. Returns: - True if the event passes through the filter. + bool: True if the event fields match """ - # Algorithm notes: - # For each key in the definition, check the event meets the criteria: - # * For types: Literal match or prefix match (if ends with wildcard) - # * For senders/rooms: Literal match only - # * "not_" checks take presedence (e.g. if "m.*" is in both 'types' - # and 'not_types' then it is treated as only being in 'not_types') - - # room checks - if room_id is not None: - allow_rooms = definition.get("rooms", None) - reject_rooms = definition.get("not_rooms", None) - if reject_rooms and room_id in reject_rooms: - return False - if allow_rooms and room_id not in allow_rooms: - return False - - # sender checks - if sender is not None: - allow_senders = definition.get("senders", None) - reject_senders = definition.get("not_senders", None) - if reject_senders and sender in reject_senders: - return False - if allow_senders and sender not in allow_senders: + literal_keys = { + "rooms": lambda v: room_id == v, + "senders": lambda v: sender == v, + "types": lambda v: _matches_wildcard(event_type, v) + } + + for name, match_func in literal_keys.items(): + not_name = "not_%s" % (name,) + disallowed_values = self.filter_json.get(not_name, []) + if any(map(match_func, disallowed_values)): return False - # type checks - if "not_types" in definition: - for def_type in definition["not_types"]: - if self._event_matches_type(event_type, def_type): + allowed_values = self.filter_json.get(name, None) + if allowed_values is not None: + if not any(map(match_func, allowed_values)): return False - if "types" in definition: - included = False - for def_type in definition["types"]: - if self._event_matches_type(event_type, def_type): - included = True - break - if not included: - return False return True - def _event_matches_type(self, event_type, def_type): - if def_type.endswith("*"): - type_prefix = def_type[:-1] - return event_type.startswith(type_prefix) - else: - return event_type == def_type + def filter_rooms(self, room_ids): + """Apply the 'rooms' filter to a given list of rooms. + + Args: + room_ids (list): A list of room_ids. 
+ + Returns: + list: A list of room_ids that match the filter + """ + room_ids = set(room_ids) + + disallowed_rooms = set(self.filter_json.get("not_rooms", [])) + room_ids -= disallowed_rooms + + allowed_rooms = self.filter_json.get("rooms", None) + if allowed_rooms is not None: + room_ids &= set(allowed_rooms) + + return room_ids + + def filter(self, events): + return filter(self.check, events) + + def limit(self): + return self.filter_json.get("limit", 10) + + +def _matches_wildcard(actual_value, filter_value): + if filter_value.endswith("*"): + type_prefix = filter_value[:-1] + return actual_value.startswith(type_prefix) + else: + return actual_value == filter_value diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py index 59b0b1f4ac..44dc2c4744 100644 --- a/synapse/appservice/scheduler.py +++ b/synapse/appservice/scheduler.py @@ -224,8 +224,8 @@ class _Recoverer(object): self.clock.call_later((2 ** self.backoff_counter), self.retry) def _backoff(self): - # cap the backoff to be around 18h => (2^16) = 65536 secs - if self.backoff_counter < 16: + # cap the backoff to be around 8.5min => (2^9) = 512 secs + if self.backoff_counter < 9: self.backoff_counter += 1 self.recover() diff --git a/synapse/config/cas.py b/synapse/config/cas.py index d268680729..a337ae6ca0 100644 --- a/synapse/config/cas.py +++ b/synapse/config/cas.py @@ -25,7 +25,7 @@ class CasConfig(Config): def read_config(self, config): cas_config = config.get("cas_config", None) if cas_config: - self.cas_enabled = True + self.cas_enabled = cas_config.get("enabled", True) self.cas_server_url = cas_config["server_url"] self.cas_required_attributes = cas_config.get("required_attributes", {}) else: @@ -37,6 +37,7 @@ class CasConfig(Config): return """ # Enable CAS for registration and login. #cas_config: + # enabled: true # server_url: "https://cas-server.com" # #required_attributes: # # name: value diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py index 3039f3c0bf..4743e6abc5 100644 --- a/synapse/config/homeserver.py +++ b/synapse/config/homeserver.py @@ -27,12 +27,14 @@ from .appservice import AppServiceConfig from .key import KeyConfig from .saml2 import SAML2Config from .cas import CasConfig +from .password import PasswordConfig class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig, RatelimitConfig, ContentRepositoryConfig, CaptchaConfig, VoipConfig, RegistrationConfig, MetricsConfig, - AppServiceConfig, KeyConfig, SAML2Config, CasConfig): + AppServiceConfig, KeyConfig, SAML2Config, CasConfig, + PasswordConfig,): pass diff --git a/synapse/config/password.py b/synapse/config/password.py new file mode 100644 index 0000000000..1a3e278472 --- /dev/null +++ b/synapse/config/password.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from ._base import Config + + +class PasswordConfig(Config): + """Password login configuration + """ + + def read_config(self, config): + password_config = config.get("password_config", {}) + self.password_enabled = password_config.get("enabled", True) + + def default_config(self, config_dir_path, server_name, **kwargs): + return """ + # Enable password for login. + password_config: + enabled: true + """ diff --git a/synapse/config/saml2.py b/synapse/config/saml2.py index 4c6133cf22..8d7f443021 100644 --- a/synapse/config/saml2.py +++ b/synapse/config/saml2.py @@ -33,7 +33,7 @@ class SAML2Config(Config): def read_config(self, config): saml2_config = config.get("saml2_config", None) if saml2_config: - self.saml2_enabled = True + self.saml2_enabled = saml2_config.get("enabled", True) self.saml2_config_path = saml2_config["config_path"] self.saml2_idp_redirect_url = saml2_config["idp_redirect_url"] else: @@ -49,6 +49,7 @@ class SAML2Config(Config): # the user back to /login/saml2 with proper info. # See pysaml2 docs for format of config. #saml2_config: + # enabled: true # config_path: "%s/sp_conf.py" # idp_redirect_url: "http://%s/idp" """ % (config_dir_path, server_name) diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py index 86c911c4bf..a47ae3df42 100644 --- a/synapse/handlers/receipts.py +++ b/synapse/handlers/receipts.py @@ -156,13 +156,7 @@ class ReceiptsHandler(BaseHandler): if not result: defer.returnValue([]) - event = { - "type": "m.receipt", - "room_id": room_id, - "content": result, - } - - defer.returnValue([event]) + defer.returnValue(result) class ReceiptEventSource(object): diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 22808b9c07..bbe82b1425 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -18,6 +18,7 @@ from twisted.internet import defer from ._base import BaseHandler from synapse.api.constants import Membership +from synapse.api.filtering import Filter from synapse.api.errors import SynapseError from synapse.events.utils import serialize_event @@ -49,9 +50,12 @@ class SearchHandler(BaseHandler): keys = content["search_categories"]["room_events"].get("keys", [ "content.body", "content.name", "content.topic", ]) + filter_dict = content["search_categories"]["room_events"].get("filter", {}) except KeyError: raise SynapseError(400, "Invalid search query") + search_filter = Filter(filter_dict) + # TODO: Search through left rooms too rooms = yield self.store.get_rooms_for_user_where_membership_is( user.to_string(), @@ -60,15 +64,18 @@ class SearchHandler(BaseHandler): ) room_ids = set(r.room_id for r in rooms) - # TODO: Apply room filter to rooms list + room_ids = search_filter.filter_rooms(room_ids) + + rank_map, event_map, _ = yield self.store.search_msgs( + room_ids, search_term, keys + ) - rank_map, event_map = yield self.store.search_msgs(room_ids, search_term, keys) + filtered_events = search_filter.filter(event_map.values()) allowed_events = yield self._filter_events_for_client( - user.to_string(), event_map.values() + user.to_string(), filtered_events ) - # TODO: Filter allowed_events # TODO: Add a limit time_now = self.clock.time_msec() diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index ee6b881de1..b8e2c81969 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -61,18 +61,37 @@ class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [ return bool(self.timeline or self.state or self.ephemeral) +class 
ArchivedSyncResult(collections.namedtuple("JoinedSyncResult", [ + "room_id", + "timeline", + "state", +])): + __slots__ = [] + + def __nonzero__(self): + """Make the result appear empty if there are no updates. This is used + to tell if room needs to be part of the sync result. + """ + return bool(self.timeline or self.state) + + class InvitedSyncResult(collections.namedtuple("InvitedSyncResult", [ "room_id", "invite", ])): __slots__ = [] + def __nonzero__(self): + """Invited rooms should always be reported to the client""" + return True + class SyncResult(collections.namedtuple("SyncResult", [ "next_batch", # Token for the next sync "presence", # List of presence events for the user. "joined", # JoinedSyncResult for each joined room. "invited", # InvitedSyncResult for each invited room. + "archived", # ArchivedSyncResult for each archived room. ])): __slots__ = [] @@ -145,6 +164,10 @@ class SyncHandler(BaseHandler): """ now_token = yield self.event_sources.get_current_token() + now_token, typing_by_room = yield self.typing_by_room( + sync_config, now_token + ) + presence_stream = self.event_sources.sources["presence"] # TODO (mjark): This looks wrong, shouldn't we be getting the presence # UP to the present rather than after the present? @@ -156,15 +179,21 @@ class SyncHandler(BaseHandler): ) room_list = yield self.store.get_rooms_for_user_where_membership_is( user_id=sync_config.user.to_string(), - membership_list=[Membership.INVITE, Membership.JOIN] + membership_list=( + Membership.INVITE, + Membership.JOIN, + Membership.LEAVE, + Membership.BAN + ) ) joined = [] invited = [] + archived = [] for event in room_list: if event.membership == Membership.JOIN: room_sync = yield self.initial_sync_for_joined_room( - event.room_id, sync_config, now_token, + event.room_id, sync_config, now_token, typing_by_room ) joined.append(room_sync) elif event.membership == Membership.INVITE: @@ -173,16 +202,29 @@ class SyncHandler(BaseHandler): room_id=event.room_id, invite=invite, )) + elif event.membership in (Membership.LEAVE, Membership.BAN): + leave_token = now_token.copy_and_replace( + "room_key", "s%d" % (event.stream_ordering,) + ) + room_sync = yield self.initial_sync_for_archived_room( + sync_config=sync_config, + room_id=event.room_id, + leave_event_id=event.event_id, + leave_token=leave_token, + ) + archived.append(room_sync) defer.returnValue(SyncResult( presence=presence, joined=joined, invited=invited, + archived=archived, next_batch=now_token, )) @defer.inlineCallbacks - def initial_sync_for_joined_room(self, room_id, sync_config, now_token): + def initial_sync_for_joined_room(self, room_id, sync_config, now_token, + typing_by_room): """Sync a room for a client which is starting without any state Returns: A Deferred JoinedSyncResult. @@ -201,7 +243,60 @@ class SyncHandler(BaseHandler): room_id=room_id, timeline=batch, state=current_state_events, - ephemeral=[], + ephemeral=typing_by_room.get(room_id, []), + )) + + @defer.inlineCallbacks + def typing_by_room(self, sync_config, now_token, since_token=None): + """Get the typing events for each room the user is in + Args: + sync_config (SyncConfig): The flags, filters and user for the sync. + now_token (StreamToken): Where the server is currently up to. + since_token (StreamToken): Where the server was when the client + last synced. + Returns: + A tuple of the now StreamToken, updated to reflect the which typing + events are included, and a dict mapping from room_id to a list of + typing events for that room. 
+ """ + + typing_key = since_token.typing_key if since_token else "0" + + typing_source = self.event_sources.sources["typing"] + typing, typing_key = yield typing_source.get_new_events_for_user( + user=sync_config.user, + from_key=typing_key, + limit=sync_config.filter.ephemeral_limit(), + ) + now_token = now_token.copy_and_replace("typing_key", typing_key) + + typing_by_room = {event["room_id"]: [event] for event in typing} + for event in typing: + event.pop("room_id") + logger.debug("Typing %r", typing_by_room) + + defer.returnValue((now_token, typing_by_room)) + + @defer.inlineCallbacks + def initial_sync_for_archived_room(self, room_id, sync_config, + leave_event_id, leave_token): + """Sync a room for a client which is starting without any state + Returns: + A Deferred JoinedSyncResult. + """ + + batch = yield self.load_filtered_recents( + room_id, sync_config, leave_token, + ) + + leave_state = yield self.store.get_state_for_events( + [leave_event_id], None + ) + + defer.returnValue(ArchivedSyncResult( + room_id=room_id, + timeline=batch, + state=leave_state[leave_event_id].values(), )) @defer.inlineCallbacks @@ -221,18 +316,9 @@ class SyncHandler(BaseHandler): ) now_token = now_token.copy_and_replace("presence_key", presence_key) - typing_source = self.event_sources.sources["typing"] - typing, typing_key = yield typing_source.get_new_events_for_user( - user=sync_config.user, - from_key=since_token.typing_key, - limit=sync_config.filter.ephemeral_limit(), + now_token, typing_by_room = yield self.typing_by_room( + sync_config, now_token, since_token ) - now_token = now_token.copy_and_replace("typing_key", typing_key) - - typing_by_room = {event["room_id"]: [event] for event in typing} - for event in typing: - event.pop("room_id") - logger.debug("Typing %r", typing_by_room) rm_handler = self.hs.get_handlers().room_member_handler app_service = yield self.store.get_app_service_by_user_id( @@ -257,18 +343,22 @@ class SyncHandler(BaseHandler): ) joined = [] + archived = [] if len(room_events) <= timeline_limit: # There is no gap in any of the rooms. Therefore we can just # partition the new events by room and return them. 
invite_events = [] + leave_events = [] events_by_room_id = {} for event in room_events: events_by_room_id.setdefault(event.room_id, []).append(event) if event.room_id not in joined_room_ids: if (event.type == EventTypes.Member - and event.membership == Membership.INVITE and event.state_key == sync_config.user.to_string()): - invite_events.append(event) + if event.membership == Membership.INVITE: + invite_events.append(event) + elif event.membership in (Membership.LEAVE, Membership.BAN): + leave_events.append(event) for room_id in joined_room_ids: recents = events_by_room_id.get(room_id, []) @@ -296,11 +386,16 @@ class SyncHandler(BaseHandler): ) if room_sync: joined.append(room_sync) + else: invite_events = yield self.store.get_invites_for_user( sync_config.user.to_string() ) + leave_events = yield self.store.get_leave_and_ban_events_for_user( + sync_config.user.to_string() + ) + for room_id in joined_room_ids: room_sync = yield self.incremental_sync_with_gap_for_room( room_id, sync_config, since_token, now_token, @@ -309,6 +404,12 @@ class SyncHandler(BaseHandler): if room_sync: joined.append(room_sync) + for leave_event in leave_events: + room_sync = yield self.incremental_sync_for_archived_room( + sync_config, leave_event, since_token + ) + archived.append(room_sync) + invited = [ InvitedSyncResult(room_id=event.room_id, invite=event) for event in invite_events @@ -318,6 +419,7 @@ class SyncHandler(BaseHandler): presence=presence, joined=joined, invited=invited, + archived=archived, next_batch=now_token, )) @@ -417,6 +519,55 @@ class SyncHandler(BaseHandler): defer.returnValue(room_sync) @defer.inlineCallbacks + def incremental_sync_for_archived_room(self, sync_config, leave_event, + since_token): + """ Get the incremental delta needed to bring the client up to date for + the archived room. + Returns: + A Deferred ArchivedSyncResult + """ + + stream_token = yield self.store.get_stream_token_for_event( + leave_event.event_id + ) + + leave_token = since_token.copy_and_replace("room_key", stream_token) + + batch = yield self.load_filtered_recents( + leave_event.room_id, sync_config, leave_token, since_token, + ) + + logging.debug("Recents %r", batch) + + # TODO(mjark): This seems racy since this isn't being passed a + # token to indicate what point in the stream this is + leave_state = yield self.store.get_state_for_events( + [leave_event.event_id], None + ) + + state_events_at_leave = leave_state[leave_event.event_id].values() + + state_at_previous_sync = yield self.get_state_at_previous_sync( + leave_event.room_id, since_token=since_token + ) + + state_events_delta = yield self.compute_state_delta( + since_token=since_token, + previous_state=state_at_previous_sync, + current_state=state_events_at_leave, + ) + + room_sync = ArchivedSyncResult( + room_id=leave_event.room_id, + timeline=batch, + state=state_events_delta, + ) + + logging.debug("Room sync: %r", room_sync) + + defer.returnValue(room_sync) + + @defer.inlineCallbacks def get_state_at_previous_sync(self, room_id, since_token): """ Get the room state at the previous sync the client made. 
Returns: diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index e71cf7e43e..4ea06c1434 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -43,6 +43,7 @@ class LoginRestServlet(ClientV1RestServlet): def __init__(self, hs): super(LoginRestServlet, self).__init__(hs) self.idp_redirect_url = hs.config.saml2_idp_redirect_url + self.password_enabled = hs.config.password_enabled self.saml2_enabled = hs.config.saml2_enabled self.cas_enabled = hs.config.cas_enabled self.cas_server_url = hs.config.cas_server_url @@ -50,11 +51,13 @@ class LoginRestServlet(ClientV1RestServlet): self.servername = hs.config.server_name def on_GET(self, request): - flows = [{"type": LoginRestServlet.PASS_TYPE}] + flows = [] if self.saml2_enabled: flows.append({"type": LoginRestServlet.SAML2_TYPE}) if self.cas_enabled: flows.append({"type": LoginRestServlet.CAS_TYPE}) + if self.password_enabled: + flows.append({"type": LoginRestServlet.PASS_TYPE}) return (200, {"flows": flows}) def on_OPTIONS(self, request): @@ -65,6 +68,9 @@ class LoginRestServlet(ClientV1RestServlet): login_submission = _parse_json(request) try: if login_submission["type"] == LoginRestServlet.PASS_TYPE: + if not self.password_enabled: + raise SynapseError(400, "Password login has been disabled.") + result = yield self.do_password_login(login_submission) defer.returnValue(result) elif self.saml2_enabled and (login_submission["type"] == diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index fffecb24f5..6c4f2b7cd4 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -23,7 +23,7 @@ from synapse.types import StreamToken from synapse.events.utils import ( serialize_event, format_event_for_client_v2_without_event_id, ) -from synapse.api.filtering import Filter +from synapse.api.filtering import FilterCollection from ._base import client_v2_pattern import copy @@ -103,7 +103,7 @@ class SyncRestServlet(RestServlet): user.localpart, filter_id ) except: - filter = Filter({}) + filter = FilterCollection({}) sync_config = SyncConfig( user=user, @@ -136,6 +136,10 @@ class SyncRestServlet(RestServlet): sync_result.invited, filter, time_now, token_id ) + archived = self.encode_archived( + sync_result.archived, filter, time_now, token_id + ) + response_content = { "presence": self.encode_presence( sync_result.presence, filter, time_now @@ -143,7 +147,7 @@ class SyncRestServlet(RestServlet): "rooms": { "joined": joined, "invited": invited, - "archived": {}, + "archived": archived, }, "next_batch": sync_result.next_batch.to_string(), } @@ -182,14 +186,20 @@ class SyncRestServlet(RestServlet): return invited + def encode_archived(self, rooms, filter, time_now, token_id): + joined = {} + for room in rooms: + joined[room.room_id] = self.encode_room( + room, filter, time_now, token_id, joined=False + ) + + return joined + @staticmethod - def encode_room(room, filter, time_now, token_id): + def encode_room(room, filter, time_now, token_id, joined=True): event_map = {} state_events = filter.filter_room_state(room.state) - timeline_events = filter.filter_room_timeline(room.timeline.events) - ephemeral_events = filter.filter_room_ephemeral(room.ephemeral) state_event_ids = [] - timeline_event_ids = [] for event in state_events: # TODO(mjark): Respect formatting requirements in the filter. 
event_map[event.event_id] = serialize_event( @@ -198,6 +208,8 @@ class SyncRestServlet(RestServlet): ) state_event_ids.append(event.event_id) + timeline_events = filter.filter_room_timeline(room.timeline.events) + timeline_event_ids = [] for event in timeline_events: # TODO(mjark): Respect formatting requirements in the filter. event_map[event.event_id] = serialize_event( @@ -205,6 +217,7 @@ class SyncRestServlet(RestServlet): event_format=format_event_for_client_v2_without_event_id, ) timeline_event_ids.append(event.event_id) + result = { "event_map": event_map, "timeline": { @@ -213,8 +226,12 @@ class SyncRestServlet(RestServlet): "limited": room.timeline.limited, }, "state": {"events": state_event_ids}, - "ephemeral": {"events": ephemeral_events}, } + + if joined: + ephemeral_events = filter.filter_room_ephemeral(room.ephemeral) + result["ephemeral"] = {"events": ephemeral_events} + return result diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index 1ddf55be4d..1a74d6e360 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -25,7 +25,7 @@ logger = logging.getLogger(__name__) # Remember to update this number every time a change is made to database # schema files, so the users will be informed on server restarts. -SCHEMA_VERSION = 24 +SCHEMA_VERSION = 25 dir_path = os.path.abspath(os.path.dirname(__file__)) diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index dd98dcfda8..ae1ad56d9a 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -124,6 +124,19 @@ class RoomMemberStore(SQLBaseStore): invites.event_id for invite in invites ])) + def get_leave_and_ban_events_for_user(self, user_id): + """ Get all the leave events for a user + Args: + user_id (str): The user ID. + Returns: + A deferred list of event objects. + """ + return self.get_rooms_for_user_where_membership_is( + user_id, (Membership.LEAVE, Membership.BAN) + ).addCallback(lambda leaves: self._get_events([ + leave.event_id for leave in leaves + ])) + def get_rooms_for_user_where_membership_is(self, user_id, membership_list): """ Get all the rooms for this user where the membership for this user matches one in the membership list. diff --git a/synapse/storage/schema/delta/24/fts.py b/synapse/storage/schema/delta/25/fts.py index 0c752d8426..ed3cc06557 100644 --- a/synapse/storage/schema/delta/24/fts.py +++ b/synapse/storage/schema/delta/25/fts.py @@ -23,7 +23,7 @@ logger = logging.getLogger(__name__) POSTGRES_SQL = """ -CREATE TABLE event_search ( +CREATE TABLE IF NOT EXISTS event_search ( event_id TEXT, room_id TEXT, key TEXT, @@ -53,7 +53,8 @@ CREATE INDEX event_search_ev_ridx ON event_search(room_id); SQLITE_TABLE = ( - "CREATE VIRTUAL TABLE event_search USING fts3 ( event_id, room_id, key, value)" + "CREATE VIRTUAL TABLE IF NOT EXISTS event_search" + " USING fts3 ( event_id, room_id, key, value)" ) diff --git a/synapse/storage/search.py b/synapse/storage/search.py index a3c69c5ab3..9608b5d6a7 100644 --- a/synapse/storage/search.py +++ b/synapse/storage/search.py @@ -18,6 +18,17 @@ from twisted.internet import defer from _base import SQLBaseStore from synapse.storage.engines import PostgresEngine, Sqlite3Engine +from collections import namedtuple + +"""The result of a search. 
+ +Fields: + rank_map (dict): Mapping event_id -> rank + event_map (dict): Mapping event_id -> event + pagination_token (str): Pagination token +""" +SearchResult = namedtuple("SearchResult", ("rank_map", "event_map", "pagination_token")) + class SearchStore(SQLBaseStore): @defer.inlineCallbacks @@ -31,15 +42,18 @@ class SearchStore(SQLBaseStore): "content.body", "content.name", "content.topic" Returns: - 2-tuple of (dict event_id -> rank, dict event_id -> event) + SearchResult """ clauses = [] args = [] - clauses.append( - "room_id IN (%s)" % (",".join(["?"] * len(room_ids)),) - ) - args.extend(room_ids) + # Make sure we don't explode because the person is in too many rooms. + # We filter the results below regardless. + if len(room_ids) < 500: + clauses.append( + "room_id IN (%s)" % (",".join(["?"] * len(room_ids)),) + ) + args.extend(room_ids) local_clauses = [] for key in keys: @@ -52,13 +66,13 @@ class SearchStore(SQLBaseStore): if isinstance(self.database_engine, PostgresEngine): sql = ( - "SELECT ts_rank_cd(vector, query) AS rank, event_id" + "SELECT ts_rank_cd(vector, query) AS rank, room_id, event_id" " FROM plainto_tsquery('english', ?) as query, event_search" " WHERE vector @@ query" ) elif isinstance(self.database_engine, Sqlite3Engine): sql = ( - "SELECT 0 as rank, event_id FROM event_search" + "SELECT 0 as rank, room_id, event_id FROM event_search" " WHERE value MATCH ?" ) else: @@ -76,6 +90,8 @@ class SearchStore(SQLBaseStore): "search_msgs", self.cursor_to_dict, sql, *([search_term] + args) ) + results = filter(lambda row: row["room_id"] in room_ids, results) + events = yield self._get_events([r["event_id"] for r in results]) event_map = { @@ -83,11 +99,12 @@ class SearchStore(SQLBaseStore): for ev in events } - defer.returnValue(( + defer.returnValue(SearchResult( { r["event_id"]: r["rank"] for r in results if r["event_id"] in event_map }, - event_map + event_map, + None )) diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index 6942cdac51..9f9af2d783 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -23,10 +23,17 @@ from tests.utils import ( ) from synapse.types import UserID -from synapse.api.filtering import Filter +from synapse.api.filtering import FilterCollection, Filter user_localpart = "test_user" -MockEvent = namedtuple("MockEvent", "sender type room_id") +# MockEvent = namedtuple("MockEvent", "sender type room_id") + + +def MockEvent(**kwargs): + ev = NonCallableMock(spec_set=kwargs.keys()) + ev.configure_mock(**kwargs) + return ev + class FilteringTestCase(unittest.TestCase): @@ -44,7 +51,6 @@ class FilteringTestCase(unittest.TestCase): ) self.filtering = hs.get_filtering() - self.filter = Filter({}) self.datastore = hs.get_datastore() @@ -57,8 +63,9 @@ class FilteringTestCase(unittest.TestCase): type="m.room.message", room_id="!foo:bar" ) + self.assertTrue( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_types_works_with_wildcards(self): @@ -71,7 +78,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!foo:bar" ) self.assertTrue( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_types_works_with_unknowns(self): @@ -84,7 +91,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!foo:bar" ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_not_types_works_with_literals(self): @@ -97,7 +104,7 @@ class 
FilteringTestCase(unittest.TestCase): room_id="!foo:bar" ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_not_types_works_with_wildcards(self): @@ -110,7 +117,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!foo:bar" ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_not_types_works_with_unknowns(self): @@ -123,7 +130,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!foo:bar" ) self.assertTrue( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_not_types_takes_priority_over_types(self): @@ -137,7 +144,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!foo:bar" ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_senders_works_with_literals(self): @@ -150,7 +157,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!foo:bar" ) self.assertTrue( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_senders_works_with_unknowns(self): @@ -163,7 +170,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!foo:bar" ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_not_senders_works_with_literals(self): @@ -176,7 +183,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!foo:bar" ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_not_senders_works_with_unknowns(self): @@ -189,7 +196,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!foo:bar" ) self.assertTrue( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_not_senders_takes_priority_over_senders(self): @@ -203,7 +210,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!foo:bar" ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_rooms_works_with_literals(self): @@ -216,7 +223,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!secretbase:unknown" ) self.assertTrue( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_rooms_works_with_unknowns(self): @@ -229,7 +236,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!anothersecretbase:unknown" ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_not_rooms_works_with_literals(self): @@ -242,7 +249,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!anothersecretbase:unknown" ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_not_rooms_works_with_unknowns(self): @@ -255,7 +262,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!anothersecretbase:unknown" ) self.assertTrue( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_not_rooms_takes_priority_over_rooms(self): @@ -269,7 +276,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!secretbase:unknown" ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_combined_event(self): @@ -287,7 +294,7 @@ class 
FilteringTestCase(unittest.TestCase): room_id="!stage:unknown" # yup ) self.assertTrue( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_combined_event_bad_sender(self): @@ -305,7 +312,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!stage:unknown" # yup ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_combined_event_bad_room(self): @@ -323,7 +330,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!piggyshouse:muppets" # nope ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) def test_definition_combined_event_bad_type(self): @@ -341,7 +348,7 @@ class FilteringTestCase(unittest.TestCase): room_id="!stage:unknown" # yup ) self.assertFalse( - self.filter._passes_definition(definition, event) + Filter(definition).check(event) ) @defer.inlineCallbacks @@ -359,7 +366,6 @@ class FilteringTestCase(unittest.TestCase): event = MockEvent( sender="@foo:bar", type="m.profile", - room_id="!foo:bar" ) events = [event] @@ -386,7 +392,6 @@ class FilteringTestCase(unittest.TestCase): event = MockEvent( sender="@foo:bar", type="custom.avatar.3d.crazy", - room_id="!foo:bar" ) events = [event] diff --git a/tests/events/__init__.py b/tests/events/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/tests/events/__init__.py diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py new file mode 100644 index 0000000000..16179921f0 --- /dev/null +++ b/tests/events/test_utils.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import unittest + +from synapse.events import FrozenEvent +from synapse.events.utils import prune_event + +class PruneEventTestCase(unittest.TestCase): + """ Asserts that a new event constructed with `evdict` will look like + `matchdict` when it is redacted. 
""" + def run_test(self, evdict, matchdict): + self.assertEquals( + prune_event(FrozenEvent(evdict)).get_dict(), + matchdict + ) + + def test_minimal(self): + self.run_test( + {'type': 'A'}, + { + 'type': 'A', + 'content': {}, + 'signatures': {}, + 'unsigned': {}, + } + ) + + def test_basic_keys(self): + self.run_test( + { + 'type': 'A', + 'room_id': '!1:domain', + 'sender': '@2:domain', + 'event_id': '$3:domain', + 'origin': 'domain', + }, + { + 'type': 'A', + 'room_id': '!1:domain', + 'sender': '@2:domain', + 'event_id': '$3:domain', + 'origin': 'domain', + 'content': {}, + 'signatures': {}, + 'unsigned': {}, + } + ) + + def test_unsigned_age_ts(self): + self.run_test( + { + 'type': 'B', + 'unsigned': {'age_ts': 20}, + }, + { + 'type': 'B', + 'content': {}, + 'signatures': {}, + 'unsigned': {'age_ts': 20}, + } + ) + + self.run_test( + { + 'type': 'B', + 'unsigned': {'other_key': 'here'}, + }, + { + 'type': 'B', + 'content': {}, + 'signatures': {}, + 'unsigned': {}, + } + ) + + def test_content(self): + self.run_test( + { + 'type': 'C', + 'content': {'things': 'here'}, + }, + { + 'type': 'C', + 'content': {}, + 'signatures': {}, + 'unsigned': {}, + } + ) + + self.run_test( + { + 'type': 'm.room.create', + 'content': {'creator': '@2:domain', 'other_field': 'here'}, + }, + { + 'type': 'm.room.create', + 'content': {'creator': '@2:domain'}, + 'signatures': {}, + 'unsigned': {}, + } + ) |