-rw-r--r--  synapse/crypto/keyring.py              | 38
-rw-r--r--  synapse/events/utils.py                |  3
-rw-r--r--  synapse/handlers/search.py             | 55
-rw-r--r--  synapse/http/matrixfederationclient.py |  9
-rw-r--r--  synapse/rest/client/v1/login.py        | 11
-rw-r--r--  synapse/rest/client/v2_alpha/sync.py   | 41

6 files changed, 104 insertions(+), 53 deletions(-)
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index 8b6a59866f..bc5bb5cdb1 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -381,28 +381,24 @@ class Keyring(object):
     def get_server_verify_key_v2_indirect(self, server_names_and_key_ids,
                                           perspective_name,
                                           perspective_keys):
-        limiter = yield get_retry_limiter(
-            perspective_name, self.clock, self.store
-        )
-
-        with limiter:
-            # TODO(mark): Set the minimum_valid_until_ts to that needed by
-            # the events being validated or the current time if validating
-            # an incoming request.
-            query_response = yield self.client.post_json(
-                destination=perspective_name,
-                path=b"/_matrix/key/v2/query",
-                data={
-                    u"server_keys": {
-                        server_name: {
-                            key_id: {
-                                u"minimum_valid_until_ts": 0
-                            } for key_id in key_ids
-                        }
-                        for server_name, key_ids in server_names_and_key_ids
+        # TODO(mark): Set the minimum_valid_until_ts to that needed by
+        # the events being validated or the current time if validating
+        # an incoming request.
+        query_response = yield self.client.post_json(
+            destination=perspective_name,
+            path=b"/_matrix/key/v2/query",
+            data={
+                u"server_keys": {
+                    server_name: {
+                        key_id: {
+                            u"minimum_valid_until_ts": 0
+                        } for key_id in key_ids
                     }
-                },
-            )
+                    for server_name, key_ids in server_names_and_key_ids
+                }
+            },
+            long_retries=True,
+        )
 
         keys = {}
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index 9989b76591..44cc1ef132 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -129,10 +129,9 @@ def format_event_for_client_v2(d):
     return d
 
 
-def format_event_for_client_v2_without_event_id(d):
+def format_event_for_client_v2_without_room_id(d):
     d = format_event_for_client_v2(d)
     d.pop("room_id", None)
-    d.pop("event_id", None)
     return d
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index b7545c111f..50688e51a8 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -17,13 +17,14 @@ from twisted.internet import defer
 
 from ._base import BaseHandler
 
-from synapse.api.constants import Membership
+from synapse.api.constants import Membership, EventTypes
 from synapse.api.filtering import Filter
 from synapse.api.errors import SynapseError
 from synapse.events.utils import serialize_event
 
 from unpaddedbase64 import decode_base64, encode_base64
 
+import itertools
 import logging
 
 
@@ -79,6 +80,9 @@ class SearchHandler(BaseHandler):
             # What to order results by (impacts whether pagination can be doen)
             order_by = room_cat.get("order_by", "rank")
 
+            # Return the current state of the rooms?
+            include_state = room_cat.get("include_state", False)
+
             # Include context around each event?
             event_context = room_cat.get(
                 "event_context", None
@@ -96,6 +100,10 @@ class SearchHandler(BaseHandler):
                 after_limit = int(event_context.get(
                     "after_limit", 5
                 ))
+
+                # Return the historic display name and avatar for the senders
+                # of the events?
+                include_profile = bool(event_context.get("include_profile", False))
         except KeyError:
             raise SynapseError(400, "Invalid search query")
 
@@ -269,6 +277,33 @@ class SearchHandler(BaseHandler):
                     "room_key", res["end"]
                 ).to_string()
 
+                if include_profile:
+                    senders = set(
+                        ev.sender
+                        for ev in itertools.chain(
+                            res["events_before"], [event], res["events_after"]
+                        )
+                    )
+
+                    if res["events_after"]:
+                        last_event_id = res["events_after"][-1].event_id
+                    else:
+                        last_event_id = event.event_id
+
+                    state = yield self.store.get_state_for_event(
+                        last_event_id,
+                        types=[(EventTypes.Member, sender) for sender in senders]
+                    )
+
+                    res["profile_info"] = {
+                        s.state_key: {
+                            "displayname": s.content.get("displayname", None),
+                            "avatar_url": s.content.get("avatar_url", None),
+                        }
+                        for s in state.values()
+                        if s.type == EventTypes.Member and s.state_key in senders
+                    }
+
                 contexts[event.event_id] = res
         else:
             contexts = {}
@@ -287,6 +322,18 @@ class SearchHandler(BaseHandler):
                 for e in context["events_after"]
             ]
 
+        state_results = {}
+        if include_state:
+            rooms = set(e.room_id for e in allowed_events)
+            for room_id in rooms:
+                state = yield self.state_handler.get_current_state(room_id)
+                state_results[room_id] = state.values()
+
+            state_results.values()
+
+        # We're now about to serialize the events. We should not make any
+        # blocking calls after this. Otherwise the 'age' will be wrong
+
         results = {
             e.event_id: {
                 "rank": rank_map[e.event_id],
@@ -303,6 +350,12 @@ class SearchHandler(BaseHandler):
             "count": len(results)
         }
 
+        if state_results:
+            rooms_cat_res["state"] = {
+                room_id: [serialize_event(e, time_now) for e in state]
+                for room_id, state in state_results.items()
+            }
+
         if room_groups and "room_id" in group_keys:
             rooms_cat_res.setdefault("groups", {})["room_id"] = room_groups
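
A note on the search changes above: "include_state" and "include_profile" are
opt-in request flags. The sketch below shows where they would sit in a search
request body; the "search_categories"/"room_events" envelope and the
"search_term" field are assumed from the Matrix client-server search API and
are not shown in this diff, while the flag names and the 5-event context
defaults come straight from the handler.

    # Hypothetical search request body exercising the two new flags.
    search_body = {
        "search_categories": {            # envelope assumed, not in the diff
            "room_events": {
                "search_term": "some text",
                "order_by": "rank",       # handler default
                "include_state": True,    # new: current room state per room
                "event_context": {
                    "before_limit": 5,    # handler default
                    "after_limit": 5,     # handler default
                    "include_profile": True,  # new: per-context "profile_info"
                                              # with displayname / avatar_url
                },
            }
        }
    }
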
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 614c06a6d7..b7b7c2cce8 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -190,11 +190,11 @@ class MatrixFederationHttpClient(object):
             if retries_left and not timeout:
                 if long_retries:
                     delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left)
-                    delay = max(delay, 60)
+                    delay = min(delay, 60)
                     delay *= random.uniform(0.8, 1.4)
                 else:
                     delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left)
-                    delay = max(delay, 2)
+                    delay = min(delay, 2)
                     delay *= random.uniform(0.8, 1.4)
 
                 yield sleep(delay)
@@ -302,7 +302,7 @@ class MatrixFederationHttpClient(object):
         defer.returnValue(json.loads(body))
 
     @defer.inlineCallbacks
-    def post_json(self, destination, path, data={}):
+    def post_json(self, destination, path, data={}, long_retries=True):
         """ Sends the specifed json data using POST
 
         Args:
@@ -311,6 +311,8 @@ class MatrixFederationHttpClient(object):
             path (str): The HTTP path.
             data (dict): A dict containing the data that will be used as
                 the request body. This will be encoded as JSON.
+            long_retries (bool): A boolean that indicates whether we should
+                retry for a short or long time.
 
         Returns:
             Deferred: Succeeds when we get a 2xx HTTP response. The result
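
The max() to min() swap above fixes the direction of the retry bound:
max(delay, 60) made 60 seconds a floor, so even the first long retry waited a
full minute and later ones grew unbounded past it, while min(delay, 60) caps
the exponential backoff as intended. A minimal standalone sketch, assuming
MAX_LONG_RETRIES = 10 for the module constant:

    MAX_LONG_RETRIES = 10  # assumed value of the module constant

    for retries_left in range(MAX_LONG_RETRIES, MAX_LONG_RETRIES - 4, -1):
        delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left)
        # old: max(delay, 60) -> 60, 60, 64, 256  (a floor, not a cap)
        # new: min(delay, 60) ->  4, 16, 60, 60   (capped exponential backoff)
        print(retries_left, min(delay, 60))
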
            will be the decoded JSON body.
@@ -330,6 +332,7 @@ class MatrixFederationHttpClient(object):
             path.encode("ascii"),
             body_callback=body_callback,
             headers_dict={"Content-Type": ["application/json"]},
+            long_retries=True,
         )
 
         if 200 <= response.code < 300:
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index 0171f6c018..720d6358e7 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -58,9 +58,18 @@ class LoginRestServlet(ClientV1RestServlet):
             flows.append({"type": LoginRestServlet.SAML2_TYPE})
         if self.cas_enabled:
             flows.append({"type": LoginRestServlet.CAS_TYPE})
+
+            # While its valid for us to advertise this login type generally,
+            # synapse currently only gives out these tokens as part of the
+            # CAS login flow.
+            # Generally we don't want to advertise login flows that clients
+            # don't know how to implement, since they (currently) will always
+            # fall back to the fallback API if they don't understand one of the
+            # login flow types returned.
+            flows.append({"type": LoginRestServlet.TOKEN_TYPE})
         if self.password_enabled:
             flows.append({"type": LoginRestServlet.PASS_TYPE})
-        flows.append({"type": LoginRestServlet.TOKEN_TYPE})
+
         return (200, {"flows": flows})
 
     def on_OPTIONS(self, request):
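
The login change stops advertising m.login.token unconditionally: it is now
offered only when CAS is enabled, since the CAS flow is currently the only
way synapse hands out such tokens. Assuming the servlet constants hold the
standard Matrix values (CAS_TYPE = "m.login.cas", TOKEN_TYPE =
"m.login.token", PASS_TYPE = "m.login.password"), the advertised flows become:

    # With cas_enabled and password_enabled both true:
    flows_with_cas = [
        {"type": "m.login.cas"},
        {"type": "m.login.token"},      # only advertised alongside CAS now
        {"type": "m.login.password"},
    ]

    # With password only, m.login.token is no longer advertised:
    flows_password_only = [
        {"type": "m.login.password"},
    ]
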
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 8ac16f2a3c..775f49885b 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -22,7 +22,7 @@ from synapse.handlers.sync import SyncConfig
 from synapse.types import StreamToken
 from synapse.events import FrozenEvent
 from synapse.events.utils import (
-    serialize_event, format_event_for_client_v2_without_event_id,
+    serialize_event, format_event_for_client_v2_without_room_id,
 )
 from synapse.api.filtering import FilterCollection
 from ._base import client_v2_pattern
@@ -148,9 +148,9 @@ class SyncRestServlet(RestServlet):
                 sync_result.presence, filter, time_now
             ),
             "rooms": {
-                "joined": joined,
-                "invited": invited,
-                "archived": archived,
+                "join": joined,
+                "invite": invited,
+                "leave": archived,
             },
             "next_batch": sync_result.next_batch.to_string(),
         }
@@ -207,7 +207,7 @@ class SyncRestServlet(RestServlet):
         for room in rooms:
             invite = serialize_event(
                 room.invite, time_now, token_id=token_id,
-                event_format=format_event_for_client_v2_without_event_id,
+                event_format=format_event_for_client_v2_without_room_id,
             )
             invited_state = invite.get("unsigned", {}).pop("invite_room_state", [])
             invited_state.append(invite)
@@ -256,7 +256,13 @@ class SyncRestServlet(RestServlet):
         :return: the room, encoded in our response format
         :rtype: dict[str, object]
         """
-        event_map = {}
+        def serialize(event):
+            # TODO(mjark): Respect formatting requirements in the filter.
+            return serialize_event(
+                event, time_now, token_id=token_id,
+                event_format=format_event_for_client_v2_without_room_id,
+            )
+
         state_dict = room.state
         timeline_events = filter.filter_room_timeline(room.timeline.events)
 
@@ -264,36 +270,21 @@ class SyncRestServlet(RestServlet):
             state_dict, timeline_events)
 
         state_events = filter.filter_room_state(state_dict.values())
-        state_event_ids = []
-        for event in state_events:
-            # TODO(mjark): Respect formatting requirements in the filter.
-            event_map[event.event_id] = serialize_event(
-                event, time_now, token_id=token_id,
-                event_format=format_event_for_client_v2_without_event_id,
-            )
-            state_event_ids.append(event.event_id)
-
-        timeline_event_ids = []
-        for event in timeline_events:
-            # TODO(mjark): Respect formatting requirements in the filter.
-            event_map[event.event_id] = serialize_event(
-                event, time_now, token_id=token_id,
-                event_format=format_event_for_client_v2_without_event_id,
-            )
-            timeline_event_ids.append(event.event_id)
+        serialized_state = [serialize(e) for e in state_events]
+        serialized_timeline = [serialize(e) for e in timeline_events]
 
         account_data = filter.filter_room_account_data(
             room.account_data
         )
 
         result = {
-            "event_map": event_map,
             "timeline": {
-                "events": timeline_event_ids,
+                "events": serialized_timeline,
                 "prev_batch": room.timeline.prev_batch.to_string(),
                 "limited": room.timeline.limited,
             },
-            "state": {"events": state_event_ids},
+            "state": {"events": serialized_state},
             "account_data": {"events": account_data},
         }
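
Taken together, the sync changes above rename the room sections to the
"join"/"invite"/"leave" keys and inline each serialized event where it is
used, dropping the top-level "event_map" indirection; events keep their
event_id and only lose room_id, hence the renamed
format_event_for_client_v2_without_room_id. A sketch of the resulting
response shape, with illustrative tokens and the room_id keying assumed from
the surrounding encode_* helpers:

    # Hypothetical fragment of a /sync response in the new format.
    sync_response = {
        "next_batch": "s72595_4483_1934",  # illustrative token
        "rooms": {
            "join": {
                "!room:example.com": {     # keyed by room id (assumed)
                    "timeline": {
                        "events": [        # full serialized events, not ids
                            {
                                "event_id": "$66:example.com",
                                "type": "m.room.message",
                                "content": {"body": "hi"},
                                # no "room_id": stripped by the formatter
                            },
                        ],
                        "prev_batch": "t34-23535_0_0",
                        "limited": False,
                    },
                    "state": {"events": []},
                    "account_data": {"events": []},
                },
            },
            "invite": {},
            "leave": {},
        },
    }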