Diffstat (limited to 'synapse/rest')
-rw-r--r--  synapse/rest/client/relations.py               170
-rw-r--r--  synapse/rest/client/room_batch.py               21
-rw-r--r--  synapse/rest/client/sync.py                      4
-rw-r--r--  synapse/rest/key/v2/local_key_resource.py       12
-rw-r--r--  synapse/rest/key/v2/remote_key_resource.py       8
-rw-r--r--  synapse/rest/media/v1/media_storage.py           2
-rw-r--r--  synapse/rest/media/v1/preview_html.py            4
-rw-r--r--  synapse/rest/media/v1/preview_url_resource.py    9
8 files changed, 36 insertions, 194 deletions
diff --git a/synapse/rest/client/relations.py b/synapse/rest/client/relations.py

index c16078b187..55c96a2af3 100644
--- a/synapse/rest/client/relations.py
+++ b/synapse/rest/client/relations.py
@@ -12,22 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""This class implements the proposed relation APIs from MSC 1849.
-
-Since the MSC has not been approved all APIs here are unstable and may change at
-any time to reflect changes in the MSC.
-"""
-
 import logging
 from typing import TYPE_CHECKING, Optional, Tuple
 
-from synapse.api.constants import RelationTypes
-from synapse.api.errors import SynapseError
 from synapse.http.server import HttpServer
 from synapse.http.servlet import RestServlet, parse_integer, parse_string
 from synapse.http.site import SynapseRequest
 from synapse.rest.client._base import client_patterns
-from synapse.storage.relations import AggregationPaginationToken
 from synapse.types import JsonDict, StreamToken
 
 if TYPE_CHECKING:
     from synapse.server import HomeServer
@@ -93,166 +84,5 @@ class RelationPaginationServlet(RestServlet):
         return 200, result
 
 
-class RelationAggregationPaginationServlet(RestServlet):
-    """API to paginate aggregation groups of relations, e.g. paginate the
-    types and counts of the reactions on the events.
-
-    Example request and response:
-
-    GET /rooms/{room_id}/aggregations/{parent_id}
-
-    {
-        chunk: [
-            {
-                "type": "m.reaction",
-                "key": "👍",
-                "count": 3
-            }
-        ]
-    }
-    """
-
-    PATTERNS = client_patterns(
-        "/rooms/(?P<room_id>[^/]*)/aggregations/(?P<parent_id>[^/]*)"
-        "(/(?P<relation_type>[^/]*)(/(?P<event_type>[^/]*))?)?$",
-        releases=(),
-    )
-
-    def __init__(self, hs: "HomeServer"):
-        super().__init__()
-        self.auth = hs.get_auth()
-        self.store = hs.get_datastores().main
-        self.event_handler = hs.get_event_handler()
-
-    async def on_GET(
-        self,
-        request: SynapseRequest,
-        room_id: str,
-        parent_id: str,
-        relation_type: Optional[str] = None,
-        event_type: Optional[str] = None,
-    ) -> Tuple[int, JsonDict]:
-        requester = await self.auth.get_user_by_req(request, allow_guest=True)
-
-        await self.auth.check_user_in_room_or_world_readable(
-            room_id,
-            requester.user.to_string(),
-            allow_departed_users=True,
-        )
-
-        # This checks that a) the event exists and b) the user is allowed to
-        # view it.
-        event = await self.event_handler.get_event(requester.user, room_id, parent_id)
-        if event is None:
-            raise SynapseError(404, "Unknown parent event.")
-
-        if relation_type not in (RelationTypes.ANNOTATION, None):
-            raise SynapseError(
-                400, f"Relation type must be '{RelationTypes.ANNOTATION}'"
-            )
-
-        limit = parse_integer(request, "limit", default=5)
-        from_token_str = parse_string(request, "from")
-        to_token_str = parse_string(request, "to")
-
-        # Return the relations
-        from_token = None
-        if from_token_str:
-            from_token = AggregationPaginationToken.from_string(from_token_str)
-
-        to_token = None
-        if to_token_str:
-            to_token = AggregationPaginationToken.from_string(to_token_str)
-
-        pagination_chunk = await self.store.get_aggregation_groups_for_event(
-            event_id=parent_id,
-            room_id=room_id,
-            event_type=event_type,
-            limit=limit,
-            from_token=from_token,
-            to_token=to_token,
-        )
-
-        return 200, await pagination_chunk.to_dict(self.store)
-
-
-class RelationAggregationGroupPaginationServlet(RestServlet):
-    """API to paginate within an aggregation group of relations, e.g. paginate
-    all the 👍 reactions on an event.
-
-    Example request and response:
-
-    GET /rooms/{room_id}/aggregations/{parent_id}/m.annotation/m.reaction/👍
-
-    {
-        chunk: [
-            {
-                "type": "m.reaction",
-                "content": {
-                    "m.relates_to": {
-                        "rel_type": "m.annotation",
-                        "key": "👍"
-                    }
-                }
-            },
-            ...
-        ]
-    }
-    """
-
-    PATTERNS = client_patterns(
-        "/rooms/(?P<room_id>[^/]*)/aggregations/(?P<parent_id>[^/]*)"
-        "/(?P<relation_type>[^/]*)/(?P<event_type>[^/]*)/(?P<key>[^/]*)$",
-        releases=(),
-    )
-
-    def __init__(self, hs: "HomeServer"):
-        super().__init__()
-        self.auth = hs.get_auth()
-        self.store = hs.get_datastores().main
-        self._relations_handler = hs.get_relations_handler()
-
-    async def on_GET(
-        self,
-        request: SynapseRequest,
-        room_id: str,
-        parent_id: str,
-        relation_type: str,
-        event_type: str,
-        key: str,
-    ) -> Tuple[int, JsonDict]:
-        requester = await self.auth.get_user_by_req(request, allow_guest=True)
-
-        if relation_type != RelationTypes.ANNOTATION:
-            raise SynapseError(400, "Relation type must be 'annotation'")
-
-        limit = parse_integer(request, "limit", default=5)
-        from_token_str = parse_string(request, "from")
-        to_token_str = parse_string(request, "to")
-
-        from_token = None
-        if from_token_str:
-            from_token = await StreamToken.from_string(self.store, from_token_str)
-        to_token = None
-        if to_token_str:
-            to_token = await StreamToken.from_string(self.store, to_token_str)
-
-        result = await self._relations_handler.get_relations(
-            requester=requester,
-            event_id=parent_id,
-            room_id=room_id,
-            relation_type=relation_type,
-            event_type=event_type,
-            aggregation_key=key,
-            limit=limit,
-            from_token=from_token,
-            to_token=to_token,
-        )
-
-        return 200, result
-
-
 def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
     RelationPaginationServlet(hs).register(http_server)
-    RelationAggregationPaginationServlet(hs).register(http_server)
-    RelationAggregationGroupPaginationServlet(hs).register(http_server)
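
With the unstable /aggregations endpoints gone, reaction totals have to be tallied by the caller from the relations pagination API that remains. The sketch below is illustrative only: it assumes the unstable /relations path that Synapse served at the time, and HOMESERVER, TOKEN, ROOM_ID and EVENT_ID are placeholder values, not anything defined in this commit.

    # Illustrative sketch: counting reaction keys client-side instead of calling
    # the removed /aggregations endpoint. The endpoint prefix and the constants
    # below are assumptions, not part of this commit.
    from collections import Counter
    from urllib.parse import quote

    import requests

    HOMESERVER = "https://matrix.example.org"
    TOKEN = "access_token_here"
    ROOM_ID = "!room:example.org"
    EVENT_ID = "$parent_event"

    url = (
        f"{HOMESERVER}/_matrix/client/unstable/rooms/{quote(ROOM_ID)}"
        f"/relations/{quote(EVENT_ID)}/m.annotation/m.reaction"
    )
    resp = requests.get(url, headers={"Authorization": f"Bearer {TOKEN}"})
    resp.raise_for_status()

    # Tally the reaction keys from the returned relation events.
    counts = Counter(
        ev["content"]["m.relates_to"]["key"]
        for ev in resp.json().get("chunk", [])
        if "key" in ev.get("content", {}).get("m.relates_to", {})
    )
    print(counts.most_common())
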
diff --git a/synapse/rest/client/room_batch.py b/synapse/rest/client/room_batch.py
index 0780485322..dd91dabedd 100644
--- a/synapse/rest/client/room_batch.py
+++ b/synapse/rest/client/room_batch.py
@@ -123,6 +123,19 @@ class RoomBatchSendEventRestServlet(RestServlet):
                 errcode=Codes.INVALID_PARAM,
             )
 
+        # Make sure that the prev_event_ids exist and aren't outliers - ie, they are
+        # regular parts of the room DAG where we know the state.
+        non_outlier_prev_events = await self.store.have_events_in_timeline(
+            prev_event_ids_from_query
+        )
+        for prev_event_id in prev_event_ids_from_query:
+            if prev_event_id not in non_outlier_prev_events:
+                raise SynapseError(
+                    HTTPStatus.BAD_REQUEST,
+                    "prev_event %s does not exist, or is an outlier" % (prev_event_id,),
+                    errcode=Codes.INVALID_PARAM,
+                )
+
         # For the event we are inserting next to (`prev_event_ids_from_query`),
         # find the most recent state events that allowed that message to be
         # sent. We will use that as a base to auth our historical messages
@@ -131,14 +144,6 @@ class RoomBatchSendEventRestServlet(RestServlet):
             prev_event_ids_from_query
         )
 
-        if not state_event_ids:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "No auth events found for given prev_event query parameter. The prev_event=%s probably does not exist."
-                % prev_event_ids_from_query,
-                errcode=Codes.INVALID_PARAM,
-            )
-
         state_event_ids_at_start = []
         # Create and persist all of the state events that float off on their own
         # before the batch. These will most likely be all of the invite/member
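
The new check fails fast when any supplied prev_event is unknown or an outlier, instead of relying on the later state lookup to produce a vaguer error. Below is a minimal, standalone sketch of the same validate-before-use pattern; the names are hypothetical stand-ins, not Synapse APIs.

    # Hypothetical sketch of the validate-before-use pattern added above: reject
    # the request early if any requested prev_event is not a known, non-outlier
    # event. BadRequest stands in for SynapseError(HTTPStatus.BAD_REQUEST, ...).
    from typing import Collection, Set


    class BadRequest(Exception):
        """Stand-in for a 400-level API error."""


    def check_prev_events(
        requested: Collection[str], known_non_outliers: Set[str]
    ) -> None:
        for event_id in requested:
            if event_id not in known_non_outliers:
                raise BadRequest(
                    "prev_event %s does not exist, or is an outlier" % (event_id,)
                )


    # Usage: only events present in the timeline (i.e. not outliers) pass.
    check_prev_events(["$a", "$b"], known_non_outliers={"$a", "$b"})  # ok
    # check_prev_events(["$a", "$c"], known_non_outliers={"$a"})      # raises BadRequest
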
diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py
index 53c385a86c..0bf32f873b 100644
--- a/synapse/rest/client/sync.py
+++ b/synapse/rest/client/sync.py
@@ -99,6 +99,7 @@ class SyncRestServlet(RestServlet):
         self.presence_handler = hs.get_presence_handler()
         self._server_notices_sender = hs.get_server_notices_sender()
         self._event_serializer = hs.get_event_client_serializer()
+        self._msc2654_enabled = hs.config.experimental.msc2654_enabled
 
     async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         # This will always be set by the time Twisted calls us.
@@ -521,7 +522,8 @@ class SyncRestServlet(RestServlet):
             result["ephemeral"] = {"events": ephemeral_events}
             result["unread_notifications"] = room.unread_notifications
             result["summary"] = room.summary
-            result["org.matrix.msc2654.unread_count"] = room.unread_count
+            if self._msc2654_enabled:
+                result["org.matrix.msc2654.unread_count"] = room.unread_count
 
         return result
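
The unstable unread count is now only emitted when the experimental MSC2654 flag is turned on (in homeserver.yaml that lives under experimental_features, assuming the flag is spelled msc2654_enabled there). A minimal sketch of the same gate-behind-a-flag pattern, with hypothetical stand-ins for the config and room objects:

    # Minimal sketch of gating an experimental response field behind a config
    # flag, mirroring the change above. ExperimentalConfig and the function name
    # are hypothetical stand-ins, not Synapse code.
    from dataclasses import dataclass


    @dataclass
    class ExperimentalConfig:
        msc2654_enabled: bool = False


    def encode_room_summary(room_unread_count: int, config: ExperimentalConfig) -> dict:
        result: dict = {}
        # Only expose the unstable field when the experiment is explicitly
        # enabled, so clients cannot come to depend on it by accident.
        if config.msc2654_enabled:
            result["org.matrix.msc2654.unread_count"] = room_unread_count
        return result


    print(encode_room_summary(3, ExperimentalConfig(msc2654_enabled=True)))
    # {'org.matrix.msc2654.unread_count': 3}
    print(encode_room_summary(3, ExperimentalConfig()))
    # {}
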
diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py
index b9bfbea21b..0c9f042c84 100644
--- a/synapse/rest/key/v2/local_key_resource.py
+++ b/synapse/rest/key/v2/local_key_resource.py
@@ -76,17 +76,17 @@ class LocalKey(Resource):
 
     def response_json_object(self) -> JsonDict:
         verify_keys = {}
-        for key in self.config.key.signing_key:
-            verify_key_bytes = key.verify_key.encode()
-            key_id = "%s:%s" % (key.alg, key.version)
+        for signing_key in self.config.key.signing_key:
+            verify_key_bytes = signing_key.verify_key.encode()
+            key_id = "%s:%s" % (signing_key.alg, signing_key.version)
             verify_keys[key_id] = {"key": encode_base64(verify_key_bytes)}
 
         old_verify_keys = {}
-        for key_id, key in self.config.key.old_signing_keys.items():
-            verify_key_bytes = key.encode()
+        for key_id, old_signing_key in self.config.key.old_signing_keys.items():
+            verify_key_bytes = old_signing_key.encode()
             old_verify_keys[key_id] = {
                 "key": encode_base64(verify_key_bytes),
-                "expired_ts": key.expired_ts,
+                "expired_ts": old_signing_key.expired,
             }
 
         json_object = {
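
The renames stop the two loops from reusing key and key_id for different things; the JSON served from /_matrix/key/v2/server keeps the same shape. A rough, self-contained sketch of how the verify_keys block is assembled, using signedjson and unpaddedbase64 directly rather than the homeserver config (the locally generated key is an assumption for the example):

    # Rough sketch of building the verify_keys section, following the renamed
    # loop above. The signing key here is generated on the fly instead of being
    # read from config, purely for illustration.
    from signedjson.key import generate_signing_key
    from unpaddedbase64 import encode_base64

    signing_keys = [generate_signing_key("a_version_string")]

    verify_keys = {}
    for signing_key in signing_keys:
        verify_key_bytes = signing_key.verify_key.encode()
        key_id = "%s:%s" % (signing_key.alg, signing_key.version)
        verify_keys[key_id] = {"key": encode_base64(verify_key_bytes)}

    print(verify_keys)  # e.g. {'ed25519:a_version_string': {'key': '...'}}
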
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index 3525d6ae54..f597157581 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 import logging
-from typing import TYPE_CHECKING, Dict
+from typing import TYPE_CHECKING, Dict, Set
 
 from signedjson.sign import sign_json
 
@@ -149,7 +149,7 @@ class RemoteKey(DirectServeJsonResource):
 
         cached = await self.store.get_server_keys_json(store_queries)
 
-        json_results = set()
+        json_results: Set[bytes] = set()
 
         time_now_ms = self.clock.time_msec()
 
@@ -234,8 +234,8 @@ class RemoteKey(DirectServeJsonResource):
             await self.query_keys(request, query, query_remote_on_cache_miss=False)
         else:
             signed_keys = []
-            for key_json in json_results:
-                key_json = json_decoder.decode(key_json.decode("utf-8"))
+            for key_json_raw in json_results:
+                key_json = json_decoder.decode(key_json_raw.decode("utf-8"))
                 for signing_key in self.config.key.key_server_signing_keys:
                     key_json = sign_json(
                         key_json, self.config.server.server_name, signing_key
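
Both hunks are about making the method type-check cleanly: json_results gets an explicit Set[bytes] annotation, and the loop stops rebinding one name from raw bytes to a decoded dict. A tiny stdlib-only illustration of the pattern (the sample key JSON is made up):

    # Illustration of the rebinding problem fixed above: reusing one name for
    # both the raw bytes and the decoded dict gives the variable two
    # incompatible types, which mypy rejects once json_results is Set[bytes].
    import json
    from typing import Set

    json_results: Set[bytes] = {b'{"server_name": "example.org"}'}  # made-up sample

    for key_json_raw in json_results:
        # Decode into a new name instead of rebinding key_json_raw.
        key_json = json.loads(key_json_raw.decode("utf-8"))
        print(key_json["server_name"])
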
diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py
index 9f6c251caf..604f18bf52 100644
--- a/synapse/rest/media/v1/media_storage.py
+++ b/synapse/rest/media/v1/media_storage.py
@@ -352,7 +352,7 @@ class ReadableFileWrapper:
     `IConsumer`.
     """
 
-    CHUNK_SIZE = 2 ** 14
+    CHUNK_SIZE = 2**14
 
     clock: Clock
     path: str
diff --git a/synapse/rest/media/v1/preview_html.py b/synapse/rest/media/v1/preview_html.py
index 4cc9c66fbe..ca73965fc2 100644
--- a/synapse/rest/media/v1/preview_html.py
+++ b/synapse/rest/media/v1/preview_html.py
@@ -23,10 +23,10 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
 
 _charset_match = re.compile(
-    br'<\s*meta[^>]*charset\s*=\s*"?([a-z0-9_-]+)"?', flags=re.I
+    rb'<\s*meta[^>]*charset\s*=\s*"?([a-z0-9_-]+)"?', flags=re.I
 )
 _xml_encoding_match = re.compile(
-    br'\s*<\s*\?\s*xml[^>]*encoding="([a-z0-9_-]+)"', flags=re.I
+    rb'\s*<\s*\?\s*xml[^>]*encoding="([a-z0-9_-]+)"', flags=re.I
 )
 _content_type_match = re.compile(r'.*; *charset="?(.*?)"?(;|$)', flags=re.I)
 
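
br'...' and rb'...' spell the same raw bytes literal to the interpreter; the change only standardises on the prefix order that newer linters and formatters prefer. A quick check that the charset-sniffing regex behaves identically (the sample HTML is made up):

    # Demonstration that the rb/br prefix swap above is purely cosmetic: both
    # spell a raw bytes literal, so the compiled charset regex matches the same
    # input either way.
    import re

    _charset_match = re.compile(rb'<\s*meta[^>]*charset\s*=\s*"?([a-z0-9_-]+)"?', flags=re.I)

    html = b'<html><head><meta charset="utf-8"></head></html>'  # made-up sample
    match = _charset_match.search(html)
    assert match is not None and match.group(1) == b"utf-8"
    print(match.group(1))  # b'utf-8'

    assert rb"\s" == br"\s"  # identical byte string literals
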
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index d47af8ead6..50383bdbd1 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -200,12 +200,17 @@ class PreviewUrlResource(DirectServeJsonResource):
                         match = False
                         continue
 
+                    # Some attributes might not be parsed as strings by urlsplit (such as the
+                    # port, which is parsed as an int). Because we use match functions that
+                    # expect strings, we want to make sure that's what we give them.
+                    value_str = str(value)
+
                     if pattern.startswith("^"):
-                        if not re.match(pattern, getattr(url_tuple, attrib)):
+                        if not re.match(pattern, value_str):
                             match = False
                             continue
                     else:
-                        if not fnmatch.fnmatch(getattr(url_tuple, attrib), pattern):
+                        if not fnmatch.fnmatch(value_str, pattern):
                             match = False
                             continue
                 if match:
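
urlsplit returns the port as an int (or None), so passing it straight to re.match or fnmatch.fnmatch raises a TypeError; coercing to str first, as the hunk above does, avoids that. A small stdlib-only reproduction:

    # Small reproduction of the bug fixed above: urlsplit parses the port as an
    # int, and re.match expects a string target, so matching the raw value fails.
    import fnmatch
    import re
    from urllib.parse import urlsplit

    url_tuple = urlsplit("https://matrix.example.org:8448/_matrix/foo")
    port = url_tuple.port
    print(type(port), port)  # <class 'int'> 8448

    try:
        re.match("^8448$", port)  # type: ignore[arg-type]
    except TypeError as e:
        print("re.match on the raw int fails:", e)

    # Coercing to str first, as in the change above, satisfies both matchers.
    value_str = str(port)
    assert re.match("^8448$", value_str)
    assert fnmatch.fnmatch(value_str, "84*")
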