diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py
index 14227f1cdb..326780405e 100644
--- a/synapse/rest/__init__.py
+++ b/synapse/rest/__init__.py
@@ -46,7 +46,9 @@ from synapse.rest.client.v2_alpha import (
account_data,
report_event,
openid,
+ notifications,
devices,
+ thirdparty,
)
from synapse.http.server import JsonResource
@@ -91,4 +93,6 @@ class ClientRestResource(JsonResource):
account_data.register_servlets(hs, client_resource)
report_event.register_servlets(hs, client_resource)
openid.register_servlets(hs, client_resource)
+ notifications.register_servlets(hs, client_resource)
devices.register_servlets(hs, client_resource)
+ thirdparty.register_servlets(hs, client_resource)
diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py
index b0cb31a448..af21661d7c 100644
--- a/synapse/rest/client/v1/admin.py
+++ b/synapse/rest/client/v1/admin.py
@@ -28,6 +28,10 @@ logger = logging.getLogger(__name__)
class WhoisRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/admin/whois/(?P<user_id>[^/]*)")
+ def __init__(self, hs):
+ super(WhoisRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
@defer.inlineCallbacks
def on_GET(self, request, user_id):
target_user = UserID.from_string(user_id)
@@ -82,6 +86,10 @@ class PurgeHistoryRestServlet(ClientV1RestServlet):
"/admin/purge_history/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
)
+ def __init__(self, hs):
+ super(PurgeHistoryRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
@defer.inlineCallbacks
def on_POST(self, request, room_id, event_id):
requester = yield self.auth.get_user_by_req(request)
diff --git a/synapse/rest/client/v1/base.py b/synapse/rest/client/v1/base.py
index 96b49b01f2..c2a8447860 100644
--- a/synapse/rest/client/v1/base.py
+++ b/synapse/rest/client/v1/base.py
@@ -57,7 +57,6 @@ class ClientV1RestServlet(RestServlet):
hs (synapse.server.HomeServer):
"""
self.hs = hs
- self.handlers = hs.get_handlers()
self.builder_factory = hs.get_event_builder_factory()
self.auth = hs.get_v1auth()
self.txns = HttpTransactionStore()
diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py
index 8ac09419dc..09d0831594 100644
--- a/synapse/rest/client/v1/directory.py
+++ b/synapse/rest/client/v1/directory.py
@@ -36,6 +36,10 @@ def register_servlets(hs, http_server):
class ClientDirectoryServer(ClientV1RestServlet):
PATTERNS = client_path_patterns("/directory/room/(?P<room_alias>[^/]*)$")
+ def __init__(self, hs):
+ super(ClientDirectoryServer, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
@defer.inlineCallbacks
def on_GET(self, request, room_alias):
room_alias = RoomAlias.from_string(room_alias)
@@ -146,6 +150,7 @@ class ClientDirectoryListServer(ClientV1RestServlet):
def __init__(self, hs):
super(ClientDirectoryListServer, self).__init__(hs)
self.store = hs.get_datastore()
+ self.handlers = hs.get_handlers()
@defer.inlineCallbacks
def on_GET(self, request, room_id):
diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py
index 498bb9e18a..701b6f549b 100644
--- a/synapse/rest/client/v1/events.py
+++ b/synapse/rest/client/v1/events.py
@@ -32,6 +32,10 @@ class EventStreamRestServlet(ClientV1RestServlet):
DEFAULT_LONGPOLL_TIME_MS = 30000
+ def __init__(self, hs):
+ super(EventStreamRestServlet, self).__init__(hs)
+ self.event_stream_handler = hs.get_event_stream_handler()
+
@defer.inlineCallbacks
def on_GET(self, request):
requester = yield self.auth.get_user_by_req(
@@ -46,7 +50,6 @@ class EventStreamRestServlet(ClientV1RestServlet):
if "room_id" in request.args:
room_id = request.args["room_id"][0]
- handler = self.handlers.event_stream_handler
pagin_config = PaginationConfig.from_request(request)
timeout = EventStreamRestServlet.DEFAULT_LONGPOLL_TIME_MS
if "timeout" in request.args:
@@ -57,7 +60,7 @@ class EventStreamRestServlet(ClientV1RestServlet):
as_client_event = "raw" not in request.args
- chunk = yield handler.get_stream(
+ chunk = yield self.event_stream_handler.get_stream(
requester.user.to_string(),
pagin_config,
timeout=timeout,
@@ -80,12 +83,12 @@ class EventRestServlet(ClientV1RestServlet):
def __init__(self, hs):
super(EventRestServlet, self).__init__(hs)
self.clock = hs.get_clock()
+ self.event_handler = hs.get_event_handler()
@defer.inlineCallbacks
def on_GET(self, request, event_id):
requester = yield self.auth.get_user_by_req(request)
- handler = self.handlers.event_handler
- event = yield handler.get_event(requester.user, event_id)
+ event = yield self.event_handler.get_event(requester.user, event_id)
time_now = self.clock.time_msec()
if event:
diff --git a/synapse/rest/client/v1/initial_sync.py b/synapse/rest/client/v1/initial_sync.py
index 36c3520567..113a49e539 100644
--- a/synapse/rest/client/v1/initial_sync.py
+++ b/synapse/rest/client/v1/initial_sync.py
@@ -23,6 +23,10 @@ from .base import ClientV1RestServlet, client_path_patterns
class InitialSyncRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/initialSync$")
+ def __init__(self, hs):
+ super(InitialSyncRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
@defer.inlineCallbacks
def on_GET(self, request):
requester = yield self.auth.get_user_by_req(request)
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index 92fcae674a..6c0eec8fb3 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -54,12 +54,9 @@ class LoginRestServlet(ClientV1RestServlet):
self.jwt_secret = hs.config.jwt_secret
self.jwt_algorithm = hs.config.jwt_algorithm
self.cas_enabled = hs.config.cas_enabled
- self.cas_server_url = hs.config.cas_server_url
- self.cas_required_attributes = hs.config.cas_required_attributes
- self.servername = hs.config.server_name
- self.http_client = hs.get_simple_http_client()
self.auth_handler = self.hs.get_auth_handler()
self.device_handler = self.hs.get_device_handler()
+ self.handlers = hs.get_handlers()
def on_GET(self, request):
flows = []
@@ -110,17 +107,6 @@ class LoginRestServlet(ClientV1RestServlet):
LoginRestServlet.JWT_TYPE):
result = yield self.do_jwt_login(login_submission)
defer.returnValue(result)
- # TODO Delete this after all CAS clients switch to token login instead
- elif self.cas_enabled and (login_submission["type"] ==
- LoginRestServlet.CAS_TYPE):
- uri = "%s/proxyValidate" % (self.cas_server_url,)
- args = {
- "ticket": login_submission["ticket"],
- "service": login_submission["service"]
- }
- body = yield self.http_client.get_raw(uri, args)
- result = yield self.do_cas_login(body)
- defer.returnValue(result)
elif login_submission["type"] == LoginRestServlet.TOKEN_TYPE:
result = yield self.do_token_login(login_submission)
defer.returnValue(result)
@@ -191,51 +177,6 @@ class LoginRestServlet(ClientV1RestServlet):
defer.returnValue((200, result))
- # TODO Delete this after all CAS clients switch to token login instead
- @defer.inlineCallbacks
- def do_cas_login(self, cas_response_body):
- user, attributes = self.parse_cas_response(cas_response_body)
-
- for required_attribute, required_value in self.cas_required_attributes.items():
- # If required attribute was not in CAS Response - Forbidden
- if required_attribute not in attributes:
- raise LoginError(401, "Unauthorized", errcode=Codes.UNAUTHORIZED)
-
- # Also need to check value
- if required_value is not None:
- actual_value = attributes[required_attribute]
- # If required attribute value does not match expected - Forbidden
- if required_value != actual_value:
- raise LoginError(401, "Unauthorized", errcode=Codes.UNAUTHORIZED)
-
- user_id = UserID.create(user, self.hs.hostname).to_string()
- auth_handler = self.auth_handler
- registered_user_id = yield auth_handler.check_user_exists(user_id)
- if registered_user_id:
- access_token, refresh_token = (
- yield auth_handler.get_login_tuple_for_user_id(
- registered_user_id
- )
- )
- result = {
- "user_id": registered_user_id, # may have changed
- "access_token": access_token,
- "refresh_token": refresh_token,
- "home_server": self.hs.hostname,
- }
-
- else:
- user_id, access_token = (
- yield self.handlers.registration_handler.register(localpart=user)
- )
- result = {
- "user_id": user_id, # may have changed
- "access_token": access_token,
- "home_server": self.hs.hostname,
- }
-
- defer.returnValue((200, result))
-
@defer.inlineCallbacks
def do_jwt_login(self, login_submission):
token = login_submission.get("token", None)
@@ -293,33 +234,6 @@ class LoginRestServlet(ClientV1RestServlet):
defer.returnValue((200, result))
- # TODO Delete this after all CAS clients switch to token login instead
- def parse_cas_response(self, cas_response_body):
- root = ET.fromstring(cas_response_body)
- if not root.tag.endswith("serviceResponse"):
- raise LoginError(401, "Invalid CAS response", errcode=Codes.UNAUTHORIZED)
- if not root[0].tag.endswith("authenticationSuccess"):
- raise LoginError(401, "Unsuccessful CAS response", errcode=Codes.UNAUTHORIZED)
- for child in root[0]:
- if child.tag.endswith("user"):
- user = child.text
- if child.tag.endswith("attributes"):
- attributes = {}
- for attribute in child:
- # ElementTree library expands the namespace in attribute tags
- # to the full URL of the namespace.
- # See (https://docs.python.org/2/library/xml.etree.elementtree.html)
- # We don't care about namespace here and it will always be encased in
- # curly braces, so we remove them.
- if "}" in attribute.tag:
- attributes[attribute.tag.split("}")[1]] = attribute.text
- else:
- attributes[attribute.tag] = attribute.text
- if user is None or attributes is None:
- raise LoginError(401, "Invalid CAS response", errcode=Codes.UNAUTHORIZED)
-
- return (user, attributes)
-
def _register_device(self, user_id, login_submission):
"""Register a device for a user.
@@ -347,6 +261,7 @@ class SAML2RestServlet(ClientV1RestServlet):
def __init__(self, hs):
super(SAML2RestServlet, self).__init__(hs)
self.sp_config = hs.config.saml2_config_path
+ self.handlers = hs.get_handlers()
@defer.inlineCallbacks
def on_POST(self, request):
@@ -384,18 +299,6 @@ class SAML2RestServlet(ClientV1RestServlet):
defer.returnValue((200, {"status": "not_authenticated"}))
-# TODO Delete this after all CAS clients switch to token login instead
-class CasRestServlet(ClientV1RestServlet):
- PATTERNS = client_path_patterns("/login/cas", releases=())
-
- def __init__(self, hs):
- super(CasRestServlet, self).__init__(hs)
- self.cas_server_url = hs.config.cas_server_url
-
- def on_GET(self, request):
- return (200, {"serverUrl": self.cas_server_url})
-
-
class CasRedirectServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/login/cas/redirect", releases=())
@@ -427,6 +330,8 @@ class CasTicketServlet(ClientV1RestServlet):
self.cas_server_url = hs.config.cas_server_url
self.cas_service_url = hs.config.cas_service_url
self.cas_required_attributes = hs.config.cas_required_attributes
+ self.auth_handler = hs.get_auth_handler()
+ self.handlers = hs.get_handlers()
@defer.inlineCallbacks
def on_GET(self, request):
@@ -479,30 +384,39 @@ class CasTicketServlet(ClientV1RestServlet):
return urlparse.urlunparse(url_parts)
def parse_cas_response(self, cas_response_body):
- root = ET.fromstring(cas_response_body)
- if not root.tag.endswith("serviceResponse"):
- raise LoginError(401, "Invalid CAS response", errcode=Codes.UNAUTHORIZED)
- if not root[0].tag.endswith("authenticationSuccess"):
- raise LoginError(401, "Unsuccessful CAS response", errcode=Codes.UNAUTHORIZED)
- for child in root[0]:
- if child.tag.endswith("user"):
- user = child.text
- if child.tag.endswith("attributes"):
- attributes = {}
- for attribute in child:
- # ElementTree library expands the namespace in attribute tags
- # to the full URL of the namespace.
- # See (https://docs.python.org/2/library/xml.etree.elementtree.html)
- # We don't care about namespace here and it will always be encased in
- # curly braces, so we remove them.
- if "}" in attribute.tag:
- attributes[attribute.tag.split("}")[1]] = attribute.text
- else:
- attributes[attribute.tag] = attribute.text
- if user is None or attributes is None:
- raise LoginError(401, "Invalid CAS response", errcode=Codes.UNAUTHORIZED)
-
- return (user, attributes)
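+ # A successful CAS serviceResponse is expected to look roughly like:
+ #
+ #   <cas:serviceResponse xmlns:cas='...'>
+ #     <cas:authenticationSuccess>
+ #       <cas:user>username</cas:user>
+ #       <cas:attributes>...</cas:attributes>
+ #     </cas:authenticationSuccess>
+ #   </cas:serviceResponse>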
+ user = None
+ attributes = None
+ try:
+ root = ET.fromstring(cas_response_body)
+ if not root.tag.endswith("serviceResponse"):
+ raise Exception("root of CAS response is not serviceResponse")
+ success = (root[0].tag.endswith("authenticationSuccess"))
+ for child in root[0]:
+ if child.tag.endswith("user"):
+ user = child.text
+ if child.tag.endswith("attributes"):
+ attributes = {}
+ for attribute in child:
+ # The ElementTree library expands the namespace in
+ # attribute tags to the full URL of the namespace.
+ # We don't care about the namespace here, and it will
+ # always be enclosed in curly braces, so we remove them.
+ tag = attribute.tag
+ if "}" in tag:
+ tag = tag.split("}")[1]
+ attributes[tag] = attribute.text
+ if user is None:
+ raise Exception("CAS response does not contain user")
+ if attributes is None:
+ raise Exception("CAS response does not contain attributes")
+ except Exception:
+ logger.error("Error parsing CAS response", exc_info=1)
+ raise LoginError(401, "Invalid CAS response",
+ errcode=Codes.UNAUTHORIZED)
+ if not success:
+ raise LoginError(401, "Unsuccessful CAS response",
+ errcode=Codes.UNAUTHORIZED)
+ return user, attributes
def register_servlets(hs, http_server):
@@ -512,5 +426,3 @@ def register_servlets(hs, http_server):
if hs.config.cas_enabled:
CasRedirectServlet(hs).register(http_server)
CasTicketServlet(hs).register(http_server)
- CasRestServlet(hs).register(http_server)
- # TODO PasswordResetRestServlet(hs).register(http_server)
diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py
index 65c4e2ebef..355e82474b 100644
--- a/synapse/rest/client/v1/profile.py
+++ b/synapse/rest/client/v1/profile.py
@@ -24,6 +24,10 @@ from synapse.http.servlet import parse_json_object_from_request
class ProfileDisplaynameRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)/displayname")
+ def __init__(self, hs):
+ super(ProfileDisplaynameRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
@defer.inlineCallbacks
def on_GET(self, request, user_id):
user = UserID.from_string(user_id)
@@ -62,6 +66,10 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet):
class ProfileAvatarURLRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)/avatar_url")
+ def __init__(self, hs):
+ super(ProfileAvatarURLRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
@defer.inlineCallbacks
def on_GET(self, request, user_id):
user = UserID.from_string(user_id)
@@ -99,6 +107,10 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet):
class ProfileRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)")
+ def __init__(self, hs):
+ super(ProfileRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
@defer.inlineCallbacks
def on_GET(self, request, user_id):
user = UserID.from_string(user_id)
diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py
index 2383b9df86..71d58c8e8d 100644
--- a/synapse/rest/client/v1/register.py
+++ b/synapse/rest/client/v1/register.py
@@ -65,6 +65,7 @@ class RegisterRestServlet(ClientV1RestServlet):
self.sessions = {}
self.enable_registration = hs.config.enable_registration
self.auth_handler = hs.get_auth_handler()
+ self.handlers = hs.get_handlers()
def on_GET(self, request):
if self.hs.config.enable_registration_captcha:
@@ -383,6 +384,7 @@ class CreateUserRestServlet(ClientV1RestServlet):
super(CreateUserRestServlet, self).__init__(hs)
self.store = hs.get_datastore()
self.direct_user_creation_max_duration = hs.config.user_creation_max_duration
+ self.handlers = hs.get_handlers()
@defer.inlineCallbacks
def on_POST(self, request):
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index 866a1e9120..0d81757010 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -35,6 +35,10 @@ logger = logging.getLogger(__name__)
class RoomCreateRestServlet(ClientV1RestServlet):
# No PATTERN; we have custom dispatch rules here
+ def __init__(self, hs):
+ super(RoomCreateRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
def register(self, http_server):
PATTERNS = "/createRoom"
register_txn_path(self, PATTERNS, http_server)
@@ -82,6 +86,10 @@ class RoomCreateRestServlet(ClientV1RestServlet):
# TODO: Needs unit testing for generic events
class RoomStateEventRestServlet(ClientV1RestServlet):
+ def __init__(self, hs):
+ super(RoomStateEventRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
def register(self, http_server):
# /room/$roomid/state/$eventtype
no_state_key = "/rooms/(?P<room_id>[^/]*)/state/(?P<event_type>[^/]*)$"
@@ -166,6 +174,10 @@ class RoomStateEventRestServlet(ClientV1RestServlet):
# TODO: Needs unit testing for generic events + feedback
class RoomSendEventRestServlet(ClientV1RestServlet):
+ def __init__(self, hs):
+ super(RoomSendEventRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
def register(self, http_server):
# /rooms/$roomid/send/$event_type[/$txn_id]
PATTERNS = ("/rooms/(?P<room_id>[^/]*)/send/(?P<event_type>[^/]*)")
@@ -210,6 +222,9 @@ class RoomSendEventRestServlet(ClientV1RestServlet):
# TODO: Needs unit testing for room ID + alias joins
class JoinRoomAliasServlet(ClientV1RestServlet):
+ def __init__(self, hs):
+ super(JoinRoomAliasServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
def register(self, http_server):
# /join/$room_identifier[/$txn_id]
@@ -253,6 +268,7 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
action="join",
txn_id=txn_id,
remote_room_hosts=remote_room_hosts,
+ content=content,
third_party_signed=content.get("third_party_signed", None),
)
@@ -296,6 +312,10 @@ class PublicRoomListRestServlet(ClientV1RestServlet):
class RoomMemberListRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/members$")
+ def __init__(self, hs):
+ super(RoomMemberListRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
@defer.inlineCallbacks
def on_GET(self, request, room_id):
# TODO support Pagination stream API (limit/tokens)
@@ -322,6 +342,10 @@ class RoomMemberListRestServlet(ClientV1RestServlet):
class RoomMessageListRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/messages$")
+ def __init__(self, hs):
+ super(RoomMessageListRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
@defer.inlineCallbacks
def on_GET(self, request, room_id):
requester = yield self.auth.get_user_by_req(request, allow_guest=True)
@@ -351,6 +375,10 @@ class RoomMessageListRestServlet(ClientV1RestServlet):
class RoomStateRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/state$")
+ def __init__(self, hs):
+ super(RoomStateRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
@defer.inlineCallbacks
def on_GET(self, request, room_id):
requester = yield self.auth.get_user_by_req(request, allow_guest=True)
@@ -368,6 +396,10 @@ class RoomStateRestServlet(ClientV1RestServlet):
class RoomInitialSyncRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/initialSync$")
+ def __init__(self, hs):
+ super(RoomInitialSyncRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
@defer.inlineCallbacks
def on_GET(self, request, room_id):
requester = yield self.auth.get_user_by_req(request, allow_guest=True)
@@ -388,6 +420,7 @@ class RoomEventContext(ClientV1RestServlet):
def __init__(self, hs):
super(RoomEventContext, self).__init__(hs)
self.clock = hs.get_clock()
+ self.handlers = hs.get_handlers()
@defer.inlineCallbacks
def on_GET(self, request, room_id, event_id):
@@ -424,6 +457,10 @@ class RoomEventContext(ClientV1RestServlet):
class RoomForgetRestServlet(ClientV1RestServlet):
+ def __init__(self, hs):
+ super(RoomForgetRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
def register(self, http_server):
PATTERNS = ("/rooms/(?P<room_id>[^/]*)/forget")
register_txn_path(self, PATTERNS, http_server)
@@ -462,6 +499,10 @@ class RoomForgetRestServlet(ClientV1RestServlet):
# TODO: Needs unit testing
class RoomMembershipRestServlet(ClientV1RestServlet):
+ def __init__(self, hs):
+ super(RoomMembershipRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
def register(self, http_server):
# /rooms/$roomid/[invite|join|leave]
PATTERNS = ("/rooms/(?P<room_id>[^/]*)/"
@@ -542,6 +583,10 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
class RoomRedactEventRestServlet(ClientV1RestServlet):
+ def __init__(self, hs):
+ super(RoomRedactEventRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
def register(self, http_server):
PATTERNS = ("/rooms/(?P<room_id>[^/]*)/redact/(?P<event_id>[^/]*)")
register_txn_path(self, PATTERNS, http_server)
@@ -624,6 +669,10 @@ class SearchRestServlet(ClientV1RestServlet):
"/search$"
)
+ def __init__(self, hs):
+ super(SearchRestServlet, self).__init__(hs)
+ self.handlers = hs.get_handlers()
+
@defer.inlineCallbacks
def on_POST(self, request):
requester = yield self.auth.get_user_by_req(request)
diff --git a/synapse/rest/client/v2_alpha/notifications.py b/synapse/rest/client/v2_alpha/notifications.py
new file mode 100644
index 0000000000..f1a48acf07
--- /dev/null
+++ b/synapse/rest/client/v2_alpha/notifications.py
@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+# Copyright 2016 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.http.servlet import (
+ RestServlet, parse_string, parse_integer
+)
+from synapse.events.utils import (
+ serialize_event, format_event_for_client_v2_without_room_id,
+)
+
+from ._base import client_v2_patterns
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class NotificationsServlet(RestServlet):
+ PATTERNS = client_v2_patterns("/notifications$", releases=())
+
+ def __init__(self, hs):
+ super(NotificationsServlet, self).__init__()
+ self.store = hs.get_datastore()
+ self.auth = hs.get_auth()
+ self.clock = hs.get_clock()
+
+ @defer.inlineCallbacks
+ def on_GET(self, request):
+ requester = yield self.auth.get_user_by_req(request)
+ user_id = requester.user.to_string()
+
+ from_token = parse_string(request, "from", required=False)
+ limit = parse_integer(request, "limit", default=50)
+
+ limit = min(limit, 500)
+
+ push_actions = yield self.store.get_push_actions_for_user(
+ user_id, from_token, limit
+ )
+
+ receipts_by_room = yield self.store.get_receipts_for_user_with_orderings(
+ user_id, 'm.read'
+ )
+
+ notif_event_ids = [pa["event_id"] for pa in push_actions]
+ notif_events = yield self.store.get_events(notif_event_ids)
+
+ returned_push_actions = []
+
+ next_token = None
+
+ for pa in push_actions:
+ returned_pa = {
+ "room_id": pa["room_id"],
+ "profile_tag": pa["profile_tag"],
+ "actions": pa["actions"],
+ "ts": pa["received_ts"],
+ "event": serialize_event(
+ notif_events[pa["event_id"]],
+ self.clock.time_msec(),
+ event_format=format_event_for_client_v2_without_room_id,
+ ),
+ }
+
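+ # Treat the notification as read if the user's m.read receipt in the
+ # room is at or after the event itself, comparing
+ # (topological_ordering, stream_ordering) pairs lexicographically.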
+ if pa["room_id"] not in receipts_by_room:
+ returned_pa["read"] = False
+ else:
+ receipt = receipts_by_room[pa["room_id"]]
+
+ returned_pa["read"] = (
+ receipt["topological_ordering"], receipt["stream_ordering"]
+ ) >= (
+ pa["topological_ordering"], pa["stream_ordering"]
+ )
+ returned_push_actions.append(returned_pa)
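+ # Remember the stream ordering of the last push action returned; the
+ # client can pass it back as the "from" parameter to continue
+ # paginating.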
+ next_token = pa["stream_ordering"]
+
+ defer.returnValue((200, {
+ "notifications": returned_push_actions,
+ "next_token": next_token,
+ }))
+
+
+def register_servlets(hs, http_server):
+ NotificationsServlet(hs).register(http_server)
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index 943f5676a3..2121bd75ea 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -403,10 +403,9 @@ class RegisterRestServlet(RestServlet):
# register the user's device
device_id = params.get("device_id")
initial_display_name = params.get("initial_device_display_name")
- device_id = self.device_handler.check_device_registered(
+ return self.device_handler.check_device_registered(
user_id, device_id, initial_display_name
)
- return device_id
@defer.inlineCallbacks
def _do_guest_registration(self):
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 43d8e0bf39..b11acdbea7 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -146,7 +146,7 @@ class SyncRestServlet(RestServlet):
affect_presence = set_presence != PresenceState.OFFLINE
if affect_presence:
- yield self.presence_handler.set_state(user, {"presence": set_presence})
+ yield self.presence_handler.set_state(user, {"presence": set_presence}, True)
context = yield self.presence_handler.user_syncing(
user.to_string(), affect_presence=affect_presence,
diff --git a/synapse/rest/client/v2_alpha/thirdparty.py b/synapse/rest/client/v2_alpha/thirdparty.py
new file mode 100644
index 0000000000..9abca3a8ad
--- /dev/null
+++ b/synapse/rest/client/v2_alpha/thirdparty.py
@@ -0,0 +1,78 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015, 2016 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import logging
+
+from twisted.internet import defer
+
+from synapse.http.servlet import RestServlet
+from synapse.types import ThirdPartyEntityKind
+from ._base import client_v2_patterns
+
+logger = logging.getLogger(__name__)
+
+
+class ThirdPartyUserServlet(RestServlet):
+ PATTERNS = client_v2_patterns("/3pu(/(?P<protocol>[^/]+))?$",
+ releases=())
+
+ def __init__(self, hs):
+ super(ThirdPartyUserServlet, self).__init__()
+
+ self.auth = hs.get_auth()
+ self.appservice_handler = hs.get_application_service_handler()
+
+ @defer.inlineCallbacks
+ def on_GET(self, request, protocol):
+ yield self.auth.get_user_by_req(request)
+
+ fields = request.args
+ del fields["access_token"]
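+ # The remaining query parameters are handed to the application service
+ # handler, which asks the application services registered for this
+ # protocol to look up matching third party users.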
+
+ results = yield self.appservice_handler.query_3pe(
+ ThirdPartyEntityKind.USER, protocol, fields
+ )
+
+ defer.returnValue((200, results))
+
+
+class ThirdPartyLocationServlet(RestServlet):
+ PATTERNS = client_v2_patterns("/3pl(/(?P<protocol>[^/]+))?$",
+ releases=())
+
+ def __init__(self, hs):
+ super(ThirdPartyLocationServlet, self).__init__()
+
+ self.auth = hs.get_auth()
+ self.appservice_handler = hs.get_application_service_handler()
+
+ @defer.inlineCallbacks
+ def on_GET(self, request, protocol):
+ yield self.auth.get_user_by_req(request)
+
+ fields = request.args
+ del fields["access_token"]
+
+ results = yield self.appservice_handler.query_3pe(
+ ThirdPartyEntityKind.LOCATION, protocol, fields
+ )
+
+ defer.returnValue((200, results))
+
+
+def register_servlets(hs, http_server):
+ ThirdPartyUserServlet(hs).register(http_server)
+ ThirdPartyLocationServlet(hs).register(http_server)
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index 7209d5a37d..9fe2013657 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -15,6 +15,7 @@
from synapse.http.server import request_handler, respond_with_json_bytes
from synapse.http.servlet import parse_integer, parse_json_object_from_request
from synapse.api.errors import SynapseError, Codes
+from synapse.crypto.keyring import KeyLookupError
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
@@ -210,9 +211,10 @@ class RemoteKey(Resource):
yield self.keyring.get_server_verify_key_v2_direct(
server_name, key_ids
)
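+ # Key lookup failures (e.g. the remote server being unreachable) are
+ # expected from time to time, so just log them at INFO; anything else
+ # is unexpected and gets a full traceback below.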
+ except KeyLookupError as e:
+ logger.info("Failed to fetch key: %s", e)
except:
logger.exception("Failed to get key for %r", server_name)
- pass
yield self.query_keys(
request, query, query_remote_on_cache_miss=False
)
diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py
index 9f69620772..9f0625a822 100644
--- a/synapse/rest/media/v1/download_resource.py
+++ b/synapse/rest/media/v1/download_resource.py
@@ -45,6 +45,7 @@ class DownloadResource(Resource):
@request_handler()
@defer.inlineCallbacks
def _async_render_GET(self, request):
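+ # A "sandbox" Content-Security-Policy stops user-uploaded content
+ # (e.g. HTML or SVG) from running scripts when it is fetched directly
+ # from the media repository.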
+ request.setHeader("Content-Security-Policy", "sandbox")
server_name, media_id, name = parse_media_id(request)
if server_name == self.server_name:
yield self._respond_local_file(request, media_id, name)
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index bdd0e60c5b..33f35fb44e 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -29,14 +29,13 @@ from synapse.http.server import (
from synapse.util.async import ObservableDeferred
from synapse.util.stringutils import is_ascii
-from copy import deepcopy
-
import os
import re
import fnmatch
import cgi
import ujson as json
import urlparse
+import itertools
import logging
logger = logging.getLogger(__name__)
@@ -163,7 +162,7 @@ class PreviewUrlResource(Resource):
logger.debug("got media_info of '%s'" % media_info)
- if self._is_media(media_info['media_type']):
+ if _is_media(media_info['media_type']):
dims = yield self.media_repo._generate_local_thumbnails(
media_info['filesystem_id'], media_info
)
@@ -184,11 +183,9 @@ class PreviewUrlResource(Resource):
logger.warn("Couldn't get dims for %s" % url)
# define our OG response for this media
- elif self._is_html(media_info['media_type']):
+ elif _is_html(media_info['media_type']):
# TODO: somehow stop a big HTML tree from exploding synapse's RAM
- from lxml import etree
-
file = open(media_info['filename'])
body = file.read()
file.close()
@@ -199,17 +196,35 @@ class PreviewUrlResource(Resource):
match = re.match(r'.*; *charset=(.*?)(;|$)', media_info['media_type'], re.I)
encoding = match.group(1) if match else "utf-8"
- try:
- parser = etree.HTMLParser(recover=True, encoding=encoding)
- tree = etree.fromstring(body, parser)
- og = yield self._calc_og(tree, media_info, requester)
- except UnicodeDecodeError:
- # blindly try decoding the body as utf-8, which seems to fix
- # the charset mismatches on https://google.com
- parser = etree.HTMLParser(recover=True, encoding=encoding)
- tree = etree.fromstring(body.decode('utf-8', 'ignore'), parser)
- og = yield self._calc_og(tree, media_info, requester)
+ og = decode_and_calc_og(body, media_info['uri'], encoding)
+
+ # pre-cache the image for posterity
+ # FIXME: it might be cleaner to use the same flow as the main /preview_url
+ # request itself and benefit from the same caching etc. But for now we
+ # just rely on the caching on the master request to speed things up.
+ if 'og:image' in og and og['og:image']:
+ image_info = yield self._download_url(
+ _rebase_url(og['og:image'], media_info['uri']), requester.user
+ )
+ if _is_media(image_info['media_type']):
+ # TODO: make sure we don't choke on white-on-transparent images
+ dims = yield self.media_repo._generate_local_thumbnails(
+ image_info['filesystem_id'], image_info
+ )
+ if dims:
+ og["og:image:width"] = dims['width']
+ og["og:image:height"] = dims['height']
+ else:
+ logger.warn("Couldn't get dims for %s" % og["og:image"])
+
+ og["og:image"] = "mxc://%s/%s" % (
+ self.server_name, image_info['filesystem_id']
+ )
+ og["og:image:type"] = image_info['media_type']
+ og["matrix:image:size"] = image_info['media_length']
+ else:
+ del og["og:image"]
else:
logger.warn("Failed to find any OG data in %s", url)
og = {}
@@ -233,139 +248,6 @@ class PreviewUrlResource(Resource):
respond_with_json_bytes(request, 200, json.dumps(og), send_cors=True)
@defer.inlineCallbacks
- def _calc_og(self, tree, media_info, requester):
- # suck our tree into lxml and define our OG response.
-
- # if we see any image URLs in the OG response, then spider them
- # (although the client could choose to do this by asking for previews of those
- # URLs to avoid DoSing the server)
-
- # "og:type" : "video",
- # "og:url" : "https://www.youtube.com/watch?v=LXDBoHyjmtw",
- # "og:site_name" : "YouTube",
- # "og:video:type" : "application/x-shockwave-flash",
- # "og:description" : "Fun stuff happening here",
- # "og:title" : "RemoteJam - Matrix team hack for Disrupt Europe Hackathon",
- # "og:image" : "https://i.ytimg.com/vi/LXDBoHyjmtw/maxresdefault.jpg",
- # "og:video:url" : "http://www.youtube.com/v/LXDBoHyjmtw?version=3&autohide=1",
- # "og:video:width" : "1280"
- # "og:video:height" : "720",
- # "og:video:secure_url": "https://www.youtube.com/v/LXDBoHyjmtw?version=3",
-
- og = {}
- for tag in tree.xpath("//*/meta[starts-with(@property, 'og:')]"):
- if 'content' in tag.attrib:
- og[tag.attrib['property']] = tag.attrib['content']
-
- # TODO: grab article: meta tags too, e.g.:
-
- # "article:publisher" : "https://www.facebook.com/thethudonline" />
- # "article:author" content="https://www.facebook.com/thethudonline" />
- # "article:tag" content="baby" />
- # "article:section" content="Breaking News" />
- # "article:published_time" content="2016-03-31T19:58:24+00:00" />
- # "article:modified_time" content="2016-04-01T18:31:53+00:00" />
-
- if 'og:title' not in og:
- # do some basic spidering of the HTML
- title = tree.xpath("(//title)[1] | (//h1)[1] | (//h2)[1] | (//h3)[1]")
- og['og:title'] = title[0].text.strip() if title else None
-
- if 'og:image' not in og:
- # TODO: extract a favicon failing all else
- meta_image = tree.xpath(
- "//*/meta[translate(@itemprop, 'IMAGE', 'image')='image']/@content"
- )
- if meta_image:
- og['og:image'] = self._rebase_url(meta_image[0], media_info['uri'])
- else:
- # TODO: consider inlined CSS styles as well as width & height attribs
- images = tree.xpath("//img[@src][number(@width)>10][number(@height)>10]")
- images = sorted(images, key=lambda i: (
- -1 * float(i.attrib['width']) * float(i.attrib['height'])
- ))
- if not images:
- images = tree.xpath("//img[@src]")
- if images:
- og['og:image'] = images[0].attrib['src']
-
- # pre-cache the image for posterity
- # FIXME: it might be cleaner to use the same flow as the main /preview_url
- # request itself and benefit from the same caching etc. But for now we
- # just rely on the caching on the master request to speed things up.
- if 'og:image' in og and og['og:image']:
- image_info = yield self._download_url(
- self._rebase_url(og['og:image'], media_info['uri']), requester.user
- )
-
- if self._is_media(image_info['media_type']):
- # TODO: make sure we don't choke on white-on-transparent images
- dims = yield self.media_repo._generate_local_thumbnails(
- image_info['filesystem_id'], image_info
- )
- if dims:
- og["og:image:width"] = dims['width']
- og["og:image:height"] = dims['height']
- else:
- logger.warn("Couldn't get dims for %s" % og["og:image"])
-
- og["og:image"] = "mxc://%s/%s" % (
- self.server_name, image_info['filesystem_id']
- )
- og["og:image:type"] = image_info['media_type']
- og["matrix:image:size"] = image_info['media_length']
- else:
- del og["og:image"]
-
- if 'og:description' not in og:
- meta_description = tree.xpath(
- "//*/meta"
- "[translate(@name, 'DESCRIPTION', 'description')='description']"
- "/@content")
- if meta_description:
- og['og:description'] = meta_description[0]
- else:
- # grab any text nodes which are inside the <body/> tag...
- # unless they are within an HTML5 semantic markup tag...
- # <header/>, <nav/>, <aside/>, <footer/>
- # ...or if they are within a <script/> or <style/> tag.
- # This is a very very very coarse approximation to a plain text
- # render of the page.
-
- # We don't just use XPATH here as that is slow on some machines.
-
- # We clone `tree` as we modify it.
- cloned_tree = deepcopy(tree.find("body"))
-
- TAGS_TO_REMOVE = ("header", "nav", "aside", "footer", "script", "style",)
- for el in cloned_tree.iter(TAGS_TO_REMOVE):
- el.getparent().remove(el)
-
- # Split all the text nodes into paragraphs (by splitting on new
- # lines)
- text_nodes = (
- re.sub(r'\s+', '\n', el.text).strip()
- for el in cloned_tree.iter()
- if el.text and isinstance(el.tag, basestring) # Removes comments
- )
- og['og:description'] = summarize_paragraphs(text_nodes)
-
- # TODO: delete the url downloads to stop diskfilling,
- # as we only ever cared about its OG
- defer.returnValue(og)
-
- def _rebase_url(self, url, base):
- base = list(urlparse.urlparse(base))
- url = list(urlparse.urlparse(url))
- if not url[0]: # fix up schema
- url[0] = base[0] or "http"
- if not url[1]: # fix up hostname
- url[1] = base[1]
- if not url[2].startswith('/'):
- url[2] = re.sub(r'/[^/]+$', '/', base[2]) + url[2]
- return urlparse.urlunparse(url)
-
- @defer.inlineCallbacks
def _download_url(self, url, user):
# TODO: we should probably honour robots.txt... except in practice
# we're most likely being explicitly triggered by a human rather than a
@@ -445,17 +327,171 @@ class PreviewUrlResource(Resource):
"etag": headers["ETag"][0] if "ETag" in headers else None,
})
- def _is_media(self, content_type):
- if content_type.lower().startswith("image/"):
- return True
- def _is_html(self, content_type):
- content_type = content_type.lower()
- if (
- content_type.startswith("text/html") or
- content_type.startswith("application/xhtml")
- ):
- return True
+def decode_and_calc_og(body, media_uri, request_encoding=None):
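+ # lxml is imported lazily so that this module can still be imported
+ # when lxml is not installed.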
+ from lxml import etree
+
+ try:
+ parser = etree.HTMLParser(recover=True, encoding=request_encoding)
+ tree = etree.fromstring(body, parser)
+ og = _calc_og(tree, media_uri)
+ except UnicodeDecodeError:
+ # blindly try decoding the body as utf-8, which seems to fix
+ # the charset mismatches on https://google.com
+ parser = etree.HTMLParser(recover=True, encoding=request_encoding)
+ tree = etree.fromstring(body.decode('utf-8', 'ignore'), parser)
+ og = _calc_og(tree, media_uri)
+
+ return og
+
+
+def _calc_og(tree, media_uri):
+ # suck our tree into lxml and define our OG response.
+
+ # if we see any image URLs in the OG response, then spider them
+ # (although the client could choose to do this by asking for previews of those
+ # URLs to avoid DoSing the server)
+
+ # "og:type" : "video",
+ # "og:url" : "https://www.youtube.com/watch?v=LXDBoHyjmtw",
+ # "og:site_name" : "YouTube",
+ # "og:video:type" : "application/x-shockwave-flash",
+ # "og:description" : "Fun stuff happening here",
+ # "og:title" : "RemoteJam - Matrix team hack for Disrupt Europe Hackathon",
+ # "og:image" : "https://i.ytimg.com/vi/LXDBoHyjmtw/maxresdefault.jpg",
+ # "og:video:url" : "http://www.youtube.com/v/LXDBoHyjmtw?version=3&autohide=1",
+ # "og:video:width" : "1280"
+ # "og:video:height" : "720",
+ # "og:video:secure_url": "https://www.youtube.com/v/LXDBoHyjmtw?version=3",
+
+ og = {}
+ for tag in tree.xpath("//*/meta[starts-with(@property, 'og:')]"):
+ if 'content' in tag.attrib:
+ og[tag.attrib['property']] = tag.attrib['content']
+
+ # TODO: grab article: meta tags too, e.g.:
+
+ # "article:publisher" : "https://www.facebook.com/thethudonline" />
+ # "article:author" content="https://www.facebook.com/thethudonline" />
+ # "article:tag" content="baby" />
+ # "article:section" content="Breaking News" />
+ # "article:published_time" content="2016-03-31T19:58:24+00:00" />
+ # "article:modified_time" content="2016-04-01T18:31:53+00:00" />
+
+ if 'og:title' not in og:
+ # do some basic spidering of the HTML
+ title = tree.xpath("(//title)[1] | (//h1)[1] | (//h2)[1] | (//h3)[1]")
+ og['og:title'] = title[0].text.strip() if title else None
+
+ if 'og:image' not in og:
+ # TODO: extract a favicon failing all else
+ meta_image = tree.xpath(
+ "//*/meta[translate(@itemprop, 'IMAGE', 'image')='image']/@content"
+ )
+ if meta_image:
+ og['og:image'] = _rebase_url(meta_image[0], media_uri)
+ else:
+ # TODO: consider inlined CSS styles as well as width & height attribs
+ images = tree.xpath("//img[@src][number(@width)>10][number(@height)>10]")
+ images = sorted(images, key=lambda i: (
+ -1 * float(i.attrib['width']) * float(i.attrib['height'])
+ ))
+ if not images:
+ images = tree.xpath("//img[@src]")
+ if images:
+ og['og:image'] = images[0].attrib['src']
+
+ if 'og:description' not in og:
+ meta_description = tree.xpath(
+ "//*/meta"
+ "[translate(@name, 'DESCRIPTION', 'description')='description']"
+ "/@content")
+ if meta_description:
+ og['og:description'] = meta_description[0]
+ else:
+ # grab any text nodes which are inside the <body/> tag...
+ # unless they are within an HTML5 semantic markup tag...
+ # <header/>, <nav/>, <aside/>, <footer/>
+ # ...or if they are within a <script/> or <style/> tag.
+ # This is a very very very coarse approximation to a plain text
+ # render of the page.
+
+ # We don't just use XPATH here as that is slow on some machines.
+
+ from lxml import etree
+
+ TAGS_TO_REMOVE = (
+ "header", "nav", "aside", "footer", "script", "style", etree.Comment
+ )
+
+ # Split all the text nodes into paragraphs (by splitting on new
+ # lines)
+ text_nodes = (
+ re.sub(r'\s+', '\n', el).strip()
+ for el in _iterate_over_text(tree.find("body"), *TAGS_TO_REMOVE)
+ )
+ og['og:description'] = summarize_paragraphs(text_nodes)
+
+ # TODO: delete the url downloads to stop diskfilling,
+ # as we only ever cared about its OG
+ return og
+
+
+def _iterate_over_text(tree, *tags_to_ignore):
+ """Iterate over the tree returning text nodes in a depth first fashion,
+ skipping text nodes inside certain tags.
+ """
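+ # For example, for a body parsed from
+ #   <body>foo<nav>skip</nav>bar<p>baz</p></body>
+ # with "nav" in tags_to_ignore, this yields "foo", "bar", "baz": the
+ # text inside <nav> is skipped, but its tail text ("bar") is still
+ # returned.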
+ # This is basically a stack that we extend using itertools.chain.
+ # This will either consist of an element to iterate over *or* a string
+ # to be returned.
+ elements = iter([tree])
+ while True:
+ el = elements.next()
+ if isinstance(el, basestring):
+ yield el
+ elif el is not None and el.tag not in tags_to_ignore:
+ # el.text is the text before the first child, so we can immediately
+ # return it if the text exists.
+ if el.text:
+ yield el.text
+
+ # We add to the stack all the element's children, interspersed with
+ # each child's tail text (if it exists). The tail text of a node
+ # is text that comes *after* the node, so we always include it even
+ # if we ignore the child node.
+ elements = itertools.chain(
+ itertools.chain.from_iterable( # Basically a flatmap
+ [child, child.tail] if child.tail else [child]
+ for child in el.iterchildren()
+ ),
+ elements
+ )
+
+
+def _rebase_url(url, base):
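+ # Resolve a possibly-relative URL against the page it was found on, e.g.
+ # _rebase_url("logo.png", "http://example.com/blog/index.html") gives
+ # "http://example.com/blog/logo.png".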
+ base = list(urlparse.urlparse(base))
+ url = list(urlparse.urlparse(url))
+ if not url[0]: # fix up schema
+ url[0] = base[0] or "http"
+ if not url[1]: # fix up hostname
+ url[1] = base[1]
+ if not url[2].startswith('/'):
+ url[2] = re.sub(r'/[^/]+$', '/', base[2]) + url[2]
+ return urlparse.urlunparse(url)
+
+
+def _is_media(content_type):
+ if content_type.lower().startswith("image/"):
+ return True
+
+
+def _is_html(content_type):
+ content_type = content_type.lower()
+ if (
+ content_type.startswith("text/html") or
+ content_type.startswith("application/xhtml")
+ ):
+ return True
def summarize_paragraphs(text_nodes, min_size=200, max_size=500):