diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py
index a03774c98a..e1506deb2b 100644
--- a/synapse/rest/admin/__init__.py
+++ b/synapse/rest/admin/__init__.py
@@ -267,7 +267,7 @@ def register_servlets_for_client_rest_resource(
# Load the media repo ones if we're using them. Otherwise load the servlets which
# don't need a media repo (typically readonly admin APIs).
- if hs.config.can_load_media_repo:
+ if hs.config.media.can_load_media_repo:
register_servlets_for_media_repo(hs, http_server)
else:
ListMediaInRoom(hs).register(http_server)
diff --git a/synapse/rest/admin/registration_tokens.py b/synapse/rest/admin/registration_tokens.py
index 5a1c929d85..aba48f6e7b 100644
--- a/synapse/rest/admin/registration_tokens.py
+++ b/synapse/rest/admin/registration_tokens.py
@@ -113,7 +113,7 @@ class NewRegistrationTokenRestServlet(RestServlet):
self.store = hs.get_datastore()
self.clock = hs.get_clock()
# A string of all the characters allowed to be in a registration_token
- self.allowed_chars = string.ascii_letters + string.digits + "-_"
+ self.allowed_chars = string.ascii_letters + string.digits + "._~-"
self.allowed_chars_set = set(self.allowed_chars)
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
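
The widened character set above matches the "unreserved" characters of RFC 3986, keeping generated tokens URL-safe. A minimal sketch of the validation this set enables (the function name is illustrative, not Synapse's):

import string

# Unreserved URL characters per RFC 3986, matching allowed_chars above.
ALLOWED_CHARS = set(string.ascii_letters + string.digits + "._~-")

def is_valid_registration_token(token: str) -> bool:
    # A token must be non-empty and contain only allowed characters.
    return bool(token) and all(c in ALLOWED_CHARS for c in token)

assert is_valid_registration_token("abc-DEF_123.~")
assert not is_valid_registration_token("bad token!")
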
diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py
index ad83d4b54c..a4823ca6e7 100644
--- a/synapse/rest/admin/rooms.py
+++ b/synapse/rest/admin/rooms.py
@@ -125,7 +125,7 @@ class ListRoomRestServlet(RestServlet):
errcode=Codes.INVALID_PARAM,
)
- search_term = parse_string(request, "search_term")
+ search_term = parse_string(request, "search_term", encoding="utf-8")
if search_term == "":
raise SynapseError(
400,
@@ -213,7 +213,7 @@ class RoomRestServlet(RestServlet):
members = await self.store.get_users_in_room(room_id)
ret["joined_local_devices"] = await self.store.count_devices_by_users(members)
- return (200, ret)
+ return 200, ret
async def on_DELETE(
self, request: SynapseRequest, room_id: str
@@ -668,4 +668,4 @@ async def _delete_room(
if purge:
await pagination_handler.purge_room(room_id, force=force_purge)
- return (200, ret)
+ return 200, ret
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py
index 681e491826..46bfec4623 100644
--- a/synapse/rest/admin/users.py
+++ b/synapse/rest/admin/users.py
@@ -368,8 +368,8 @@ class UserRestServletV2(RestServlet):
user_id, medium, address, current_time
)
if (
- self.hs.config.email_enable_notifs
- and self.hs.config.email_notif_for_new_users
+ self.hs.config.email.email_enable_notifs
+ and self.hs.config.email.email_notif_for_new_users
):
await self.pusher_pool.add_pusher(
user_id=user_id,
diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py
index aefaaa8ae8..6a7608d60b 100644
--- a/synapse/rest/client/account.py
+++ b/synapse/rest/client/account.py
@@ -64,17 +64,17 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
self.config = hs.config
self.identity_handler = hs.get_identity_handler()
- if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
+ if self.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
self.mailer = Mailer(
hs=self.hs,
- app_name=self.config.email_app_name,
- template_html=self.config.email_password_reset_template_html,
- template_text=self.config.email_password_reset_template_text,
+ app_name=self.config.email.email_app_name,
+ template_html=self.config.email.email_password_reset_template_html,
+ template_text=self.config.email.email_password_reset_template_text,
)
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
- if self.config.threepid_behaviour_email == ThreepidBehaviour.OFF:
- if self.config.local_threepid_handling_disabled_due_to_email_config:
+ if self.config.email.threepid_behaviour_email == ThreepidBehaviour.OFF:
+ if self.config.email.local_threepid_handling_disabled_due_to_email_config:
logger.warning(
"User password resets have been disabled due to lack of email config"
)
@@ -129,7 +129,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
raise SynapseError(400, "Email not found", Codes.THREEPID_NOT_FOUND)
- if self.config.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
+ if self.config.email.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
assert self.hs.config.account_threepid_delegate_email
# Have the configured identity server handle the request
@@ -349,17 +349,17 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
self.identity_handler = hs.get_identity_handler()
self.store = self.hs.get_datastore()
- if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
+ if self.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
self.mailer = Mailer(
hs=self.hs,
- app_name=self.config.email_app_name,
- template_html=self.config.email_add_threepid_template_html,
- template_text=self.config.email_add_threepid_template_text,
+ app_name=self.config.email.email_app_name,
+ template_html=self.config.email.email_add_threepid_template_html,
+ template_text=self.config.email.email_add_threepid_template_text,
)
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
- if self.config.threepid_behaviour_email == ThreepidBehaviour.OFF:
- if self.config.local_threepid_handling_disabled_due_to_email_config:
+ if self.config.email.threepid_behaviour_email == ThreepidBehaviour.OFF:
+ if self.config.email.local_threepid_handling_disabled_due_to_email_config:
logger.warning(
"Adding emails have been disabled due to lack of an email config"
)
@@ -413,7 +413,7 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE)
- if self.config.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
+ if self.config.email.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
assert self.hs.config.account_threepid_delegate_email
# Have the configured identity server handle the request
@@ -534,21 +534,21 @@ class AddThreepidEmailSubmitTokenServlet(RestServlet):
self.config = hs.config
self.clock = hs.get_clock()
self.store = hs.get_datastore()
- if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
+ if self.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
self._failure_email_template = (
- self.config.email_add_threepid_template_failure_html
+ self.config.email.email_add_threepid_template_failure_html
)
async def on_GET(self, request: Request) -> None:
- if self.config.threepid_behaviour_email == ThreepidBehaviour.OFF:
- if self.config.local_threepid_handling_disabled_due_to_email_config:
+ if self.config.email.threepid_behaviour_email == ThreepidBehaviour.OFF:
+ if self.config.email.local_threepid_handling_disabled_due_to_email_config:
logger.warning(
"Adding emails have been disabled due to lack of an email config"
)
raise SynapseError(
400, "Adding an email to your account is disabled on this server"
)
- elif self.config.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
+ elif self.config.email.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
raise SynapseError(
400,
"This homeserver is not validating threepids. Use an identity server "
@@ -575,7 +575,7 @@ class AddThreepidEmailSubmitTokenServlet(RestServlet):
return None
# Otherwise show the success template
- html = self.config.email_add_threepid_template_success_html_content
+ html = self.config.email.email_add_threepid_template_success_html_content
status_code = 200
except ThreepidValidationError as e:
status_code = e.code
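
Every servlet in this file branches on the same three-valued email setting. A hedged sketch of that dispatch, assuming enum values shaped like Synapse's ThreepidBehaviour (the handler bodies are illustrative):

from enum import Enum

class ThreepidBehaviour(Enum):
    OFF = "off"        # threepid operations rejected
    REMOTE = "remote"  # validation delegated to an identity server
    LOCAL = "local"    # validation email sent by this homeserver

def handle_threepid_request(behaviour: ThreepidBehaviour) -> str:
    if behaviour == ThreepidBehaviour.OFF:
        raise RuntimeError("Adding an email to your account is disabled on this server")
    if behaviour == ThreepidBehaviour.REMOTE:
        return "delegated to configured identity server"
    return "validation email sent locally via Mailer"

print(handle_threepid_request(ThreepidBehaviour.LOCAL))
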
diff --git a/synapse/rest/client/auth.py b/synapse/rest/client/auth.py
index 7bb7801472..282861fae2 100644
--- a/synapse/rest/client/auth.py
+++ b/synapse/rest/client/auth.py
@@ -47,7 +47,7 @@ class AuthRestServlet(RestServlet):
self.auth = hs.get_auth()
self.auth_handler = hs.get_auth_handler()
self.registration_handler = hs.get_registration_handler()
- self.recaptcha_template = hs.config.recaptcha_template
+ self.recaptcha_template = hs.config.captcha.recaptcha_template
self.terms_template = hs.config.terms_template
self.registration_token_template = hs.config.registration_token_template
self.success_template = hs.config.fallback_success_template
@@ -62,7 +62,7 @@ class AuthRestServlet(RestServlet):
session=session,
myurl="%s/r0/auth/%s/fallback/web"
% (CLIENT_API_PREFIX, LoginType.RECAPTCHA),
- sitekey=self.hs.config.recaptcha_public_key,
+ sitekey=self.hs.config.captcha.recaptcha_public_key,
)
elif stagetype == LoginType.TERMS:
html = self.terms_template.render(
@@ -70,7 +70,7 @@ class AuthRestServlet(RestServlet):
terms_url="%s_matrix/consent?v=%s"
% (
self.hs.config.server.public_baseurl,
- self.hs.config.user_consent_version,
+ self.hs.config.consent.user_consent_version,
),
myurl="%s/r0/auth/%s/fallback/web"
% (CLIENT_API_PREFIX, LoginType.TERMS),
@@ -118,7 +118,7 @@ class AuthRestServlet(RestServlet):
session=session,
myurl="%s/r0/auth/%s/fallback/web"
% (CLIENT_API_PREFIX, LoginType.RECAPTCHA),
- sitekey=self.hs.config.recaptcha_public_key,
+ sitekey=self.hs.config.captcha.recaptcha_public_key,
error=e.msg,
)
else:
@@ -139,7 +139,7 @@ class AuthRestServlet(RestServlet):
terms_url="%s_matrix/consent?v=%s"
% (
self.hs.config.server.public_baseurl,
- self.hs.config.user_consent_version,
+ self.hs.config.consent.user_consent_version,
),
myurl="%s/r0/auth/%s/fallback/web"
% (CLIENT_API_PREFIX, LoginType.TERMS),
diff --git a/synapse/rest/client/devices.py b/synapse/rest/client/devices.py
index 25bc3c8f47..8566dc5cb5 100644
--- a/synapse/rest/client/devices.py
+++ b/synapse/rest/client/devices.py
@@ -211,7 +211,7 @@ class DehydratedDeviceServlet(RestServlet):
if dehydrated_device is not None:
(device_id, device_data) = dehydrated_device
result = {"device_id": device_id, "device_data": device_data}
- return (200, result)
+ return 200, result
else:
raise errors.NotFoundError("No dehydrated device available")
@@ -293,7 +293,7 @@ class ClaimDehydratedDeviceServlet(RestServlet):
submission["device_id"],
)
- return (200, result)
+ return 200, result
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py
index a6ede7e2f3..fa5c173f4b 100644
--- a/synapse/rest/client/login.py
+++ b/synapse/rest/client/login.py
@@ -69,16 +69,16 @@ class LoginRestServlet(RestServlet):
self.hs = hs
# JWT configuration variables.
- self.jwt_enabled = hs.config.jwt_enabled
- self.jwt_secret = hs.config.jwt_secret
- self.jwt_algorithm = hs.config.jwt_algorithm
- self.jwt_issuer = hs.config.jwt_issuer
- self.jwt_audiences = hs.config.jwt_audiences
+ self.jwt_enabled = hs.config.jwt.jwt_enabled
+ self.jwt_secret = hs.config.jwt.jwt_secret
+ self.jwt_algorithm = hs.config.jwt.jwt_algorithm
+ self.jwt_issuer = hs.config.jwt.jwt_issuer
+ self.jwt_audiences = hs.config.jwt.jwt_audiences
# SSO configuration.
- self.saml2_enabled = hs.config.saml2_enabled
- self.cas_enabled = hs.config.cas_enabled
- self.oidc_enabled = hs.config.oidc_enabled
+ self.saml2_enabled = hs.config.saml2.saml2_enabled
+ self.cas_enabled = hs.config.cas.cas_enabled
+ self.oidc_enabled = hs.config.oidc.oidc_enabled
self._msc2918_enabled = hs.config.access_token_lifetime is not None
self.auth = hs.get_auth()
@@ -559,7 +559,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
if hs.config.access_token_lifetime is not None:
RefreshTokenServlet(hs).register(http_server)
SsoRedirectServlet(hs).register(http_server)
- if hs.config.cas_enabled:
+ if hs.config.cas.cas_enabled:
CasTicketServlet(hs).register(http_server)
diff --git a/synapse/rest/client/password_policy.py b/synapse/rest/client/password_policy.py
index 6d64efb165..9f1908004b 100644
--- a/synapse/rest/client/password_policy.py
+++ b/synapse/rest/client/password_policy.py
@@ -35,12 +35,12 @@ class PasswordPolicyServlet(RestServlet):
def __init__(self, hs: "HomeServer"):
super().__init__()
- self.policy = hs.config.password_policy
- self.enabled = hs.config.password_policy_enabled
+ self.policy = hs.config.auth.password_policy
+ self.enabled = hs.config.auth.password_policy_enabled
def on_GET(self, request: Request) -> Tuple[int, JsonDict]:
if not self.enabled or not self.policy:
- return (200, {})
+ return 200, {}
policy = {}
@@ -54,7 +54,7 @@ class PasswordPolicyServlet(RestServlet):
if param in self.policy:
policy["m.%s" % param] = self.policy[param]
- return (200, policy)
+ return 200, policy
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py
index abe4d7e205..48b0062cf4 100644
--- a/synapse/rest/client/register.py
+++ b/synapse/rest/client/register.py
@@ -75,17 +75,19 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
self.identity_handler = hs.get_identity_handler()
self.config = hs.config
- if self.hs.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
+ if self.hs.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
self.mailer = Mailer(
hs=self.hs,
- app_name=self.config.email_app_name,
- template_html=self.config.email_registration_template_html,
- template_text=self.config.email_registration_template_text,
+ app_name=self.config.email.email_app_name,
+ template_html=self.config.email.email_registration_template_html,
+ template_text=self.config.email.email_registration_template_text,
)
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
- if self.hs.config.threepid_behaviour_email == ThreepidBehaviour.OFF:
- if self.hs.config.local_threepid_handling_disabled_due_to_email_config:
+ if self.hs.config.email.threepid_behaviour_email == ThreepidBehaviour.OFF:
+ if (
+ self.hs.config.email.local_threepid_handling_disabled_due_to_email_config
+ ):
logger.warning(
"Email registration has been disabled due to lack of email config"
)
@@ -137,7 +139,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE)
- if self.config.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
+ if self.config.email.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
assert self.hs.config.account_threepid_delegate_email
# Have the configured identity server handle the request
@@ -259,9 +261,9 @@ class RegistrationSubmitTokenServlet(RestServlet):
self.clock = hs.get_clock()
self.store = hs.get_datastore()
- if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
+ if self.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
self._failure_email_template = (
- self.config.email_registration_template_failure_html
+ self.config.email.email_registration_template_failure_html
)
async def on_GET(self, request: Request, medium: str) -> None:
@@ -269,8 +271,8 @@ class RegistrationSubmitTokenServlet(RestServlet):
raise SynapseError(
400, "This medium is currently not supported for registration"
)
- if self.config.threepid_behaviour_email == ThreepidBehaviour.OFF:
- if self.config.local_threepid_handling_disabled_due_to_email_config:
+ if self.config.email.threepid_behaviour_email == ThreepidBehaviour.OFF:
+ if self.config.email.local_threepid_handling_disabled_due_to_email_config:
logger.warning(
"User registration via email has been disabled due to lack of email config"
)
@@ -303,7 +305,7 @@ class RegistrationSubmitTokenServlet(RestServlet):
return None
# Otherwise show the success template
- html = self.config.email_registration_template_success_html_content
+ html = self.config.email.email_registration_template_success_html_content
status_code = 200
except ThreepidValidationError as e:
status_code = e.code
@@ -897,12 +899,12 @@ def _calculate_registration_flows(
flows.append([LoginType.MSISDN, LoginType.EMAIL_IDENTITY])
# Prepend m.login.terms to all flows if we're requiring consent
- if config.user_consent_at_registration:
+ if config.consent.user_consent_at_registration:
for flow in flows:
flow.insert(0, LoginType.TERMS)
# Prepend recaptcha to all flows if we're requiring captcha
- if config.enable_registration_captcha:
+ if config.captcha.enable_registration_captcha:
for flow in flows:
flow.insert(0, LoginType.RECAPTCHA)
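
The two renamed config options gate the same mechanism: a stage is prepended to every registration flow, so clients must complete it first. A minimal sketch with Synapse's stage names as plain strings:

# Each flow is the ordered list of UIA stages a client must complete.
flows = [["m.login.email.identity"], ["m.login.msisdn", "m.login.email.identity"]]

user_consent_at_registration = True   # config.consent.*
enable_registration_captcha = True    # config.captcha.*

if user_consent_at_registration:
    for flow in flows:
        flow.insert(0, "m.login.terms")

if enable_registration_captcha:
    for flow in flows:
        flow.insert(0, "m.login.recaptcha")

# Captcha is prepended last, so it ends up first in every flow.
assert flows[0] == ["m.login.recaptcha", "m.login.terms", "m.login.email.identity"]
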
diff --git a/synapse/rest/client/room_batch.py b/synapse/rest/client/room_batch.py
index d466edeec2..bf14ec384e 100644
--- a/synapse/rest/client/room_batch.py
+++ b/synapse/rest/client/room_batch.py
@@ -43,25 +43,25 @@ logger = logging.getLogger(__name__)
class RoomBatchSendEventRestServlet(RestServlet):
"""
- API endpoint which can insert a chunk of events historically back in time
+ API endpoint which can insert a batch of events historically back in time
next to the given `prev_event`.
- `chunk_id` comes from `next_chunk_id `in the response of the batch send
- endpoint and is derived from the "insertion" events added to each chunk.
+ `batch_id` comes from `next_batch_id` in the response of the batch send
+ endpoint and is derived from the "insertion" events added to each batch.
It's not required for the first batch send.
`state_events_at_start` is used to define the historical state events
needed to auth the events like join events. These events will float
outside of the normal DAG as outlier's and won't be visible in the chat
- history which also allows us to insert multiple chunks without having a bunch
- of `@mxid joined the room` noise between each chunk.
+ history which also allows us to insert multiple batches without having a bunch
+ of `@mxid joined the room` noise between each batch.
- `events` is chronological chunk/list of events you want to insert.
- There is a reverse-chronological constraint on chunks so once you insert
+ `events` is a chronological list of events you want to insert.
+ There is a reverse-chronological constraint on batches so once you insert
some messages, you can only insert older ones after that.
- tldr; Insert chunks from your most recent history -> oldest history.
+ tldr; Insert batches from your most recent history -> oldest history.
- POST /_matrix/client/unstable/org.matrix.msc2716/rooms/<roomID>/batch_send?prev_event=<eventID>&chunk_id=<chunkID>
+ POST /_matrix/client/unstable/org.matrix.msc2716/rooms/<roomID>/batch_send?prev_event_id=<eventID>&batch_id=<batchID>
{
"events": [ ... ],
"state_events_at_start": [ ... ]
@@ -129,7 +129,7 @@ class RoomBatchSendEventRestServlet(RestServlet):
self, sender: str, room_id: str, origin_server_ts: int
) -> JsonDict:
"""Creates an event dict for an "insertion" event with the proper fields
- and a random chunk ID.
+ and a random batch ID.
Args:
sender: The event author MXID
@@ -140,13 +140,13 @@ class RoomBatchSendEventRestServlet(RestServlet):
The new event dictionary to insert.
"""
- next_chunk_id = random_string(8)
+ next_batch_id = random_string(8)
insertion_event = {
"type": EventTypes.MSC2716_INSERTION,
"sender": sender,
"room_id": room_id,
"content": {
- EventContentFields.MSC2716_NEXT_CHUNK_ID: next_chunk_id,
+ EventContentFields.MSC2716_NEXT_BATCH_ID: next_batch_id,
EventContentFields.MSC2716_HISTORICAL: True,
},
"origin_server_ts": origin_server_ts,
@@ -188,24 +188,26 @@ class RoomBatchSendEventRestServlet(RestServlet):
assert_params_in_dict(body, ["state_events_at_start", "events"])
assert request.args is not None
- prev_events_from_query = parse_strings_from_args(request.args, "prev_event")
- chunk_id_from_query = parse_string(request, "chunk_id")
+ prev_event_ids_from_query = parse_strings_from_args(
+ request.args, "prev_event_id"
+ )
+ batch_id_from_query = parse_string(request, "batch_id")
- if prev_events_from_query is None:
+ if prev_event_ids_from_query is None:
raise SynapseError(
HTTPStatus.BAD_REQUEST,
"prev_event query parameter is required when inserting historical messages back in time",
errcode=Codes.MISSING_PARAM,
)
- # For the event we are inserting next to (`prev_events_from_query`),
+ # For the event we are inserting next to (`prev_event_ids_from_query`),
# find the most recent auth events (derived from state events) that
# allowed that message to be sent. We will use that as a base
# to auth our historical messages against.
(
most_recent_prev_event_id,
_,
- ) = await self.store.get_max_depth_of(prev_events_from_query)
+ ) = await self.store.get_max_depth_of(prev_event_ids_from_query)
# mapping from (type, state_key) -> state_event_id
prev_state_map = await self.state_store.get_state_ids_for_event(
most_recent_prev_event_id
@@ -286,30 +288,30 @@ class RoomBatchSendEventRestServlet(RestServlet):
events_to_create = body["events"]
inherited_depth = await self._inherit_depth_from_prev_ids(
- prev_events_from_query
+ prev_event_ids_from_query
)
- # Figure out which chunk to connect to. If they passed in
- # chunk_id_from_query let's use it. The chunk ID passed in comes
- # from the chunk_id in the "insertion" event from the previous chunk.
- last_event_in_chunk = events_to_create[-1]
- chunk_id_to_connect_to = chunk_id_from_query
+ # Figure out which batch to connect to. If they passed in
+ # batch_id_from_query let's use it. The batch ID passed in comes
+ # from the batch_id in the "insertion" event from the previous batch.
+ last_event_in_batch = events_to_create[-1]
+ batch_id_to_connect_to = batch_id_from_query
base_insertion_event = None
- if chunk_id_from_query:
+ if batch_id_from_query:
# All but the first base insertion event should point at a fake
# event, which causes the HS to ask for the state at the start of
- # the chunk later.
+ # the batch later.
prev_event_ids = [fake_prev_event_id]
- # Verify the chunk_id_from_query corresponds to an actual insertion event
- # and have the chunk connected.
+ # Verify the batch_id_from_query corresponds to an actual insertion event
+ # and have the batch connected.
corresponding_insertion_event_id = (
- await self.store.get_insertion_event_by_chunk_id(chunk_id_from_query)
+ await self.store.get_insertion_event_by_batch_id(batch_id_from_query)
)
if corresponding_insertion_event_id is None:
raise SynapseError(
400,
- "No insertion event corresponds to the given ?chunk_id",
+ "No insertion event corresponds to the given ?batch_id",
errcode=Codes.INVALID_PARAM,
)
pass
@@ -321,12 +323,12 @@ class RoomBatchSendEventRestServlet(RestServlet):
# an insertion event), in which case we just create a new insertion event
# that can then get pointed to by a "marker" event later.
else:
- prev_event_ids = prev_events_from_query
+ prev_event_ids = prev_event_ids_from_query
base_insertion_event_dict = self._create_insertion_event_dict(
sender=requester.user.to_string(),
room_id=room_id,
- origin_server_ts=last_event_in_chunk["origin_server_ts"],
+ origin_server_ts=last_event_in_batch["origin_server_ts"],
)
base_insertion_event_dict["prev_events"] = prev_event_ids.copy()
@@ -345,38 +347,38 @@ class RoomBatchSendEventRestServlet(RestServlet):
depth=inherited_depth,
)
- chunk_id_to_connect_to = base_insertion_event["content"][
- EventContentFields.MSC2716_NEXT_CHUNK_ID
+ batch_id_to_connect_to = base_insertion_event["content"][
+ EventContentFields.MSC2716_NEXT_BATCH_ID
]
- # Connect this current chunk to the insertion event from the previous chunk
- chunk_event = {
- "type": EventTypes.MSC2716_CHUNK,
+ # Connect this current batch to the insertion event from the previous batch
+ batch_event = {
+ "type": EventTypes.MSC2716_BATCH,
"sender": requester.user.to_string(),
"room_id": room_id,
"content": {
- EventContentFields.MSC2716_CHUNK_ID: chunk_id_to_connect_to,
+ EventContentFields.MSC2716_BATCH_ID: batch_id_to_connect_to,
EventContentFields.MSC2716_HISTORICAL: True,
},
- # Since the chunk event is put at the end of the chunk,
+ # Since the batch event is put at the end of the batch,
# where the newest-in-time event is, copy the origin_server_ts from
# the last event we're inserting
- "origin_server_ts": last_event_in_chunk["origin_server_ts"],
+ "origin_server_ts": last_event_in_batch["origin_server_ts"],
}
- # Add the chunk event to the end of the chunk (newest-in-time)
- events_to_create.append(chunk_event)
+ # Add the batch event to the end of the batch (newest-in-time)
+ events_to_create.append(batch_event)
- # Add an "insertion" event to the start of each chunk (next to the oldest-in-time
- # event in the chunk) so the next chunk can be connected to this one.
+ # Add an "insertion" event to the start of each batch (next to the oldest-in-time
+ # event in the batch) so the next batch can be connected to this one.
insertion_event = self._create_insertion_event_dict(
sender=requester.user.to_string(),
room_id=room_id,
- # Since the insertion event is put at the start of the chunk,
+ # Since the insertion event is put at the start of the batch,
# where the oldest-in-time event is, copy the origin_server_ts from
# the first event we're inserting
origin_server_ts=events_to_create[0]["origin_server_ts"],
)
- # Prepend the insertion event to the start of the chunk (oldest-in-time)
+ # Prepend the insertion event to the start of the batch (oldest-in-time)
events_to_create = [insertion_event] + events_to_create
event_ids = []
@@ -437,17 +439,17 @@ class RoomBatchSendEventRestServlet(RestServlet):
)
insertion_event_id = event_ids[0]
- chunk_event_id = event_ids[-1]
+ batch_event_id = event_ids[-1]
historical_event_ids = event_ids[1:-1]
response_dict = {
"state_event_ids": state_event_ids_at_start,
"event_ids": historical_event_ids,
- "next_chunk_id": insertion_event["content"][
- EventContentFields.MSC2716_NEXT_CHUNK_ID
+ "next_batch_id": insertion_event["content"][
+ EventContentFields.MSC2716_NEXT_BATCH_ID
],
"insertion_event_id": insertion_event_id,
- "chunk_event_id": chunk_event_id,
+ "batch_event_id": batch_event_id,
}
if base_insertion_event is not None:
response_dict["base_insertion_event_id"] = base_insertion_event.event_id
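
The renamed response fields chain batches together: each response's next_batch_id becomes the next request's batch_id query parameter, walking history from newest to oldest. A hedged client-side sketch (endpoint path is from the docstring above; tokens and payloads are placeholders):

import requests

def backfill(base_url: str, room_id: str, prev_event_id: str, token: str, batches: list) -> None:
    """Send batches from most recent history to oldest, chaining batch IDs."""
    endpoint = (
        f"{base_url}/_matrix/client/unstable/org.matrix.msc2716"
        f"/rooms/{room_id}/batch_send"
    )
    batch_id = None  # not required for the first batch
    for batch in batches:
        params = {"prev_event_id": prev_event_id}
        if batch_id is not None:
            params["batch_id"] = batch_id
        resp = requests.post(
            endpoint,
            params=params,
            json=batch,  # {"events": [...], "state_events_at_start": [...]}
            headers={"Authorization": f"Bearer {token}"},
        )
        resp.raise_for_status()
        # The next (older) batch connects to the insertion event of this one.
        batch_id = resp.json()["next_batch_id"]
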
diff --git a/synapse/rest/client/user_directory.py b/synapse/rest/client/user_directory.py
index 8852811114..a47d9bd01d 100644
--- a/synapse/rest/client/user_directory.py
+++ b/synapse/rest/client/user_directory.py
@@ -58,7 +58,7 @@ class UserDirectorySearchRestServlet(RestServlet):
requester = await self.auth.get_user_by_req(request, allow_guest=False)
user_id = requester.user.to_string()
- if not self.hs.config.user_directory_search_enabled:
+ if not self.hs.config.userdirectory.user_directory_search_enabled:
return 200, {"limited": False, "results": []}
body = parse_json_object_from_request(request)
diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py
index a1a815cf82..b52a296d8f 100644
--- a/synapse/rest/client/versions.py
+++ b/synapse/rest/client/versions.py
@@ -42,15 +42,15 @@ class VersionsRestServlet(RestServlet):
# Calculate these once since they shouldn't change after start-up.
self.e2ee_forced_public = (
RoomCreationPreset.PUBLIC_CHAT
- in self.config.encryption_enabled_by_default_for_room_presets
+ in self.config.room.encryption_enabled_by_default_for_room_presets
)
self.e2ee_forced_private = (
RoomCreationPreset.PRIVATE_CHAT
- in self.config.encryption_enabled_by_default_for_room_presets
+ in self.config.room.encryption_enabled_by_default_for_room_presets
)
self.e2ee_forced_trusted_private = (
RoomCreationPreset.TRUSTED_PRIVATE_CHAT
- in self.config.encryption_enabled_by_default_for_room_presets
+ in self.config.room.encryption_enabled_by_default_for_room_presets
)
def on_GET(self, request: Request) -> Tuple[int, JsonDict]:
diff --git a/synapse/rest/client/voip.py b/synapse/rest/client/voip.py
index 9d46ed3af3..ea2b8aa45f 100644
--- a/synapse/rest/client/voip.py
+++ b/synapse/rest/client/voip.py
@@ -37,14 +37,14 @@ class VoipRestServlet(RestServlet):
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(
- request, self.hs.config.turn_allow_guests
+ request, self.hs.config.voip.turn_allow_guests
)
- turnUris = self.hs.config.turn_uris
- turnSecret = self.hs.config.turn_shared_secret
- turnUsername = self.hs.config.turn_username
- turnPassword = self.hs.config.turn_password
- userLifetime = self.hs.config.turn_user_lifetime
+ turnUris = self.hs.config.voip.turn_uris
+ turnSecret = self.hs.config.voip.turn_shared_secret
+ turnUsername = self.hs.config.voip.turn_username
+ turnPassword = self.hs.config.voip.turn_password
+ userLifetime = self.hs.config.voip.turn_user_lifetime
if turnUris and turnSecret and userLifetime:
expiry = (self.hs.get_clock().time_msec() + userLifetime) / 1000
diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py
index 06e0fbde22..3d2afacc50 100644
--- a/synapse/rest/consent/consent_resource.py
+++ b/synapse/rest/consent/consent_resource.py
@@ -84,14 +84,15 @@ class ConsentResource(DirectServeHtmlResource):
# this is required by the request_handler wrapper
self.clock = hs.get_clock()
- self._default_consent_version = hs.config.user_consent_version
- if self._default_consent_version is None:
+ # Consent must be configured to create this resource.
+ default_consent_version = hs.config.consent.user_consent_version
+ consent_template_directory = hs.config.consent.user_consent_template_dir
+ if default_consent_version is None or consent_template_directory is None:
raise ConfigError(
"Consent resource is enabled but user_consent section is "
"missing in config file."
)
-
- consent_template_directory = hs.config.user_consent_template_dir
+ self._default_consent_version = default_consent_version
# TODO: switch to synapse.util.templates.build_jinja_env
loader = jinja2.FileSystemLoader(consent_template_directory)
@@ -99,13 +100,13 @@ class ConsentResource(DirectServeHtmlResource):
loader=loader, autoescape=jinja2.select_autoescape(["html", "htm", "xml"])
)
- if hs.config.form_secret is None:
+ if hs.config.key.form_secret is None:
raise ConfigError(
"Consent resource is enabled but form_secret is not set in "
"config file. It should be set to an arbitrary secret string."
)
- self._hmac_secret = hs.config.form_secret.encode("utf-8")
+ self._hmac_secret = hs.config.key.form_secret.encode("utf-8")
async def _async_render_GET(self, request: Request) -> None:
version = parse_string(request, "v", default=self._default_consent_version)
diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py
index ebe243bcfd..12b3ae120c 100644
--- a/synapse/rest/key/v2/local_key_resource.py
+++ b/synapse/rest/key/v2/local_key_resource.py
@@ -70,19 +70,19 @@ class LocalKey(Resource):
Resource.__init__(self)
def update_response_body(self, time_now_msec: int) -> None:
- refresh_interval = self.config.key_refresh_interval
+ refresh_interval = self.config.key.key_refresh_interval
self.valid_until_ts = int(time_now_msec + refresh_interval)
self.response_body = encode_canonical_json(self.response_json_object())
def response_json_object(self) -> JsonDict:
verify_keys = {}
- for key in self.config.signing_key:
+ for key in self.config.key.signing_key:
verify_key_bytes = key.verify_key.encode()
key_id = "%s:%s" % (key.alg, key.version)
verify_keys[key_id] = {"key": encode_base64(verify_key_bytes)}
old_verify_keys = {}
- for key_id, key in self.config.old_signing_keys.items():
+ for key_id, key in self.config.key.old_signing_keys.items():
verify_key_bytes = key.encode()
old_verify_keys[key_id] = {
"key": encode_base64(verify_key_bytes),
@@ -95,13 +95,13 @@ class LocalKey(Resource):
"verify_keys": verify_keys,
"old_verify_keys": old_verify_keys,
}
- for key in self.config.signing_key:
+ for key in self.config.key.signing_key:
json_object = sign_json(json_object, self.config.server.server_name, key)
return json_object
def render_GET(self, request: Request) -> int:
time_now = self.clock.time_msec()
# Update the expiry time if less than half the interval remains.
- if time_now + self.config.key_refresh_interval / 2 > self.valid_until_ts:
+ if time_now + self.config.key.key_refresh_interval / 2 > self.valid_until_ts:
self.update_response_body(time_now)
return respond_with_json_bytes(request, 200, self.response_body)
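
The check above regenerates the cached response once less than half of key_refresh_interval remains, so served keys are never close to expiry. A minimal standalone sketch of that arithmetic (millisecond values are illustrative):

def needs_refresh(time_now_msec: int, valid_until_ts: int, refresh_interval: int) -> bool:
    # Regenerate once less than half the refresh interval of validity remains.
    return time_now_msec + refresh_interval / 2 > valid_until_ts

# Valid for another 10 minutes with a 1 hour interval: refresh now.
assert needs_refresh(time_now_msec=0, valid_until_ts=600_000, refresh_interval=3_600_000)
# Valid for another 50 minutes: serve the cached body.
assert not needs_refresh(0, 3_000_000, 3_600_000)
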
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index d8fd7938a4..3923ba8439 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -17,12 +17,11 @@ from typing import TYPE_CHECKING, Dict
from signedjson.sign import sign_json
-from twisted.web.server import Request
-
from synapse.api.errors import Codes, SynapseError
from synapse.crypto.keyring import ServerKeyFetcher
from synapse.http.server import DirectServeJsonResource, respond_with_json
from synapse.http.servlet import parse_integer, parse_json_object_from_request
+from synapse.http.site import SynapseRequest
from synapse.types import JsonDict
from synapse.util import json_decoder
from synapse.util.async_helpers import yieldable_gather_results
@@ -97,10 +96,12 @@ class RemoteKey(DirectServeJsonResource):
self.fetcher = ServerKeyFetcher(hs)
self.store = hs.get_datastore()
self.clock = hs.get_clock()
- self.federation_domain_whitelist = hs.config.federation_domain_whitelist
+ self.federation_domain_whitelist = (
+ hs.config.federation.federation_domain_whitelist
+ )
self.config = hs.config
- async def _async_render_GET(self, request: Request) -> None:
+ async def _async_render_GET(self, request: SynapseRequest) -> None:
assert request.postpath is not None
if len(request.postpath) == 1:
(server,) = request.postpath
@@ -117,7 +118,7 @@ class RemoteKey(DirectServeJsonResource):
await self.query_keys(request, query, query_remote_on_cache_miss=True)
- async def _async_render_POST(self, request: Request) -> None:
+ async def _async_render_POST(self, request: SynapseRequest) -> None:
content = parse_json_object_from_request(request)
query = content["server_keys"]
@@ -126,7 +127,7 @@ class RemoteKey(DirectServeJsonResource):
async def query_keys(
self,
- request: Request,
+ request: SynapseRequest,
query: JsonDict,
query_remote_on_cache_miss: bool = False,
) -> None:
@@ -235,7 +236,7 @@ class RemoteKey(DirectServeJsonResource):
signed_keys = []
for key_json in json_results:
key_json = json_decoder.decode(key_json.decode("utf-8"))
- for signing_key in self.config.key_server_signing_keys:
+ for signing_key in self.config.key.key_server_signing_keys:
key_json = sign_json(
key_json, self.config.server.server_name, signing_key
)
diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py
index 7c881f2bdb..014fa893d6 100644
--- a/synapse/rest/media/v1/_base.py
+++ b/synapse/rest/media/v1/_base.py
@@ -27,6 +27,7 @@ from twisted.web.server import Request
from synapse.api.errors import Codes, SynapseError, cs_error
from synapse.http.server import finish_request, respond_with_json
+from synapse.http.site import SynapseRequest
from synapse.logging.context import make_deferred_yieldable
from synapse.util.stringutils import is_ascii
@@ -74,7 +75,7 @@ def parse_media_id(request: Request) -> Tuple[str, str, Optional[str]]:
)
-def respond_404(request: Request) -> None:
+def respond_404(request: SynapseRequest) -> None:
respond_with_json(
request,
404,
@@ -84,7 +85,7 @@ def respond_404(request: Request) -> None:
async def respond_with_file(
- request: Request,
+ request: SynapseRequest,
media_type: str,
file_path: str,
file_size: Optional[int] = None,
@@ -221,7 +222,7 @@ def _can_encode_filename_as_token(x: str) -> bool:
async def respond_with_responder(
- request: Request,
+ request: SynapseRequest,
responder: "Optional[Responder]",
media_type: str,
file_size: Optional[int],
diff --git a/synapse/rest/media/v1/config_resource.py b/synapse/rest/media/v1/config_resource.py
index a1d36e5cf1..a95804d327 100644
--- a/synapse/rest/media/v1/config_resource.py
+++ b/synapse/rest/media/v1/config_resource.py
@@ -16,8 +16,6 @@
from typing import TYPE_CHECKING
-from twisted.web.server import Request
-
from synapse.http.server import DirectServeJsonResource, respond_with_json
from synapse.http.site import SynapseRequest
@@ -33,11 +31,11 @@ class MediaConfigResource(DirectServeJsonResource):
config = hs.config
self.clock = hs.get_clock()
self.auth = hs.get_auth()
- self.limits_dict = {"m.upload.size": config.max_upload_size}
+ self.limits_dict = {"m.upload.size": config.media.max_upload_size}
async def _async_render_GET(self, request: SynapseRequest) -> None:
await self.auth.get_user_by_req(request)
respond_with_json(request, 200, self.limits_dict, send_cors=True)
- async def _async_render_OPTIONS(self, request: Request) -> None:
+ async def _async_render_OPTIONS(self, request: SynapseRequest) -> None:
respond_with_json(request, 200, {}, send_cors=True)
diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py
index d6d938953e..6180fa575e 100644
--- a/synapse/rest/media/v1/download_resource.py
+++ b/synapse/rest/media/v1/download_resource.py
@@ -15,10 +15,9 @@
import logging
from typing import TYPE_CHECKING
-from twisted.web.server import Request
-
from synapse.http.server import DirectServeJsonResource, set_cors_headers
from synapse.http.servlet import parse_boolean
+from synapse.http.site import SynapseRequest
from ._base import parse_media_id, respond_404
@@ -37,7 +36,7 @@ class DownloadResource(DirectServeJsonResource):
self.media_repo = media_repo
self.server_name = hs.hostname
- async def _async_render_GET(self, request: Request) -> None:
+ async def _async_render_GET(self, request: SynapseRequest) -> None:
set_cors_headers(request)
request.setHeader(
b"Content-Security-Policy",
diff --git a/synapse/rest/media/v1/filepath.py b/synapse/rest/media/v1/filepath.py
index 39bbe4e874..08bd85f664 100644
--- a/synapse/rest/media/v1/filepath.py
+++ b/synapse/rest/media/v1/filepath.py
@@ -195,23 +195,24 @@ class MediaFilePaths:
url_cache_thumbnail = _wrap_in_base_path(url_cache_thumbnail_rel)
- def url_cache_thumbnail_directory(self, media_id: str) -> str:
+ def url_cache_thumbnail_directory_rel(self, media_id: str) -> str:
# Media id is of the form <DATE><RANDOM_STRING>
# E.g.: 2017-09-28-fsdRDt24DS234dsf
if NEW_FORMAT_ID_RE.match(media_id):
- return os.path.join(
- self.base_path, "url_cache_thumbnails", media_id[:10], media_id[11:]
- )
+ return os.path.join("url_cache_thumbnails", media_id[:10], media_id[11:])
else:
return os.path.join(
- self.base_path,
"url_cache_thumbnails",
media_id[0:2],
media_id[2:4],
media_id[4:],
)
+ url_cache_thumbnail_directory = _wrap_in_base_path(
+ url_cache_thumbnail_directory_rel
+ )
+
def url_cache_thumbnail_dirs_to_delete(self, media_id: str) -> List[str]:
"The dirs to try and remove if we delete the media_id thumbnails"
# Media id is of the form <DATE><RANDOM_STRING>
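
The filepath change follows the module's existing pattern: path helpers are written relative to the media store, and _wrap_in_base_path derives the absolute variant mechanically. A minimal sketch of how that wrapper works (one representative helper shown; the directory layout mirrors the code above):

import functools
import os
from typing import Callable

def _wrap_in_base_path(func: Callable[..., str]) -> Callable[..., str]:
    """Take a function returning a relative path and turn it absolute."""
    @functools.wraps(func)
    def _wrapped(self, *args, **kwargs) -> str:
        return os.path.join(self.base_path, func(self, *args, **kwargs))
    return _wrapped

class MediaFilePaths:
    def __init__(self, primary_base_path: str):
        self.base_path = primary_base_path

    def url_cache_thumbnail_directory_rel(self, media_id: str) -> str:
        # Old-format IDs are split into ab/cd/<rest> directory levels.
        return os.path.join("url_cache_thumbnails", media_id[0:2], media_id[2:4], media_id[4:])

    url_cache_thumbnail_directory = _wrap_in_base_path(url_cache_thumbnail_directory_rel)

paths = MediaFilePaths("/data/media_store")
print(paths.url_cache_thumbnail_directory("abcdefgh"))
# -> /data/media_store/url_cache_thumbnails/ab/cd/efgh
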
diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index 50e4c9e29f..abd88a2d4f 100644
--- a/synapse/rest/media/v1/media_repository.py
+++ b/synapse/rest/media/v1/media_repository.py
@@ -23,7 +23,6 @@ import twisted.internet.error
import twisted.web.http
from twisted.internet.defer import Deferred
from twisted.web.resource import Resource
-from twisted.web.server import Request
from synapse.api.errors import (
FederationDeniedError,
@@ -34,6 +33,7 @@ from synapse.api.errors import (
)
from synapse.config._base import ConfigError
from synapse.config.repository import ThumbnailRequirement
+from synapse.http.site import SynapseRequest
from synapse.logging.context import defer_to_thread
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import UserID
@@ -76,29 +76,35 @@ class MediaRepository:
self.clock = hs.get_clock()
self.server_name = hs.hostname
self.store = hs.get_datastore()
- self.max_upload_size = hs.config.max_upload_size
- self.max_image_pixels = hs.config.max_image_pixels
+ self.max_upload_size = hs.config.media.max_upload_size
+ self.max_image_pixels = hs.config.media.max_image_pixels
Thumbnailer.set_limits(self.max_image_pixels)
- self.primary_base_path: str = hs.config.media_store_path
+ self.primary_base_path: str = hs.config.media.media_store_path
self.filepaths: MediaFilePaths = MediaFilePaths(self.primary_base_path)
- self.dynamic_thumbnails = hs.config.dynamic_thumbnails
- self.thumbnail_requirements = hs.config.thumbnail_requirements
+ self.dynamic_thumbnails = hs.config.media.dynamic_thumbnails
+ self.thumbnail_requirements = hs.config.media.thumbnail_requirements
self.remote_media_linearizer = Linearizer(name="media_remote")
self.recently_accessed_remotes: Set[Tuple[str, str]] = set()
self.recently_accessed_locals: Set[str] = set()
- self.federation_domain_whitelist = hs.config.federation_domain_whitelist
+ self.federation_domain_whitelist = (
+ hs.config.federation.federation_domain_whitelist
+ )
# List of StorageProviders where we should search for media and
# potentially upload to.
storage_providers = []
- for clz, provider_config, wrapper_config in hs.config.media_storage_providers:
+ for (
+ clz,
+ provider_config,
+ wrapper_config,
+ ) in hs.config.media.media_storage_providers:
backend = clz(hs, provider_config)
provider = StorageProviderWrapper(
backend,
@@ -187,7 +193,7 @@ class MediaRepository:
return "mxc://%s/%s" % (self.server_name, media_id)
async def get_local_media(
- self, request: Request, media_id: str, name: Optional[str]
+ self, request: SynapseRequest, media_id: str, name: Optional[str]
) -> None:
"""Responds to requests for local media, if exists, or returns 404.
@@ -221,7 +227,11 @@ class MediaRepository:
)
async def get_remote_media(
- self, request: Request, server_name: str, media_id: str, name: Optional[str]
+ self,
+ request: SynapseRequest,
+ server_name: str,
+ media_id: str,
+ name: Optional[str],
) -> None:
"""Respond to requests for remote media.
@@ -969,7 +979,7 @@ class MediaRepositoryResource(Resource):
def __init__(self, hs: "HomeServer"):
# If we're not configured to use it, raise if we somehow got here.
- if not hs.config.can_load_media_repo:
+ if not hs.config.media.can_load_media_repo:
raise ConfigError("Synapse is not configured to use a media repo.")
super().__init__()
@@ -980,7 +990,7 @@ class MediaRepositoryResource(Resource):
self.putChild(
b"thumbnail", ThumbnailResource(hs, media_repo, media_repo.media_storage)
)
- if hs.config.url_preview_enabled:
+ if hs.config.media.url_preview_enabled:
self.putChild(
b"preview_url",
PreviewUrlResource(hs, media_repo, media_repo.media_storage),
diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py
index 01fada8fb5..fca239d8c7 100644
--- a/synapse/rest/media/v1/media_storage.py
+++ b/synapse/rest/media/v1/media_storage.py
@@ -132,8 +132,7 @@ class MediaStorage:
fname = os.path.join(self.local_media_directory, path)
dirname = os.path.dirname(fname)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
+ os.makedirs(dirname, exist_ok=True)
finished_called = [False]
@@ -244,8 +243,7 @@ class MediaStorage:
return legacy_local_path
dirname = os.path.dirname(local_path)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
+ os.makedirs(dirname, exist_ok=True)
for provider in self.storage_providers:
res: Any = await provider.fetch(path, file_info)
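
A note on the makedirs change: the check-then-create pattern is racy when two requests store media under the same new directory at once, while exist_ok=True succeeds regardless. A small sketch, assuming a scratch path:

import os

dirname = "/tmp/media_store_demo/local_content/ab/cd"

# Racy: another process can create dirname between the check and the call,
# making os.makedirs() raise FileExistsError.
#   if not os.path.exists(dirname):
#       os.makedirs(dirname)

# Safe: succeeds whether or not the directory already exists.
os.makedirs(dirname, exist_ok=True)
os.makedirs(dirname, exist_ok=True)  # idempotent
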
diff --git a/synapse/rest/media/v1/oembed.py b/synapse/rest/media/v1/oembed.py
index 2e6706dbfa..e04671fb95 100644
--- a/synapse/rest/media/v1/oembed.py
+++ b/synapse/rest/media/v1/oembed.py
@@ -12,30 +12,32 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-from typing import TYPE_CHECKING, Optional
+import urllib.parse
+from typing import TYPE_CHECKING, List, Optional
import attr
from synapse.http.client import SimpleHttpClient
+from synapse.types import JsonDict
+from synapse.util import json_decoder
if TYPE_CHECKING:
+ from lxml import etree
+
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
-@attr.s(slots=True, auto_attribs=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
class OEmbedResult:
- # Either HTML content or URL must be provided.
- html: Optional[str]
- url: Optional[str]
- title: Optional[str]
- # Number of seconds to cache the content.
- cache_age: int
-
-
-class OEmbedError(Exception):
- """An error occurred processing the oEmbed object."""
+ # The Open Graph result (converted from the oEmbed result).
+ open_graph_result: JsonDict
+ # Number of milliseconds to cache the content, according to the oEmbed response.
+ #
+ # This will be None if no cache-age is provided in the oEmbed response (or
+ # if the oEmbed response cannot be turned into an Open Graph response).
+ cache_age: Optional[int]
class OEmbedProvider:
@@ -81,75 +83,145 @@ class OEmbedProvider:
"""
for url_pattern, endpoint in self._oembed_patterns.items():
if url_pattern.fullmatch(url):
- return endpoint
+ # TODO Specify max height / width.
+
+ # Note that only the JSON format is supported, some endpoints want
+ # this in the URL, others want it as an argument.
+ endpoint = endpoint.replace("{format}", "json")
+
+ args = {"url": url, "format": "json"}
+ query_str = urllib.parse.urlencode(args, True)
+ return f"{endpoint}?{query_str}"
# No match.
return None
- async def get_oembed_content(self, endpoint: str, url: str) -> OEmbedResult:
+ def parse_oembed_response(self, url: str, raw_body: bytes) -> OEmbedResult:
"""
- Request content from an oEmbed endpoint.
+ Parse the oEmbed response into an Open Graph response.
Args:
- endpoint: The oEmbed API endpoint.
- url: The URL to pass to the API.
+ url: The URL which is being previewed (not the one which was
+ requested).
+ raw_body: The oEmbed response as JSON encoded as bytes.
Returns:
- An object representing the metadata returned.
-
- Raises:
- OEmbedError if fetching or parsing of the oEmbed information fails.
+ An OEmbedResult wrapping the Open Graph response and its cache age.
"""
- try:
- logger.debug("Trying to get oEmbed content for url '%s'", url)
-
- # Note that only the JSON format is supported, some endpoints want
- # this in the URL, others want it as an argument.
- endpoint = endpoint.replace("{format}", "json")
- result = await self._client.get_json(
- endpoint,
- # TODO Specify max height / width.
- args={"url": url, "format": "json"},
- )
+ try:
+ # oEmbed responses *must* be UTF-8 according to the spec.
+ oembed = json_decoder.decode(raw_body.decode("utf-8"))
# Ensure there's a version of 1.0.
- if result.get("version") != "1.0":
- raise OEmbedError("Invalid version: %s" % (result.get("version"),))
-
- oembed_type = result.get("type")
+ oembed_version = oembed["version"]
+ if oembed_version != "1.0":
+ raise RuntimeError(f"Invalid version: {oembed_version}")
# Ensure the cache age is None or an int.
- cache_age = result.get("cache_age")
+ cache_age = oembed.get("cache_age")
if cache_age:
- cache_age = int(cache_age)
+ cache_age = int(cache_age) * 1000
- oembed_result = OEmbedResult(None, None, result.get("title"), cache_age)
+ # The results.
+ open_graph_response = {
+ "og:url": url,
+ }
- # HTML content.
- if oembed_type == "rich":
- oembed_result.html = result.get("html")
- return oembed_result
+ # Use either title or author's name as the title.
+ title = oembed.get("title") or oembed.get("author_name")
+ if title:
+ open_graph_response["og:title"] = title
- if oembed_type == "photo":
- oembed_result.url = result.get("url")
- return oembed_result
+ # Use the provider name as the site name.
+ provider_name = oembed.get("provider_name")
+ if provider_name:
+ open_graph_response["og:site_name"] = provider_name
- # TODO Handle link and video types.
+ # If a thumbnail exists, use it. Note that dimensions will be calculated later.
+ if "thumbnail_url" in oembed:
+ open_graph_response["og:image"] = oembed["thumbnail_url"]
- if "thumbnail_url" in result:
- oembed_result.url = result.get("thumbnail_url")
- return oembed_result
+ # Process each type separately.
+ oembed_type = oembed["type"]
+ if oembed_type == "rich":
+ calc_description_and_urls(open_graph_response, oembed["html"])
+
+ elif oembed_type == "photo":
+ # If this is a photo, use the full image, not the thumbnail.
+ open_graph_response["og:image"] = oembed["url"]
- raise OEmbedError("Incompatible oEmbed information.")
+ elif oembed_type == "video":
+ open_graph_response["og:type"] = "video.other"
+ calc_description_and_urls(open_graph_response, oembed["html"])
+ open_graph_response["og:video:width"] = oembed["width"]
+ open_graph_response["og:video:height"] = oembed["height"]
- except OEmbedError as e:
- # Trap OEmbedErrors first so we can directly re-raise them.
- logger.warning("Error parsing oEmbed metadata from %s: %r", url, e)
- raise
+ elif oembed_type == "link":
+ open_graph_response["og:type"] = "website"
+
+ else:
+ raise RuntimeError(f"Unknown oEmbed type: {oembed_type}")
except Exception as e:
# Trap any exception and let the code follow as usual.
- # FIXME: pass through 404s and other error messages nicely
- logger.warning("Error downloading oEmbed metadata from %s: %r", url, e)
- raise OEmbedError() from e
+ logger.warning(f"Error parsing oEmbed metadata from {url}: {e:r}")
+ open_graph_response = {}
+ cache_age = None
+
+ return OEmbedResult(open_graph_response, cache_age)
+
+
+def _fetch_urls(tree: "etree.Element", tag_name: str) -> List[str]:
+ results = []
+ for tag in tree.xpath("//*/" + tag_name):
+ if "src" in tag.attrib:
+ results.append(tag.attrib["src"])
+ return results
+
+
+def calc_description_and_urls(open_graph_response: JsonDict, html_body: str) -> None:
+ """
+ Calculate description for an HTML document.
+
+ This uses lxml to extract a plaintext description from the HTML document. If
+ errors occur during processing, the Open Graph response is left unchanged.
+
+ Args:
+ open_graph_response: The current Open Graph summary. This is updated with additional fields.
+ html_body: The HTML document, as a string.
+
+ Returns:
+ None; open_graph_response is updated in place.
+ """
+ # If there's no body, nothing useful is going to be found.
+ if not html_body:
+ return
+
+ from lxml import etree
+
+ # Create an HTML parser. If this fails, log and return no metadata.
+ parser = etree.HTMLParser(recover=True, encoding="utf-8")
+
+ # Attempt to parse the body. If this fails, log and return no metadata.
+ tree = etree.fromstring(html_body, parser)
+
+ # The data was successfully parsed, but no tree was found.
+ if tree is None:
+ return
+
+ # Attempt to find interesting URLs (images, videos, embeds).
+ if "og:image" not in open_graph_response:
+ image_urls = _fetch_urls(tree, "img")
+ if image_urls:
+ open_graph_response["og:image"] = image_urls[0]
+
+ video_urls = _fetch_urls(tree, "video") + _fetch_urls(tree, "embed")
+ if video_urls:
+ open_graph_response["og:video"] = video_urls[0]
+
+ from synapse.rest.media.v1.preview_url_resource import _calc_description
+
+ description = _calc_description(tree)
+ if description:
+ open_graph_response["og:description"] = description
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index fe0627d9b0..79a42b2455 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -29,7 +29,6 @@ import attr
from twisted.internet.defer import Deferred
from twisted.internet.error import DNSLookupError
-from twisted.web.server import Request
from synapse.api.errors import Codes, SynapseError
from synapse.http.client import SimpleHttpClient
@@ -44,7 +43,7 @@ from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.rest.media.v1._base import get_filename_from_headers
from synapse.rest.media.v1.media_storage import MediaStorage
-from synapse.rest.media.v1.oembed import OEmbedError, OEmbedProvider
+from synapse.rest.media.v1.oembed import OEmbedProvider
from synapse.types import JsonDict
from synapse.util import json_encoder
from synapse.util.async_helpers import ObservableDeferred
@@ -73,6 +72,7 @@ OG_TAG_NAME_MAXLEN = 50
OG_TAG_VALUE_MAXLEN = 1000
ONE_HOUR = 60 * 60 * 1000
+ONE_DAY = 24 * ONE_HOUR
@attr.s(slots=True, frozen=True, auto_attribs=True)
@@ -125,14 +125,14 @@ class PreviewUrlResource(DirectServeJsonResource):
self.auth = hs.get_auth()
self.clock = hs.get_clock()
self.filepaths = media_repo.filepaths
- self.max_spider_size = hs.config.max_spider_size
+ self.max_spider_size = hs.config.media.max_spider_size
self.server_name = hs.hostname
self.store = hs.get_datastore()
self.client = SimpleHttpClient(
hs,
treq_args={"browser_like_redirects": True},
- ip_whitelist=hs.config.url_preview_ip_range_whitelist,
- ip_blacklist=hs.config.url_preview_ip_range_blacklist,
+ ip_whitelist=hs.config.media.url_preview_ip_range_whitelist,
+ ip_blacklist=hs.config.media.url_preview_ip_range_blacklist,
use_proxy=True,
)
self.media_repo = media_repo
@@ -150,8 +150,8 @@ class PreviewUrlResource(DirectServeJsonResource):
or instance_running_jobs == hs.get_instance_name()
)
- self.url_preview_url_blacklist = hs.config.url_preview_url_blacklist
- self.url_preview_accept_language = hs.config.url_preview_accept_language
+ self.url_preview_url_blacklist = hs.config.media.url_preview_url_blacklist
+ self.url_preview_accept_language = hs.config.media.url_preview_accept_language
# memory cache mapping urls to an ObservableDeferred returning
# JSON-encoded OG metadata
@@ -167,7 +167,7 @@ class PreviewUrlResource(DirectServeJsonResource):
self._start_expire_url_cache_data, 10 * 1000
)
- async def _async_render_OPTIONS(self, request: Request) -> None:
+ async def _async_render_OPTIONS(self, request: SynapseRequest) -> None:
request.setHeader(b"Allow", b"OPTIONS, GET")
respond_with_json(request, 200, {}, send_cors=True)
@@ -255,10 +255,19 @@ class PreviewUrlResource(DirectServeJsonResource):
og = og.encode("utf8")
return og
- media_info = await self._download_url(url, user)
+ # If this URL can be accessed via oEmbed, use that instead.
+ url_to_download = url
+ oembed_url = self._oembed.get_oembed_url(url)
+ if oembed_url:
+ url_to_download = oembed_url
+
+ media_info = await self._download_url(url_to_download, user)
logger.debug("got media_info of '%s'", media_info)
+ # The number of milliseconds that the response should be considered valid.
+ expiration_ms = media_info.expires
+
if _is_media(media_info.media_type):
file_id = media_info.filesystem_id
dims = await self.media_repo._generate_thumbnails(
@@ -288,34 +297,22 @@ class PreviewUrlResource(DirectServeJsonResource):
encoding = get_html_media_encoding(body, media_info.media_type)
og = decode_and_calc_og(body, media_info.uri, encoding)
- # pre-cache the image for posterity
- # FIXME: it might be cleaner to use the same flow as the main /preview_url
- # request itself and benefit from the same caching etc. But for now we
- # just rely on the caching on the master request to speed things up.
- if "og:image" in og and og["og:image"]:
- image_info = await self._download_url(
- _rebase_url(og["og:image"], media_info.uri), user
- )
+ await self._precache_image_url(user, media_info, og)
+
+ elif oembed_url and _is_json(media_info.media_type):
+ # Handle an oEmbed response.
+ with open(media_info.filename, "rb") as file:
+ body = file.read()
+
+ oembed_response = self._oembed.parse_oembed_response(url, body)
+ og = oembed_response.open_graph_result
+
+ # Use the cache age from the oEmbed result, instead of the HTTP response.
+ if oembed_response.cache_age is not None:
+ expiration_ms = oembed_response.cache_age
+
+ await self._precache_image_url(user, media_info, og)
- if _is_media(image_info.media_type):
- # TODO: make sure we don't choke on white-on-transparent images
- file_id = image_info.filesystem_id
- dims = await self.media_repo._generate_thumbnails(
- None, file_id, file_id, image_info.media_type, url_cache=True
- )
- if dims:
- og["og:image:width"] = dims["width"]
- og["og:image:height"] = dims["height"]
- else:
- logger.warning("Couldn't get dims for %s", og["og:image"])
-
- og[
- "og:image"
- ] = f"mxc://{self.server_name}/{image_info.filesystem_id}"
- og["og:image:type"] = image_info.media_type
- og["matrix:image:size"] = image_info.media_length
- else:
- del og["og:image"]
else:
logger.warning("Failed to find any OG data in %s", url)
og = {}
@@ -336,12 +333,15 @@ class PreviewUrlResource(DirectServeJsonResource):
jsonog = json_encoder.encode(og)
+ # Cap the amount of time to consider a response valid.
+ expiration_ms = min(expiration_ms, ONE_DAY)
+
# store OG in history-aware DB cache
await self.store.store_url_cache(
url,
media_info.response_code,
media_info.etag,
- media_info.expires + media_info.created_ts_ms,
+ media_info.created_ts_ms + expiration_ms,
jsonog,
media_info.filesystem_id,
media_info.created_ts_ms,
@@ -358,88 +358,52 @@ class PreviewUrlResource(DirectServeJsonResource):
file_info = FileInfo(server_name=None, file_id=file_id, url_cache=True)
- # If this URL can be accessed via oEmbed, use that instead.
- url_to_download: Optional[str] = url
- oembed_url = self._oembed.get_oembed_url(url)
- if oembed_url:
- # The result might be a new URL to download, or it might be HTML content.
+ with self.media_storage.store_into_file(file_info) as (f, fname, finish):
try:
- oembed_result = await self._oembed.get_oembed_content(oembed_url, url)
- if oembed_result.url:
- url_to_download = oembed_result.url
- elif oembed_result.html:
- url_to_download = None
- except OEmbedError:
- # If an error occurs, try doing a normal preview.
- pass
+ logger.debug("Trying to get preview for url '%s'", url)
+ length, headers, uri, code = await self.client.get_file(
+ url,
+ output_stream=f,
+ max_size=self.max_spider_size,
+ headers={"Accept-Language": self.url_preview_accept_language},
+ )
+ except SynapseError:
+ # Pass SynapseErrors through directly, so that the servlet
+ # handler will return a SynapseError to the client instead of
+ # blank data or a 500.
+ raise
+ except DNSLookupError:
+ # DNS lookup returned no results
+ # Note: This will also be the case if one of the resolved IP
+ # addresses is blacklisted
+ raise SynapseError(
+ 502,
+ "DNS resolution failure during URL preview generation",
+ Codes.UNKNOWN,
+ )
+ except Exception as e:
+ # FIXME: pass through 404s and other error messages nicely
+ logger.warning("Error downloading %s: %r", url, e)
- if url_to_download:
- with self.media_storage.store_into_file(file_info) as (f, fname, finish):
- try:
- logger.debug("Trying to get preview for url '%s'", url_to_download)
- length, headers, uri, code = await self.client.get_file(
- url_to_download,
- output_stream=f,
- max_size=self.max_spider_size,
- headers={"Accept-Language": self.url_preview_accept_language},
- )
- except SynapseError:
- # Pass SynapseErrors through directly, so that the servlet
- # handler will return a SynapseError to the client instead of
- # blank data or a 500.
- raise
- except DNSLookupError:
- # DNS lookup returned no results
- # Note: This will also be the case if one of the resolved IP
- # addresses is blacklisted
- raise SynapseError(
- 502,
- "DNS resolution failure during URL preview generation",
- Codes.UNKNOWN,
- )
- except Exception as e:
- # FIXME: pass through 404s and other error messages nicely
- logger.warning("Error downloading %s: %r", url_to_download, e)
-
- raise SynapseError(
- 500,
- "Failed to download content: %s"
- % (traceback.format_exception_only(sys.exc_info()[0], e),),
- Codes.UNKNOWN,
- )
- await finish()
-
- if b"Content-Type" in headers:
- media_type = headers[b"Content-Type"][0].decode("ascii")
- else:
- media_type = "application/octet-stream"
+ raise SynapseError(
+ 500,
+ "Failed to download content: %s"
+ % (traceback.format_exception_only(sys.exc_info()[0], e),),
+ Codes.UNKNOWN,
+ )
+ await finish()
- download_name = get_filename_from_headers(headers)
+ if b"Content-Type" in headers:
+ media_type = headers[b"Content-Type"][0].decode("ascii")
+ else:
+ media_type = "application/octet-stream"
- # FIXME: we should calculate a proper expiration based on the
- # Cache-Control and Expire headers. But for now, assume 1 hour.
- expires = ONE_HOUR
- etag = (
- headers[b"ETag"][0].decode("ascii") if b"ETag" in headers else None
- )
- else:
- # we can only get here if we did an oembed request and have an oembed_result.html
- assert oembed_result.html is not None
- assert oembed_url is not None
-
- html_bytes = oembed_result.html.encode("utf-8")
- with self.media_storage.store_into_file(file_info) as (f, fname, finish):
- f.write(html_bytes)
- await finish()
-
- media_type = "text/html"
- download_name = oembed_result.title
- length = len(html_bytes)
- # If a specific cache age was not given, assume 1 hour.
- expires = oembed_result.cache_age or ONE_HOUR
- uri = oembed_url
- code = 200
- etag = None
+ download_name = get_filename_from_headers(headers)
+
+ # FIXME: we should calculate a proper expiration based on the
+    # Cache-Control and Expires headers. But for now, assume 1 hour.
+ expires = ONE_HOUR
+ etag = headers[b"ETag"][0].decode("ascii") if b"ETag" in headers else None
try:
time_now_ms = self.clock.time_msec()
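(With the separate oEmbed HTML branch gone, every preview download now derives its metadata from the response headers the same way. A condensed sketch of those fallbacks, using a plain dict of header values in place of the HTTP client's headers object:)

```python
from typing import Dict, List, Optional, Tuple


def derive_metadata(headers: Dict[bytes, List[bytes]]) -> Tuple[str, Optional[str]]:
    """Return (media_type, etag) with the same fallbacks as the code above."""
    if b"Content-Type" in headers:
        media_type = headers[b"Content-Type"][0].decode("ascii")
    else:
        # No Content-Type header: treat the body as opaque bytes.
        media_type = "application/octet-stream"

    etag = headers[b"ETag"][0].decode("ascii") if b"ETag" in headers else None
    return media_type, etag


assert derive_metadata({}) == ("application/octet-stream", None)
```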
@@ -474,6 +438,46 @@ class PreviewUrlResource(DirectServeJsonResource):
etag=etag,
)
+ async def _precache_image_url(
+ self, user: str, media_info: MediaInfo, og: JsonDict
+ ) -> None:
+ """
+    Pre-cache the image (if one exists) for posterity.
+
+ Args:
+ user: The user requesting the preview.
+ media_info: The media being previewed.
+ og: The Open Graph dictionary. This is modified with image information.
+ """
+ # If there's no image or it is blank, there's nothing to do.
+ if "og:image" not in og or not og["og:image"]:
+ return
+
+ # FIXME: it might be cleaner to use the same flow as the main /preview_url
+ # request itself and benefit from the same caching etc. But for now we
+ # just rely on the caching on the master request to speed things up.
+ image_info = await self._download_url(
+ _rebase_url(og["og:image"], media_info.uri), user
+ )
+
+ if _is_media(image_info.media_type):
+ # TODO: make sure we don't choke on white-on-transparent images
+ file_id = image_info.filesystem_id
+ dims = await self.media_repo._generate_thumbnails(
+ None, file_id, file_id, image_info.media_type, url_cache=True
+ )
+ if dims:
+ og["og:image:width"] = dims["width"]
+ og["og:image:height"] = dims["height"]
+ else:
+ logger.warning("Couldn't get dims for %s", og["og:image"])
+
+ og["og:image"] = f"mxc://{self.server_name}/{image_info.filesystem_id}"
+ og["og:image:type"] = image_info.media_type
+ og["matrix:image:size"] = image_info.media_length
+ else:
+ del og["og:image"]
+
def _start_expire_url_cache_data(self) -> Deferred:
return run_as_background_process(
"expire_url_cache_data", self._expire_url_cache_data
@@ -481,7 +485,6 @@ class PreviewUrlResource(DirectServeJsonResource):
async def _expire_url_cache_data(self) -> None:
"""Clean up expired url cache content, media and thumbnails."""
- # TODO: Delete from backup media store
assert self._worker_run_media_background_jobs
@@ -527,7 +530,7 @@ class PreviewUrlResource(DirectServeJsonResource):
# These may be cached for a bit on the client (i.e., they
# may have a room open with a preview url thing open).
# So we wait a couple of days before deleting, just in case.
- expire_before = now - 2 * 24 * ONE_HOUR
+ expire_before = now - 2 * ONE_DAY
media_ids = await self.store.get_url_cache_media_before(expire_before)
removed_media = []
@@ -669,7 +672,18 @@ def decode_and_calc_og(
def _calc_og(tree: "etree.Element", media_uri: str) -> Dict[str, Optional[str]]:
- # suck our tree into lxml and define our OG response.
+ """
+ Calculate metadata for an HTML document.
+
+ This uses lxml to search the HTML document for Open Graph data.
+
+ Args:
+ tree: The parsed HTML document.
+        media_uri: The URI used to download the body.
+
+ Returns:
+ The Open Graph response as a dictionary.
+ """
# if we see any image URLs in the OG response, then spider them
# (although the client could choose to do this by asking for previews of those
@@ -743,35 +757,7 @@ def _calc_og(tree: "etree.Element", media_uri: str) -> Dict[str, Optional[str]]:
if meta_description:
og["og:description"] = meta_description[0]
else:
- # grab any text nodes which are inside the <body/> tag...
- # unless they are within an HTML5 semantic markup tag...
- # <header/>, <nav/>, <aside/>, <footer/>
- # ...or if they are within a <script/> or <style/> tag.
- # This is a very very very coarse approximation to a plain text
- # render of the page.
-
- # We don't just use XPATH here as that is slow on some machines.
-
- from lxml import etree
-
- TAGS_TO_REMOVE = (
- "header",
- "nav",
- "aside",
- "footer",
- "script",
- "noscript",
- "style",
- etree.Comment,
- )
-
- # Split all the text nodes into paragraphs (by splitting on new
- # lines)
- text_nodes = (
- re.sub(r"\s+", "\n", el).strip()
- for el in _iterate_over_text(tree.find("body"), *TAGS_TO_REMOVE)
- )
- og["og:description"] = summarize_paragraphs(text_nodes)
+ og["og:description"] = _calc_description(tree)
elif og["og:description"]:
# This must be a non-empty string at this point.
assert isinstance(og["og:description"], str)
@@ -782,6 +768,46 @@ def _calc_og(tree: "etree.Element", media_uri: str) -> Dict[str, Optional[str]]:
return og
+def _calc_description(tree: "etree.Element") -> Optional[str]:
+ """
+ Calculate a text description based on an HTML document.
+
+ Grabs any text nodes which are inside the <body/> tag, unless they are within
+ an HTML5 semantic markup tag (<header/>, <nav/>, <aside/>, <footer/>), or
+    if they are within a <script/>, <noscript/> or <style/> tag.
+
+ This is a very very very coarse approximation to a plain text render of the page.
+
+ Args:
+ tree: The parsed HTML document.
+
+ Returns:
+ The plain text description, or None if one cannot be generated.
+ """
+ # We don't just use XPATH here as that is slow on some machines.
+
+ from lxml import etree
+
+ TAGS_TO_REMOVE = (
+ "header",
+ "nav",
+ "aside",
+ "footer",
+ "script",
+ "noscript",
+ "style",
+ etree.Comment,
+ )
+
+ # Split all the text nodes into paragraphs (by splitting on new
+ # lines)
+ text_nodes = (
+ re.sub(r"\s+", "\n", el).strip()
+ for el in _iterate_over_text(tree.find("body"), *TAGS_TO_REMOVE)
+ )
+ return summarize_paragraphs(text_nodes)
+
+
def _iterate_over_text(
tree: "etree.Element", *tags_to_ignore: Iterable[Union[str, "etree.Comment"]]
) -> Generator[str, None, None]:
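(The `re.sub(r"\s+", "\n", el).strip()` step in `_calc_description` collapses each text node's whitespace runs to single newlines before summarization; in isolation:)

```python
import re


def normalize(el: str) -> str:
    # Collapse every whitespace run to one newline, then trim the ends,
    # mirroring the per-node step in _calc_description.
    return re.sub(r"\s+", "\n", el).strip()


assert normalize("  Hello   world \n next  ") == "Hello\nworld\nnext"
```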
@@ -841,11 +867,25 @@ def _is_html(content_type: str) -> bool:
)
+def _is_json(content_type: str) -> bool:
+ return content_type.lower().startswith("application/json")
+
+
def summarize_paragraphs(
text_nodes: Iterable[str], min_size: int = 200, max_size: int = 500
) -> Optional[str]:
- # Try to get a summary of between 200 and 500 words, respecting
- # first paragraph and then word boundaries.
+ """
+ Try to get a summary respecting first paragraph and then word boundaries.
+
+ Args:
+ text_nodes: The paragraphs to summarize.
+        min_size: The minimum number of characters to include.
+        max_size: The maximum number of characters to include.
+
+ Returns:
+ A summary of the text nodes, or None if that was not possible.
+ """
+
# TODO: Respect sentences?
description = ""
@@ -868,7 +908,7 @@ def summarize_paragraphs(
new_desc = ""
# This splits the paragraph into words, but keeping the
- # (preceeding) whitespace intact so we can easily concat
+ # (preceding) whitespace intact so we can easily concat
# words back together.
for match in re.finditer(r"\s*\S+", description):
word = match.group()
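(The `\s*\S+` tokenizer referenced in that comment keeps each word's leading whitespace attached, so truncated text can be re-joined without losing or inventing spacing:)

```python
import re

tokens = [m.group() for m in re.finditer(r"\s*\S+", "one  two\nthree")]
assert tokens == ["one", "  two", "\nthree"]
assert "".join(tokens) == "one  two\nthree"  # lossless round trip
```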
diff --git a/synapse/rest/media/v1/storage_provider.py b/synapse/rest/media/v1/storage_provider.py
index 6c9969e55f..18bf977d3d 100644
--- a/synapse/rest/media/v1/storage_provider.py
+++ b/synapse/rest/media/v1/storage_provider.py
@@ -93,6 +93,11 @@ class StorageProviderWrapper(StorageProvider):
if file_info.server_name and not self.store_remote:
return None
+ if file_info.url_cache:
+            # The URL preview cache is short-lived and not worth offloading or
+ # backing up.
+ return None
+
if self.store_synchronous:
# store_file is supposed to return an Awaitable, but guard
# against improper implementations.
@@ -110,6 +115,11 @@ class StorageProviderWrapper(StorageProvider):
run_in_background(store)
async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
+ if file_info.url_cache:
+ # Files in the URL preview cache definitely aren't stored here,
+ # so avoid any potentially slow I/O or network access.
+ return None
+
# store_file is supposed to return an Awaitable, but guard
# against improper implementations.
return await maybe_awaitable(self.backend.fetch(path, file_info))
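(Both wrapper methods now short-circuit on `file_info.url_cache`, so preview-cache files are neither offloaded to nor fetched from storage providers. The guard in miniature, with an illustrative backend stub rather than the real `StorageProviderWrapper`:)

```python
from typing import Optional


class UrlCacheAwareWrapper:
    """Illustrative wrapper: skip the backend for URL-preview cache files."""

    def __init__(self, backend) -> None:
        self.backend = backend

    async def fetch(self, path: str, file_info) -> Optional[bytes]:
        if file_info.url_cache:
            # These files are short-lived and only ever written locally,
            # so skip backend I/O that cannot succeed.
            return None
        return await self.backend.fetch(path, file_info)
```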
@@ -125,7 +135,7 @@ class FileStorageProviderBackend(StorageProvider):
def __init__(self, hs: "HomeServer", config: str):
self.hs = hs
- self.cache_directory = hs.config.media_store_path
+ self.cache_directory = hs.config.media.media_store_path
self.base_directory = config
def __str__(self) -> str:
@@ -138,8 +148,7 @@ class FileStorageProviderBackend(StorageProvider):
backup_fname = os.path.join(self.base_directory, path)
dirname = os.path.dirname(backup_fname)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
+ os.makedirs(dirname, exist_ok=True)
await defer_to_thread(
self.hs.get_reactor(), shutil.copyfile, primary_fname, backup_fname
diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py
index 22f43d8531..ed91ef5a42 100644
--- a/synapse/rest/media/v1/thumbnail_resource.py
+++ b/synapse/rest/media/v1/thumbnail_resource.py
@@ -17,11 +17,10 @@
import logging
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
-from twisted.web.server import Request
-
from synapse.api.errors import SynapseError
from synapse.http.server import DirectServeJsonResource, set_cors_headers
from synapse.http.servlet import parse_integer, parse_string
+from synapse.http.site import SynapseRequest
from synapse.rest.media.v1.media_storage import MediaStorage
from ._base import (
@@ -54,10 +53,10 @@ class ThumbnailResource(DirectServeJsonResource):
self.store = hs.get_datastore()
self.media_repo = media_repo
self.media_storage = media_storage
- self.dynamic_thumbnails = hs.config.dynamic_thumbnails
+ self.dynamic_thumbnails = hs.config.media.dynamic_thumbnails
self.server_name = hs.hostname
- async def _async_render_GET(self, request: Request) -> None:
+ async def _async_render_GET(self, request: SynapseRequest) -> None:
set_cors_headers(request)
server_name, media_id, _ = parse_media_id(request)
width = parse_integer(request, "width", required=True)
@@ -88,7 +87,7 @@ class ThumbnailResource(DirectServeJsonResource):
async def _respond_local_thumbnail(
self,
- request: Request,
+ request: SynapseRequest,
media_id: str,
width: int,
height: int,
@@ -121,7 +120,7 @@ class ThumbnailResource(DirectServeJsonResource):
async def _select_or_generate_local_thumbnail(
self,
- request: Request,
+ request: SynapseRequest,
media_id: str,
desired_width: int,
desired_height: int,
@@ -186,7 +185,7 @@ class ThumbnailResource(DirectServeJsonResource):
async def _select_or_generate_remote_thumbnail(
self,
- request: Request,
+ request: SynapseRequest,
server_name: str,
media_id: str,
desired_width: int,
@@ -249,7 +248,7 @@ class ThumbnailResource(DirectServeJsonResource):
async def _respond_remote_thumbnail(
self,
- request: Request,
+ request: SynapseRequest,
server_name: str,
media_id: str,
width: int,
@@ -280,7 +279,7 @@ class ThumbnailResource(DirectServeJsonResource):
async def _select_and_respond_with_thumbnail(
self,
- request: Request,
+ request: SynapseRequest,
desired_width: int,
desired_height: int,
desired_method: str,
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index 146adca8f1..7dcb1428e4 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -16,8 +16,6 @@
import logging
from typing import IO, TYPE_CHECKING, Dict, List, Optional
-from twisted.web.server import Request
-
from synapse.api.errors import Codes, SynapseError
from synapse.http.server import DirectServeJsonResource, respond_with_json
from synapse.http.servlet import parse_bytes_from_args
@@ -43,10 +41,10 @@ class UploadResource(DirectServeJsonResource):
self.clock = hs.get_clock()
self.server_name = hs.hostname
self.auth = hs.get_auth()
- self.max_upload_size = hs.config.max_upload_size
+ self.max_upload_size = hs.config.media.max_upload_size
self.clock = hs.get_clock()
- async def _async_render_OPTIONS(self, request: Request) -> None:
+ async def _async_render_OPTIONS(self, request: SynapseRequest) -> None:
respond_with_json(request, 200, {}, send_cors=True)
async def _async_render_POST(self, request: SynapseRequest) -> None:
diff --git a/synapse/rest/synapse/client/__init__.py b/synapse/rest/synapse/client/__init__.py
index 47a2f72b32..6ad558f5d1 100644
--- a/synapse/rest/synapse/client/__init__.py
+++ b/synapse/rest/synapse/client/__init__.py
@@ -45,12 +45,12 @@ def build_synapse_client_resource_tree(hs: "HomeServer") -> Mapping[str, Resourc
# provider-specific SSO bits. Only load these if they are enabled, since they
# rely on optional dependencies.
- if hs.config.oidc_enabled:
+ if hs.config.oidc.oidc_enabled:
from synapse.rest.synapse.client.oidc import OIDCResource
resources["/_synapse/client/oidc"] = OIDCResource(hs)
- if hs.config.saml2_enabled:
+ if hs.config.saml2.saml2_enabled:
from synapse.rest.synapse.client.saml2 import SAML2Resource
res = SAML2Resource(hs)
diff --git a/synapse/rest/synapse/client/password_reset.py b/synapse/rest/synapse/client/password_reset.py
index f2800bf2db..28a67f04e3 100644
--- a/synapse/rest/synapse/client/password_reset.py
+++ b/synapse/rest/synapse/client/password_reset.py
@@ -47,20 +47,20 @@ class PasswordResetSubmitTokenResource(DirectServeHtmlResource):
self.store = hs.get_datastore()
self._local_threepid_handling_disabled_due_to_email_config = (
- hs.config.local_threepid_handling_disabled_due_to_email_config
+ hs.config.email.local_threepid_handling_disabled_due_to_email_config
)
self._confirmation_email_template = (
- hs.config.email_password_reset_template_confirmation_html
+ hs.config.email.email_password_reset_template_confirmation_html
)
self._email_password_reset_template_success_html = (
- hs.config.email_password_reset_template_success_html_content
+ hs.config.email.email_password_reset_template_success_html_content
)
self._failure_email_template = (
- hs.config.email_password_reset_template_failure_html
+ hs.config.email.email_password_reset_template_failure_html
)
# This resource should not be mounted if threepid behaviour is not LOCAL
- assert hs.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL
+ assert hs.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL
async def _async_render_GET(self, request: Request) -> Tuple[int, bytes]:
sid = parse_string(request, "sid", required=True)
diff --git a/synapse/rest/synapse/client/saml2/metadata_resource.py b/synapse/rest/synapse/client/saml2/metadata_resource.py
index 64378ed57b..d8eae3970d 100644
--- a/synapse/rest/synapse/client/saml2/metadata_resource.py
+++ b/synapse/rest/synapse/client/saml2/metadata_resource.py
@@ -30,7 +30,7 @@ class SAML2MetadataResource(Resource):
def __init__(self, hs: "HomeServer"):
Resource.__init__(self)
- self.sp_config = hs.config.saml2_sp_config
+ self.sp_config = hs.config.saml2.saml2_sp_config
def render_GET(self, request: Request) -> bytes:
metadata_xml = saml2.metadata.create_metadata_string(
|