author    | Erik Johnston <erik@matrix.org> | 2021-08-18 17:02:47 +0100
committer | Erik Johnston <erik@matrix.org> | 2021-08-18 17:02:47 +0100
commit    | 78a70a2e0bdff5bdf02ba093eefab0b16ebefd73 (patch)
tree      | 8de7b7805a9840101b00529a50812d6ffcb99cc4 /synapse
parent    | Fix weakref_slot parameter for room member storage attrs. (#10642) (diff)
parent    | Update docs/upgrade.md with new version (diff)
download  | synapse-78a70a2e0bdff5bdf02ba093eefab0b16ebefd73.tar.xz
Merge branch 'release-v1.41' into develop
Diffstat (limited to 'synapse')
-rw-r--r-- | synapse/__init__.py                             |   2
-rw-r--r-- | synapse/app/admin_cmd.py                        |   2
-rw-r--r-- | synapse/app/generic_worker.py                   |   4
-rw-r--r-- | synapse/config/account_validity.py              |   7
-rw-r--r-- | synapse/config/emailconfig.py                   |  71
-rw-r--r-- | synapse/config/logger.py                        |  27
-rw-r--r-- | synapse/config/server.py                        |  25
-rw-r--r-- | synapse/config/sso.py                           | 173
-rw-r--r-- | synapse/federation/federation_client.py         |  64
-rw-r--r-- | synapse/module_api/__init__.py                  |  12
-rw-r--r-- | synapse/replication/slave/storage/room.py       |  37
-rw-r--r-- | synapse/replication/tcp/streams/__init__.py     |   3
-rw-r--r-- | synapse/replication/tcp/streams/_base.py        |  25
-rw-r--r-- | synapse/rest/admin/__init__.py                  |   2
-rw-r--r-- | synapse/rest/admin/media.py                     | 165
-rw-r--r-- | synapse/rest/admin/users.py                     | 160
-rw-r--r-- | synapse/rest/client/room.py                     |   2
-rw-r--r-- | synapse/rest/synapse/client/new_user_consent.py |   2
-rw-r--r-- | synapse/rest/synapse/client/pick_username.py    |   2
-rw-r--r-- | synapse/storage/databases/main/__init__.py      |   4
-rw-r--r-- | synapse/storage/databases/main/room.py          | 261
-rw-r--r-- | synapse/storage/schema/__init__.py              |   7
22 files changed, 389 insertions, 668 deletions
diff --git a/synapse/__init__.py b/synapse/__init__.py index 919293cd80..6ada20a77f 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -47,7 +47,7 @@ try: except ImportError: pass -__version__ = "1.40.0" +__version__ = "1.41.0rc1" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py index 3234d9ebba..7396db93c6 100644 --- a/synapse/app/admin_cmd.py +++ b/synapse/app/admin_cmd.py @@ -38,7 +38,6 @@ from synapse.replication.slave.storage.groups import SlavedGroupServerStore from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore from synapse.replication.slave.storage.receipts import SlavedReceiptsStore from synapse.replication.slave.storage.registration import SlavedRegistrationStore -from synapse.replication.slave.storage.room import RoomStore from synapse.server import HomeServer from synapse.util.logcontext import LoggingContext from synapse.util.versionstring import get_version_string @@ -58,7 +57,6 @@ class AdminCmdSlavedStore( SlavedPushRuleStore, SlavedEventStore, SlavedClientIpStore, - RoomStore, BaseSlavedStore, ): pass diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index d7b425a7ab..845e6a8220 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -64,7 +64,6 @@ from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore from synapse.replication.slave.storage.pushers import SlavedPusherStore from synapse.replication.slave.storage.receipts import SlavedReceiptsStore from synapse.replication.slave.storage.registration import SlavedRegistrationStore -from synapse.replication.slave.storage.room import RoomStore from synapse.rest.admin import register_servlets_for_media_repo from synapse.rest.client import ( account_data, @@ -114,6 +113,7 @@ from synapse.storage.databases.main.monthly_active_users import ( MonthlyActiveUsersWorkerStore, ) from synapse.storage.databases.main.presence import PresenceStore +from synapse.storage.databases.main.room import RoomWorkerStore from synapse.storage.databases.main.search import SearchStore from synapse.storage.databases.main.stats import StatsStore from synapse.storage.databases.main.transactions import TransactionWorkerStore @@ -237,7 +237,7 @@ class GenericWorkerSlavedStore( ClientIpWorkerStore, SlavedEventStore, SlavedKeyStore, - RoomStore, + RoomWorkerStore, DirectoryStore, SlavedApplicationServiceStore, SlavedRegistrationStore, diff --git a/synapse/config/account_validity.py b/synapse/config/account_validity.py index 9acce5996e..52e63ab1f6 100644 --- a/synapse/config/account_validity.py +++ b/synapse/config/account_validity.py @@ -78,6 +78,11 @@ class AccountValidityConfig(Config): ) # Read and store template content + custom_template_directories = ( + self.root.server.custom_template_directory, + account_validity_template_dir, + ) + ( self.account_validity_account_renewed_template, self.account_validity_account_previously_renewed_template, @@ -88,5 +93,5 @@ class AccountValidityConfig(Config): "account_previously_renewed.html", invalid_token_template_filename, ], - (td for td in (account_validity_template_dir,) if td), + (td for td in custom_template_directories if td), ) diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index fc74b4a8b9..4477419196 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -258,7 +258,12 @@ class 
EmailConfig(Config): add_threepid_template_success_html, ], ( - td for td in (template_dir,) if td + td + for td in ( + self.root.server.custom_template_directory, + template_dir, + ) + if td ), # Filter out template_dir if not provided ) @@ -299,7 +304,14 @@ class EmailConfig(Config): self.email_notif_template_text, ) = self.read_templates( [notif_template_html, notif_template_text], - (td for td in (template_dir,) if td), + ( + td + for td in ( + self.root.server.custom_template_directory, + template_dir, + ) + if td + ), # Filter out template_dir if not provided ) self.email_notif_for_new_users = email_config.get( @@ -322,7 +334,14 @@ class EmailConfig(Config): self.account_validity_template_text, ) = self.read_templates( [expiry_template_html, expiry_template_text], - (td for td in (template_dir,) if td), + ( + td + for td in ( + self.root.server.custom_template_directory, + template_dir, + ) + if td + ), # Filter out template_dir if not provided ) subjects_config = email_config.get("subjects", {}) @@ -354,6 +373,9 @@ class EmailConfig(Config): """\ # Configuration for sending emails from Synapse. # + # Server admins can configure custom templates for email content. See + # https://matrix-org.github.io/synapse/latest/templates.html for more information. + # email: # The hostname of the outgoing SMTP server to use. Defaults to 'localhost'. # @@ -430,49 +452,6 @@ class EmailConfig(Config): # #invite_client_location: https://app.element.io - # Directory in which Synapse will try to find the template files below. - # If not set, or the files named below are not found within the template - # directory, default templates from within the Synapse package will be used. - # - # Synapse will look for the following templates in this directory: - # - # * The contents of email notifications of missed events: 'notif_mail.html' and - # 'notif_mail.txt'. - # - # * The contents of account expiry notice emails: 'notice_expiry.html' and - # 'notice_expiry.txt'. - # - # * The contents of password reset emails sent by the homeserver: - # 'password_reset.html' and 'password_reset.txt' - # - # * An HTML page that a user will see when they follow the link in the password - # reset email. The user will be asked to confirm the action before their - # password is reset: 'password_reset_confirmation.html' - # - # * HTML pages for success and failure that a user will see when they confirm - # the password reset flow using the page above: 'password_reset_success.html' - # and 'password_reset_failure.html' - # - # * The contents of address verification emails sent during registration: - # 'registration.html' and 'registration.txt' - # - # * HTML pages for success and failure that a user will see when they follow - # the link in an address verification email sent during registration: - # 'registration_success.html' and 'registration_failure.html' - # - # * The contents of address verification emails sent when an address is added - # to a Matrix account: 'add_threepid.html' and 'add_threepid.txt' - # - # * HTML pages for success and failure that a user will see when they follow - # the link in an address verification email sent when an address is added - # to a Matrix account: 'add_threepid_success.html' and - # 'add_threepid_failure.html' - # - # You can see the default templates at: - # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates - # - #template_dir: "res/templates" - # Subjects to use when sending emails from Synapse. 
# # The placeholder '%%(app)s' will be replaced with the value of the 'app_name' diff --git a/synapse/config/logger.py b/synapse/config/logger.py index ad4e6e61c3..4a398a7932 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -67,18 +67,31 @@ handlers: backupCount: 3 # Does not include the current log file. encoding: utf8 - # Default to buffering writes to log file for efficiency. This means that - # will be a delay for INFO/DEBUG logs to get written, but WARNING/ERROR - # logs will still be flushed immediately. + # Default to buffering writes to log file for efficiency. + # WARNING/ERROR logs will still be flushed immediately, but there will be a + # delay (of up to `period` seconds, or until the buffer is full with + # `capacity` messages) before INFO/DEBUG logs get written. buffer: - class: logging.handlers.MemoryHandler + class: synapse.logging.handlers.PeriodicallyFlushingMemoryHandler target: file - # The capacity is the number of log lines that are buffered before - # being written to disk. Increasing this will lead to better + + # The capacity is the maximum number of log lines that are buffered + # before being written to disk. Increasing this will lead to better # performance, at the expensive of it taking longer for log lines to # be written to disk. + # This parameter is required. capacity: 10 - flushLevel: 30 # Flush for WARNING logs as well + + # Logs with a level at or above the flush level will cause the buffer to + # be flushed immediately. + # Default value: 40 (ERROR) + # Other values: 50 (CRITICAL), 30 (WARNING), 20 (INFO), 10 (DEBUG) + flushLevel: 30 # Flush immediately for WARNING logs and higher + + # The period of time, in seconds, between forced flushes. + # Messages will not be delayed for longer than this time. + # Default value: 5 seconds + period: 5 # A handler that writes logs to stderr. Unused by default, but can be used # instead of "buffer" and "file" in the logger handlers. diff --git a/synapse/config/server.py b/synapse/config/server.py index 871422ea28..d2c900f50c 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -711,6 +711,18 @@ class ServerConfig(Config): # Turn the list into a set to improve lookup speed. self.next_link_domain_whitelist = set(next_link_domain_whitelist) + templates_config = config.get("templates") or {} + if not isinstance(templates_config, dict): + raise ConfigError("The 'templates' section must be a dictionary") + + self.custom_template_directory = templates_config.get( + "custom_template_directory" + ) + if self.custom_template_directory is not None and not isinstance( + self.custom_template_directory, str + ): + raise ConfigError("'custom_template_directory' must be a string") + def has_tls_listener(self) -> bool: return any(listener.tls for listener in self.listeners) @@ -1271,6 +1283,19 @@ class ServerConfig(Config): # all domains. # #next_link_domain_whitelist: ["matrix.org"] + + # Templates to use when generating email or HTML page contents. + # + templates: + # Directory in which Synapse will try to find template files to use to generate + # email or HTML page contents. + # If not set, or a file is not found within the template directory, a default + # template from within the Synapse package will be used. + # + # See https://matrix-org.github.io/synapse/latest/templates.html for more + # information about using custom templates. 
+ # + #custom_template_directory: /path/to/custom/templates/ """ % locals() ) diff --git a/synapse/config/sso.py b/synapse/config/sso.py index 4b590e0535..fe1177ab81 100644 --- a/synapse/config/sso.py +++ b/synapse/config/sso.py @@ -45,6 +45,11 @@ class SSOConfig(Config): self.sso_template_dir = sso_config.get("template_dir") # Read templates from disk + custom_template_directories = ( + self.root.server.custom_template_directory, + self.sso_template_dir, + ) + ( self.sso_login_idp_picker_template, self.sso_redirect_confirm_template, @@ -63,7 +68,7 @@ class SSOConfig(Config): "sso_auth_success.html", "sso_auth_bad_user.html", ], - (td for td in (self.sso_template_dir,) if td), + (td for td in custom_template_directories if td), ) # These templates have no placeholders, so render them here @@ -94,6 +99,9 @@ class SSOConfig(Config): # Additional settings to use with single-sign on systems such as OpenID Connect, # SAML2 and CAS. # + # Server admins can configure custom templates for pages related to SSO. See + # https://matrix-org.github.io/synapse/latest/templates.html for more information. + # sso: # A list of client URLs which are whitelisted so that the user does not # have to confirm giving access to their account to the URL. Any client @@ -125,167 +133,4 @@ class SSOConfig(Config): # information when first signing in. Defaults to false. # #update_profile_information: true - - # Directory in which Synapse will try to find the template files below. - # If not set, or the files named below are not found within the template - # directory, default templates from within the Synapse package will be used. - # - # Synapse will look for the following templates in this directory: - # - # * HTML page to prompt the user to choose an Identity Provider during - # login: 'sso_login_idp_picker.html'. - # - # This is only used if multiple SSO Identity Providers are configured. - # - # When rendering, this template is given the following variables: - # * redirect_url: the URL that the user will be redirected to after - # login. - # - # * server_name: the homeserver's name. - # - # * providers: a list of available Identity Providers. Each element is - # an object with the following attributes: - # - # * idp_id: unique identifier for the IdP - # * idp_name: user-facing name for the IdP - # * idp_icon: if specified in the IdP config, an MXC URI for an icon - # for the IdP - # * idp_brand: if specified in the IdP config, a textual identifier - # for the brand of the IdP - # - # The rendered HTML page should contain a form which submits its results - # back as a GET request, with the following query parameters: - # - # * redirectUrl: the client redirect URI (ie, the `redirect_url` passed - # to the template) - # - # * idp: the 'idp_id' of the chosen IDP. - # - # * HTML page to prompt new users to enter a userid and confirm other - # details: 'sso_auth_account_details.html'. This is only shown if the - # SSO implementation (with any user_mapping_provider) does not return - # a localpart. - # - # When rendering, this template is given the following variables: - # - # * server_name: the homeserver's name. 
- # - # * idp: details of the SSO Identity Provider that the user logged in - # with: an object with the following attributes: - # - # * idp_id: unique identifier for the IdP - # * idp_name: user-facing name for the IdP - # * idp_icon: if specified in the IdP config, an MXC URI for an icon - # for the IdP - # * idp_brand: if specified in the IdP config, a textual identifier - # for the brand of the IdP - # - # * user_attributes: an object containing details about the user that - # we received from the IdP. May have the following attributes: - # - # * display_name: the user's display_name - # * emails: a list of email addresses - # - # The template should render a form which submits the following fields: - # - # * username: the localpart of the user's chosen user id - # - # * HTML page allowing the user to consent to the server's terms and - # conditions. This is only shown for new users, and only if - # `user_consent.require_at_registration` is set. - # - # When rendering, this template is given the following variables: - # - # * server_name: the homeserver's name. - # - # * user_id: the user's matrix proposed ID. - # - # * user_profile.display_name: the user's proposed display name, if any. - # - # * consent_version: the version of the terms that the user will be - # shown - # - # * terms_url: a link to the page showing the terms. - # - # The template should render a form which submits the following fields: - # - # * accepted_version: the version of the terms accepted by the user - # (ie, 'consent_version' from the input variables). - # - # * HTML page for a confirmation step before redirecting back to the client - # with the login token: 'sso_redirect_confirm.html'. - # - # When rendering, this template is given the following variables: - # - # * redirect_url: the URL the user is about to be redirected to. - # - # * display_url: the same as `redirect_url`, but with the query - # parameters stripped. The intention is to have a - # human-readable URL to show to users, not to use it as - # the final address to redirect to. - # - # * server_name: the homeserver's name. - # - # * new_user: a boolean indicating whether this is the user's first time - # logging in. - # - # * user_id: the user's matrix ID. - # - # * user_profile.avatar_url: an MXC URI for the user's avatar, if any. - # None if the user has not set an avatar. - # - # * user_profile.display_name: the user's display name. None if the user - # has not set a display name. - # - # * HTML page which notifies the user that they are authenticating to confirm - # an operation on their account during the user interactive authentication - # process: 'sso_auth_confirm.html'. - # - # When rendering, this template is given the following variables: - # * redirect_url: the URL the user is about to be redirected to. - # - # * description: the operation which the user is being asked to confirm - # - # * idp: details of the Identity Provider that we will use to confirm - # the user's identity: an object with the following attributes: - # - # * idp_id: unique identifier for the IdP - # * idp_name: user-facing name for the IdP - # * idp_icon: if specified in the IdP config, an MXC URI for an icon - # for the IdP - # * idp_brand: if specified in the IdP config, a textual identifier - # for the brand of the IdP - # - # * HTML page shown after a successful user interactive authentication session: - # 'sso_auth_success.html'. 
- # - # Note that this page must include the JavaScript which notifies of a successful authentication - # (see https://matrix.org/docs/spec/client_server/r0.6.0#fallback). - # - # This template has no additional variables. - # - # * HTML page shown after a user-interactive authentication session which - # does not map correctly onto the expected user: 'sso_auth_bad_user.html'. - # - # When rendering, this template is given the following variables: - # * server_name: the homeserver's name. - # * user_id_to_verify: the MXID of the user that we are trying to - # validate. - # - # * HTML page shown during single sign-on if a deactivated user (according to Synapse's database) - # attempts to login: 'sso_account_deactivated.html'. - # - # This template has no additional variables. - # - # * HTML page to display to users if something goes wrong during the - # OpenID Connect authentication process: 'sso_error.html'. - # - # When rendering, this template is given two variables: - # * error: the technical name of the error - # * error_description: a human-readable message for the error - # - # You can see the default templates at: - # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates - # - #template_dir: "res/templates" """ diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 0af953a5d6..29979414e3 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -1364,13 +1364,59 @@ class FederationClient(FederationBase): return room, children, inaccessible_children - # TODO Fallback to the old federation API and translate the results. - return await self._try_destination_list( - "fetch room hierarchy", - destinations, - send_request, - failover_on_unknown_endpoint=True, - ) + try: + return await self._try_destination_list( + "fetch room hierarchy", + destinations, + send_request, + failover_on_unknown_endpoint=True, + ) + except SynapseError as e: + # Fallback to the old federation API and translate the results if + # no servers implement the new API. + # + # The algorithm below is a bit inefficient as it only attempts to + # get information for the requested room, but the legacy API may + # return additional layers. + if e.code == 502: + legacy_result = await self.get_space_summary( + destinations, + room_id, + suggested_only, + max_rooms_per_space=None, + exclude_rooms=[], + ) + + # Find the requested room in the response (and remove it). + for _i, room in enumerate(legacy_result.rooms): + if room.get("room_id") == room_id: + break + else: + # The requested room was not returned, nothing we can do. + raise + requested_room = legacy_result.rooms.pop(_i) + + # Find any children events of the requested room. + children_events = [] + children_room_ids = set() + for event in legacy_result.events: + if event.room_id == room_id: + children_events.append(event.data) + children_room_ids.add(event.state_key) + # And add them under the requested room. + requested_room["children_state"] = children_events + + # Find the children rooms. + children = [] + for room in legacy_result.rooms: + if room.get("room_id") in children_room_ids: + children.append(room) + + # It isn't clear from the response whether some of the rooms are + # not accessible. 
+ return requested_room, children, () + + raise @attr.s(frozen=True, slots=True, auto_attribs=True) @@ -1430,7 +1476,7 @@ class FederationSpaceSummaryEventResult: class FederationSpaceSummaryResult: """Represents the data returned by a successful get_space_summary call.""" - rooms: Sequence[JsonDict] + rooms: List[JsonDict] events: Sequence[FederationSpaceSummaryEventResult] @classmethod @@ -1444,7 +1490,7 @@ class FederationSpaceSummaryResult: ValueError if d is not a valid /spaces/ response """ rooms = d.get("rooms") - if not isinstance(rooms, Sequence): + if not isinstance(rooms, List): raise ValueError("'rooms' must be a list") if any(not isinstance(r, dict) for r in rooms): raise ValueError("Invalid room in 'rooms' list") diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 84bb7264a4..b11fa6393b 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -95,6 +95,7 @@ class ModuleApi: self._state = hs.get_state_handler() self._clock: Clock = hs.get_clock() self._send_email_handler = hs.get_send_email_handler() + self.custom_template_dir = hs.config.server.custom_template_directory try: app_name = self._hs.config.email_app_name @@ -613,10 +614,15 @@ class ModuleApi: msec: float, *args, desc: Optional[str] = None, + run_on_all_instances: bool = False, **kwargs, ): """Wraps a function as a background process and calls it repeatedly. + NOTE: Will only run on the instance that is configured to run + background processes (which is the main process by default), unless + `run_on_all_workers` is set. + Waits `msec` initially before calling `f` for the first time. Args: @@ -627,12 +633,14 @@ class ModuleApi: msec: How long to wait between calls in milliseconds. *args: Positional arguments to pass to function. desc: The background task's description. Default to the function's name. + run_on_all_instances: Whether to run this on all instances, rather + than just the instance configured to run background tasks. **kwargs: Key arguments to pass to function. """ if desc is None: desc = f.__name__ - if self._hs.config.run_background_tasks: + if self._hs.config.run_background_tasks or run_on_all_instances: self._clock.looping_call( run_as_background_process, msec, @@ -689,7 +697,7 @@ class ModuleApi: """ return self._hs.config.read_templates( filenames, - (td for td in (custom_template_directory,) if td), + (td for td in (self.custom_template_dir, custom_template_directory) if td), ) diff --git a/synapse/replication/slave/storage/room.py b/synapse/replication/slave/storage/room.py deleted file mode 100644 index 8cc6de3f46..0000000000 --- a/synapse/replication/slave/storage/room.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2015, 2016 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from synapse.replication.tcp.streams import PublicRoomsStream -from synapse.storage.database import DatabasePool -from synapse.storage.databases.main.room import RoomWorkerStore - -from ._base import BaseSlavedStore -from ._slaved_id_tracker import SlavedIdTracker - - -class RoomStore(RoomWorkerStore, BaseSlavedStore): - def __init__(self, database: DatabasePool, db_conn, hs): - super().__init__(database, db_conn, hs) - self._public_room_id_gen = SlavedIdTracker( - db_conn, "public_room_list_stream", "stream_id" - ) - - def get_current_public_room_stream_id(self): - return self._public_room_id_gen.get_current_token() - - def process_replication_rows(self, stream_name, instance_name, token, rows): - if stream_name == PublicRoomsStream.NAME: - self._public_room_id_gen.advance(instance_name, token) - - return super().process_replication_rows(stream_name, instance_name, token, rows) diff --git a/synapse/replication/tcp/streams/__init__.py b/synapse/replication/tcp/streams/__init__.py index 4c0023c68a..f41eabd85e 100644 --- a/synapse/replication/tcp/streams/__init__.py +++ b/synapse/replication/tcp/streams/__init__.py @@ -32,7 +32,6 @@ from synapse.replication.tcp.streams._base import ( GroupServerStream, PresenceFederationStream, PresenceStream, - PublicRoomsStream, PushersStream, PushRulesStream, ReceiptsStream, @@ -57,7 +56,6 @@ STREAMS_MAP = { PushRulesStream, PushersStream, CachesStream, - PublicRoomsStream, DeviceListsStream, ToDeviceStream, FederationStream, @@ -79,7 +77,6 @@ __all__ = [ "PushRulesStream", "PushersStream", "CachesStream", - "PublicRoomsStream", "DeviceListsStream", "ToDeviceStream", "TagAccountDataStream", diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py index 3716c41bea..9b905aba9d 100644 --- a/synapse/replication/tcp/streams/_base.py +++ b/synapse/replication/tcp/streams/_base.py @@ -447,31 +447,6 @@ class CachesStream(Stream): ) -class PublicRoomsStream(Stream): - """The public rooms list changed""" - - PublicRoomsStreamRow = namedtuple( - "PublicRoomsStreamRow", - ( - "room_id", # str - "visibility", # str - "appservice_id", # str, optional - "network_id", # str, optional - ), - ) - - NAME = "public_rooms" - ROW_TYPE = PublicRoomsStreamRow - - def __init__(self, hs): - store = hs.get_datastore() - super().__init__( - hs.get_instance_name(), - current_token_without_instance(store.get_current_public_room_stream_id), - store.get_all_new_public_rooms, - ) - - class DeviceListsStream(Stream): """Either a user has updated their devices or a remote server needs to be told about a device update. 
diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 2acaea3003..7f3051aef1 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -59,7 +59,6 @@ from synapse.rest.admin.users import ( SearchUsersRestServlet, ShadowBanRestServlet, UserAdminServlet, - UserMediaRestServlet, UserMembershipRestServlet, UserRegisterServlet, UserRestServletV2, @@ -222,7 +221,6 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: SendServerNoticeServlet(hs).register(http_server) VersionServlet(hs).register(http_server) UserAdminServlet(hs).register(http_server) - UserMediaRestServlet(hs).register(http_server) UserMembershipRestServlet(hs).register(http_server) UserTokenRestServlet(hs).register(http_server) UserRestServletV2(hs).register(http_server) diff --git a/synapse/rest/admin/media.py b/synapse/rest/admin/media.py index 5f0555039d..8ce443049e 100644 --- a/synapse/rest/admin/media.py +++ b/synapse/rest/admin/media.py @@ -18,14 +18,15 @@ from typing import TYPE_CHECKING, Tuple from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError from synapse.http.server import HttpServer -from synapse.http.servlet import RestServlet, parse_boolean, parse_integer +from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string from synapse.http.site import SynapseRequest from synapse.rest.admin._base import ( admin_patterns, assert_requester_is_admin, assert_user_is_admin, ) -from synapse.types import JsonDict +from synapse.storage.databases.main.media_repository import MediaSortOrder +from synapse.types import JsonDict, UserID if TYPE_CHECKING: from synapse.server import HomeServer @@ -314,6 +315,165 @@ class DeleteMediaByDateSize(RestServlet): return 200, {"deleted_media": deleted_media, "total": total} +class UserMediaRestServlet(RestServlet): + """ + Gets information about all uploaded local media for a specific `user_id`. + With DELETE request you can delete all this media. + + Example: + http://localhost:8008/_synapse/admin/v1/users/@user:server/media + + Args: + The parameters `from` and `limit` are required for pagination. + By default, a `limit` of 100 is used. + Returns: + A list of media and an integer representing the total number of + media that exist given for this user + """ + + PATTERNS = admin_patterns("/users/(?P<user_id>[^/]+)/media$") + + def __init__(self, hs: "HomeServer"): + self.is_mine = hs.is_mine + self.auth = hs.get_auth() + self.store = hs.get_datastore() + self.media_repository = hs.get_media_repository() + + async def on_GET( + self, request: SynapseRequest, user_id: str + ) -> Tuple[int, JsonDict]: + # This will always be set by the time Twisted calls us. 
+ assert request.args is not None + + await assert_requester_is_admin(self.auth, request) + + if not self.is_mine(UserID.from_string(user_id)): + raise SynapseError(400, "Can only look up local users") + + user = await self.store.get_user_by_id(user_id) + if user is None: + raise NotFoundError("Unknown user") + + start = parse_integer(request, "from", default=0) + limit = parse_integer(request, "limit", default=100) + + if start < 0: + raise SynapseError( + 400, + "Query parameter from must be a string representing a positive integer.", + errcode=Codes.INVALID_PARAM, + ) + + if limit < 0: + raise SynapseError( + 400, + "Query parameter limit must be a string representing a positive integer.", + errcode=Codes.INVALID_PARAM, + ) + + # If neither `order_by` nor `dir` is set, set the default order + # to newest media is on top for backward compatibility. + if b"order_by" not in request.args and b"dir" not in request.args: + order_by = MediaSortOrder.CREATED_TS.value + direction = "b" + else: + order_by = parse_string( + request, + "order_by", + default=MediaSortOrder.CREATED_TS.value, + allowed_values=( + MediaSortOrder.MEDIA_ID.value, + MediaSortOrder.UPLOAD_NAME.value, + MediaSortOrder.CREATED_TS.value, + MediaSortOrder.LAST_ACCESS_TS.value, + MediaSortOrder.MEDIA_LENGTH.value, + MediaSortOrder.MEDIA_TYPE.value, + MediaSortOrder.QUARANTINED_BY.value, + MediaSortOrder.SAFE_FROM_QUARANTINE.value, + ), + ) + direction = parse_string( + request, "dir", default="f", allowed_values=("f", "b") + ) + + media, total = await self.store.get_local_media_by_user_paginate( + start, limit, user_id, order_by, direction + ) + + ret = {"media": media, "total": total} + if (start + limit) < total: + ret["next_token"] = start + len(media) + + return 200, ret + + async def on_DELETE( + self, request: SynapseRequest, user_id: str + ) -> Tuple[int, JsonDict]: + # This will always be set by the time Twisted calls us. + assert request.args is not None + + await assert_requester_is_admin(self.auth, request) + + if not self.is_mine(UserID.from_string(user_id)): + raise SynapseError(400, "Can only look up local users") + + user = await self.store.get_user_by_id(user_id) + if user is None: + raise NotFoundError("Unknown user") + + start = parse_integer(request, "from", default=0) + limit = parse_integer(request, "limit", default=100) + + if start < 0: + raise SynapseError( + 400, + "Query parameter from must be a string representing a positive integer.", + errcode=Codes.INVALID_PARAM, + ) + + if limit < 0: + raise SynapseError( + 400, + "Query parameter limit must be a string representing a positive integer.", + errcode=Codes.INVALID_PARAM, + ) + + # If neither `order_by` nor `dir` is set, set the default order + # to newest media is on top for backward compatibility. 
+ if b"order_by" not in request.args and b"dir" not in request.args: + order_by = MediaSortOrder.CREATED_TS.value + direction = "b" + else: + order_by = parse_string( + request, + "order_by", + default=MediaSortOrder.CREATED_TS.value, + allowed_values=( + MediaSortOrder.MEDIA_ID.value, + MediaSortOrder.UPLOAD_NAME.value, + MediaSortOrder.CREATED_TS.value, + MediaSortOrder.LAST_ACCESS_TS.value, + MediaSortOrder.MEDIA_LENGTH.value, + MediaSortOrder.MEDIA_TYPE.value, + MediaSortOrder.QUARANTINED_BY.value, + MediaSortOrder.SAFE_FROM_QUARANTINE.value, + ), + ) + direction = parse_string( + request, "dir", default="f", allowed_values=("f", "b") + ) + + media, _ = await self.store.get_local_media_by_user_paginate( + start, limit, user_id, order_by, direction + ) + + deleted_media, total = await self.media_repository.delete_local_media_ids( + ([row["media_id"] for row in media]) + ) + + return 200, {"deleted_media": deleted_media, "total": total} + + def register_servlets_for_media_repo(hs: "HomeServer", http_server: HttpServer) -> None: """ Media repo specific APIs. @@ -328,3 +488,4 @@ def register_servlets_for_media_repo(hs: "HomeServer", http_server: HttpServer) ListMediaInRoom(hs).register(http_server) DeleteMediaByID(hs).register(http_server) DeleteMediaByDateSize(hs).register(http_server) + UserMediaRestServlet(hs).register(http_server) diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index 93193b0864..3c8a0c6883 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -35,7 +35,6 @@ from synapse.rest.admin._base import ( assert_user_is_admin, ) from synapse.rest.client._base import client_patterns -from synapse.storage.databases.main.media_repository import MediaSortOrder from synapse.storage.databases.main.stats import UserSortOrder from synapse.types import JsonDict, UserID @@ -851,165 +850,6 @@ class PushersRestServlet(RestServlet): return 200, {"pushers": filtered_pushers, "total": len(filtered_pushers)} -class UserMediaRestServlet(RestServlet): - """ - Gets information about all uploaded local media for a specific `user_id`. - With DELETE request you can delete all this media. - - Example: - http://localhost:8008/_synapse/admin/v1/users/@user:server/media - - Args: - The parameters `from` and `limit` are required for pagination. - By default, a `limit` of 100 is used. - Returns: - A list of media and an integer representing the total number of - media that exist given for this user - """ - - PATTERNS = admin_patterns("/users/(?P<user_id>[^/]+)/media$") - - def __init__(self, hs: "HomeServer"): - self.is_mine = hs.is_mine - self.auth = hs.get_auth() - self.store = hs.get_datastore() - self.media_repository = hs.get_media_repository() - - async def on_GET( - self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: - # This will always be set by the time Twisted calls us. 
- assert request.args is not None - - await assert_requester_is_admin(self.auth, request) - - if not self.is_mine(UserID.from_string(user_id)): - raise SynapseError(400, "Can only look up local users") - - user = await self.store.get_user_by_id(user_id) - if user is None: - raise NotFoundError("Unknown user") - - start = parse_integer(request, "from", default=0) - limit = parse_integer(request, "limit", default=100) - - if start < 0: - raise SynapseError( - 400, - "Query parameter from must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) - - if limit < 0: - raise SynapseError( - 400, - "Query parameter limit must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) - - # If neither `order_by` nor `dir` is set, set the default order - # to newest media is on top for backward compatibility. - if b"order_by" not in request.args and b"dir" not in request.args: - order_by = MediaSortOrder.CREATED_TS.value - direction = "b" - else: - order_by = parse_string( - request, - "order_by", - default=MediaSortOrder.CREATED_TS.value, - allowed_values=( - MediaSortOrder.MEDIA_ID.value, - MediaSortOrder.UPLOAD_NAME.value, - MediaSortOrder.CREATED_TS.value, - MediaSortOrder.LAST_ACCESS_TS.value, - MediaSortOrder.MEDIA_LENGTH.value, - MediaSortOrder.MEDIA_TYPE.value, - MediaSortOrder.QUARANTINED_BY.value, - MediaSortOrder.SAFE_FROM_QUARANTINE.value, - ), - ) - direction = parse_string( - request, "dir", default="f", allowed_values=("f", "b") - ) - - media, total = await self.store.get_local_media_by_user_paginate( - start, limit, user_id, order_by, direction - ) - - ret = {"media": media, "total": total} - if (start + limit) < total: - ret["next_token"] = start + len(media) - - return 200, ret - - async def on_DELETE( - self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: - # This will always be set by the time Twisted calls us. - assert request.args is not None - - await assert_requester_is_admin(self.auth, request) - - if not self.is_mine(UserID.from_string(user_id)): - raise SynapseError(400, "Can only look up local users") - - user = await self.store.get_user_by_id(user_id) - if user is None: - raise NotFoundError("Unknown user") - - start = parse_integer(request, "from", default=0) - limit = parse_integer(request, "limit", default=100) - - if start < 0: - raise SynapseError( - 400, - "Query parameter from must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) - - if limit < 0: - raise SynapseError( - 400, - "Query parameter limit must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) - - # If neither `order_by` nor `dir` is set, set the default order - # to newest media is on top for backward compatibility. 
- if b"order_by" not in request.args and b"dir" not in request.args: - order_by = MediaSortOrder.CREATED_TS.value - direction = "b" - else: - order_by = parse_string( - request, - "order_by", - default=MediaSortOrder.CREATED_TS.value, - allowed_values=( - MediaSortOrder.MEDIA_ID.value, - MediaSortOrder.UPLOAD_NAME.value, - MediaSortOrder.CREATED_TS.value, - MediaSortOrder.LAST_ACCESS_TS.value, - MediaSortOrder.MEDIA_LENGTH.value, - MediaSortOrder.MEDIA_TYPE.value, - MediaSortOrder.QUARANTINED_BY.value, - MediaSortOrder.SAFE_FROM_QUARANTINE.value, - ), - ) - direction = parse_string( - request, "dir", default="f", allowed_values=("f", "b") - ) - - media, _ = await self.store.get_local_media_by_user_paginate( - start, limit, user_id, order_by, direction - ) - - deleted_media, total = await self.media_repository.delete_local_media_ids( - ([row["media_id"] for row in media]) - ) - - return 200, {"deleted_media": deleted_media, "total": total} - - class UserTokenRestServlet(RestServlet): """An admin API for logging in as a user. diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index ed238b2141..c5c54564be 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -1141,10 +1141,10 @@ def register_servlets(hs: "HomeServer", http_server, is_worker=False): JoinedRoomsRestServlet(hs).register(http_server) RoomAliasListServlet(hs).register(http_server) SearchRestServlet(hs).register(http_server) + RoomCreateRestServlet(hs).register(http_server) # Some servlets only get registered for the main process. if not is_worker: - RoomCreateRestServlet(hs).register(http_server) RoomForgetRestServlet(hs).register(http_server) diff --git a/synapse/rest/synapse/client/new_user_consent.py b/synapse/rest/synapse/client/new_user_consent.py index 488b97b32e..fc62a09b7f 100644 --- a/synapse/rest/synapse/client/new_user_consent.py +++ b/synapse/rest/synapse/client/new_user_consent.py @@ -46,6 +46,8 @@ class NewUserConsentResource(DirectServeHtmlResource): self._consent_version = hs.config.consent.user_consent_version def template_search_dirs(): + if hs.config.server.custom_template_directory: + yield hs.config.server.custom_template_directory if hs.config.sso.sso_template_dir: yield hs.config.sso.sso_template_dir yield hs.config.sso.default_template_dir diff --git a/synapse/rest/synapse/client/pick_username.py b/synapse/rest/synapse/client/pick_username.py index ab24ec0a8e..c15b83c387 100644 --- a/synapse/rest/synapse/client/pick_username.py +++ b/synapse/rest/synapse/client/pick_username.py @@ -74,6 +74,8 @@ class AccountDetailsResource(DirectServeHtmlResource): self._sso_handler = hs.get_sso_handler() def template_search_dirs(): + if hs.config.server.custom_template_directory: + yield hs.config.server.custom_template_directory if hs.config.sso.sso_template_dir: yield hs.config.sso.sso_template_dir yield hs.config.sso.default_template_dir diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py index 8d9f07111d..01b918e12e 100644 --- a/synapse/storage/databases/main/__init__.py +++ b/synapse/storage/databases/main/__init__.py @@ -127,9 +127,6 @@ class DataStore( self._clock = hs.get_clock() self.database_engine = database.engine - self._public_room_id_gen = StreamIdGenerator( - db_conn, "public_room_list_stream", "stream_id" - ) self._device_list_id_gen = StreamIdGenerator( db_conn, "device_lists_stream", @@ -170,6 +167,7 @@ class DataStore( sequence_name="cache_invalidation_stream_seq", writers=[], ) + else: self._cache_id_gen = 
None diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py index 443e5f3315..f98b892598 100644 --- a/synapse/storage/databases/main/room.py +++ b/synapse/storage/databases/main/room.py @@ -73,6 +73,40 @@ class RoomWorkerStore(SQLBaseStore): self.config = hs.config + async def store_room( + self, + room_id: str, + room_creator_user_id: str, + is_public: bool, + room_version: RoomVersion, + ): + """Stores a room. + + Args: + room_id: The desired room ID, can be None. + room_creator_user_id: The user ID of the room creator. + is_public: True to indicate that this room should appear in + public room lists. + room_version: The version of the room + Raises: + StoreError if the room could not be stored. + """ + try: + await self.db_pool.simple_insert( + "rooms", + { + "room_id": room_id, + "creator": room_creator_user_id, + "is_public": is_public, + "room_version": room_version.identifier, + "has_auth_chain_index": True, + }, + desc="store_room", + ) + except Exception as e: + logger.error("store_room with room_id=%s failed: %s", room_id, e) + raise StoreError(500, "Problem creating room.") + async def get_room(self, room_id: str) -> dict: """Retrieve a room. @@ -890,55 +924,6 @@ class RoomWorkerStore(SQLBaseStore): return total_media_quarantined - async def get_all_new_public_rooms( - self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: - """Get updates for public rooms replication stream. - - Args: - instance_name: The writer we want to fetch updates from. Unused - here since there is only ever one writer. - last_id: The token to fetch updates from. Exclusive. - current_id: The token to fetch updates up to. Inclusive. - limit: The requested limit for the number of rows to return. The - function may return more or fewer rows. - - Returns: - A tuple consisting of: the updates, a token to use to fetch - subsequent updates, and whether we returned fewer rows than exists - between the requested tokens due to the limit. - - The token returned can be used in a subsequent call to this - function to get further updatees. - - The updates are a list of 2-tuples of stream ID and the row data - """ - if last_id == current_id: - return [], current_id, False - - def get_all_new_public_rooms(txn): - sql = """ - SELECT stream_id, room_id, visibility, appservice_id, network_id - FROM public_room_list_stream - WHERE stream_id > ? AND stream_id <= ? - ORDER BY stream_id ASC - LIMIT ? - """ - - txn.execute(sql, (last_id, current_id, limit)) - updates = [(row[0], row[1:]) for row in txn] - limited = False - upto_token = current_id - if len(updates) >= limit: - upto_token = updates[-1][0] - limited = True - - return updates, upto_token, limited - - return await self.db_pool.runInteraction( - "get_all_new_public_rooms", get_all_new_public_rooms - ) - async def get_rooms_for_retention_period_in_range( self, min_ms: Optional[int], max_ms: Optional[int], include_null: bool = False ) -> Dict[str, dict]: @@ -1391,57 +1376,6 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore, SearchStore): lock=False, ) - async def store_room( - self, - room_id: str, - room_creator_user_id: str, - is_public: bool, - room_version: RoomVersion, - ): - """Stores a room. - - Args: - room_id: The desired room ID, can be None. - room_creator_user_id: The user ID of the room creator. - is_public: True to indicate that this room should appear in - public room lists. 
- room_version: The version of the room - Raises: - StoreError if the room could not be stored. - """ - try: - - def store_room_txn(txn, next_id): - self.db_pool.simple_insert_txn( - txn, - "rooms", - { - "room_id": room_id, - "creator": room_creator_user_id, - "is_public": is_public, - "room_version": room_version.identifier, - "has_auth_chain_index": True, - }, - ) - if is_public: - self.db_pool.simple_insert_txn( - txn, - table="public_room_list_stream", - values={ - "stream_id": next_id, - "room_id": room_id, - "visibility": is_public, - }, - ) - - async with self._public_room_id_gen.get_next() as next_id: - await self.db_pool.runInteraction( - "store_room_txn", store_room_txn, next_id - ) - except Exception as e: - logger.error("store_room with room_id=%s failed: %s", room_id, e) - raise StoreError(500, "Problem creating room.") - async def maybe_store_room_on_outlier_membership( self, room_id: str, room_version: RoomVersion ): @@ -1470,49 +1404,14 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore, SearchStore): lock=False, ) - async def set_room_is_public(self, room_id, is_public): - def set_room_is_public_txn(txn, next_id): - self.db_pool.simple_update_one_txn( - txn, - table="rooms", - keyvalues={"room_id": room_id}, - updatevalues={"is_public": is_public}, - ) - - entries = self.db_pool.simple_select_list_txn( - txn, - table="public_room_list_stream", - keyvalues={ - "room_id": room_id, - "appservice_id": None, - "network_id": None, - }, - retcols=("stream_id", "visibility"), - ) - - entries.sort(key=lambda r: r["stream_id"]) - - add_to_stream = True - if entries: - add_to_stream = bool(entries[-1]["visibility"]) != is_public - - if add_to_stream: - self.db_pool.simple_insert_txn( - txn, - table="public_room_list_stream", - values={ - "stream_id": next_id, - "room_id": room_id, - "visibility": is_public, - "appservice_id": None, - "network_id": None, - }, - ) + async def set_room_is_public(self, room_id: str, is_public: bool) -> None: + await self.db_pool.simple_update_one( + table="rooms", + keyvalues={"room_id": room_id}, + updatevalues={"is_public": is_public}, + desc="set_room_is_public", + ) - async with self._public_room_id_gen.get_next() as next_id: - await self.db_pool.runInteraction( - "set_room_is_public", set_room_is_public_txn, next_id - ) self.hs.get_notifier().on_new_replication_data() async def set_room_is_public_appservice( @@ -1533,68 +1432,33 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore, SearchStore): list. """ - def set_room_is_public_appservice_txn(txn, next_id): - if is_public: - try: - self.db_pool.simple_insert_txn( - txn, - table="appservice_room_list", - values={ - "appservice_id": appservice_id, - "network_id": network_id, - "room_id": room_id, - }, - ) - except self.database_engine.module.IntegrityError: - # We've already inserted, nothing to do. 
- return - else: - self.db_pool.simple_delete_txn( - txn, - table="appservice_room_list", - keyvalues={ - "appservice_id": appservice_id, - "network_id": network_id, - "room_id": room_id, - }, - ) - - entries = self.db_pool.simple_select_list_txn( - txn, - table="public_room_list_stream", + if is_public: + await self.db_pool.simple_upsert( + table="appservice_room_list", keyvalues={ + "appservice_id": appservice_id, + "network_id": network_id, "room_id": room_id, + }, + values={}, + insertion_values={ "appservice_id": appservice_id, "network_id": network_id, + "room_id": room_id, }, - retcols=("stream_id", "visibility"), + desc="set_room_is_public_appservice_true", ) - - entries.sort(key=lambda r: r["stream_id"]) - - add_to_stream = True - if entries: - add_to_stream = bool(entries[-1]["visibility"]) != is_public - - if add_to_stream: - self.db_pool.simple_insert_txn( - txn, - table="public_room_list_stream", - values={ - "stream_id": next_id, - "room_id": room_id, - "visibility": is_public, - "appservice_id": appservice_id, - "network_id": network_id, - }, - ) - - async with self._public_room_id_gen.get_next() as next_id: - await self.db_pool.runInteraction( - "set_room_is_public_appservice", - set_room_is_public_appservice_txn, - next_id, + else: + await self.db_pool.simple_delete( + table="appservice_room_list", + keyvalues={ + "appservice_id": appservice_id, + "network_id": network_id, + "room_id": room_id, + }, + desc="set_room_is_public_appservice_false", ) + self.hs.get_notifier().on_new_replication_data() async def add_event_report( @@ -1787,9 +1651,6 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore, SearchStore): "get_event_reports_paginate", _get_event_reports_paginate_txn ) - def get_current_public_room_stream_id(self): - return self._public_room_id_gen.get_current_token() - async def block_room(self, room_id: str, user_id: str) -> None: """Marks the room as blocked. Can be called multiple times. diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index 7e0687e197..a5bc0ee8a5 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -SCHEMA_VERSION = 62 +SCHEMA_VERSION = 63 """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the @@ -25,6 +25,11 @@ for more information on how this works. Changes in SCHEMA_VERSION = 61: - The `user_stats_historical` and `room_stats_historical` tables are not written and are not read (previously, they were written but not read). + +Changes in SCHEMA_VERSION = 63: + - The `public_room_list_stream` table is not written nor read to + (previously, it was written and read to, but not for any significant purpose). + https://github.com/matrix-org/synapse/pull/10565 """ |
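The `synapse/module_api/__init__.py` hunk above adds a `run_on_all_instances` keyword (default `False`) to `ModuleApi.looping_background_call`. As a usage illustration only, not part of the patch, a module could opt into running its periodic task on every worker as sketched below; `ExampleModule` and `_prune_cache` are hypothetical names, and the `(config, api)` constructor follows the usual Synapse module convention.

```python
# A minimal sketch of a Synapse module using the run_on_all_instances keyword
# introduced in this merge. ExampleModule and _prune_cache are hypothetical.
from synapse.module_api import ModuleApi


class ExampleModule:
    def __init__(self, config: dict, api: ModuleApi):
        self._api = api

        # Call self._prune_cache every 60 seconds. With
        # run_on_all_instances=True the loop is started on every worker,
        # rather than only on the instance configured to run background tasks.
        self._api.looping_background_call(
            self._prune_cache,
            60 * 1000,
            desc="prune_cache",
            run_on_all_instances=True,
        )

    async def _prune_cache(self) -> None:
        # Placeholder for whatever periodic work the module performs.
        pass
```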