diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000000..d6cd75f1d0
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,2 @@
+# Automatically request reviews from the synapse-core team when a pull request comes in.
+* @matrix-org/synapse-core
\ No newline at end of file
diff --git a/changelog.d/10894.feature b/changelog.d/10894.feature
new file mode 100644
index 0000000000..a4f968bed1
--- /dev/null
+++ b/changelog.d/10894.feature
@@ -0,0 +1 @@
+Add a `user_may_send_3pid_invite` spam checker callback for modules to allow or deny 3PID invites.
diff --git a/changelog.d/10910.feature b/changelog.d/10910.feature
new file mode 100644
index 0000000000..aee139f8b6
--- /dev/null
+++ b/changelog.d/10910.feature
@@ -0,0 +1 @@
+Add a spam checker callback to allow or deny room joins.
diff --git a/changelog.d/10985.misc b/changelog.d/10985.misc
new file mode 100644
index 0000000000..586a0b3a96
--- /dev/null
+++ b/changelog.d/10985.misc
@@ -0,0 +1 @@
+Use direct references to config flags.
diff --git a/changelog.d/10990.doc b/changelog.d/10990.doc
new file mode 100644
index 0000000000..51290d6200
--- /dev/null
+++ b/changelog.d/10990.doc
@@ -0,0 +1 @@
+Add additional content to the Welcome and Overview page of the documentation.
diff --git a/changelog.d/10994.misc b/changelog.d/10994.misc
new file mode 100644
index 0000000000..0a8538b01e
--- /dev/null
+++ b/changelog.d/10994.misc
@@ -0,0 +1 @@
+Add a `CODEOWNERS` file to automatically request reviews from the `@matrix-org/synapse-core` team on new pull requests.
diff --git a/changelog.d/10995.bugfix b/changelog.d/10995.bugfix
new file mode 100644
index 0000000000..3eef96f3db
--- /dev/null
+++ b/changelog.d/10995.bugfix
@@ -0,0 +1 @@
+Correct a bugfix introduced in Synapse v1.44.0 that would not catch every error when the connection breaks before a response could be written to it.
diff --git a/changelog.d/11002.bugfix b/changelog.d/11002.bugfix
new file mode 100644
index 0000000000..cf894a6314
--- /dev/null
+++ b/changelog.d/11002.bugfix
@@ -0,0 +1 @@
+Fix a long-standing bug where local users' per-room nicknames/avatars were visible to anyone who could see you in the user directory.
diff --git a/changelog.d/11004.misc b/changelog.d/11004.misc
new file mode 100644
index 0000000000..821033710a
--- /dev/null
+++ b/changelog.d/11004.misc
@@ -0,0 +1 @@
+Add further type hints to `synapse.state`.
\ No newline at end of file
diff --git a/changelog.d/11010.misc b/changelog.d/11010.misc
new file mode 100644
index 0000000000..9a765435db
--- /dev/null
+++ b/changelog.d/11010.misc
@@ -0,0 +1 @@
+Clean up some of the federation event authentication code for clarity.
diff --git a/changelog.d/11011.misc b/changelog.d/11011.misc
new file mode 100644
index 0000000000..9a765435db
--- /dev/null
+++ b/changelog.d/11011.misc
@@ -0,0 +1 @@
+Clean up some of the federation event authentication code for clarity.
diff --git a/changelog.d/11019.misc b/changelog.d/11019.misc
new file mode 100644
index 0000000000..aae5ee62b2
--- /dev/null
+++ b/changelog.d/11019.misc
@@ -0,0 +1 @@
+Ensure that cache config tests do not share state.
diff --git a/docs/development/contributing_guide.md b/docs/development/contributing_guide.md
index 713366368c..580a4f7f98 100644
--- a/docs/development/contributing_guide.md
+++ b/docs/development/contributing_guide.md
@@ -63,7 +63,7 @@ TBD
# 5. Get in touch.
-Join our developer community on Matrix: #synapse-dev:matrix.org !
+Join our developer community on Matrix: [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org)!
# 6. Pick an issue.
diff --git a/docs/modules/spam_checker_callbacks.md b/docs/modules/spam_checker_callbacks.md
index 7920ac5f8f..787e99074a 100644
--- a/docs/modules/spam_checker_callbacks.md
+++ b/docs/modules/spam_checker_callbacks.md
@@ -19,6 +19,21 @@ either a `bool` to indicate whether the event must be rejected because of spam,
to indicate the event must be rejected because of spam and to give a rejection reason to
forward to clients.
+### `user_may_join_room`
+
+```python
+async def user_may_join_room(user: str, room: str, is_invited: bool) -> bool
+```
+
+Called when a user is trying to join a room. The module must return a `bool` to indicate
+whether the user can join the room. The user is represented by their Matrix user ID (e.g.
+`@alice:example.com`) and the room is represented by its Matrix ID (e.g.
+`!room:example.com`). The module is also given a boolean to indicate whether the user
+currently has a pending invite in the room.
+
+This callback isn't called if the join is performed by a server administrator, or in the
+context of a room creation.
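+
+For example, a call to this callback for a user trying to join a room they haven't been
+invited to might look like this:
+
+```python
+await user_may_join_room(
+    "@alice:example.com",    # The Matrix user ID of the user trying to join the room
+    "!room:example.com",     # The Matrix ID of the room being joined
+    False,                   # The user does not have a pending invite in the room
+)
+```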
+
### `user_may_invite`
```python
@@ -29,6 +44,41 @@ Called when processing an invitation. The module must return a `bool` indicating
the inviter can invite the invitee to the given room. Both inviter and invitee are
represented by their Matrix user ID (e.g. `@alice:example.com`).
+### `user_may_send_3pid_invite`
+
+```python
+async def user_may_send_3pid_invite(
+ inviter: str,
+ medium: str,
+ address: str,
+ room_id: str,
+) -> bool
+```
+
+Called when processing an invitation using a third-party identifier (also called a 3PID,
+e.g. an email address or a phone number). The module must return a `bool` indicating
+whether the inviter can invite the invitee to the given room.
+
+The inviter is represented by their Matrix user ID (e.g. `@alice:example.com`), and the
+invitee is represented by its medium (e.g. "email") and its address
+(e.g. `alice@example.com`). See [the Matrix specification](https://matrix.org/docs/spec/appendices#pid-types)
+for more information regarding third-party identifiers.
+
+For example, a call to this callback to send an invitation to the email address
+`alice@example.com` would look like this:
+
+```python
+await user_may_send_3pid_invite(
+ "@bob:example.com", # The inviter's user ID
+ "email", # The medium of the 3PID to invite
+ "alice@example.com", # The address of the 3PID to invite
+ "!some_room:example.com", # The ID of the room to send the invite into
+)
+```
+
+**Note**: If the third-party identifier is already associated with a Matrix user ID,
+[`user_may_invite`](#user_may_invite) will be used instead.
+
### `user_may_create_room`
```python
diff --git a/docs/welcome_and_overview.md b/docs/welcome_and_overview.md
index 9882d9f159..aab2d6b4f0 100644
--- a/docs/welcome_and_overview.md
+++ b/docs/welcome_and_overview.md
@@ -3,3 +3,77 @@
Welcome to the documentation repository for Synapse, a
[Matrix](https://matrix.org) homeserver implementation developed by the matrix.org core
team.
+
+## Installing and using Synapse
+
+This documentation covers topics for **installation**, **configuration** and
+**maintenance** of your Synapse process:
+
+* Learn how to [install](setup/installation.md) and
+ [configure](usage/configuration/index.html) your own instance, perhaps with [Single
+ Sign-On](usage/configuration/user_authentication/index.html).
+
+* See how to [upgrade](upgrade.md) between Synapse versions.
+
+* Administer your instance using the [Admin
+ API](usage/administration/admin_api/index.html), installing [pluggable
+ modules](modules/index.html), or by accessing the [manhole](manhole.md).
+
+* Learn how to [read log lines](usage/administration/request_log.md), configure
+ [logging](usage/configuration/logging_sample_config.md) or set up [structured
+ logging](structured_logging.md).
+
+* Scale Synapse through additional [worker processes](workers.md).
+
+* Set up [monitoring and metrics](metrics-howto.md) to keep an eye on your
+ Synapse instance's performance.
+
+## Developing on Synapse
+
+Contributions are welcome! Synapse is primarily written in
+[Python](https://python.org). As a developer, you may be interested in the
+following documentation:
+
+* Read the [Contributing Guide](development/contributing_guide.md). It is meant
+ to walk new contributors through the process of developing and submitting a
+ change to the Synapse codebase (which is [hosted on
+ GitHub](https://github.com/matrix-org/synapse)).
+
+* Set up your [development
+ environment](development/contributing_guide.md#2-what-do-i-need), then learn
+ how to [lint](development/contributing_guide.md#run-the-linters) and
+ [test](development/contributing_guide.md#8-test-test-test) your code.
+
+* Look at [the issue tracker](https://github.com/matrix-org/synapse/issues) for
+ bugs to fix or features to add. If you're new, it may be best to start with
+ those labeled [good first
+ issue](https://github.com/matrix-org/synapse/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22).
+
+* Understand [how Synapse is
+ built](development/internal_documentation/index.html), how to [migrate
+ database schemas](development/database_schema.md), learn about
+ [federation](federate.md) and how to [set up a local
+ federation](federate.md#running-a-demo-federation-of-synapses) for development.
+
+* We like to keep our `git` history clean. [Learn](development/git.md) how to
+ do so!
+
+* And finally, contribute to this documentation! Its source is
+  [located here](https://github.com/matrix-org/synapse/tree/develop/docs).
+
+## Donating to Synapse development
+
+Want to help keep Synapse going but don't know how to code? Synapse is a
+[Matrix.org Foundation](https://matrix.org) project. Consider becoming a
+supporter on [Liberapay](https://liberapay.com/matrixdotorg),
+[Patreon](https://patreon.com/matrixdotorg) or through
+[PayPal](https://paypal.me/matrixdotorg) via a one-time donation.
+
+If you are an organisation or enterprise and would like to sponsor development,
+reach out to us over email at: support (at) matrix.org
+
+## Reporting a security vulnerability
+
+If you've found a security issue in Synapse or any other Matrix.org Foundation
+project, please report it to us in accordance with our [Security Disclosure
+Policy](https://www.matrix.org/security-disclosure-policy/). Thank you!
diff --git a/mypy.ini b/mypy.ini
index 857327cd85..c63b01fe99 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -99,6 +99,9 @@ disallow_untyped_defs = True
[mypy-synapse.rest.*]
disallow_untyped_defs = True
+[mypy-synapse.state.*]
+disallow_untyped_defs = True
+
[mypy-synapse.util.batching_queue]
disallow_untyped_defs = True
diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db
index fa6ac6d93a..a947d9e49e 100755
--- a/scripts/synapse_port_db
+++ b/scripts/synapse_port_db
@@ -215,7 +215,7 @@ class MockHomeserver:
def __init__(self, config):
self.clock = Clock(reactor)
self.config = config
- self.hostname = config.server_name
+ self.hostname = config.server.server_name
self.version_string = "Synapse/" + get_version_string(synapse)
def get_clock(self):
@@ -583,7 +583,7 @@ class Porter(object):
return
self.postgres_store = self.build_db_store(
- self.hs_config.get_single_database()
+ self.hs_config.database.get_single_database()
)
await self.run_background_updates_on_postgres()
diff --git a/scripts/update_synapse_database b/scripts/update_synapse_database
index 26b29b0b45..6c088bad93 100755
--- a/scripts/update_synapse_database
+++ b/scripts/update_synapse_database
@@ -36,7 +36,7 @@ class MockHomeserver(HomeServer):
def __init__(self, config, **kwargs):
super(MockHomeserver, self).__init__(
- config.server_name, reactor=reactor, config=config, **kwargs
+ config.server.server_name, reactor=reactor, config=config, **kwargs
)
self.version_string = "Synapse/" + get_version_string(synapse)
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 749bc1deb9..4a204a5823 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -301,7 +301,7 @@ def refresh_certificate(hs):
if not hs.config.server.has_tls_listener():
return
- hs.config.read_certificate_from_disk()
+ hs.config.tls.read_certificate_from_disk()
hs.tls_server_context_factory = context_factory.ServerContextFactory(hs.config)
if hs._listening_services:
diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py
index 556bcc124e..13d20af457 100644
--- a/synapse/app/admin_cmd.py
+++ b/synapse/app/admin_cmd.py
@@ -197,9 +197,9 @@ def start(config_options):
# Explicitly disable background processes
config.server.update_user_directory = False
config.worker.run_background_tasks = False
- config.start_pushers = False
+ config.worker.start_pushers = False
config.pusher_shard_config.instances = []
- config.send_federation = False
+ config.worker.send_federation = False
config.federation_shard_config.instances = []
synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 2b2d4bbf83..422f03cc04 100644
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -234,7 +234,7 @@ class SynapseHomeServer(HomeServer):
)
if name in ["media", "federation", "client"]:
- if self.config.media.enable_media_repo:
+ if self.config.server.enable_media_repo:
media_repo = self.get_media_repository_resource()
resources.update(
{MEDIA_PREFIX: media_repo, LEGACY_MEDIA_PREFIX: media_repo}
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index 26152b0924..7c4428a138 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -118,21 +118,6 @@ class Config:
"synapse", "res/templates"
)
- def __getattr__(self, item: str) -> Any:
- """
- Try and fetch a configuration option that does not exist on this class.
-
- This is so that existing configs that rely on `self.value`, where value
- is actually from a different config section, continue to work.
- """
- if item in ["generate_config_section", "read_config"]:
- raise AttributeError(item)
-
- if self.root is None:
- raise AttributeError(item)
- else:
- return self.root._get_unclassed_config(self.section, item)
-
@staticmethod
def parse_size(value):
if isinstance(value, int):
@@ -289,7 +274,9 @@ class Config:
env.filters.update(
{
"format_ts": _format_ts_filter,
- "mxc_to_http": _create_mxc_to_http_filter(self.public_baseurl),
+ "mxc_to_http": _create_mxc_to_http_filter(
+ self.root.server.public_baseurl
+ ),
}
)
@@ -311,8 +298,6 @@ class RootConfig:
config_classes = []
def __init__(self):
- self._configs = OrderedDict()
-
for config_class in self.config_classes:
if config_class.section is None:
raise ValueError("%r requires a section name" % (config_class,))
@@ -321,42 +306,7 @@ class RootConfig:
conf = config_class(self)
except Exception as e:
raise Exception("Failed making %s: %r" % (config_class.section, e))
- self._configs[config_class.section] = conf
-
- def __getattr__(self, item: str) -> Any:
- """
- Redirect lookups on this object either to config objects, or values on
- config objects, so that `config.tls.blah` works, as well as legacy uses
- of things like `config.server.server_name`. It will first look up the config
- section name, and then values on those config classes.
- """
- if item in self._configs.keys():
- return self._configs[item]
-
- return self._get_unclassed_config(None, item)
-
- def _get_unclassed_config(self, asking_section: Optional[str], item: str):
- """
- Fetch a config value from one of the instantiated config classes that
- has not been fetched directly.
-
- Args:
- asking_section: If this check is coming from a Config child, which
- one? This section will not be asked if it has the value.
- item: The configuration value key.
-
- Raises:
- AttributeError if no config classes have the config key. The body
- will contain what sections were checked.
- """
- for key, val in self._configs.items():
- if key == asking_section:
- continue
-
- if item in dir(val):
- return getattr(val, item)
-
- raise AttributeError(item, "not found in %s" % (list(self._configs.keys()),))
+ setattr(self, config_class.section, conf)
def invoke_all(self, func_name: str, *args, **kwargs) -> MutableMapping[str, Any]:
"""
@@ -373,9 +323,11 @@ class RootConfig:
"""
res = OrderedDict()
- for name, config in self._configs.items():
+ for config_class in self.config_classes:
+ config = getattr(self, config_class.section)
+
if hasattr(config, func_name):
- res[name] = getattr(config, func_name)(*args, **kwargs)
+ res[config_class.section] = getattr(config, func_name)(*args, **kwargs)
return res
diff --git a/synapse/config/account_validity.py b/synapse/config/account_validity.py
index ffaffc4931..b56c2a24df 100644
--- a/synapse/config/account_validity.py
+++ b/synapse/config/account_validity.py
@@ -76,7 +76,7 @@ class AccountValidityConfig(Config):
)
if self.account_validity_renew_by_email_enabled:
- if not self.public_baseurl:
+ if not self.root.server.public_baseurl:
raise ConfigError("Can't send renewal emails without 'public_baseurl'")
# Load account validity templates.
diff --git a/synapse/config/cas.py b/synapse/config/cas.py
index 901f4123e1..9b58ecf3d8 100644
--- a/synapse/config/cas.py
+++ b/synapse/config/cas.py
@@ -37,7 +37,7 @@ class CasConfig(Config):
# The public baseurl is required because it is used by the redirect
# template.
- public_baseurl = self.public_baseurl
+ public_baseurl = self.root.server.public_baseurl
if not public_baseurl:
raise ConfigError("cas_config requires a public_baseurl to be set")
diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py
index 936abe6178..8ff59aa2f8 100644
--- a/synapse/config/emailconfig.py
+++ b/synapse/config/emailconfig.py
@@ -19,7 +19,6 @@ import email.utils
import logging
import os
from enum import Enum
-from typing import Optional
import attr
@@ -135,7 +134,7 @@ class EmailConfig(Config):
# msisdn is currently always remote while Synapse does not support any method of
# sending SMS messages
ThreepidBehaviour.REMOTE
- if self.account_threepid_delegate_email
+ if self.root.registration.account_threepid_delegate_email
else ThreepidBehaviour.LOCAL
)
# Prior to Synapse v1.4.0, there was another option that defined whether Synapse would
@@ -144,7 +143,7 @@ class EmailConfig(Config):
# identity server in the process.
self.using_identity_server_from_trusted_list = False
if (
- not self.account_threepid_delegate_email
+ not self.root.registration.account_threepid_delegate_email
and config.get("trust_identity_server_for_password_resets", False) is True
):
# Use the first entry in self.trusted_third_party_id_servers instead
@@ -156,7 +155,7 @@ class EmailConfig(Config):
# trusted_third_party_id_servers does not contain a scheme whereas
# account_threepid_delegate_email is expected to. Presume https
- self.account_threepid_delegate_email: Optional[str] = (
+ self.root.registration.account_threepid_delegate_email = (
"https://" + first_trusted_identity_server
)
self.using_identity_server_from_trusted_list = True
@@ -335,7 +334,7 @@ class EmailConfig(Config):
"client_base_url", email_config.get("riot_base_url", None)
)
- if self.account_validity_renew_by_email_enabled:
+ if self.root.account_validity.account_validity_renew_by_email_enabled:
expiry_template_html = email_config.get(
"expiry_template_html", "notice_expiry.html"
)
diff --git a/synapse/config/key.py b/synapse/config/key.py
index 94a9063043..015dbb8a67 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -145,11 +145,13 @@ class KeyConfig(Config):
# list of TrustedKeyServer objects
self.key_servers = list(
- _parse_key_servers(key_servers, self.federation_verify_certificates)
+ _parse_key_servers(
+ key_servers, self.root.tls.federation_verify_certificates
+ )
)
self.macaroon_secret_key = config.get(
- "macaroon_secret_key", self.registration_shared_secret
+ "macaroon_secret_key", self.root.registration.registration_shared_secret
)
if not self.macaroon_secret_key:
diff --git a/synapse/config/oidc.py b/synapse/config/oidc.py
index 7e67fbada1..10f5796330 100644
--- a/synapse/config/oidc.py
+++ b/synapse/config/oidc.py
@@ -58,7 +58,7 @@ class OIDCConfig(Config):
"Multiple OIDC providers have the idp_id %r." % idp_id
)
- public_baseurl = self.public_baseurl
+ public_baseurl = self.root.server.public_baseurl
if public_baseurl is None:
raise ConfigError("oidc_config requires a public_baseurl to be set")
self.oidc_callback_url = public_baseurl + "_synapse/client/oidc/callback"
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index 7cffdacfa5..a3d2a38c4c 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -45,7 +45,10 @@ class RegistrationConfig(Config):
account_threepid_delegates = config.get("account_threepid_delegates") or {}
self.account_threepid_delegate_email = account_threepid_delegates.get("email")
self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn")
- if self.account_threepid_delegate_msisdn and not self.public_baseurl:
+ if (
+ self.account_threepid_delegate_msisdn
+ and not self.root.server.public_baseurl
+ ):
raise ConfigError(
"The configuration option `public_baseurl` is required if "
"`account_threepid_delegate.msisdn` is set, such that "
@@ -85,7 +88,7 @@ class RegistrationConfig(Config):
if mxid_localpart:
# Convert the localpart to a full mxid.
self.auto_join_user_id = UserID(
- mxid_localpart, self.server_name
+ mxid_localpart, self.root.server.server_name
).to_string()
if self.autocreate_auto_join_rooms:
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 7481f3bf5f..69906a98d4 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -94,7 +94,7 @@ class ContentRepositoryConfig(Config):
# Only enable the media repo if either the media repo is enabled or the
# current worker app is the media repo.
if (
- self.enable_media_repo is False
+ self.root.server.enable_media_repo is False
and config.get("worker_app") != "synapse.app.media_repository"
):
self.can_load_media_repo = False
diff --git a/synapse/config/saml2.py b/synapse/config/saml2.py
index 05e983625d..9c51b6a25a 100644
--- a/synapse/config/saml2.py
+++ b/synapse/config/saml2.py
@@ -199,7 +199,7 @@ class SAML2Config(Config):
"""
import saml2
- public_baseurl = self.public_baseurl
+ public_baseurl = self.root.server.public_baseurl
if public_baseurl is None:
raise ConfigError("saml2_config requires a public_baseurl to be set")
diff --git a/synapse/config/server_notices.py b/synapse/config/server_notices.py
index 48bf3241b6..bde4e879d9 100644
--- a/synapse/config/server_notices.py
+++ b/synapse/config/server_notices.py
@@ -73,7 +73,9 @@ class ServerNoticesConfig(Config):
return
mxid_localpart = c["system_mxid_localpart"]
- self.server_notices_mxid = UserID(mxid_localpart, self.server_name).to_string()
+ self.server_notices_mxid = UserID(
+ mxid_localpart, self.root.server.server_name
+ ).to_string()
self.server_notices_mxid_display_name = c.get("system_mxid_display_name", None)
self.server_notices_mxid_avatar_url = c.get("system_mxid_avatar_url", None)
# todo: i18n
diff --git a/synapse/config/sso.py b/synapse/config/sso.py
index 524a7ff3aa..11a9b76aa0 100644
--- a/synapse/config/sso.py
+++ b/synapse/config/sso.py
@@ -103,8 +103,10 @@ class SSOConfig(Config):
# the client's.
# public_baseurl is an optional setting, so we only add the fallback's URL to the
# list if it's provided (because we can't figure out what that URL is otherwise).
- if self.public_baseurl:
- login_fallback_url = self.public_baseurl + "_matrix/static/client/login"
+ if self.root.server.public_baseurl:
+ login_fallback_url = (
+ self.root.server.public_baseurl + "_matrix/static/client/login"
+ )
self.sso_client_whitelist.append(login_fallback_url)
def generate_config_section(self, **kwargs):
diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py
index c389f70b8d..ae4c8ab257 100644
--- a/synapse/events/spamcheck.py
+++ b/synapse/events/spamcheck.py
@@ -44,7 +44,9 @@ CHECK_EVENT_FOR_SPAM_CALLBACK = Callable[
["synapse.events.EventBase"],
Awaitable[Union[bool, str]],
]
+USER_MAY_JOIN_ROOM_CALLBACK = Callable[[str, str, bool], Awaitable[bool]]
USER_MAY_INVITE_CALLBACK = Callable[[str, str, str], Awaitable[bool]]
+USER_MAY_SEND_3PID_INVITE_CALLBACK = Callable[[str, str, str, str], Awaitable[bool]]
USER_MAY_CREATE_ROOM_CALLBACK = Callable[[str], Awaitable[bool]]
USER_MAY_CREATE_ROOM_WITH_INVITES_CALLBACK = Callable[
[str, List[str], List[Dict[str, str]]], Awaitable[bool]
@@ -165,7 +167,11 @@ def load_legacy_spam_checkers(hs: "synapse.server.HomeServer"):
class SpamChecker:
def __init__(self):
self._check_event_for_spam_callbacks: List[CHECK_EVENT_FOR_SPAM_CALLBACK] = []
+ self._user_may_join_room_callbacks: List[USER_MAY_JOIN_ROOM_CALLBACK] = []
self._user_may_invite_callbacks: List[USER_MAY_INVITE_CALLBACK] = []
+ self._user_may_send_3pid_invite_callbacks: List[
+ USER_MAY_SEND_3PID_INVITE_CALLBACK
+ ] = []
self._user_may_create_room_callbacks: List[USER_MAY_CREATE_ROOM_CALLBACK] = []
self._user_may_create_room_with_invites_callbacks: List[
USER_MAY_CREATE_ROOM_WITH_INVITES_CALLBACK
@@ -187,7 +193,9 @@ class SpamChecker:
def register_callbacks(
self,
check_event_for_spam: Optional[CHECK_EVENT_FOR_SPAM_CALLBACK] = None,
+ user_may_join_room: Optional[USER_MAY_JOIN_ROOM_CALLBACK] = None,
user_may_invite: Optional[USER_MAY_INVITE_CALLBACK] = None,
+ user_may_send_3pid_invite: Optional[USER_MAY_SEND_3PID_INVITE_CALLBACK] = None,
user_may_create_room: Optional[USER_MAY_CREATE_ROOM_CALLBACK] = None,
user_may_create_room_with_invites: Optional[
USER_MAY_CREATE_ROOM_WITH_INVITES_CALLBACK
@@ -206,9 +214,17 @@ class SpamChecker:
if check_event_for_spam is not None:
self._check_event_for_spam_callbacks.append(check_event_for_spam)
+ if user_may_join_room is not None:
+ self._user_may_join_room_callbacks.append(user_may_join_room)
+
if user_may_invite is not None:
self._user_may_invite_callbacks.append(user_may_invite)
+ if user_may_send_3pid_invite is not None:
+ self._user_may_send_3pid_invite_callbacks.append(
+ user_may_send_3pid_invite,
+ )
+
if user_may_create_room is not None:
self._user_may_create_room_callbacks.append(user_may_create_room)
@@ -259,6 +275,24 @@ class SpamChecker:
return False
+    async def user_may_join_room(
+        self, user_id: str, room_id: str, is_invited: bool
+    ) -> bool:
+        """Checks if a given user is allowed to join a room.
+ Not called when a user creates a room.
+
+ Args:
+            user_id: The ID of the user wanting to join the room
+ room_id: The ID of the room the user wants to join
+ is_invited: Whether the user is invited into the room
+
+ Returns:
+ bool: Whether the user may join the room
+ """
+ for callback in self._user_may_join_room_callbacks:
+ if await callback(user_id, room_id, is_invited) is False:
+ return False
+
+ return True
+
async def user_may_invite(
self, inviter_userid: str, invitee_userid: str, room_id: str
) -> bool:
@@ -280,6 +314,31 @@ class SpamChecker:
return True
+ async def user_may_send_3pid_invite(
+ self, inviter_userid: str, medium: str, address: str, room_id: str
+ ) -> bool:
+ """Checks if a given user may invite a given threepid into the room
+
+ If this method returns false, the threepid invite will be rejected.
+
+ Note that if the threepid is already associated with a Matrix user ID, Synapse
+ will call user_may_invite with said user ID instead.
+
+ Args:
+ inviter_userid: The user ID of the sender of the invitation
+ medium: The 3PID's medium (e.g. "email")
+ address: The 3PID's address (e.g. "alice@example.com")
+ room_id: The room ID
+
+ Returns:
+ True if the user may send the invite, otherwise False
+ """
+ for callback in self._user_may_send_3pid_invite_callbacks:
+ if await callback(inviter_userid, medium, address, room_id) is False:
+ return False
+
+ return True
+
async def user_may_create_room(self, userid: str) -> bool:
"""Checks if a given user may create a room
diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py
index 5a5f124ddf..87e415df75 100644
--- a/synapse/handlers/account_validity.py
+++ b/synapse/handlers/account_validity.py
@@ -67,12 +67,8 @@ class AccountValidityHandler:
and self._account_validity_renew_by_email_enabled
):
# Don't do email-specific configuration if renewal by email is disabled.
- self._template_html = (
- hs.config.account_validity.account_validity_template_html
- )
- self._template_text = (
- hs.config.account_validity.account_validity_template_text
- )
+ self._template_html = hs.config.email.account_validity_template_html
+ self._template_text = hs.config.email.account_validity_template_text
self._renew_email_subject = (
hs.config.account_validity.account_validity_renew_email_subject
)
diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py
index 243be46267..f640b417b3 100644
--- a/synapse/handlers/federation_event.py
+++ b/synapse/handlers/federation_event.py
@@ -894,6 +894,9 @@ class FederationEventHandler:
backfilled=backfilled,
)
except AuthError as e:
+ # FIXME richvdh 2021/10/07 I don't think this is reachable. Let's log it
+ # for now
+ logger.exception("Unexpected AuthError from _check_event_auth")
raise FederationError("ERROR", e.code, e.msg, affected=event.event_id)
await self._run_push_actions_and_persist_event(event, context, backfilled)
@@ -1158,7 +1161,10 @@ class FederationEventHandler:
return
logger.info(
- "Persisting %i of %i remaining events", len(roots), len(event_map)
+ "Persisting %i of %i remaining outliers: %s",
+ len(roots),
+ len(event_map),
+ shortstr(e.event_id for e in roots),
)
await self._auth_and_persist_fetched_events_inner(origin, room_id, roots)
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 873e08258e..d40dbd761d 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -860,6 +860,7 @@ class RoomCreationHandler(BaseHandler):
"invite",
ratelimit=False,
content=content,
+ new_room=True,
)
for invite_3pid in invite_3pid_list:
@@ -962,6 +963,7 @@ class RoomCreationHandler(BaseHandler):
"join",
ratelimit=ratelimit,
content=creator_join_profile,
+ new_room=True,
)
# We treat the power levels override specially as this needs to be one
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index c8fb24a20c..eef337feeb 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -434,6 +434,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
third_party_signed: Optional[dict] = None,
ratelimit: bool = True,
content: Optional[dict] = None,
+ new_room: bool = False,
require_consent: bool = True,
outlier: bool = False,
prev_event_ids: Optional[List[str]] = None,
@@ -451,6 +452,8 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
third_party_signed: Information from a 3PID invite.
ratelimit: Whether to rate limit the request.
content: The content of the created event.
+ new_room: Whether the membership update is happening in the context of a room
+ creation.
require_consent: Whether consent is required.
outlier: Indicates whether the event is an `outlier`, i.e. if
it's from an arbitrary point and floating in the DAG as
@@ -485,6 +488,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
third_party_signed=third_party_signed,
ratelimit=ratelimit,
content=content,
+ new_room=new_room,
require_consent=require_consent,
outlier=outlier,
prev_event_ids=prev_event_ids,
@@ -504,6 +508,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
third_party_signed: Optional[dict] = None,
ratelimit: bool = True,
content: Optional[dict] = None,
+ new_room: bool = False,
require_consent: bool = True,
outlier: bool = False,
prev_event_ids: Optional[List[str]] = None,
@@ -523,6 +528,8 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
third_party_signed:
ratelimit:
content:
+ new_room: Whether the membership update is happening in the context of a room
+ creation.
require_consent:
outlier: Indicates whether the event is an `outlier`, i.e. if
it's from an arbitrary point and floating in the DAG as
@@ -726,6 +733,30 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
# so don't really fit into the general auth process.
raise AuthError(403, "Guest access not allowed")
+ # Figure out whether the user is a server admin to determine whether they
+ # should be able to bypass the spam checker.
+ if (
+ self._server_notices_mxid is not None
+ and requester.user.to_string() == self._server_notices_mxid
+ ):
+ # allow the server notices mxid to join rooms
+ bypass_spam_checker = True
+
+ else:
+ bypass_spam_checker = await self.auth.is_server_admin(requester.user)
+
+ inviter = await self._get_inviter(target.to_string(), room_id)
+ if (
+ not bypass_spam_checker
+ # We assume that if the spam checker allowed the user to create
+ # a room then they're allowed to join it.
+ and not new_room
+ and not await self.spam_checker.user_may_join_room(
+ target.to_string(), room_id, is_invited=inviter is not None
+ )
+ ):
+ raise SynapseError(403, "Not allowed to join this room")
+
# Check if a remote join should be performed.
remote_join, remote_room_hosts = await self._should_perform_remote_join(
target.to_string(), room_id, remote_room_hosts, content, is_host_in_room
@@ -1268,10 +1299,22 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
if invitee:
# Note that update_membership with an action of "invite" can raise
# a ShadowBanError, but this was done above already.
+ # We don't check the invite against the spamchecker(s) here (through
+ # user_may_invite) because we'll do it further down the line anyway (in
+ # update_membership_locked).
_, stream_id = await self.update_membership(
requester, UserID.from_string(invitee), room_id, "invite", txn_id=txn_id
)
else:
+ # Check if the spamchecker(s) allow this invite to go through.
+ if not await self.spam_checker.user_may_send_3pid_invite(
+ inviter_userid=requester.user.to_string(),
+ medium=medium,
+ address=address,
+ room_id=room_id,
+ ):
+ raise SynapseError(403, "Cannot send threepid invite")
+
stream_id = await self._make_and_store_3pid_invite(
requester,
id_server,
@@ -1468,8 +1511,11 @@ class RoomMemberMasterHandler(RoomMemberHandler):
if len(remote_room_hosts) == 0:
raise SynapseError(404, "No known servers")
- check_complexity = self.hs.config.limit_remote_rooms.enabled
- if check_complexity and self.hs.config.limit_remote_rooms.admins_can_join:
+ check_complexity = self.hs.config.server.limit_remote_rooms.enabled
+ if (
+ check_complexity
+ and self.hs.config.server.limit_remote_rooms.admins_can_join
+ ):
check_complexity = not await self.auth.is_server_admin(user)
if check_complexity:
diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py
index 97f60b5806..b7b1973346 100644
--- a/synapse/handlers/user_directory.py
+++ b/synapse/handlers/user_directory.py
@@ -203,6 +203,7 @@ class UserDirectoryHandler(StateDeltasHandler):
public_value=Membership.JOIN,
)
+ is_remote = not self.is_mine_id(state_key)
if change is MatchChange.now_false:
# Need to check if the server left the room entirely, if so
# we might need to remove all the users in that room
@@ -224,15 +225,20 @@ class UserDirectoryHandler(StateDeltasHandler):
else:
logger.debug("Server is still in room: %r", room_id)
- include_in_dir = not self.is_mine_id(
- state_key
- ) or await self.store.should_include_local_user_in_dir(state_key)
+ include_in_dir = (
+ is_remote
+ or await self.store.should_include_local_user_in_dir(state_key)
+ )
if include_in_dir:
if change is MatchChange.no_change:
- # Handle any profile changes
- await self._handle_profile_change(
- state_key, room_id, prev_event_id, event_id
- )
+ # Handle any profile changes for remote users.
+ # (For local users we are not forced to scan membership
+ # events; instead the rest of the application calls
+ # `handle_local_profile_change`.)
+ if is_remote:
+ await self._handle_profile_change(
+ state_key, room_id, prev_event_id, event_id
+ )
continue
if change is MatchChange.now_true: # The user joined
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 0df1bfbeef..897ba5e453 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -563,7 +563,10 @@ class _ByteProducer:
try:
self._request.registerProducer(self, True)
- except RuntimeError as e:
+ except AttributeError as e:
+            # Calling self._request.registerProducer might raise an AttributeError since
+            # the underlying Twisted code calls self._request.channel.registerProducer,
+            # and self._request.channel will be None if the connection was lost.
logger.info("Connection disconnected before response was written: %r", e)
# We drop our references to data we'll not use.
diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py
index 37769ace48..961c17762e 100644
--- a/synapse/replication/tcp/client.py
+++ b/synapse/replication/tcp/client.py
@@ -117,7 +117,7 @@ class ReplicationDataHandler:
self._instance_name = hs.get_instance_name()
self._typing_handler = hs.get_typing_handler()
- self._notify_pushers = hs.config.start_pushers
+ self._notify_pushers = hs.config.worker.start_pushers
self._pusher_pool = hs.get_pusherpool()
self._presence_handler = hs.get_presence_handler()
diff --git a/synapse/replication/tcp/handler.py b/synapse/replication/tcp/handler.py
index d64d1dbacd..6aa9318027 100644
--- a/synapse/replication/tcp/handler.py
+++ b/synapse/replication/tcp/handler.py
@@ -171,7 +171,10 @@ class ReplicationCommandHandler:
if hs.config.worker.worker_app is not None:
continue
- if stream.NAME == FederationStream.NAME and hs.config.send_federation:
+ if (
+ stream.NAME == FederationStream.NAME
+ and hs.config.worker.send_federation
+ ):
# We only support federation stream if federation sending
# has been disabled on the master.
continue
@@ -225,7 +228,7 @@ class ReplicationCommandHandler:
self._is_master = hs.config.worker.worker_app is None
self._federation_sender = None
- if self._is_master and not hs.config.send_federation:
+ if self._is_master and not hs.config.worker.send_federation:
self._federation_sender = hs.get_federation_sender()
self._server_notices_sender = None
diff --git a/synapse/rest/client/auth.py b/synapse/rest/client/auth.py
index c9ad35a3ad..9c15a04338 100644
--- a/synapse/rest/client/auth.py
+++ b/synapse/rest/client/auth.py
@@ -48,7 +48,7 @@ class AuthRestServlet(RestServlet):
self.auth_handler = hs.get_auth_handler()
self.registration_handler = hs.get_registration_handler()
self.recaptcha_template = hs.config.captcha.recaptcha_template
- self.terms_template = hs.config.terms_template
+ self.terms_template = hs.config.consent.terms_template
self.registration_token_template = (
hs.config.registration.registration_token_template
)
diff --git a/synapse/rest/client/push_rule.py b/synapse/rest/client/push_rule.py
index ecebc46e8d..6f796d5e50 100644
--- a/synapse/rest/client/push_rule.py
+++ b/synapse/rest/client/push_rule.py
@@ -61,7 +61,9 @@ class PushRuleRestServlet(RestServlet):
self.notifier = hs.get_notifier()
self._is_worker = hs.config.worker.worker_app is not None
- self._users_new_default_push_rules = hs.config.users_new_default_push_rules
+ self._users_new_default_push_rules = (
+ hs.config.server.users_new_default_push_rules
+ )
async def on_PUT(self, request: SynapseRequest, path: str) -> Tuple[int, JsonDict]:
if self._is_worker:
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index c981df3f18..5cf2e12575 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -118,7 +118,7 @@ class _StateCacheEntry:
else:
self.state_id = _gen_state_id()
- def __len__(self):
+ def __len__(self) -> int:
return len(self.state)
diff --git a/synapse/state/v1.py b/synapse/state/v1.py
index 017e6fd92d..ffe6207a3c 100644
--- a/synapse/state/v1.py
+++ b/synapse/state/v1.py
@@ -225,7 +225,7 @@ def _resolve_with_state(
conflicted_state_ids: StateMap[Set[str]],
auth_event_ids: StateMap[str],
state_map: Dict[str, EventBase],
-):
+) -> MutableStateMap[str]:
conflicted_state = {}
for key, event_ids in conflicted_state_ids.items():
events = [state_map[ev_id] for ev_id in event_ids if ev_id in state_map]
@@ -362,7 +362,7 @@ def _resolve_normal_events(
def _ordered_events(events: Iterable[EventBase]) -> List[EventBase]:
- def key_func(e):
+ def key_func(e: EventBase) -> Tuple[int, str]:
# we have to use utf-8 rather than ascii here because it turns out we allow
# people to send us events with non-ascii event IDs :/
return -int(e.depth), hashlib.sha1(e.event_id.encode("utf-8")).hexdigest()
diff --git a/synapse/state/v2.py b/synapse/state/v2.py
index 586b0e12fe..bd18eefd58 100644
--- a/synapse/state/v2.py
+++ b/synapse/state/v2.py
@@ -481,7 +481,7 @@ async def _reverse_topological_power_sort(
if idx % _AWAIT_AFTER_ITERATIONS == 0:
await clock.sleep(0)
- def _get_power_order(event_id):
+ def _get_power_order(event_id: str) -> Tuple[int, int, str]:
ev = event_map[event_id]
pl = event_to_pl[event_id]
diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py
index a7fb8cd848..b81e33964a 100644
--- a/synapse/storage/databases/main/push_rule.py
+++ b/synapse/storage/databases/main/push_rule.py
@@ -101,7 +101,9 @@ class PushRulesWorkerStore(
prefilled_cache=push_rules_prefill,
)
- self._users_new_default_push_rules = hs.config.users_new_default_push_rules
+ self._users_new_default_push_rules = (
+ hs.config.server.users_new_default_push_rules
+ )
@abc.abstractmethod
def get_max_push_rules_stream_id(self):
diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py
index de262fbf5a..7de4ad7f9b 100644
--- a/synapse/storage/databases/main/registration.py
+++ b/synapse/storage/databases/main/registration.py
@@ -1778,7 +1778,9 @@ class RegistrationStore(StatsStore, RegistrationBackgroundUpdateStore):
def __init__(self, database: DatabasePool, db_conn: Connection, hs: "HomeServer"):
super().__init__(database, db_conn, hs)
- self._ignore_unknown_session_error = hs.config.request_token_inhibit_3pid_errors
+ self._ignore_unknown_session_error = (
+ hs.config.server.request_token_inhibit_3pid_errors
+ )
self._access_tokens_id_gen = IdGenerator(db_conn, "access_tokens", "id")
self._refresh_tokens_id_gen = IdGenerator(db_conn, "refresh_tokens", "id")
diff --git a/tests/config/test_base.py b/tests/config/test_base.py
index baa5313fb3..6a52f862f4 100644
--- a/tests/config/test_base.py
+++ b/tests/config/test_base.py
@@ -14,23 +14,28 @@
import os.path
import tempfile
+from unittest.mock import Mock
from synapse.config import ConfigError
+from synapse.config._base import Config
from synapse.util.stringutils import random_string
from tests import unittest
-class BaseConfigTestCase(unittest.HomeserverTestCase):
- def prepare(self, reactor, clock, hs):
- self.hs = hs
+class BaseConfigTestCase(unittest.TestCase):
+ def setUp(self):
+ # The root object needs a server property with a public_baseurl.
+ root = Mock()
+ root.server.public_baseurl = "http://test"
+ self.config = Config(root)
def test_loading_missing_templates(self):
# Use a temporary directory that exists on the system, but that isn't likely to
# contain template files
with tempfile.TemporaryDirectory() as tmp_dir:
# Attempt to load an HTML template from our custom template directory
- template = self.hs.config.read_templates(["sso_error.html"], (tmp_dir,))[0]
+ template = self.config.read_templates(["sso_error.html"], (tmp_dir,))[0]
# If no errors, we should've gotten the default template instead
@@ -60,7 +65,7 @@ class BaseConfigTestCase(unittest.HomeserverTestCase):
# Attempt to load the template from our custom template directory
template = (
- self.hs.config.read_templates([template_filename], (tmp_dir,))
+ self.config.read_templates([template_filename], (tmp_dir,))
)[0]
# Render the template
@@ -97,7 +102,7 @@ class BaseConfigTestCase(unittest.HomeserverTestCase):
# Retrieve the template.
template = (
- self.hs.config.read_templates(
+ self.config.read_templates(
[template_filename],
(td.name for td in tempdirs),
)
@@ -118,7 +123,7 @@ class BaseConfigTestCase(unittest.HomeserverTestCase):
# Retrieve the template.
template = (
- self.hs.config.read_templates(
+ self.config.read_templates(
[other_template_name],
(td.name for td in tempdirs),
)
@@ -134,6 +139,6 @@ class BaseConfigTestCase(unittest.HomeserverTestCase):
def test_loading_template_from_nonexistent_custom_directory(self):
with self.assertRaises(ConfigError):
- self.hs.config.read_templates(
+ self.config.read_templates(
["some_filename.html"], ("a_nonexistent_directory",)
)
diff --git a/tests/config/test_cache.py b/tests/config/test_cache.py
index 857d9cd096..79d417568d 100644
--- a/tests/config/test_cache.py
+++ b/tests/config/test_cache.py
@@ -12,59 +12,55 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from synapse.config._base import Config, RootConfig
+from unittest.mock import patch
+
from synapse.config.cache import CacheConfig, add_resizable_cache
from synapse.util.caches.lrucache import LruCache
from tests.unittest import TestCase
-class FakeServer(Config):
- section = "server"
-
-
-class TestConfig(RootConfig):
- config_classes = [FakeServer, CacheConfig]
-
-
+# Patch the global _CACHES so that each test runs against its own state.
+@patch("synapse.config.cache._CACHES", new_callable=dict)
class CacheConfigTests(TestCase):
def setUp(self):
# Reset caches before each test
- TestConfig().caches.reset()
+ self.config = CacheConfig()
+
+ def tearDown(self):
+ self.config.reset()
- def test_individual_caches_from_environ(self):
+ def test_individual_caches_from_environ(self, _caches):
"""
Individual cache factors will be loaded from the environment.
"""
config = {}
- t = TestConfig()
- t.caches._environ = {
+ self.config._environ = {
"SYNAPSE_CACHE_FACTOR_SOMETHING_OR_OTHER": "2",
"SYNAPSE_NOT_CACHE": "BLAH",
}
- t.read_config(config, config_dir_path="", data_dir_path="")
+ self.config.read_config(config, config_dir_path="", data_dir_path="")
- self.assertEqual(dict(t.caches.cache_factors), {"something_or_other": 2.0})
+ self.assertEqual(dict(self.config.cache_factors), {"something_or_other": 2.0})
- def test_config_overrides_environ(self):
+ def test_config_overrides_environ(self, _caches):
"""
Individual cache factors defined in the environment will take precedence
over those in the config.
"""
config = {"caches": {"per_cache_factors": {"foo": 2, "bar": 3}}}
- t = TestConfig()
- t.caches._environ = {
+ self.config._environ = {
"SYNAPSE_CACHE_FACTOR_SOMETHING_OR_OTHER": "2",
"SYNAPSE_CACHE_FACTOR_FOO": 1,
}
- t.read_config(config, config_dir_path="", data_dir_path="")
+ self.config.read_config(config, config_dir_path="", data_dir_path="")
self.assertEqual(
- dict(t.caches.cache_factors),
+ dict(self.config.cache_factors),
{"foo": 1.0, "bar": 3.0, "something_or_other": 2.0},
)
- def test_individual_instantiated_before_config_load(self):
+ def test_individual_instantiated_before_config_load(self, _caches):
"""
If a cache is instantiated before the config is read, it will be given
the default cache size in the interim, and then resized once the config
@@ -76,26 +72,24 @@ class CacheConfigTests(TestCase):
self.assertEqual(cache.max_size, 50)
config = {"caches": {"per_cache_factors": {"foo": 3}}}
- t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
+        self.config.read_config(config, config_dir_path="", data_dir_path="")
self.assertEqual(cache.max_size, 300)
- def test_individual_instantiated_after_config_load(self):
+ def test_individual_instantiated_after_config_load(self, _caches):
"""
If a cache is instantiated after the config is read, it will be
immediately resized to the correct size given the per_cache_factor if
there is one.
"""
config = {"caches": {"per_cache_factors": {"foo": 2}}}
- t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
+ self.config.read_config(config, config_dir_path="", data_dir_path="")
cache = LruCache(100)
add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor)
self.assertEqual(cache.max_size, 200)
- def test_global_instantiated_before_config_load(self):
+ def test_global_instantiated_before_config_load(self, _caches):
"""
If a cache is instantiated before the config is read, it will be given
the default cache size in the interim, and then resized to the new
@@ -106,26 +100,24 @@ class CacheConfigTests(TestCase):
self.assertEqual(cache.max_size, 50)
config = {"caches": {"global_factor": 4}}
- t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
+ self.config.read_config(config, config_dir_path="", data_dir_path="")
self.assertEqual(cache.max_size, 400)
- def test_global_instantiated_after_config_load(self):
+ def test_global_instantiated_after_config_load(self, _caches):
"""
If a cache is instantiated after the config is read, it will be
immediately resized to the correct size given the global factor if there
is no per-cache factor.
"""
config = {"caches": {"global_factor": 1.5}}
- t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
+ self.config.read_config(config, config_dir_path="", data_dir_path="")
cache = LruCache(100)
add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor)
self.assertEqual(cache.max_size, 150)
- def test_cache_with_asterisk_in_name(self):
+ def test_cache_with_asterisk_in_name(self, _caches):
"""Some caches have asterisks in their name, test that they are set correctly."""
config = {
@@ -133,12 +125,11 @@ class CacheConfigTests(TestCase):
"per_cache_factors": {"*cache_a*": 5, "cache_b": 6, "cache_c": 2}
}
}
- t = TestConfig()
- t.caches._environ = {
+ self.config._environ = {
"SYNAPSE_CACHE_FACTOR_CACHE_A": "2",
"SYNAPSE_CACHE_FACTOR_CACHE_B": 3,
}
- t.read_config(config, config_dir_path="", data_dir_path="")
+ self.config.read_config(config, config_dir_path="", data_dir_path="")
cache_a = LruCache(100)
add_resizable_cache("*cache_a*", cache_resize_callback=cache_a.set_cache_factor)
@@ -152,17 +143,16 @@ class CacheConfigTests(TestCase):
add_resizable_cache("*cache_c*", cache_resize_callback=cache_c.set_cache_factor)
self.assertEqual(cache_c.max_size, 200)
- def test_apply_cache_factor_from_config(self):
+ def test_apply_cache_factor_from_config(self, _caches):
"""Caches can disable applying cache factor updates, mainly used by
event cache size.
"""
config = {"caches": {"event_cache_size": "10k"}}
- t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
+ self.config.read_config(config, config_dir_path="", data_dir_path="")
cache = LruCache(
- max_size=t.caches.event_cache_size,
+ max_size=self.config.event_cache_size,
apply_cache_factor_from_config=False,
)
add_resizable_cache("event_cache", cache_resize_callback=cache.set_cache_factor)
diff --git a/tests/config/test_load.py b/tests/config/test_load.py
index 8e49ca26d9..59635de205 100644
--- a/tests/config/test_load.py
+++ b/tests/config/test_load.py
@@ -49,7 +49,7 @@ class ConfigLoadingTestCase(unittest.TestCase):
config = HomeServerConfig.load_config("", ["-c", self.file])
self.assertTrue(
- hasattr(config, "macaroon_secret_key"),
+ hasattr(config.key, "macaroon_secret_key"),
"Want config to have attr macaroon_secret_key",
)
if len(config.key.macaroon_secret_key) < 5:
@@ -60,7 +60,7 @@ class ConfigLoadingTestCase(unittest.TestCase):
config = HomeServerConfig.load_or_generate_config("", ["-c", self.file])
self.assertTrue(
- hasattr(config, "macaroon_secret_key"),
+ hasattr(config.key, "macaroon_secret_key"),
"Want config to have attr macaroon_secret_key",
)
if len(config.key.macaroon_secret_key) < 5:
@@ -74,8 +74,12 @@ class ConfigLoadingTestCase(unittest.TestCase):
config1 = HomeServerConfig.load_config("", ["-c", self.file])
config2 = HomeServerConfig.load_config("", ["-c", self.file])
config3 = HomeServerConfig.load_or_generate_config("", ["-c", self.file])
- self.assertEqual(config1.macaroon_secret_key, config2.macaroon_secret_key)
- self.assertEqual(config1.macaroon_secret_key, config3.macaroon_secret_key)
+ self.assertEqual(
+ config1.key.macaroon_secret_key, config2.key.macaroon_secret_key
+ )
+ self.assertEqual(
+ config1.key.macaroon_secret_key, config3.key.macaroon_secret_key
+ )
def test_disable_registration(self):
self.generate_config()
diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py
index b6bc1876b5..9ba5781573 100644
--- a/tests/config/test_tls.py
+++ b/tests/config/test_tls.py
@@ -42,9 +42,9 @@ class TLSConfigTests(TestCase):
"""
config = {}
t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
+ t.tls.read_config(config, config_dir_path="", data_dir_path="")
- self.assertEqual(t.federation_client_minimum_tls_version, "1")
+ self.assertEqual(t.tls.federation_client_minimum_tls_version, "1")
def test_tls_client_minimum_set(self):
"""
@@ -52,29 +52,29 @@ class TLSConfigTests(TestCase):
"""
config = {"federation_client_minimum_tls_version": 1}
t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
- self.assertEqual(t.federation_client_minimum_tls_version, "1")
+ t.tls.read_config(config, config_dir_path="", data_dir_path="")
+ self.assertEqual(t.tls.federation_client_minimum_tls_version, "1")
config = {"federation_client_minimum_tls_version": 1.1}
t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
- self.assertEqual(t.federation_client_minimum_tls_version, "1.1")
+ t.tls.read_config(config, config_dir_path="", data_dir_path="")
+ self.assertEqual(t.tls.federation_client_minimum_tls_version, "1.1")
config = {"federation_client_minimum_tls_version": 1.2}
t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
- self.assertEqual(t.federation_client_minimum_tls_version, "1.2")
+ t.tls.read_config(config, config_dir_path="", data_dir_path="")
+ self.assertEqual(t.tls.federation_client_minimum_tls_version, "1.2")
# Also test a string version
config = {"federation_client_minimum_tls_version": "1"}
t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
- self.assertEqual(t.federation_client_minimum_tls_version, "1")
+ t.tls.read_config(config, config_dir_path="", data_dir_path="")
+ self.assertEqual(t.tls.federation_client_minimum_tls_version, "1")
config = {"federation_client_minimum_tls_version": "1.2"}
t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
- self.assertEqual(t.federation_client_minimum_tls_version, "1.2")
+ t.tls.read_config(config, config_dir_path="", data_dir_path="")
+ self.assertEqual(t.tls.federation_client_minimum_tls_version, "1.2")
def test_tls_client_minimum_1_point_3_missing(self):
"""
@@ -91,7 +91,7 @@ class TLSConfigTests(TestCase):
config = {"federation_client_minimum_tls_version": 1.3}
t = TestConfig()
with self.assertRaises(ConfigError) as e:
- t.read_config(config, config_dir_path="", data_dir_path="")
+ t.tls.read_config(config, config_dir_path="", data_dir_path="")
self.assertEqual(
e.exception.args[0],
(
@@ -112,8 +112,8 @@ class TLSConfigTests(TestCase):
config = {"federation_client_minimum_tls_version": 1.3}
t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
- self.assertEqual(t.federation_client_minimum_tls_version, "1.3")
+ t.tls.read_config(config, config_dir_path="", data_dir_path="")
+ self.assertEqual(t.tls.federation_client_minimum_tls_version, "1.3")
def test_tls_client_minimum_set_passed_through_1_2(self):
"""
@@ -121,7 +121,7 @@ class TLSConfigTests(TestCase):
"""
config = {"federation_client_minimum_tls_version": 1.2}
t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
+ t.tls.read_config(config, config_dir_path="", data_dir_path="")
cf = FederationPolicyForHTTPS(t)
options = _get_ssl_context_options(cf._verify_ssl_context)
@@ -137,7 +137,7 @@ class TLSConfigTests(TestCase):
"""
config = {"federation_client_minimum_tls_version": 1}
t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
+ t.tls.read_config(config, config_dir_path="", data_dir_path="")
cf = FederationPolicyForHTTPS(t)
options = _get_ssl_context_options(cf._verify_ssl_context)
@@ -159,7 +159,7 @@ class TLSConfigTests(TestCase):
}
t = TestConfig()
e = self.assertRaises(
- ConfigError, t.read_config, config, config_dir_path="", data_dir_path=""
+ ConfigError, t.tls.read_config, config, config_dir_path="", data_dir_path=""
)
self.assertIn("IDNA domain names", str(e))
@@ -174,7 +174,7 @@ class TLSConfigTests(TestCase):
]
}
t = TestConfig()
- t.read_config(config, config_dir_path="", data_dir_path="")
+ t.tls.read_config(config, config_dir_path="", data_dir_path="")
cf = FederationPolicyForHTTPS(t)
diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py
index 03fd5a3e2c..47217f0542 100644
--- a/tests/handlers/test_user_directory.py
+++ b/tests/handlers/test_user_directory.py
@@ -402,6 +402,40 @@ class UserDirectoryTestCase(unittest.HomeserverTestCase):
public3 = self.get_success(self.user_dir_helper.get_users_in_public_rooms())
self.assertEqual(set(public3), {(alice, room2), (bob, room2)})
+ def test_per_room_profile_doesnt_alter_directory_entry(self) -> None:
+ alice = self.register_user("alice", "pass")
+ alice_token = self.login(alice, "pass")
+ bob = self.register_user("bob", "pass")
+
+ # Alice should have a user directory entry created at registration.
+ users = self.get_success(self.user_dir_helper.get_profiles_in_user_directory())
+ self.assertEqual(
+ users[alice], ProfileInfo(display_name="alice", avatar_url=None)
+ )
+
+ # Alice makes a room for herself.
+ room = self.helper.create_room_as(alice, is_public=True, tok=alice_token)
+
+ # Alice sets a nickname unique to that room.
+ self.helper.send_state(
+ room,
+ "m.room.member",
+ {
+ "displayname": "Freddy Mercury",
+ "membership": "join",
+ },
+ alice_token,
+ state_key=alice,
+ )
+
+ # Alice's display name remains the same in the user directory.
+ search_result = self.get_success(self.handler.search_users(bob, alice, 10))
+ self.assertEqual(
+ search_result["results"],
+ [{"display_name": "alice", "avatar_url": None, "user_id": alice}],
+ )
+
def test_private_room(self) -> None:
"""
A user can be searched for only by people that are either in a public
diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py
index 30bdaa9c27..376853fd65 100644
--- a/tests/rest/client/test_rooms.py
+++ b/tests/rest/client/test_rooms.py
@@ -784,6 +784,30 @@ class RoomsCreateTestCase(RoomBase):
# Check that do_3pid_invite wasn't called this time.
self.assertEquals(do_3pid_invite_mock.call_count, len(invited_3pids))
+ def test_spam_checker_may_join_room(self):
+ """Tests that the user_may_join_room spam checker callback is correctly bypassed
+ when creating a new room.
+ """
+
+ async def user_may_join_room(
+ mxid: str,
+ room_id: str,
+ is_invite: bool,
+ ) -> bool:
+ return False
+
+ join_mock = Mock(side_effect=user_may_join_room)
+ self.hs.get_spam_checker()._user_may_join_room_callbacks.append(join_mock)
+
+ channel = self.make_request(
+ "POST",
+ "/createRoom",
+ {},
+ )
+ self.assertEquals(channel.code, 200, channel.json_body)
+
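+ # Creating a room makes the creator join it, but that join must not go through
+ # the user_may_join_room callback, so the mock should never have been called.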
+ self.assertEquals(join_mock.call_count, 0)
+
class RoomTopicTestCase(RoomBase):
"""Tests /rooms/$room_id/topic REST events."""
@@ -975,6 +999,83 @@ class RoomInviteRatelimitTestCase(RoomBase):
self.helper.invite(room_id, self.user_id, "@other-users:red", expect_code=429)
+class RoomJoinTestCase(RoomBase):
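+ """Tests of joining rooms, in particular the user_may_join_room spam checker callback."""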
+
+ servlets = [
+ admin.register_servlets,
+ login.register_servlets,
+ room.register_servlets,
+ ]
+
+ def prepare(self, reactor, clock, homeserver):
+ self.user1 = self.register_user("thomas", "hackme")
+ self.tok1 = self.login("thomas", "hackme")
+
+ self.user2 = self.register_user("teresa", "hackme")
+ self.tok2 = self.login("teresa", "hackme")
+
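+ # Create three rooms: one to join without an invite, one to join with an invite,
+ # and one to check that the callback can deny a join.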
+ self.room1 = self.helper.create_room_as(room_creator=self.user1, tok=self.tok1)
+ self.room2 = self.helper.create_room_as(room_creator=self.user1, tok=self.tok1)
+ self.room3 = self.helper.create_room_as(room_creator=self.user1, tok=self.tok1)
+
+ def test_spam_checker_may_join_room(self):
+ """Tests that the user_may_join_room spam checker callback is correctly called
+ and blocks room joins when needed.
+ """
+
+ # Register a dummy callback. Make it allow all room joins for now.
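+ # The callback reads `return_value` through its closure, so rebinding the variable
+ # further down changes what the callback returns.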
+ return_value = True
+
+ async def user_may_join_room(
+ userid: str,
+ room_id: str,
+ is_invited: bool,
+ ) -> bool:
+ return return_value
+
+ callback_mock = Mock(side_effect=user_may_join_room)
+ self.hs.get_spam_checker()._user_may_join_room_callbacks.append(callback_mock)
+
+ # Join a first room, without being invited to it.
+ self.helper.join(self.room1, self.user2, tok=self.tok2)
+
+ # Check that the callback was called with the right arguments.
+ expected_call_args = (
+ (
+ self.user2,
+ self.room1,
+ False,
+ ),
+ )
+ self.assertEquals(
+ callback_mock.call_args,
+ expected_call_args,
+ callback_mock.call_args,
+ )
+
+ # Join a second room, this time with an invite for it.
+ self.helper.invite(self.room2, self.user1, self.user2, tok=self.tok1)
+ self.helper.join(self.room2, self.user2, tok=self.tok2)
+
+ # Check that the callback was called with the right arguments.
+ expected_call_args = (
+ (
+ self.user2,
+ self.room2,
+ True,
+ ),
+ )
+ self.assertEquals(
+ callback_mock.call_args,
+ expected_call_args,
+ callback_mock.call_args,
+ )
+
+ # Now make the callback deny all room joins, and check that a join actually fails.
+ return_value = False
+ self.helper.join(self.room3, self.user2, expect_code=403, tok=self.tok2)
+
+
class RoomJoinRatelimitTestCase(RoomBase):
user_id = "@sid1:red"
@@ -2430,3 +2531,73 @@ class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):
"""An alias which does not point to the room raises a SynapseError."""
self._set_canonical_alias({"alias": "@unknown:test"}, expected_code=400)
self._set_canonical_alias({"alt_aliases": ["@unknown:test"]}, expected_code=400)
+
+
+class ThreepidInviteTestCase(unittest.HomeserverTestCase):
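+ """Tests of third-party identifier (3PID, e.g. email) invites."""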
+
+ servlets = [
+ admin.register_servlets,
+ login.register_servlets,
+ room.register_servlets,
+ ]
+
+ def prepare(self, reactor, clock, homeserver):
+ self.user_id = self.register_user("thomas", "hackme")
+ self.tok = self.login("thomas", "hackme")
+
+ self.room_id = self.helper.create_room_as(self.user_id, tok=self.tok)
+
+ def test_threepid_invite_spamcheck(self):
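+ """The user_may_send_3pid_invite callback should be consulted when sending a 3PID
+ invite, and should be able to deny the invite."""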
+ # Mock a few functions to prevent the test from failing because it can't talk to a
+ # remote identity server. We keep the mock for _make_and_store_3pid_invite around so
+ # we can check its call_count later on during the test.
+ make_invite_mock = Mock(return_value=make_awaitable(0))
+ self.hs.get_room_member_handler()._make_and_store_3pid_invite = make_invite_mock
+ self.hs.get_identity_handler().lookup_3pid = Mock(
+ return_value=make_awaitable(None),
+ )
+
+ # Add a mock to the spamchecker callbacks for user_may_send_3pid_invite. Make it
+ # allow everything for now.
+ mock = Mock(return_value=make_awaitable(True))
+ self.hs.get_spam_checker()._user_may_send_3pid_invite_callbacks.append(mock)
+
+ # Send a 3PID invite into the room and check that it succeeded.
+ email_to_invite = "teresa@example.com"
+ channel = self.make_request(
+ method="POST",
+ path="/rooms/" + self.room_id + "/invite",
+ content={
+ "id_server": "example.com",
+ "id_access_token": "sometoken",
+ "medium": "email",
+ "address": email_to_invite,
+ },
+ access_token=self.tok,
+ )
+ self.assertEquals(channel.code, 200)
+
+ # Check that the callback was called with the right params.
+ mock.assert_called_with(self.user_id, "email", email_to_invite, self.room_id)
+
+ # Check that the call to send the invite was made.
+ make_invite_mock.assert_called_once()
+
+ # Now change the return value of the callback to deny any invite and test that
+ # we can't send the invite.
+ mock.return_value = make_awaitable(False)
+ channel = self.make_request(
+ method="POST",
+ path="/rooms/" + self.room_id + "/invite",
+ content={
+ "id_server": "example.com",
+ "id_access_token": "sometoken",
+ "medium": "email",
+ "address": email_to_invite,
+ },
+ access_token=self.tok,
+ )
+ self.assertEquals(channel.code, 403)
+
+ # Also check that it stopped before calling _make_and_store_3pid_invite.
+ make_invite_mock.assert_called_once()
diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py
index cf9748f218..f26d5acf9c 100644
--- a/tests/storage/test_appservice.py
+++ b/tests/storage/test_appservice.py
@@ -126,7 +126,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
self.db_pool = database._db_pool
self.engine = database.engine
- db_config = hs.config.get_single_database()
+ db_config = hs.config.database.get_single_database()
self.store = TestTransactionStore(
database, make_conn(db_config, self.engine, "test"), hs
)
diff --git a/tests/storage/test_txn_limit.py b/tests/storage/test_txn_limit.py
index 6ff3ebb137..ace82cbf42 100644
--- a/tests/storage/test_txn_limit.py
+++ b/tests/storage/test_txn_limit.py
@@ -22,7 +22,7 @@ class SQLTransactionLimitTestCase(unittest.HomeserverTestCase):
return self.setup_test_homeserver(db_txn_limit=1000)
def test_config(self):
- db_config = self.hs.config.get_single_database()
+ db_config = self.hs.config.database.get_single_database()
self.assertEqual(db_config.config["txn_limit"], 1000)
def test_select(self):
|