diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py
index de2a134544..6ee695b2ba 100755
--- a/scripts-dev/build_debian_packages.py
+++ b/scripts-dev/build_debian_packages.py
@@ -28,12 +28,11 @@ from typing import Collection, Optional, Sequence, Set
# example)
DISTS = (
"debian:bullseye", # (EOL ~2024-07) (our EOL forced by Python 3.9 is 2025-10-05)
- "debian:bookworm", # (EOL not specified yet) (our EOL forced by Python 3.11 is 2027-10-24)
- "debian:sid", # (EOL not specified yet) (our EOL forced by Python 3.11 is 2027-10-24)
- "ubuntu:focal", # 20.04 LTS (EOL 2025-04) (our EOL forced by Python 3.8 is 2024-10-14)
+ "debian:bookworm", # (EOL 2026-06) (our EOL forced by Python 3.11 is 2027-10-24)
+ "debian:sid", # (rolling distro, no EOL)
"ubuntu:jammy", # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
- "ubuntu:lunar", # 23.04 (EOL 2024-01) (our EOL forced by Python 3.11 is 2027-10-24)
- "ubuntu:mantic", # 23.10 (EOL 2024-07) (our EOL forced by Python 3.11 is 2027-10-24)
+ "ubuntu:noble", # 24.04 LTS (EOL 2029-06)
+ "ubuntu:oracular", # 24.10 (EOL 2025-07)
"debian:trixie", # (EOL not specified yet)
)
diff --git a/scripts-dev/check_pydantic_models.py b/scripts-dev/check_pydantic_models.py
index 9e67375b6a..5eb1f0a9df 100755
--- a/scripts-dev/check_pydantic_models.py
+++ b/scripts-dev/check_pydantic_models.py
@@ -31,6 +31,7 @@ Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. Se
until then, this script is a best effort to stop us from introducing type coersion bugs
(like the infamous stringy power levels fixed in room version 10).
"""
+
import argparse
import contextlib
import functools
@@ -44,7 +45,6 @@ import traceback
import unittest.mock
from contextlib import contextmanager
from typing import (
- TYPE_CHECKING,
Any,
Callable,
Dict,
@@ -56,30 +56,17 @@ from typing import (
)
from parameterized import parameterized
-
-from synapse._pydantic_compat import HAS_PYDANTIC_V2
-
-if TYPE_CHECKING or HAS_PYDANTIC_V2:
- from pydantic.v1 import (
- BaseModel as PydanticBaseModel,
- conbytes,
- confloat,
- conint,
- constr,
- )
- from pydantic.v1.typing import get_args
-else:
- from pydantic import (
- BaseModel as PydanticBaseModel,
- conbytes,
- confloat,
- conint,
- constr,
- )
- from pydantic.typing import get_args
-
from typing_extensions import ParamSpec
+from synapse._pydantic_compat import (
+ BaseModel as PydanticBaseModel,
+ conbytes,
+ confloat,
+ conint,
+ constr,
+ get_args,
+)
+
logger = logging.getLogger(__name__)
CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [
@@ -182,22 +169,16 @@ def monkeypatch_pydantic() -> Generator[None, None, None]:
# Most Synapse code ought to import the patched objects directly from
# `pydantic`. But we also patch their containing modules `pydantic.main` and
# `pydantic.types` for completeness.
- patch_basemodel1 = unittest.mock.patch(
- "pydantic.BaseModel", new=PatchedBaseModel
- )
- patch_basemodel2 = unittest.mock.patch(
- "pydantic.main.BaseModel", new=PatchedBaseModel
+ patch_basemodel = unittest.mock.patch(
+ "synapse._pydantic_compat.BaseModel", new=PatchedBaseModel
)
- patches.enter_context(patch_basemodel1)
- patches.enter_context(patch_basemodel2)
+ patches.enter_context(patch_basemodel)
for factory in CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG:
wrapper: Callable = make_wrapper(factory)
- patch1 = unittest.mock.patch(f"pydantic.{factory.__name__}", new=wrapper)
- patch2 = unittest.mock.patch(
- f"pydantic.types.{factory.__name__}", new=wrapper
+ patch = unittest.mock.patch(
+ f"synapse._pydantic_compat.{factory.__name__}", new=wrapper
)
- patches.enter_context(patch1)
- patches.enter_context(patch2)
+ patches.enter_context(patch)
yield
diff --git a/scripts-dev/check_schema_delta.py b/scripts-dev/check_schema_delta.py
index 467be96fdf..454784c3ae 100755
--- a/scripts-dev/check_schema_delta.py
+++ b/scripts-dev/check_schema_delta.py
@@ -1,6 +1,8 @@
#!/usr/bin/env python3
# Check that no schema deltas have been added to the wrong version.
+#
+# Also checks that schema deltas do not try to create or drop indices.
import re
from typing import Any, Dict, List
@@ -9,6 +11,13 @@ import click
import git
SCHEMA_FILE_REGEX = re.compile(r"^synapse/storage/schema/(.*)/delta/(.*)/(.*)$")
+INDEX_CREATION_REGEX = re.compile(r"CREATE .*INDEX .*ON ([a-z_]+)", flags=re.IGNORECASE)
+INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_]+)", flags=re.IGNORECASE)
+TABLE_CREATION_REGEX = re.compile(r"CREATE .*TABLE ([a-z_]+)", flags=re.IGNORECASE)
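+#
+# Illustrative statements these regexes are intended to catch (assumed
+# examples, not drawn from real deltas). Matching is done per line, so
+# statements split across lines are not caught:
+#   CREATE UNIQUE INDEX foo_idx ON foo_table(bar);  -- INDEX_CREATION_REGEX captures "foo_table"
+#   DROP INDEX foo_idx;                             -- INDEX_DELETION_REGEX captures "foo_idx"
+#   CREATE TABLE foo_table(bar TEXT);               -- TABLE_CREATION_REGEX captures "foo_table"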
+
+# The base branch we want to check against. We use the main development branch
+# on the assumption that it is what we are developing against.
+DEVELOP_BRANCH = "develop"
@click.command()
@@ -20,6 +29,9 @@ SCHEMA_FILE_REGEX = re.compile(r"^synapse/storage/schema/(.*)/delta/(.*)/(.*)$")
help="Always output ANSI colours",
)
def main(force_colors: bool) -> None:
+    # Return code. Set to non-zero when we encounter an error.
+ return_code = 0
+
click.secho(
"+++ Checking schema deltas are in the right folder",
fg="green",
@@ -30,17 +42,17 @@ def main(force_colors: bool) -> None:
click.secho("Updating repo...")
repo = git.Repo()
- repo.remote().fetch()
+ repo.remote().fetch(refspec=DEVELOP_BRANCH)
click.secho("Getting current schema version...")
- r = repo.git.show("origin/develop:synapse/storage/schema/__init__.py")
+ r = repo.git.show(f"origin/{DEVELOP_BRANCH}:synapse/storage/schema/__init__.py")
locals: Dict[str, Any] = {}
exec(r, locals)
current_schema_version = locals["SCHEMA_VERSION"]
- diffs: List[git.Diff] = repo.remote().refs.develop.commit.diff(None)
+ diffs: List[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None)
# Get the schema version of the local file to check against current schema on develop
with open("synapse/storage/schema/__init__.py") as file:
@@ -53,7 +65,7 @@ def main(force_colors: bool) -> None:
# local schema version must be +/-1 the current schema version on develop
if abs(local_schema_version - current_schema_version) != 1:
click.secho(
- "The proposed schema version has diverged more than one version from develop, please fix!",
+ f"The proposed schema version has diverged more than one version from {DEVELOP_BRANCH}, please fix!",
fg="red",
bold=True,
color=force_colors,
@@ -67,21 +79,28 @@ def main(force_colors: bool) -> None:
click.secho(f"Current schema version: {current_schema_version}")
seen_deltas = False
- bad_files = []
+ bad_delta_files = []
+ changed_delta_files = []
for diff in diffs:
- if not diff.new_file or diff.b_path is None:
+ if diff.b_path is None:
+ # We don't lint deleted files.
continue
match = SCHEMA_FILE_REGEX.match(diff.b_path)
if not match:
continue
+ changed_delta_files.append(diff.b_path)
+
+ if not diff.new_file:
+ continue
+
seen_deltas = True
_, delta_version, _ = match.groups()
if delta_version != str(current_schema_version):
- bad_files.append(diff.b_path)
+ bad_delta_files.append(diff.b_path)
if not seen_deltas:
click.secho(
@@ -92,41 +111,91 @@ def main(force_colors: bool) -> None:
)
return
- if not bad_files:
+ if bad_delta_files:
+ bad_delta_files.sort()
+
click.secho(
- f"All deltas are in the correct folder: {current_schema_version}!",
- fg="green",
+ "Found deltas in the wrong folder!",
+ fg="red",
bold=True,
color=force_colors,
)
- return
- bad_files.sort()
-
- click.secho(
- "Found deltas in the wrong folder!",
- fg="red",
- bold=True,
- color=force_colors,
- )
+ for f in bad_delta_files:
+ click.secho(
+ f"\t{f}",
+ fg="red",
+ bold=True,
+ color=force_colors,
+ )
- for f in bad_files:
+ click.secho()
click.secho(
- f"\t{f}",
+ f"Please move these files to delta/{current_schema_version}/",
fg="red",
bold=True,
color=force_colors,
)
- click.secho()
- click.secho(
- f"Please move these files to delta/{current_schema_version}/",
- fg="red",
- bold=True,
- color=force_colors,
- )
+ else:
+ click.secho(
+ f"All deltas are in the correct folder: {current_schema_version}!",
+ fg="green",
+ bold=True,
+ color=force_colors,
+ )
- click.get_current_context().exit(1)
+    # Make sure we process them in order. This sort works because delta
+    # directories are numbered and the delta files within them are numbered in order.
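+    # (e.g. a hypothetical "synapse/storage/schema/main/delta/92/01_foo.sql"
+    # sorts before "synapse/storage/schema/main/delta/92/02_bar.sql")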
+ changed_delta_files.sort()
+
+    # Now check that we're not trying to create or drop indices. If we want to
+    # do that, it should be done in a background update. The exception is when we
+    # create indices on tables we've just created.
+ created_tables = set()
+ for delta_file in changed_delta_files:
+ with open(delta_file) as fd:
+ delta_lines = fd.readlines()
+
+ for line in delta_lines:
+ # Strip SQL comments
+ line = line.split("--", maxsplit=1)[0]
+
+ # Check and track any tables we create
+ match = TABLE_CREATION_REGEX.search(line)
+ if match:
+ table_name = match.group(1)
+ created_tables.add(table_name)
+
+            # Check for dropping indices; these are always banned.
+ match = INDEX_DELETION_REGEX.search(line)
+ if match:
+ clause = match.group()
+
+ click.secho(
+ f"Found delta with index deletion: '{clause}' in {delta_file}\nThese should be in background updates.",
+ fg="red",
+ bold=True,
+ color=force_colors,
+ )
+ return_code = 1
+
+ # Check for index creation, which is only allowed for tables we've
+ # created.
+ match = INDEX_CREATION_REGEX.search(line)
+ if match:
+ clause = match.group()
+ table_name = match.group(1)
+ if table_name not in created_tables:
+ click.secho(
+ f"Found delta with index creation: '{clause}' in {delta_file}\nThese should be in background updates.",
+ fg="red",
+ bold=True,
+ color=force_colors,
+ )
+ return_code = 1
+
+ click.get_current_context().exit(return_code)
if __name__ == "__main__":
diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh
index 4ad547bc7e..08b500ecd6 100755
--- a/scripts-dev/complement.sh
+++ b/scripts-dev/complement.sh
@@ -195,6 +195,10 @@ if [ -z "$skip_docker_build" ]; then
# Build the unified Complement image (from the worker Synapse image we just built).
echo_if_github "::group::Build Docker image: complement/Dockerfile"
$CONTAINER_RUNTIME build -t complement-synapse \
+ `# This is the tag we end up pushing to the registry (see` \
+ `# .github/workflows/push_complement_image.yml) so let's just label it now` \
+ `# so people can reference it by the same name locally.` \
+ -t ghcr.io/element-hq/synapse/complement-synapse \
-f "docker/complement/Dockerfile" "docker/complement"
echo_if_github "::endgroup::"
@@ -220,9 +224,12 @@ test_packages=(
./tests/msc3874
./tests/msc3890
./tests/msc3391
+ ./tests/msc3757
./tests/msc3930
./tests/msc3902
./tests/msc3967
+ ./tests/msc4140
+ ./tests/msc4155
)
# Enable dirty runs, so tests will reuse the same container where possible.
diff --git a/scripts-dev/gen_config_documentation.py b/scripts-dev/gen_config_documentation.py
new file mode 100755
index 0000000000..a169fd323f
--- /dev/null
+++ b/scripts-dev/gen_config_documentation.py
@@ -0,0 +1,494 @@
+#!/usr/bin/env python3
+"""Generate Synapse documentation from JSON Schema file."""
+
+import json
+import re
+import sys
+from typing import Any, Optional
+
+import yaml
+
+HEADER = """<!-- Document auto-generated by scripts-dev/gen_config_documentation.py -->
+
+# Configuring Synapse
+
+This is intended as a guide to the Synapse configuration. The behavior of a Synapse instance can be modified
+through the many configuration settings documented here — each config option is explained,
+including what the default is, how to change the default and what sort of behaviour the setting governs.
+Also included is an example configuration for each setting. If you don't want to spend a lot of time
+thinking about options, the config as generated sets sensible defaults for all values. Do note however that the
+database defaults to SQLite, which is not recommended for production usage. You can read more on this subject
+[here](../../setup/installation.md#using-postgresql).
+
+## Config Conventions
+
+Configuration options that take a time period can be set using a number
+followed by a letter. Letters have the following meanings:
+
+* `s` = second
+* `m` = minute
+* `h` = hour
+* `d` = day
+* `w` = week
+* `y` = year
+
+For example, setting `redaction_retention_period: 5m` would remove redacted
+messages from the database after 5 minutes, rather than 5 months.
+
+In addition, configuration options referring to size use the following suffixes:
+
+* `K` = KiB, or 1024 bytes
+* `M` = MiB, or 1,048,576 bytes
+* `G` = GiB, or 1,073,741,824 bytes
+* `T` = TiB, or 1,099,511,627,776 bytes
+
+For example, setting `max_avatar_size: 10M` means that Synapse will not accept files larger than 10,485,760 bytes
+for a user avatar.
+
+## Config Validation
+
+The configuration file can be validated with the following command:
+```bash
+python -m synapse.config read <config key to print> -c <path to config>
+```
+
+To validate the entire file, omit `read <config key to print>`:
+```bash
+python -m synapse.config -c <path to config>
+```
+
+To see how to set other options, check the help reference:
+```bash
+python -m synapse.config --help
+```
+
+### YAML
+The configuration file is a [YAML](https://yaml.org/) file, which means that certain syntax rules
+apply if you want your config file to be read properly. A few helpful things to know:
+* `#` before any option in the config will comment out that setting and either a default (if available) will
+ be applied or Synapse will ignore the setting. Thus, in example #1 below, the setting will be read and
+ applied, but in example #2 the setting will not be read and a default will be applied.
+
+ Example #1:
+ ```yaml
+ pid_file: DATADIR/homeserver.pid
+ ```
+ Example #2:
+ ```yaml
+ #pid_file: DATADIR/homeserver.pid
+ ```
+* Indentation matters! The indentation before a setting
+ will determine whether a given setting is read as part of another
+ setting, or considered on its own. Thus, in example #1, the `enabled` setting
+ is read as a sub-option of the `presence` setting, and will be properly applied.
+
+ However, the lack of indentation before the `enabled` setting in example #2 means
+ that when reading the config, Synapse will consider both `presence` and `enabled` as
+ different settings. In this case, `presence` has no value, and thus a default applied, and `enabled`
+ is an option that Synapse doesn't recognize and thus ignores.
+
+  Example #1:
+  ```yaml
+  presence:
+    enabled: false
+  ```
+  Example #2:
+  ```yaml
+  presence:
+  enabled: false
+  ```
+ In this manual, all top-level settings (ones with no indentation) are identified
+ at the beginning of their section (i.e. "### `example_setting`") and
+ the sub-options, if any, are identified and listed in the body of the section.
+ In addition, each setting has an example of its usage, with the proper indentation
+ shown.
+"""
+SECTION_HEADERS = {
+ "modules": {
+ "title": "Modules",
+ "description": (
+ "Server admins can expand Synapse's functionality with external "
+ "modules.\n\n"
+ "See [here](../../modules/index.md) for more documentation on how "
+ "to configure or create custom modules for Synapse."
+ ),
+ },
+ "server_name": {
+ "title": "Server",
+ "description": "Define your homeserver name and other base options.",
+ },
+ "admin_contact": {
+ "title": "Homeserver blocking",
+ "description": "Useful options for Synapse admins.",
+ },
+ "tls_certificate_path": {
+ "title": "TLS",
+ "description": "Options related to TLS.",
+ },
+ "federation_domain_whitelist": {
+ "title": "Federation",
+ "description": "Options related to federation.",
+ },
+ "event_cache_size": {
+ "title": "Caching",
+ "description": "Options related to caching.",
+ },
+ "database": {
+ "title": "Database",
+ "description": "Config options related to database settings.",
+ },
+ "log_config": {
+ "title": "Logging",
+ "description": ("Config options related to logging."),
+ },
+ "rc_message": {
+ "title": "Ratelimiting",
+ "description": (
+ "Options related to ratelimiting in Synapse.\n\n"
+ "Each ratelimiting configuration is made of two parameters:\n"
+ "- `per_second`: number of requests a client can send per second.\n"
+ "- `burst_count`: number of requests a client can send before "
+ "being throttled."
+ ),
+ },
+ "enable_authenticated_media": {
+ "title": "Media Store",
+ "description": "Config options related to Synapse's media store.",
+ },
+ "recaptcha_public_key": {
+ "title": "Captcha",
+ "description": (
+ "See [here](../../CAPTCHA_SETUP.md) for full details on setting up captcha."
+ ),
+ },
+ "turn_uris": {
+ "title": "TURN",
+ "description": ("Options related to adding a TURN server to Synapse."),
+ },
+ "enable_registration": {
+ "title": "Registration",
+ "description": (
+ "Registration can be rate-limited using the parameters in the "
+ "[Ratelimiting](#ratelimiting) section of this manual."
+ ),
+ },
+ "session_lifetime": {
+ "title": "User session management",
+ "description": ("Config options related to user session management."),
+ },
+ "enable_metrics": {
+ "title": "Metrics",
+ "description": ("Config options related to metrics."),
+ },
+ "room_prejoin_state": {
+ "title": "API Configuration",
+ "description": ("Config settings related to the client/server API."),
+ },
+ "signing_key_path": {
+ "title": "Signing Keys",
+ "description": ("Config options relating to signing keys."),
+ },
+ "push": {
+ "title": "Push",
+ "description": ("Configuration settings related to push notifications."),
+ },
+ "encryption_enabled_by_default_for_room_type": {
+ "title": "Rooms",
+ "description": ("Config options relating to rooms."),
+ },
+ "opentracing": {
+ "title": "Opentracing",
+ "description": ("Configuration options related to Opentracing support."),
+ },
+ "worker_replication_secret": {
+ "title": "Coordinating workers",
+ "description": (
+ "Configuration options related to workers which belong in the main config file (usually called `homeserver.yaml`). A Synapse deployment can scale horizontally by running multiple Synapse processes called _workers_. Incoming requests are distributed between workers to handle higher loads. Some workers are privileged and can accept requests from other workers.\n\n"
+ "As a result, the worker configuration is divided into two parts.\n\n"
+ "1. The first part (in this section of the manual) defines which shardable tasks are delegated to privileged workers. This allows unprivileged workers to make requests to a privileged worker to act on their behalf.\n"
+ "2. [The second part](#individual-worker-configuration) controls the behaviour of individual workers in isolation.\n\n"
+ "For guidance on setting up workers, see the [worker documentation](../../workers.md)."
+ ),
+ },
+ "worker_app": {
+ "title": "Individual worker configuration",
+ "description": (
+            "These options configure an individual worker, in its worker configuration file. They should not be provided when configuring the main process.\n\n"
+ "Note also the configuration above for [coordinating a cluster of workers](#coordinating-workers).\n\n"
+ "For guidance on setting up workers, see the [worker documentation](../../workers.md)."
+ ),
+ },
+ "background_updates": {
+ "title": "Background Updates",
+ "description": ("Configuration settings related to background updates."),
+ },
+ "auto_accept_invites": {
+ "title": "Auto Accept Invites",
+ "description": (
+ "Configuration settings related to automatically accepting invites."
+ ),
+ },
+}
+INDENT = " "
+
+
+has_error = False
+
+
+def error(text: str) -> None:
+ global has_error
+ print(f"ERROR: {text}", file=sys.stderr)
+ has_error = True
+
+
+def indent(text: str, first_line: bool = True) -> str:
+ """Indents each non-empty line of the given text."""
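+    # e.g. indent("a\nb") returns INDENT + "a" + "\n" + INDENT + "b"; blank
+    # lines are left untouched (illustrative).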
+ text = re.sub(r"(\n)([^\n])", r"\1" + INDENT + r"\2", text)
+ if first_line:
+ text = re.sub(r"^([^\n])", INDENT + r"\1", text)
+
+ return text
+
+
+def em(s: Optional[str]) -> str:
+ """Add emphasis to text."""
+ return f"*{s}*" if s else ""
+
+
+def a(s: Optional[str], suffix: str = " ") -> str:
+    """Append the suffix (a space by default) if the given string is not empty."""
+ return s + suffix if s else ""
+
+
+def p(s: Optional[str], prefix: str = " ") -> str:
+    """Prepend the prefix (a space by default) if the given string is not empty."""
+ return prefix + s if s else ""
+
+
+def resolve_local_refs(schema: dict) -> dict:
+ """Returns the given schema with local $ref properties replaced by their keywords.
+
+    Crude approximation: keywords from the referenced definition take
+    precedence over keywords already present on the referencing dict.
+ """
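+    # Rough sketch of the behaviour: {"$ref": "#/$defs/foo", "type": "object"}
+    # becomes $defs["foo"] merged over the referencing dict, with any
+    # "properties" dicts merged key-by-key rather than replaced wholesale.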
+ defs = schema["$defs"]
+
+ def replace_ref(d: Any) -> Any:
+ if isinstance(d, dict):
+ the_def = {}
+ if "$ref" in d:
+ # Found a "$ref" key.
+ def_name = d["$ref"].removeprefix("#/$defs/")
+ del d["$ref"]
+ the_def = defs[def_name]
+
+ new_dict = {k: replace_ref(v) for k, v in d.items()}
+ if common_keys := (new_dict.keys() & the_def.keys()) - {"properties"}:
+ print(
+ f"WARN: '{def_name}' overrides keys '{common_keys}'",
+ file=sys.stderr,
+ )
+
+ new_dict_props = new_dict.get("properties", {})
+ the_def_props = the_def.get("properties", {})
+ if common_props := new_dict_props.keys() & the_def_props.keys():
+ print(
+ f"WARN: '{def_name}' overrides properties '{common_props}'",
+ file=sys.stderr,
+ )
+ if merged_props := {**new_dict_props, **the_def_props}:
+ return {**new_dict, **the_def, "properties": merged_props}
+ else:
+ return {**new_dict, **the_def}
+
+ elif isinstance(d, list):
+ return [replace_ref(v) for v in d]
+ else:
+ return d
+
+ return replace_ref(schema)
+
+
+def sep(values: dict) -> str:
+ """Separator between parts of the description."""
+ # If description is multiple paragraphs already, add new ones. Otherwise
+ # append to same paragraph.
+ return "\n\n" if "\n\n" in values.get("description", "") else " "
+
+
+def type_str(values: dict) -> str:
+ """Type of the current value."""
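+    # e.g. {"type": ["string", "null"]} yields "(string|null)" (illustrative).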
+ if t := values.get("io.element.type_name"):
+ # Allow custom overrides for the type name, for documentation clarity
+ return f"({t})"
+ if not (t := values.get("type")):
+ return ""
+ if not isinstance(t, list):
+ t = [t]
+ joined = "|".join(t)
+ return f"({joined})"
+
+
+def items(values: dict) -> str:
+ """A block listing properties of array items."""
+ if not (items := values.get("items")):
+ return ""
+ if not (item_props := items.get("properties")):
+ return ""
+ return "\nOptions for each entry include:\n\n" + "\n".join(
+ sub_section(k, v) for k, v in item_props.items()
+ )
+
+
+def properties(values: dict) -> str:
+ """A block listing object properties."""
+ if not (properties := values.get("properties")):
+ return ""
+ return "\nThis setting has the following sub-options:\n\n" + "\n".join(
+ sub_section(k, v) for k, v in properties.items()
+ )
+
+
+def sub_section(prop: str, values: dict) -> str:
+ """Formats a bullet point about the given sub-property."""
+ sep = lambda: globals()["sep"](values)
+ type_str = lambda: globals()["type_str"](values)
+ items = lambda: globals()["items"](values)
+ properties = lambda: globals()["properties"](values)
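+    # These lambdas deliberately shadow the module-level helpers of the same
+    # name, pre-binding `values` so the nested closures below can call them
+    # with no arguments.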
+
+ def default() -> str:
+ try:
+ default = values["default"]
+ return f"Defaults to `{json.dumps(default)}`."
+ except KeyError:
+ return ""
+
+ def description() -> str:
+ if not (description := values.get("description")):
+ error(f"missing description for {prop}")
+ return "MISSING DESCRIPTION\n"
+
+ return f"{description}{p(default(), sep())}\n"
+
+ return (
+ f"* `{prop}`{p(type_str())}: "
+ + f"{indent(description(), first_line=False)}"
+ + indent(items())
+ + indent(properties())
+ )
+
+
+def section(prop: str, values: dict) -> str:
+ """Formats a section about the given property."""
+ sep = lambda: globals()["sep"](values)
+ type_str = lambda: globals()["type_str"](values)
+ items = lambda: globals()["items"](values)
+ properties = lambda: globals()["properties"](values)
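+    # (Same pre-binding/shadowing trick as in sub_section above.)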
+
+ def is_simple_default() -> bool:
+ """Whether the given default is simple enough for a one-liner."""
+ if not (d := values.get("default")):
+ return True
+ return not isinstance(d, dict) and not isinstance(d, list)
+
+ def default_str() -> str:
+ try:
+ default = values["default"]
+ except KeyError:
+ t = values.get("type", [])
+ if "object" == t or "object" in t:
+ # Skip objects as they probably have child defaults.
+ return ""
+ return "There is no default for this option."
+
+ if not is_simple_default():
+ # Show complex defaults as a code block instead.
+ return ""
+ return f"Defaults to `{json.dumps(default)}`."
+
+ def header() -> str:
+ try:
+ title = SECTION_HEADERS[prop]["title"]
+ description = SECTION_HEADERS[prop]["description"]
+ return f"## {title}\n\n{description}\n\n---\n"
+ except KeyError:
+ return ""
+
+ def title() -> str:
+ return f"### `{prop}`\n"
+
+ def description() -> str:
+ if not (description := values.get("description")):
+ error(f"missing description for {prop}")
+ return "MISSING DESCRIPTION\n"
+ return f"\n{a(em(type_str()))}{description}{p(default_str(), sep())}\n"
+
+ def example_str(example: Any) -> str:
+ return "```yaml\n" + f"{yaml.dump({prop: example}, sort_keys=False)}" + "```\n"
+
+ def default_example() -> str:
+ if is_simple_default():
+ return ""
+ default_cfg = example_str(values["default"])
+ return f"\nDefault configuration:\n{default_cfg}"
+
+ def examples() -> str:
+ if not (examples := values.get("examples")):
+ return ""
+
+ examples_str = "\n".join(example_str(e) for e in examples)
+
+ if len(examples) >= 2:
+ return f"\nExample configurations:\n{examples_str}"
+ else:
+ return f"\nExample configuration:\n{examples_str}"
+
+ def post_description() -> str:
+ # Sometimes it's helpful to have a description after the list of fields,
+ # e.g. with a subsection that consists only of text.
+ # This helps with that.
+ if not (description := values.get("io.element.post_description")):
+ return ""
+ return f"\n{description}\n\n"
+
+ return (
+ "---\n"
+ + header()
+ + title()
+ + description()
+ + items()
+ + properties()
+ + default_example()
+ + examples()
+ + post_description()
+ )
+
+
+def main() -> None:
+ def usage(err_msg: str) -> int:
+ script_name = (sys.argv[:1] or ["__main__.py"])[0]
+ print(err_msg, file=sys.stderr)
+ print(f"Usage: {script_name} <JSON Schema file>", file=sys.stderr)
+ print(f"\n{__doc__}", file=sys.stderr)
+ exit(1)
+
+ def read_json_file_arg() -> Any:
+ if len(sys.argv) > 2:
+ exit(usage("Too many arguments."))
+ if not (filepath := (sys.argv[1:] or [""])[0]):
+ exit(usage("No schema file provided."))
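+        # The schema file is YAML; yaml.safe_load also accepts plain JSON,
+        # since JSON is (for practical purposes) a subset of YAML.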
+ with open(filepath) as f:
+ return yaml.safe_load(f)
+
+ schema = read_json_file_arg()
+ schema = resolve_local_refs(schema)
+
+ sections = (section(k, v) for k, v in schema["properties"].items())
+ print(HEADER + "".join(sections), end="")
+
+ if has_error:
+ print("There were errors.", file=sys.stderr)
+ exit(2)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh
index 8acf0a6fb8..7096100a3e 100755
--- a/scripts-dev/lint.sh
+++ b/scripts-dev/lint.sh
@@ -1,8 +1,9 @@
#!/usr/bin/env bash
#
# Runs linting scripts over the local Synapse checkout
-# black - opinionated code formatter
# ruff - lints and finds mistakes
+# mypy - typechecks Python code
+# cargo clippy - lints Rust code
set -e
@@ -101,12 +102,6 @@ echo
# Print out the commands being run
set -x
-# Ensure the sort order of imports.
-isort "${files[@]}"
-
-# Ensure Python code conforms to an opinionated style.
-python3 -m black "${files[@]}"
-
# Ensure the sample configuration file conforms to style checks.
./scripts-dev/config-lint.sh
@@ -114,6 +109,9 @@ python3 -m black "${files[@]}"
# --quiet suppresses the update check.
ruff check --quiet --fix "${files[@]}"
+# Reformat Python code.
+ruff format --quiet "${files[@]}"
+
# Catch any common programming mistakes in Rust code.
#
# --bins, --examples, --lib, --tests combined explicitly disable checking
@@ -141,3 +139,6 @@ cargo-fmt
# Ensure type hints are correct.
mypy
+
+# Generate configuration documentation from the JSON Schema
+./scripts-dev/gen_config_documentation.py schema/synapse-config.schema.yaml > docs/usage/configuration/config_documentation.md
diff --git a/scripts-dev/mypy_synapse_plugin.py b/scripts-dev/mypy_synapse_plugin.py
index 877b831751..a15c3c005c 100644
--- a/scripts-dev/mypy_synapse_plugin.py
+++ b/scripts-dev/mypy_synapse_plugin.py
@@ -38,6 +38,7 @@ from mypy.types import (
NoneType,
TupleType,
TypeAliasType,
+ TypeVarType,
UninhabitedType,
UnionType,
)
@@ -233,6 +234,7 @@ IMMUTABLE_CUSTOM_TYPES = {
"synapse.synapse_rust.push.FilteredPushRules",
# This is technically not immutable, but close enough.
"signedjson.types.VerifyKey",
+ "synapse.types.StrCollection",
}
# Immutable containers only if the values are also immutable.
@@ -298,7 +300,7 @@ def is_cacheable(
elif rt.type.fullname in MUTABLE_CONTAINER_TYPES:
# Mutable containers are mutable regardless of their underlying type.
- return False, None
+ return False, f"container {rt.type.fullname} is mutable"
elif "attrs" in rt.type.metadata:
# attrs classes are only cachable iff it is frozen (immutable itself)
@@ -318,6 +320,9 @@ def is_cacheable(
else:
return False, "non-frozen attrs class"
+ elif rt.type.is_enum:
+ # We assume Enum values are immutable
+ return True, None
else:
# Ensure we fail for unknown types, these generally means that the
# above code is not complete.
@@ -326,6 +331,18 @@ def is_cacheable(
f"Don't know how to handle {rt.type.fullname} return type instance",
)
+ elif isinstance(rt, TypeVarType):
+ # We consider TypeVars immutable if they are bound to a set of immutable
+ # types.
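+        # (In mypy, `rt.values` is populated for value-restricted TypeVars such
+        # as TypeVar("T", int, str); TypeVars without explicit values fall
+        # through to the "unbound" case below.)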
+ if rt.values:
+ for value in rt.values:
+ ok, note = is_cacheable(value, signature, verbose)
+ if not ok:
+ return False, f"TypeVar bound not cacheable {value}"
+ return True, None
+
+ return False, "TypeVar is unbound"
+
elif isinstance(rt, NoneType):
# None is cachable.
return True, None
@@ -343,7 +360,7 @@ def is_cacheable(
# For a type alias, check if the underlying real type is cachable.
return is_cacheable(mypy.types.get_proper_type(rt), signature, verbose)
- elif isinstance(rt, UninhabitedType) and rt.is_noreturn:
+ elif isinstance(rt, UninhabitedType):
# There is no return value, just consider it cachable. This is only used
# in tests.
return True, None
diff --git a/scripts-dev/release.py b/scripts-dev/release.py
index 5e519bb758..5de5814b17 100755
--- a/scripts-dev/release.py
+++ b/scripts-dev/release.py
@@ -20,8 +20,7 @@
#
#
-"""An interactive script for doing a release. See `cli()` below.
-"""
+"""An interactive script for doing a release. See `cli()` below."""
import glob
import json
@@ -41,7 +40,7 @@ import commonmark
import git
from click.exceptions import ClickException
from git import GitCommandError, Repo
-from github import Github
+from github import BadCredentialsException, Github
from packaging import version
@@ -255,6 +254,12 @@ def _prepare() -> None:
# Update the version specified in pyproject.toml.
subprocess.check_output(["poetry", "version", new_version])
+ # Update config schema $id.
+ schema_file = "schema/synapse-config.schema.yaml"
+ major_minor_version = ".".join(new_version.split(".")[:2])
+ url = f"https://element-hq.github.io/synapse/schema/synapse/v{major_minor_version}/synapse-config.schema.json"
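+    # The "0,/^\$id: .*/" address limits the substitution to the first matching
+    # line, and the empty pattern in "s||...|" reuses that same regex (GNU sed).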
+ subprocess.check_output(["sed", "-i", f"0,/^\\$id: .*/s||$id: {url}|", schema_file])
+
# Generate changelogs.
generate_and_write_changelog(synapse_repo, current_version, new_version)
@@ -324,6 +329,9 @@ def tag(gh_token: Optional[str]) -> None:
def _tag(gh_token: Optional[str]) -> None:
"""Tags the release and generates a draft GitHub release"""
+ # Test that the GH Token is valid before continuing.
+ check_valid_gh_token(gh_token)
+
# Make sure we're in a git repo.
repo = get_repo_and_check_clean_checkout()
@@ -418,6 +426,11 @@ def publish(gh_token: str) -> None:
def _publish(gh_token: str) -> None:
"""Publish release on GitHub."""
+ if gh_token:
+ # Test that the GH Token is valid before continuing.
+ gh = Github(gh_token)
+ gh.get_user()
+
# Make sure we're in a git repo.
get_repo_and_check_clean_checkout()
@@ -460,6 +473,9 @@ def upload(gh_token: Optional[str]) -> None:
def _upload(gh_token: Optional[str]) -> None:
"""Upload release to pypi."""
+ # Test that the GH Token is valid before continuing.
+ check_valid_gh_token(gh_token)
+
current_version = get_package_version()
tag_name = f"v{current_version}"
@@ -555,6 +571,9 @@ def wait_for_actions(gh_token: Optional[str]) -> None:
def _wait_for_actions(gh_token: Optional[str]) -> None:
+ # Test that the GH Token is valid before continuing.
+ check_valid_gh_token(gh_token)
+
# Find out the version and tag name.
current_version = get_package_version()
tag_name = f"v{current_version}"
@@ -579,7 +598,7 @@ def _wait_for_actions(gh_token: Optional[str]) -> None:
if all(
workflow["status"] != "in_progress" for workflow in resp["workflow_runs"]
):
- success = (
+ success = all(
workflow["status"] == "completed" for workflow in resp["workflow_runs"]
)
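+            # (Previously this assigned a bare generator expression, which is
+            # always truthy; all() actually evaluates the checks.)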
if success:
@@ -711,6 +730,11 @@ Ask the designated people to do the blog and tweets."""
@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
def full(gh_token: str) -> None:
+ if gh_token:
+ # Test that the GH Token is valid before continuing.
+ gh = Github(gh_token)
+ gh.get_user()
+
click.echo("1. If this is a security release, read the security wiki page.")
click.echo("2. Check for any release blockers before proceeding.")
click.echo(" https://github.com/element-hq/synapse/labels/X-Release-Blocker")
@@ -782,6 +806,22 @@ def get_repo_and_check_clean_checkout(
return repo
+def check_valid_gh_token(gh_token: Optional[str]) -> None:
+    """Check that a GitHub token is valid, if supplied."""
+
+ if not gh_token:
+ # No github token supplied, so nothing to do.
+ return
+
+ try:
+ gh = Github(gh_token)
+
+        # We need to look up the name to trigger a request.
+ _name = gh.get_user().name
+ except BadCredentialsException as e:
+ raise click.ClickException(f"Github credentials are bad: {e}")
+
+
def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
"""Find the branch/ref, looking first locally then in the remote."""
if ref_name in repo.references:
|