author     Olivier Wilkinson (reivilibre) <oliverw@matrix.org>  2022-03-17 10:42:28 +0000
committer  Olivier Wilkinson (reivilibre) <oliverw@matrix.org>  2022-03-17 10:42:28 +0000
commit     70ee28f3868d55940f485ac9ac6bae9aea527f23
tree       cf707c751c4c5c9ff87f571e7ea8334a2487daf0
parent     STASH
download   synapse-70ee28f3868d55940f485ac9ac6bae9aea527f23.tar.xz

WORKING workers setup template

-rw-r--r--  scripts-dev/workers_setup.md                   61
-rwxr-xr-x  scripts-dev/workers_setup.py                  174
-rw-r--r--  scripts-dev/workers_setup/Caddyfile.j2         24
-rw-r--r--  scripts-dev/workers_setup/homeserver.yaml.j2   57
-rw-r--r--  scripts-dev/workers_setup/worker.yaml.j2       22
5 files changed, 300 insertions(+), 38 deletions(-)
diff --git a/scripts-dev/workers_setup.md b/scripts-dev/workers_setup.md
new file mode 100644
index 0000000000..a6108e32d9
--- /dev/null
+++ b/scripts-dev/workers_setup.md
@@ -0,0 +1,61 @@
+# workers_setup
+
+This gives you a **development-grade** installation of workerised Synapse.
+
+DO NOT USE ME IN PRODUCTION.
+
+## Known defects
+
+* Non-generic workers aren't set up properly with their worker type.
+* I haven't checked the routes that well; they are probably wrong.
+
+
+## Requirements from you:
+
+* Redis on default port (unauthenticated)
+  ```
+  # You need Redis. On Ubuntu, this gets you what you need running on the right port:
+  apt install redis-server redis-tools
+  ```
+* Postgres on default port, using UNIX sockets for authentication.
+  This means you want your normal user account to have a corresponding Postgres account,
+  and let Postgres authenticate you automatically.
+  On Ubuntu, this just means you need to `createuser <your Linux account name>`.
+  You need a database with the same name as your server_name (I used `syn7`).
+  It should be owned by your user; see `createdb` to do that properly (and don't
+  forget to follow the Synapse instructions to use a C locale!)
+  Typing `psql syn7` should just work once your database is ready.
+  (If your UNIX socket is not numbered 5432, you might have to add `port: 5433`
+  to the config. Somehow I messed up my Postgres installation ages ago such that it
+  chose port 5433 rather than the default 5432...)
+* Virtualenv with Synapse (don't forget: `[postgres,redis]`)
+* You'll need a bog-standard Caddy binary (as the reverse proxy / router).
+  The website offers pre-built static binaries.
+* (Optional): If you want to federate, you can set up TLS yourself afterwards.
+  I haven't bothered so far.
+
+
+## Run the script
+
+```
+# python scripts-dev/workers_setup.py (path to server dir) (server name)
+python scripts-dev/workers_setup.py ../servers/syn7_auto syn7
+```
+
+
+## Launching the homeserver
+
+```
+cd syn7_auto
+/path/to/synapse/.venv/bin/synctl start homeserver.yaml -a workers
+/path/to/caddy run
+```
+
+
+## Stopping the homeserver
+
+```
+# ^C to stop Caddy
+/path/to/synapse/.venv/bin/synctl stop homeserver.yaml -a workers
+```
+
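Before running the generator, the Redis and Postgres prerequisites above can be sanity-checked from Python. This is an illustrative sketch, not part of this commit: it assumes Redis on its default port 6379, Postgres peer authentication over the local UNIX socket, and reuses the example database name `syn7` from the README (psycopg2 is what the `[postgres]` extra mentioned above installs).

```python
# Hypothetical prerequisite check -- not part of the commit.
import socket
import sys

import psycopg2  # installed by the synapse[postgres] extra


def check_redis(host: str = "localhost", port: int = 6379) -> bool:
    """Return True if something is listening on the default Redis port."""
    try:
        with socket.create_connection((host, port), timeout=2):
            return True
    except OSError:
        return False


def check_postgres(dbname: str) -> bool:
    """Return True if the database exists and UNIX-socket peer auth works."""
    try:
        psycopg2.connect(dbname=dbname).close()
        return True
    except psycopg2.OperationalError:
        return False


if __name__ == "__main__":
    dbname = sys.argv[1] if len(sys.argv) > 1 else "syn7"
    print("redis:", "ok" if check_redis() else "not reachable")
    print(f"postgres ({dbname}):", "ok" if check_postgres(dbname) else "not reachable")
```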
diff --git a/scripts-dev/workers_setup.py b/scripts-dev/workers_setup.py
index 5d525d76f2..6971f1afa2 100755
--- a/scripts-dev/workers_setup.py
+++ b/scripts-dev/workers_setup.py
@@ -16,7 +16,7 @@ import dataclasses
 import sys
 from os.path import dirname
 from pathlib import Path
-from typing import List, Tuple, Iterable
+from typing import Collection, Dict, Iterable, List, Sequence, Tuple

 from jinja2 import Environment, FileSystemLoader
 from signedjson.key import generate_signing_key, write_signing_keys
@@ -43,6 +43,118 @@ DESIRED_WORKERS = (
     # TODO frontend_proxy?
 )

+# TODO These are probably all wrong
+
+# ^/_matrix/client/(api/v1|r0|v3|unstable)/sendToDevice/ ?
+# ^/_matrix/client/(api/v1|r0|v3|unstable)/.*/tags
+# ^/_matrix/client/(api/v1|r0|v3|unstable)/.*/account_data ?
+# ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/receipt
+# ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/read_markers ?
+# ^/_matrix/client/(api/v1|r0|v3|unstable)/presence/ ?
+
+WORKER_ROUTES: Dict[str, Tuple[str, ...]] = {
+    "main": (),
+    "synchrotron": (
+        "^/_matrix/client/(v2_alpha|r0|v3)/sync$",
+        "^/_matrix/client/(api/v1|v2_alpha|r0|v3)/events$",
+        "^/_matrix/client/(api/v1|r0|v3)/initialSync$",
+        "^/_matrix/client/(api/v1|r0|v3)/rooms/[^/]+/initialSync$",
+    ),
+    "federation_inbound": ("^/_matrix/federation/v1/send/",),
+    "federation_reader": (
+        "^/_matrix/federation/v1/event/",
+        "^/_matrix/federation/v1/state/",
+        "^/_matrix/federation/v1/state_ids/",
+        "^/_matrix/federation/v1/backfill/",
+        "^/_matrix/federation/v1/get_missing_events/",
+        "^/_matrix/federation/v1/publicRooms",
+        "^/_matrix/federation/v1/query/",
+        "^/_matrix/federation/v1/make_join/",
+        "^/_matrix/federation/v1/make_leave/",
+        "^/_matrix/federation/v1/send_join/",
+        "^/_matrix/federation/v2/send_join/",
+        "^/_matrix/federation/v1/send_leave/",
+        "^/_matrix/federation/v2/send_leave/",
+        "^/_matrix/federation/v1/invite/",
+        "^/_matrix/federation/v2/invite/",
+        "^/_matrix/federation/v1/query_auth/",
+        "^/_matrix/federation/v1/event_auth/",
+        "^/_matrix/federation/v1/exchange_third_party_invite/",
+        "^/_matrix/federation/v1/user/devices/",
+        "^/_matrix/federation/v1/get_groups_publicised$",
+        "^/_matrix/key/v2/query",
+        "^/_matrix/federation/(v1|unstable/org.matrix.msc2946)/hierarchy/",
+    ),
+    "federation_sender": (),
+    "typing": ("^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/typing",),
+    "appservice": (),
+    "client_reader": (
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/createRoom$",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/publicRooms$",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/joined_members$",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/context/.*$",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/members$",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/state$",
+        "^/_matrix/client/(v1|unstable/org.matrix.msc2946)/rooms/.*/hierarchy$",
+        "^/_matrix/client/unstable/im.nheko.summary/rooms/.*/summary$",
+        "^/_matrix/client/(r0|v3|unstable)/account/3pid$",
+        "^/_matrix/client/(r0|v3|unstable)/devices$",
+        "^/_matrix/client/versions$",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
+        "^/_matrix/client/(r0|v3|unstable)/joined_groups$",
+        "^/_matrix/client/(r0|v3|unstable)/publicised_groups$",
+        "^/_matrix/client/(r0|v3|unstable)/publicised_groups/",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event/",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/joined_rooms$",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/search$",
+        "^/_matrix/client/(r0|v3|unstable)/keys/query$",
+        "^/_matrix/client/(r0|v3|unstable)/keys/changes$",
+        "^/_matrix/client/(r0|v3|unstable)/keys/claim$",
+        "^/_matrix/client/(r0|v3|unstable)/room_keys/",
+        #
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/login$",
+        "^/_matrix/client/(r0|v3|unstable)/register$",
+        "^/_matrix/client/v1/register/m.login.registration_token/validity$",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/redact",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/send",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/state/",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/join/",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/profile/",
+        "^/_matrix/client/(r0|v3|unstable)/sendToDevice/",
+        "^/_matrix/client/(r0|v3|unstable)/.*/tags",
+        "^/_matrix/client/(r0|v3|unstable)/.*/account_data",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/presence/",
+        "^/_matrix/client/(r0|v3|unstable)/rooms/.*/receipt",
+        "^/_matrix/client/(r0|v3|unstable)/rooms/.*/read_markers",
+        "^/_matrix/client/(r0|v3|unstable)/.*/tags",
+        "^/_matrix/client/(r0|v3|unstable)/.*/account_data",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/messages$",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/login/sso/redirect",
+        "^/_synapse/client/pick_idp$",
+        "^/_synapse/client/pick_username",
+        "^/_synapse/client/new_user_consent$",
+        "^/_synapse/client/sso_register$",
+        "^/_synapse/client/oidc/callback$",
+        "^/_synapse/client/saml2/authn_response$",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/login/cas/ticket$",
+    ),
+    "event_creator": (),
+    "event_persister": (),
+    "media_repository": (
+        "^/_synapse/admin/v1/purge_media_cache$",
+        "^/_synapse/admin/v1/room/.*/media.*$",
+        "^/_synapse/admin/v1/user/.*/media.*$",
+        "^/_synapse/admin/v1/media/.*$",
+        "^/_synapse/admin/v1/quarantine_media/.*$",
+        "^/_synapse/admin/v1/users/.*/media$",
+    ),
+    "pusher": (),
+    "user_dir": ("^/_matrix/client/(api/v1|r0|v3|unstable)/user_directory/search$",),
+    "background_worker": (),
+    "receipts_account_data": (),
+}
+

 @dataclasses.dataclass
 class Worker:
@@ -67,14 +179,21 @@ def make_workers(workers: Iterable[Tuple[str, int]]) -> List[Worker]:
         else:
             worker_name = f"{worker_type}{worker_idx}"

-        result.append(Worker(
-            worker_name, worker_type, worker_idx, worker_num_to_ip(worker_overall_num)
-        ))
+        result.append(
+            Worker(
+                worker_name,
+                worker_type,
+                worker_idx,
+                worker_num_to_ip(worker_overall_num),
+            )
+        )

     return result


-def generate(worker_counts: Tuple[Tuple[str, int], ...], target_path: Path, server_name: str) -> None:
+def generate(
+    worker_counts: Tuple[Tuple[str, int], ...], target_path: Path, server_name: str
+) -> None:
     if target_path.exists():
         print("Target path already exists. Won't overwrite.")
         return
@@ -92,6 +211,7 @@ def generate(worker_counts: Tuple[Tuple[str, int], ...], target_path: Path, serv
     hs_template = env.get_template("homeserver.yaml.j2")
     worker_template = env.get_template("worker.yaml.j2")
     logging_template = env.get_template("logging.yaml.j2")
+    rp_template = env.get_template("Caddyfile.j2")

     worker_dir = target_path.joinpath("workers")
     worker_dir.mkdir()
@@ -113,14 +233,14 @@ def generate(worker_counts: Tuple[Tuple[str, int], ...], target_path: Path, serv
             worker_dir=worker_dir,
             logs_dir=logs_dir,
             all_workers=all_workers,
-            workers_by_name=workers_by_name
+            workers_by_name=workers_by_name,
         )
         with open(log_config_path, "w") as fout:
             fout.write(log_config)

-        if worker.name == "main":
-            # Main can't use a worker file.
-            continue
+        # if worker.name == "main":
+        #     # Main can't use a worker file.
+        #     continue

         worker_config_path = worker_dir.joinpath(f"{worker.name}.yaml")
         worker_config = worker_template.render(
@@ -128,7 +248,7 @@
             worker_dir=worker_dir,
             logs_dir=logs_dir,
             all_workers=all_workers,
-            workers_by_name=workers_by_name
+            workers_by_name=workers_by_name,
         )
         with open(worker_config_path, "w") as fout:
             fout.write(worker_config)
@@ -136,20 +256,50 @@
     hs_config_path = target_path.joinpath("homeserver.yaml")
     hs_config = hs_template.render(
         all_workers=all_workers,
+        workers_by_name=workers_by_name,
         worker_dir=worker_dir,
         logs_dir=logs_dir,
         server_name=server_name,
-        macaroon_secret_key=macaroon_secret_key
+        macaroon_secret_key=macaroon_secret_key,
     )
     with open(hs_config_path, "w") as fout:
         fout.write(hs_config)

+    caddy_config_path = target_path.joinpath("Caddyfile")
+    caddy_config = rp_template.render(
+        server_name=server_name,
+        port=8447,
+        http_ip=worker_num_to_ip(1),
+        routing=build_routes_template_var(all_workers),
+        main_server=f"{worker_num_to_ip(1)}:8080",
+    )
+    with open(caddy_config_path, "w") as fout:
+        fout.write(caddy_config)
+
+
+def build_routes_template_var(
+    all_workers: List[Worker],
+) -> Sequence[Tuple[str, Collection[str], List[str]]]:
+    route_groups = {}
+    for worker in all_workers:
+        if worker.kind not in route_groups:
+            if WORKER_ROUTES[worker.kind]:
+                route_groups[worker.kind] = worker.kind, WORKER_ROUTES[worker.kind], []
+            else:
+                continue
+
+        _, _routes, server_endpoints = route_groups[worker.kind]
+
+        server_endpoints.append(f"{worker.ip}:8080")
+
+    return tuple(route_groups.values())
+

 def main(target_path: Path, server_name: str) -> None:
     generate(DESIRED_WORKERS, target_path, server_name)


-if __name__ == '__main__':
+if __name__ == "__main__":
     target_path = Path(sys.argv[1])
     server_name = sys.argv[2]
     main(target_path, server_name)
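For readers following the routing changes above: `build_routes_template_var` collapses the worker list into one `(group name, route regexes, backend endpoints)` tuple per worker kind that has routes, and kinds with an empty route tuple never produce a group. Below is a minimal self-contained sketch of that behaviour, not part of the commit; the two-entry route table, worker names and IPs are invented.

```python
# Standalone illustration of the grouping done by build_routes_template_var above.
import dataclasses
from typing import Dict, List, Tuple


@dataclasses.dataclass
class Worker:
    name: str
    kind: str
    ip: str


# Invented two-entry route table, standing in for WORKER_ROUTES.
ROUTES: Dict[str, Tuple[str, ...]] = {
    "synchrotron": ("^/_matrix/client/(v2_alpha|r0|v3)/sync$",),
    "pusher": (),  # no inbound HTTP routes, so it never gets a route group
}


def group_routes(all_workers: List[Worker]):
    route_groups = {}
    for worker in all_workers:
        if worker.kind not in route_groups:
            if ROUTES[worker.kind]:
                route_groups[worker.kind] = (worker.kind, ROUTES[worker.kind], [])
            else:
                continue
        # Every worker of a routed kind contributes a backend endpoint.
        _, _routes, server_endpoints = route_groups[worker.kind]
        server_endpoints.append(f"{worker.ip}:8080")
    return tuple(route_groups.values())


workers = [
    Worker("synchrotron1", "synchrotron", "127.0.0.2"),
    Worker("synchrotron2", "synchrotron", "127.0.0.3"),
    Worker("pusher1", "pusher", "127.0.0.4"),
]
print(group_routes(workers))
# => (('synchrotron',
#      ('^/_matrix/client/(v2_alpha|r0|v3)/sync$',),
#      ['127.0.0.2:8080', '127.0.0.3:8080']),)
```

The two synchrotrons end up behind a single Caddy matcher with two `reverse_proxy` backends, while the pusher (no routes) is left out of the generated Caddyfile entirely.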
diff --git a/scripts-dev/workers_setup/Caddyfile.j2 b/scripts-dev/workers_setup/Caddyfile.j2
new file mode 100644
index 0000000000..d9820e37f2
--- /dev/null
+++ b/scripts-dev/workers_setup/Caddyfile.j2
@@ -0,0 +1,24 @@
+{
+    # Prevents Caddy from asking for sudo password to install a root cert that
+    # we don't even want to use here.
+    skip_install_trust
+}
+
+# If you want TLS, you can add https:// schemes and configure the TLS cert... somehow.
+http://{{ server_name }}:{{ port }}, http://{{ http_ip }}:{{ port }} {
+    {%- for route_group_name, routes, route_servers in routing %}
+    @{{ route_group_name }} {
+        {%- for route in routes %}
+        path_regexp {{ route }}
+        {%- endfor %}
+    }
+    route @{{ route_group_name }} {
+        reverse_proxy {% for server in route_servers %} {{ server }} {% endfor %}
+    }
+    {%- endfor %}
+
+    # fallback to main
+    route {
+        reverse_proxy {{ main_server }}
+    }
+}
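As a usage sketch (not part of the commit), the template above can be rendered on its own to preview the generated Caddyfile. The values below are invented, but the parameter names match what workers_setup.py passes in (`server_name`, `port`, `http_ip`, `routing`, `main_server`); run it from the repository root so the loader finds `scripts-dev/workers_setup/Caddyfile.j2`.

```python
# Render Caddyfile.j2 on its own; the routing entries mirror the shape produced
# by build_routes_template_var: (group name, route regexes, backend endpoints).
from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader("scripts-dev/workers_setup"))
template = env.get_template("Caddyfile.j2")

print(
    template.render(
        server_name="syn7",
        port=8447,
        http_ip="127.0.0.2",
        routing=[
            (
                "synchrotron",
                ["^/_matrix/client/(v2_alpha|r0|v3)/sync$"],
                ["127.0.0.3:8080"],
            ),
        ],
        main_server="127.0.0.2:8080",
    )
)
```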
diff --git a/scripts-dev/workers_setup/homeserver.yaml.j2 b/scripts-dev/workers_setup/homeserver.yaml.j2
index b2c6ebfb71..90cef91337 100644
--- a/scripts-dev/workers_setup/homeserver.yaml.j2
+++ b/scripts-dev/workers_setup/homeserver.yaml.j2
@@ -7,6 +7,8 @@ signing_key_path: "{{ worker_dir }}/../signing.key"

 macaroon_secret_key: "{{ macaroon_secret_key }}"

+enable_registration: true
+
 redis:
   enabled: true
   #host: localhost
@@ -14,6 +16,20 @@ redis:

 trusted_key_servers: []

+listeners:
+  - port: 8080
+    bind_address: {{ workers_by_name.main.ip }}
+    type: http
+    resources:
+      - names: [client, federation]
+
+  # The HTTP replication port
+  - port: 9090
+    bind_address: {{ workers_by_name.main.ip }}
+    type: http
+    resources:
+      - names: [replication]
+
 database:
   name: psycopg2
   args:
@@ -28,45 +44,44 @@ database:
     cp_max: 10

 instance_map:
-{-% for worker in all_workers %}
+{%- for worker in all_workers %}
   {{ worker.name }}:
     host: {{ worker.ip }}
     port: 9090
-{% endfor %}
+{%- endfor %}

 stream_writers:
   events:
-{% for worker in all_workers %-}
-{-% if worker.kind == "event_persister" %}
+{%- for worker in all_workers %}
+{%- if worker.kind == "event_persister" %}
     - {{ worker.name }}
-{% endif %-}
-{-% endfor %}
-
-typing:
-{% for worker in all_workers %-}
-{-% if worker.kind == "typing" %}
+{%- endif %}
+{%- endfor %}
+  typing:
+{%- for worker in all_workers %}
+{%- if worker.kind == "typing" %}
     - {{ worker.name }}
-{% endif %-}
-{-% endfor %}
+{%- endif %}
+{%- endfor %}

 start_pushers: false
 pusher_instances:
-{% for worker in all_workers %-}
-{-% if worker.kind == "pusher" %}
+{% for worker in all_workers -%}
+{%- if worker.kind == "pusher" %}
    - {{ worker.name }}
-{% endif %-}
-{-% endfor %}
+{%- endif %}
+{%- endfor %}

 notify_appservices: False

 federation_sender_instances:
-{% for worker in all_workers %-}
-{-% if worker.kind == "federation_sender" %}
+{% for worker in all_workers -%}
+{%- if worker.kind == "federation_sender" %}
    - {{ worker.name }}
-{% endif %-}
-{-% endfor %}
+{% endif -%}
+{% endfor %}

 enable_media_repo: False
@@ -75,3 +90,5 @@ media_instance_running_background_jobs: "media1"
 update_user_directory: False

 pid_file: "{{ logs_dir }}/main.pid"
+
+log_config: '{{ worker_dir }}.logging/main.logging.yaml'
\ No newline at end of file
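One property the rendered homeserver.yaml relies on: every worker listed under `stream_writers` must also appear in `instance_map`, since that is where other processes look up the writer's replication listener. Below is a small illustrative check, not part of the commit; it assumes PyYAML (which Synapse already depends on) and an invented script name.

```python
# Verify that each stream writer in a generated homeserver.yaml is resolvable
# via instance_map.  Usage: python check_writers.py <target>/homeserver.yaml
import sys

import yaml

with open(sys.argv[1]) as f:
    config = yaml.safe_load(f)

instance_map = config.get("instance_map") or {}
stream_writers = config.get("stream_writers") or {}

for stream, writers in stream_writers.items():
    for writer in writers:
        status = "ok" if writer in instance_map else "MISSING from instance_map"
        print(f"{stream}: {writer} -> {status}")
```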
diff --git a/scripts-dev/workers_setup/worker.yaml.j2 b/scripts-dev/workers_setup/worker.yaml.j2
index 7a83efc7df..81dae3b8a4 100644
--- a/scripts-dev/workers_setup/worker.yaml.j2
+++ b/scripts-dev/workers_setup/worker.yaml.j2
@@ -1,26 +1,36 @@
-{# TODO worker apps! #}
+{% set main_worker = workers_by_name.main %}
+{% if worker.kind == "main" %}
+worker_app: synapse.app.homeserver
+{% else %}
+worker_app: synapse.app.generic_worker
 worker_name: {{ worker.name }}

 # The replication listener on the main synapse process.
-worker_replication_host: {{ worker.ip }}
+worker_replication_host: {{ main_worker.ip }}
 worker_replication_http_port: 9090

 worker_listeners:
   - type: http
+    bind_address: {{ worker.ip }}
     port: 8080
     resources:
       - names:
         - client
         - federation
-{% if worker.kind == "media" %}
+{%- if worker.kind == "media" %}
         - media
-{% endif %}
+{%- endif %}
+  - type: http
+    bind_address: {{ worker.ip }}
+    port: 9090
+    resources:
+      - names: [replication]

 worker_log_config: '{{ worker_dir }}.logging/{{ worker.name }}.logging.yaml'
 worker_pid_file: '{{ logs_dir }}/{{ worker.name }}.pid'
-{% endif %}
-{% set main_worker = workers_by_name.main %}

 worker_main_http_uri: http://{{ main_worker.ip }}:8080
+
+{% endif %}
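Finally, an illustrative way (not part of the commit; again assumes PyYAML and an invented script name) to see the main-vs-generic split this template produces: list the per-worker configs that workers_setup.py writes under `<target>/workers/` and print each one's `worker_app` and listener bind addresses.

```python
# Summarise generated worker configs.  Usage: python list_workers.py <target dir>
import sys
from pathlib import Path

import yaml

workers_dir = Path(sys.argv[1]) / "workers"  # e.g. ../servers/syn7_auto
for config_path in sorted(workers_dir.glob("*.yaml")):
    config = yaml.safe_load(config_path.read_text())
    listeners = config.get("worker_listeners") or []
    binds = ", ".join(
        f"{listener.get('bind_address', '?')}:{listener.get('port', '?')}"
        for listener in listeners
    )
    print(f"{config_path.stem}: {config.get('worker_app')} [{binds}]")
```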