author     Ben Banfield-Zanin <benbz@matrix.org>  2020-09-15 11:44:49 +0100
committer  Ben Banfield-Zanin <benbz@matrix.org>  2020-09-15 11:44:49 +0100
commit     1a7d96aa6ff81638f2ea696fdee2ec44e7bff75a (patch)
tree       1839e80f89c53b34ff1b36974305c6cb0c94aab4 /scripts
parent     Fix group server for older synapse (diff)
parent     Clarify changelog. (diff)
download   synapse-github/bbz/info-mainline-1.20.0.tar.xz

Merge remote-tracking branch 'origin/release-v1.20.0' into bbz/info-mainline-1.20.0 (refs: github/bbz/info-mainline-1.20.0, bbz/info-mainline-1.20.0)
Diffstat (limited to '')
-rwxr-xr-x  scripts-dev/build_debian_packages        1
-rwxr-xr-x  scripts-dev/check-newsfragment          13
-rwxr-xr-x  scripts-dev/check_line_terminators.sh   34
-rw-r--r--  scripts-dev/check_signature.py           2
-rwxr-xr-x  scripts-dev/federation_client.py        38
-rw-r--r--  scripts-dev/hash_history.py              2
-rwxr-xr-x  scripts-dev/lint.sh                      8
-rw-r--r--  scripts-dev/mypy_synapse_plugin.py      85
-rwxr-xr-x  scripts-dev/update_database              4
-rwxr-xr-x  scripts/synapse_port_db                122
10 files changed, 239 insertions, 70 deletions
diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages
index e6f4bd1dca..d055cf3287 100755
--- a/scripts-dev/build_debian_packages
+++ b/scripts-dev/build_debian_packages
@@ -24,7 +24,6 @@ DISTS = (
     "debian:sid",
     "ubuntu:xenial",
     "ubuntu:bionic",
-    "ubuntu:eoan",
     "ubuntu:focal",
 )
 
diff --git a/scripts-dev/check-newsfragment b/scripts-dev/check-newsfragment
index 98a618f6b2..448cadb829 100755
--- a/scripts-dev/check-newsfragment
+++ b/scripts-dev/check-newsfragment
@@ -3,6 +3,8 @@
 # A script which checks that an appropriate news file has been added on this
 # branch.
 
+echo -e "+++ \033[32mChecking newsfragment\033[m"
+
 set -e
 
 # make sure that origin/develop is up to date
@@ -16,6 +18,8 @@ pr="$BUILDKITE_PULL_REQUEST"
 if ! git diff --quiet FETCH_HEAD... -- debian; then
     if git diff --quiet FETCH_HEAD... -- debian/changelog; then
         echo "Updates to debian directory, but no update to the changelog." >&2
+        echo "!! Please see the contributing guide for help writing your changelog entry:" >&2
+        echo "https://github.com/matrix-org/synapse/blob/develop/CONTRIBUTING.md#debian-changelog" >&2
         exit 1
     fi
 fi
@@ -26,7 +30,12 @@ if ! git diff --name-only FETCH_HEAD... | grep -qv '^debian/'; then
     exit 0
 fi
 
-tox -qe check-newsfragment
+# Print a link to the contributing guide if the user makes a mistake
+CONTRIBUTING_GUIDE_TEXT="!! Please see the contributing guide for help writing your changelog entry:
+https://github.com/matrix-org/synapse/blob/develop/CONTRIBUTING.md#changelog"
+
+# If check-newsfragment returns a non-zero exit code, print the contributing guide and exit
+tox -qe check-newsfragment || (echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2 && exit 1)
 
 echo
 echo "--------------------------"
@@ -38,6 +47,7 @@ for f in `git diff --name-only FETCH_HEAD... -- changelog.d`; do
     lastchar=`tr -d '\n' < $f | tail -c 1`
     if [ $lastchar != '.' -a $lastchar != '!' ]; then
         echo -e "\e[31mERROR: newsfragment $f does not end with a '.' or '!'\e[39m" >&2
+        echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2
         exit 1
     fi
 
@@ -47,5 +57,6 @@ done
 
 if [[ -n "$pr" && "$matched" -eq 0 ]]; then
     echo -e "\e[31mERROR: Did not find a news fragment with the right number: expected changelog.d/$pr.*.\e[39m" >&2
+    echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2
     exit 1
 fi
diff --git a/scripts-dev/check_line_terminators.sh b/scripts-dev/check_line_terminators.sh
new file mode 100755
index 0000000000..c983956231
--- /dev/null
+++ b/scripts-dev/check_line_terminators.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+#
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# This script checks that line terminators in all repository files (excluding
+# those in the .git directory) feature unix line terminators.
+#
+# Usage:
+#
+#     ./check_line_terminators.sh
+#
+# The script will emit exit code 1 if any files that do not use unix line
+# terminators are found, 0 otherwise.
+
+# cd to the root of the repository
+cd `dirname $0`/..
+
+# Find and print files with non-unix line terminators
+if find . -path './.git/*' -prune -o -type f -print0 | xargs -0 grep -I -l $'\r$'; then
+    echo -e '\e[31mERROR: found files with CRLF line endings. See above.\e[39m'
+    exit 1
+fi
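
For readers less fluent in find/xargs, the check above amounts to the following Python sketch. This is an illustrative equivalent, not part of the commit, and unlike `grep -I` it does not skip binary files:

    #!/usr/bin/env python3
    # Illustrative Python equivalent of check_line_terminators.sh: walk the
    # repository, prune .git, and report any file containing a CRLF sequence.
    import os
    import sys

    bad = []
    for root, dirs, files in os.walk("."):
        dirs[:] = [d for d in dirs if d != ".git"]  # like find's -prune on ./.git
        for name in files:
            path = os.path.join(root, name)
            try:
                with open(path, "rb") as f:
                    if b"\r\n" in f.read():
                        bad.append(path)
            except OSError:
                continue  # unreadable file; the shell pipeline would skip it too

    if bad:
        print("\n".join(bad))
        print("ERROR: found files with CRLF line endings. See above.", file=sys.stderr)
        sys.exit(1)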
diff --git a/scripts-dev/check_signature.py b/scripts-dev/check_signature.py
index ecda103cf7..6755bc5282 100644
--- a/scripts-dev/check_signature.py
+++ b/scripts-dev/check_signature.py
@@ -2,9 +2,9 @@ import argparse
 import json
 import logging
 import sys
-import urllib2
 
 import dns.resolver
+import urllib2
 from signedjson.key import decode_verify_key_bytes, write_signing_keys
 from signedjson.sign import verify_signed_json
 from unpaddedbase64 import decode_base64
diff --git a/scripts-dev/federation_client.py b/scripts-dev/federation_client.py
index 7c19e405d4..ad12523c4d 100755
--- a/scripts-dev/federation_client.py
+++ b/scripts-dev/federation_client.py
@@ -21,11 +21,12 @@ import argparse
 import base64
 import json
 import sys
-
-from six.moves.urllib import parse as urlparse
+from typing import Any, Optional
+from urllib import parse as urlparse
 
 import nacl.signing
 import requests
+import signedjson.types
 import srvlookup
 import yaml
 from requests.adapters import HTTPAdapter
@@ -70,7 +71,9 @@ def encode_canonical_json(value):
     ).encode("UTF-8")
 
 
-def sign_json(json_object, signing_key, signing_name):
+def sign_json(
+    json_object: Any, signing_key: signedjson.types.SigningKey, signing_name: str
+) -> Any:
     signatures = json_object.pop("signatures", {})
     unsigned = json_object.pop("unsigned", None)
 
@@ -123,7 +126,14 @@ def read_signing_keys(stream):
     return keys
 
 
-def request_json(method, origin_name, origin_key, destination, path, content):
+def request(
+    method: Optional[str],
+    origin_name: str,
+    origin_key: signedjson.types.SigningKey,
+    destination: str,
+    path: str,
+    content: Optional[str],
+) -> requests.Response:
     if method is None:
         if content is None:
             method = "GET"
@@ -160,11 +170,14 @@ def request_json(method, origin_name, origin_key, destination, path, content):
     if method == "POST":
         headers["Content-Type"] = "application/json"
 
-    result = s.request(
-        method=method, url=dest, headers=headers, verify=False, data=content
+    return s.request(
+        method=method,
+        url=dest,
+        headers=headers,
+        verify=False,
+        data=content,
+        stream=True,
     )
-    sys.stderr.write("Status Code: %d\n" % (result.status_code,))
-    return result.json()
 
 
 def main():
@@ -223,7 +236,7 @@ def main():
     with open(args.signing_key_path) as f:
         key = read_signing_keys(f)[0]
 
-    result = request_json(
+    result = request(
         args.method,
         args.server_name,
         key,
@@ -232,7 +245,12 @@ def main():
         content=args.body,
     )
 
-    json.dump(result, sys.stdout)
+    sys.stderr.write("Status Code: %d\n" % (result.status_code,))
+
+    for chunk in result.iter_content():
+        # we write raw utf8 to stdout.
+        sys.stdout.buffer.write(chunk)
+
     print("")
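
The hand-rolled `sign_json` above mirrors what the signedjson library (now referenced in the type annotations) exposes directly. A minimal sketch for comparison; the key version "key1" and server name "example.org" are placeholders, not values from this commit:

    # Sign and verify a JSON object with the signedjson library; this mirrors
    # what the script's sign_json() does by hand with nacl.signing.
    from signedjson.key import generate_signing_key, get_verify_key
    from signedjson.sign import sign_json, verify_signed_json

    signing_key = generate_signing_key("key1")  # "key1" is an arbitrary key version
    signed = sign_json(
        {"method": "GET", "uri": "/_matrix/key/v2/server"}, "example.org", signing_key
    )
    # Raises SignatureVerifyException if the signature does not match.
    verify_signed_json(signed, "example.org", get_verify_key(signing_key))
    print(signed["signatures"])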
diff --git a/scripts-dev/hash_history.py b/scripts-dev/hash_history.py
index bf3862a386..89acb52e6a 100644
--- a/scripts-dev/hash_history.py
+++ b/scripts-dev/hash_history.py
@@ -15,7 +15,7 @@ from synapse.storage.pdu import PduStore
 from synapse.storage.signatures import SignatureStore
 
 
-class Store(object):
+class Store:
     _get_pdu_tuples = PduStore.__dict__["_get_pdu_tuples"]
     _get_pdu_content_hashes_txn = SignatureStore.__dict__["_get_pdu_content_hashes_txn"]
     _get_prev_pdu_hashes_txn = SignatureStore.__dict__["_get_prev_pdu_hashes_txn"]
diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh
index 34c4854e11..0647993658 100755
--- a/scripts-dev/lint.sh
+++ b/scripts-dev/lint.sh
@@ -2,8 +2,8 @@
 #
 # Runs linting scripts over the local Synapse checkout
 # isort - sorts import statements
-# flake8 - lints and finds mistakes
 # black - opinionated code formatter
+# flake8 - lints and finds mistakes
 
 set -e
 
@@ -11,11 +11,11 @@ if [ $# -ge 1 ]
 then
   files=$*
 else
-  files="synapse tests scripts-dev scripts"
+  files="synapse tests scripts-dev scripts contrib synctl"
 fi
 
 echo "Linting these locations: $files"
-isort -y -rc $files
-flake8 $files
+isort $files
 python3 -m black $files
 ./scripts-dev/config-lint.sh
+flake8 $files
diff --git a/scripts-dev/mypy_synapse_plugin.py b/scripts-dev/mypy_synapse_plugin.py
new file mode 100644
index 0000000000..a5b88731f1
--- /dev/null
+++ b/scripts-dev/mypy_synapse_plugin.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This is a mypy plugin for Synpase to deal with some of the funky typing that
+can crop up, e.g the cache descriptors.
+"""
+
+from typing import Callable, Optional
+
+from mypy.plugin import MethodSigContext, Plugin
+from mypy.typeops import bind_self
+from mypy.types import CallableType
+
+
+class SynapsePlugin(Plugin):
+    def get_method_signature_hook(
+        self, fullname: str
+    ) -> Optional[Callable[[MethodSigContext], CallableType]]:
+        if fullname.startswith(
+            "synapse.util.caches.descriptors._CachedFunction.__call__"
+        ):
+            return cached_function_method_signature
+        return None
+
+
+def cached_function_method_signature(ctx: MethodSigContext) -> CallableType:
+    """Fixes the `_CachedFunction.__call__` signature to be correct.
+
+    It already has *almost* the correct signature, except:
+
+    1. the `self` argument needs to be marked as "bound"; and
+    2. any `cache_context` argument should be removed.
+    """
+
+    # First we mark this as a bound function signature.
+    signature = bind_self(ctx.default_signature)
+
+    # Secondly, we remove any "cache_context" args.
+    #
+    # Note: We should be only doing this if `cache_context=True` is set, but if
+    # it isn't then the code will raise an exception when its called anyway, so
+    # its not the end of the world.
+    context_arg_index = None
+    for idx, name in enumerate(signature.arg_names):
+        if name == "cache_context":
+            context_arg_index = idx
+            break
+
+    if context_arg_index:
+        arg_types = list(signature.arg_types)
+        arg_types.pop(context_arg_index)
+
+        arg_names = list(signature.arg_names)
+        arg_names.pop(context_arg_index)
+
+        arg_kinds = list(signature.arg_kinds)
+        arg_kinds.pop(context_arg_index)
+
+        signature = signature.copy_modified(
+            arg_types=arg_types, arg_names=arg_names, arg_kinds=arg_kinds,
+        )
+
+    return signature
+
+
+def plugin(version: str):
+    # This is the entry point of the plugin, and let's us deal with the fact
+    # that the mypy plugin interface is *not* stable by looking at the version
+    # string.
+    #
+    # However, since we pin the version of mypy Synapse uses in CI, we don't
+    # really care.
+    return SynapsePlugin
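
For context, the descriptor pattern the plugin compensates for looks roughly like the sketch below. `@cached(cache_context=True)` is the real Synapse decorator the signature fix targets; the store and method names here are hypothetical:

    # Hypothetical sketch of a cache_context call site. The decorator injects
    # a `cache_context` argument that callers never pass explicitly, which is
    # why the plugin strips it from the inferred __call__ signature.
    from synapse.util.caches.descriptors import cached

    class ExampleStore:
        @cached(cache_context=True)
        async def get_user(self, user_id, cache_context):
            # cache_context.invalidate can be handed to nested cached calls
            # (as on_invalidate) so this entry is evicted when they are.
            ...

The plugin is picked up by pointing mypy's `plugins` setting at the file (e.g. `plugins = scripts-dev/mypy_synapse_plugin.py` in the project's mypy config).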
diff --git a/scripts-dev/update_database b/scripts-dev/update_database
index 94aa8758b4..56365e2b58 100755
--- a/scripts-dev/update_database
+++ b/scripts-dev/update_database
@@ -40,7 +40,7 @@ class MockHomeserver(HomeServer):
             config.server_name, reactor=reactor, config=config, **kwargs
         )
 
-        self.version_string = "Synapse/"+get_version_string(synapse)
+        self.version_string = "Synapse/" + get_version_string(synapse)
 
 
 if __name__ == "__main__":
@@ -86,7 +86,7 @@ if __name__ == "__main__":
     store = hs.get_datastore()
 
     async def run_background_updates():
-        await store.db.updates.run_background_updates(sleep=False)
+        await store.db_pool.updates.run_background_updates(sleep=False)
         # Stop the reactor to exit the script once every background update is run.
         reactor.stop()
diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db
index 9a0fbc61d8..a34bdf1830 100755
--- a/scripts/synapse_port_db
+++ b/scripts/synapse_port_db
@@ -23,8 +23,6 @@ import sys
 import time
 import traceback
 
-from six import string_types
-
 import yaml
 
 from twisted.internet import defer, reactor
@@ -37,30 +35,29 @@ from synapse.logging.context import (
     make_deferred_yieldable,
     run_in_background,
 )
-from synapse.storage.data_stores.main.client_ips import ClientIpBackgroundUpdateStore
-from synapse.storage.data_stores.main.deviceinbox import (
-    DeviceInboxBackgroundUpdateStore,
-)
-from synapse.storage.data_stores.main.devices import DeviceBackgroundUpdateStore
-from synapse.storage.data_stores.main.events_bg_updates import (
+from synapse.storage.database import DatabasePool, make_conn
+from synapse.storage.databases.main.client_ips import ClientIpBackgroundUpdateStore
+from synapse.storage.databases.main.deviceinbox import DeviceInboxBackgroundUpdateStore
+from synapse.storage.databases.main.devices import DeviceBackgroundUpdateStore
+from synapse.storage.databases.main.events_bg_updates import (
     EventsBackgroundUpdatesStore,
 )
-from synapse.storage.data_stores.main.media_repository import (
+from synapse.storage.databases.main.media_repository import (
     MediaRepositoryBackgroundUpdateStore,
 )
-from synapse.storage.data_stores.main.registration import (
+from synapse.storage.databases.main.registration import (
     RegistrationBackgroundUpdateStore,
+    find_max_generated_user_id_localpart,
 )
-from synapse.storage.data_stores.main.room import RoomBackgroundUpdateStore
-from synapse.storage.data_stores.main.roommember import RoomMemberBackgroundUpdateStore
-from synapse.storage.data_stores.main.search import SearchBackgroundUpdateStore
-from synapse.storage.data_stores.main.state import MainStateBackgroundUpdateStore
-from synapse.storage.data_stores.main.stats import StatsStore
-from synapse.storage.data_stores.main.user_directory import (
+from synapse.storage.databases.main.room import RoomBackgroundUpdateStore
+from synapse.storage.databases.main.roommember import RoomMemberBackgroundUpdateStore
+from synapse.storage.databases.main.search import SearchBackgroundUpdateStore
+from synapse.storage.databases.main.state import MainStateBackgroundUpdateStore
+from synapse.storage.databases.main.stats import StatsStore
+from synapse.storage.databases.main.user_directory import (
     UserDirectoryBackgroundUpdateStore,
 )
-from synapse.storage.data_stores.state.bg_updates import StateBackgroundUpdateStore
-from synapse.storage.database import Database, make_conn
+from synapse.storage.databases.state.bg_updates import StateBackgroundUpdateStore
 from synapse.storage.engines import create_engine
 from synapse.storage.prepare_database import prepare_database
 from synapse.util import Clock
@@ -91,6 +88,7 @@ BOOLEAN_COLUMNS = {
     "account_validity": ["email_sent"],
     "redactions": ["have_censored"],
     "room_stats_state": ["is_federatable"],
+    "local_media_repository": ["safe_from_quarantine"],
 }
 
@@ -129,6 +127,26 @@ APPEND_ONLY_TABLES = [
 ]
 
 
+IGNORED_TABLES = {
+    # We don't port these tables, as they're a faff and we can regenerate
+    # them anyway.
+    "user_directory",
+    "user_directory_search",
+    "user_directory_search_content",
+    "user_directory_search_docsize",
+    "user_directory_search_segdir",
+    "user_directory_search_segments",
+    "user_directory_search_stat",
+    "user_directory_search_pos",
+    "users_who_share_private_rooms",
+    "users_in_public_room",
+    # UI auth sessions have foreign keys so additional care needs to be taken,
+    # the sessions are transient anyway, so ignore them.
+    "ui_auth_sessions",
+    "ui_auth_sessions_credentials",
+}
+
+
 # Error returned by the run function. Used at the top-level part of the script to
 # handle errors and return codes.
 end_error = None
@@ -155,14 +173,14 @@ class Store(
     StatsStore,
 ):
     def execute(self, f, *args, **kwargs):
-        return self.db.runInteraction(f.__name__, f, *args, **kwargs)
+        return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)
 
     def execute_sql(self, sql, *args):
         def r(txn):
             txn.execute(sql, args)
             return txn.fetchall()
 
-        return self.db.runInteraction("execute_sql", r)
+        return self.db_pool.runInteraction("execute_sql", r)
 
     def insert_many_txn(self, txn, table, headers, rows):
         sql = "INSERT INTO %s (%s) VALUES (%s)" % (
@@ -207,7 +225,7 @@ class Porter(object):
     async def setup_table(self, table):
         if table in APPEND_ONLY_TABLES:
             # It's safe to just carry on inserting.
-            row = await self.postgres_store.db.simple_select_one(
+            row = await self.postgres_store.db_pool.simple_select_one(
                 table="port_from_sqlite3",
                 keyvalues={"table_name": table},
                 retcols=("forward_rowid", "backward_rowid"),
@@ -224,7 +242,7 @@ class Porter(object):
                 ) = await self._setup_sent_transactions()
                 backward_chunk = 0
             else:
-                await self.postgres_store.db.simple_insert(
+                await self.postgres_store.db_pool.simple_insert(
                     table="port_from_sqlite3",
                     values={
                         "table_name": table,
@@ -254,7 +272,7 @@ class Porter(object):
 
             await self.postgres_store.execute(delete_all)
 
-            await self.postgres_store.db.simple_insert(
+            await self.postgres_store.db_pool.simple_insert(
                 table="port_from_sqlite3",
                 values={"table_name": table, "forward_rowid": 1, "backward_rowid": 0},
             )
@@ -291,21 +309,14 @@ class Porter(object):
             )
             return
 
-        if table in (
-            "user_directory",
-            "user_directory_search",
-            "users_who_share_rooms",
-            "users_in_pubic_room",
-        ):
-            # We don't port these tables, as they're a faff and we can regenreate
-            # them anyway.
+        if table in IGNORED_TABLES:
             self.progress.update(table, table_size)  # Mark table as done
             return
 
         if table == "user_directory_stream_pos":
             # We need to make sure there is a single row, `(X, null), as that is
             # what synapse expects to be there.
-            await self.postgres_store.db.simple_insert(
+            await self.postgres_store.db_pool.simple_insert(
                 table=table, values={"stream_id": None}
             )
             self.progress.update(table, table_size)  # Mark table as done
@@ -346,7 +357,7 @@ class Porter(object):
 
                 return headers, forward_rows, backward_rows
 
-            headers, frows, brows = await self.sqlite_store.db.runInteraction(
+            headers, frows, brows = await self.sqlite_store.db_pool.runInteraction(
                 "select", r
             )
 
@@ -362,7 +373,7 @@ class Porter(object):
         def insert(txn):
             self.postgres_store.insert_many_txn(txn, table, headers[1:], rows)
 
-            self.postgres_store.db.simple_update_one_txn(
+            self.postgres_store.db_pool.simple_update_one_txn(
                 txn,
                 table="port_from_sqlite3",
                 keyvalues={"table_name": table},
@@ -400,7 +411,7 @@ class Porter(object):
 
                 return headers, rows
 
-            headers, rows = await self.sqlite_store.db.runInteraction("select", r)
+            headers, rows = await self.sqlite_store.db_pool.runInteraction("select", r)
 
             if rows:
                 forward_chunk = rows[-1][0] + 1
@@ -438,7 +449,7 @@ class Porter(object):
                     ],
                 )
 
-                self.postgres_store.db.simple_update_one_txn(
+                self.postgres_store.db_pool.simple_update_one_txn(
                     txn,
                     table="port_from_sqlite3",
                     keyvalues={"table_name": "event_search"},
@@ -481,7 +492,7 @@ class Porter(object):
             db_conn, allow_outdated_version=allow_outdated_version
         )
         prepare_database(db_conn, engine, config=self.hs_config)
-        store = Store(Database(hs, db_config, engine), db_conn, hs)
+        store = Store(DatabasePool(hs, db_config, engine), db_conn, hs)
         db_conn.commit()
 
         return store
@@ -489,7 +500,7 @@ class Porter(object):
     async def run_background_updates_on_postgres(self):
         # Manually apply all background updates on the PostgreSQL database.
         postgres_ready = (
-            await self.postgres_store.db.updates.has_completed_background_updates()
+            await self.postgres_store.db_pool.updates.has_completed_background_updates()
         )
 
         if not postgres_ready:
@@ -498,9 +509,9 @@ class Porter(object):
             self.progress.set_state("Running background updates on PostgreSQL")
 
         while not postgres_ready:
-            await self.postgres_store.db.updates.do_next_background_update(100)
+            await self.postgres_store.db_pool.updates.do_next_background_update(100)
             postgres_ready = await (
-                self.postgres_store.db.updates.has_completed_background_updates()
+                self.postgres_store.db_pool.updates.has_completed_background_updates()
             )
 
     async def run(self):
@@ -521,7 +532,7 @@ class Porter(object):
 
             # Check if all background updates are done, abort if not.
             updates_complete = (
-                await self.sqlite_store.db.updates.has_completed_background_updates()
+                await self.sqlite_store.db_pool.updates.has_completed_background_updates()
             )
             if not updates_complete:
                 end_error = (
@@ -563,22 +574,24 @@ class Porter(object):
                 )
 
             try:
-                await self.postgres_store.db.runInteraction("alter_table", alter_table)
+                await self.postgres_store.db_pool.runInteraction(
+                    "alter_table", alter_table
+                )
             except Exception:
                 # On Error Resume Next
                 pass
 
-            await self.postgres_store.db.runInteraction(
+            await self.postgres_store.db_pool.runInteraction(
                 "create_port_table", create_port_table
             )
 
             # Step 2. Get tables.
             self.progress.set_state("Fetching tables")
-            sqlite_tables = await self.sqlite_store.db.simple_select_onecol(
+            sqlite_tables = await self.sqlite_store.db_pool.simple_select_onecol(
                 table="sqlite_master", keyvalues={"type": "table"}, retcol="name"
             )
 
-            postgres_tables = await self.postgres_store.db.simple_select_onecol(
+            postgres_tables = await self.postgres_store.db_pool.simple_select_onecol(
                 table="information_schema.tables",
                 keyvalues={},
                 retcol="distinct table_name",
@@ -610,8 +623,10 @@ class Porter(object):
                 )
             )
 
-            # Step 5. Do final post-processing
+            # Step 5. Set up sequences
+            self.progress.set_state("Setting up sequence generators")
             await self._setup_state_group_id_seq()
+            await self._setup_user_id_seq()
 
             self.progress.done()
         except Exception as e:
@@ -635,7 +650,7 @@ class Porter(object):
             return bool(col)
         if isinstance(col, bytes):
             return bytearray(col)
-        elif isinstance(col, string_types) and "\0" in col:
+        elif isinstance(col, str) and "\0" in col:
             logger.warning(
                 "DROPPING ROW: NUL value in table %s col %s: %r",
                 table,
@@ -677,7 +692,7 @@ class Porter(object):
 
             return headers, [r for r in rows if r[ts_ind] < yesterday]
 
-        headers, rows = await self.sqlite_store.db.runInteraction("select", r)
+        headers, rows = await self.sqlite_store.db_pool.runInteraction("select", r)
 
         rows = self._convert_rows("sent_transactions", headers, rows)
 
@@ -710,7 +725,7 @@ class Porter(object):
         next_chunk = await self.sqlite_store.execute(get_start_id)
         next_chunk = max(max_inserted_rowid + 1, next_chunk)
 
-        await self.postgres_store.db.simple_insert(
+        await self.postgres_store.db_pool.simple_insert(
             table="port_from_sqlite3",
             values={
                 "table_name": "sent_transactions",
@@ -779,7 +794,14 @@ class Porter(object):
             next_id = curr_id + 1
             txn.execute("ALTER SEQUENCE state_group_id_seq RESTART WITH %s", (next_id,))
 
-        return self.postgres_store.db.runInteraction("setup_state_group_id_seq", r)
+        return self.postgres_store.db_pool.runInteraction("setup_state_group_id_seq", r)
+
+    def _setup_user_id_seq(self):
+        def r(txn):
+            next_id = find_max_generated_user_id_localpart(txn) + 1
+            txn.execute("ALTER SEQUENCE user_id_seq RESTART WITH %s", (next_id,))
+
+        return self.postgres_store.db_pool.runInteraction("setup_user_id_seq", r)
 
 
 ##############################################
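
Both `_setup_state_group_id_seq` and the new `_setup_user_id_seq` follow the same recipe: compute the next free id from the ported rows, then restart the corresponding Postgres sequence just past it. A standalone sketch of that recipe, assuming a plain psycopg2 connection rather than the script's runInteraction wrapper:

    # Restart a Postgres sequence past the highest id already in use, as the
    # _setup_*_seq helpers above do. Assumes a direct psycopg2 connection and
    # the table/sequence names used by the script.
    import psycopg2

    conn = psycopg2.connect("dbname=synapse")
    with conn, conn.cursor() as cur:
        cur.execute("SELECT COALESCE(MAX(id), 0) FROM state_groups")
        (curr_id,) = cur.fetchone()
        # psycopg2 interpolates %s client-side, which is what makes a
        # parameterised ALTER SEQUENCE ... RESTART WITH work here.
        cur.execute("ALTER SEQUENCE state_group_id_seq RESTART WITH %s", (curr_id + 1,))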