author    Richard van der Hoff <1389908+richvdh@users.noreply.github.com>  2021-05-14 11:46:35 +0100
committer GitHub <noreply@github.com>  2021-05-14 11:46:35 +0100
commit    6482075c95957ad980d9c1323f9f982e6f7aaff4 (patch)
tree      593a4e2e96f27d63be8265d1a20228a2271b1be4
parent    Minor `@cachedList` enhancements (#9975) (diff)
Run `black` on the scripts (#9981)
Turns out these scripts weren't getting linted.
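For background: when black is pointed at a directory it only picks up files matching its include pattern (`.py`/`.pyi` by default), so extensionless scripts like these are silently skipped unless they are listed one by one, which is what this change does in `tox.ini` and `scripts-dev/lint.sh`. As a rough, hedged sketch of checking the same files by hand (assuming black is installed; the paths are the ones added below):

    black --check --diff \
        scripts/export_signing_key scripts/generate_config scripts/generate_log_config \
        scripts/hash_password scripts/register_new_matrix_user scripts/synapse_port_db \
        scripts-dev/build_debian_packages scripts-dev/sign_json scripts-dev/update_database

Dropping `--check --diff` makes black rewrite the files in place, which is effectively what produced the diff below.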
-rw-r--r--  changelog.d/9981.misc              |   1
-rwxr-xr-x  scripts-dev/build_debian_packages  | 105
-rwxr-xr-x  scripts-dev/lint.sh                |  18
-rwxr-xr-x  scripts/export_signing_key         |  13
-rwxr-xr-x  scripts/generate_config            |  18
-rwxr-xr-x  scripts/hash_password              |   6
-rwxr-xr-x  scripts/synapse_port_db            |  46
-rw-r--r--  tox.ini                            |  10
8 files changed, 141 insertions(+), 76 deletions(-)
diff --git a/changelog.d/9981.misc b/changelog.d/9981.misc
new file mode 100644
index 0000000000..677c9b4cbd
--- /dev/null
+++ b/changelog.d/9981.misc
@@ -0,0 +1 @@
+Run `black` on files in the `scripts` directory.
diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages
index 07d018db99..546724f89f 100755
--- a/scripts-dev/build_debian_packages
+++ b/scripts-dev/build_debian_packages
@@ -21,18 +21,18 @@ DISTS = (
     "debian:buster",
     "debian:bullseye",
     "debian:sid",
-    "ubuntu:bionic", # 18.04 LTS (our EOL forced by Py36 on 2021-12-23)
-    "ubuntu:focal", # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
-    "ubuntu:groovy", # 20.10 (EOL 2021-07-07)
+    "ubuntu:bionic",  # 18.04 LTS (our EOL forced by Py36 on 2021-12-23)
+    "ubuntu:focal",  # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
+    "ubuntu:groovy",  # 20.10 (EOL 2021-07-07)
     "ubuntu:hirsute",  # 21.04 (EOL 2022-01-05)
 )

-DESC = '''\
+DESC = """\
 Builds .debs for synapse, using a Docker image for the build environment.

 By default, builds for all known distributions, but a list of distributions
 can be passed on the commandline for debugging.
-'''
+"""


 class Builder(object):
@@ -46,7 +46,7 @@ class Builder(object):
         """Build deb for a single distribution"""

         if self._failed:
-            print("not building %s due to earlier failure" % (dist, ))
+            print("not building %s due to earlier failure" % (dist,))
             raise Exception("failed")

         try:
@@ -68,48 +68,65 @@ class Builder(object):
         # we tend to get source packages which are full of debs. (We could hack
         # around that with more magic in the build_debian.sh script, but that
         # doesn't solve the problem for natively-run dpkg-buildpakage).
-        debsdir = os.path.join(projdir, '../debs')
+        debsdir = os.path.join(projdir, "../debs")
         os.makedirs(debsdir, exist_ok=True)

         if self.redirect_stdout:
-            logfile = os.path.join(debsdir, "%s.buildlog" % (tag, ))
+            logfile = os.path.join(debsdir, "%s.buildlog" % (tag,))
             print("building %s: directing output to %s" % (dist, logfile))
             stdout = open(logfile, "w")
         else:
             stdout = None

         # first build a docker image for the build environment
-        subprocess.check_call([
-            "docker", "build",
-            "--tag", "dh-venv-builder:" + tag,
-            "--build-arg", "distro=" + dist,
-            "-f", "docker/Dockerfile-dhvirtualenv",
-            "docker",
-        ], stdout=stdout, stderr=subprocess.STDOUT)
+        subprocess.check_call(
+            [
+                "docker",
+                "build",
+                "--tag",
+                "dh-venv-builder:" + tag,
+                "--build-arg",
+                "distro=" + dist,
+                "-f",
+                "docker/Dockerfile-dhvirtualenv",
+                "docker",
+            ],
+            stdout=stdout,
+            stderr=subprocess.STDOUT,
+        )

         container_name = "synapse_build_" + tag
         with self._lock:
             self.active_containers.add(container_name)

         # then run the build itself
-        subprocess.check_call([
-            "docker", "run",
-            "--rm",
-            "--name", container_name,
-            "--volume=" + projdir + ":/synapse/source:ro",
-            "--volume=" + debsdir + ":/debs",
-            "-e", "TARGET_USERID=%i" % (os.getuid(), ),
-            "-e", "TARGET_GROUPID=%i" % (os.getgid(), ),
-            "-e", "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""),
-            "dh-venv-builder:" + tag,
-        ], stdout=stdout, stderr=subprocess.STDOUT)
+        subprocess.check_call(
+            [
+                "docker",
+                "run",
+                "--rm",
+                "--name",
+                container_name,
+                "--volume=" + projdir + ":/synapse/source:ro",
+                "--volume=" + debsdir + ":/debs",
+                "-e",
+                "TARGET_USERID=%i" % (os.getuid(),),
+                "-e",
+                "TARGET_GROUPID=%i" % (os.getgid(),),
+                "-e",
+                "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""),
+                "dh-venv-builder:" + tag,
+            ],
+            stdout=stdout,
+            stderr=subprocess.STDOUT,
+        )

         with self._lock:
             self.active_containers.remove(container_name)

         if stdout is not None:
             stdout.close()

-        print("Completed build of %s" % (dist, ))
+        print("Completed build of %s" % (dist,))

     def kill_containers(self):
         with self._lock:
@@ -117,9 +134,14 @@ class Builder(object):

         for c in active:
             print("killing container %s" % (c,))
-            subprocess.run([
-                "docker", "kill", c,
-            ], stdout=subprocess.DEVNULL)
+            subprocess.run(
+                [
+                    "docker",
+                    "kill",
+                    c,
+                ],
+                stdout=subprocess.DEVNULL,
+            )
             with self._lock:
                 self.active_containers.remove(c)

@@ -130,31 +152,38 @@ def run_builds(dists, jobs=1, skip_tests=False):
     def sig(signum, _frame):
         print("Caught SIGINT")
         builder.kill_containers()
+
     signal.signal(signal.SIGINT, sig)

     with ThreadPoolExecutor(max_workers=jobs) as e:
         res = e.map(lambda dist: builder.run_build(dist, skip_tests), dists)

         # make sure we consume the iterable so that exceptions are raised.
-        for r in res:
+        for _ in res:
             pass


-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description=DESC,
     )
     parser.add_argument(
-        '-j', '--jobs', type=int, default=1,
-        help='specify the number of builds to run in parallel',
+        "-j",
+        "--jobs",
+        type=int,
+        default=1,
+        help="specify the number of builds to run in parallel",
     )
     parser.add_argument(
-        '--no-check', action='store_true',
-        help='skip running tests after building',
+        "--no-check",
+        action="store_true",
+        help="skip running tests after building",
     )
     parser.add_argument(
-        'dist', nargs='*', default=DISTS,
-        help='a list of distributions to build for. Default: %(default)s',
+        "dist",
+        nargs="*",
+        default=DISTS,
+        help="a list of distributions to build for. Default: %(default)s",
     )
     args = parser.parse_args()

     run_builds(dists=args.dist, jobs=args.jobs, skip_tests=args.no_check)
diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh
index 9761e97594..869eb2372d 100755
--- a/scripts-dev/lint.sh
+++ b/scripts-dev/lint.sh
@@ -80,8 +80,22 @@ else
   # then lint everything!
   if [[ -z ${files+x} ]]; then
       # Lint all source code files and directories
-      # Note: this list aims the mirror the one in tox.ini
-      files=("synapse" "docker" "tests" "scripts-dev" "scripts" "contrib" "synctl" "setup.py" "synmark" "stubs" ".buildkite")
+      # Note: this list aims to mirror the one in tox.ini
+      files=(
+        "synapse" "docker" "tests"
+        # annoyingly, black doesn't find these so we have to list them
+        "scripts/export_signing_key"
+        "scripts/generate_config"
+        "scripts/generate_log_config"
+        "scripts/hash_password"
+        "scripts/register_new_matrix_user"
+        "scripts/synapse_port_db"
+        "scripts-dev"
+        "scripts-dev/build_debian_packages"
+        "scripts-dev/sign_json"
+        "scripts-dev/update_database"
+        "contrib" "synctl" "setup.py" "synmark" "stubs" ".buildkite"
+      )
   fi
 fi
diff --git a/scripts/export_signing_key b/scripts/export_signing_key
index 0ed167ea85..bf0139bd64 100755
--- a/scripts/export_signing_key
+++ b/scripts/export_signing_key
@@ -30,7 +30,11 @@ def exit(status: int = 0, message: Optional[str] = None):
 def format_plain(public_key: nacl.signing.VerifyKey):
     print(
         "%s:%s %s"
-        % (public_key.alg, public_key.version, encode_verify_key_base64(public_key),)
+        % (
+            public_key.alg,
+            public_key.version,
+            encode_verify_key_base64(public_key),
+        )
     )


@@ -50,7 +54,10 @@ if __name__ == "__main__":
     parser = argparse.ArgumentParser()

     parser.add_argument(
-        "key_file", nargs="+", type=argparse.FileType("r"), help="The key file to read",
+        "key_file",
+        nargs="+",
+        type=argparse.FileType("r"),
+        help="The key file to read",
     )

     parser.add_argument(
@@ -63,7 +70,7 @@ if __name__ == "__main__":
     parser.add_argument(
         "--expiry-ts",
         type=int,
-        default=int(time.time() * 1000) + 6*3600000,
+        default=int(time.time() * 1000) + 6 * 3600000,
         help=(
             "The expiry time to use for -x, in milliseconds since 1970. The default "
             "is (now+6h)."
diff --git a/scripts/generate_config b/scripts/generate_config
index 771cbf8d95..931b40c045 100755
--- a/scripts/generate_config
+++ b/scripts/generate_config
@@ -11,23 +11,22 @@ if __name__ == "__main__":
     parser.add_argument(
         "--config-dir",
         default="CONFDIR",
         help="The path where the config files are kept. Used to create filenames for "
-             "things like the log config and the signing key. Default: %(default)s",
+        "things like the log config and the signing key. Default: %(default)s",
     )

     parser.add_argument(
         "--data-dir",
         default="DATADIR",
         help="The path where the data files are kept. Used to create filenames for "
-             "things like the database and media store. Default: %(default)s",
+        "things like the database and media store. Default: %(default)s",
     )

     parser.add_argument(
         "--server-name",
         default="SERVERNAME",
         help="The server name. Used to initialise the server_name config param, but also "
-             "used in the names of some of the config files. Default: %(default)s",
+        "used in the names of some of the config files. Default: %(default)s",
     )

     parser.add_argument(
@@ -41,21 +40,22 @@ if __name__ == "__main__":
         "--generate-secrets",
         action="store_true",
         help="Enable generation of new secrets for things like the macaroon_secret_key."
-        "By default, these parameters will be left unset."
+        "By default, these parameters will be left unset.",
     )

     parser.add_argument(
-        "-o", "--output-file",
-        type=argparse.FileType('w'),
+        "-o",
+        "--output-file",
+        type=argparse.FileType("w"),
         default=sys.stdout,
         help="File to write the configuration to. Default: stdout",
     )

     parser.add_argument(
         "--header-file",
-        type=argparse.FileType('r'),
+        type=argparse.FileType("r"),
         help="File from which to read a header, which will be printed before the "
-             "generated config.",
+        "generated config.",
     )

     args = parser.parse_args()
diff --git a/scripts/hash_password b/scripts/hash_password
index a30767f758..1d6fb0d700 100755
--- a/scripts/hash_password
+++ b/scripts/hash_password
@@ -41,7 +41,7 @@ if __name__ == "__main__":
     parser.add_argument(
         "-c",
         "--config",
-        type=argparse.FileType('r'),
+        type=argparse.FileType("r"),
         help=(
             "Path to server config file. "
             "Used to read in bcrypt_rounds and password_pepper."
@@ -72,8 +72,8 @@ if __name__ == "__main__":
     pw = unicodedata.normalize("NFKC", password)

     hashed = bcrypt.hashpw(
-        pw.encode('utf8') + password_pepper.encode("utf8"),
+        pw.encode("utf8") + password_pepper.encode("utf8"),
         bcrypt.gensalt(bcrypt_rounds),
-    ).decode('ascii')
+    ).decode("ascii")

     print(hashed)
diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db
index 5fb5bb35f7..7c7645c05a 100755
--- a/scripts/synapse_port_db
+++ b/scripts/synapse_port_db
@@ -294,8 +294,7 @@ class Porter(object):
         return table, already_ported, total_to_port, forward_chunk, backward_chunk

     async def get_table_constraints(self) -> Dict[str, Set[str]]:
-        """Returns a map of tables that have foreign key constraints to tables they depend on.
-        """
+        """Returns a map of tables that have foreign key constraints to tables they depend on."""

         def _get_constraints(txn):
             # We can pull the information about foreign key constraints out from
@@ -504,7 +503,9 @@ class Porter(object):
             return

     def build_db_store(
-        self, db_config: DatabaseConnectionConfig, allow_outdated_version: bool = False,
+        self,
+        db_config: DatabaseConnectionConfig,
+        allow_outdated_version: bool = False,
     ):
         """Builds and returns a database store using the provided configuration.

@@ -740,7 +741,7 @@ class Porter(object):
                     return col

         outrows = []
-        for i, row in enumerate(rows):
+        for row in rows:
             try:
                 outrows.append(
                     tuple(conv(j, col) for j, col in enumerate(row) if j > 0)
@@ -890,8 +891,7 @@ class Porter(object):
         await self.postgres_store.db_pool.runInteraction("setup_user_id_seq", r)

     async def _setup_events_stream_seqs(self) -> None:
-        """Set the event stream sequences to the correct values.
-        """
+        """Set the event stream sequences to the correct values."""

         # We get called before we've ported the events table, so we need to
         # fetch the current positions from the SQLite store.
@@ -920,12 +920,14 @@ class Porter(object):
             )

         await self.postgres_store.db_pool.runInteraction(
-            "_setup_events_stream_seqs", _setup_events_stream_seqs_set_pos,
+            "_setup_events_stream_seqs",
+            _setup_events_stream_seqs_set_pos,
         )

-    async def _setup_sequence(self, sequence_name: str, stream_id_tables: Iterable[str]) -> None:
-        """Set a sequence to the correct value.
-        """
+    async def _setup_sequence(
+        self, sequence_name: str, stream_id_tables: Iterable[str]
+    ) -> None:
+        """Set a sequence to the correct value."""
         current_stream_ids = []
         for stream_id_table in stream_id_tables:
             max_stream_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
@@ -939,14 +941,19 @@ class Porter(object):
         next_id = max(current_stream_ids) + 1

         def r(txn):
-            sql = "ALTER SEQUENCE %s RESTART WITH" % (sequence_name, )
-            txn.execute(sql + " %s", (next_id, ))
+            sql = "ALTER SEQUENCE %s RESTART WITH" % (sequence_name,)
+            txn.execute(sql + " %s", (next_id,))

-        await self.postgres_store.db_pool.runInteraction("_setup_%s" % (sequence_name,), r)
+        await self.postgres_store.db_pool.runInteraction(
+            "_setup_%s" % (sequence_name,), r
+        )

     async def _setup_auth_chain_sequence(self) -> None:
         curr_chain_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
-            table="event_auth_chains", keyvalues={}, retcol="MAX(chain_id)", allow_none=True
+            table="event_auth_chains",
+            keyvalues={},
+            retcol="MAX(chain_id)",
+            allow_none=True,
         )

         def r(txn):
@@ -968,8 +975,7 @@ class Porter(object):


 class Progress(object):
-    """Used to report progress of the port
-    """
+    """Used to report progress of the port"""

     def __init__(self):
         self.tables = {}
@@ -994,8 +1000,7 @@ class Progress(object):


 class CursesProgress(Progress):
-    """Reports progress to a curses window
-    """
+    """Reports progress to a curses window"""

     def __init__(self, stdscr):
         self.stdscr = stdscr
@@ -1020,7 +1025,7 @@ class CursesProgress(Progress):
         self.total_processed = 0
         self.total_remaining = 0

-        for table, data in self.tables.items():
+        for data in self.tables.values():
             self.total_processed += data["num_done"] - data["start"]
             self.total_remaining += data["total"] - data["num_done"]

@@ -1111,8 +1116,7 @@ class CursesProgress(Progress):


 class TerminalProgress(Progress):
-    """Just prints progress to the terminal
-    """
+    """Just prints progress to the terminal"""

     def update(self, table, num_done):
         super(TerminalProgress, self).update(table, num_done)
diff --git a/tox.ini b/tox.ini
index ecd609271d..da77d124fc 100644
--- a/tox.ini
+++ b/tox.ini
@@ -34,7 +34,17 @@ lint_targets =
     synapse
     tests
     scripts
+    # annoyingly, black doesn't find these so we have to list them
+    scripts/export_signing_key
+    scripts/generate_config
+    scripts/generate_log_config
+    scripts/hash_password
+    scripts/register_new_matrix_user
+    scripts/synapse_port_db
     scripts-dev
+    scripts-dev/build_debian_packages
+    scripts-dev/sign_json
+    scripts-dev/update_database
     stubs
     contrib
     synctl