Diffstat (limited to 'scripts')
-rw-r--r-- | scripts/benchmark.js | 6
-rw-r--r-- | scripts/benchmark/connections.js | 6
-rw-r--r-- | scripts/benchmark/users.js | 4
-rw-r--r-- | scripts/build.js | 116
-rw-r--r-- | scripts/build/clean.js | 18
-rw-r--r-- | scripts/build/compile_tsc.js | 48
-rw-r--r-- | scripts/build/plugin_prepare.js | 31
-rw-r--r-- | scripts/build/plugin_resources.js | 13
-rw-r--r-- | scripts/build/remap_imports.js | 15
-rw-r--r-- | scripts/build_new.js | 31
-rw-r--r-- | scripts/code_quality.js | 75
-rw-r--r-- | scripts/db_migrations.js | 80
-rwxr-xr-x | scripts/db_migrations.sh | 41
-rw-r--r-- | scripts/depcheck.js | 44
-rw-r--r-- | scripts/depclean.js | 25
-rwxr-xr-x | scripts/first_setup.js | 272
-rw-r--r-- | scripts/gen_index.js | 47
-rw-r--r-- | scripts/generate_schema.js | 11
-rw-r--r-- | scripts/migrate_db_engine.js | 6
-rw-r--r-- | scripts/rights.js | 18
-rw-r--r-- | scripts/update_schemas.js | 2
-rw-r--r-- | scripts/utils.js | 48
-rw-r--r-- | scripts/utils/ask.js | 20
23 files changed, 800 insertions, 177 deletions
diff --git a/scripts/benchmark.js b/scripts/benchmark.js index e7435191..53db92c5 100644 --- a/scripts/benchmark.js +++ b/scripts/benchmark.js @@ -3,9 +3,7 @@ const Models = require("../dist/entities"); const { PrimaryColumn } = require("typeorm"); function shouldIncludeEntity(name) { - return ![Models.BaseClassWithoutId, PrimaryColumn, Models.BaseClass, Models.PrimaryGeneratedColumn] - .map((x) => x?.name) - .includes(name); + return ![Models.BaseClassWithoutId, PrimaryColumn, Models.BaseClass, Models.PrimaryGeneratedColumn].map((x) => x?.name).includes(name); } async function main() { @@ -14,7 +12,7 @@ async function main() { type: "sqlite", database: ":memory:", entities: Object.values(Models).filter((x) => x.constructor.name == "Function" && shouldIncludeEntity(x.name)), - synchronize: true, + synchronize: true }); await db.initialize(); console.log("Initialized database"); diff --git a/scripts/benchmark/connections.js b/scripts/benchmark/connections.js index 661548c3..f74d0c6d 100644 --- a/scripts/benchmark/connections.js +++ b/scripts/benchmark/connections.js @@ -8,7 +8,7 @@ let cores = 1; try { cores = Number(process.env.THREADS) || os.cpus().length; } catch { - console.log("[Bundle] Failed to get thread count! Using 1...") + console.log("[Bundle] Failed to get thread count! Using 1..."); } if (!token) { @@ -46,8 +46,8 @@ function connect() { op: 2, d: { token, - properties: {}, - }, + properties: {} + } }) ); diff --git a/scripts/benchmark/users.js b/scripts/benchmark/users.js index bce67bf4..415d6d8b 100644 --- a/scripts/benchmark/users.js +++ b/scripts/benchmark/users.js @@ -14,9 +14,9 @@ async function main() { consent: true, date_of_birth: "2000-01-01", gift_code_sku_id: null, - captcha_key: null, + captcha_key: null }), - headers: { "content-type": "application/json" }, + headers: { "content-type": "application/json" } }); console.log(i); } diff --git a/scripts/build.js b/scripts/build.js index 2c0d7328..f618100c 100644 --- a/scripts/build.js +++ b/scripts/build.js @@ -2,20 +2,22 @@ const { execSync } = require("child_process"); const path = require("path"); const fs = require("fs"); const { argv, stdout, exit } = require("process"); -const { execIn, parts } = require('./utils'); +const { execIn, parts, getDirs, walk, sanitizeVarName } = require("./utils"); -if(argv.includes("help")) { +if (argv.includes("help")) { console.log(`Fosscord build script help: Arguments: clean Cleans up previous builds verbose Enable verbose logging logerrors Log build errors to console pretty-errors Pretty-print build errors - silent No output to console or files.`); + silent No output to console or files. 
+ propagate-err Exit script with error code if build fails.`); exit(0); } -let steps = 1, i = 0; +let steps = 5, + i = 0; if (argv.includes("clean")) steps++; const verbose = argv.includes("verbose") || argv.includes("v"); @@ -23,7 +25,7 @@ const logerr = argv.includes("logerrors"); const pretty = argv.includes("pretty-errors"); const silent = argv.includes("silent"); -if(silent) console.error = console.log = function(){} +if (silent) console.error = console.log = function () {}; if (argv.includes("clean")) { console.log(`[${++i}/${steps}] Cleaning...`); @@ -36,37 +38,79 @@ if (argv.includes("clean")) { console.log(`[${++i}/${steps}] Compiling src files ...`); -let buildFlags = '' -if(pretty) buildFlags += '--pretty ' +let buildFlags = ""; +if (pretty) buildFlags += "--pretty "; -try { - execSync( - 'node "' + - path.join(__dirname, "..", "node_modules", "typescript", "lib", "tsc.js") + - '" -p "' + - path.join(__dirname, "..") + - '" ' + buildFlags, - { - cwd: path.join(__dirname, ".."), - shell: true, - env: process.env, - encoding: "utf8" - } - ) -} catch (error) { - if(verbose || logerr) { - error.stdout.split(/\r?\n/).forEach((line) => { - let _line = line.replace('dist/','',1); - if(!pretty && _line.includes('.ts(')) { - //reformat file path for easy jumping - _line = _line.replace('(',':',1).replace(',',':',1).replace(')','',1) - } - console.error(_line); - }) +console.log(`[${++i}/${steps}] Building plugin index...`); +let pluginDir = path.join(__dirname, "..", "src", "plugins"); +let output = 'import { Plugin } from "util/plugin";\n'; + +const dirs = fs.readdirSync(pluginDir).filter((x) => { + try { + fs.readdirSync(path.join(pluginDir, x)); + return true; + } catch (e) { + return false; } - console.error(`Build failed! Please check build.log for info!`); - if(!silent){ - if(pretty) fs.writeFileSync("build.log.ansi", error.stdout); - fs.writeFileSync("build.log", error.stdout.replaceAll(/[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g, '')); +}); +dirs.forEach((x) => { + let pluginManifest = require(path.join(pluginDir, x, "plugin.json")); + output += `import * as ${sanitizeVarName(x)} from "./${x}/${pluginManifest.mainClass}";\n`; +}); +output += `\nexport const PluginIndex: any = {\n`; +dirs.forEach((x) => { + output += ` "${x}": new ${sanitizeVarName(x)}.default(),\n`; //ctor test: '${path.resolve(path.join(pluginDir, x))}', require('./${x}/plugin.json') +}); +output += `};`; + +fs.writeFileSync(path.join(__dirname, "..", "src", "plugins", "PluginIndex.ts"), output); + +if (!argv.includes("copyonly")) { + console.log(`[${++i}/${steps}] Compiling source code...`); + + let buildFlags = ""; + if (pretty) buildFlags += "--pretty "; + + try { + execSync( + 'node "' + + path.join(__dirname, "..", "node_modules", "typescript", "lib", "tsc.js") + + '" -p "' + + path.join(__dirname, "..") + + '" ' + + buildFlags, + { + cwd: path.join(__dirname, ".."), + shell: true, + env: process.env, + encoding: "utf8" + } + ); + } catch (error) { + if (verbose || logerr) { + error.stdout.split(/\r?\n/).forEach((line) => { + let _line = line.replace("dist/", "", 1); + if (!pretty && _line.includes(".ts(")) { + //reformat file path for easy jumping + _line = _line.replace("(", ":", 1).replace(",", ":", 1).replace(")", "", 1); + } + console.error(_line); + }); + } + console.error(`Build failed! 
Please check build.log for info!`); + if (!silent) { + if (pretty) fs.writeFileSync("build.log.ansi", error.stdout); + fs.writeFileSync( + "build.log", + error.stdout.replaceAll(/[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g, "") + ); + } + throw error; } -} \ No newline at end of file +} + +console.log(`[${++i}/${steps}] Copying plugin data...`); +let pluginFiles = walk(pluginDir).filter((x) => !x.endsWith(".ts")); +pluginFiles.forEach((x) => { + fs.copyFileSync(x, x.replace("src", "dist")); +}); diff --git a/scripts/build/clean.js b/scripts/build/clean.js new file mode 100644 index 00000000..92ec6d77 --- /dev/null +++ b/scripts/build/clean.js @@ -0,0 +1,18 @@ +const { execSync } = require("child_process"); +const path = require("path"); +const fs = require("fs"); +const { argv, stdout, exit } = require("process"); +const { execIn, parts, getDirs, walk, sanitizeVarName } = require("../utils"); + +module.exports = function (config) { + if (fs.existsSync(config.buildLog)) fs.rmSync(config.buildLog); + if (fs.existsSync(config.buildLogAnsi)) fs.rmSync(config.buildLogAnsi); + + if (config.clean) { + console.log(`==> Cleaning...`); + if (fs.existsSync(config.distDir)) { + fs.rmSync(config.distDir, { recursive: true }); + if (config.verbose) console.log(`Deleted ${path.resolve(config.distDir)}!`); + } + } +}; diff --git a/scripts/build/compile_tsc.js b/scripts/build/compile_tsc.js new file mode 100644 index 00000000..179707a3 --- /dev/null +++ b/scripts/build/compile_tsc.js @@ -0,0 +1,48 @@ +const { execSync } = require("child_process"); +const path = require("path"); +const fs = require("fs"); +const { argv, stdout, exit } = require("process"); +const { execIn, parts, getDirs, walk, sanitizeVarName } = require("../utils"); + +module.exports = function (config) { + console.log("==> Compiling source with tsc..."); + let buildFlags = ""; + if (config.pretty) buildFlags += "--pretty "; + + try { + execSync( + 'node "' + + path.join(config.rootDir, "node_modules", "typescript", "lib", "tsc.js") + + '" -p "' + + path.join(config.rootDir) + + '" ' + + buildFlags, + { + cwd: path.join(config.rootDir), + shell: true, + env: process.env, + encoding: "utf8" + } + ); + } catch (error) { + if (config.verbose || config.logerr) { + error.stdout.split(/\r?\n/).forEach((line) => { + let _line = line.replace("dist/", "", 1); + if (!config.pretty && _line.includes(".ts(")) { + //reformat file path for easy jumping + _line = _line.replace("(", ":", 1).replace(",", ":", 1).replace(")", "", 1); + } + console.error(_line); + }); + } + console.error(`Build failed! 
Please check build.log for info!`); + if (!config.silent) { + if (config.pretty) fs.writeFileSync(path.join(config.rootDir, "build.log.ansi"), error.stdout); + fs.writeFileSync( + path.join(config.rootDir, "build.log"), + error.stdout.replaceAll(/[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g, "") + ); + } + throw error; + } +}; diff --git a/scripts/build/plugin_prepare.js b/scripts/build/plugin_prepare.js new file mode 100644 index 00000000..247ad22d --- /dev/null +++ b/scripts/build/plugin_prepare.js @@ -0,0 +1,31 @@ +const { execSync } = require("child_process"); +const path = require("path"); +const fs = require("fs"); +const { argv, stdout, exit } = require("process"); +const { execIn, parts, getDirs, walk, sanitizeVarName } = require("../utils"); + +module.exports = function (config) { + console.log(`==> Building plugin index...`); + let output = 'import { Plugin } from "util/plugin";\n'; + + const dirs = fs.readdirSync(config.pluginDir).filter((x) => { + try { + fs.readdirSync(path.join(config.pluginDir, x)); + return true; + } catch (e) { + return false; + } + }); + dirs.forEach((x) => { + let pluginManifest = require(path.join(config.pluginDir, x, "plugin.json")); + console.log(` ==> Registering plugin: ${pluginManifest.name} (${pluginManifest.id}) by ${pluginManifest.authors}`); + output += `import * as ${sanitizeVarName(x)} from "./${x}/${pluginManifest.mainClass}";\n`; + }); + output += `\nexport const PluginIndex: any = {\n`; + dirs.forEach((x) => { + output += ` "${x}": new ${sanitizeVarName(x)}.default(),\n`; //ctor test: '${path.resolve(path.join(pluginDir, x))}', require('./${x}/plugin.json') + }); + output += `};`; + + fs.writeFileSync(path.join(config.pluginDir, "PluginIndex.ts"), output); +}; diff --git a/scripts/build/plugin_resources.js b/scripts/build/plugin_resources.js new file mode 100644 index 00000000..5b4b97f2 --- /dev/null +++ b/scripts/build/plugin_resources.js @@ -0,0 +1,13 @@ +const { execSync } = require("child_process"); +const path = require("path"); +const fs = require("fs"); +const { argv, stdout, exit } = require("process"); +const { execIn, parts, getDirs, walk, sanitizeVarName } = require("../utils"); + +module.exports = function (config) { + console.log(`==> Copying all plugin resources...`); + let pluginFiles = walk(config.pluginDir).filter((x) => !x.endsWith(".ts")); + pluginFiles.forEach((x) => { + fs.copyFileSync(x, x.replace("src", "dist")); + }); +}; diff --git a/scripts/build/remap_imports.js b/scripts/build/remap_imports.js new file mode 100644 index 00000000..cdcd571a --- /dev/null +++ b/scripts/build/remap_imports.js @@ -0,0 +1,15 @@ +const { execSync } = require("child_process"); +const path = require("path"); +const fs = require("fs"); +const { argv, stdout, exit } = require("process"); +const { execIn, parts, getDirs, walk, sanitizeVarName } = require("../utils"); + +module.exports = function (config) { + console.log(`==> Remapping module imports...`); + let files = walk(config.distDir).filter((x) => x.endsWith(".js")); + files.forEach((x) => { + let fc = fs.readFileSync(x).toString(); + fc = fc.replaceAll("@fosscord/", "#"); + fs.writeFileSync(x, fc); + }); +}; diff --git a/scripts/build_new.js b/scripts/build_new.js new file mode 100644 index 00000000..6a56e7f7 --- /dev/null +++ b/scripts/build_new.js @@ -0,0 +1,31 @@ +const { execSync } = require("child_process"); +const path = require("path"); +const fs = require("fs"); +const { argv, stdout, exit } = require("process"); +const { execIn, parts, 
getDirs, walk, sanitizeVarName } = require("./utils"); + +//file paths +const rootDir = path.join(__dirname, ".."); +const srcDir = path.join(rootDir, "src"); +const distDir = path.join(rootDir, "dist"); +const scriptsDir = path.join(rootDir, "scripts"); +const configPath = path.join(rootDir, "build.json"); +const buildLog = path.join(rootDir, "build.log"); +const buildLogAnsi = path.join(rootDir, "build.log.ansi"); +const pluginDir = path.join(srcDir, "plugins"); + +//more, dont export +const buildStepDir = path.join(scriptsDir, "build"); + +if (!fs.existsSync(configPath)) { + if (!fs.existsSync(path.join(configPath + ".default"))) { + console.log("build.json.default not found! Exiting!"); + exit(1); + } + fs.copyFileSync(configPath + ".default", configPath); +} +let config = { rootDir, srcDir, distDir, configPath, buildLog, buildLogAnsi, pluginDir, ...require(configPath) }; + +config.steps.pre.forEach((step) => require(path.join(buildStepDir, step))(config)); +require(path.join(buildStepDir, "compile_" + config.compiler))(config); +config.steps.post.forEach((step) => require(path.join(buildStepDir, step))(config)); diff --git a/scripts/code_quality.js b/scripts/code_quality.js new file mode 100644 index 00000000..aca01d24 --- /dev/null +++ b/scripts/code_quality.js @@ -0,0 +1,75 @@ +const path = require("path"); +const fs = require("fs").promises; +const { execIn, getLines, walk, projectRoot } = require("./utils"); + +let printTodos = process.argv.includes("TODOS"); + +let root = path.resolve(path.join(__dirname, "..", "src")); +let files = walk(root); +let _files = []; +let errors = 0, + warnings = 0, + todos = 0; + +Promise.all(files.map(getFile)).then((f) => { + Promise.all(Object.keys(_files).map(checkFile)); + console.log(`\n${errors} errors, ${warnings} warnings, ${todos} TODOs.`); + + let loc = 0; + Object.values(_files).forEach((x) => { + loc += x.length; + }); + console.log("\nStats:\n"); + console.log(`Lines of code: ${loc} lines in ${Object.values(_files).length} files.`); + + debugger; +}); + +async function getFile(name) { + let contents = (await fs.readFile(name)).toString().split("\n"); + _files[name] = contents; +} + +async function checkFile(x) { + _files[x].forEach((line) => scanLine(x, line)); +} + +function log(file, line, msg) { + let lineNum = _files[file].indexOf(line) + 1; + console.log(msg, "File:", file.replace(root + "/", "") + ":" + lineNum); +} + +function scanLine(x, line) { + if (/import/.test(line)) { + if (/import {?.*}? 
from '.*'/.test(line)) { + log(x, line, `[WARN] Inconsistent import syntax, please use double quotes!`); + warnings++; + } + } else { + if (line.trim().endsWith("TODO:")) { + log(x, line, `[ERRO] Empty TODO!`); + errors++; + } else if (/\/\/\s{0,3}TODO:/.test(line)) { + if (printTodos) log(x, line, `[TODO] Found a TODO: ${line.split("TODO:")[1].trim()}.`); + todos++; + } + if (/(:|=)/.test(line)) { + if (/(:|=) {2,}/.test(line)) { + log(x, line, `[WARN] Multiple spaces in assignment!`); + warnings++; + } + if (/(:|=)\t'/.test(line)) { + log(x, line, `[WARN] Tab in assignment!`); + warnings++; + } + if (/(:|=)\w'/.test(line)) { + log(x, line, `[WARN] Missing space in assignment!`); + warnings++; + } + if (/(:|=) undefined/.test(line) && !/(:|=){2,} undefined/.test(line)) { + log(x, line, `[WARN] Use of undefined!`); + warnings++; + } + } + } +} diff --git a/scripts/db_migrations.js b/scripts/db_migrations.js new file mode 100644 index 00000000..df5196b1 --- /dev/null +++ b/scripts/db_migrations.js @@ -0,0 +1,80 @@ +#!/usr/bin/node +const path = require("path"); +const fs = require("fs"); +const { stdout, exit } = require("process"); +const { execIn } = require("./utils.js"); +const { ask } = require("./utils/ask.js"); + +async function main() { + let filename; + if(process.argv[2]) filename = process.argv[2]; + else filename = await ask("Please enter the name of your migration: "); + let dbconf; + try { + dbconf = JSON.parse(fs.readFileSync("dbconf.json")); + } catch (e) { + console.log("No dbconf.json found!"); + dbconf = {}; + } + + if(!dbconf["sqlite"]) + dbconf.sqlite = { + conn_str: "migrations.db", + migrations_dir: "sqlite", + package: "sqlite3" + } + if(!dbconf["postgres"] && process.env.FC_DB_POSTGRES) { + console.log("Found FC_DB_POSTGRES environment variable. Using it!"); + dbconf.postgres = { + conn_str: process.env.FC_DB_POSTGRES, + migrations_dir: "postgres", + package: "pg" + } + } + if(!dbconf["mariadb"] && process.env.FC_DB_MARIADB){ + console.log("Found FC_DB_MARIADB environment variable. 
Using it!"); + dbconf.mariadb = { + conn_str: process.env.FC_DB_MARIADB, + migrations_dir: "mariadb", + package: "mysql2" + } + } + fs.writeFileSync("dbconf.json", JSON.stringify(dbconf, null, 4)); + + //build + execIn(`node scripts/build_new.js`, process.cwd(), {stdio: "inherit"}); + + if(fs.existsSync(".env") && !fs.existsSync(".env.bak")) + fs.renameSync(".env", ".env.bak"); + Object.keys(dbconf).forEach((db) => { + console.log(`Applying migrations for ${db}`); + if(!fs.existsSync(path.join("node_modules", dbconf[db].package))) + execIn(`npm i ${dbconf[db].package}`, process.cwd()); + fs.writeFileSync( + `.env`, + `DATABASE=${dbconf[db].conn_str} + THREADS=1 + DB_MIGRATE=true + DB_VERBOSE=true` + ); + execIn(`node dist/start.js`, process.cwd(), {stdio: "inherit"}); + }); + + Object.keys(dbconf).forEach((db) => { + console.log(`Generating new migrations for ${db}`); + fs.writeFileSync( + `.env`, + `DATABASE=${dbconf[db].conn_str} + THREADS=1 + DB_MIGRATE=true + DB_VERBOSE=true` + ); + execIn(`node node_modules/typeorm/cli.js migration:generate "src/util/migrations/${db}/${filename}" -d dist/util/util/Database.js -p`, process.cwd(), {stdio: "inherit"}); + }); + if(fs.existsSync(".env.bak")) { + fs.rmSync(".env"); + fs.renameSync(".env.bak", ".env"); + } + exit(0); +} +main(); \ No newline at end of file diff --git a/scripts/db_migrations.sh b/scripts/db_migrations.sh deleted file mode 100755 index 9ec8230a..00000000 --- a/scripts/db_migrations.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/sh - -if [ ! -z "$1" ] -then - FILENAME="$1" - echo "Using filename: $FILENAME" -else - read -p "Enter migration filename: " FILENAME -fi - -[ -f ".env" ] && ( - mv .env .env.tmp 2>/dev/null - source .env.tmp 2>/dev/null -) -npm run build clean logerrors pretty-errors - -make_migration() { - echo "Creating migrations for $2" - mkdir "src/util/migrations/$2" 2>/dev/null -# npm run build clean logerrors pretty-errors - THREADS=1 DATABASE="$1" DB_MIGRATE=a npm run start:bundle - THREADS=1 DATABASE="$1" DB_MIGRATE=a npx typeorm-ts-node-commonjs migration:generate "src/util/migrations/$2/$FILENAME" -d src/util/util/Database.ts -p - #npm run build clean logerrors pretty-errors - #THREADS=1 DATABASE="$1" DB_MIGRATE=a npm run start:bundle -} - -npm i sqlite3 -make_migration "database.db" "sqlite" - -[ -z "$FC_DB_POSTGRES" ] || ( - npm i pg - make_migration "$FC_DB_POSTGRES" "postgres" -) - -[ -z "$FC_DB_MARIADB" ] || ( - npm i mysql2 - make_migration "$FC_DB_MARIADB" "mariadb" -) - -[ -f ".env.tmp" ] && mv .env.tmp .env 2>/dev/null - diff --git a/scripts/depcheck.js b/scripts/depcheck.js index 08df156c..44ac2bb6 100644 --- a/scripts/depcheck.js +++ b/scripts/depcheck.js @@ -9,22 +9,22 @@ const { execIn, getLines } = require("./utils"); let npmi_extra_flags = ""; const resolveminor = argv.includes("resolveminor"); -if(argv.includes("nobuild")) npmi_extra_flags += "--ignore-scripts "; +if (argv.includes("nobuild")) npmi_extra_flags += "--ignore-scripts "; parts.forEach((part) => { let partDir = path.join(__dirname, "..", "..", part); let distDir = path.join(partDir, "dist"); console.log(`Checking updates for ${part} (${partDir})`); - if(part == "bundle") { - execIn(`npm run syncdeps`, partDir) + if (part == "bundle") { + execIn(`npm run syncdeps`, partDir); } - if(resolveminor) { - fs.rmSync(path.join(partDir, "node_modules"), { + if (resolveminor) { + fs.rmSync(path.join(partDir, "node_modules"), { recursive: true, - force: true, + force: true }); - execIn(`npm i --save --no-fund --no-audit --no-package-lock 
${npmi_extra_flags}`, partDir) - } + execIn(`npm i --save --no-fund --no-audit --no-package-lock ${npmi_extra_flags}`, partDir); + } let x = [ [ "pkg", @@ -33,24 +33,18 @@ parts.forEach((part) => { wanted: "2.0", latest: "2.0", dependent: "cdn", - location: "/usr/src/fosscord/bundle/node_packages/pkg", - }, - ], + location: "/usr/src/fosscord/bundle/node_packages/pkg" + } + ] ]; - x = Object.entries( - JSON.parse(execIn("npm outdated --json", partDir)) - ); + x = Object.entries(JSON.parse(execIn("npm outdated --json", partDir))); x.forEach((a) => { - let pkgname = a[0]; - let pkginfo = a[1]; - if(!pkginfo.current) - console.log(`MISSING ${pkgname}: ${pkginfo.current} -> ${pkginfo.wanted} (latest: ${pkginfo.latest})`); - else if(pkginfo.latest != pkginfo.wanted){ - if(pkginfo.current != pkginfo.wanted) - console.log(`MINOR ${pkgname}: ${pkginfo.current} -> ${pkginfo.wanted}`); - console.log(`MAJOR ${pkgname}: ${pkginfo.current} -> ${pkginfo.latest}`); - } - else - console.log(`MINOR ${pkgname}: ${pkginfo.current} -> ${pkginfo.wanted}`); + let pkgname = a[0]; + let pkginfo = a[1]; + if (!pkginfo.current) console.log(`MISSING ${pkgname}: ${pkginfo.current} -> ${pkginfo.wanted} (latest: ${pkginfo.latest})`); + else if (pkginfo.latest != pkginfo.wanted) { + if (pkginfo.current != pkginfo.wanted) console.log(`MINOR ${pkgname}: ${pkginfo.current} -> ${pkginfo.wanted}`); + console.log(`MAJOR ${pkgname}: ${pkginfo.current} -> ${pkginfo.latest}`); + } else console.log(`MINOR ${pkgname}: ${pkginfo.current} -> ${pkginfo.wanted}`); }); }); diff --git a/scripts/depclean.js b/scripts/depclean.js index 333f5aa0..5a402331 100644 --- a/scripts/depclean.js +++ b/scripts/depclean.js @@ -3,8 +3,7 @@ const fs = require("fs"); const { env } = require("process"); const { execSync } = require("child_process"); const { argv, stdout, exit } = require("process"); - -const { execIn, getLines } = require('./utils'); +const { execIn, getLines } = require("./utils"); const bundleRequired = ["@ovos-media/ts-transform-paths"]; const removeModules = argv.includes("cleanup"); @@ -15,13 +14,11 @@ execIn("npm i", path.join(__dirname, "..")); let partDir = path.join(__dirname, ".."); let distDir = path.join(partDir, "dist"); let start = 0; -start = getLines( - execIn("npm ls --parseable --package-lock-only -a", partDir) -); +start = getLines(execIn("npm ls --parseable --package-lock-only -a", partDir)); if (fs.existsSync(distDir)) fs.rmSync(distDir, { recursive: true, - force: true, + force: true }); let x = { dependencies: [], @@ -29,17 +26,13 @@ let x = { invalidDirs: [], invalidFiles: [], missing: [], - using: [], + using: [] }; let dcproc = execIn("npx depcheck --json", partDir); -if(dcproc.stdout) x = JSON.parse(dcproc.stdout); +if (dcproc.stdout) x = JSON.parse(dcproc.stdout); else x = JSON.parse(dcproc); -fs.writeFileSync( - path.join(__dirname, "..", `depclean.out.json`), - JSON.stringify(x, null, "\t"), - { encoding: "utf8" } -); +fs.writeFileSync(path.join(__dirname, "..", `depclean.out.json`), JSON.stringify(x, null, "\t"), { encoding: "utf8" }); let depsToRemove = x.dependencies.join(" "); if (depsToRemove) execIn(`npm r --save ${depsToRemove}`, partDir); @@ -50,11 +43,9 @@ if (depsToRemove) execIn(`npm r --save --dev ${depsToRemove}`, partDir); if (removeModules && fs.existsSync(path.join(partDir, "node_modules"))) fs.rmSync(path.join(partDir, "node_modules"), { recursive: true, - force: true, + force: true }); -let end = getLines( - execIn("npm ls --parseable --package-lock-only -a", partDir) -); +let end = 
getLines(execIn("npm ls --parseable --package-lock-only -a", partDir)); console.log(`${part}: ${start} -> ${end} (diff: ${start - end})`); console.log("Installing required packages for bundle..."); diff --git a/scripts/first_setup.js b/scripts/first_setup.js new file mode 100755 index 00000000..4ce6e7de --- /dev/null +++ b/scripts/first_setup.js @@ -0,0 +1,272 @@ +#!/usr/bin/node +const path = require("path"); +const fs = require("fs"); +const { stdout, exit } = require("process"); +const { execIn } = require("./utils.js"); +const { ask } = require("./utils/ask.js"); + + +const data = { env: [], config: { register: {} }, extra_pkgs: [] }; +let rights = []; + +process.on("SIGINT", function () { + console.log("Caught interrupt signal"); + process.exit(); +}); + +console.log("Welcome to Fosscord!"); +console.log("Please remember this is pre-release software!"); +console.log("We will guide you through some important setup steps."); +console.log(); + +if (fs.existsSync("package-lock.json")) fs.rmSync("package-lock.json"); +if (fs.existsSync("yarn.lock")) fs.rmSync("yarn.lock"); + +async function main() { + printTitle("Step 1: Database setup"); + console.log("1. PostgreSQL (recommended)"); + console.log("2. MariaDB/MySQL"); + console.log("3. SQLite (not recommended, but good for a simple test)"); + + while (!data.db) { + let answer = await ask("Please select a database type: "); + if (answer == "1") { + data.db = "postgres"; + data.extra_pkgs.push("pg"); + } else if (answer == "2") { + data.db = "mariadb"; + data.extra_pkgs.push("mysql2"); + } else if (answer == "3") { + data.db = "sqlite"; + data.extra_pkgs.push("sqlite3"); + } else { + console.log("Invalid choice!"); + } + } + + printTitle("Step 2: Database credentials"); + if (data.db != "sqlite") { + console.log("Please enter your database credentials."); + console.log("You can leave the password field empty if you don't want to set a password."); + console.log(); + while (!data.db_host) { + data.db_host = await ask("Host: "); + } + while (!data.db_port) { + data.db_port = await ask("Port: "); + } + while (!data.db_user) { + data.db_user = await ask("Username: "); + } + while (!data.db_pass) { + data.db_pass = await ask("Password: "); + } + while (!data.db_name) { + data.db_name = await ask("Database name: "); + } + } else { + console.log("SQLite does not use credentials..."); + } + + printTitle("Step 3: Domain setup"); + console.log("Please enter your domain."); + console.log("You can leave the port field empty if you don't want to set a port."); + console.log(); + + data.domain = await ask("Domain (default=localhost): "); + if (!data.domain) data.domain = "localhost"; + else data.ssl = /y?/i.test(await ask("SSL/HTTPS (Y/n): ")); + + data.port = await ask("Port (default=3001): "); + if (!data.port) data.port = "3001"; + + if (data.db != "sqlite") + data.env.push(`DATABASE=${data.db}://${data.db_user}:${data.db_pass}@${data.db_host}:${data.db_port}/${data.db_name}`); + data.env.push(`PORT=${data.port}`); + data.env.push("THREADS=1"); + + printTitle("Step 4: Default rights"); + console.log("Please enter the default rights for new users."); + console.log("Valid rights are: none, discord, full, custom."); + console.log(); + let lines = fs.readFileSync(path.join(__dirname, "..", "src", "util", "util", "Rights.ts")).toString(); + let lines2 = lines.split("\n"); + let lines3 = lines2.filter((y) => y.includes(": BitFlag(")); + let lines4 = lines3.map((x) => x.split("//")[0].trim()); + + let maxRights = 0n; + lines4.forEach((x) => { + maxRights += 
eval(`rights.${x.replace(":", " = ").replace(",", ";")}`); + }); + discordRights = maxRights; + discordRights -= rights.SEND_BACKDATED_EVENTS; + discordRights -= rights.MANAGE_GUILD_DIRECTORY; + discordRights -= rights.CREDITABLE; + discordRights -= rights.BYPASS_RATE_LIMITS; + discordRights -= rights.ADD_MEMBERS; + discordRights -= rights.MANAGE_RATE_LIMITS; + discordRights -= rights.OPERATOR; + + data.default_rights = await ask("Rights (default=none): "); + if (!data.default_rights || data.defaultRights == "none") data.config.register.defaultRights = "0"; + else if (data.default_rights == "discord") data.config.register.defaultRights = discordRights.toString(); + else if (data.default_rights == "full") data.config.register.defaultRights = maxRights.toString(); + else if (data.default_rights == "custom") data.config.register.defaultRights = (await askRights()).toString(); + + if (data.domain != "localhost") + data.config = { + cdn: { + endpointPrivate: `http://localhost:${data.port}`, + endpointPublic: `${data.ssl ? "https" : "http"}://${data.domain}:${data.port}` + }, + gateway: { + endpointPrivate: `ws://localhost:${data.port}`, + endpointPublic: `${data.ssl ? "wss" : "ws"}://${data.domain}:${data.port}` + }, + ...data.config + }; + printTitle("Step 5: extra options"); + + if (/y?/i.test(await ask("Use fast BCrypt implementation (requires a compiler) (Y/n): "))) data.extra_pkgs.push("bcrypt"); + if (/y?/i.test(await ask("Enable support for widgets (requires compiler, known to fail on some ARM devices.) (Y/n): "))) + data.extra_pkgs.push("canvas"); + + printTitle("Step 6: finalizing..."); + //save + console.log("==> Writing .env..."); + fs.writeFileSync(".env", data.env.join("\n")); + console.log("==> Writing initial.json"); + fs.writeFileSync("initial.json", JSON.stringify(data.config, (space = 4))); + //install packages... 
+ console.log("==> Installing packages..."); + console.log(" ==> Ensuring yarn is up to date (v3, not v1)..."); + execIn("npx yarn set version stable", process.cwd()); + console.log(" ==> Installing base packages"); + execIn("npx --yes yarn install", process.cwd(), { stdio: "inherit" }); + if (data.extra_pkgs.length > 0) { + console.log(" ==> Checking dependencies..."); + checkCompilers(); + if (data.extra_pkgs.includes("canvas")) checkCanvasDeps(); + if (data.extra_pkgs.includes("bcrypt")) checkBcryptDeps(); + + console.log(` ==> Installing extra packages: ${data.extra_pkgs.join(", ")}...`); + execIn(`npx --yes yarn add -O ${data.extra_pkgs.join(" ")}`, process.cwd(), { stdio: "inherit" }); + } + + console.log("==> Building..."); + execIn("npx --yes yarn run build", process.cwd(), { stdio: "inherit" }); + printTitle("Step 6: run your instance!"); + console.log("Installation is complete!"); + console.log("You can now start your instance by running 'npm run start:bundle'!"); + exit(0); +} +main(); + +async function askRights() { + let w = 0; + let brights = { ...eval(`rights`) }; + Object.keys(rights).forEach((x) => { + brights[x] = false; + let str = `[x] ${Object.keys(rights).length}: ${x}`; + if (str.length > w) w = str.length; + }); + + let resp = ""; + let selectedRights = 0n; + while (resp != "q") { + selectedRights = 0n; + Object.keys(brights).forEach((x) => { + if (brights[x]) selectedRights += rights[x]; + }); + console.clear(); + printTitle("Step 4: Default rights"); + printTitle(`Current rights: ${selectedRights} (0b${selectedRights.toString(2)}, 0x${selectedRights.toString(16)})`); + let xpos = 0; + Object.keys(rights).forEach((x) => { + let str = `[${brights[x] ? "X" : " "}] ${Object.keys(rights).indexOf(x)}: ${x}`.padEnd(w + 1, " "); + if (xpos + str.length > stdout.columns) { + console.log(); + xpos = 0; + } + stdout.write(str); + xpos += str.length; + }); + + console.log(); + resp = await ask("Enter an option, or q to exit: "); + if (/\d{1,}/.test(resp) && resp < Object.keys(rights).length && resp > -1) { + brights[Object.keys(brights)[parseInt(resp)]] ^= true; + } + } + return selectedRights; +} + + + +function printTitle(input) { + let width = stdout.columns / 2 - 1; //40 + console.log(); + console.log("-".repeat(width - input.length / 2), input, "-".repeat(width - input.length / 2)); + console.log(); +} + + +function BitFlag(int) { + return 1n << BigInt(int); +} + +function checkCanvasDeps() { + if ( + !( + checkDep("pixman", "/usr/include/pixman-1/pixman.h") && + checkDep("pixman", "/usr/lib/libpixman-1.so") && + checkDep("cairo", "/usr/include/cairo/cairo.h") && + checkDep("cairo", "/usr/lib/libcairo.so") && + checkDep("pango", "/usr/include/pango-1.0/pango/pangocairo.h") && + checkDep("pango", "/usr/lib/libpango-1.0.so") && + checkDep("pkgconfig", "/usr/bin/pkg-config") + ) + ) { + console.log("Canvas requires the following dependencies to be installed: pixman, cairo, pango, pkgconfig"); + exit(1); + } +} +function checkBcryptDeps() { + /*if (!(checkDep("bcrypt", "/usr/include/bcrypt.h") && checkDep("bcrypt", "/usr/lib/libbcrypt.so"))) { + console.log("Bcrypt requires the following dependencies to be installed: bcrypt"); + exit(1); + }*/ + //TODO: check if required +} + +function checkCompilers() { + //check for gcc, grep, make, python-is-python3 + if ( + !( + checkDep("gcc", "/usr/bin/gcc") && + checkDep("grep", "/usr/bin/grep") && + checkDep("make", "/usr/bin/make") && + checkDep("python3", "/usr/bin/python3") + ) + ) { + console.log("Compiler requirements not met. 
Please install the following: gcc, grep, make, python3"); + exit(1); + } + + //check if /usr/bin/python is a symlink to /usr/bin/python3 + if (!fs.lstatSync("/usr/bin/python").isSymbolicLink()) { + console.log("/usr/bin/python is not a symlink. Please make sure it is a symlink to /usr/bin/python3"); + if (fs.existsSync("/usr/bin/python3")) { + console.log("Hint: sudo ln -s /usr/bin/python3 /usr/bin/python"); + } + exit(1); + } +} + +function checkDep(name, path, message) { + if (!fs.existsSync(path)) { + console.log(`${name} not found at ${path}! Installation of some modules may fail!`); + console.log(message ?? `Please consult your distro's manual for installation instructions.`); + } + return fs.existsSync(path); +} diff --git a/scripts/gen_index.js b/scripts/gen_index.js index 71c64a9f..8a3c7eb8 100644 --- a/scripts/gen_index.js +++ b/scripts/gen_index.js @@ -1,34 +1,37 @@ const path = require("path"); const fs = require("fs"); -const { execIn, getLines, parts } = require('./utils'); +const { execIn, getLines } = require("./utils"); if (!process.argv[2] || !fs.existsSync(process.argv[2])) { - console.log("Please pass a directory that exists!"); - process.exit(1); + console.log("Please pass a directory that exists!"); + process.exit(1); } -console.log(`// ${process.argv[2]}/index.ts`) -const recurse = process.argv.includes("--recursive") +console.log(`// ${process.argv[2]}/index.ts`); +const recurse = process.argv.includes("--recursive"); -const files = fs.readdirSync(process.argv[2]).filter(x => x.endsWith('.ts') && x != 'index.ts'); +const files = fs.readdirSync(process.argv[2]).filter((x) => x.endsWith(".ts") && x != "index.ts"); -let output = ''; +let output = ""; -files.forEach(x => output += `export * from "./${x.replaceAll('.ts','')}";\n`) +files.forEach((x) => (output += `export * from "./${x.replaceAll(".ts", "")}";\n`)); -const dirs = fs.readdirSync(process.argv[2]).filter(x => { - try { - fs.readdirSync(path.join(process.argv[2], x)); - return true; - } catch (e) { - return false; - } +const dirs = fs.readdirSync(process.argv[2]).filter((x) => { + try { + fs.readdirSync(path.join(process.argv[2], x)); + return true; + } catch (e) { + return false; + } +}); +dirs.forEach((x) => { + output += `export * from "./${x}/index";\n`; }); -dirs.forEach(x => { - output += `export * from "./${x}/index";\n` -}) console.log(output); -fs.writeFileSync(path.join(process.argv[2], "index.ts"), output) +fs.writeFileSync(path.join(process.argv[2], "index.ts"), output); -dirs.forEach(x => { - if(recurse) console.log(execIn([process.argv[0], process.argv[1], `"${path.join(process.argv[2], x)}"`, "--recursive"].join(' '), process.cwd())) -}) \ No newline at end of file +dirs.forEach((x) => { + if (recurse) + console.log( + execIn([process.argv[0], process.argv[1], `"${path.join(process.argv[2], x)}"`, "--recursive"].join(" "), process.cwd()) + ); +}); diff --git a/scripts/generate_schema.js b/scripts/generate_schema.js index 6925df5d..e4bdd0c4 100644 --- a/scripts/generate_schema.js +++ b/scripts/generate_schema.js @@ -36,7 +36,7 @@ const Excluded = [ "UncheckedPropertiesSchema", "PropertiesSchema", "AsyncSchema", - "AnySchema", + "AnySchema" ]; function modify(obj) { @@ -48,13 +48,8 @@ function modify(obj) { } function main() { - const files = [ - ...walk(path.join(__dirname, "..", "src", "util", "schemas")), - ]; - const program = TJS.getProgramFromFiles( - files, - compilerOptions - ); + const files = [...walk(path.join(__dirname, "..", "src", "util", "schemas"))]; + const program = 
TJS.getProgramFromFiles(files, compilerOptions); const generator = TJS.buildGenerator(program, settings); if (!generator || !program) return; diff --git a/scripts/migrate_db_engine.js b/scripts/migrate_db_engine.js index 79e9d86f..b5b8008b 100644 --- a/scripts/migrate_db_engine.js +++ b/scripts/migrate_db_engine.js @@ -25,7 +25,7 @@ const { Template, User, VoiceState, - Webhook, + Webhook } = require("../../dist/entities/index"); async function main() { @@ -54,7 +54,7 @@ async function main() { VoiceState, Webhook, Message, - Attachment, + Attachment ]; const oldDB = await initDatabase(); @@ -69,7 +69,7 @@ async function main() { database: isSqlite ? process.env.TO : undefined, entities, name: "new", - synchronize: true, + synchronize: true }); let i = 0; diff --git a/scripts/rights.js b/scripts/rights.js index 20fd139c..5ae576ef 100644 --- a/scripts/rights.js +++ b/scripts/rights.js @@ -6,20 +6,20 @@ const { argv, stdout, exit } = require("process"); const { execIn, getLines, parts } = require("./utils"); -let lines = fs.readFileSync(path.join(__dirname, "..", "src", "util", "util","Rights.ts")).toString() +let lines = fs.readFileSync(path.join(__dirname, "..", "src", "util", "util", "Rights.ts")).toString(); let lines2 = lines.split("\n"); -let lines3 = lines2.filter(y=>y.includes(": BitFlag(")); -let lines4 = lines3.map(x=>x.split("//")[0].trim()) +let lines3 = lines2.filter((y) => y.includes(": BitFlag(")); +let lines4 = lines3.map((x) => x.split("//")[0].trim()); function BitFlag(int) { - return 1n << eval(`${int}n`); + return 1n << BigInt(int); } -let rights = [] +let rights = []; let maxRights = 0n; -lines4.forEach(x=>{ - maxRights += eval(`rights.${x.replace(':'," = ").replace(",",";")}`) -}) +lines4.forEach((x) => { + maxRights += eval(`rights.${x.replace(":", " = ").replace(",", ";")}`); +}); //max rights... console.log(`Maximum rights: ${maxRights}`); //discord rights... 
@@ -31,4 +31,4 @@ discordRights -= rights.BYPASS_RATE_LIMITS; discordRights -= rights.ADD_MEMBERS; discordRights -= rights.MANAGE_RATE_LIMITS; discordRights -= rights.OPERATOR; -console.log(`Discord-like rights: ${discordRights}`); \ No newline at end of file +console.log(`Discord-like rights: ${discordRights}`); diff --git a/scripts/update_schemas.js b/scripts/update_schemas.js index 361bedc1..151b52d2 100644 --- a/scripts/update_schemas.js +++ b/scripts/update_schemas.js @@ -6,4 +6,4 @@ const { argv, stdout, exit } = require("process"); const { execIn, getLines, parts } = require("./utils"); -execIn("node scripts/generate_schema.js", path.join('.')); \ No newline at end of file +execIn("node scripts/generate_schema.js", path.join(".")); diff --git a/scripts/utils.js b/scripts/utils.js index 84aaeed6..b679392b 100644 --- a/scripts/utils.js +++ b/scripts/utils.js @@ -4,6 +4,8 @@ const { env } = require("process"); const { execSync } = require("child_process"); const { argv, stdout, exit } = require("process"); +const projectRoot = path.resolve(path.join(__dirname, "..")); + function copyRecursiveSync(src, dest) { //if (verbose) console.log(`cpsync: ${src} -> ${dest}`); let exists = fs.existsSync(src); @@ -16,10 +18,7 @@ function copyRecursiveSync(src, dest) { if (isDirectory) { fs.mkdirSync(dest, { recursive: true }); fs.readdirSync(src).forEach(function (childItemName) { - copyRecursiveSync( - path.join(src, childItemName), - path.join(dest, childItemName) - ); + copyRecursiveSync(path.join(src, childItemName), path.join(dest, childItemName)); }); } else { fs.copyFileSync(src, dest); @@ -44,8 +43,45 @@ function getLines(output) { return output.split("\n").length; } -module.exports = { +function getDirs(dir) { + return fs.readdirSync(dir).filter((x) => { + try { + fs.readdirSync(dir.join(dir, x)); + return true; + } catch (e) { + return false; + } + }); +} + +function walk(dir) { + let results = []; + let list = fs.readdirSync(dir); + list.forEach(function (file) { + file = dir + "/" + file; + let stat = fs.statSync(file); + if (stat && stat.isDirectory()) { + /* Recurse into a subdirectory */ + results = results.concat(walk(file)); + } else { + results.push(file); + } + }); + return results; +} + +function sanitizeVarName(str) { + return str.replace("-", "_").replace(/[^\w\s]/gi, ""); +} + +module.exports = { //consts + projectRoot, //functions - copyRecursiveSync, execIn, getLines + copyRecursiveSync, + execIn, + getLines, + getDirs, + walk, + sanitizeVarName }; diff --git a/scripts/utils/ask.js b/scripts/utils/ask.js new file mode 100644 index 00000000..cb8a29f6 --- /dev/null +++ b/scripts/utils/ask.js @@ -0,0 +1,20 @@ +const readline = require("readline"); +const rl = readline.createInterface({ input: process.stdin, output: process.stdout }); + +async function ask(question) { + return new Promise((resolve, _reject) => { + return rl.question(question, (answer) => { + resolve(answer); + }); + }).catch((err) => { + console.log(err); + }); +} +async function askBool(question) { + return /y?/i.test(await ask(question)); +} + +module.exports = { + ask, + askBool +} \ No newline at end of file |
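
Note: the new scripts/build_new.js expects a build.json at the repository root, falling back to copying build.json.default, which is not part of this diff. Based only on how build_new.js and the step modules read the config, a working file would look roughly like the sketch below; the exact defaults shipped in build.json.default may differ, and the step names, compiler value and flags here are inferred from the code above rather than taken from the real file.

    {
        "clean": true,
        "verbose": false,
        "logerr": true,
        "pretty": true,
        "silent": false,
        "compiler": "tsc",
        "steps": {
            "pre": ["clean", "plugin_prepare"],
            "post": ["plugin_resources", "remap_imports"]
        }
    }

With this shape, build_new.js runs each entry of steps.pre from scripts/build/, then scripts/build/compile_tsc.js (resolved from "compile_" + compiler), then each entry of steps.post, passing the merged config (resolved paths plus these flags) to every step module.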
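Similarly, the new scripts/db_migrations.js persists its database targets in dbconf.json. Given the defaults it writes when both FC_DB_POSTGRES and FC_DB_MARIADB are set, the generated file would look approximately like this; the connection strings are placeholders, not values taken from a real setup.

    {
        "sqlite": {
            "conn_str": "migrations.db",
            "migrations_dir": "sqlite",
            "package": "sqlite3"
        },
        "postgres": {
            "conn_str": "postgres://user:password@localhost:5432/fosscord",
            "migrations_dir": "postgres",
            "package": "pg"
        },
        "mariadb": {
            "conn_str": "mysql://user:password@localhost:3306/fosscord",
            "migrations_dir": "mariadb",
            "package": "mysql2"
        }
    }

The script then builds the project, applies pending migrations against each configured database, and runs the typeorm migration:generate CLI for each one, using the migration name passed as the first command-line argument (or asked for interactively).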