author    xnacly <matteogropp@gmail.com>  2020-12-30 22:49:01 +0100
committer xnacly <matteogropp@gmail.com>  2020-12-30 22:49:01 +0100
commit    82e63ef807cad85905df323b09b2ac0fb21bcb1a (patch)
tree      2f0a09f42c40402e56e010447df6f560db6d505d /src
parent    Initial commit (diff)
download  server-82e63ef807cad85905df323b09b2ac0fb21bcb1a.tar.xz
:sparkles: added crawler to cdn
Diffstat (limited to 'src')
-rw-r--r--  src/Server.ts                        93
-rw-r--r--  src/Util.ts                          38
-rw-r--r--  src/index.ts                         14
-rw-r--r--  src/routes/attachments.ts.disabled   19
-rw-r--r--  src/routes/external.ts               83
5 files changed, 247 insertions, 0 deletions
diff --git a/src/Server.ts b/src/Server.ts
new file mode 100644
index 00000000..7d93c444
--- /dev/null
+++ b/src/Server.ts
@@ -0,0 +1,93 @@
+import express, { Application, Router, Request, Response, NextFunction } from "express";
+import { MongoDatabase, Database } from "lambert-db";
+import { Server as HTTPServer } from "http";
+import { traverseDirectory } from "./Util";
+import bodyParser from "body-parser";
+import "express-async-errors";
+
+const log = console.log;
+console.log = (content) => {
+    log(`[${new Date().toTimeString().split(" ")[0]}]`, content);
+};
+
+export type ServerOptions = {
+    db: string;
+    port: number;
+    host: string;
+};
+
+declare global {
+    namespace Express {
+        interface Request {
+            server: Server;
+        }
+    }
+}
+
+export class Server {
+    app: Application;
+    http: HTTPServer;
+    db: Database;
+    routes: Router[];
+    options: ServerOptions;
+
+    constructor(options: Partial<ServerOptions> = { port: 3000, host: "0.0.0.0" }) {
+        this.app = express();
+        this.db = new MongoDatabase(options?.db);
+        this.options = options as ServerOptions;
+    }
+
+    async init() {
+        await this.db.init();
+
+        console.log("[Database] connected...");
+        await new Promise((res, rej) => {
+            this.http = this.app.listen(this.options.port, this.options.host, () => res(null));
+        });
+        this.routes = await this.registerRoutes(__dirname + "/routes/");
+    }
+
+    async registerRoutes(root: string) {
+        this.app.use((req, res, next) => {
+            req.server = this;
+            next();
+        });
+        const routes = await traverseDirectory({ dirname: root, recursive: true }, this.registerRoute.bind(this, root));
+        this.app.use((err: string | Error, req: Request, res: Response, next: NextFunction) => {
+            res.status(400).send(err);
+            next(err);
+        });
+        return routes;
+    }
+
+    registerRoute(root: string, file: string): any {
+        if (root.endsWith("/") || root.endsWith("\\")) root = root.slice(0, -1); // removes the slash at the end of the root dir
+        let path = file.replace(root, ""); // remove root from path
+        path = path.split(".").slice(0, -1).join("."); // truncate the .js/.ts file extension of the path
+        if (path.endsWith("/index")) path = path.slice(0, -6); // delete index from path
+
+        try {
+            var router = require(file);
+            if (router.router) router = router.router;
+            if (router.default) router = router.default;
+            if (!router || router?.prototype?.constructor?.name !== "router")
+                throw `File doesn't export any default router`;
+            this.app.use(path, <Router>router);
+            console.log(`[Routes] ${path} registered`);
+
+            return router;
+        } catch (error) {
+            console.error(new Error(`[Server] ¯\\_(ツ)_/¯ Failed to register route ${path}: ${error}`));
+        }
+    }
+
+    async destroy() {
+        await this.db.destroy();
+        await new Promise((res, rej) => {
+            this.http.close((err) => {
+                if (err) return rej(err);
+                return res("");
+            });
+        });
+    }
+}
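Note: registerRoutes() walks src/routes/, require()s every file that passes the traverseDirectory filter, checks that its default export is an Express Router, and mounts it at a path derived from the file name (extension stripped, a trailing "/index" removed). Below is a minimal sketch of a route module this loader would accept, assuming the project is compiled so the default .js filter in Util.ts matches; the file name src/routes/example.ts and the handler body are hypothetical and not part of this commit.

// src/routes/example.ts (hypothetical) – would be mounted at "/example" by registerRoutes()
import { Router, Request, Response } from "express";

const router = Router();

router.get("/", async (req: Request, res: Response) => {
    // req.server is injected by the middleware registered in registerRoutes()
    res.json({ registeredRoutes: req.server.routes?.length ?? 0 });
});

export default router;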
diff --git a/src/Util.ts b/src/Util.ts
new file mode 100644
index 00000000..291372c1
--- /dev/null
+++ b/src/Util.ts
@@ -0,0 +1,38 @@
+import fs from "fs/promises";
+import "missing-native-js-functions";
+
+export interface traverseDirectoryOptions {
+    dirname: string;
+    filter?: RegExp;
+    excludeDirs?: RegExp;
+    recursive?: boolean;
+}
+
+const DEFAULT_EXCLUDE_DIR = /^\./;
+const DEFAULT_FILTER = /^([^\.].*)\.js$/;
+
+export async function traverseDirectory<T>(
+    options: traverseDirectoryOptions,
+    action: (path: string) => T
+): Promise<T[]> {
+    if (!options.filter) options.filter = DEFAULT_FILTER;
+    if (!options.excludeDirs) options.excludeDirs = DEFAULT_EXCLUDE_DIR;
+
+    const routes = await fs.readdir(options.dirname);
+    const promises = <Promise<T | T[] | undefined>[]>routes.map(async (file) => {
+        const path = options.dirname + file;
+        const stat = await fs.lstat(path);
+        if (path.match(<RegExp>options.excludeDirs)) return;
+
+        if (stat.isFile() && path.match(<RegExp>options.filter)) {
+            return action(path);
+        } else if (options.recursive && stat.isDirectory()) {
+            return traverseDirectory({ ...options, dirname: path + "/" }, action);
+        }
+    });
+    const result = await Promise.all(promises);
+
+    const t = <(T | undefined)[]>result.flat();
+
+    return <T[]>t.filter((x) => x != undefined);
+}
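traverseDirectory reads a directory, skips anything matching excludeDirs, runs action on every file whose path matches filter (compiled .js files by default), recurses into subdirectories when recursive is set, and returns the flattened results with undefined entries removed. A small sketch of calling it directly, assuming compiled .js files on disk; the directory and the path-collecting callback are illustrative only.

import { traverseDirectory } from "./Util";

(async () => {
    // Collect the full paths of every matching .js file below ./routes/, recursively.
    // Note: dirname must end with a slash, because paths are built as dirname + file.
    const paths = await traverseDirectory<string>(
        { dirname: __dirname + "/routes/", recursive: true },
        (path) => path
    );
    console.log(paths);
})();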
diff --git a/src/index.ts b/src/index.ts
new file mode 100644
index 00000000..d8025968
--- /dev/null
+++ b/src/index.ts
@@ -0,0 +1,14 @@
+import { Server } from "./Server";
+
+const server = new Server();
+server
+    .init()
+    .then(() => {
+        console.log("[Server] started on :" + server.options.port);
+    })
+    .catch((e) => console.error("[Server] Error starting: ", e));
+
+//// server
+////     .destroy()
+////     .then(() => console.log("[Server] closed."))
+////     .catch((e) => console.log("[Server] Error closing: ", e));
diff --git a/src/routes/attachments.ts.disabled b/src/routes/attachments.ts.disabled
new file mode 100644
index 00000000..db1a7efc
--- /dev/null
+++ b/src/routes/attachments.ts.disabled
@@ -0,0 +1,19 @@
+import { Router } from "express";
+import multer from "multer";
+const multer_ = multer();
+
+const router = Router();
+router.post("/:file", multer_.single("attachment"), async (req, res) => {
+    const { buffer } = req.file;
+
+    res.set("Content-Type", "image/png");
+    res.send(buffer);
+});
+router.get("/:hash/:file", async (req, res) => {
+    res.send(`${req.params.hash}/${req.params.file}`);
+});
+router.delete("/:hash/:file", async (req, res) => {
+    res.send("remove");
+});
+
+export default router;
diff --git a/src/routes/external.ts b/src/routes/external.ts
new file mode 100644
index 00000000..14980b05
--- /dev/null
+++ b/src/routes/external.ts
@@ -0,0 +1,83 @@
+import bodyParser from "body-parser";
+import { Router } from "express";
+import fetch from "node-fetch";
+import cheerio from "cheerio";
+import btoa from "btoa";
+import { URL } from "url";
+
+const router = Router();
+
+type crawled = {
+    title: string;
+    type: string;
+    description: string;
+    url: string;
+    image_url: string;
+};
+
+const DEFAULT_FETCH_OPTIONS: any = {
+    redirect: "follow",
+    follow: 1,
+    headers: {
+        "user-agent": "Mozilla/5.0 (compatible; Discordbot/2.0; +https://discordapp.com)",
+    },
+    size: 1024 * 1024 * 8,
+    compress: true,
+    method: "GET",
+};
+
+router.post("/", bodyParser.json(), async (req, res) => {
+    if (!req.body) throw new Error("Invalid Body (url missing) \nExample: url:https://discord.com");
+
+    const { db } = req.server;
+    const { url } = req.body;
+
+    const ID = btoa(url);
+
+    const cache = await db.data.crawler({ id: ID }).get();
+    if (cache) return res.send(cache);
+
+    try {
+        const request = await fetch(url, DEFAULT_FETCH_OPTIONS);
+
+        const text = await request.text();
+        const ツ: any = cheerio.load(text);
+
+        const ogTitle = ツ('meta[property="og:title"]').attr("content");
+        const ogDescription = ツ('meta[property="og:description"]').attr("content");
+        const ogImage = ツ('meta[property="og:image"]').attr("content");
+        const ogUrl = ツ('meta[property="og:url"]').attr("content");
+        const ogType = ツ('meta[property="og:type"]').attr("content");
+
+        const filename = new URL(url).host.split(".")[0];
+
+        const ImageResponse = await fetch(ogImage, DEFAULT_FETCH_OPTIONS);
+        const ImageType = ImageResponse.headers.get("content-type");
+        const ImageExtension = ImageType?.split("/")[1];
+        const ImageResponseBuffer = (await ImageResponse.buffer()).toString("base64");
+        const cachedImage = `/external/${ID}/${filename}.${ImageExtension}`;
+
+        await db.data.externals.push({ image: ImageResponseBuffer, id: ID, type: ImageType });
+
+        const new_cache_entry = { id: ID, ogTitle, ogDescription, cachedImage, ogUrl, ogType };
+        await db.data.crawler.push(new_cache_entry);
+
+        res.send(new_cache_entry);
+    } catch (error) {
+        console.log(error);
+
+        throw new Error("Couldn't fetch website");
+    }
+});
+
+router.get("/:id/:filename", async (req, res) => {
+    const { db } = req.server;
+    const { id, filename } = req.params;
+    const { image, type } = await db.data.externals({ id: id }).get();
+    const imageBuffer = Buffer.from(image, "base64");
+
+    res.set("Content-Type", type);
+    res.send(imageBuffer);
+});
+
+export default router;