Diffstat (limited to 'src/cdn/util')
-rw-r--r--  src/cdn/util/FileStorage.ts  50
-rw-r--r--  src/cdn/util/S3Storage.ts    56
-rw-r--r--  src/cdn/util/Storage.ts      58
-rw-r--r--  src/cdn/util/index.ts         3
-rw-r--r--  src/cdn/util/multer.ts       10
5 files changed, 177 insertions(+), 0 deletions(-)
diff --git a/src/cdn/util/FileStorage.ts b/src/cdn/util/FileStorage.ts
new file mode 100644
index 00000000..fea013a6
--- /dev/null
+++ b/src/cdn/util/FileStorage.ts
@@ -0,0 +1,50 @@
+import fs from "fs";
+import { dirname, join } from "path";
+import { Readable } from "stream";
+import { Storage } from "./Storage";
+//import ExifTransformer = require("exif-be-gone");
+import ExifTransformer from "exif-be-gone";
+
+// TODO: split stored files into separate folders named after cloned route
+
+function getPath(path: string) {
+	// STORAGE_LOCATION has a default value in start.ts
+	const root = process.env.STORAGE_LOCATION || "../";
+	let filename = join(root, path);
+
+	// reject null bytes and any path that would escape the storage root
+	if (path.indexOf("\0") !== -1 || !filename.startsWith(root)) throw new Error("invalid path");
+	return filename;
+}
+
+export class FileStorage implements Storage {
+	async get(path: string): Promise<Buffer | null> {
+		path = getPath(path);
+		try {
+			return fs.readFileSync(path);
+		} catch (error) {
+			try {
+				// if the path is actually a directory, fall back to the first file inside it
+				const files = fs.readdirSync(path);
+				if (!files.length) return null;
+				return fs.readFileSync(join(path, files[0]));
+			} catch (error) {
+				return null;
+			}
+		}
+	}
+
+	async set(path: string, value: any): Promise<void> {
+		path = getPath(path);
+		//fse.ensureDirSync(dirname(path));
+		fs.mkdirSync(dirname(path), { recursive: true });
+
+		// strip EXIF metadata and only resolve once the file has been fully written
+		const cleaned_file = fs.createWriteStream(path);
+		await new Promise<void>((resolve, reject) =>
+			Readable.from(value).pipe(new ExifTransformer()).pipe(cleaned_file).on("finish", resolve).on("error", reject)
+		);
+	}
+
+	async delete(path: string) {
+		//TODO we should delete the parent directory if empty
+		fs.unlinkSync(getPath(path));
+	}
+}
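
For context, a rough usage sketch of FileStorage on its own follows. The storage root and attachment path are placeholders invented for illustration, not values from this commit; set() streams the data through exif-be-gone before it reaches disk.

import { FileStorage } from "./FileStorage";

// Illustrative values only: a throwaway storage root and a made-up attachment path.
process.env.STORAGE_LOCATION = "/tmp/cdn-files/";
const files = new FileStorage();

(async () => {
	await files.set("attachments/1234/cat.png", Buffer.from("example bytes"));
	const data = await files.get("attachments/1234/cat.png"); // Buffer | null
	console.log(data?.byteLength);
	await files.delete("attachments/1234/cat.png");
})();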
diff --git a/src/cdn/util/S3Storage.ts b/src/cdn/util/S3Storage.ts
new file mode 100644
index 00000000..a7892e5e
--- /dev/null
+++ b/src/cdn/util/S3Storage.ts
@@ -0,0 +1,56 @@
+import { S3 } from "@aws-sdk/client-s3";
+import { Readable } from "stream";
+import { Storage } from "./Storage";
+
+const readableToBuffer = (readable: Readable): Promise<Buffer> =>
+	new Promise((resolve, reject) => {
+		const chunks: Buffer[] = [];
+		readable.on("data", (chunk) => chunks.push(chunk));
+		readable.on("error", reject);
+		readable.on("end", () => resolve(Buffer.concat(chunks)));
+	});
+
+export class S3Storage implements Storage {
+	public constructor(private client: S3, private bucket: string, private basePath?: string) {}
+
+	/**
+	 * Always return a string, to ensure consistency.
+	 */
+	get bucketBasePath() {
+		return this.basePath ?? "";
+	}
+
+	async set(path: string, data: Buffer): Promise<void> {
+		await this.client.putObject({
+			Bucket: this.bucket,
+			Key: `${this.bucketBasePath}${path}`,
+			Body: data
+		});
+	}
+
+	async get(path: string): Promise<Buffer | null> {
+		try {
+			const s3Object = await this.client.getObject({
+				Bucket: this.bucket,
+				Key: `${this.bucketBasePath}${path}`
+			});
+
+			if (!s3Object.Body) return null;
+
+			const body = s3Object.Body;
+
+			return await readableToBuffer(<Readable>body);
+		} catch (err) {
+			console.error(`[CDN] Unable to get S3 object at path ${path}.`);
+			console.error(err);
+			return null;
+		}
+	}
+
+	async delete(path: string): Promise<void> {
+		await this.client.deleteObject({
+			Bucket: this.bucket,
+			Key: `${this.bucketBasePath}${path}`
+		});
+	}
+}
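
As a sketch of wiring S3Storage up by hand: the region, bucket, and key prefix below are placeholders rather than values from this commit, and credentials are resolved through the standard AWS SDK credential chain.

import { S3 } from "@aws-sdk/client-s3";
import { S3Storage } from "./S3Storage";

// Placeholder region, bucket, and key prefix; replace with real configuration.
const client = new S3({ region: "eu-central-1" });
const s3 = new S3Storage(client, "my-cdn-bucket", "cdn/");

(async () => {
	await s3.set("avatars/1234.png", Buffer.from("example bytes")); // stored under the key "cdn/avatars/1234.png"
	const buf = await s3.get("avatars/1234.png"); // Buffer | null
	await s3.delete("avatars/1234.png");
})();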
diff --git a/src/cdn/util/Storage.ts b/src/cdn/util/Storage.ts
new file mode 100644
index 00000000..1ab6a1d9
--- /dev/null
+++ b/src/cdn/util/Storage.ts
@@ -0,0 +1,58 @@
+import path from "path";
+import { FileStorage } from "./FileStorage";
+//import fse from "fs-extra";
+import { S3 } from "@aws-sdk/client-s3";
+import fs from "fs";
+import { bgCyan, black } from "picocolors";
+import { S3Storage } from "./S3Storage";
+
+export interface Storage {
+	set(path: string, data: Buffer): Promise<void>;
+	get(path: string): Promise<Buffer | null>;
+	delete(path: string): Promise<void>;
+}
+
+let storage: Storage;
+
+if (process.env.STORAGE_PROVIDER === "file" || !process.env.STORAGE_PROVIDER) {
+	let location = process.env.STORAGE_LOCATION;
+	if (location) {
+		location = path.resolve(location);
+	} else {
+		location = path.join(process.cwd(), "files");
+	}
+	console.log(`[CDN] storage location: ${bgCyan(`${black(location)}`)}`);
+	//fse.ensureDirSync(location);
+	fs.mkdirSync(location, { recursive: true });
+	process.env.STORAGE_LOCATION = location;
+
+	storage = new FileStorage();
+} else if (process.env.STORAGE_PROVIDER === "s3") {
+	const region = process.env.STORAGE_REGION,
+		bucket = process.env.STORAGE_BUCKET;
+
+	if (!region) {
+		console.error(`[CDN] You must provide a region when using the S3 storage provider.`);
+		process.exit(1);
+	}
+
+	if (!bucket) {
+		console.error(`[CDN] You must provide a bucket when using the S3 storage provider.`);
+		process.exit(1);
+	}
+
+	// in the S3 provider, this should be the root path in the bucket
+	let location = process.env.STORAGE_LOCATION;
+
+	if (!location) {
+		console.warn(`[CDN] STORAGE_LOCATION unconfigured for S3 provider, defaulting to the bucket root...`);
+		location = undefined;
+	}
+
+	const client = new S3({ region });
+
+	storage = new S3Storage(client, bucket, location);
+} else {
+	console.error(`[CDN] Invalid STORAGE_PROVIDER "${process.env.STORAGE_PROVIDER}". Expected "file" or "s3".`);
+	process.exit(1);
+}
+
+export { storage };
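
For illustration, a read route built on the exported storage singleton might look roughly like this; the Express router, the route shape, and the import paths are assumptions for the sketch, not code from this commit.

import { Router, Request, Response } from "express";
import { storage } from "./Storage";

const router = Router();

// Illustrative route: return the stored file, or 404 if the active backend has nothing at that path.
router.get("/:id/:filename", async (req: Request, res: Response) => {
	const file = await storage.get(`${req.params.id}/${req.params.filename}`);
	if (!file) return res.status(404).send("not found");
	return res.send(file);
});

export default router;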
diff --git a/src/cdn/util/index.ts b/src/cdn/util/index.ts
new file mode 100644
index 00000000..07a5c31a
--- /dev/null
+++ b/src/cdn/util/index.ts
@@ -0,0 +1,3 @@
+export * from "./FileStorage";
+export * from "./multer";
+export * from "./Storage";
diff --git a/src/cdn/util/multer.ts b/src/cdn/util/multer.ts
new file mode 100644
index 00000000..f56b0fb5
--- /dev/null
+++ b/src/cdn/util/multer.ts
@@ -0,0 +1,10 @@
+import multerConfig from "multer";
+
+export const multer = multerConfig({
+	storage: multerConfig.memoryStorage(),
+	limits: {
+		fields: 10,
+		files: 10,
+		fileSize: 1024 * 1024 * 100 // 100 MB
+	}
+});
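
A hedged sketch of how this middleware is typically attached to an upload route; the route path, the "file" field name, the Express wiring, and the import paths are assumptions rather than part of this commit.

import { Router, Request, Response } from "express";
import { multer } from "./multer";
import { storage } from "./Storage";

const router = Router();

// multer.single("file") buffers the upload in memory (memoryStorage above), subject to the configured limits.
router.post("/:id", multer.single("file"), async (req: Request, res: Response) => {
	if (!req.file) return res.status(400).send("missing file");
	await storage.set(`${req.params.id}/${req.file.originalname}`, req.file.buffer);
	return res.json({ filename: req.file.originalname, size: req.file.size });
});

export default router;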