From 236f79e4d4df4806e00571ace820eb3d77925912 Mon Sep 17 00:00:00 2001
From: Tristan Chin
Date: Thu, 7 Jul 2022 20:40:20 -0400
Subject: [PATCH] Re-implement upload size security

---
 .eslintrc                                   |   1 +
 src/errors/AddFilesFault.ts                 |   9 ++
 src/errors/MergerDisposedFault.ts           |   9 ++
 src/errors/MergerMaxFileCountFault.ts       |   5 +-
 src/errors/MergerMaxFileSizeFault.ts        |   5 +-
 src/index.ts                                |   4 +-
 .../DatabaseService/Models/MergerModel.ts   |  76 ++++++++-
 src/services/MergerService/MergerService.ts | 146 ++++++++++++------
 8 files changed, 192 insertions(+), 63 deletions(-)
 create mode 100644 src/errors/AddFilesFault.ts
 create mode 100644 src/errors/MergerDisposedFault.ts

diff --git a/.eslintrc b/.eslintrc
index d1f0aae..f995a34 100644
--- a/.eslintrc
+++ b/.eslintrc
@@ -73,6 +73,7 @@
       }
     ],
     "no-restricted-exports": "off",
+    "no-restricted-syntax": "off",
     "spaced-comment": [
       "warn",
       "always",
diff --git a/src/errors/AddFilesFault.ts b/src/errors/AddFilesFault.ts
new file mode 100644
index 0000000..a96f2c2
--- /dev/null
+++ b/src/errors/AddFilesFault.ts
@@ -0,0 +1,9 @@
+import Fault from "./Fault";
+
+class AddFilesFault extends Fault {
+  constructor() {
+    super(500, "An error occurred when adding your file(s).");
+  }
+}
+
+export default AddFilesFault;
diff --git a/src/errors/MergerDisposedFault.ts b/src/errors/MergerDisposedFault.ts
new file mode 100644
index 0000000..2c59965
--- /dev/null
+++ b/src/errors/MergerDisposedFault.ts
@@ -0,0 +1,9 @@
+import Fault from "./Fault";
+
+class MergerDisposedFault extends Fault {
+  constructor() {
+    super(404, "The requested merge has been disposed.");
+  }
+}
+
+export default MergerDisposedFault;
diff --git a/src/errors/MergerMaxFileCountFault.ts b/src/errors/MergerMaxFileCountFault.ts
index 2670a37..4358306 100644
--- a/src/errors/MergerMaxFileCountFault.ts
+++ b/src/errors/MergerMaxFileCountFault.ts
@@ -2,7 +2,10 @@ import Fault from "./Fault";
 
 class MergerMaxFileCountFault extends Fault {
   constructor() {
-    super(400, "Max input file count reached for this merge.");
+    super(
+      400,
+      "Max input file count reached for this merge. Any files before the limit was reached were still added to the merge."
+    );
   }
 }
 
diff --git a/src/errors/MergerMaxFileSizeFault.ts b/src/errors/MergerMaxFileSizeFault.ts
index 461dc1a..ee1f881 100644
--- a/src/errors/MergerMaxFileSizeFault.ts
+++ b/src/errors/MergerMaxFileSizeFault.ts
@@ -2,7 +2,10 @@ import Fault from "./Fault";
 
 class MergerMaxFileSizeFault extends Fault {
   constructor() {
-    super(400, "Max input file size reached for this merge.");
+    super(
+      400,
+      "Max input file size reached for this merge. Any files before the limit was reached were still added to the merge."
+    );
   }
 }
 
diff --git a/src/index.ts b/src/index.ts
index b72d0fb..449da40 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -37,7 +37,7 @@ app.post(
       parseDate(req.body.creationDate)
     );
 
-    return res.sendFile(output);
+    return res.sendFile(output.path);
   }
 );
 
@@ -71,7 +71,7 @@ app.post(
       parseDate(req.body.creationDate)
     );
 
-    return res.sendFile(output);
+    return res.sendFile(output.path);
   }
 );
 
diff --git a/src/services/DatabaseService/Models/MergerModel.ts b/src/services/DatabaseService/Models/MergerModel.ts
index c182d94..8b8a131 100644
--- a/src/services/DatabaseService/Models/MergerModel.ts
+++ b/src/services/DatabaseService/Models/MergerModel.ts
@@ -1,27 +1,87 @@
+import fs from "fs-extra";
+import MergerDisposedFault from "../../../errors/MergerDisposedFault";
+import MergerMaxFileCountFault from "../../../errors/MergerMaxFileCountFault";
+import MergerMaxFileSizeFault from "../../../errors/MergerMaxFileSizeFault";
+import ConfigService from "../../ConfigService/ConfigService";
 import Model from "./Model";
 
+export interface MergerFile {
+  path: string;
+  size: number;
+}
+
+const config = ConfigService.getConfig();
+
 class MergerModel extends Model {
-  private files: string[] = [];
-  private output: string | null = null;
+  private files: MergerFile[] = [];
+  private output: MergerFile | null = null;
+  private disposed = false;
 
-  public addFiles(...files: string[]) {
+  public async addFiles(...files: MergerFile[]) {
+    this.ensureNotDisposed();
     if (this.output) {
-      this.output = null;
+      await this.setOutput(null);
+    }
+
+    for (let i = 0; i < files.length; ++i) {
+      const file = files[i]!;
+
+      if (this.getRemainingSize() < file.size) {
+        throw new MergerMaxFileSizeFault();
+      }
+      if (this.getRemainingFilesCount() < 1) {
+        throw new MergerMaxFileCountFault();
+      }
+
+      this.files.push(file);
     }
-    this.files.push(...files);
   }
 
-  public getFiles(): string[] {
+  public getFiles(): MergerFile[] {
     return this.files;
   }
 
-  public setOutput(output: string) {
+  public getFilesCount(): number {
+    return this.files.length;
+  }
+
+  public getRemainingFilesCount(): number {
+    return config.maxMergerFileCount - this.getFilesCount();
+  }
+
+  public getSize(): number {
+    return this.files.reduce((acc, file) => acc + file.size, 0);
+  }
+
+  public getRemainingSize(): number {
+    return config.maxMergerFileSize - this.getSize();
+  }
+
+  public async setOutput(output: MergerFile | null) {
+    this.ensureNotDisposed();
+    if (this.output) {
+      await fs.remove(this.output.path);
+    }
     this.output = output;
   }
 
-  public getOutput(): string | null {
+  public getOutput(): MergerFile | null {
     return this.output;
   }
+
+  public async dispose() {
+    this.disposed = true;
+    const files = [...this.files, ...(this.output ? [this.output] : [])];
+    await Promise.all(files.map((f) => fs.remove(f.path)));
+    this.files = [];
+    this.output = null;
+  }
+
+  private ensureNotDisposed() {
+    if (this.disposed) {
+      throw new MergerDisposedFault();
+    }
+  }
 }
 
 export default MergerModel;
diff --git a/src/services/MergerService/MergerService.ts b/src/services/MergerService/MergerService.ts
index 0b66584..35fc6e0 100644
--- a/src/services/MergerService/MergerService.ts
+++ b/src/services/MergerService/MergerService.ts
@@ -7,13 +7,17 @@ import chalk from "chalk";
 import ConfigService from "../ConfigService/ConfigService";
 import MergerNotFoundFault from "../../errors/MergerNotFoundFault";
 import DatabaseService from "../DatabaseService/DatabaseService";
-import MergerModel from "../DatabaseService/Models/MergerModel";
+import MergerModel, { MergerFile } from "../DatabaseService/Models/MergerModel";
 import { DIR_OUTPUTS, DIR_INPUTS, DB_MERGERS } from "../../config/constants";
 import MergeFault from "../../errors/MergeFault";
 import execAsync from "../../utils/execAsync";
 import ffmpegDate from "../../utils/ffmpegDate";
 import MergerAlreadyExistsFault from "../../errors/MergerAlreadyExistsFault";
 import normalizePath from "../../utils/normalizePath";
+import MergerMaxFileSizeFault from "../../errors/MergerMaxFileSizeFault";
+import MergerMaxFileCountFault from "../../errors/MergerMaxFileCountFault";
+import Fault from "../../errors/Fault";
+import AddFilesFault from "../../errors/AddFilesFault";
 
 const config = ConfigService.getConfig();
 
@@ -47,55 +51,75 @@ class MergerService {
   }
 
   public async append(mergerId: string, ...files: Express.Multer.File[]) {
-    const exists = await this.db.has(mergerId);
-
-    if (!exists) throw new MergerNotFoundFault();
-
-    const intermediatePaths = await Promise.all(
-      files
-        .map((f) => normalizePath(f.path))
-        .map(async (filePath) => {
-          const intermediatePath = normalizePath(
-            path.join(DIR_INPUTS, uuid())
-          );
-          const command = `ffmpeg -i "${filePath}" -c copy -bsf:v h264_mp4toannexb -f mpegts ${intermediatePath}`;
-          await execAsync(command);
-          await fs.remove(filePath);
-          return intermediatePath;
-        })
-    );
+    const merger = await this.db.get(mergerId);
+    let err: any | null = null;
+
+    if (!merger) throw new MergerNotFoundFault();
+
+    try {
+      for await (const file of files) {
+        const remainingSize = merger.getRemainingSize();
+        const remainingCount = merger.getRemainingFilesCount();
+
+        if (remainingCount < 1) {
+          throw new MergerMaxFileCountFault();
+        }
+        if (remainingSize < file.size) {
+          throw new MergerMaxFileSizeFault();
+        }
+
+        const intermediateFile = await this.createIntermediateFile(
+          file.path,
+          remainingSize
+        );
+
+        // Recheck max file size and delete the intermediate file if it's too big.
+        // We use <= instead of <, because FFMPEG only LIMITS the size of the file; it does not throw an error if the input is bigger.
+        // If the intermediate file size is the same as the remaining size, it most likely means the file was too big.
+        if (remainingSize <= intermediateFile.size) {
+          await fs.remove(intermediateFile.path);
+          throw new MergerMaxFileSizeFault();
+        }
+
+        await merger.addFiles(intermediateFile);
+      }
+    } catch (e) {
+      if (e instanceof Fault) {
+        err = e;
+      } else {
+        err = new AddFilesFault();
+      }
+    }
+
+    await Promise.all(files.map((f) => fs.remove(f.path)));
+    await this.db.update(merger.id, merger);
 
-    await this.db.update(mergerId, (merger) => {
-      merger.addFiles(...intermediatePaths);
-    });
+    if (err) {
+      throw err;
+    }
   }
 
   public async merge(
     mergerId: string,
     creationDate: Date = new Date()
-  ): Promise<string> {
+  ): Promise<MergerFile> {
     const merger = await this.db.get(mergerId);
 
     if (!merger) {
       throw new MergerNotFoundFault();
     }
 
-    let output = merger.getOutput();
+    const existingOutput = merger.getOutput();
+    if (existingOutput) return existingOutput;
 
-    if (output) return output;
-
-    const outputPath = path.join(DIR_OUTPUTS, `${uuid()}.mp4`);
     const files = merger.getFiles();
-    const ffmpegCommand = await this.createFfmpegCommand(
-      files,
-      creationDate,
-      outputPath
-    );
-
-    this.log(`merge ${merger.id}: Merging ${files.length} files`);
+    let outputFile: MergerFile | null = null;
 
     try {
-      await execAsync(ffmpegCommand);
-      output = outputPath;
+      this.log(`merge ${merger.id}: Merging ${files.length} files`);
+      outputFile = await this.createMergedFile(
+        files.map((f) => f.path),
+        creationDate
+      );
     } catch (err) {
       if (err instanceof Error) {
@@ -103,26 +127,18 @@
         console.error(chalk.red(err.message));
       }
     }
 
-    if (output === null) {
+    if (!outputFile) {
       throw new MergeFault();
     }
 
-    this.log(`merge ${merger.id}: Merged to ${output}`);
-
-    merger.setOutput(output);
+    await merger.setOutput(outputFile);
     await this.db.update(merger.id, merger);
 
-    return output;
-  }
-
-  private async createFfmpegCommand(
-    inputPaths: string[],
-    creationDate: Date,
-    outputPath: string
-  ): Promise<string> {
-    const creationTime = ffmpegDate(creationDate);
-    const input = inputPaths.join("|");
+    this.log(
+      `merge ${merger.id}: Merged to ${outputFile.path} (${outputFile.size} bytes)`
+    );
 
-    return `ffmpeg -i "concat:${input}" -metadata creation_time="${creationTime}" -c copy -bsf:a aac_adtstoasc ${outputPath}`;
+    return outputFile;
   }
 
   private log(...message: string[]) {
@@ -144,10 +160,38 @@
     schedule.cancelJob(this.getMergerScheduleId(merger.id));
     await this.db.delete(merger.id);
+    await merger.dispose();
+  }
+
+  private async createIntermediateFile(
+    inputFilePath: string,
+    maxSize: number
+  ): Promise<MergerFile> {
+    const intermediatePath = normalizePath(path.join(DIR_INPUTS, uuid()));
+    const command = `ffmpeg -i "${inputFilePath}" -fs ${maxSize}B -c copy -bsf:v h264_mp4toannexb -f mpegts ${intermediatePath}`;
+
+    await execAsync(command);
+
+    const { size } = await fs.stat(intermediatePath);
+    return { path: intermediatePath, size };
+  }
+
+  private async createMergedFile(
+    inputPaths: string[],
+    creationDate = new Date()
+  ): Promise<MergerFile> {
+    const outputPath = normalizePath(
+      path.join(DIR_OUTPUTS, `${uuid()}.mp4`)
+    );
+    const creationTime = ffmpegDate(creationDate);
+    const input = inputPaths.join("|");
+    const ffmpegCommand = `ffmpeg -i "concat:${input}" -metadata creation_time="${creationTime}" -c copy -bsf:a aac_adtstoasc ${outputPath}`;
+
+    await execAsync(ffmpegCommand);
 
-    const mergerFiles = [...merger.getFiles(), merger.getOutput()];
+    const { size } = await fs.stat(outputPath);
 
-    await Promise.all(mergerFiles.map((f) => f && fs.remove(f)));
+    return { path: outputPath, size };
   }
 }
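
Note (not part of the patch): the standalone TypeScript sketch below restates the limit accounting that MergerModel.addFiles and createIntermediateFile implement above. MAX_FILE_COUNT and MAX_TOTAL_SIZE are hypothetical stand-ins for config.maxMergerFileCount and config.maxMergerFileSize, and the helper names are illustrative only.

// Hypothetical limits standing in for config.maxMergerFileCount / config.maxMergerFileSize.
const MAX_FILE_COUNT = 10;
const MAX_TOTAL_SIZE = 500 * 1024 * 1024; // 500 MiB

interface MergerFile {
  path: string;
  size: number;
}

// Bytes still available before the per-merge size budget is exhausted.
function remainingSize(files: MergerFile[]): number {
  return MAX_TOTAL_SIZE - files.reduce((acc, f) => acc + f.size, 0);
}

// Mirrors the guards in MergerModel.addFiles: reject when either the file
// count or the cumulative size budget would be exceeded.
function canAccept(files: MergerFile[], incoming: MergerFile): boolean {
  if (MAX_FILE_COUNT - files.length < 1) return false;
  if (remainingSize(files) < incoming.size) return false;
  return true;
}

// ffmpeg's -fs option stops writing at the size limit instead of failing,
// so an intermediate file whose size reaches the remaining budget is treated
// as truncated (the same <= recheck done in createIntermediateFile).
function looksTruncated(budget: number, produced: MergerFile): boolean {
  return budget <= produced.size;
}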