refactor: ♻️ Use real async in file.ts, change FileUploadSettings to match FilePointer properties

Jack W. 2023-10-24 16:27:08 -04:00
parent 0405f89542
commit 365aace294
No known key found for this signature in database
2 changed files with 586 additions and 507 deletions
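
For reference, the FileUploadSettings change named in the commit message amounts to replacing the standalone interface with a type alias derived from FilePointer, renaming `name` to `filename` in the process (both shapes are taken from the diff below):

    // before this commit
    export interface FileUploadSettings {
        name?: string
        mime: string
        uploadId?: string
        owner?: string
    }

    // after this commit: property types are reused from FilePointer
    export type FileUploadSettings = Partial<Pick<FilePointer, "mime" | "owner">> &
        Pick<FilePointer, "mime" | "filename"> & { uploadId?: string }

Callers such as the /upload and /clone routes in the second file now pass `filename` instead of `name`.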

View file

@@ -1,14 +1,16 @@
import axios from "axios"
import Discord, { Client, Message, TextBasedChannel } from "discord.js"
import { readFile, writeFile } from "node:fs/promises"
import { Readable } from "node:stream"
import crypto from "node:crypto"
import { files } from "./accounts"
import * as Accounts from "./accounts"

export let id_check_regex = /[A-Za-z0-9_\-\.\!\=\:\&\$\,\+\;\@\~\*\(\)\']+/
export let alphanum = Array.from(
    "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
)

// bad solution but whatever
@@ -19,72 +21,66 @@ export type FileVisibility = "public" | "anonymous" | "private"
 * @param length Length of the ID
 * @returns a random alphanumeric string
 */
export function generateFileId(length: number = 5) {
    let fid = ""
    for (let i = 0; i < length; i++) {
        fid += alphanum[crypto.randomInt(0, alphanum.length)]
    }
    return fid
}

export type FileUploadSettings = Partial<Pick<FilePointer, "mime" | "owner">> &
    Pick<FilePointer, "mime" | "filename"> & { uploadId?: string }

export interface Configuration {
    maxDiscordFiles: number
    maxDiscordFileSize: number
    targetGuild: string
    targetChannel: string
    requestTimeout: number
    maxUploadIdLength: number
    accounts: {
        registrationEnabled: boolean
        requiredForUpload: boolean
    }
    trustProxy: boolean
    forceSSL: boolean
}

export interface FilePointer {
    filename: string
    mime: string
    messageids: string[]
    owner?: string
    sizeInBytes?: number
    tag?: string
    visibility?: FileVisibility
    reserved?: boolean
    chunkSize?: number
}

export interface StatusCodeError {
    status: number
    message: string
}

/* */

export default class Files {
    config: Configuration
    client: Client
    files: { [key: string]: FilePointer } = {}
    uploadChannel?: TextBasedChannel

    constructor(client: Client, config: Configuration) {
        this.config = config
        this.client = client

        client.on("ready", () => {
            console.log("Discord OK!")

            client.guilds.fetch(config.targetGuild).then((g) => {
                g.channels.fetch(config.targetChannel).then((a) => {
                    if (a?.isTextBased()) {
@@ -94,168 +90,163 @@ export default class Files {
            })
        })

        readFile(process.cwd() + "/.data/files.json")
            .then((buf) => {
                this.files = JSON.parse(buf.toString() || "{}")
            })
            .catch(console.error)
    }

    /**
     * @description Uploads a new file
     * @param metadata Settings for your new upload
     * @param buffer Buffer containing file content
     * @returns Promise which resolves to the ID of the new file
     */
    async uploadFile(
        metadata: FileUploadSettings,
        buffer: Buffer
    ): Promise<string | StatusCodeError> {
        if (!this.uploadChannel)
            throw {
                status: 503,
                message: "server is not ready - please try again later",
            }

        if (!metadata.filename || !metadata.mime)
            throw { status: 400, message: "missing filename/mime" }

        let uploadId = (metadata.uploadId || generateFileId()).toString()

        if (
            (uploadId.match(id_check_regex) || [])[0] != uploadId ||
            uploadId.length > this.config.maxUploadIdLength
        )
            throw { status: 400, message: "invalid id" }

        if (
            this.files[uploadId] &&
            (metadata.owner ? this.files[uploadId].owner != metadata.owner : true)
        )
            throw { status: 400, message: "you are not the owner of this file id" }

        if (this.files[uploadId] && this.files[uploadId].reserved)
            throw {
                status: 400,
                message:
                    "already uploading this file. if your file is stuck in this state, contact an administrator",
            }

        if (metadata.filename.length > 128)
            throw { status: 400, message: "name too long" }

        if (metadata.mime.length > 128)
            throw { status: 400, message: "mime too long" }

        // reserve file, hopefully should prevent
        // large files breaking

        let existingFile = this.files[uploadId]

        // save

        if (metadata.owner) {
            await files.index(metadata.owner, uploadId)
        }

        // get buffer
        if (
            buffer.byteLength >=
            this.config.maxDiscordFileSize * this.config.maxDiscordFiles
        )
            throw { status: 400, message: "file too large" }

        // generate buffers to upload
        let toUpload = []
        for (
            let i = 0;
            i < Math.ceil(buffer.byteLength / this.config.maxDiscordFileSize);
            i++
        ) {
            toUpload.push(
                buffer.subarray(
                    i * this.config.maxDiscordFileSize,
                    Math.min(
                        buffer.byteLength,
                        (i + 1) * this.config.maxDiscordFileSize
                    )
                )
            )
        }

        // begin uploading
        let uploadTmplt: Discord.AttachmentBuilder[] = toUpload.map((e) => {
            return new Discord.AttachmentBuilder(e).setName(
                Math.random().toString().slice(2)
            )
        })
        let uploadGroups = []
        for (let i = 0; i < Math.ceil(uploadTmplt.length / 10); i++) {
            uploadGroups.push(uploadTmplt.slice(i * 10, (i + 1) * 10))
        }

        let msgIds = []

        for (const uploadGroup of uploadGroups) {
            let message = await this.uploadChannel
                .send({
                    files: uploadGroup,
                })
                .catch((e) => {
                    console.error(e)
                })

            if (message && message instanceof Message) {
                msgIds.push(message.id)
            } else {
                if (!existingFile) delete this.files[uploadId]
                else this.files[uploadId] = existingFile
                throw { status: 500, message: "please try again" }
            }
        }

        // this code deletes the files from discord, btw
        // if need be, replace with job queue system

        if (existingFile && this.uploadChannel) {
            for (let x of existingFile.messageids) {
                this.uploadChannel.messages
                    .delete(x)
                    .catch((err) => console.error(err))
            }
        }

        const { filename, mime, owner } = metadata

        return this.writeFile(uploadId, {
            filename,
            messageids: msgIds,
            mime,
            owner,
            sizeInBytes: buffer.byteLength,

            visibility: existingFile
                ? existingFile.visibility
                : metadata.owner
                ? Accounts.getFromId(metadata.owner)?.defaultFileVisibility
                : undefined,
            // so that json.stringify doesnt include tag:undefined
            ...((existingFile || {}).tag ? { tag: existingFile.tag } : {}),

            chunkSize: this.config.maxDiscordFileSize,
        })
    }

    // fs

    /**
@@ -264,24 +255,26 @@ export default class Files {
     * @param file FilePointer representing the new file
     * @returns Promise which resolves to the file's ID
     */
    async writeFile(uploadId: string, file: FilePointer): Promise<string> {
        this.files[uploadId] = file

        return writeFile(
            process.cwd() + "/.data/files.json",
            JSON.stringify(
                this.files,
                null,
                process.env.NODE_ENV === "development" ? 4 : undefined
            )
        )
            .then(() => uploadId)
            .catch(() => {
                delete this.files[uploadId]
                throw {
                    status: 500,
                    message: "server may be misconfigured, contact admin for help",
                }
            })
    }

    /**
@@ -290,139 +283,183 @@ export default class Files {
     * @param range Byte range to get
     * @returns A `Readable` containing the file's contents
     */
    async readFileStream(
        uploadId: string,
        range?: { start: number; end: number }
    ): Promise<Readable> {
        if (!this.uploadChannel) {
            throw {
                status: 503,
                message: "server is not ready - please try again later",
            }
        }
        if (this.files[uploadId]) {
            let file = this.files[uploadId]
            let scan_msg_begin = 0,
                scan_msg_end = file.messageids.length - 1,
                scan_files_begin = 0,
                scan_files_end = -1

            let useRanges = range && file.chunkSize && file.sizeInBytes

            // todo: figure out how to get typescript to accept useRanges
            if (range && file.chunkSize && file.sizeInBytes) {
                // Calculate where to start file scans...
                scan_files_begin = Math.floor(range.start / file.chunkSize)
                scan_files_end = Math.ceil(range.end / file.chunkSize) - 1

                scan_msg_begin = Math.floor(scan_files_begin / 10)
                scan_msg_end = Math.ceil(scan_files_end / 10)
            }

            let attachments: Discord.Attachment[] = []

            /* File updates */
            let file_updates: Pick<FilePointer, "chunkSize" | "sizeInBytes"> = {}
            let atSIB: number[] = [] // keeps track of the size of each file...

            for (let xi = scan_msg_begin; xi < scan_msg_end + 1; xi++) {
                let msg = await this.uploadChannel.messages
                    .fetch(file.messageids[xi])
                    .catch(() => {
                        return null
                    })
                if (msg?.attachments) {
                    let attach = Array.from(msg.attachments.values())
                    for (
                        let i =
                            useRanges && xi == scan_msg_begin
                                ? scan_files_begin - xi * 10
                                : 0;
                        i <
                        (useRanges && xi == scan_msg_end
                            ? scan_files_end - xi * 10 + 1
                            : attach.length);
                        i++
                    ) {
                        attachments.push(attach[i])
                        atSIB.push(attach[i].size)
                    }
                }
            }

            if (!file.sizeInBytes)
                file_updates.sizeInBytes = atSIB.reduce((a, b) => a + b, 0)
            if (!file.chunkSize) file_updates.chunkSize = atSIB[0]
            if (Object.keys(file_updates).length) {
                // if file_updates not empty
                // i gotta do these weird workarounds, ts is weird sometimes
                // originally i was gonna do key is keyof FilePointer but for some reason
                // it ended up making typeof file[key] never??? so
                let valid_fp_keys = ["sizeInBytes", "chunkSize"]
                let isValidFilePointerKey = (
                    key: string
                ): key is "sizeInBytes" | "chunkSize" =>
                    valid_fp_keys.includes(key)

                for (let [key, value] of Object.entries(file_updates)) {
                    if (isValidFilePointerKey(key)) file[key] = value
                }
                // The original was a callback so I don't think I'm supposed to `await` this -Jack
                writeFile(
                    process.cwd() + "/.data/files.json",
                    JSON.stringify(
                        this.files,
                        null,
                        process.env.NODE_ENV === "development" ? 4 : undefined
                    )
                )
            }

            let position = 0

            let getNextChunk = async () => {
                let scanning_chunk = attachments[position]
                if (!scanning_chunk) {
                    return null
                }

                let d = await axios
                    .get(scanning_chunk.url, {
                        responseType: "arraybuffer",
                        headers: {
                            ...(useRanges
                                ? {
                                      Range: `bytes=${
                                          position == 0 && range && file.chunkSize
                                              ? range.start - scan_files_begin * file.chunkSize
                                              : "0"
                                      }-${
                                          position == attachments.length - 1 && range && file.chunkSize
                                              ? range.end - scan_files_end * file.chunkSize
                                              : ""
                                      }`,
                                  }
                                : {}),
                        },
                    })
                    .catch((e: Error) => {
                        console.error(e)
                    })

                position++

                if (d) {
                    return d.data
                } else {
                    throw {
                        status: 500,
                        message: "internal server error",
                    }
                }
            }

            let ord: number[] = []
            // hopefully this regulates it?
            let lastChunkSent = true

            let dataStream = new Readable({
                read() {
                    if (!lastChunkSent) return
                    lastChunkSent = false
                    getNextChunk().then(async (nextChunk) => {
                        if (nextChunk == "__ERR") {
                            this.destroy(new Error("file read error"))
                            return
                        }
                        let response = this.push(nextChunk)
                        if (!nextChunk) return // EOF
                        while (response) {
                            let nextChunk = await getNextChunk()
                            response = this.push(nextChunk)
                            if (!nextChunk) return
                        }
                        lastChunkSent = true
                    })
                },
            })

            return dataStream
        } else {
            throw { status: 404, message: "not found" }
        }
    }
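
For context, readFileStream maps the requested byte range onto chunk indices and then onto message indices (ten attachments per message) using the formulas above. A worked example with made-up numbers (the chunk size and range below are illustrative, not taken from any real config):

    // hypothetical values, for illustration only
    const chunkSize = 8 * 1024 * 1024 // 8 MiB per attachment
    const range = { start: 100_000_000, end: 150_000_000 }

    const scan_files_begin = Math.floor(range.start / chunkSize) // 11, start at the 12th chunk
    const scan_files_end = Math.ceil(range.end / chunkSize) - 1 // 17, end at the 18th chunk
    const scan_msg_begin = Math.floor(scan_files_begin / 10) // 1, start at the 2nd message
    const scan_msg_end = Math.ceil(scan_files_end / 10) // 2

The Range header built in getNextChunk then trims the first and last fetched chunks to the exact byte offsets.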
    /**
@@ -430,33 +467,41 @@ export default class Files {
     * @param uploadId Target file's ID
     * @param noWrite Whether or not the change should be written to disk. Enable for bulk deletes
     */
    async unlink(uploadId: string, noWrite: boolean = false): Promise<void> {
        let tmp = this.files[uploadId]
        if (!tmp) {
            return
        }
        if (tmp.owner) {
            let id = files.deindex(tmp.owner, uploadId, noWrite)
            if (id) await id
        }
        // this code deletes the files from discord, btw
        // if need be, replace with job queue system

        if (!this.uploadChannel) {
            return
        }
        for (let x of tmp.messageids) {
            this.uploadChannel.messages
                .delete(x)
                .catch((err) => console.error(err))
        }

        delete this.files[uploadId]
        if (noWrite) {
            return
        }
        return writeFile(
            process.cwd() + "/.data/files.json",
            JSON.stringify(
                this.files,
                null,
                process.env.NODE_ENV === "development" ? 4 : undefined
            )
        ).catch((err) => {
            this.files[uploadId] = tmp // !! this may not work, since tmp is a link to this.files[uploadId]?
            throw err
        })
    }
@@ -465,8 +510,7 @@ export default class Files {
     * @param uploadId Target file's ID
     * @returns FilePointer for the file
     */
    getFilePointer(uploadId: string): FilePointer {
        return this.files[uploadId]
    }
}
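
Since uploadFile, writeFile, readFileStream, and unlink are now plain async methods that throw { status, message } objects (StatusCodeError) instead of wrapping everything in new Promise, callers can use ordinary try/catch. A minimal sketch of a caller; the Files instance, the buffer, and the function name are assumptions for illustration, not part of the commit:

    // minimal sketch, assuming a constructed Files instance and some Buffer
    async function uploadAndRead(files: Files, buffer: Buffer) {
        try {
            const id = (await files.uploadFile(
                { filename: "example.txt", mime: "text/plain" },
                buffer
            )) as string // uploadFile throws on failure, so the result is the new file's ID
            return await files.readFileStream(id)
        } catch (err) {
            // errors arrive as plain { status, message } objects
            console.error(err)
            throw err
        }
    }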

View file

@@ -1,203 +1,238 @@
import bodyParser from "body-parser"
import express, { Router } from "express"
import * as Accounts from "../../../lib/accounts"
import * as auth from "../../../lib/auth"
import axios, { AxiosResponse } from "axios"
import { type Range } from "range-parser"
import multer, { memoryStorage } from "multer"

import ServeError from "../../../lib/errors"
import Files from "../../../lib/files"
import { getAccount, requiresPermissions } from "../../../lib/middleware"

let parser = bodyParser.json({
    type: ["text/plain", "application/json"],
})

export let primaryApi = Router()

const multerSetup = multer({ storage: memoryStorage() })

let config = require(`${process.cwd()}/config.json`)

primaryApi.use(getAccount)

module.exports = function (files: Files) {
    primaryApi.get(
        ["/file/:fileId", "/cpt/:fileId/*", "/:fileId"],
        async (req: express.Request, res: express.Response) => {
            let acc = res.locals.acc as Accounts.Account

            let file = files.getFilePointer(req.params.fileId)
            res.setHeader("Access-Control-Allow-Origin", "*")
            res.setHeader("Content-Security-Policy", "sandbox allow-scripts")
            if (req.query.attachment == "1")
                res.setHeader("Content-Disposition", "attachment")

            if (file) {
                if (file.visibility == "private") {
                    if (acc?.id != file.owner) {
                        ServeError(res, 403, "you do not own this file")
                        return
                    }

                    if (
                        auth.getType(auth.tokenFor(req)) == "App" &&
                        auth
                            .getPermissions(auth.tokenFor(req))
                            ?.includes("private")
                    ) {
                        ServeError(res, 403, "insufficient permissions")
                        return
                    }
                }

                let range: Range | undefined

                res.setHeader("Content-Type", file.mime)
                if (file.sizeInBytes) {
                    res.setHeader("Content-Length", file.sizeInBytes)

                    if (file.chunkSize) {
                        let rng = req.range(file.sizeInBytes)
                        if (rng) {
                            // error handling
                            if (typeof rng == "number") {
                                res.status(rng == -1 ? 416 : 400).send()
                                return
                            }
                            if (rng.type != "bytes") {
                                res.status(400).send()
                                return
                            }

                            // set ranges var
                            let rngs = Array.from(rng)
                            if (rngs.length != 1) {
                                res.status(400).send()
                                return
                            }
                            range = rngs[0]
                        }
                    }
                }

                // supports ranges
                files
                    .readFileStream(req.params.fileId, range)
                    .then(async (stream) => {
                        if (range) {
                            res.status(206)
                            res.header(
                                "Content-Length",
                                (range.end - range.start + 1).toString()
                            )
                            res.header(
                                "Content-Range",
                                `bytes ${range.start}-${range.end}/${file.sizeInBytes}`
                            )
                        }
                        stream.pipe(res)
                    })
                    .catch((err) => {
                        ServeError(res, err.status, err.message)
                    })
            } else {
                ServeError(res, 404, "file not found")
            }
        }
    )

    primaryApi.head(
        ["/file/:fileId", "/cpt/:fileId/*", "/:fileId"],
        (req: express.Request, res: express.Response) => {
            let file = files.getFilePointer(req.params.fileId)

            if (
                file.visibility == "private" &&
                (res.locals.acc?.id != file.owner ||
                    (auth.getType(auth.tokenFor(req)) == "App" &&
                        auth
                            .getPermissions(auth.tokenFor(req))
                            ?.includes("private")))
            ) {
                res.status(403).send()
                return
            }

            res.setHeader("Access-Control-Allow-Origin", "*")
            res.setHeader("Content-Security-Policy", "sandbox allow-scripts")

            if (req.query.attachment == "1")
                res.setHeader("Content-Disposition", "attachment")

            if (!file) {
                res.status(404)
                res.send()
            } else {
                res.setHeader("Content-Type", file.mime)
                if (file.sizeInBytes) {
                    res.setHeader("Content-Length", file.sizeInBytes)
                }
                if (file.chunkSize) {
                    res.setHeader("Accept-Ranges", "bytes")
                }
                res.send()
            }
        }
    )

    // upload handlers

    primaryApi.post(
        "/upload",
        requiresPermissions("upload"),
        multerSetup.single("file"),
        async (req, res) => {
            let acc = res.locals.acc as Accounts.Account

            if (req.file) {
                try {
                    let prm = req.header("monofile-params")
                    let params: { [key: string]: any } = {}
                    if (prm) {
                        params = JSON.parse(prm)
                    }

                    files
                        .uploadFile(
                            {
                                owner: acc?.id,
                                uploadId: params.uploadId,
                                filename: req.file.originalname,
                                mime: req.file.mimetype,
                            },
                            req.file.buffer
                        )
                        .then((uID) => res.send(uID))
                        .catch((stat) => {
                            res.status(stat.status)
                            res.send(`[err] ${stat.message}`)
                        })
                } catch {
                    res.status(400)
                    res.send("[err] bad request")
                }
            } else {
                res.status(400)
                res.send("[err] bad request")
            }
        }
    )

    primaryApi.post(
        "/clone",
        requiresPermissions("upload"),
        bodyParser.json({ type: ["text/plain", "application/json"] }),
        (req, res) => {
            let acc = res.locals.acc as Accounts.Account

            try {
                axios
                    .get(req.body.url, { responseType: "arraybuffer" })
                    .then((data: AxiosResponse) => {
                        files
                            .uploadFile(
                                {
                                    owner: acc?.id,
                                    filename:
                                        req.body.url.split("/")[
                                            req.body.url.split("/").length - 1
                                        ] || "generic",
                                    mime: data.headers["content-type"],
                                    uploadId: req.body.uploadId,
                                },
                                Buffer.from(data.data)
                            )
                            .then((uID) => res.send(uID))
                            .catch((stat) => {
                                res.status(stat.status)
                                res.send(`[err] ${stat.message}`)
                            })
                    })
                    .catch((err) => {
                        console.log(err)
                        res.status(400)
                        res.send(`[err] failed to fetch data`)
                    })
            } catch {
                res.status(500)
                res.send("[err] an error occured")
            }
        }
    )

    return primaryApi
}
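
For illustration, a client call against the /upload route above could look like the following; the host is a placeholder, whatever credential requiresPermissions("upload") expects is omitted, and the snippet assumes a runtime with global fetch, FormData, and Blob (for example Node 18+):

    // hypothetical client-side sketch, not part of this commit
    const form = new FormData()
    form.append("file", new Blob(["hello"], { type: "text/plain" }), "hello.txt")

    const res = await fetch("https://example.invalid/upload", {
        method: "POST",
        headers: { "monofile-params": JSON.stringify({ uploadId: "myfile" }) },
        body: form,
    })
    console.log(await res.text()) // the new upload's ID, or "[err] ..." on failure

The route reads the optional monofile-params header as JSON for the uploadId and takes the file itself from the multipart "file" field handled by multer.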