mirror of
https://github.com/mollersuite/monofile.git
synced 2024-11-21 13:36:25 -08:00
still incomplete and probably bad
This commit is contained in:
parent
971ee31d73
commit
5d3fb29c1f
|
@ -256,7 +256,7 @@ export class UploadStream extends Writable {
|
|||
setUploadId(id: string) {
|
||||
if (this.uploadId)
|
||||
return this.destroy( new WebError(400, "duplicate attempt to set upload ID") )
|
||||
if (id.match(id_check_regex)?.[0] != id
|
||||
if (!id || id.match(id_check_regex)?.[0] != id
|
||||
|| id.length > this.files.config.maxUploadIdLength)
|
||||
return this.destroy( new WebError(400, "invalid file ID") )
|
||||
|
||||
|
|
|
@ -1,14 +1,27 @@
|
|||
import { Transform, Duplex } from "node:stream";
|
||||
import { Transform, Readable } from "node:stream";
|
||||
import { TransformCallback } from "stream";
|
||||
|
||||
// Tokenizes a Content-Disposition header value into its parameters:
// matches `key`, `key=value` and `key="quoted value"` (with \" escapes).
// Capture groups: 1 = key, 2 = quoted value, 3 = unquoted value.
// NOTE(review): the original author flagged this regex as dubious; edge
// cases (escaped backslashes, stray semicolons) are unverified.
let content_disposition_matcher = /\s*([^=;]+)(?:=(?:"((?:\\"|[^"])*)"|([^;]*))?;?|;?)/g
|
||||
|
||||
/**
|
||||
* @description Checks if a chunk can be completed by something else (ex. a boundary)
|
||||
* @param chunk Chunk to perform check on
|
||||
* @param cmp Chunk to check whether or not something is completable with
|
||||
* @returns Whether or not this chunk could be completed by cmp
|
||||
*/
|
||||
function endChk(chunk: Buffer, cmp: Buffer) {
|
||||
for (let i = cmp.byteLength-1; i > 0; i--)
|
||||
if (chunk.subarray(-(i-1)).equals(cmp.subarray(0,i)))
|
||||
return true
|
||||
return false
|
||||
}
|
||||
|
||||
export type Headers = {
|
||||
["content-disposition"]?: (string|{key: string, value: string})[],
|
||||
["content-disposition"]?: Record<string, boolean|string>,
|
||||
["content-type"]?: string
|
||||
}
|
||||
|
||||
export class Field extends Duplex {
|
||||
export class Field extends Readable {
|
||||
|
||||
headers: Headers = {}
|
||||
|
||||
|
@ -20,9 +33,28 @@ export class Field extends Duplex {
|
|||
)
|
||||
|
||||
if (this.headers["content-disposition"])
|
||||
this.headers["content-disposition"] = Array.from(
|
||||
this.headers["content-disposition"] = Object.fromEntries(Array.from(
|
||||
(this.headers["content-disposition"] as unknown as string)
|
||||
.matchAll(content_disposition_matcher)).map(e => e[2] ? {key: e[1], value: e[2]} : e[1])
|
||||
.matchAll(content_disposition_matcher)).map(e => [e[1], e[2] ? e[2] : true]))
|
||||
}
|
||||
|
||||
// Readable backpressure hook: called when downstream wants more data.
// There is no internal buffer to pull from; instead we emit a custom
// "hungry" event so the producer (presumably the FormDataParser feeding
// this Field — confirm against the parser) pushes the next chunk.
_read(size: number): void {
|
||||
this.emit("hungry")
|
||||
}
|
||||
|
||||
/**
 * Buffers the entire field into memory and resolves with its bytes.
 * @param maxSize Destroy the stream (rejecting the promise) once the
 *                buffered size exceeds this many bytes; 0 (default) = unlimited.
 * @returns Promise resolving to the concatenated field body.
 *
 * FIX: the original checked the running total BEFORE adding the current
 * chunk and pushed the chunk even after destroying, so a single oversized
 * (or final) chunk could bypass maxSize entirely and keep consuming memory.
 * It also re-summed the whole array on every chunk (accidental O(n²)) with
 * swapped accumulator/element names in the reduce callback.
 */
collect(maxSize: number = 0) {
    return new Promise<Buffer>((res, rej) => {
        let bufs: Buffer[] = []
        let total = 0

        this.on("data", (data: Buffer) => {
            total += data.byteLength
            // Enforce the limit including the chunk just received, and stop
            // buffering once the stream has been destroyed.
            if (maxSize && total > maxSize)
                return void this.destroy(new Error("went above collect()'s maxSize"))
            bufs.push(data)
        })

        this.on("end", () => res(Buffer.concat(bufs)))
        this.on("error", (err) => rej(err))
    })
}
|
||||
|
||||
}
|
||||
|
@ -31,16 +63,19 @@ export default class FormDataParser extends Transform {
|
|||
|
||||
readableObjectMode = true
|
||||
|
||||
boundary: string
|
||||
internalBuffer: Buffer | undefined
|
||||
readonly boundary: string
|
||||
private workingMemory: Buffer | undefined
|
||||
private workingField: Field | undefined
|
||||
|
||||
/**
 * @param boundary The multipart boundary token used to split fields.
 *                 NOTE(review): whether callers pass it with or without the
 *                 leading dashes is not visible here — confirm at call sites.
 */
constructor(boundary: string) {
|
||||
super()
|
||||
this.boundary = boundary
|
||||
}
|
||||
|
||||
_transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback): void {
|
||||
_transform(_chunk: any, encoding: BufferEncoding, callback: TransformCallback): void {
|
||||
|
||||
let chunk = this.workingMemory ? Buffer.concat([this.workingMemory, _chunk]) : _chunk
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -7,7 +7,9 @@ import RangeParser, { type Range } from "range-parser"
|
|||
import ServeError from "../../../lib/errors.js"
|
||||
import Files, { WebError } from "../../../lib/files.js"
|
||||
import { getAccount, requiresPermissions } from "../../../lib/middleware.js"
|
||||
import FormDataParser, { Field } from "../../../lib/formdata.js"
|
||||
import {Readable} from "node:stream"
|
||||
import {ReadableStream as StreamWebReadable} from "node:stream/web"
|
||||
export let primaryApi = new Hono<{
|
||||
Variables: {
|
||||
account: Accounts.Account
|
||||
|
@ -95,19 +97,45 @@ export default function (files: Files) {
|
|||
primaryApi.post(
|
||||
"/upload",
|
||||
requiresPermissions("upload"),
|
||||
async (ctx) => {
|
||||
(ctx) => { return new Promise((resolve,reject) => {
|
||||
let acc = ctx.get("account") as Accounts.Account
|
||||
|
||||
if (!ctx.req.header("Content-Type")?.startsWith("multipart/form-data")) {
|
||||
ctx.status(400)
|
||||
return ctx.body("[err] must be multipart/form-data")
|
||||
resolve(ctx.body("[err] must be multipart/form-data"))
|
||||
}
|
||||
|
||||
if (!ctx.req.raw.body) {
|
||||
ctx.status(400)
|
||||
return ctx.body("[err] body must be supplied")
|
||||
resolve(ctx.body("[err] body must be supplied"))
|
||||
}
|
||||
}
|
||||
|
||||
let file = files.createWriteStream(acc.id)
|
||||
let formDataParser = new FormDataParser('')
|
||||
|
||||
Readable.fromWeb(ctx.req.raw.body as StreamWebReadable)
|
||||
.pipe(formDataParser)
|
||||
.on("data", async (field: Field) => {
|
||||
if (field.headers["content-disposition"]?.filename) {
|
||||
field.pipe(file)
|
||||
} else {
|
||||
switch(field.headers["content-disposition"]?.name) {
|
||||
case "uploadId":
|
||||
file.setUploadId((await field.collect(65536).catch(e => {formDataParser.destroy(new WebError(413, e.message))}))?.toString() || "")
|
||||
}
|
||||
}
|
||||
})
|
||||
.on("end", async () => {
|
||||
if (!file.writableEnded) await new Promise((res, rej) => {file.once("finish", res); file.once("error", res)})
|
||||
if (file.errored || !(await file.commit().catch(e => file.error = e))) {
|
||||
ctx.status(file.error instanceof WebError ? file.error.statusCode : 500)
|
||||
resolve(`[err] ${file.error instanceof WebError ? file.error.message : file.error?.toString()}`)
|
||||
return
|
||||
}
|
||||
|
||||
resolve(ctx.body(file.uploadId!))
|
||||
})
|
||||
})}
|
||||
)
|
||||
/*
|
||||
primaryApi.post(
|
||||
|
|
Loading…
Reference in a new issue