mirror of
https://github.com/mollersuite/monofile.git
synced 2024-11-24 22:56:26 -08:00
RANGE HEADER 🎉 I AM SO UNBELIEVABLY HAPPY
This commit is contained in:
parent 9bb5babd1d
commit 72efec9f6d
@@ -15,6 +15,7 @@ import * as Accounts from "./lib/accounts"
 import { authRoutes, auth_setFilesObj } from "./routes/authRoutes";
 import { fileApiRoutes, setFilesObj } from "./routes/fileApiRoutes";
 import { adminRoutes, admin_setFilesObj } from "./routes/adminRoutes";
+import { Range } from "range-parser";

 require("dotenv").config()

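For context on the new import: `Range` is the parsed-range shape from range-parser, which express's `req.range()` wraps. A minimal sketch of the relevant typings (paraphrased from @types/range-parser; not part of this commit):

    // shapes paraphrased from @types/range-parser
    interface Range { start: number; end: number }          // inclusive byte offsets
    interface Ranges extends Array<Range> { type: string }  // e.g. type == "bytes"
    // req.range(size) returns Ranges on success, -1 (unsatisfiable),
    // -2 (malformed), or undefined when no Range header was sent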
@@ -187,36 +188,46 @@ let fgRQH = async (req:express.Request,res:express.Response) => {
         return
     }

     // todo: make readfilestream just the stream since we already have filepointer
-    files.readFileStream(req.params.fileId).then(async f => {
-        res.setHeader("Content-Type",f.contentType)
-        if (f.byteSize) {
-            res.setHeader("Content-Length",f.byteSize)
-        }
+    let range: Range | undefined

-        if (f.byteSize && req.range(f.byteSize)) {
-            // range header implementation
-            // todo : make this better (or actually work if i dont manage to finish it)
-            let ranges = req.range(f.byteSize)
-            if (typeof ranges == "number" || !ranges) { res.status(400); res.send(); return }
-            let fsds = f.dataStream;
-            res.status(206);
+    res.setHeader("Content-Type",file.mime)
+    if (file.sizeInBytes) {
+        res.setHeader("Content-Length",file.sizeInBytes)
+
+        if (file.chunkSize) {
+            let rng = req.range(file.sizeInBytes)
+            if (rng) {

-            let bytePosition = 0
+                // error handling
+                if (typeof rng == "number") {
+                    res.status(rng == -1 ? 416 : 400).send()
+                    return
+                }
+                if (rng.type != "bytes") {
+                    res.status(400).send();
+                    return
+                }

-            console.log(ranges.type)

-            for await(let x of fsds) {
+                // set ranges var
+                let rngs = Array.from(rng)
+                if (rngs.length != 1) { res.status(400).send(); return }
+                range = rngs[0]

-                let curRanges = [ bytePosition, bytePosition+x.byteLength()-1 ]
-                bytePosition+= x.byteLength

-            }
-        } else {
-            res.status(200)
-            f.dataStream.pipe(res)
-        }
+            }
+        }
+    }

+    // supports ranges
+    files.readFileStream(req.params.fileId, range).then(async stream => {
+        if (range) {
+            res.status(206)
+            res.header("Content-Length", (range.end-range.start + 1).toString())
+            res.header("Content-Range", `bytes ${range.start}-${range.end}/${file.sizeInBytes}`)
+        }
+        stream.pipe(res)

     }).catch((err) => {
         ServeError(res,err.status,err.message)
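The numeric sentinels explain the 416-vs-400 split above: range-parser returns -1 when the range cannot be satisfied (start beyond the file size) and -2 when the header is malformed. A standalone sketch of the same validation, with a hypothetical helper name not found in the commit:

    import parseRange from "range-parser" // assumes esModuleInterop

    // hypothetical helper mirroring the handler's checks
    function pickSingleRange(size: number, header: string) {
        let parsed = parseRange(size, header)
        if (parsed == -1) return { status: 416 }            // unsatisfiable
        if (parsed == -2) return { status: 400 }            // malformed header
        if (parsed.type != "bytes") return { status: 400 }  // only byte units
        let ranges = Array.from(parsed)
        if (ranges.length != 1) return { status: 400 }      // no multipart ranges
        return { range: ranges[0] }                         // {start, end}, inclusive
    }

    // pickSingleRange(1000, "bytes=0-499") -> { range: { start: 0, end: 499 } }
    // pickSingleRange(1000, "bytes=2000-") -> { status: 416 }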
@@ -229,6 +240,23 @@ let fgRQH = async (req:express.Request,res:express.Response) => {

 }

+let fgwh = (req: express.Request, res:express.Response) => {
+    let file = files.getFilePointer(req.params.fileId)
+
+    if (!file) {
+        res.status(404)
+        res.send()
+    } else {
+        res.setHeader("Content-Type",file.mime)
+        if (file.sizeInBytes) {
+            res.setHeader("Content-Length",file.sizeInBytes)
+        }
+        if (file.chunkSize) {
+            res.setHeader("Accept-Ranges", "bytes")
+        }
+    }
+}

 app.get("/server",(req,res) => {
     res.send(JSON.stringify({
         ...config,
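With the HEAD handler advertising Accept-Ranges, a client can probe range support before issuing a partial GET. A rough client-side sketch (Node 18+/browser fetch; host and file id are placeholders):

    // hypothetical client probe; the URL is a placeholder
    let head = await fetch("https://example.com/file/abc123", { method: "HEAD" })
    if (head.headers.get("accept-ranges") == "bytes") {
        // safe to follow up with e.g. { headers: { Range: "bytes=0-1048575" } }
    }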
@@ -240,6 +268,9 @@ app.get("/server",(req,res) => {
 app.get("/file/:fileId",fgRQH)
 app.get("/:fileId",fgRQH)

+app.head("/file/:fileId",fgwh)
+app.head("/:fileId",fgwh)
+
 /*
     routes should be in this order:

@@ -50,7 +50,8 @@ export interface FilePointer {
     sizeInBytes?:number,
     tag?:string,
     visibility?:FileVisibility,
-    reserved?: boolean
+    reserved?: boolean,
+    chunkSize?: number
 }

 export interface StatusCodeError {
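With the optional `chunkSize` recorded per upload, a stored pointer might look roughly like this (values are illustrative, and the required fields are assumed from the parts of the interface above this hunk):

    // illustrative FilePointer value, not taken from the commit
    let pointer: FilePointer = {
        filename: "video.mp4",
        mime: "video/mp4",
        messageids: ["1100000000000000000"],
        sizeInBytes: 20_000_000,
        chunkSize: 8_388_608 // this.config.maxDiscordFileSize at upload time
    }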
@@ -147,7 +148,9 @@ export default class Files {

             owner:settings.owner,
             visibility: settings.owner ? "private" : "public",
-            reserved: true
+            reserved: true,
+
+            chunkSize: this.config.maxDiscordFileSize
         }

         // save
@@ -228,7 +231,9 @@ export default class Files {
                 : undefined
             ),
             // so that json.stringify doesnt include tag:undefined
-            ...((ogf||{}).tag ? {tag:ogf.tag} : {})
+            ...((ogf||{}).tag ? {tag:ogf.tag} : {}),
+
+            chunkSize: this.config.maxDiscordFileSize
         }
         ))
@@ -260,7 +265,7 @@ export default class Files {

     // todo: move read code here

-    readFileStream(uploadId: string):Promise<{dataStream:Readable,contentType:string,byteSize?:number}> {
+    readFileStream(uploadId: string, range?: {start:number, end:number}):Promise<Readable> {
         return new Promise(async (resolve,reject) => {
             if (!this.uploadChannel) {
                 reject({status:503,message:"server is not ready - please try again later"})
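Callers now receive the `Readable` directly and supply the byte window up front; the content type and size come from the `FilePointer` instead of the resolved object. A minimal call-site sketch (the id is a placeholder):

    // before: let { dataStream, contentType, byteSize } = await files.readFileStream(id)
    // after:
    let stream = await files.readFileStream("abc123", { start: 0, end: 1_048_575 })
    stream.pipe(res) // the caller sets the 206 status and Content-Range header itself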
@@ -274,18 +279,50 @@ export default class Files {
                     read(){}
                 })

-                resolve({
-                    contentType: file.mime,
-                    dataStream: dataStream,
-                    byteSize: file.sizeInBytes
-                })
+                resolve(dataStream)

+                let
+                    scan_msg_begin = 0,
+                    scan_msg_end = file.messageids.length,
+                    scan_files_begin = 0,
+                    scan_files_end = -1

+                let useRanges = range && file.chunkSize && file.sizeInBytes;

+                // todo: figure out how to get typescript to accept useRanges
+                // i'm too tired to look it up or write whatever it wants me to do
+                if (range && file.chunkSize && file.sizeInBytes) {

+                    // Calculate where to start file scans...

+                    scan_files_begin = Math.floor(range.start / file.chunkSize)
+                    scan_files_end = Math.ceil(range.end / file.chunkSize) - 1

+                    scan_msg_begin = Math.floor(scan_files_begin / 10)
+                    scan_msg_end = Math.ceil(scan_files_end / 10)

+                }

-                for (let i = 0; i < file.messageids.length; i++) {
-                    let msg = await this.uploadChannel.messages.fetch(file.messageids[i]).catch(() => {return null})
+                for (let xi = scan_msg_begin; xi < scan_msg_end; xi++) {

+                    let msg = await this.uploadChannel.messages.fetch(file.messageids[xi]).catch(() => {return null})
                     if (msg?.attachments) {

                         let attach = Array.from(msg.attachments.values())
-                        for (let i = 0; i < attach.length; i++) {
-                            let d = await axios.get(attach[i].url,{responseType:"arraybuffer"}).catch((e:Error) => {console.error(e)})
+                        for (let i = (useRanges && xi == scan_msg_begin ? scan_files_begin : 0); i < (useRanges && xi == scan_msg_end ? scan_files_end : attach.length); i++) {

+                            let d = await axios.get(
+                                attach[i].url,
+                                {
+                                    responseType:"arraybuffer",
+                                    headers: {
+                                        ...(useRanges ? {
+                                            "Range": `bytes=${i+(xi*10) == scan_files_begin && range && file.chunkSize ? range.start-(scan_files_begin*file.chunkSize) : "0"}-${i+(xi*10) == scan_files_end && range && file.chunkSize ? range.end-(scan_files_end*file.chunkSize) : ""}`
+                                        } : {})
+                                    }
+                                }
+                            ).catch((e:Error) => {console.error(e)})

                             if (d) {
                                 dataStream.push(d.data)
                             } else {
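The scan-window math assumes each Discord message carries up to 10 attachments of at most `chunkSize` bytes each. A worked example with illustrative numbers, using the same formulas as the hunk above:

    // 8 MiB chunks, request for bytes 20,000,000-30,000,000
    let chunkSize = 8_388_608
    let range = { start: 20_000_000, end: 30_000_000 }

    let scan_files_begin = Math.floor(range.start / chunkSize)  // 2 -> third chunk
    let scan_files_end = Math.ceil(range.end / chunkSize) - 1   // 3 -> fourth chunk

    let scan_msg_begin = Math.floor(scan_files_begin / 10)      // 0 -> first message
    let scan_msg_end = Math.ceil(scan_files_end / 10)           // 1 -> scan one message

    // offset into the first chunk, as used for the Range header's start value:
    let startOffset = range.start - scan_files_begin * chunkSize // 3,222,784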
@@ -293,8 +330,11 @@ export default class Files {
                                 dataStream.destroy(new Error("file read error"))
                                 return
                             }

                         }

                     }

                 }

                 dataStream.push(null)