Mirror of https://github.com/mollersuite/monofile.git (synced 2024-11-24 22:56:26 -08:00)
hopefully implement backpressure
parent 81378fc0dc
commit 486fb6912e
@@ -275,12 +275,6 @@ export default class Files {
             if (this.files[uploadId]) {
                 let file = this.files[uploadId]
 
-                let dataStream = new Readable({
-                    read(){}
-                })
-
-                resolve(dataStream)
-
                 let
                     scan_msg_begin = 0,
                     scan_msg_end = file.messageids.length-1,
@@ -303,6 +297,8 @@ export default class Files {
 
                 }
 
+                let attachments: Discord.Attachment[] = [];
+
                 for (let xi = scan_msg_begin; xi < scan_msg_end+1; xi++) {
 
                     let msg = await this.uploadChannel.messages.fetch(file.messageids[xi]).catch(() => {return null})
@@ -311,33 +307,58 @@ export default class Files {
                         let attach = Array.from(msg.attachments.values())
                         for (let i = (useRanges && xi == scan_msg_begin ? ( scan_files_begin - (xi*10) ) : 0); i < (useRanges && xi == scan_msg_end ? ( scan_files_end - (xi*10) + 1 ) : attach.length); i++) {
 
-                            let d = await axios.get(
-                                attach[i].url,
-                                {
-                                    responseType:"arraybuffer",
-                                    headers: {
-                                        ...(useRanges ? {
-                                            "Range": `bytes=${i+(xi*10) == scan_files_begin && range && file.chunkSize ? range.start-(scan_files_begin*file.chunkSize) : "0"}-${i+(xi*10) == scan_files_end && range && file.chunkSize ? range.end-(scan_files_end*file.chunkSize) : ""}`
-                                        } : {})
-                                    }
-                                }
-                            ).catch((e:Error) => {console.error(e)})
-
-                            if (d) {
-                                dataStream.push(d.data)
-                            } else {
-                                reject({status:500,message:"internal server error"})
-                                dataStream.destroy(new Error("file read error"))
-                                return
-                            }
-
-                        }
-
-                    }
-
-                }
-
-                dataStream.push(null)
+                            attachments.push(attach[i])
+
+                        }
+
+                    }
+
+                }
+
+                let position = 0;
+
+                let getNextChunk = async () => {
+                    let scanning_chunk = attachments[position]
+                    if (!scanning_chunk) {
+                        return null
+                    }
+
+                    let d = await axios.get(
+                        scanning_chunk.url,
+                        {
+                            responseType:"arraybuffer",
+                            headers: {
+                                ...(useRanges ? {
+                                    "Range": `bytes=${position == 0 && range && file.chunkSize ? range.start-(scan_files_begin*file.chunkSize) : "0"}-${position == attachments.length-1 && range && file.chunkSize ? range.end-(scan_files_end*file.chunkSize) : ""}`
+                                } : {})
+                            }
+                        }
+                    ).catch((e:Error) => {console.error(e)})
+
+                    position++;
+
+                    if (d) {
+                        return d.data
+                    } else {
+                        reject({status:500,message:"internal server error"})
+                        return "__ERR"
+                    }
+                }
+
+                let dataStream = new Readable({
+                    read(){
+                        getNextChunk().then(async (nextChunk) => {
+                            if (nextChunk == "__ERR") {this.destroy(new Error("file read error")); return}
+                            let response = this.push(nextChunk)
+
+                            while (response) {
+                                response = this.push(await getNextChunk())
+                            }
+                        })
+                    }
+                })
+
+                resolve(dataStream)
 
             } else {
                 reject({status:404,message:"not found"})
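Note: the old code resolved an empty Readable immediately, then fetched every Discord attachment in sequence and pushed each response into the stream whether or not the consumer was keeping up, so a slow client could leave the whole file buffered in memory. The new code only records the attachment list; downloads happen inside read(), which Node calls when the consumer wants more data, and the while (response) loop keeps pushing until push() returns false (the internal buffer hit its high-water mark), after which fetching pauses until the next read(). That pause is the backpressure named in the commit message. A minimal sketch of the same pull-based Readable idea, reduced to one chunk per read() call (fetchChunk and makeChunkStream are hypothetical names, not monofile code):

    import { Readable } from "node:stream"

    // fetchChunk resolves with the next chunk, or null once no chunks remain.
    function makeChunkStream(fetchChunk: () => Promise<Buffer | null>): Readable {
        return new Readable({
            read() {
                // Node only calls read() while the consumer can take more data,
                // so chunks are fetched on demand instead of all up front.
                fetchChunk()
                    .then((chunk) => this.push(chunk)) // push(null) ends the stream
                    .catch((err) => this.destroy(err as Error))
            }
        })
    }

Piping such a stream into an HTTP response then reads from the source only as fast as the socket drains.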