I'm still generally trying to wrap my head around streams. I've been able to stream large files with multiparty using form.on('part'), but I need to defer the invocation and parse the form before the stream is read. I've tried PassThrough, through, and through2, and I get different results, mostly that it just hangs, and I don't know what to try next or how to go about debugging it. I'm open to any alternative. Thanks for any insight.
import multiparty from 'multiparty'
import { PassThrough } from 'stream'
import through from 'through'
import through2 from 'through2'
import qs from 'qs' // assumed: the 'qs' package, needed for qs.parse() below

// appendQStr is the author's own query-string helper, defined elsewhere

export function promisedMultiparty(req) {
  return new Promise((resolve, reject) => {
    const form = new multiparty.Form()
    const form_files = []
    let q_str = ''

    form.on('field', (fieldname, value) => {
      if (value) q_str = appendQStr(fieldname, value, q_str)
    })

    form.on('part', (part) => {
      if (part.filename) {
        const pass1 = new PassThrough() // this hangs at 10%

        const pass2 = through(function write(data) { // this hangs from the beginning
          this.queue(data)
        },
        function end() {
          this.queue(null)
        })

        const pass3 = through2() // this hangs at 10%

        /*
        // This way works for large files, but I want to defer
        // invocation
        const form_data = new FormData()
        form_data.append(savepath, part, {
          filename,
        })
        const r = request.post(url, {
          headers: {
            'transfer-encoding': 'chunked'
          }
        }, responseCallback(resolve))
        r._form = form
        */

        form_files.push({
          part: part.pipe(pass1),
          // part: part.pipe(pass2),
          // part: part.pipe(pass3),
        })
      } else {
        part.resume()
      }
    })

    // surface parser errors so the promise does not hang silently
    form.on('error', reject)

    form.on('close', () => {
      resolve({
        fields: qs.parse(q_str),
        forms: form_files,
      })
    })

    form.parse(req)
  })
}
p.s. If anyone can suggest the proper terminology, I'm sure a better title could be used. Thanks.
Answer 0 (score: 1)
I believe this is because you're not using through2 correctly: you never actually empty the buffer once it fills up (which is why it hangs at 10% on larger files, and hangs on smaller files too).
I believe an implementation like this should do it:
const pass2 = through2(function (chunk, encoding, next) {
  // do something with the data

  // Use this only if you want to send the data further to another stream reader
  // Note - from your implementation you don't seem to need it
  // this.push(chunk)

  // This is what tells through2 it's ready to empty its
  // buffer and read more data
  next()
})
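For example, here is a minimal sketch of how such a transform could be wired into your form.on('part') handler, assuming a hypothetical uploadLater consumer that runs after the promise resolves (that name is not part of your code, it's only for illustration). The important part is that whatever eventually receives form_files has to actually drain each stream (pipe it somewhere or read it); if the pushed data is never consumed, the internal buffer fills up again and the request stalls just like before:

form.on('part', (part) => {
  if (part.filename) {
    const pass = through2(function (chunk, encoding, next) {
      this.push(chunk) // forward the bytes to whatever reads this stream later
      next()           // tell through2 it can accept the next chunk
    })
    form_files.push({
      filename: part.filename,
      stream: part.pipe(pass),
    })
  } else {
    part.resume()
  }
})

// Later, once promisedMultiparty(req) has resolved:
// const { forms } = await promisedMultiparty(req)
// for (const { filename, stream } of forms) {
//   await uploadLater(filename, stream) // must consume the stream, or it backs up again
// }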