我有一个接收文件的端点,并创建了一个后台任务,用于将这些文件上传到S3。
为了在后台上传文件,我使用了 Agenda(https://github.com/agenda/agenda)。唯一的限制是我需要以 MongoDB 支持的格式存储文件(这是 Agenda 在幕后使用的存储方式)。为此,我先将文件转换为 Buffer,然后再将其交给 Agenda。
这是我的代码:
Mutation: {
batchCreateProgressPics: combineResolvers(
isAuthenticated,
async (parent, { pics }, { models, currentUser }) => {
return await Promise.all(
pics.map(async (pic, i) => {
const { file, bodyPart, localPath } = pic;
const { createReadStream } = await file;
const stream = createReadStream();
console.log("Setting up buffer...");
const buffer = await new Promise((resolve, reject) => {
var buffers = [];
stream.on("data", function(data) {
buffers.push(data);
});
stream.on("end", function() {
const everything = Buffer.concat(buffers);
resolve(everything);
});
stream.on("error", function(e) {
reject(e);
});
});
const progressPic = await models.ProgressPic.create({
bodyPart,
user: currentUser.id,
url: localPath,
});
console.log("Creating backgruond task...");
Agenda.now("uploadProgressPic", {
userId: currentUser.id,
progressPicId: progressPic.id,
filename: `${progressPic.id}-${bodyPart}.jpg`,
buffer,
});
console.log("Done.");
return progressPic;
})
);
}
),
},
这在我的本地开发服务器上运行得很快,但在生产环境中,由于要构建缓冲区,耗时非常长——console.log("Setting up buffer...")
之后的那几行代码要花很长时间才能完成。
我想做的是:立即返回 progressPic,让缓冲区的构建和上传针对
pics
中的每个元素都在后台进行。这可能吗?
============更新=========
因此,如果我不对这个 Promise 使用 await
,它就会报错,提示请求在缓冲区构建完成之前连接就断开了:
// Reads the whole upload into memory, then queues an Agenda job that
// performs the actual S3 upload. All failures are logged, never thrown.
const uploadProgressPic = async ({ file, progressPicId, userId, bodyPart }) => {
  try {
    const upload = await file;
    const stream = upload.createReadStream();
    console.log("Setting up buffer...");

    // Collect every chunk, then concatenate once the stream ends.
    const chunks = [];
    const buffer = await new Promise((resolve, reject) => {
      stream.on("data", (chunk) => chunks.push(chunk));
      stream.on("end", () => resolve(Buffer.concat(chunks)));
      stream.on("error", reject);
    });

    console.log("Done.");
    console.log("Creating backgruond task...");
    // Hand the Buffer to the background job queue; not awaited.
    Agenda.now("uploadProgressPic", {
      userId,
      progressPicId,
      filename: `${progressPicId}-${bodyPart}.jpg`,
      buffer,
    });
  } catch (error) {
    console.log("ERROR OCCURRED: ", error);
  }
};
// GraphQL mutation resolver: creates one ProgressPic record per input pic and
// kicks off uploadProgressPic without awaiting it (fire-and-forget).
// NOTE(review): because uploadProgressPic is not awaited, the HTTP request can
// finish (and graphql-upload tear down its multipart stream) before the file
// has been read — the likely cause of the
// "Request disconnected during file upload stream parsing" error below.
export default {
Mutation: {
batchCreateProgressPics: combineResolvers(
isAuthenticated,
async (parent, { pics }, { models, currentUser }) => {
// Returns an array of promises; the GraphQL layer awaits list members.
return pics.map(async (pic, i) => {
const { file, bodyPart, localPath } = pic;
// Create the DB record first, using the client-side path as a
// placeholder URL until the S3 upload completes.
const progressPic = await models.ProgressPic.create({
bodyPart,
user: currentUser.id,
url: localPath,
});
// Intentionally not awaited so the mutation can respond immediately.
uploadProgressPic({
file,
progressPicId: progressPic.id,
userId: currentUser.id,
bodyPart,
});
return progressPic;
});
}
),
},
};
错误:
ERROR OCCURRED: BadRequestError: Request disconnected during file upload stream parsing.
at IncomingMessage.<anonymous> (/Users/edmundmai/Documents/src/acne-tracker/server/node_modules/graphql-upload/lib/processRequest.js:300:35)
at Object.onceWrapper (events.js:291:20)
at IncomingMessage.emit (events.js:203:13)
at IncomingMessage.EventEmitter.emit (domain.js:471:20)
at resOnFinish (_http_server.js:614:7)
at ServerResponse.emit (events.js:208:15)
at ServerResponse.EventEmitter.emit (domain.js:471:20)
at onFinish (_http_outgoing.js:649:10)
at onCorkedFinish (_stream_writable.js:678:5)
at afterWrite (_stream_writable.js:483:3)
at processTicksAndRejections (internal/process/task_queues.js:77:11) {
message: 'Request disconnected during file upload stream parsing.',
expose: true,
statusCode: 499,
status: 499
}
==========更新2 ============
即使试图1)简化它和2)将createReadStream()
移到uploadProgressPic
之外,也会显示相同的错误:
// Streams a progress pic to S3 and, on success, stores the resulting S3
// location (url/key/bucket) on the ProgressPic document.
// Errors are logged and swallowed — this is a fire-and-forget helper.
const uploadProgressPic = async ({
  stream,
  progressPicId,
  userId,
  bodyPart,
  models,
}) => {
  try {
    console.log("Uploading to S3...");
    const result = await S3.upload({
      stream,
      folder: userId,
      filename: `${progressPicId}-${bodyPart}.jpg`,
    });
    const { Location: url, Key: key, Bucket: bucket } = result;
    // Only persist when S3 returned a complete location.
    if (!url || !key || !bucket) return;
    await models.ProgressPic.findOneAndUpdate(
      { _id: progressPicId },
      { $set: { url, key, bucket } },
      { new: true, useFindAndModify: false }
    );
    console.log("Done!");
  } catch (error) {
    console.log("ERROR OCCURRED: ", error);
  }
};
// (Update 2) Same fire-and-forget shape, but createReadStream() is called in
// the resolver and only the stream is handed to uploadProgressPic.
// NOTE(review): the request can still end (and graphql-upload abort the
// stream) before S3.upload has finished reading it — hence the same
// "Request disconnected" error reported below.
export default {
Mutation: {
batchCreateProgressPics: combineResolvers(
isAuthenticated,
async (parent, { pics }, { models, currentUser }) => {
// Array of promises; each resolves to the created ProgressPic.
return pics.map(async (pic, i) => {
const { file, bodyPart, localPath } = pic;
// Placeholder record pointing at the client-local path.
const progressPic = await models.ProgressPic.create({
bodyPart,
user: currentUser.id,
url: localPath,
});
const { createReadStream } = await file;
const stream = createReadStream();
// Intentionally not awaited so the mutation can return immediately.
uploadProgressPic({
stream,
progressPicId: progressPic.id,
userId: currentUser.id,
bodyPart,
models,
});
return progressPic;
});
}
),
},
};
错误:
Uploading to S3...
Uploading to S3...
Uploading to S3...
ERROR OCCURRED: BadRequestError: Request disconnected during file upload stream parsing.
at IncomingMessage.<anonymous> (/Users/edmundmai/Documents/src/acne-tracker/server/node_modules/graphql-upload/lib/processRequest.js:300:35)
at Object.onceWrapper (events.js:291:20)
at IncomingMessage.emit (events.js:203:13)
at IncomingMessage.EventEmitter.emit (domain.js:471:20)
at resOnFinish (_http_server.js:614:7)
at ServerResponse.emit (events.js:208:15)
at ServerResponse.EventEmitter.emit (domain.js:471:20)
at onFinish (_http_outgoing.js:649:10)
at onCorkedFinish (_stream_writable.js:678:5)
at afterWrite (_stream_writable.js:483:3)
at processTicksAndRejections (internal/process/task_queues.js:77:11) {
message: 'Request disconnected during file upload stream parsing.',
expose: true,
statusCode: 499,
status: 499
}
Done!
有趣的是,即使它报了错,我仍然能在日志中看到几条 Done!
,这是为什么?
答案 0 :(得分:0)
不是该主题的专家,但我有一个可行的想法和一个理论:
想法:如果您要处理的图像数量过多,则可能是由于等待Promise.all()引起的问题。我建议您使用 async 库中的 parallelLimit 来限制一次执行的并行函数数量,否则会出现性能问题。
理论:也许您可以在每次使用Buffer
之后释放内存分配,以避免出现内存泄漏问题并使服务器性能更高。
如果我仍然错了,请纠正我。我本人对此问题的结果感兴趣。
答案 1 :(得分:0)
不要 await 这个 Promise。
// (Answer) Build the Buffer without awaiting it: attach .then/.catch so the
// resolver can return the ProgressPic right away while buffering continues.
new Promise((resolve, reject) => {
var buffers = [];
stream.on("data", function(data) {
buffers.push(data);
});
stream.on("end", function() {
// All chunks received; hand the concatenated Buffer to the then-callback.
const everything = Buffer.concat(buffers);
resolve(everything);
});
stream.on("error", function(e) {
reject(e);
});
}).then((buffer) => {
// Queue the background upload once the Buffer is ready.
Agenda.now("uploadProgressPic", {
userId: currentUser.id,
progressPicId: progressPic.id,
filename: `${progressPic.id}-${bodyPart}.jpg`,
buffer,
});
}).catch((error) => {
// Clean up here
});
// Respond immediately; the buffering above keeps running in the background.
return models.ProgressPic.create({
bodyPart,
user: currentUser.id,
url: localPath,
});
这样,您将开始创建缓冲区,但实际上不会等待该代码执行,而是会立即创建ProgressPic实例并返回它。由于对Agenda.now
的调用需要Promise的解析值,因此我们将其保留在then
回调中。请注意,也必须附加一个catch
,如果不添加,可能会导致未处理的拒绝。
您可能要使用catch
回调来记录错误并进行其他清理。例如,您可能想删除已创建的ProgressPic(在这种情况下,应将create
调用移至缓冲区Promise上方,以便您可以引用创建的实例)。
如果您像我一样,每次必须键入then
时都会在内部死掉一点,则可以将所有逻辑提取到单独的函数中:
// Sketch (from the answer, parameters and bodies elided with "..."):
// wrap the buffer + Agenda work in one async function with its own
// try/catch so the caller can fire-and-forget it safely.
const uploadProgressPic = async (/* parameters omitted for brevity */) => {
try {
const buffer = await new Promise(...)
Agenda.now(...)
} catch (error) {
// Do whatever
}
}
,然后再次在解析器中调用它,不用等待:
// Fire-and-forget: start the upload, then immediately create and return
// the ProgressPic without waiting for the buffer/upload to finish.
uploadProgressPic()
return models.ProgressPic.create({
bodyPart,
user: currentUser.id,
url: localPath,
});
答案 2 :(得分:0)
我尝试了各种最终以失败告终的方法,因为由于某种原因创建缓冲区的速度太慢了。我最终可行的最终解决方案是将上传分为两个请求:
后端:
请求1:使用本地文件路径作为URL,创建进度图片 请求2:上传文件并更新进度图片
import { combineResolvers } from "graphql-resolvers";
import { isAuthenticated } from "./authorization";
import S3 from "../services/s3";
// Final two-request design:
//   Request #1 (createProgressPics): create records only — fast.
//   Request #2 (updateProgressPics): transfer files and patch the records.
export default {
  Query: {
    // All progress pics belonging to the authenticated user.
    progressPics: combineResolvers(
      isAuthenticated,
      async (parent, args, { models, currentUser }) => {
        return await models.ProgressPic.find({ user: currentUser.id });
      }
    ),
  },
  Mutation: {
    // Request #1: create the ProgressPic records immediately, using the
    // client's local file path as a placeholder URL. No file data is
    // transferred here, so this mutation responds quickly.
    createProgressPics: combineResolvers(
      isAuthenticated,
      async (parent, { pics }, { models, currentUser }) => {
        // Returns an array of promises; the GraphQL layer awaits each
        // list member before serializing the response.
        // Fix: removed an unreachable `return progressPic;` that followed
        // the return below and referenced an undefined variable.
        return pics.map(async (pic, i) => {
          const { bodyPart, localPath } = pic;
          return await models.ProgressPic.create({
            bodyPart,
            user: currentUser.id,
            url: localPath,
          });
        });
      }
    ),
    // Request #2: receive the actual files, upload each to S3, and point
    // the previously created ProgressPic at its final S3 location.
    updateProgressPics: combineResolvers(
      isAuthenticated,
      async (parent, { pics }, { models, currentUser }) => {
        return pics.map(async (pic, i) => {
          const { file, filename, progressPicId } = pic;
          const { createReadStream } = await file;
          const stream = createReadStream();
          const { Location: url, Key: key, Bucket: bucket } = await S3.upload({
            stream,
            filename,
            folder: currentUser.id,
          });
          return await models.ProgressPic.findOneAndUpdate(
            { _id: progressPicId },
            { $set: { url, key, bucket } },
            { new: true, useFindAndModify: false }
          );
        });
      }
    ),
  },
};
然后,前端将等待请求#1的响应,并发送请求#2,但忽略响应,因此它可以立即返回。
// Two-step save: wait for the pic records to be created (request #1), then
// fire the file upload (request #2) without awaiting its response, so the
// UI can finish and navigate back immediately.
const createAndUploadProgressPics = async (photos) => {
  const createVariables = {
    pics: photos.map((photo, index) => ({
      bodyPart: BODY_PARTS[index],
      localPath: photo.uri,
    })),
  };
  const response = await createProgressPics({ variables: createVariables });
  const progressPics = response.data.createProgressPics;

  // Deliberately not awaited: the upload response is ignored.
  updateProgressPics({
    variables: {
      pics: progressPics.map(({ id, bodyPart }, index) => ({
        progressPicId: id,
        filename: `${id}-${bodyPart}.jpg`,
        file: photos[index],
      })),
    },
  });

  onFinish(progressPics);
  navigation.goBack();
};