当前,我正在使用pptxgenjs
创建一个ppt文件并将其导出。 pptExporter文件包含许多其他功能来获取资产和字符串等。但是,当我使用较大的资产(例如图像和视频的总大小大于容器的mem_limit
)时,总会出现内存泄漏,这将杀死该容器。在生成ppt文件之前和之后,我都做了堆转储,有很大的不同。但是以某种方式发送响应后,堆内存仍然很大。是因为我没有正确传输数据吗?但是看来pptxgenjs
在建立ppt时正在照顾资产。
如果未使用大的图像资源或视频,则堆稳定在51mb左右。但是,如果我使用一项资产或许多总大小为300mb的资产,则docker stats显示内存在某一时刻跃升至1.6g。
pptExporter.js的一部分:
/**
 * Builds the course presentation for `project` and resolves with the whole
 * deck as a Node Buffer.
 *
 * @param {Object} project - course tree; title/audience text is read from
 *   `project.properties.*.properties.value.data`
 * @returns {Promise<Buffer>} the complete .pptx file contents
 */
export default async (project) => {
  const pptx = await initPresentation(project);
  // NOTE(review): `slide` is not declared anywhere in this file's visible
  // scope — presumably created by initPresentation; confirm it is in scope.
  slide.addText(project.properties.courseTitle.properties.value.data, slideHelpers.mainTitleOptions);
  await createLessons(pptx, project);
  slide.addText(project.properties.courseAudience.properties.value.data, slideHelpers.mainAudienceOptions);
  // stream() resolves with the ENTIRE deck encoded as one base64 string, and
  // Buffer.from then makes a second binary copy — so peak heap is well over
  // 2x the file size. This matches the observed 1.6 GB spike for ~300 MB of
  // assets. If pptxgenjs offers a nodebuffer/stream output, prefer it here.
  const data = await pptx.stream();
  // Buffer.from replaces the deprecated, security-flagged `new Buffer(...)`.
  return Buffer.from(data, "base64");
};
getCoursePpt.js:
import getPptFile from './pptExporter';
/**
 * Generates the PPTX for `project` and uploads it to S3.
 *
 * @param {Object} project - course tree consumed by the exporter
 * @param {string} fileName - S3 object key ('.pptx' is appended)
 * @param {Object} credentials - { accessKey, secretAccess, name: bucket }
 * @returns {Promise<string>} the uploaded object's URL (S3 `Location`)
 * @throws rethrows any exporter or S3 upload error to the caller
 */
export default async (project, fileName, credentials) => {
  // Debug instrumentation. The original invoked writeSnapshot TWICE per
  // checkpoint (once with a path, once with only a callback), producing two
  // full heap dumps each time — heapdump accepts both arguments in one call,
  // halving the dump overhead that itself inflates memory usage.
  heapdump.writeSnapshot('/' + Date.now() + '.heapsnapshot', (err, filename) => {
    console.log('dump1 written to', filename);
  });

  // The exporter resolves with the complete file as a Buffer, so heap usage
  // here is at least the size of the finished deck, assets included.
  const pptFile = await getPptFile(project);

  heapdump.writeSnapshot('/' + Date.now() + '.heapsnapshot', (err, filename) => {
    console.log('dump2 written to', filename);
  });

  const s3 = new AWS.S3({
    credentials: new AWS.Credentials({
      accessKeyId: credentials.accessKey,
      secretAccessKey: credentials.secretAccess
    })
  });

  const params = {
    Key: `${fileName}.pptx`,
    Bucket: credentials.name,
    Body: pptFile
  };

  // AWS SDK v2 exposes .promise() on requests — no need to hand-wrap the
  // callback in `new Promise` (the original wrapper also silently ignored
  // the `params` argument it was called with). The pointless
  // `try { ... } catch (e) { throw e; }` is dropped: errors propagate as-is.
  const { Location } = await s3.upload(params).promise();
  return Location;
};
然后是处理请求的路由函数:
/**
 * HTTP handler: builds the course PPT, uploads it, and responds with the
 * download link. Expects an authenticated `request.user` and `{ rootId }`
 * in the JSON body.
 */
getPptLink = async (request, response) => {
  const { user, body: { rootId } } = request;
  try {
    // NOTE(review): getCoursePpt above is declared as
    // (project, fileName, credentials) but is called here with (rootId, user)
    // — verify `ppt.getCoursePpt` is a wrapper that maps these arguments.
    const link = await ppt.getCoursePpt(rootId, user);
    response.json({ link });
  } catch (error) {
    // Without this catch a failed export became an unhandled rejection and
    // the client request hung with no response.
    console.error('ppt export failed', error);
    response.status(500).json({ error: 'Failed to generate PPT link' });
  }
  // Single post-response snapshot (the original wrote two dumps here;
  // heapdump accepts the path and callback in one call).
  heapdump.writeSnapshot('/' + Date.now() + '.heapsnapshot', (err, filename) => {
    console.log('dump4 written to', filename);
  });
};