我正在AWS上编写一个NodeJS应用,该应用执行以下操作:
代码在我的本地节点实例上运行良好。整个过程通常不到3秒即可完成。我可以通过检查AWS s3存储桶和DynamoDB来确认这一点。
但是,当我将其打包交由Lambda运行时,即使超时设置为5分钟、内存也调到了最大,该操作仍会被终止。在检查日志时,我注意到在Lambda上运行时,axios 返回的 Promise 从未 resolve。有人能发现我的代码有什么问题吗?谢谢。
仅供参考,我是Lambda开发的新手。我将代码部署到Lambda的方法是:把本地代码包裹在一个外层的 exports.handler 方法中,然后连同 node_modules 和 package.json 一起压缩上传。这个流程如果能简化,我当然也希望得到一些指导。我提到这一点是因为我不确定它是否与这个问题有关。
谢谢。
exports.handler = function(event, context, callback) {
async function main() {
await getData();
updateDB();
updateAssets();
}
let dataStore = {};
let s3Assets = [
'photo1',
'photo2',
'photo3',
'photo4',
'photo5',
'logoLarge',
'logoSmall',
'logoCompany'
];
let s3Status = {};
s3Assets.map( (i) => {
// Initialize the status of all assets with 0 (false)
s3Status[i] = 0;
})
let s3Ready = () => {
let count = 0;
s3Assets.map( (i) => {
count += s3Status[i];
})
console.log(s3Status);
console.log(`Upload Count: ${count}`);
if (count === s3Assets.length) {
console.log(`[SUCCESS] All assets are updated`);
console.log('< END: updateAssets');
}
}
let getData = () => {
... // code omitted
}
let handleAsset = (asset) => {
let src = dataStore[asset];
let destination = dataStore.prefix + asset + '.jpg';
axios({
method:'get',
url: src,
responseType:'stream'
}).then( (response) => {
let body = response.data.pipe(zlib.createGzip());
let fileType = 'multipart/form-data';
let s3Promise = s3.upload({
Bucket: 'someBucket',
Key: destination,
Body: body,
ContentType: fileType,
ContentEncoding: 'gzip'
}).promise();
s3Promise.then( (data) => {
s3Status[asset] = 1;
console.log('\n');
console.log(`[SUCCESS] s3 Upload: ${data.Location}`);
}).then( () => {
s3Ready();
}).catch( (error) => {
console.log(`[ERROR] s3 Upload: ${error}`);
});
}).catch( (error) => {
console.log(`[ERROR] axios: ${error}`);
});
}
let updateAssets = () => {
console.log('> START: updateAssets');
s3Assets.map( (i) => {
handleAsset(i)
});
}
let updateDB = () => {
... // code omitted
}
main();
}