我正在使用 Lambda 函数从存储桶中获取图像,并将其复制到另一个存储桶中;同时取原始图像调整其大小,也复制到该存储桶中。完成这些操作后,再从原始存储桶中删除原始图片。
开始:
Bucket1 = image.jpg Bucket2 =空
END:
Bucket1 =空 Bucket2 = imagecopy.jpg,imageresized.jpg
// dependencies
var async = require('async');   // flow control for the waterfall pipeline
var AWS = require('aws-sdk');
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');     // used for util.inspect logging only
// constants
// Maximum dimensions (in pixels) of the resized thumbnail.
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey =
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback('Unsupported image type: ${imageType}');
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
gm(response.Body).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
},
function copyImage(next) {
s3.copyObject({
CopySource: srcBucket + '/' + srcKey,
Bucket: dstBucket,
Key: srcKey
}, next)
},
function deleteOrig(next) {
s3.deleteObject({
Bucket: srcBucket,
Key: srcKey
})
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
callback(null, "message");
}
);
};
日志:
START RequestId: b9ed83e2-4048-4434-bb5c-adf511b6200b Version: $LATEST
2019-01-31T10:39:52.312Z b9ed83e2-4048-4434-bb5c-adf511b6200b Reading options from event:
{ Records:
[ { eventVersion: '2.0',
eventSource: 'aws:s3',
awsRegion: 'us-west-2',
eventTime: '1970-01-01T00:00:00.000Z',
eventName: 'ObjectCreated:Put',
userIdentity: { principalId: 'AIDAJDPLRKLG7UEXAMPLE' },
requestParameters: { sourceIPAddress: '127.0.0.1' },
responseElements:
{ 'x-amz-request-id': 'C3D13FE58DE4C810',
'x-amz-id-2': 'FMyUVURIY8/IgAtTv8xRjskZQpcIZ9KG4V5Wp6S7S/JRWeUWerMUE5JgHvANOjpD' },
s3:
{ s3SchemaVersion: '1.0',
configurationId: 'testConfigRule',
bucket:
{ name: 'ciansource',
ownerIdentity: { principalId: 'A3NL1KOZZKExample' },
arn: 'arn:aws:s3:::ciansource' },
object:
{ key: 'football.jpg',
size: 1024,
eTag: 'd41d8cd98f00b204e9800998ecf8427e',
versionId: '096fKKXTRTtl3on89fVO.nfljtsv6qko' } } } ] }
2019-01-31T10:39:52.617Z b9ed83e2-4048-4434-bb5c-adf511b6200b TypeError: callback.call is not a function
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:364:18)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:77:10)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:683:14)
at Request.transition (/var/runtime/node_modules/aws-sdk/lib/request.js:22:10)
at AcceptorStateMachine.runTo (/var/runtime/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /var/runtime/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:38:9)
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:685:12)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:115:18)
END RequestId: b9ed83e2-4048-4434-bb5c-adf511b6200b
REPORT RequestId: b9ed83e2-4048-4434-bb5c-adf511b6200b Duration: 344.41 ms Billed Duration: 400 ms Memory Size: 1024 MB Max Memory Used: 39 MB
RequestId: b9ed83e2-4048-4434-bb5c-adf511b6200b Process exited before completing request
答案 0(得分:1):
感谢您添加了日志。仅从代码本身很难看出问题所在。因此,我使用 ES2017 和 Node v8 中新的 async/await 关键字对其进行了重构。
我在笔记本电脑上测试了此代码,它可以正常工作。
此外,它还具有以下优点:
尝试一下。
如果由于某种原因您无法使用 ES2017 或 Node v8,我建议在每个 waterfall 步骤函数中添加 console.log(),以便定位它在哪一步中断。
// dependencies
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true }); // Enable ImageMagick integration.
// constants
// Maximum dimensions (in pixels) of the resized thumbnail.
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
// get reference to S3 client
var s3 = new AWS.S3();
// Sample S3 "ObjectCreated" event used for local laptop testing.
// You NEED TO delete this to run the code from Lambda, where the real
// event is supplied by the S3 trigger.
const event = {
  Records: [
    {
      s3: {
        bucket: { name: "test-so-sst" },
        object: { key: "image.jpg" },
      },
    },
  ],
};
// Fetch an object from S3 and resolve with the full GetObject response
// (Body, ContentType, ...). Rejects (after logging) on any S3 error.
async function download(srcBucket, srcKey) {
  const params = { Bucket: srcBucket, Key: srcKey };
  return new Promise((resolve, reject) => {
    s3.getObject(params, (error, data) => {
      if (error) {
        console.log(error);
        reject(error);
        return;
      }
      resolve(data);
    });
  });
};
// Resize an image buffer to fit inside MAX_WIDTH x MAX_HEIGHT, preserving
// the aspect ratio. Resolves with the transformed buffer encoded as
// `imageType`; rejects if gm cannot read or re-encode the image.
async function transform(imageType, image) {
  return new Promise((resolve, reject) => {
    gm(image).size(function(err, size) {
      if (err) {
        // FIX: the size error was ignored, so a corrupt/unreadable image
        // crashed on `size.width` and left the promise pending forever.
        reject(err);
        return;
      }
      // Infer the scaling factor to avoid stretching the image unnaturally.
      var scalingFactor = Math.min(
        MAX_WIDTH / size.width,
        MAX_HEIGHT / size.height
      );
      var width = scalingFactor * size.width;
      var height = scalingFactor * size.height;
      // Transform the image buffer in memory. `this` is the gm image state,
      // so the size callback must be a classic function, not an arrow.
      this.resize(width, height)
        .toBuffer(imageType, function(err, buffer) {
          if (err) {
            reject(err);
          } else {
            resolve(buffer);
          }
        });
    });
  });
}
// Store the transformed image in the destination bucket and resolve with
// the PutObject response. Rejects (after logging) on any S3 error.
async function upload(dstBucket, dstKey, contentType, data) {
  const params = {
    Bucket: dstBucket,
    Key: dstKey,
    Body: data,
    ContentType: contentType,
  };
  return new Promise((resolve, reject) => {
    // Stream the transformed image to a different S3 bucket.
    s3.putObject(params, (error, result) => {
      if (error) {
        console.log(error);
        reject(error);
        return;
      }
      resolve(result);
    });
  });
}
// Copy the original object unchanged into the destination bucket, keeping
// its key. Resolves with the CopyObject response; rejects (after logging)
// on any S3 error.
async function copyImage(srcBucket, srcKey, dstBucket) {
  const params = {
    CopySource: srcBucket + '/' + srcKey,
    Bucket: dstBucket,
    Key: srcKey,
  };
  return new Promise((resolve, reject) => {
    s3.copyObject(params, (error, result) => {
      if (error) {
        console.log(error);
        reject(error);
        return;
      }
      resolve(result);
    });
  });
};
// Remove the original object from the source bucket once it has been
// resized and copied. Resolves with the DeleteObject response; rejects
// (after logging) on any S3 error.
async function deleteOrig(srcBucket, srcKey) {
  const params = { Bucket: srcBucket, Key: srcKey };
  return new Promise((resolve, reject) => {
    s3.deleteObject(params, (error, result) => {
      if (error) {
        console.log(error);
        reject(error);
        return;
      }
      resolve(result);
    });
  });
};
exports.handler = async function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", JSON.stringify(event, null,2));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey =
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "-resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
try {
let responseDownload = await download(srcBucket, srcKey);
let responseTransform = await transform(imageType, responseDownload.Body);
let responseUpload = await upload(dstBucket, dstKey, responseDownload.ContentType, responseTransform);
let responseCopy = await copyImage(srcBucket, srcKey, dstBucket);
let responseDelete = await deleteOrig(srcBucket, srcKey);
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
} catch (error) {
const message = 'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + error;
console.error(message);
callback(error, message);
}
callback(null, "success");
};
// Local test driver: invokes the handler directly with the sample event.
// You can safely remove this before deploying to Lambda.
exports.handler(event, null, (error, message) => {
if (error) console.log(error);
console.log(message);
})