AWS Lambda达到内存限制

时间:2017-04-26 13:48:30

标签: amazon-web-services aws-lambda

我使用这个Lambda函数动态生成缩略图。但我得到以下错误:

REPORT RequestId: 9369f148-2a85-11e7-a571-5f1e1818669e Duration: 188.18 ms Billed Duration: 200 ms Memory Size: 1536 MB Max Memory Used: 1536 MB 

和...

RequestId: 9369f148-2a85-11e7-a571-5f1e1818669e Process exited before completing request

所以我认为我达到了最大内存限制。如果去掉 uploadRecentImage() 函数,代码就可以正常工作。但是,只要我向 imgVariants[] 再添加一个新的尺寸,就又会达到内存限制。我认为是函数逐个处理 imgVariants(each 循环)的方式导致了这个问题,但我不知道如何改进它。任何帮助我都将不胜感激。

这是我的函数:

// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({
  imageMagick: true
}); // use ImageMagick
var util = require('util');

// configuration as code - add, modify, remove array elements as desired
// Each entry describes one output variant. Only MAX_WIDTH, MAX_HEIGHT,
// SIZING_QUALITY and INTERLACE are read by the handler below; SIZE and
// POSTFIX appear unused. NOTE(review): all four entries repeat the same
// "Large1"/"-l" labels — presumably copy-paste; confirm before relying
// on those fields.
var imgVariants = [
  {
    "SIZE": "Large1",
    "POSTFIX": "-l",
    "MAX_WIDTH": 6000,
    "MAX_HEIGHT": 6000,
    "SIZING_QUALITY": 75,
    "INTERLACE": "Line"
  },
    {
    "SIZE": "Large1",
    "POSTFIX": "-l",
    "MAX_WIDTH": 1280,
    "MAX_HEIGHT": 1280,
    "SIZING_QUALITY": 75,
    "INTERLACE": "Line"
  },
  {
    "SIZE": "Large1",
    "POSTFIX": "-l",
    "MAX_WIDTH": 500,
    "MAX_HEIGHT": 500,
    "SIZING_QUALITY": 75,
    "INTERLACE": "Line"
  },
    {
    "SIZE": "Large1",
    "POSTFIX": "-l",
    "MAX_WIDTH": 100,
    "MAX_HEIGHT": 100,
    "SIZING_QUALITY": 75,
    "INTERLACE": "Line"
  }
];
// Appended to the source bucket name to form the destination bucket name.
var DST_BUCKET_POSTFIX = "resized";



// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = function (event, context) {
  // Read options from the event.
  console.log("Reading options from event:\n", util.inspect(event, {
    depth: 5
  }));
  var srcBucket = event.Records[0].s3.bucket.name;
  // Object key may have spaces or unicode non-ASCII characters.
  var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
  // derive the file name and extension
  var srcFile = srcKey.match(/(.+)\.([^.]+)/);

  var srcName = srcFile[1];
  var scrExt = srcFile[2];
  // set the destination bucket
  var dstBucket = srcBucket + DST_BUCKET_POSTFIX;


  // make sure that source and destination are different buckets.
  if (srcBucket === dstBucket) {
    console.error("Destination bucket must be different from source bucket.");
    return;
  }

  if (!scrExt) {
    console.error('unable to derive file type extension from file key ' + srcKey);
    return;
  }

  if (scrExt != "jpg" && scrExt != "png") {
    console.log('skipping non-supported file type ' + srcKey + ' (must be jpg or png)');
    return;
  }

  function processImage(data, options, callback) {
    gm(data.Body).size(function (err, size) {

      var scalingFactor = Math.min(
        options.MAX_WIDTH / size.width,
        options.MAX_HEIGHT / size.height
      );
      var width = scalingFactor * size.width;
      var height = scalingFactor * size.height;

      this.resize(width, height)
        .quality(options.SIZING_QUALITY || 75)
        .interlace(options.INTERLACE || 'None')
        .toBuffer(scrExt, function (err, buffer) {
          if (err) {
            callback(err);

          } else {
            uploadImage(data.ContentType, buffer, options, callback);
            uploadRecentImage(data.ContentType, buffer, options, callback);
          }
        });
    });
  }

  function uploadImage(contentType, data, options, callback) {
    // Upload the transformed image to the destination S3 bucket.
    s3.putObject({
        Bucket: dstBucket,
        Key: options.MAX_WIDTH + '/' + srcName + '.' + scrExt,
        Body: data,
        ContentType: contentType
      },
      callback);
  }


  function uploadRecentImage(contentType, data, options, callback) {
    if(options.MAX_WIDTH == 500){
         s3.putObject({
            Bucket: dstBucket,
            Key: 'recent_optimized.' + scrExt,
            Body: data,
            ContentType: contentType
          },
          callback);
    }
    if(options.MAX_WIDTH == 100){
           s3.putObject({
            Bucket: dstBucket,
            Key: 'recent_thumb.' + scrExt,
            Body: data,
            ContentType: contentType
          },
          callback);
     }
  }


  // Download the image from S3 and process for each requested image variant.
  async.waterfall(
    [
      function download(next) {
          // Download the image from S3 into a buffer.
          s3.getObject({
              Bucket: srcBucket,
              Key: srcKey
            },
            next);
      },
      function processImages(data, next) {
          async.each(imgVariants, function (variant, next) {
            processImage(data, variant, next);
          }, next);

      }

    ],
    function (err) {
      if (err) {
        console.error(
          'Unable to resize ' + srcBucket + '/' + srcKey +
          ' and upload to ' + dstBucket +
          ' due to an error: ' + err
        );
      } else {
        console.log(
          'Successfully resized ' + srcBucket + '/' + srcKey +
          ' and uploaded to ' + dstBucket
        );
      }

      context.done();
    }
  );
};

1 个答案:

答案 0(得分:0):

  1. 您可以限制并行调用 processImage 的次数:将

    async.each(imgVariants,

    替换为

    async.eachLimit(imgVariants, 2,

    这样同一时间最多只会并行处理两张图片。

  2. 该脚本有一个错误:uploadImage(data.ContentType, buffer, options, callback); 和 uploadRecentImage(data.ContentType, buffer, options, callback); 使用了同一个 callback,这会导致 callback 被调用两次,而这是不允许的。回调只能被调用一次!

  3. 该脚本还有另一个错误:event.Records[0] 意味着只会处理第一张图片。如果您同时上传多张图片,其余图片会被遗漏。