Objects overwritten despite having different keys

Asked: 2018-01-31 20:24:53

Tags: amazon-s3 aws-lambda

Hoping someone here can shed some light on this situation for me - maybe I'm missing something simple.

I have a flow set up where my application pulls a pre-signed URL from an S3 bucket in order to upload one or more images. When an image is uploaded, a Lambda function written on top of the SDK is triggered. This Lambda is supposed to resize the image into 3 different sizes and assign them keys / place them into "folders" like so: photos/000/123/456/medium/image.jpg, photos/000/123/456/large/image.jpg, photos/000/123/456/original/image.jpg.

Unfortunately, with every iteration the previous object gets overwritten, so that photos/000/123/456/ ends up containing only original/image.jpg. I was under the impression that, since these are different keys, all three would be saved without overwriting each other. That doesn't seem to be the case? Code sample below (note that originally the images were put into the destination bucket via a loop; in the process of breaking the code apart it got a bit messy, but it works, and the result is the same with or without the loop):

// dependencies
var async = require('async');
var path = require('path');
var AWS = require('aws-sdk');
var gm = require('gm')
            .subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');

var max_width  = 20;
var max_height = 20;

// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = function(event, context) {
  // Read options from the event.
  console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
  var srcBucket = event.Records[0].s3.bucket.name;
  // Object key may have spaces or unicode non-ASCII characters.
  var srcKey    =
    decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
  var dstBucket = srcBucket;
  var dstKey    = srcKey.replace('originals', 'assets');
  var extension = path.extname(dstKey);
  var filename  = path.basename(dstKey, extension);
  var directory = path.dirname(dstKey);
  // dstKey = directory + '/' + filename + extension;
  // var sub_folders = ['original', 'large', 'medium', 'thumb']


    // LARGE download the image from S3, transform, and upload to a different S3 bucket.
    dstKey = directory + '/' + 'large' + '/' + filename + extension;
    max_width  = 600;
    max_height = 600;
    async.waterfall([
      function download(next) {
        // Download the image from S3 into a buffer.
        s3.getObject({
            Bucket: srcBucket,
            Key: srcKey
          },
          next);
        },
      function transform(response, next) {
        gm(response.Body).size(function(err, size) {
          // Infer the scaling factor to avoid stretching the image unnaturally.
          var scalingFactor = Math.min(
            max_width / size.width,
            max_height / size.height
          );
          var width  = scalingFactor * size.width;
          var height = scalingFactor * size.height;
          // var height = scalingFactor * size.height;
          // Transform the image buffer in memory.
          this.resize(width, height)
            .toBuffer(null, function(err, buffer) {
              if (err) {
                next(err);
              } else {
                next(null, response.ContentType, buffer);
              }
            });
        });
      },
      function upload(contentType, data, next) {
        // Stream the transformed image to a different S3 bucket.
        s3.putObject({
            Bucket: dstBucket,
            Key: dstKey,
            Body: data,
            ContentType: contentType
          },
          next);
        }
      ], function (err) {
        if (err) {
          console.error(
            'Unable to resize ' + srcBucket + '/' + srcKey +
            ' and upload to ' + dstBucket + '/' + dstKey +
            ' due to an error: ' + err
          );
        } else {
          console.log(
            'Successfully resized ' + srcBucket + '/' + srcKey +
            ' and uploaded to ' + dstBucket + '/' + dstKey
          );
        }

        context.done();
      }
    );


    // MEDIUM download the image from S3, transform, and upload to a different S3 bucket.
    dstKey = directory + '/' + 'medium' + '/' + filename + extension;
    max_width  = 600;
    max_height = 600;
    async.waterfall([
      function download(next) {
        // Download the image from S3 into a buffer.
        s3.getObject({
            Bucket: srcBucket,
            Key: srcKey
          },
          next);
        },
      function transform(response, next) {
        gm(response.Body).size(function(err, size) {
          // Infer the scaling factor to avoid stretching the image unnaturally.
          var scalingFactor = Math.min(
            max_width / size.width,
            max_height / size.height
          );
          var width  = scalingFactor * size.width;
          var height = scalingFactor * size.height;
          // var height = scalingFactor * size.height;
          // Transform the image buffer in memory.
          this.resize(width, height)
            .toBuffer(null, function(err, buffer) {
              if (err) {
                next(err);
              } else {
                next(null, response.ContentType, buffer);
              }
            });
        });
      },
      function upload(contentType, data, next) {
        // Stream the transformed image to a different S3 bucket.
        s3.putObject({
            Bucket: dstBucket,
            Key: dstKey,
            Body: data,
            ContentType: contentType
          },
          next);
        },

      function transform(response, next) {
        gm(response.Body).size(function(err, size) {
          // Infer the scaling factor to avoid stretching the image unnaturally.
          var scalingFactor = Math.min(
            330 / size.width,
            330 / size.height
          );
          var width  = scalingFactor * size.width;
          var height = scalingFactor * size.height;
          // var height = scalingFactor * size.height;
          // Transform the image buffer in memory.
          this.resize(width, height)
            .toBuffer(null, function(err, buffer) {
              if (err) {
                next(err);
              } else {
                next(null, response.ContentType, buffer);
              }
            });
        });
      },
      function upload(contentType, data, next) {
        // Stream the transformed image to a different S3 bucket.
        s3.putObject({
            Bucket: dstBucket,
            Key: directory + '/' + 'medium' + '/' + filename + extension,
            Body: data,
            ContentType: contentType
          },
          next);
        }
      ], function (err) {
        if (err) {
          console.error(
            'Unable to resize ' + srcBucket + '/' + srcKey +
            ' and upload to ' + dstBucket + '/' + dstKey +
            ' due to an error: ' + err
          );
        } else {
          console.log(
            'Successfully resized ' + srcBucket + '/' + srcKey +
            ' and uploaded to ' + dstBucket + '/' + dstKey
          );
        }

        context.done();
      }
    );    


};

1 Answer:

Answer 0: (score 0)

In short, your code doesn't actually do what you intend it to.

async.waterfall( ... ); does not block the surrounding program flow. It returns almost immediately - I'm not sure whether that is immediately after or immediately before the first nested function starts, but it is one of the two - and the precise timing isn't terribly important to the present discussion, because either way it returns very quickly, and execution continues with whatever comes next below it.
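A minimal standalone sketch of that ordering (not your code - the setTimeout is just a stand-in for an asynchronous call like s3.getObject):

var async = require('async');

async.waterfall([
  function download(next) {
    // Stand-in for an asynchronous call such as s3.getObject;
    // its callback fires on a later tick of the event loop.
    setTimeout(function () { next(null); }, 0);
  },
  function upload(next) {
    console.log('2. waterfall steps run later');
    next(null);
  }
], function (err) {
  console.log('3. waterfall finished');
});

console.log('1. this runs first, right after async.waterfall() returns');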

So, you are overwriting the values of dstKey and the other variables in memory several times before the nested functions in your waterfalls actually use those variable values... which will see the then-current values. dstKey is sitting at its final assigned value before any of the resize operations complete - hence, objects are being overwritten, because you are always using the last value of dstKey.
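Here is a sketch of that effect in isolation - two waterfalls sharing one dstKey, the way your handler's waterfalls do:

var async = require('async');

var dstKey = '';

function startResize(label) {
  async.waterfall([
    function download(next) {
      setTimeout(function () { next(null); }, 0); // stand-in for s3.getObject
    },
    function upload(next) {
      // Runs only after BOTH assignments below have already executed.
      console.log(label + ' would upload to: ' + dstKey);
      next(null);
    }
  ]);
}

dstKey = 'photos/000/123/456/large/image.jpg';
startResize('large');

dstKey = 'photos/000/123/456/medium/image.jpg';
startResize('medium');

// Both lines print .../medium/image.jpg - the last value assigned.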

The waterfalls execute in parallel, uncoordinated. That's how asynchronous functions work. They can't block execution - that would also block the event loop.
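One way to fix this is to give each resize its own scope, so that every waterfall closes over its own key and dimensions, and to coordinate completion with async.each so that context.done() fires exactly once. A sketch, assuming srcBucket, srcKey, dstBucket, directory, filename, extension, s3, and gm are set up as in your handler:

var sizes = [
  { name: 'large',  max: 600 },
  { name: 'medium', max: 330 }
];

async.each(sizes, function (size, done) {
  // Each iteration gets its own dstKey, fixed here by the closure.
  var dstKey = directory + '/' + size.name + '/' + filename + extension;
  async.waterfall([
    function download(next) {
      s3.getObject({ Bucket: srcBucket, Key: srcKey }, next);
    },
    function transform(response, next) {
      gm(response.Body).size(function (err, dims) {
        if (err) { return next(err); }
        // Scale to fit within this size's bounding box.
        var scalingFactor = Math.min(size.max / dims.width, size.max / dims.height);
        this.resize(scalingFactor * dims.width, scalingFactor * dims.height)
          .toBuffer(null, function (err, buffer) {
            if (err) { next(err); }
            else { next(null, response.ContentType, buffer); }
          });
      });
    },
    function upload(contentType, data, next) {
      s3.putObject({
        Bucket: dstBucket,
        Key: dstKey,
        Body: data,
        ContentType: contentType
      }, next);
    }
  ], done);
}, function (err) {
  if (err) { console.error('Resize failed: ' + err); }
  context.done(); // exactly once, after all sizes have finished
});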

Log the values of your variables inside the various functions (along with some context about where you are, e.g. "before calling the s3 upload function") and you should be able to see this happening.
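For example, a temporary log line inside each upload function would make the problem visible immediately - every put goes to the same key:

function upload(contentType, data, next) {
  // Diagnostic: show which key this waterfall is about to write to.
  console.log('about to call s3.putObject, dstKey = ' + dstKey);
  s3.putObject({
      Bucket: dstBucket,
      Key: dstKey,
      Body: data,
      ContentType: contentType
    },
    next);
}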