在 Sails(Skipper)中克隆文件流

时间:2016-03-09 17:56:28

标签: node.js sails.js

我目前正使用 skipper-s3 将通过 Skipper 接收的文件上传到 S3,但我想保存该文件的多个已调整大小的副本。有没有办法克隆文件流?我找到了一个类似的解决方案,但找不到生成多个文件流的方法。也许有别的方法可行。无论如何,这是上传的代码:

  processUpload: function (req, res) {

    var type = req.param('type');
    if (type !== 'thumbnail' && type !== 'image' || typeof req.file('file')._files[0] == 'undefined') return res.badRequest();

    var dir = 'news/'+type+'/',
      filename = moment().unix() + '_' + req.file('file')._files[0].stream.filename,
      filetype = req.file('file')._files[0].stream.headers['content-type']
      filesize = req.file('file')._files[0].stream.byteCount;

    // Lowercase and convert spaces to hyphens
    filename = filename.toLowerCase()
             .replace(/ +/g,'-');

    if (filetype != 'image/jpeg' && filetype != 'image/png' && filetype != 'image/gif') return res.badRequest('File must be a JPG, GIF, or PNG');
    if (filesize > 2000000) return res.badRequest('File cannot exceed 2MB');

    var fs = require('fs')
      , gm = require('gm');

    var resizeStream = req.file('file')._files[0].stream;
    var bs = gm(resizeStream, 'img.jpg')
    .resize(100,100)
    .write('resize.jpg', function (err) { });


    req.file('file').upload({
      adapter: require('skipper-s3'),
      key: 'Axx',
      secret: 'xx',
      bucket: 'xx6',
      saveAs: dir+filename,
      headers: {
        'x-amz-acl': 'public-read'
      }
    }, function (err, filesUploaded) {
      if (err) return res.negotiate(err);

      return res.ok({
        trigger: 'article-'+type+'-uploaded',
        fileUploaded: filename,
        previewURL: 'https://s3.amazonaws.com/sdf/'+dir+filename,
      });
    });

  }

这不起作用,因为一旦我调整大小并保存图像,流就已经被消耗掉,.upload 功能拿到的就是空流。如果您知道克隆流的好方法或解决此问题的其他办法,请告诉我。谢谢您查看我的问题。

1 个答案:

答案 0 :(得分:0)

这是我最终可以工作的方案。我知道很奇怪:我使用 skipper-s3 模块进行第一次上传,而用 aws-sdk 进行后续的上传,因为后续的副本依赖于第一次上传的文件内容。这似乎不是一个理想的解决方案,但我没能把一个文件同时流式传输到多个缓冲流,所以改为先上传原图,再通过 gm 调整大小并将结果上传到 S3。

var fs = require('fs')
  , gm = require('gm');

req.file('file').upload({
  adapter: require('skipper-s3'),
  key: 'Asdf234',
  secret: 'hehe',
  bucket: 'xd',
  // BUG FIX: the original snippet dropped the `saveAs:` key — a bare
  // `function (file, cb)` inside an object literal is a syntax error.
  saveAs: function (file, cb) {
    // Reject disallowed MIME types up front (skipper skips the file when
    // the saveAs handler returns false).
    if (allowedTypes.indexOf(file.headers['content-type']) === -1) return false;

    // Build a timestamped, lowercased, hyphenated object key.
    var extension = file.filename.split('.').pop();
    var clean = file.filename.toLowerCase()
                        .replace(/ +/g,'-');
    filename = moment().unix() + '_' + clean + '.' + extension;

    cb(null, dir+filename);
  },
  headers: {
    'x-amz-acl': 'public-read'
  }
}, function (err, filesUploaded) {
  if (err) return res.negotiate(err);

  var request = require('request');
  var AWS = require('aws-sdk');
  AWS.config.region = 'us-east-1';

  var s3obj = new AWS.S3({params: {Bucket: 'xd'}});

  // Re-download the just-uploaded original from S3, resize it with gm,
  // buffer the resized output, and re-upload it via the aws-sdk.
  gm(request(filesUploaded[0]['extra']['Location']),"/tmp/orig.jpg")
  .resize('750', '375')
  .stream(function (err, stdout, stderr) {
    // BUG FIX: the original ignored resize errors entirely.
    if (err) return sails.log.debug("Error resizing image: ", err);

    // BUG FIX: `new Buffer('')` is deprecated and unsafe; use Buffer.alloc.
    var buf = Buffer.alloc(0);

    stdout.on('data', function (data) {
      buf = Buffer.concat([buf, data]);
    });

    stdout.on('end', function (data) {
      var params = {
        ACL: 'public-read',
        Key: 'news/1/'+filename,
        Body: buf
      };

      s3obj.upload(params, function (err, data) {
        if (err) {
          sails.log.debug("Error uploading data: ", err);
        } else {
          sails.log.debug("Successfully uploaded data to myBucket/myKey");
        }
      });
    });
  });
}); // BUG FIX: the original snippet never closed the upload callback/call.