如何将归档文件(zip)传输到S3存储桶

时间:2018-08-20 21:21:25

标签: javascript node.js amazon-s3 archive

我对如何继续感到困惑。我正在使用Archive(node js模块)作为将数据写入zip文件的方法。当前,当我写入文件(本地存储)时,我的代码可以正常工作。

var fs = require('fs');
var archiver = require('archiver');

// Destination: a zip file on local disk.
var output = fs.createWriteStream(__dirname + '/example.zip');
var archive = archiver('zip', {
  zlib: { level: 9 } // maximum compression
});

archive.pipe(output);
// FIX: the original used typographic quotes (‘msg001.txt’), which is a
// JavaScript syntax error — plain quotes are required.
archive.append(mybuffer, { name: 'msg001.txt' });

我想修改代码,以便存档目标文件是一个AWS S3存储桶。查看代码示例,当我创建存储桶对象时,我可以指定存储桶名称和键(和主体),如下所示:

var s3 = new AWS.S3();
// FIX: the original was missing the comma between Key and Body (syntax error).
var params = { Bucket: 'myBucket', Key: 'myMsgArchive.zip', Body: myStream };
s3.upload(params, function (err, data) {
  // ... handle err / data here
});

或者:

// FIX: 'parms' → 'params', missing comma between Bucket and Key, and
// typographic quotes replaced with plain quotes (all syntax errors).
s3 = new AWS.S3({ params: { Bucket: 'myBucket', Key: 'myMsgArchive.zip' } });
s3.upload({ Body: myStream })
  .send(function (err, data) {
    // ... handle err / data here
  });

对于我的S3示例,myStream似乎是可读的流,由于archive.pipe需要可写的流,因此我对如何进行此工作感到困惑。这是我们需要使用直通流的地方吗?我找到了一个示例,其中有人创建了直通流,但是该示例过于简洁,无法正确理解。我要指的具体示例是:

Pipe a stream to s3.upload()

任何人能给我的帮助将不胜感激。谢谢。

2 个答案:

答案 0 :(得分:3)

下面的示例采用可接受的答案,并根据要求使其与本地文件一起使用。

const archiver = require("archiver")
const fs = require("fs")
const AWS = require("aws-sdk")
const s3 = new AWS.S3()
const stream = require("stream")

/**
 * Zips two local files into an in-memory archive and streams it to S3.
 * The zip is never written to disk: the archiver output is piped through
 * a PassThrough stream that S3 consumes as the upload body.
 */
const zipAndUpload = async () => {
  const files = [`test1.txt`, `test2.txt`]
  const fileNames = [`test1target.txt`, `test2target.txt`]
  const archive = archiver("zip", {
    zlib: { level: 9 } // Sets the compression level.
  })

  // FIX: attach event handlers BEFORE finalize() so no event can fire
  // while there is no listener registered.
  archive.on("warning", function (err) {
    if (err.code === "ENOENT") {
      console.log(err) // missing input file: log and continue
    } else {
      throw err
    }
  })
  archive.on("error", function (err) {
    throw err
  })
  archive.on("end", function () {
    console.log("archive end")
  })

  // FIX: forEach, not map — only the side effect of appending is wanted.
  files.forEach((thisFile, index) => {
    archive.append(fs.createReadStream(thisFile), { name: fileNames[index] })
  })

  const uploadStream = new stream.PassThrough()
  archive.pipe(uploadStream)

  // FIX: finalize() returns a promise that was previously floating; the
  // upload must run concurrently because it is what drains the stream.
  await Promise.all([archive.finalize(), uploadFromStream(uploadStream)])
  console.log("all done")
}

/**
 * Starts an S3 upload whose body is the given stream; resolves when the
 * upload completes and rejects on failure.
 */
const uploadFromStream = (pass) =>
  s3
    .upload({
      Bucket: "bucket-name",
      Key: `streamtest.zip`,
      Body: pass,
      ContentType: "application/zip"
    })
    .promise()

// FIX: the promise was floating — catch the rejection so a failure does
// not become an unhandled promise rejection.
zipAndUpload().catch((err) => {
  console.error(err)
})

答案 1 :(得分:1)

这可能对想知道如何使用pipe的其他人很有用。

您正确地引用了那个使用直通(pass-through)流的示例,下面是我的可用代码:

1 - 例程本身:使用 node-archiver 压缩文件

exports.downloadFromS3AndZipToS3 = () => {
  // These are my input files I'm willing to read from S3 to ZIP them

  const files = [
    `${s3Folder}/myFile.pdf`,
    `${s3Folder}/anotherFile.xml`
  ]

  // Just in case you like to rename them as they have a different name in the final ZIP

  const fileNames = [
    'finalPDFName.pdf',
    'finalXMLName.xml'
  ]

  // Use promises to get them all

  const promises = []

  files.map((file) => {
    promises.push(s3client.getObject({
      Bucket: yourBubucket,
      Key: file
    }).promise())
  })

  // Define the ZIP target archive

  let archive = archiver('zip', {
    zlib: { level: 9 } // Sets the compression level.
  })

  // Pipe!

  archive.pipe(uploadFromStream(s3client, 'someDestinationFolderPathOnS3', 'zipFileName.zip'))

  archive.on('warning', function(err) {
    if (err.code === 'ENOENT') {
      // log warning
    } else {
      // throw error
      throw err;
    }
  })

  // Good practice to catch this error explicitly
  archive.on('error', function(err) {
    throw err;
  })

  // The actual archive is populated here 

  return Promise
    .all(promises)
    .then((data) => {
      data.map((thisFile, index) => {
        archive.append(thisFile.Body, { name: fileNames[index] })
      })

      archive.finalize()
    })
  }

2-辅助方法

/**
 * Creates a PassThrough stream, starts an S3 upload that consumes it, and
 * returns the stream so the caller can pipe archive data into it.
 *
 * FIX: the caller invokes this as uploadFromStream(s3client, folder, file),
 * but the original signature declared only s3client and read undeclared
 * `someFolder` / `aFilename` free variables. Declare them as parameters.
 *
 * @param {AWS.S3} s3client - configured S3 client
 * @param {string} someFolder - destination key prefix in the bucket
 * @param {string} aFilename - object name of the resulting zip
 * @returns {stream.PassThrough} writable end for the archiver to pipe into
 */
const uploadFromStream = (s3client, someFolder, aFilename) => {
  const pass = new stream.PassThrough()

  const s3params = {
    Bucket: yourBucket,
    Key: `${someFolder}/${aFilename}`,
    Body: pass,
    ContentType: 'application/zip'
  }

  // NOTE(review): errors here are only logged (best-effort); the archive
  // 'error' handler in the caller is the primary failure path.
  s3client.upload(s3params, (err, data) => {
    if (err)
      console.log(err)

    if (data)
      console.log('Success')
  })

  return pass
}