NodeJS Lambda problem with order of operations

Date: 2017-04-21 15:10:08

Tags: node.js aws-lambda aws-sdk aws-codepipeline

The main problem: when this code runs as a CodePipeline action, it downloads the input artifact, unzips it, and greps template.yml for the string FunctionName. It works most of the time, but occasionally it fails saying the template.yml file cannot be found. If I re-run the pipeline it completes without error. Any help is appreciated. Thanks!

'use strict'

// dependencies
const child_process = require('child_process')
const fs = require('fs')
const path = require('path')
const stripDirs = require('strip-dirs')
const AWS = require('aws-sdk')
const unzip = require('unzip')
const shell = require('shelljs')

// global process variable is still accessible
process.env['PATH'] = process.env['PATH'] + ':/tmp'

// get reference to S3 and Lambda clients
const s3 = new AWS.S3({maxRetries: 1, "signatureVersion":"v4"})
const lambda = new AWS.Lambda()

exports.handler = function(event, context) {

  const codepipeline = new AWS.CodePipeline()
  let jobId

  // Notify AWS CodePipeline of a successful job
  function putJobSuccess(message) {
    console.log(message)
    codepipeline.putJobSuccessResult({ jobId },
      (err, data) => {
        if (err)
          context.fail(err)
        else
          context.succeed(message)
       })
  }

  // Notify AWS CodePipeline of a failed job
  function putJobFailure(message) {
    console.error('job failure: ', message)
    codepipeline.putJobFailureResult({
      jobId,
      failureDetails: {
      message: JSON.stringify(message),
       type: 'JobFailed',
       externalExecutionId: context.invokeid
      }
    }, (err, data) => context.fail(message))
  }


  try {
    const jobEvent = event['CodePipeline.job']
    jobId = jobEvent.id
    const jobData = jobEvent.data
    console.log(jobData)

    // Retrieve the value of UserParameters from the Lambda action configuration
    // in AWS CodePipeline; here it is expected to contain the input artifact name.
    const userParams = jobData.actionConfiguration.configuration.UserParameters
    const userParamsSplit = userParams && userParams.split(' ')
    if (!userParams || !userParamsSplit || userParamsSplit.length !== 1)
      throw new Error('The User Parameters field must contain a single item: the input artifact name')

    const artifactName = userParamsSplit[0]
    const artifact = jobData.inputArtifacts.find(a => a.name === artifactName && a.location.type === 'S3')
    if (!artifact) throw new Error('artifact not found: ' + artifactName)
    console.log('Artifact:', artifact)

    const tmpDir = '/tmp'

    const artifactZipFilePath = path.join(tmpDir, stripDirs(artifact.location.s3Location.objectKey, 2))
    console.log('ZipFilePath:', artifactZipFilePath)

    s3.getObject({
      Bucket: artifact.location.s3Location.bucketName,
      Key: artifact.location.s3Location.objectKey
    }, (err, data) => {
      if (err) return putJobFailure(`could not download artifact from S3: ${err.stack || err}`)
      fs.writeFileSync(artifactZipFilePath, data.Body)

      const zipFileContents = fs.readFileSync(artifactZipFilePath)
      const zipFileDir = stripDirs(artifact.location.s3Location.objectKey, 2).slice(0, -4)
      console.log('zipFileDir:', zipFileDir)

      // extract the artifact, then grep template.yml for the destination function name
      const newZipArtifact = path.join('/tmp', stripDirs(artifact.location.s3Location.objectKey, 2))
      fs.createReadStream(newZipArtifact).pipe(unzip.Extract({ path: '/tmp' }))
      const prependFunctionName = shell.grep('FunctionName', '/tmp/template.yml')
      const destLambdaFunctionName = prependFunctionName.replace(/^.+:/, '').replace(/\s/g, '')
      const command = child_process.exec

      command(`echo ${destLambdaFunctionName}`, (error, stdout, stderr) => {
        if (error) {
          console.log('Error occurs')
          console.error(error)
          return
        }
        console.log(stdout)
        console.log(stderr)
      })

      lambda.updateFunctionCode({
        FunctionName: destLambdaFunctionName,
        Publish: true,
        ZipFile: zipFileContents
      }, function(err, data) {
        if (err) console.log(err, err.stack) // an error occurred
        else     console.log(data)           // successful response
        putJobSuccess('lambda code updated')
      })
    })
  } catch (err) {
    putJobFailure(err.stack)
  }
}
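
The failure pattern points at a race: pipe(unzip.Extract(...)) returns immediately and extraction carries on asynchronously, while shell.grep runs straight away, so /tmp/template.yml occasionally is not on disk yet. A minimal sketch of serializing the two steps, assuming the unzip Extract stream emits 'close' once all entries have been written:

// Sketch only: wait for extraction to finish before reading template.yml.
// Assumes unzip.Extract emits 'close' when it has finished writing all entries.
fs.createReadStream(newZipArtifact)
  .pipe(unzip.Extract({ path: '/tmp' }))
  .on('error', (err) => putJobFailure(`could not extract artifact: ${err.stack || err}`))
  .on('close', () => {
    const prependFunctionName = shell.grep('FunctionName', '/tmp/template.yml')
    const destLambdaFunctionName = prependFunctionName.replace(/^.+:/, '').replace(/\s/g, '')
    // ...call lambda.updateFunctionCode(...) from inside this callback
  })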

1 Answer:

Answer 0 (score: 0):

Rather than extracting to /tmp and grepping afterwards, stream the zip entries straight from S3 and read FunctionName out of template.yml as it arrives. Give this a try:

// dependencies
const AWS = require('aws-sdk')
const unzip = require('unzip')

// global process variable is still accessible
process.env['PATH'] = process.env['PATH'] + ':/tmp'
// get reference to CodePipeline client
const codepipeline = new AWS.CodePipeline()
// get reference to Lambda client
const lambda = new AWS.Lambda()
// get reference to S3 client
const s3 = new AWS.S3({
  maxRetries: 1,
  "signatureVersion": "v4"
})

exports.handler = function(event, context) {
  let jobId

  // Notify AWS CodePipeline of a successful job
  function putJobSuccess(message) {
    console.log(message)
    if (!jobId) return context.succeed(message)
    codepipeline.putJobSuccessResult({
        jobId
      },
      (err, data) => {
        if (err)
          context.fail(err)
        else
          context.succeed(message)
      })
  }

  // Notify AWS CodePipeline of a failed job
  function putJobFailure(message) {
    console.error('job failure: ', message)
    if (!jobId) return context.fail(message)
    codepipeline.putJobFailureResult({
      jobId,
      failureDetails: {
        message: JSON.stringify(message),
        type: 'JobFailed',
        externalExecutionId: context.invokeid
      }
    }, (err, data) => context.fail(message))
  }


  try {
    const jobEvent = event['CodePipeline.job']
    jobId = jobEvent.id
    const jobData = jobEvent.data
    console.log(jobData)

    // Retrieve the value of UserParameters from the Lambda action configuration
    // in AWS CodePipeline; here it is expected to contain the input artifact name.
    const userParams = jobData.actionConfiguration.configuration.UserParameters
    const userParamsSplit = userParams && userParams.split(' ')
    if (!userParams || !userParamsSplit || userParamsSplit.length !== 1)
      throw new Error('The User Parameters field must contain a single item: the input artifact name')

    const artifactName = userParamsSplit[0]
    const artifact = jobData.inputArtifacts.find(a => a.name === artifactName && a.location.type === 'S3')
    if (!artifact) throw new Error('artifact not found: ' + artifactName)
    console.log('Artifact:', artifact)

    let destLambdaFunctionName = null
    let foundTemplateYml = false
    const Bucket = artifact.location.s3Location.bucketName
    const Key = artifact.location.s3Location.objectKey

    s3.getObject({ Bucket, Key })
      .createReadStream()
      .on('error', (err) => putJobFailure(`could not download artifact from S3: ${err.stack || err}`))
      .pipe(unzip.Parse())
      .on('error', (err) => putJobFailure(`could not parse zip from S3: ${err.stack || err}`))
      .on('entry', (entry) => {
        if (entry.path === 'template.yml') {
          let chunks = []
          foundTemplateYml = true
          entry.on('data', (chunk) => chunks.push(chunk)).on('end', () => {
            const match = Buffer.concat(chunks).toString().match(/FunctionName: (.+)/)
            if (!match || !match[1]) return putJobFailure(`could not find FunctionName in template.yml`)
            destLambdaFunctionName = match[1]
            lambda.updateFunctionCode({
              FunctionName: destLambdaFunctionName,
              Publish: true,
              S3Bucket: Bucket,
              S3Key: Key
            }, (err, data) => {
              if (err) return putJobFailure(`could not update Lambda function code for ${destLambdaFunctionName}: ${err.stack || err}`)
              console.log(data)
              putJobSuccess('lambda code updated')
            })
          })
        } else {
          entry.autodrain()
        }
      })
      .on('end', () => {
        if (!foundTemplateYml) {
          putJobFailure(`could not find template.yml in zip`)
        }
      })
  } catch (err) {
    putJobFailure(err.stack)
  }
}
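
The streaming approach sidesteps the timing problem entirely: nothing is written to /tmp, FunctionName is read from the template.yml entry as the zip streams out of S3, and updateFunctionCode is only called once that value is in hand. Pointing updateFunctionCode at S3Bucket/S3Key instead of passing a ZipFile buffer also means the artifact never has to be held in the function's memory; Lambda fetches it from S3 itself.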