I've been working on an AWS Lambda function that reacts to the notification an S3 bucket emits every time it receives logs from CloudFront. I am able to uncompress the log bundle and parse it with CloudFront Parser.
However, I still can't send the parsed records to an Elasticsearch endpoint. I've mostly been working with winston-elasticsearch and s3-to-logstore, but neither worked.
Here is my code:
// Dependencies (CloudFrontParser is assumed to be the cloudfront-log-parser npm package)
var aws = require('aws-sdk');
var async = require('async');
var zlib = require('zlib');
var CloudFrontParser = require('cloudfront-log-parser');

var s3 = new aws.S3();

exports.handler = function(event, context, callback) {
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey = event.Records[0].s3.object.key;

    async.waterfall([
        function fetchLogFromS3(next) {
            console.log('Fetching compressed log from S3...');
            s3.getObject({
                Bucket: srcBucket,
                Key: srcKey
            }, next);
        },
        function uncompressLog(response, next) {
            console.log('Uncompressing log...');
            zlib.gunzip(response.Body, next);
        },
        function publishNotifications(jsonBuffer, next) {
            console.log('Filtering log...');
            var json = jsonBuffer.toString();
            console.log('CloudFront JSON from S3:', json);
            var records;
            CloudFrontParser.parse(json, { format: 'web' }, function(err, accesses) {
                if (err) {
                    console.log(err);
                } else {
                    records = accesses;
                }
            });
            // Here, how to send the parsed data?
            console.log('CloudFront parsed:', records);
        }
    ], function(err) {
        if (err) {
            console.error('Failed to send data: ', err);
        } else {
            console.log('Successfully sent data.');
        }
        callback(null, 'message');
    });
};
Is there a simple way to send the data to ES?
Something like this:
var elasticsearch = require('elasticsearch');

var client = new elasticsearch.Client({
    host: process.env.ES_HOST,
    log: 'trace',
    keepAlive: false
});

client.index({
    index: 'cloudfront_index',
    type: 'log',
    body: records
}, function(err, resp, status) {
    console.log(resp);
});
It runs, but no data is actually sent:
GET cloudfront_index/_search
{
  "took": 0,
  "timed_out": false,
  "_shards": {
    "total": 5,
    "successful": 5,
    "skipped": 0,
    "failed": 0
  },
  "hits": {
    "total": 0,
    "max_score": null,
    "hits": []
  }
}
Answer (score: 1)
You're almost there. You need to use the bulk method to achieve what you want:
var elasticsearch = require('elasticsearch');

var client = new elasticsearch.Client({
    host: process.env.ES_HOST,
    log: 'trace',
    keepAlive: false
});

// Build a bulk body: one action line ({"index": {}}) followed by each document
var bulk = [];
records.forEach(function(record) {
    bulk.push({ "index": {} });
    bulk.push(record);
});

client.bulk({
    index: 'cloudfront_index',
    type: 'log',
    body: bulk
}, function(err, resp, status) {
    console.log(resp);
});
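One more thing to watch out for: CloudFrontParser.parse is callback-based, so in the original handler records is still undefined when the waterfall step returns, and next is never called, which means the Lambda can finish before anything reaches ES. Below is a minimal sketch of how the bulk call could be wired into that waterfall step, assuming the same elasticsearch client, parser, and the hypothetical index/type names used above:

function publishNotifications(jsonBuffer, next) {
    var json = jsonBuffer.toString();

    CloudFrontParser.parse(json, { format: 'web' }, function(err, accesses) {
        if (err) {
            // Propagate parse errors to the waterfall's final callback
            return next(err);
        }

        // Build the bulk body inside the parse callback, since parsing is asynchronous
        var bulk = [];
        accesses.forEach(function(record) {
            bulk.push({ "index": {} });
            bulk.push(record);
        });

        client.bulk({
            index: 'cloudfront_index',
            type: 'log',
            body: bulk
        }, next); // let the waterfall complete only after ES has responded
    });
}

With log: 'trace' enabled on the client, the bulk response printed to CloudWatch should show per-item errors if any documents are rejected, which helps confirm whether the data actually reached the index.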