在Node.js中使用grok解析日志

时间:2017-02-04 12:53:42

标签: node.js logging logstash-grok

我正在尝试使用grok解析一些日志,但是当日志行看起来有点不一样时我会遇到一些麻烦......

我的日志文件可以这样说:

[2017-02-03 19:15:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
[2017-02-03 19:25:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
[2017-02-03 19:26:20,605] INFO Rolled new log segment for 'omega-replica-sync-dev-8' in 21 ms. (kafka.log.Log)
[2017-02-03 19:26:20,605] INFO Scheduling log segment 1 for log omega-replica-sync-dev-8 for deletion. (kafka.log.Log)
[2017-02-03 19:27:20,606] INFO Deleting segment 1 from log omega-replica-sync-dev-8. (kafka.log.Log)

我当前的节点代码如下所示:

'use strict';

var nodegrok = require('node-grok');
var Regex = require("regex");
var zlib = require('zlib');

var msg = '[2017-02-03 19:15:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)\n[2017-02-03 19:25:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)\n[2017-02-03 19:26:20,605] INFO Rolled new log segment for \'omega-replica-sync-dev-8\' in 21 ms. (kafka.log.Log)\n[2017-02-03 19:26:20,605] INFO Scheduling log segment 1 for log omega-replica-sync-dev-8 for deletion. (kafka.log.Log)\n[2017-02-03 19:27:20,606] INFO Deleting segment 1 from log omega-replica-sync-dev-8. (kafka.log.Log)'

console.log('message: ', msg);

// Grok pattern for lines shaped: [timestamp] LEVEL [component]: message
var p2 = '\\[%{TIMESTAMP_ISO8601:timestamp}\\] %{LOGLEVEL:level} \\[%{DATA:message1}\\]: %{GREEDYDATA:message2}'

// Load the default grok pattern set and compile the pattern ONCE.
// The original code did this inside the loop, redoing the same
// (relatively expensive) load/compile work for every line.
var patterns = nodegrok.loadDefaultSync();
var pattern = patterns.createPattern(p2);

var lines = msg.toString().split('\n');

for(var i = 0;i < lines.length;i++){

    console.log('line [i]:', lines[i])
    // parseSync returns an object of named captures, or null when the
    // line does not match the pattern.
    console.log('pattern:', pattern.parseSync(lines[i]));

}

但最后三行似乎输出null……因为它们缺少模式中的第三部分（方括号内的组件名）。

line [i]: [2017-02-03 19:15:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
pattern: { timestamp: '2017-02-03 19:15:51,112',
  level: 'INFO',
  message1: 'Group Metadata Manager on Broker 1',
  message2: 'Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)' }
line [i]: [2017-02-03 19:25:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
pattern: { timestamp: '2017-02-03 19:25:51,112',
  level: 'INFO',
  message1: 'Group Metadata Manager on Broker 1',
  message2: 'Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)' }
line [i]: [2017-02-03 19:26:20,605] INFO Rolled new log segment for 'omega-replica-sync-dev-8' in 21 ms. (kafka.log.Log)
pattern: null
line [i]: [2017-02-03 19:26:20,605] INFO Scheduling log segment 1 for log omega-replica-sync-dev-8 for deletion. (kafka.log.Log)
pattern: null
line [i]: [2017-02-03 19:27:20,606] INFO Deleting segment 1 from log omega-replica-sync-dev-8. (kafka.log.Log)
pattern: null

如何在grok中格式化不同格式的行?

1 个答案:

答案 0 :(得分:0)

所以这是我摸索出的一个可行方法……本质上是先用if语句检查第一个模式是否匹配，再决定用哪个模式解析。但是，如果有6种可能的日志格式怎么办？难道我就得写6层嵌套的if语句吗？这听起来可不是什么高效的办法……有更好的方法吗？

'use strict';

var nodegrok = require('node-grok');
var Regex = require("regex");
var zlib = require('zlib');

var msg = '[2017-02-03 19:15:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)\n[2017-02-03 19:25:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)\n[2017-02-03 19:26:20,605] INFO Rolled new log segment for \'omega-replica-sync-dev-8\' in 21 ms. (kafka.log.Log)\n[2017-02-03 19:26:20,605] INFO Scheduling log segment 1 for log omega-replica-sync-dev-8 for deletion. (kafka.log.Log)\n[2017-02-03 19:27:20,606] INFO Deleting segment 1 from log omega-replica-sync-dev-8. (kafka.log.Log)'

console.log('message: ', msg);

// Most specific pattern first: [timestamp] LEVEL [component]: message
var p = '\\[%{TIMESTAMP_ISO8601:timestamp}\\] %{LOGLEVEL:level} \\[%{DATA:message1}\\]: %{GREEDYDATA:message2}'
// Fallback for lines without the bracketed component: [timestamp] LEVEL message
var p2 = '\\[%{TIMESTAMP_ISO8601:timestamp}\\] %{LOGLEVEL:level} %{GREEDYDATA:message2}'

// Load the default pattern set and compile BOTH patterns once, outside
// the loop — the original recompiled them for every line.
var patterns = nodegrok.loadDefaultSync();
var primary = patterns.createPattern(p);
var fallback = patterns.createPattern(p2);

var lines = msg.toString().split('\n');

for(var i = 0;i < lines.length;i++){

    console.log('line [i]:', lines[i])

    // Parse each line exactly once with the primary pattern; the
    // original called parseSync twice per line (once for the null
    // check, once for logging). Only on a miss do we try the fallback.
    var result = primary.parseSync(lines[i]);

    if (result == null) {
        console.log('patternf:', fallback.parseSync(lines[i]));
    } else {
        console.log('pattern:', result);
    }

}