How to create a separate index for each input type

Date: 2018-08-24 16:24:22

Tags: logstash-grok logstash-configuration logstash-file

I have the following logstash-syslog.conf file with two different input types, one with type => "syslog" and the other with type => "APIC". I therefore need to create two separate output indices, syslog-2018.08.25 and APIC-2018.08.05.

I want these indices to be created dynamically. I tried index => "%{[type]}-%{+YYYY.MM.dd}", but it did not work and killed Logstash.
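
To rule out a plain syntax error before restarting the service, the file can be syntax-checked first; a minimal check, assuming a standard Logstash 6.x install with bin/logstash run from the install directory, would be:

# validate the pipeline and exit without starting Logstash
$ bin/logstash -f logstash-syslog.conf --config.test_and_exit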

Can you suggest what is wrong with the configuration below? It needs to be corrected both in the configuration itself and in the per-type index names.

Below is the Logstash configuration file:

The Logstash version is 6.2.

$ vi logstash-syslog.conf
input {
  file {
    path => [ "/scratch/rsyslog/*/messages.log" ]
    type => "syslog"
  }
  file {
    path => [ "/scratch/rsyslog/Aug/messages.log" ]
    type => "APIC"
  }
}

filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp } %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    syslog_pri { }
    date {
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }
  if [type] == "APIC" {
    grok {
      match => { "message" => "%{CISCOTIMESTAMP:syslog_timestamp} %{CISCOTIMESTAMP} %{SYSLOGHOST:syslog_hostname} %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
  }
}
output {
  elasticsearch {
    hosts => "noida-elk:9200"
    index => "syslog-%{+YYYY.MM.dd}"
    #index => "%{[type]}-%{+YYYY.MM.dd}"
    document_type => "messages"
  }
}

1 Answer:

Answer 0 (score: 1)

The following configuration solved the problem for me.

$ cat logstash-syslog.conf
input {
  file {
    path => [ "/scratch/rsyslog/*/messages.log" ]
    type => "syslog"
  }
  file {
    path => [ "/scratch/rsyslog/Aug/messages.log" ]
    type => "apic_logs"
  }
}

filter {
  # parse plain syslog lines
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      remove_field => ["@version", "host", "message", "_type", "_index", "_score", "path"]
    }
    syslog_pri { }
    date {
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }
  # parse Cisco APIC syslog lines
  if [type] == "apic_logs" {
    grok {
      match => { "message" => "%{CISCOTIMESTAMP:syslog_timestamp} %{CISCOTIMESTAMP} %{SYSLOGHOST:syslog_hostname} (?<prog>[\w._/%-]+) %{SYSLOG5424SD:f1}%{SYSLOG5424SD:f2}%{SYSLOG5424SD:f3}%{SYSLOG5424SD:f4}%{SYSLOG5424SD:f5} %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      remove_field => ["@version", "host", "message", "_type", "_index", "_score", "path"]
    }
  }
}

# each input type gets its own daily index
output {
  if [type] == "syslog" {
    elasticsearch {
      hosts => "noida-elk:9200"
      manage_template => false
      index => "syslog-%{+YYYY.MM.dd}"
      document_type => "messages"
    }
  }
}

output {
  if [type] == "apic_logs" {
    elasticsearch {
      hosts => "noida-elk:9200"
      manage_template => false
      index => "apic_logs-%{+YYYY.MM.dd}"
      document_type => "messages"
    }
  }
}
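
As a side note, Logstash merges all output sections it loads, so the two conditional outputs above behave exactly as if they were written in one output block. A more compact sketch of the same routing, reusing the host and index names from the configuration above:

output {
  # send each event to a daily index named after its input type
  if [type] == "syslog" {
    elasticsearch {
      hosts => "noida-elk:9200"
      manage_template => false
      index => "syslog-%{+YYYY.MM.dd}"
      document_type => "messages"
    }
  } else if [type] == "apic_logs" {
    elasticsearch {
      hosts => "noida-elk:9200"
      manage_template => false
      index => "apic_logs-%{+YYYY.MM.dd}"
      document_type => "messages"
    }
  }
}

In principle the sprintf form index => "%{type}-%{+YYYY.MM.dd}" from the question can collapse this into a single elasticsearch output, but the explicit conditionals make the per-type routing easier to read and debug.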