Suricata安装在ELK堆栈建议之上?

时间:2015-04-08 12:24:27

标签: logging elasticsearch logstash logstash-grok

我使用安装在一系列不同Linux机器上的logstash-forwarders为日志收集创建了一个ELK堆栈,这非常有效。

我现在正在考虑将Suricata安装到主ELK堆栈上以开始使用IDS / IPS功能

我的第一个问题是:是否只需要在主ELK盒子上安装Suricata,并修改该盒子上的conf文件以及各个logstash-forwarder的配置?也就是说,Suricata只需要安装在一个盒子上吗?

其次,我尝试更改conf文件以允许suricata,因此我列出了我的conf文件以用于logstash和下面的logstash转发器

文件13-suricata.conf是我尝试将其带入logstash conf文件但是我不确定这是否是正确的方法而且我不确定如何处理logstash-forwarder conf甚至?

任何帮助都会很棒

/etc/logstash/conf.d$ ls 
01-lumberjack-input.conf  11-sshlog.conf  13-suricata.conf
10-syslog.conf            12-apache.conf  30-lumberjack-output.conf

01-lumberjack-input.conf

# Lumberjack input: receives events shipped by logstash-forwarder agents
# over TLS on port 5000; every incoming event gets type "logs" unless the
# shipper already set one.
input   {
  lumberjack    {
    port => 5000
    type => "logs"
    ssl_certificate => "/etc/pki/tls/certs/logstash-forwarder.crt"
    ssl_key => "/etc/pki/tls/private/logstash-forwarder.key"
  }

}

10-syslog.conf

# Parse syslog-type events: split the raw message into timestamp, host,
# program, pid and message, then use the event's own timestamp as @timestamp.
# Fixed: machine translation had garbled this block ("过滤器" instead of the
# `filter` keyword, and broken quoting in the type comparison), which would
# not parse as a Logstash config.
filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      # Preserve ingestion time and shipping host before @timestamp is rewritten.
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    # Decode the syslog priority/facility/severity fields.
    syslog_pri { }
    date {
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }
}

11-sshlog.conf

# Tag and geolocate SSH authentication events (type "sshlog").
filter {
if [type] == "sshlog" {
  # Failed logins: extract user, source IP and port, tag as brute force.
  # NOTE(review): `type =>` inside grok is a deprecated plugin option; the
  # surrounding `if [type] == "sshlog"` conditional already scopes this
  # filter — confirm the Logstash version in use still accepts it.
  grok {
    type => "sshlog"
    match => {"message" => "Failed password for (invalid user |)%{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2"}
    add_tag => "ssh_brute_force_attack"
  }

  # Successful logins.
  # NOTE(review): tag "ssh_sucessful_login" is misspelled ("sucessful");
  # kept as-is since saved searches/dashboards may already rely on it.
  grok {
    type => "sshlog"
    match => {"message" => "Accepted password for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2"}
    add_tag => "ssh_sucessful_login"
  }

  # Annotate the event with GeoIP data derived from the source address.
  geoip {
    source => "src_ip"
  }
}
}

12-apache.conf

# Parse Apache access logs (type "apache-access") with the stock
# COMBINEDAPACHELOG grok pattern.
filter {
  if [type] == "apache-access" {
    grok {
      match => { "message" => "%{COMBINEDAPACHELOG}" }
    }
  }
}

13-suricata.conf

    # Parse Suricata EVE JSON events (type "SuricataIDPS") and enrich them
    # with GeoIP data keyed on src_ip, falling back to dest_ip.
    filter {
      if [type] == "SuricataIDPS" {
        # Use the event's own ISO8601 "timestamp" field as @timestamp.
        date {
          match => [ "timestamp", "ISO8601" ]
        }
        # For fileinfo events, derive a short file type from the libmagic
        # description (everything before the first comma).
        ruby {
          code => "if event['event_type'] == 'fileinfo'; event['fileinfo']['type']=event['fileinfo']['magic'].to_s.split(',')[0]; end;"
        }
      }

      if [src_ip]  {
        geoip {
          source => "src_ip"
          target => "geoip"
          #database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat"
          # Build [geoip][coordinates] as [lon, lat] — the array order
          # Elasticsearch expects for a geo_point.
          add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
          add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}"  ]
        }
        mutate {
          convert => [ "[geoip][coordinates]", "float" ]
        }
        # Fall back to the destination address when the source lookup failed.
        # Fixed: the original wrote `if ![geoip.ip]`, which is not valid
        # Logstash field-reference syntax; nested fields are written
        # [geoip][ip].
        if ![geoip][ip] {
          if [dest_ip]  {
            geoip {
              source => "dest_ip"
              target => "geoip"
              #database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat"
              add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
              add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}"  ]
            }
            mutate {
              convert => [ "[geoip][coordinates]", "float" ]
            }
          }
        }
      }
    }

30-lumberjack-output.conf

# Ship all events to the local Elasticsearch instance and echo them to
# stdout for debugging.
output {
  # Fixed: string values in the Logstash config language must be quoted;
  # the bareword `localhost` is a config parse error.
  elasticsearch { host => "localhost" }
  stdout { codec => rubydebug }
}

logstash-forwarder conf

{
  "files": [
    {
      "paths": [
        "/var/log/syslog",
        "/var/log/auth.log"
      ],
      "fields": { "type": "syslog" }
    },
    {
      "paths": [
        "/var/log/apache2/*.log"
      ],
      "fields": { "type": "apache-access" }
    },
    {
      "paths": [
        "/var/log/auth*.log"
      ],
      "fields": { "type": "sshlog" }
    },
    {
      "paths": [ "/var/log/suricata/eve.json" ],
      "fields": { "type": "SuricataIDPS" }
    }
  ]
}

1 个答案:

答案 0 :(得分:0)

必须在两台服务器上都安装Suricata,并进行一些配置更改,才能使其以JSON格式输出数据。

此外,上面发布的所有内容并非都是必需的。