0

您好,以下是我在 centos6 logstash 服务器中的配置。我正在使用logstash 1.4.2 和elasticsearch 1.2.1。我从 /var/log/messages 和 /var/log/secure 转发日志,时间格式是“Sep 1 22:15:34”

1. input.conf

# Receive events from logstash-forwarder clients over the lumberjack
# protocol, secured with the same certificate the clients trust.
input {
  lumberjack {
    port => 5000
    type => "logs"
    ssl_certificate => "certs/logstash-forwarder.crt"
    ssl_key => "private/logstash-forwarder.key"
  }
}

2.filter.conf

filter {
  if [type] == "syslog" {

grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
syslog_pri { }
date {
        locale => "en"  // possibly this didn't work in logstash 1.4.2
        match => ["syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss", "ISO8601"]
        add_field => { "debug" => "timestampMatched"}
        timezone => "UTC"
   }
ruby {   code => "event['@timestamp'] = event['@timestamp'].getlocal"}                                        //I saw somewhere instead of "locale => en " we have to use this  in logstash 1.4.2
mutate { replace => [ "syslog_timestamp", "%{syslog_timestamp} +0545" ] }                               //this probably won't work and give date parsing error
 }
}

3. output.conf

# Index every event into Elasticsearch and also echo it to stdout
# (rubydebug) for debugging.
output {
  elasticsearch { host => "logstash_server_ip" }
  stdout { codec => rubydebug }
}

以下是所有客户端服务器中的logstash-forwarder conf

{
  "network": {
    "servers": [ "logstash_server_ip:5000" ],
    "timeout": 15,
    "ssl ca": "certs/logstash-forwarder.crt"
  },
  "files": [
    {
      "paths": [
        "/var/log/messages",
        "/var/log/secure"
       ],
      "fields": { "type": "syslog" }
    }
   ]
}

这是问题所在。我正在转发来自 5 个具有不同时区的服务器的日志,例如:EDT、NDT、NST、NPT。logstash_server 时区采用 NPT(尼泊尔时间)[UTC + 5:45]

所有服务器提供以下

2014/09/02 08:09:02.204882 Setting trusted CA from file: certs/logstash-forwarder.crt
2014/09/02 08:09:02.205372 Connecting to logstash_server_ip:5000 (logstash_server_ip)
2014/09/02 08:09:02.205600 Launching harvester on new file: /var/log/secure
2014/09/02 08:09:02.205615 Starting harvester at position 5426763: /var/log/messages
2014/09/02 08:09:02.205742 Current file offset: 5426763
2014/09/02 08:09:02.279715 Starting harvester: /var/log/secure
2014/09/02 08:09:02.279756 Current file offset: 12841221
2014/09/02 08:09:02.638448 Connected to logstash_server_ip
2014/09/02 08:09:09.998098 Registrar received 1024 events
2014/09/02 08:09:15.189079 Registrar received 1024 events

我原以为这样就没问题了,但只有时区为 NPT 的那台服务器的日志能在 Kibana 中看到;其他服务器的 logstash-forwarder 也输出了上面的日志,它们的数据却没有出现在 Kibana 中。我认为问题出在 date 过滤器上,因为它无法解析来自不同时区服务器的日期。此外,logstash 自身的日志中也没有显示任何错误。

在这种情况下如何解决问题?

4

1 回答 1

0

在 logstash-forwarder 配置更改中

"fields": { "type": "syslog" }

"fields": { "type": "syslog", "syslog_timezone": "Asia/Kathmandu" }

并将 filter.conf 更改为

# Parse syslog lines, then choose the date filter's "timezone" option per
# event, based on the "syslog_timezone" field that each client's
# logstash-forwarder config attaches to its events.
filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    syslog_pri { }
    # Dispatch on the client-supplied timezone tag: one date block per
    # known zone (add more branches for each timezone your clients send).
    if [syslog_timezone] in [ "Asia/Kathmandu" ] {
      date {
        match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
        remove_field => [ "syslog_timezone" ]
        timezone => "Asia/Kathmandu"
      }
    } else if [syslog_timezone] in [ "America/Chicago", "US/Central" ] {
      date {
        match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
        remove_field => [ "syslog_timezone" ]
        timezone => "America/Chicago"
      }
    } else if [syslog_timezone] =~ /.+/ {
      # Field present but not a zone we recognize: parse as UTC and tag
      # the event so unknown zones can be found and added later.
      date {
        match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
        add_tag => [ "unknown_timezone" ]
        timezone => "Etc/UTC"
      }
    } else {
      # No syslog_timezone field at all: fall back to UTC.
      date {
        match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
        timezone => "Etc/UTC"
      }
    }
  }
}
于 2015-03-20T23:26:19.053 回答