How to create a separate index for each input type

I have the logstash-syslog.conf file below, which has two different input types, one with type => "syslog" and the other with type => "APIC". I need to create two separate output indices, syslog-2018.08.25 and APIC-2018.08.05.

I want these indices to be created dynamically. I tried index => "%{[type]}-%{+YYYY.MM.dd}", but it did not work and killed Logstash.
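
For context, the output block with that dynamic index looked roughly like this (it is the line left commented out in the full config below):

output {
  elasticsearch {
    hosts => "noida-elk:9200"
    # index name built from the event's type field plus the date
    index => "%{[type]}-%{+YYYY.MM.dd}"
    document_type => "messages"
  }
}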

Could you suggest what I am doing wrong in the configuration below and what needs to be fixed, both in the config and in the index type?

Below is the Logstash configuration file:

Logstash version: 6.2

$ vi logstash-syslog.conf
input {
  file {
    path => [ "/scratch/rsyslog/*/messages.log" ]
    type => "syslog"
  }
  file {
    path => [ "/scratch/rsyslog/Aug/messages.log" ]
    type => "APIC"
  }
}

filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    syslog_pri { }
    date {
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }
  if [type] == "APIC" {
    grok {
      match => { "message" => "%{CISCOTIMESTAMP:syslog_timestamp} %{CISCOTIMESTAMP} %{SYSLOGHOST:syslog_hostname} %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
  }
}
output {
  elasticsearch {
    hosts => "noida-elk:9200"
    index => "syslog-%{+YYYY.MM.dd}"
    #index => "%{[type]}-%{+YYYY.MM.dd}"
    document_type => "messages"
  }
}
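
For reference, the config syntax can be checked before starting Logstash (assuming a standard install where the binary lives at bin/logstash):

$ bin/logstash --config.test_and_exit -f logstash-syslog.conf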

This is fixed for me now; the configuration below works for me:

 $ cat logstash-syslog.conf
    input {
      file {
        path => [ "/scratch/rsyslog/*/messages.log" ]
        type => "syslog"
      }
      file {
        path => [ "/scratch/rsyslog/Aug/messages.log" ]
        type => "apic_logs"
      }
    }

    filter {
      if [type] == "syslog" {
        grok {
          match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
          add_field => [ "received_at", "%{@timestamp}" ]
          remove_field => ["@version", "host", "message", "_type", "_index", "_score", "path"]
        }
        syslog_pri { }
        date {
          match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
        }
      }
      if [type] == "apic_logs" {
        grok {
          match => { "message" => "%{CISCOTIMESTAMP:syslog_timestamp} %{CISCOTIMESTAMP} %{SYSLOGHOST:syslog_hostname} (?<prog>[\w._/%-]+) %{SYSLOG5424SD:f1}%{SYSLOG5424SD:f2}%{SYSLOG5424SD:f3}%{SYSLOG5424SD:f4}%{SYSLOG5424SD:f5} %{GREEDYDATA:syslog_message}" }
          add_field => [ "received_at", "%{@timestamp}" ]
          remove_field => ["@version", "host", "message", "_type", "_index", "_score", "path"]
        }
      }
    }
    output {
      if [type] == "syslog" {
        elasticsearch {
          hosts => "noida-elk:9200"
          manage_template => false
          index => "syslog-%{+YYYY.MM.dd}"
          document_type => "messages"
        }
      }
    }

    output {
      if [type] == "apic_logs" {
        elasticsearch {
          hosts => "noida-elk:9200"
          manage_template => false
          index => "apic_logs-%{+YYYY.MM.dd}"
          document_type => "messages"
        }
      }
    }
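
A note on the original attempt: index => "%{[type]}-%{+YYYY.MM.dd}" is valid sprintf syntax, but with type => "APIC" it expands to APIC-2018.08.25, and Elasticsearch rejects index names containing uppercase letters, which may be why that index was never created. With both types already lowercase (syslog and apic_logs), a single output that interpolates the type into the index name should also work as an alternative to the two conditional outputs above; a minimal sketch:

    output {
      elasticsearch {
        hosts => "noida-elk:9200"
        manage_template => false
        # index derived from the lowercase type field, e.g. syslog-2018.08.25 or apic_logs-2018.08.25
        index => "%{type}-%{+YYYY.MM.dd}"
        document_type => "messages"
      }
    }

Once events flow, the per-day indices can be confirmed on the Elasticsearch side, for example via the _cat/indices API.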