# filebeat 核心配置 (Filebeat core configuration)
# NOTE(review): `filebeat.prospectors` was renamed `filebeat.inputs` in
# Filebeat 6.3+ — keep `prospectors` only if running an older release.
filebeat.prospectors:
  - type: log
    enabled: true
    paths:
      - /usr/local/tomcat/logs/system/*.log
    fields:
      # host: 172.10.4.15
      # Consumed by the kafka output below to pick the destination topic.
      log_topics: zzx-api
    # Multiline: a line that does NOT start with '[' is appended to the
    # previous event (negate + after), so stack traces stay in one event.
    multiline.pattern: '^\['
    multiline.negate: true
    multiline.match: after
    # multiline.pattern: '(WARN|DEBUG|ERROR|INFO) \d{4}/\d{2}/\d{2}'

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false

setup.template.settings:
  index.number_of_shards: 3

# Intentionally empty — no Kibana endpoint configured here.
setup.kibana:

output.kafka:
  enabled: true
  hosts: ["172.10.10.64:7092", "172.10.10.64:8092", "172.10.10.64:9092"]
  # Route each event to the Kafka topic named by its `fields.log_topics`.
  topic: '%{[fields.log_topics]}'
# logstash 消费 kafka 的日志并输出到 es 的核心配置
# (Logstash core config: consume logs from Kafka and ship them to Elasticsearch)
input {
  kafka {
    bootstrap_servers => "172.10.10.64:9092,172.10.10.64:8092,172.10.10.64:7092"
    #topics_pattern => "zzx-api"
    topics_pattern => ".*"         # ".*" subscribes to every topic
    consumer_threads => 5
    auto_offset_reset => "latest"
    group_id => "logstash_chigua"
    codec => "json"
    # FIX: required so the source topic is copied into event metadata.
    # Without it, [@metadata][kafka][topic] is never set and the
    # elasticsearch `index` sprintf below would stay a literal string.
    decorate_events => true
  }
}

output {
  elasticsearch {
    hosts => ["172.10.10.86:9200", "172.10.10.87:9200", "172.10.10.88:9200"]
    #index => "%{[fields][log_topics]}-%{+YYYY.MM.dd}"
    # One index per source Kafka topic, suffixed with the event date.
    # FIX: the kafka input exposes the topic as [@metadata][kafka][topic]
    # (not [@metadata][topic]); requires decorate_events => true above.
    index => "%{[@metadata][kafka][topic]}-%{+YYYY-MM-dd}"
  }
  # Mirror every event to stdout for debugging; remove in production.
  stdout {
    codec => rubydebug
  }
}