# Inputs: where events enter the pipeline.
input {
# Plain-TCP listener for client application logs.
tcp {
host => "logstash-server"
port => 8666
# Listen as a server and accept inbound client connections.
mode => "server"
tags => ["cluster"]
type => "cluster"
# Each TCP payload is decoded as a JSON document.
codec => json
}
# Listener for Filebeat shippers running on the Storm nodes.
beats {
port => 8665
tags => ["storm"]
type => "storm"
}
}
# Filters: parse and normalise events before they reach the outputs.
filter {
# Client operation logs carry their payload as JSON inside "message".
if [logger_name] == "com.threadpool.LogExecutor" {
json {
source => "message"
}
mutate {
# Copy the event type into "appname", which the elasticsearch
# output uses to route events to per-application indices.
copy => { "type" => "appname" }
}
}
# Storm logs shipped via Filebeat (typed "storm" by the beats input).
if [type] == "storm" {
grok{
# SYSLOG5424SD matches one bracketed "[...]" section; the first two
# bracketed sections are taken as thread number and log level.
# NOTE(review): assumes Storm log lines start with "[thread] [level]"
# — confirm against the actual log4j pattern.
match => {"message" => "%{SYSLOG5424SD:thread_num} %{SYSLOG5424SD:level}"}
}
grok{
# Extract the ISO8601 timestamp from the raw line into "date".
match => {"message" => "%{TIMESTAMP_ISO8601:date}"}
}
date {
# Parse "date" and overwrite @timestamp so events keep the time
# they were logged rather than the time they were ingested.
match => ["date", "yyyy-MM-dd HH:mm:ss,SSS","ISO8601","yyyy-MM-dd HH:mm:ss"]
target => "@timestamp"
}
mutate {
# "date" was only a scratch field; drop it once @timestamp is set.
remove_field => ["date"]
copy => { "type" => "appname" }
}
}
}
# Outputs: where processed events are delivered.
#
# NOTE(review): the original file accidentally duplicated the whole
# filter/output section and pasted the tcp/beats *input* definitions inside
# this output block. "beats" is not an output plugin and "mode"/"tags"/"type"
# are input-side options, so the config would fail to load; the duplicated
# filter would run every filter twice and the duplicated elasticsearch
# output would index every event twice. The duplicates are removed here.
output {
# Echo every event to the console for debugging; remove in production.
stdout {
codec => rubydebug
}
# One daily index per application, named from the "appname" field
# that the filter stage copies from the event type.
elasticsearch {
hosts => "elasticsearch-server:9200"
index => "%{[appname]}-%{+YYYY.MM.dd}"
user => "admin"
password => "admin"
}
}
客户端日志通过TCP发送到服务端指定端口,配置logstash接收相应日志;
storm计算节点通过filebeat扫描storm日志文件,发送到logstash服务端,配置logstash接收来自filebeat的日志;
解析客户端日志,将操作日志分离出来并赋值模块名称;
解析storm日志,将时间戳和日志级别字段解析出来并覆盖掉logstash内部的相应json字段;
移除解析过程中创建的无用字段。