logstash模式(pattern)GitHub地址:https://github.com/logstash-plugins/logstash-patterns-core/tree/master/patterns
logstash grok调试地址:http://grok.qiexun.net/
ELK下载地址:https://www.elastic.co/cn/downloads
filebeat 部分配置
filebeat.inputs:
# Each - is an input. Most options can be set at the input level, so
# you can use different inputs for various configurations.
# Below are the input specific configurations.
- type: log
  # Change to true to enable this input configuration.
  enabled: true
  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /data/log/mylog.log
    #- c:\programdata\elasticsearch\logs\*
  # Merge a multi-line log record into a single event: a line matching
  # multiline.pattern (a yyyy-MM-dd date prefix) starts a new event; lines
  # that do NOT match (negate: true) are appended to the previous event
  # (match: after), e.g. stack traces under the dated first line.
  multiline.pattern: '^([0-9]?[1-9]|[0-9]{2}[1-9][0-9]{1}|[0-9]{1}[1-9][0-9]{2}|[1-9][0-9]{3})-(((0[13578]|1[02])-(0[1-9]|[12][0-9]|3[01]))|((0[469]|11)-(0[1-9]|[12][0-9]|30))|(02-(0[1-9]|[1][0-9]|2[0-8])))'
  multiline.negate: true
  multiline.match: after
  # Tag used by the Logstash filter below to route parsing per application.
  tags: ["my_app1"]
  # Custom fields; referenced in the Logstash output index name as
  # %{[fields][env]} and %{[fields][app_id]}.
  fields:
    app_id: my_app
    env: prod
logstash部分配置
input {
  beats {
    # Port is customizable.
    port => 5000
    type => "log"
  }
}

filter {
  # Strip ANSI escape sequences (terminal colors, fonts, etc.) from the line.
  mutate {
    gsub => ["message", "\u001B\[[;\d]*m", ""]
  }

  # Route parsing by log origin, using the tags set in Filebeat.
  if "my_app1" in [tags] or "my_app2" in [tags] {
    grok {
      # Directory holding the custom pattern definitions.
      patterns_dir => ["/usr/local/logstash-6.4.3/config/pattern"]
      match => {
        # MYDATE and GREEDYDATAALL are custom patterns loaded from patterns_dir.
        message => "%{MYDATE:time}\s*\[%{DATA:thread}\]\s*%{LOGLEVEL:logLevel}\s*%{JAVACLASS:classInfo}\.\s*%{DATA:method}\s*-%{GREEDYDATAALL:message}"
      }
      overwrite => ["message"]
    }
  } else if "my_app3" in [tags] {
    dissect {
      mapping => {
        "message" => "%{time} %{logLevel} %{?useless} --- [%{thread}] %{classInfo} %{?space}: %{message}"
      }
    }
  }

  mutate {
    # Only removes fields inside the event's _source in the emitted JSON.
    remove_field => ["beat", "kafka", "offset", "prospector"]
  }
}

output {
  elasticsearch {
    hosts => [ "http://127.0.0.1:9200" ]
    index => "logstash-%{[fields][env]}-%{[fields][app_id]}-%{+YYYY.MM.dd}"
    document_type => "%{type}"
  }
}
转载于:https://blog.51cto.com/11650412/2316768