# The beats input does not support the multiline codec
#codec => multiline {
#  pattern => "^\d{4}-\d{2}-\d{2}\s*\d{2}:\d{2}:\d{2}\.\d{3}"
#  negate => true
#  what => "previous"
#}
# Logstash's own multiline filter plugin (alternative)
#multiline {
#  pattern => "^\["
#  negate => true
#  what => "previous"
#}
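Because the beats input cannot apply the multiline codec, multi-line entries (e.g. Java stack traces) are usually merged on the Filebeat side before shipping. A minimal sketch of the corresponding filebeat.yml settings, assuming a recent Filebeat version and a hypothetical log path, with the same timestamp pattern as above:

filebeat.inputs:
- type: log
  paths:
    - /var/log/crm/app.log            # hypothetical log path
  multiline.pattern: '^\d{4}-\d{2}-\d{2}\s*\d{2}:\d{2}:\d{2}\.\d{3}'
  multiline.negate: true
  multiline.match: after              # lines not matching the pattern are appended to the previous event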
Data input configuration
Redis input
input {
  redis {
    host => "ip"
    port => "port"
    password => "password"
    db => "1"
    data_type => "list"
    key => "api-log"    # the Redis key; must match the output configuration in filebeat.yml
    type => "api-log"   # event type
  }
}
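For reference, the Filebeat side that feeds this Redis list uses an output.redis section whose key matches the one above. A minimal sketch, reusing the same placeholder host and credentials as the input block:

output.redis:
  hosts: ["ip:port"]                  # same placeholder host/port as above
  password: "password"
  db: 1
  key: "api-log"                      # must match key in the redis input
  datatype: "list"                    # must match data_type in the redis input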
Beats input
input {
  beats {
    port => 6007
    add_field => { "[fields][class]" => "crm" }
  }
}
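The matching Filebeat output simply points at the Logstash host on the same port. A minimal sketch, assuming a hypothetical hostname:

output.logstash:
  hosts: ["logstash-host:6007"]       # hypothetical hostname; the port must match the beats input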
Filter rule configuration
filter {
  if [type] == "crm-log" {
    # grok expression configuration
    grok {
      # directory containing custom pattern files
      patterns_dir => ["/usr/share/logstash/patterns/java"]
      match => {
        "message" => "(?<crm-log.time>([0-9]{4}-[0-9]{2}-[0-9]{2}\s*[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}))\s*\[%{DATA:crm-log.thread}\]\s*\[%{DATA:crm-log.traceid}\]\s*%{LOGLEVEL:crm-log.level}\s*%{GREEDYDATA:crm-log.class}\s+-\s*%{GREEDYDATA:crm-log.custmsg}\s*\[%{NUMBER:crm-log.spendtime}]\S*\s*(?<crm-log.message>(.|\r|\n)*)"
      }
      add_field => { "crm-log.tmp_field" => "%{crm-log.time}" }
    }
    date {
      match => ["crm-log.time", "ISO8601"]
      target => "@timestamp"
    }
  }
}
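The patterns_dir option points at a directory of extra grok pattern files. The article does not show their contents; a pattern file is simply one NAME REGEX definition per line, which the expression above could then reference as %{NAME:field}. A hypothetical example:

# /usr/share/logstash/patterns/java/custom  -- hypothetical contents
JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]*
TRACEID [a-f0-9]{16,32}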
Data output configuration
output {
  if [type] == "crm-log" {
    # output to Elasticsearch
    elasticsearch {
      hosts => ["http://elasticsearch:9200"]
      index => "logstash-%{type}-%{+YYYY.MM.dd}"
    }
  }
}
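While tuning the grok expression, it can help to add a temporary stdout output so parsed events can be inspected on the console. A minimal sketch, not part of the original pipeline:

output {
  stdout { codec => rubydebug }       # print each parsed event to the console
}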

This article has covered the configuration and use of Logstash: data input from Redis and Beats, grok filter rules for parsing logs, the date plugin for handling the time field, and output to Elasticsearch. The configuration also touches on the multiline codec for handling multi-line logs, as well as matching a specific log format and extracting its fields.