【Graylog】比较常用的pipeline规则

毫秒转换为yyyy-MM-dd HH:mm:ss日志格式

// Convert the unix-epoch millisecond timestamp stored in "receiveDate"
// into a human-readable "yyyy-MM-dd HH:mm:ss" string (Asia/Shanghai)
// and store it in "ORG_EVENT_TIME".
rule "receiveDate_alignment"
when
  has_field("receiveDate")
then
    // parse_unix_milliseconds() takes a single long argument; the original
    // passed a spurious second argument "CST" that is not in its signature.
    let m = parse_unix_milliseconds(to_long($message.receiveDate));
    // Format the DateTime in the Asia/Shanghai timezone.
    let n = format_date(m,"yyyy-MM-dd HH:mm:ss","Asia/Shanghai");
    set_field("ORG_EVENT_TIME",n);
end

时间格式化及时区格式化

// When the raw message contains the literal "ERROR" (case-sensitive),
// keep a copy of the original text in "raw_message" and overwrite
// "message" with a Chinese alert prefix followed by the message
// timestamp reformatted as "yyyy-MM-dd HH:mm:ss" in Asia/Shanghai.
rule "parse_log"
when
  contains(value: to_string($message.message), search: "ERROR", ignore_case: false)
then
  // Preserve the untouched original message before rewriting it.
  set_field("raw_message", to_string($message.message));
  // NOTE(review): to_date(..., "CST") uses a timezone abbreviation; "CST" is
  // ambiguous (China/US/Cuba) — confirm Graylog resolves it as intended.
  set_field("message", concat("警告:匹配到深证通程序日志有异常关键字,请注意查看!",to_string(format_date(to_date($message.timestamp,"CST"),"yyyy-MM-dd HH:mm:ss","Asia/Shanghai"))));
end

format_date(to_date($message.timestamp,"CST"),"yyyy-MM-dd HH:mm:ss","Asia/Shanghai")

2022-03-03T06:46:26.354Z  => 2022-03-03 14:46:26

特殊时间格式对齐(filebeat指标)

// Align filebeat-style timestamps such as "2022-09-16T13:56:39.278+0800":
// capture the portion before the "+" offset, replace the "T" separator with
// a space (the DSL fails to parse it otherwise), and set "timestamp".
rule "time_duiqi2"
when
  true
then
  // First capture group = everything up to (not including) the "+" offset,
  // e.g. "2022-09-16T13:56:39.278". Group results are indexed from "0".
  let m = regex("(\\S+)\\+",to_string($message.message));
  // parse_date()'s locale parameter must be an IETF BCP 47 language tag
  // such as "en-US"; the original value "locale.US" is not a valid tag.
  set_field("timestamp",parse_date(replace(to_string(m["0"]),"T"," "),"yyyy-MM-dd HH:mm:ss.SSS","en-US","Asia/Shanghai"));
end

filebeat日志时间格式为2022-09-16T13:56:39.278+0800,由于中间带T操作时解析总无法实现,把T替换掉为空格即可

判断条件的跃进

// Demonstrates a numeric comparison in the "when" clause: fires only
// when alert_level (coerced to long) is strictly greater than 3.
rule "test rule"
when
  // Numeric comparisons are supported in rule conditions.
  to_long($message.alert_level) > 3
then
  set_field("test_field", "test succ");
end

判断条件的正则匹配(常用的contains不支持正则)

// Flag messages whose "src_ip" does not look like a dotted-quad IPv4
// address. contains() has no regex support, so the condition uses
// regex(...).matches == false as a negated regex match instead.
rule "src_ip_v6"
when
has_field("src_ip") &&
  regex("\\d+\\.\\d+\\.\\d+\\.\\d+", to_string($message.src_ip)).matches == false
then
    set_field("reason","maybe v6");
end

根据Pri分析出Facility和Level字段值

//把22解析成pri

<22>Jul 13 17:25:01 localhost postfix/qmgr[1915]: CB3FF60AF6F5: removed 

// Decode a raw syslog priority value (the "pri" field, e.g. 22 from
// "<22>...") into its textual facility and severity level, and store
// them as the "facility" and "level" fields.
rule "conv"
when
    true
then
    // expand_syslog_priority_as_string returns an object with
    // .facility and .level string members.
    let m = expand_syslog_priority_as_string(to_string($message.pri));
    set_fields({facility: m.facility, level:m.level});
end

查询表(lookuptable)功能使用csv适配器实现多值

csv文件格式如下

"http_response_code"|"multivalue"
"200"|"中国#测试"

规则如下:

// Look up http_response_code in the "csv_test" CSV lookup table, whose
// value column packs several values separated by "#" (e.g. "中国#测试"),
// then split that value and store the parts as separate fields.
rule "parse_lookuptable_multivalue"
when
  has_field("http_response_code")
then
  let lookup_multivalue = lookup("csv_test",to_string($message.http_response_code));
  // split(pattern, value): "#" separates the packed values.
  let multivalue = split("#",to_string(lookup_multivalue.value));
  // NOTE(review): field name "localte" looks like a typo for "locale" —
  // confirm before renaming, since downstream searches may depend on it.
  set_field("localte",to_string(multivalue[0]));
  set_field("desc",to_string(multivalue[1]));
end

效果如下:

JSON格式解析

// Parse the whole message body as JSON and promote every top-level
// key/value pair to a message field.
rule "parse_json"
when
  true
then
  let m = parse_json(to_string($message.message));
  // to_map() converts the parsed JSON tree so set_fields() can expand it.
  set_fields(to_map(m));
end

json格式解析(大json解析指定数据)

// Extract specific metrics from a large JSON payload that is preceded by
// a non-JSON log prefix (timestamp, level, logger, ...). The regex strips
// the prefix, then JSONPath selects only the load averages.
rule "parse"
when
  true
then
  // Capture from the first "{" to the end of the message; m["0"] is the
  // first capture group (the JSON document).
  let m = regex(".*?(\\{.*)",to_string($message.message));
  let x = parse_json(to_string(m["0"]));
  // Pull the 1/5/15-minute load averages out of the filebeat monitoring
  // payload without expanding the entire JSON tree into fields.
  let new_fields = select_jsonpath(x,
            {
            load1: "$.monitoring.metrics.system.load.1",
            load5: "$.monitoring.metrics.system.load.5",
            load15: "$.monitoring.metrics.system.load.15"
            });
  set_fields(new_fields);
  
end

可以从以下json中获取load相关指标

2022-09-15T11:06:53.451+0800	INFO	[monitoring]	log/log.go:145	Non-zero metrics in the last 30s	{"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":1450,"time":{"ms":3}},"total":{"ticks":3160,"time":{"ms":25},"value":3160},"user":{"ticks":1710,"time":{"ms":22}}},"handles":{"limit":{"hard":65536,"soft":65536},"open":14},"info":{"ephemeral_id":"0aed24ea-c8cd-429a-ae16-b701558bebdb","uptime":{"ms":3870086}},"memstats":{"gc_next":23807808,"memory_alloc":12150784,"memory_total":160990448},"runtime":{"goroutines":43}},"filebeat":{"events":{"added":1,"done":1},"harvester":{"files":{"041fdc0c-82ad-495a-9244-54966725bfe5":{"last_event_published_time":"2022-09-15T11:06:29.727Z","last_event_timestamp":"2022-09-15T11:06:29.727Z","read_offset":1231,"size":1231}},"open_files":1,"running":1}},"libbeat":{"config":{"module":{"running":0}},"output":{"events":{"acked":1,"batches":1,"total":1}},"outputs":{"kafka":{"bytes_read":60,"bytes_write":1124}},"pipeline":{"clients":2,"events":{"active":0,"published":1,"total":1},"queue":{"acked":1}}},"registrar":{"states":{"current":2,"update":1},"writes":{"success":1,"total":1}},"system":{"load":{"1":0.09,"15":0.29,"5":0.27,"norm":{"1":0.0113,"15":0.0363,"5":0.0338}}}}}}

嵌套json解析

json示例

{"host":{"host":"192.168.100.12","name":"B-BJ-HW-S5720-03_04"},"groups":["Templates/Network devices","DT_network","网络设备","路由器"],"applications":["Interface GigabitEthernet1/0/2()"],"itemid":67697,"name":"Interface GigabitEthernet1/0/2(): Bits 接收","clock":1672724928,"ns":434058026,"value":46440,"type":3}

解析规则

// Two-pass parse for nested JSON (workaround for nesting_parse_json()
// being removed in Graylog 4): parse the outer document, re-serialize the
// nested "host" object into a field, parse that field again, and expand
// the inner keys with a "host_" prefix to avoid name collisions with the
// outer keys.
rule "Data Parsing"
when
    true
then
    // Pass 1: outer JSON document.
    let msg = parse_json(to_string($message.message));
    // Stash the nested "host" object as a string field so it can be
    // re-parsed below.
    let prop = select_jsonpath(msg, {host: "$.host"});
    set_field("host_string", to_string(prop.host));

    // Pass 2: parse the stashed inner object; prefix its fields with
    // "host_" so they cannot clobber (or be clobbered by) outer fields.
    let props = parse_json(to_string($message.host_string));
    set_fields(to_map(props),"host_");
    set_fields(to_map(msg));
end

思路: 由于graylog4删减了部分函数,比如好用的nesting_parse_json()用于解析嵌套函数,所以通过社区找到一个案例照着改了一下。

大概的实现过程是这样的,首先先把所有的json外层解析出来,然后针对内层的host字段进行二次解析,解析后的内层host为了防止字段名称冲突导致覆盖或字段类型不同,在set_fields时加上前缀。

## 什么是graylog Graylog 是一个简单易用、功能较全面的日志管理工具,相比 ELK 组合, 优点: - 部署维护简单 - 查询语法简单易懂(对比ES的语法…) - 内置简单的告警 - 可以将搜索结果导出为 json - 提供简单的聚合统计功能 - UI 比较友好 - 当然, 拓展性上比 ELK 差很多。 整套依赖: - Graylog 提供 graylog 对外接口 - Elasticsearch 日志文件的持久化存储和检索 - MongoDB 只是存储一些 Graylog 的配置 ## 安装 > 可以是裸机安装,也可以是docker安装,这里用docker安装 环境要求: - centos7.4 - cpu2个 内存2G 参考: https://hub.docker.com/r/graylog2/graylog/ ### 环境准备 ``` mkdir /root/graylog && cd /root/graylog //挂载目录 mkdir -p mongo_data graylog_journal es_data //配置文件目录 mkdir -p ./graylog/config cd ./graylog/config wget https://raw.githubusercontent.com/Graylog2/graylog-docker/3.0/config/graylog.conf wget https://raw.githubusercontent.com/Graylog2/graylog-docker/3.0/config/log4j2.xml //提前准备镜像 docker pull mongo:3 docker pull graylog/graylog:3.0 docker pull elasticsearch:5.6.9 ``` ### docker-compose.yml ``` version: '2' services: # MongoDB: https://hub.docker.com/_/mongo/ mongo: image: mongo:3 volumes: - ./mongo_data:/data/db - /etc/localtime:/etc/localtime # Elasticsearch: https://www.elastic.co/guide/en/elasticsearch/reference/5.5/docker.html elasticsearch: image: elasticsearch:5.6.9 volumes: - ./es_data:/usr/share/elasticsearch/data - /etc/localtime:/etc/localtime environment: - http.host=0.0.0.0 - transport.host=localhost - network.host=0.0.0.0 # Disable X-Pack security: https://www.elastic.co/guide/en/elasticsearch/reference/5.5/security-settings.html#general-security-settings - xpack.security.enabled=false - "ES_JAVA_OPTS=-Xms512m -Xmx512m" ulimits: memlock: soft: -1 hard: -1 mem_limit: 1g # Graylog: https://hub.docker.com/r/graylog/graylog/ graylog: image: graylog/graylog:3.0 volumes: - ./graylog_journal:/usr/share/graylog/data/journal - ./graylog/config:/usr/share/graylog/data/config - /etc/localtime:/etc/localtime environment: # CHANGE ME! 
- GRAYLOG_PASSWORD_SECRET=somepasswordpepper # Password: admin - GRAYLOG_ROOT_PASSWORD_SHA2=8c6976e5b5410415bde908bd4dee15dfb167a9c873fc4bb8a81f6f2ab448a918 # 这里需要修改为要暴露的机器的地址 - GRAYLOG_HTTP_EXTERNAL_URI=http://10.121.60.2:9000/ links: - mongo - elasticsearch ports: # Graylog web interface and REST API - 9000:9000 # Syslog TCP - 514:514 # Syslog UDP - 514:514/udp # GELF TCP - 12201:12201 # GELF UDP - 12201:12201/udp # GELF HTTP - 12202:12202 ``` ### 启动 `docker-compose -f docker-compose.yml up -d` 通过http://10.121.60.2:9000/访问web,admin/admin ### 修改配置 - email相关(告警需要) ``` transport_email_enabled = true transport_email_hostname = smtp.163.com transport_email_port = 994 transport_email_use_auth = true transport_email_use_tls = true transport_email_use_ssl = true transport_email_auth_username = 17191093767@163.com transport_email_auth_password = zhim123456 transport_email_subject_prefix = [graylog] transport_email_from_email = 17191093767@163.com transport_email_web_interface_url = http://10.121.60.2:9000 ``` ## 使用 ### 配置添加Inputs > Graylog 节点能够接受数据的类型称之为input,常见的有GELF TCP, GELF UDP, GELF HTTP. 
说明:GELF TCP, GELF UDP可以使用同一个端口,HTTP需要另起端口,原因不解释。 - 添加三个input,过程略,tcp,udp端口使用默认的12201,http端口使用12202。 - 验证 ``` // udp echo -n '{ "version": "1.1", "host": "example.org", "short_message": "A short message info with udp", "level": 1, "_some_info": "foo", "_tag": "test11" }' | nc -w10 -u 10.121.60.2 12201 // tcp echo -n -e '{ "version": "1.1", "host": "example.org", "short_message": "A short message with tcp", "level": 1, "_some_info": "foo" }'"\0" | nc -w10 10.121.60.2 12201 //http curl -X POST -H 'Content-Type: application/json' -d '{ "version": "1.1", "host": "example.org", "short_message": "A short message with http", "level": 5, "_some_info": "foo" }' 'http://10.121.60.2:12202/gelf' ``` ### docker 日志添加到graylog ``` docker run --log-driver=gelf \ --log-opt gelf-address=udp://10.121.60.2:12201 \ --log-opt tag=test1 \ -v /etc/localtime:/etc/localtime \ -it nginx /bin/bash ``` docker-compose.yaml ``` services: mongo: logging: driver: "gelf" options: gelf-address: "udp://10.121.60.2:12201" tag: mongo volumes: - /etc/localtime:/etc/localtime ``` ### java日志直接发送到graylog > 使用logback ``` 10.121.60.2 12201 <!--An example of overwriting the short message pattern--> %ex{short}%.100m <!-- Use HTML output of the full message. 
Yes, any layout can be used (please don't actually do this)--> %d{MM-dd HH:mm:ss.SSS} [%thread] %-5level \(%F:%L\) - %msg %n true true true true requestId:long <!--Facility is not officially supported in GELF anymore, but you can use staticFields to do the same thing--> tag business-server ``` ## 系统使用 功能菜单说明 - search 日志查询面板 ![](assets/2018-07-10-11-52-07.png) - streams 将日志对象按照filed定义为stream,默认的stream为all messages ![](assets/2018-07-10-11-52-22.png) - alerts 告警相关,选择一个stream对象定义告警条件和通知方式,当stream里面的日志满足条件时候告警并通知 ![](assets/2018-07-10-11-52-35.png) - dashboards 图形面板 ![](assets/2018-07-10-11-52-53.png) - source 告警所在主机 ![](assets/2018-07-10-11-53-37.png) - system 系统配置 ![](assets/2018-07-10-11-53-52.png) ### 查询条件 [官方说明文档](http://docs.graylog.org/en/3.0/pages/queries.html) > 关键字不分大小写 - 单个关键字查询 `ssh` - 多关键字查询,含有ssh or login `ssh login` - 含有某个字符串查询 `ssh login` - 同时含有多个关键字查询 `opening index" AND db` - 根据字段值查询 `tag:couchdb.peer0.org1.ygsoft.com` - 含有多个tag的查询,某条记录只要其中一个字段满足即可 ``` tag: (orderer.ygsoft.com couchdb.peer0.org1.ygsoft.com) or tag: orderer.ygsoft.com couchdb.peer0.org1.ygsoft.com ``` - 含有完全匹配字段 `tag:"ssh login"` - 含有某个字段的记录: `_exists_:tag` - 不含有某个字段的记录: `NOT _exists_:tag` - AND OR `"ssh login" AND source:example.org` `("ssh login" AND (source:example.org OR source:another.example.org)) OR _exists_:always_find_me` - NOT ``` "ssh login" AND NOT source:example.org NOT example.org ``` 注意: AND, OR, and NOT 只能大写. - 通配符 ? 表示单个字符 星号表示0个和多个字符 ``` source:*.org source:exam?le.org source:exam?le.* ``` 注意:默认首字母不能使用通配符,因为这样会使用大量的内存;强行开启修改配置文件`allow_leading_wildcard_searches = true` - 当某关键字不确认顺序的时候使用~ ``` ssh logni~ source:exmaple.org~ 结果可以匹配 ssh login and example.org ``` - 以下字符在使用的时候必须用反斜杠转义 ``` && || : \ / + - ! ( ) { } [ ] ^ " ~ * ? Example: resource:\/posts\/45326 ``` ### 查询条件可以保存下来 使用 save search criteria 按钮
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值