elk 提供了完备且成熟的日志存储和分析的解决方案,本文主要介绍 springboot微服务与elk的日志整合
在上篇文章中介绍了kibana + logstash + filebeat集群的安装,本文章为大家介绍如何实现springboot微服务与elk的日志整合。
elasticsearch + kibana + logstash + filebeat构建高可用分布式日志集群系统(二):kibana + logstash + filebeat集群的安装
elasticsearch+ kibana+ logstash+ filebeat构建高可用分布式日志集群系统(三):springboot微服务的日志整合(本文)
1.新建account/product/insurance服务,并在pom引入
<!-- logstash-logback-encoder: provides LogstashTcpSocketAppender and the
     LoggingEventCompositeJsonEncoder used in logback-spring.xml below.
     NOTE(review): 5.3 is an older release — confirm it matches your logback/Spring Boot versions. -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>5.3</version>
</dependency>
2.account在resources下添加logback-spring.xml 修改相关配置如下
<?xml version="1.0" encoding="UTF-8"?>
<!-- Logback configuration for the MS-Account service:
     writes level-separated rolling log files and ships JSON events to logstash over TCP. -->
<configuration scan="true" scanPeriod="60 seconds">
  <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
  <property name="service_Name" value="MS-Account"/>
  <springProperty scope="context" name="springAppName" source="spring.application.name"/>
  <!-- Registers the %ip conversion word used to stamp the current host IP into JSON events -->
  <conversionRule conversionWord="ip" converterClass="com.isansi.modcommon.wrapper.LogIpConfig"/>
  <!-- Console/file log pattern.
       Fixed: removed the stray trailing '}' after ${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}
       which printed a literal '}' at the end of every log line. -->
  <property name="CONSOLE_LOG_PATTERN" value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
  <!-- Console output (INFO and above) -->
  <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
      <level>INFO</level>
    </filter>
    <encoder>
      <pattern>${CONSOLE_LOG_PATTERN}</pattern>
      <charset>UTF-8</charset>
    </encoder>
  </appender>
  <!-- Daily INFO log file, rolled by size and time -->
  <appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
      <fileNamePattern>logs/${service_Name}-info-%d{yyyy-MM-dd}-%i.log</fileNamePattern>
      <!-- Max 100MB per file, 30 days retention, 20GB total cap -->
      <maxFileSize>100MB</maxFileSize>
      <!-- Fixed: was <MaxHistory>; use the conventional camelCase tag, matching ERROR_FILE -->
      <maxHistory>30</maxHistory>
      <totalSizeCap>20GB</totalSizeCap>
    </rollingPolicy>
    <encoder>
      <pattern>${CONSOLE_LOG_PATTERN}</pattern>
      <charset>UTF-8</charset>
    </encoder>
  </appender>
  <!-- Daily ERROR log file, rolled by size and time -->
  <appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
      <level>ERROR</level>
    </filter>
    <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
      <fileNamePattern>logs/${service_Name}-error-%d{yyyy-MM-dd}-%i.log</fileNamePattern>
      <!-- Max 100MB per file, 30 days retention, 20GB total cap -->
      <maxFileSize>100MB</maxFileSize>
      <maxHistory>30</maxHistory>
      <totalSizeCap>20GB</totalSizeCap>
    </rollingPolicy>
    <encoder>
      <pattern>${CONSOLE_LOG_PATTERN}</pattern>
      <charset>UTF-8</charset>
    </encoder>
  </appender>
  <!-- JSON appender shipping events to logstash; picks one destination at random,
       reconnecting every 5 minutes (connectionTTL) for load distribution -->
  <appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
    <destination>192.168.77.120:45001,192.168.77.130:45001,192.168.77.140:45001</destination>
    <connectionStrategy>
      <random>
        <connectionTTL>5 minutes</connectionTTL>
      </random>
    </connectionStrategy>
    <!-- Delay before reconnecting after a broken connection -->
    <reconnectionDelay>1 second</reconnectionDelay>
    <!-- Socket write buffer size in bytes -->
    <writeBufferSize>16384</writeBufferSize>
    <encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
      <providers>
        <timestamp>
          <timeZone>UTC</timeZone>
        </timestamp>
        <pattern>
          <pattern>
            {
            "span": "%X{X-B3-SpanId:-}",
            "exportable": "%X{X-Span-Export:-}",
            "pid": "${PID:-}",
            "springHost": "%ip",
            "rest": "%message",
            "thread": "%thread",
            "severity": "%level",
            "class": "%logger{40}",
            "service": "${springAppName:-}"
            }
          </pattern>
        </pattern>
      </providers>
    </encoder>
  </appender>
  <!-- Root logger: INFO and above to all appenders -->
  <root level="INFO">
    <appender-ref ref="console"/>
    <appender-ref ref="INFO_FILE"/>
    <appender-ref ref="ERROR_FILE"/>
    <appender-ref ref="logstash"/>
  </root>
</configuration>
3.product服务在resources下添加logback-spring.xml 修改相关配置如下
<?xml version="1.0" encoding="UTF-8"?>
<!-- Logback configuration for the MS-Product service:
     writes level-separated rolling log files and ships JSON events to logstash over TCP. -->
<configuration scan="true" scanPeriod="60 seconds">
  <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
  <property name="service_Name" value="MS-Product"/>
  <springProperty scope="context" name="springAppName" source="spring.application.name"/>
  <!-- Registers the %ip conversion word used to stamp the current host IP into JSON events -->
  <conversionRule conversionWord="ip" converterClass="com.isansi.modcommon.wrapper.LogIpConfig"/>
  <!-- Console/file log pattern.
       Fixed: removed the stray trailing '}' after ${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}
       which printed a literal '}' at the end of every log line. -->
  <property name="CONSOLE_LOG_PATTERN" value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
  <!-- Console output (INFO and above) -->
  <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
      <level>INFO</level>
    </filter>
    <encoder>
      <pattern>${CONSOLE_LOG_PATTERN}</pattern>
      <charset>UTF-8</charset>
    </encoder>
  </appender>
  <!-- Daily INFO log file, rolled by size and time -->
  <appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
      <fileNamePattern>logs/${service_Name}-info-%d{yyyy-MM-dd}-%i.log</fileNamePattern>
      <!-- Max 100MB per file, 30 days retention, 20GB total cap -->
      <maxFileSize>100MB</maxFileSize>
      <!-- Fixed: was <MaxHistory>; use the conventional camelCase tag, matching ERROR_FILE -->
      <maxHistory>30</maxHistory>
      <totalSizeCap>20GB</totalSizeCap>
    </rollingPolicy>
    <encoder>
      <pattern>${CONSOLE_LOG_PATTERN}</pattern>
      <charset>UTF-8</charset>
    </encoder>
  </appender>
  <!-- Daily ERROR log file, rolled by size and time -->
  <appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
      <level>ERROR</level>
    </filter>
    <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
      <fileNamePattern>logs/${service_Name}-error-%d{yyyy-MM-dd}-%i.log</fileNamePattern>
      <!-- Max 100MB per file, 30 days retention, 20GB total cap -->
      <maxFileSize>100MB</maxFileSize>
      <maxHistory>30</maxHistory>
      <totalSizeCap>20GB</totalSizeCap>
    </rollingPolicy>
    <encoder>
      <pattern>${CONSOLE_LOG_PATTERN}</pattern>
      <charset>UTF-8</charset>
    </encoder>
  </appender>
  <!-- JSON appender shipping events to logstash; picks one destination at random,
       reconnecting every 5 minutes (connectionTTL) for load distribution -->
  <appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
    <destination>192.168.77.120:45002,192.168.77.130:45002,192.168.77.140:45002</destination>
    <connectionStrategy>
      <random>
        <connectionTTL>5 minutes</connectionTTL>
      </random>
    </connectionStrategy>
    <!-- Delay before reconnecting after a broken connection -->
    <reconnectionDelay>1 second</reconnectionDelay>
    <!-- Socket write buffer size in bytes -->
    <writeBufferSize>16384</writeBufferSize>
    <encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
      <providers>
        <timestamp>
          <timeZone>UTC</timeZone>
        </timestamp>
        <pattern>
          <pattern>
            {
            "span": "%X{X-B3-SpanId:-}",
            "exportable": "%X{X-Span-Export:-}",
            "pid": "${PID:-}",
            "springHost": "%ip",
            "rest": "%message",
            "thread": "%thread",
            "severity": "%level",
            "class": "%logger{40}",
            "service": "${springAppName:-}"
            }
          </pattern>
        </pattern>
      </providers>
    </encoder>
  </appender>
  <!-- Root logger: INFO and above to all appenders -->
  <root level="INFO">
    <appender-ref ref="console"/>
    <appender-ref ref="INFO_FILE"/>
    <appender-ref ref="ERROR_FILE"/>
    <appender-ref ref="logstash"/>
  </root>
</configuration>
4.insurance服务在resources下添加logback-spring.xml 修改相关配置如下
<?xml version="1.0" encoding="UTF-8"?>
<!-- Logback configuration for the MS-Insurance service:
     writes level-separated rolling log files and ships JSON events to logstash over TCP. -->
<configuration scan="true" scanPeriod="60 seconds">
  <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
  <property name="service_Name" value="MS-Insurance"/>
  <springProperty scope="context" name="springAppName" source="spring.application.name"/>
  <!-- Registers the %ip conversion word used to stamp the current host IP into JSON events.
       NOTE(review): this service uses com.isansi.msinsurance.config.LogIpConfig while the other
       two use com.isansi.modcommon.wrapper.LogIpConfig — confirm this divergence is intentional. -->
  <conversionRule conversionWord="ip" converterClass="com.isansi.msinsurance.config.LogIpConfig"/>
  <!-- Console/file log pattern.
       Fixed: removed the stray trailing '}' after ${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}
       which printed a literal '}' at the end of every log line. -->
  <property name="CONSOLE_LOG_PATTERN" value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
  <!-- Console output (INFO and above) -->
  <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
      <level>INFO</level>
    </filter>
    <encoder>
      <pattern>${CONSOLE_LOG_PATTERN}</pattern>
      <charset>UTF-8</charset>
    </encoder>
  </appender>
  <!-- Daily INFO log file, rolled by size and time -->
  <appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
      <fileNamePattern>logs/${service_Name}-info-%d{yyyy-MM-dd}-%i.log</fileNamePattern>
      <!-- Max 100MB per file, 30 days retention, 20GB total cap -->
      <maxFileSize>100MB</maxFileSize>
      <!-- Fixed: was <MaxHistory>; use the conventional camelCase tag, matching ERROR_FILE -->
      <maxHistory>30</maxHistory>
      <totalSizeCap>20GB</totalSizeCap>
    </rollingPolicy>
    <encoder>
      <pattern>${CONSOLE_LOG_PATTERN}</pattern>
      <charset>UTF-8</charset>
    </encoder>
  </appender>
  <!-- Daily ERROR log file, rolled by size and time -->
  <appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
      <level>ERROR</level>
    </filter>
    <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
      <fileNamePattern>logs/${service_Name}-error-%d{yyyy-MM-dd}-%i.log</fileNamePattern>
      <!-- Max 100MB per file, 30 days retention, 20GB total cap -->
      <maxFileSize>100MB</maxFileSize>
      <maxHistory>30</maxHistory>
      <totalSizeCap>20GB</totalSizeCap>
    </rollingPolicy>
    <encoder>
      <pattern>${CONSOLE_LOG_PATTERN}</pattern>
      <charset>UTF-8</charset>
    </encoder>
  </appender>
  <!-- JSON appender shipping events to logstash; picks one destination at random,
       reconnecting every 5 minutes (connectionTTL) for load distribution.
       Fixed: destination list collapsed onto one line — the original had a leading
       newline/whitespace inside the element text, which risks corrupting the host list. -->
  <appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
    <destination>192.168.77.120:45003,192.168.77.130:45003,192.168.77.140:45003</destination>
    <connectionStrategy>
      <random>
        <connectionTTL>5 minutes</connectionTTL>
      </random>
    </connectionStrategy>
    <!-- Delay before reconnecting after a broken connection -->
    <reconnectionDelay>1 second</reconnectionDelay>
    <!-- Socket write buffer size in bytes -->
    <writeBufferSize>16384</writeBufferSize>
    <encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
      <providers>
        <timestamp>
          <timeZone>UTC</timeZone>
        </timestamp>
        <pattern>
          <pattern>
            {
            "span": "%X{X-B3-SpanId:-}",
            "exportable": "%X{X-Span-Export:-}",
            "pid": "${PID:-}",
            "springHost": "%ip",
            "rest": "%message",
            "thread": "%thread",
            "severity": "%level",
            "class": "%logger{40}",
            "service": "${springAppName:-}"
            }
          </pattern>
        </pattern>
      </providers>
    </encoder>
  </appender>
  <!-- Root logger: INFO and above to all appenders -->
  <root level="INFO">
    <appender-ref ref="console"/>
    <appender-ref ref="INFO_FILE"/>
    <appender-ref ref="ERROR_FILE"/>
    <appender-ref ref="logstash"/>
  </root>
</configuration>