logback.xml
<?xml version="1.0" encoding="UTF-8"?>
<!-- Logback configuration: writes logs to the console and to a daily rolling file -->
<configuration>
    <include resource="org/springframework/boot/logging/logback/defaults.xml" />
    <springProperty scope="context" name="springAppName" source="spring.application.name" />
    <property name="logback.logdir" value="C:/logs"/>
    <property name="logback.appname" value="logfile"/>
    <!-- Console log output pattern -->
    <property name="CONSOLE_LOG_PATTERN" value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}" />
    <!-- Console output -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
        </filter>
        <!-- Log output encoding -->
        <encoder>
            <pattern>${CONSOLE_LOG_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>
    <!-- File output: kept for now during the initial phase -->
    <appender name="fileLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${logback.logdir}/${logback.appname}.log</file>
        <!-- Rolling policy: roll by time (TimeBasedRollingPolicy) -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- File name pattern: archives each day's logs into its own file so old logs do not fill the disk -->
            <fileNamePattern>${logback.logdir}/${logback.appname}.%d{yyyy-MM-dd}.log</fileNamePattern>
            <!-- Keep only the last 90 days of logs -->
            <maxHistory>90</maxHistory>
            <!-- Caps the total size of all archived logs; once reached, the oldest logs are deleted -->
            <!--<totalSizeCap>1GB</totalSizeCap>-->
        </rollingPolicy>
        <!-- Log output encoding and pattern -->
        <encoder>
            <charset>UTF-8</charset>
            <pattern>%d [%thread] %-5level %logger{36} %line - %msg%n</pattern>
        </encoder>
    </appender>
    <!-- Appender that ships JSON-formatted logs to Logstash -->
    <!-- <appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <destination>192.168.130.33:9250</destination>
        Log output encoding
        <encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers>
                <timestamp>
                    <timeZone>UTC</timeZone>
                </timestamp>
                <pattern>
                    <pattern>
                    {
                        "severity": "%level",
                        "service": "${springAppName:-}",
                        "trace": "%X{X-B3-TraceId:-}",
                        "span": "%X{X-B3-SpanId:-}",
                        "exportable": "%X{X-Span-Export:-}",
                        "pid": "${PID:-}",
                        "thread": "%thread",
                        "class": "%logger{40}",
                        "rest": "%message",
                        "stack_trace": "%exception{5}"
                    }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
    </appender> -->
    <!-- Base log level for the whole application -->
    <root level="INFO">
        <!-- Appenders attached to the root logger -->
        <appender-ref ref="fileLog"/>
        <appender-ref ref="console"/>
        <!-- <appender-ref ref="logstash"/> -->
    </root>
</configuration>
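Once this file is on the classpath (src/main/resources), any SLF4J logger writes through it: DEBUG is dropped by the root level, while INFO and above reach both the console and C:/logs/logfile.log. A minimal sketch of a component that exercises the configuration (class name and messages are illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

// Illustrative component: DEBUG is filtered out by the root level (INFO);
// INFO and above go to both the console and the rolling file.
@Component
public class LoggingSmokeTest implements CommandLineRunner {

    private static final Logger log = LoggerFactory.getLogger(LoggingSmokeTest.class);

    @Override
    public void run(String... args) {
        log.debug("not written: below the root level INFO");
        log.info("written to the console and to the rolling file");
        log.error("also written, with a stack trace", new IllegalStateException("demo"));
    }
}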
Add the following to pom.xml:

<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <version>6.1</version>
</dependency>
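The JSON pattern in the logstash appender above fills the trace, span, and exportable fields from the SLF4J MDC; with Spring Cloud Sleuth on the classpath the X-B3-* keys are populated automatically. Without Sleuth they come out empty, but they can be filled by hand for testing. A minimal sketch (class name and values are made up):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class MdcExample {
    private static final Logger log = LoggerFactory.getLogger(MdcExample.class);

    public static void main(String[] args) {
        // Without Sleuth, the %X{...} fields in the JSON pattern are empty;
        // filling the MDC by hand makes them show up. Values are illustrative.
        MDC.put("X-B3-TraceId", "48485a3953bb6124");
        MDC.put("X-B3-SpanId", "48485a3953bb6124");
        try {
            log.info("this event carries trace and span fields");
        } finally {
            MDC.clear(); // always clean up thread-local MDC state
        }
    }
}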
My Logstash version is 6.2.3 (windows-64bit). The logstash.conf configuration is as follows:
input {
    tcp {
        mode => "server"
        host => "192.168.130.33"
        port => 9250
        # Parse the newline-delimited JSON that logstash-logback-encoder emits
        codec => json_lines
    }
}
filter {
    # Only matched data are sent to output.
}
output {
    elasticsearch {
        action => "index"
        hosts => "192.168.130.55:9200"
        index => "test_index"
    }
}
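Before enabling the logstash appender, the TCP input can be smoke-tested by pushing one newline-terminated JSON document by hand, which is the same framing LogstashTcpSocketAppender uses. A minimal sketch against the host and port above (the payload fields are illustrative):

import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.Socket;
import java.nio.charset.StandardCharsets;

public class LogstashTcpSmokeTest {
    public static void main(String[] args) throws Exception {
        // One newline-terminated JSON document, mirroring the fields
        // produced by the JSON pattern in logback.xml.
        String event = "{\"severity\":\"INFO\",\"service\":\"smoke-test\",\"rest\":\"hello logstash\"}\n";
        try (Socket socket = new Socket("192.168.130.33", 9250);
             Writer out = new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8)) {
            out.write(event);
            out.flush();
        }
        System.out.println("event sent; check the test_index index in Elasticsearch");
    }
}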