使用slf4j,需添加依赖
<!-- lombok -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<!-- rabbitMQ -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-amqp</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.amqp</groupId>
<artifactId>spring-rabbit</artifactId>
</dependency>
然后就是添加一个lombok插件 (我使用的是IDEA, 其他的IDE可以自行搜索安装方法)
直接按快捷键Ctrl+Alt+s弹出Settings界面
创建logback-spring.xml文件
<?xml version="1.0" encoding="UTF-8" ?>
<configuration>
<property resource="application.properties" />
<!-- 全局参数 -->
<!--日志存储位置(读取配置文件的路径)-->
<springProperty scope="context" name="displayName" source="spring.application.name"/>
<springProperty scope="context" name="log_dir" source="logging.file.path"/>
<springProperty scope="context" name="rabbit_host" source="spring.rabbitmq.host"/>
<springProperty scope="context" name="rabbit_port" source="spring.rabbitmq.port"/>
<springProperty scope="context" name="rabbit_username" source="spring.rabbitmq.username"/>
<springProperty scope="context" name="rabbit_password" source="spring.rabbitmq.password"/>
<!-- 常用的Pattern变量 -->
<!--
<pattern>
%d{yyyy-MM-dd HH:mm:ss} [%level] - %msg%n
Logger: %logger
Class: %class
File: %file
Caller: %caller
Line: %line
Message: %m
Method: %M
Relative: %relative
Thread: %thread
Exception: %ex
xException: %xEx
nopException: %nopex
rException: %rEx
Marker: %marker
%n
</pattern>
-->
<!-- 彩色日志格式 -->
<property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
<!-- 注意:Spring Boot 默认模板以 ${LOG_EXCEPTION_CONVERSION_WORD:-%wEx} 结尾,上面 value 末尾多出的 "}" 疑为笔误,按默认模板应去掉 -->
<property name="console-pattern" value="[%ip] %date{yyyy-MM-dd HH:mm:ss} %highlight(%-5level) %magenta(${PID:- })--- [%-5t] %cyan(%logger{56}.%method:%L): %msg%n " />
<property name="pattern" value="[%ip] %date{yyyy-MM-dd HH:mm:ss.SSS} %-5level ${PID:- }--- [%-5t] %logger{56}.%method:%L: %msg%n" />
<!-- 获取IP -->
<conversionRule conversionWord="ip" converterClass="com.cloud.common.config.LogIpConfig" />
<appender name="consoleApp" class="ch.qos.logback.core.ConsoleAppender">
<layout class="ch.qos.logback.classic.PatternLayout">
<pattern>
${console-pattern}
</pattern>
</layout>
</appender>
<!-- <appender name="fileInfoApp" class="ch.qos.logback.core.rolling.RollingFileAppender">-->
<!-- <filter class="ch.qos.logback.classic.filter.LevelFilter">-->
<!-- <level>DEBUG</level>-->
<!-- <onMatch>DENY</onMatch>-->
<!-- <onMismatch>ACCEPT</onMismatch>-->
<!-- </filter>-->
<!-- <encoder>-->
<!-- <pattern>-->
<!-- ${pattern}-->
<!-- </pattern>-->
<!-- </encoder>-->
<!-- <!– 滚动策略 –>-->
<!-- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">-->
<!-- <!– 路径 –>-->
<!-- <fileNamePattern>${log_dir}/app.info.%d.log</fileNamePattern>-->
<!-- </rollingPolicy>-->
<!-- </appender>-->
<!-- <appender name="fileErrorApp" class="ch.qos.logback.core.rolling.RollingFileAppender">-->
<!-- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">-->
<!-- <level>ERROR</level>-->
<!-- </filter>-->
<!-- <encoder>-->
<!-- <pattern>-->
<!-- ${pattern}-->
<!-- </pattern>-->
<!-- </encoder>-->
<!-- <!– 设置滚动策略 –>-->
<!-- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">-->
<!-- <!– 路径 –>-->
<!-- <fileNamePattern>${log_dir}/app.err.%d.log</fileNamePattern>-->
<!-- <!– 控制保留的归档文件的最大数量,超出数量就删除旧文件,假设设置每个月滚动,-->
<!-- 且<maxHistory> 是1,则只保存最近1个月的文件,删除之前的旧文件 –>-->
<!-- <MaxHistory>1</MaxHistory>-->
<!-- </rollingPolicy>-->
<!-- </appender>-->
<appender name="RABBITMQ"
class="org.springframework.amqp.rabbit.logback.AmqpAppender">
<layout>
<pattern>${pattern}</pattern>
</layout>
<!--rabbitmq地址 -->
<addresses>${rabbit_host}:${rabbit_port}</addresses>
<username>${rabbit_username}</username>
<password>${rabbit_password}</password>
<declareExchange>true</declareExchange>
<exchangeType>direct</exchangeType>
<exchangeName>rabbit.log</exchangeName>
<routingKeyPattern>info</routingKeyPattern>
<generateId>true</generateId>
<charset>UTF-8</charset>
<durable>true</durable>
<deliveryMode>NON_PERSISTENT</deliveryMode>
<autoDelete>false</autoDelete>
</appender>
<!-- <appender name="mqAppender" class="org.apache.rocketmq.logappender.logback.RocketmqLogbackAppender">-->
<!-- <tag>logback</tag>-->
<!-- <topic>logTopic</topic>-->
<!-- <producerGroup>logback</producerGroup>-->
<!-- <nameServerAddress>localhost:5672</nameServerAddress>-->
<!-- <layout>-->
<!-- <pattern>${pattern}</pattern>-->
<!-- </layout>-->
<!-- </appender>-->
<!-- <appender name="mqAsyncAppender" class="ch.qos.logback.classic.AsyncAppender">-->
<!-- <queueSize>1024</queueSize>-->
<!-- <discardingThreshold>80</discardingThreshold>-->
<!-- <maxFlushTime>2000</maxFlushTime>-->
<!-- <neverBlock>true</neverBlock>-->
<!-- <appender-ref ref="mqAppender"/>-->
<!-- </appender>-->
<!-- 文件记录只记录指定包的日志 -->
<logger name="com.cloud.sparepart" level="info" additivity="false">
<appender-ref ref="consoleApp"/>
<appender-ref ref="RABBITMQ"/>
</logger>
<!--mybatis log configure-->
<logger name="com.cloud.sparepart.mapper" level="debug"/>
<!--log4jdbc -->
<logger name="jdbc.sqltiming" level="debug"/>
<logger name="com.ibatis" level="debug"/>
<logger name="com.ibatis.common.jdbc.SimpleDataSource" level="debug"/>
<logger name="com.ibatis.common.jdbc.ScriptRunner" level="debug"/>
<logger name="com.ibatis.sqlmap.engine.impl.SqlMapClientDelegate" level="debug"/>
<logger name="java.sql.Connection" level="debug"/>
<logger name="java.sql.Statement" level="debug"/>
<logger name="java.sql.PreparedStatement" level="debug"/>
<logger name="java.sql.ResultSet" level="debug"/>
<logger name="org.springframework" level="WARN"/>
<!--rabbitmq的日志输出级别-->
<logger name="org.springframework.amqp.rabbit.listener.BlockingQueueConsumer" level="WARN" />
<logger name="org.springframework.core.task.SimpleAsyncTaskExecutor" level="WARN" />
<!-- root 一定要放在最后,因有加载顺序的问题 -->
<root level="INFO">
<appender-ref ref="consoleApp"/>
<!-- <appender-ref ref="fileInfoApp"/>-->
<!-- <appender-ref ref="fileErrorApp"/>-->
<appender-ref ref="RABBITMQ"/>
</root>
</configuration>
message服务中 rabbitMQ项目中directExchange
/**
* 1.定义direct exchange,绑定queueTest
* 2.durable="true" rabbitmq重启的时候不需要创建新的交换机
* 3.direct交换器相对来说比较简单,匹配规则为:如果路由键匹配,消息就被投送到相关的队列
* fanout交换器中没有路由键的概念,他会把消息发送到所有绑定在此交换器上面的队列中。
* topic交换器采用模糊匹配路由键的原则进行转发消息到队列中
* key: queue在该direct-exchange中的key值,当消息发送给direct-exchange中指定key为设置值时,
* 消息将会转发给queue参数指定的消息队列
*/
@Bean
public DirectExchange directExchange() {
    // DirectExchange(name, durable, autoDelete):
    //   durable=true     -> the exchange survives a broker restart
    //   autoDelete=false -> the exchange is kept even when no queue is bound
    return new DirectExchange(RabbitMqConfig.EXCHANGE, true, false);
}
队列
@Bean
public Queue logQueue() {
    // Queue(name, durable, exclusive, autoDelete):
    //   durable=true     -> the queue survives a broker restart
    //   exclusive=false  -> not restricted to the declaring connection
    //   autoDelete=false -> not removed when it is no longer in use
    return new Queue("logback", true, false, false);
}
交换机绑定队列
/**
将消息队列logQueue和交换机进行绑定
*/
@Bean
public Binding binding_logQueue() {
    // Bind the log queue to the direct exchange under routing key "logback",
    // so messages published with that key are routed into the queue.
    Queue queue = queueConfig.logQueue();
    DirectExchange exchange = exchangeConfig.directExchange();
    return BindingBuilder.bind(queue).to(exchange).with("logback");
}
下载logstash
https://www.elastic.co/cn/downloads/logstash
解压(解压路径不包含空格)
在bin文件夹下新建文件logstash.conf
input {
rabbitmq {
type =>"all"
durable => true
exchange => "rabbit.log"
exchange_type => "direct"
key => "info"
host => "127.0.0.1"
port => 5672
user => "admin"
password => "123456"
queue => "logback"
auto_delete => false
}
}
output {
file {
path => "D:/WorkSpace/IdeaProjects/framework/logs/spare-part/info-%{+YYYY-MM-dd}.log"
codec => multiline {
pattern => "^\d"
negate => true
what => "previous"
}
}
}
日志文件将通过rabbitMQ队列统一写入日志文件
codec => multiline {
pattern => "^\d"
negate => true
what => "previous"
}
补充一个codec plugin 编解码器插件
codec 本质上是流过滤器,可以作为input 或output 插件的一部分运行。例如上面output的file插件里有用到。
multiline codec plugin 多行合并, 处理堆栈日志或者其他带有换行符日志需要用到
input {
stdin {
codec => multiline {
pattern => "pattern, a regexp" #正则匹配规则,匹配到的内容按照下面两个参数处理
negate => "true" or "false" # 默认为false。处理匹配符合正则规则的行。如果为true,处理不匹配符合正则规则的行。
what => "previous" or "next" #指定上下文。将指定的行是合并到上一行或者下一行。
}
}
}
codec => multiline {
pattern => "^\s"
what => "previous"
}
# 以空格开头的行都合并到上一行
codec => multiline {
# Grok pattern names are valid! :)
pattern => "^%{TIMESTAMP_ISO8601} "
negate => true
what => "previous"
}
# 任何不以这个时间戳格式开头的行都与上一行合并
codec => multiline {
pattern => "\\$"
what => "next"
}
# 以反斜杠结尾的行都与下一行合并
cmd 进入logstash/bin目录下
logstash.bat -f logstash.conf
【参考】
SpringBoot项目的logback日志配置(包括打印mybatis的sql语句)
https://blog.youkuaiyun.com/abysscarry/article/details/80196628
在logback-spring.xml中使用properties文件中的属性
https://blog.youkuaiyun.com/qq_34359363/article/details/104749341
Spring集成Rabbitmq收集Logback日志,利用进行Logstash数据整理存储到Elasticsearch中
https://blog.youkuaiyun.com/niugang0920/article/details/81502022?utm_medium=distribute.pc_relevant_t0.none-task-blog-BlogCommendFromMachineLearnPai2-1.channel_param&depth_1-utm_source=distribute.pc_relevant_t0.none-task-blog-BlogCommendFromMachineLearnPai2-1.channel_param