Method 1: Configure via log4j2.xml
<Appenders>
    <Kafka name="KAFKA" topic="log4j-kafka" syncSend="false">
        <!-- <ThresholdFilter level="error" onMatch="ACCEPT" onMismatch="DENY" /> -->
        <MarkerFilter marker="kafkaLog" onMatch="ACCEPT" onMismatch="DENY"/>
        <PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss,SSS}:%4p %t (%F:%L) - %m%n"/>
        <Property name="bootstrap.servers">0.0.0.0:9092</Property>
        <Property name="retries">3</Property>
        <Property name="linger.ms">1000</Property>
        <Property name="buffer.memory">10485760</Property>
    </Kafka>
</Appenders>
<Loggers>
    <!-- additivity="false": only the Appenders configured on this Logger print the log;
         the event is not also passed up to parent Loggers -->
    <Logger name="com.hhaip" level="INFO" additivity="false">
        <AppenderRef ref="CONSOLE"/>
        <AppenderRef ref="INTERPHONE"/>
        <AppenderRef ref="KAFKA"/>
    </Logger>
    <!-- Root: all Loggers without their own configuration print through the Appenders under Root -->
    <Root level="INFO">
        <AppenderRef ref="CONSOLE"/>
        <AppenderRef ref="INTERPHONE"/>
        <AppenderRef ref="KAFKA"/>
    </Root>
</Loggers>
Note: do not define key.serializer, value.serializer, or batch.size; Log4j2 already sets these for you.
Explanation:
MarkerFilter is a filter; marker names the marker to match. Only log events tagged with this marker are sent to Kafka, as shown in the message-sending example below.
ThresholdFilter is also a filter; it specifies a level threshold, e.g. only ERROR-level events would be sent to Kafka.
PatternLayout defines the message format.
syncSend controls whether the appender waits for Kafka synchronously: false returns immediately after sending, while true waits for Kafka's acknowledgement before returning (Log4j2 2.8 and above).
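For instance, if the commented-out ThresholdFilter above were enabled on the Kafka appender instead of the MarkerFilter, only ERROR-level (and higher) events would be forwarded. A minimal sketch, assuming SLF4J as the logging facade; ThresholdDemo is a made-up class name:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ThresholdDemo {
    private static final Logger log = LoggerFactory.getLogger(ThresholdDemo.class);

    public static void main(String[] args) {
        // With <ThresholdFilter level="error" .../> on the Kafka appender:
        log.error("disk almost full");   // ERROR meets the threshold, forwarded to Kafka
        log.info("request handled");     // INFO is below the threshold, not forwarded
    }
}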
Sending messages to Kafka
In a Java class, a message can be sent to Kafka as follows:
log.info(Markers.KAFKA, "kafka message {}", "log4j-" + content);
Kafka then receives the message formatted by the PatternLayout configured above.
Contents of the Markers class:
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class Markers {
    public static final Marker DB = MarkerFactory.getMarker("dbLog");
    public static final Marker KAFKA = MarkerFactory.getMarker("kafkaLog");
}
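Putting the pieces together, a complete class using the snippet above might look like the following sketch. OrderService and the content parameter are made-up names, and the log field is a plain SLF4J logger (Lombok's @Slf4j would work just as well):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class OrderService {
    private static final Logger log = LoggerFactory.getLogger(OrderService.class);

    public void process(String content) {
        // Tagged with the "kafkaLog" marker: accepted by the MarkerFilter and
        // published by the KAFKA appender to the "log4j-kafka" topic.
        log.info(Markers.KAFKA, "kafka message {}", "log4j-" + content);

        // Untagged: still printed by CONSOLE/INTERPHONE, but rejected by the
        // Kafka appender's MarkerFilter.
        log.info("request handled: {}", content);
    }
}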
Method 2: Adding the appender dynamically (Spring Boot project)
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;

import javax.annotation.PostConstruct;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.Filter;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.Logger;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.appender.mom.kafka.KafkaAppender;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.DefaultConfiguration;
import org.apache.logging.log4j.core.config.Property;
import org.apache.logging.log4j.core.filter.MarkerFilter;
import org.apache.logging.log4j.core.layout.PatternLayout;
import org.springframework.stereotype.Component;

@Component
public class KafkaLogAppender {

    @PostConstruct
    public void init() {
        final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
        final Configuration config = ctx.getConfiguration();
        // The package whose logs should be routed to Kafka
        final Logger interLogger = ctx.getLogger("com.hhaip");

        // Kafka producer properties
        List<Property> list = new ArrayList<>();
        list.add(Property.createProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "0.0.0.0:9092"));
        list.add(Property.createProperty(ProducerConfig.ACKS_CONFIG, "1"));
        list.add(Property.createProperty(ProducerConfig.RETRIES_CONFIG, "3"));
        list.add(Property.createProperty(ProducerConfig.LINGER_MS_CONFIG, "10000"));
        list.add(Property.createProperty(ProducerConfig.BUFFER_MEMORY_CONFIG, "10485760"));
        list.add(Property.createProperty(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "1000"));
        Property[] props = list.toArray(new Property[list.size()]);

        // Configure the Marker filter (the same marker filter as in Method 1)
        MarkerFilter filter = MarkerFilter.createFilter("kafkaLog", Filter.Result.ACCEPT, Filter.Result.DENY);

        Configuration configuration = new DefaultConfiguration();
        Layout<String> layout = PatternLayout.createLayout("%date %message", null, configuration,
                null, Charset.forName("UTF-8"), false, false, null, null);

        Appender appender = KafkaAppender.createAppender(layout, filter, "KAFKA", true,
                "log4j-kafka", props, configuration);
        appender.start();
        config.addAppender(appender);
        interLogger.addAppender(appender);
        ctx.updateLoggers();
    }
}
Method 2 is used in the same way as Method 1; you also need to define a Markers class.
Note: replace 0.0.0.0 with your own Kafka broker address.
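Since the broker address usually differs per environment, the Spring Boot variant can inject it instead of hardcoding it. A sketch, assuming a kafka.bootstrap-servers entry exists in application.properties (both the property name and the example address are hypothetical):

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

@Component
public class KafkaLogAppender {

    // Injected from application.properties before @PostConstruct runs,
    // e.g. kafka.bootstrap-servers=192.168.1.10:9092
    @Value("${kafka.bootstrap-servers}")
    private String bootstrapServers;

    // Inside init(), use the injected value instead of the hardcoded address:
    // list.add(Property.createProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers));
}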