一、依赖
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>kafkademo</groupId>
<artifactId>kafkademo</artifactId>
<version>1</version>
<dependencies>
<!-- Avro runtime used to parse schemas and decode GenericRecords. -->
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<version>1.7.7</version>
</dependency>
<!-- Twitter Bijection: byte[] <-> GenericRecord codec for Avro binary. -->
<dependency>
<groupId>com.twitter</groupId>
<artifactId>bijection-avro_2.11</artifactId>
<version>0.9.6</version>
</dependency>
<!-- fastjson bumped 1.2.58 -> 1.2.83: 1.2.x releases before 1.2.83 are
     affected by autotype deserialization RCEs (e.g. CVE-2022-25845);
     1.2.83 is the patched drop-in replacement on the 1.2 line. -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.83</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.2.0</version>
</dependency>
<!-- Oracle JDBC driver used by SchemaUtils to read table definitions. -->
<dependency>
<groupId>com.oracle</groupId>
<artifactId>ojdbc6</artifactId>
<version>11.2.0.3</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
</project>
二、Java连接Kafka
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.twitter.bijection.Injection;
import com.twitter.bijection.avro.GenericAvroCodecs;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
/**
 * Demo consumer that reads Avro-encoded table rows from Kafka, decodes them
 * with an Avro schema and prints each row as JSON.
 *
 * Fixes over the original:
 * - both consumers are now closed (they were leaked);
 * - the invalid pre-0.9 "reset.beginning" property was removed (not a
 *   kafka-clients 2.x config; it only produced a startup warning);
 * - raw ConsumerRecord types replaced with parameterized ones;
 * - the schema-topic key is null/format-checked before indexing.
 */
public class KafkaConsumerDemo {
    /** Consumer for the Avro-encoded data topic (byte[] values). */
    private KafkaConsumer<String, byte[]> consumer;
    /** Consumer for the schema topic (String values holding Avro schema JSON). */
    private KafkaConsumer<String, String> schemaConsumer;

    /**
     * Common settings shared by both consumers.
     * NOTE(review): enable.auto.commit is false and nothing commits offsets,
     * so every run re-reads the topic from the beginning — presumably
     * intentional for a demo; confirm before reusing in production.
     */
    private Properties baseProperties(String groupId) {
        Properties p = new Properties();
        p.put("bootstrap.servers", "192.168.163.130:9092");
        p.put("group.id", groupId);
        // Read from the earliest offset when the group has no committed offset.
        p.put("auto.offset.reset", "earliest");
        p.put("enable.auto.commit", false);
        p.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        p.put("max.poll.interval.ms", 300000);
        p.put("max.poll.records", 100);
        return p;
    }

    /**
     * Creates the String/String consumer used to read Avro schemas.
     * The group id is upper-cased, matching the upper-cased topic naming
     * used elsewhere in this demo.
     */
    private void initSchemaConsumer(String groupId) {
        Properties p = baseProperties(groupId.toUpperCase());
        p.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        this.schemaConsumer = new KafkaConsumer<>(p);
    }

    /** Creates the String/byte[] consumer used to read Avro-encoded rows. */
    private void initConsumer(String groupId) {
        Properties p = baseProperties(groupId);
        p.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
        this.consumer = new KafkaConsumer<>(p);
    }

    /**
     * Polls the schema topic once (up to 10 s) and returns the Avro schema
     * JSON whose key's second dot-separated segment equals the upper-cased
     * table name (e.g. key "SCOTT.STUDENT" matches table "student").
     * Returns "" when no matching record arrives within the poll window.
     */
    private String getSchema(String tableName) {
        String topic = "schemaTopic";
        initSchemaConsumer(topic);
        String result = "";
        try {
            schemaConsumer.subscribe(Collections.singletonList(topic));
            ConsumerRecords<String, String> records = schemaConsumer.poll(Duration.ofMillis(10000));
            String wanted = tableName.toUpperCase();
            for (ConsumerRecord<String, String> record : records) {
                String key = record.key();
                if (key == null) {
                    continue; // keyless record: nothing to match on (original NPE'd here)
                }
                String[] parts = key.split("\\.");
                // Guard the index: the original split("\\.")[1] threw
                // ArrayIndexOutOfBoundsException on keys without a dot.
                if (parts.length > 1 && wanted.equals(parts[1])) {
                    result = record.value(); // last matching record wins
                }
            }
        } finally {
            schemaConsumer.close(); // original leaked the consumer
        }
        return result;
    }

    /**
     * Reads one batch (up to 10 s / max.poll.records) from the topic named
     * after the upper-cased table name, decodes each Avro value with the
     * given schema and prints it as JSON with null fields included.
     */
    private void getData(String tableName, String schemaValue) {
        initConsumer(tableName.toUpperCase());
        Schema schema = new Schema.Parser().parse(schemaValue);
        try {
            consumer.subscribe(Collections.singletonList(tableName.toUpperCase()));
            ConsumerRecords<String, byte[]> records = consumer.poll(Duration.ofMillis(10000));
            for (ConsumerRecord<String, byte[]> record : records) {
                JSONObject json = parseAvroRecord(record, schema);
                System.out.println("1:" + JSON.toJSONString(json, SerializerFeature.WriteMapNullValue));
            }
        } finally {
            consumer.close(); // original leaked the consumer
        }
    }

    /** Decodes the record's Avro-binary value with the schema and converts it to a JSONObject. */
    private JSONObject parseAvroRecord(ConsumerRecord<String, byte[]> record, Schema schema) {
        Injection<GenericRecord, byte[]> injection = GenericAvroCodecs.toBinary(schema);
        GenericRecord decoded = injection.invert(record.value()).get();
        return JSONObject.parseObject(decoded.toString());
    }

    public static void main(String[] args) {
        KafkaConsumerDemo kc = new KafkaConsumerDemo();
        // SchemaUtils (declared elsewhere in this project) derives the Avro
        // schema from the Oracle table definition via JDBC.
        String schema1 = SchemaUtils.getAvroOfTable("oracle.jdbc.OracleDriver",
                "jdbc:oracle:thin:@192.168.163.130:1521:syzbj2js",
                "scott", "scott", "scott", "student");
        kc.getData("student", schema1);
    }
}