kafka_05_Custom Object Serialization and Deserialization

This article shows how to use custom serializers and deserializers in Kafka to handle complex data types such as Java objects. A complete example walks through configuring and using the serializer and deserializer, and through the corresponding producer-side and consumer-side implementations.

producer

package com.soul.kafka.level05;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Date;
import java.util.Properties;

public class _09KafkaProducerSerializer {
    public static void main(String[] args) {
        //1. Configure connection properties
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka01:9092,kafka02:9092,kafka03:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        //Use the custom serializer class for message values
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ObjectSerializer.class.getName());

        //2. Create the producer
        KafkaProducer<String, User> producer = new KafkaProducer<>(props);

        //3. Build and send the records
        for (int i = 0; i < 10; i++) {
            //Note: the target topic must already exist
            ProducerRecord<String, User> record = new ProducerRecord<>("topic02", "K" + i,
                    new User(i, "soul-gin" + i, new Date()));
            producer.send(record);
        }

        producer.close();
    }
}
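The producer relies on topic02 already existing (see the comment above). If the brokers do not auto-create topics, the topic can be created up front with the AdminClient shipped in kafka-clients. The class below is only a sketch added for illustration; its name, partition count and replication factor are assumptions and should be adapted to the actual cluster.

package com.soul.kafka.level05;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.Collections;
import java.util.Properties;

//Hypothetical helper, not part of the original code: creates topic02 before the producer runs
public class TopicAdminSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka01:9092,kafka02:9092,kafka03:9092");

        try (AdminClient admin = AdminClient.create(props)) {
            //Assumed layout: 3 partitions, replication factor 3 (one replica per broker)
            NewTopic topic = new NewTopic("topic02", 3, (short) 3);
            //Block until the brokers confirm the creation
            admin.createTopics(Collections.singleton(topic)).all().get();
        }
    }
}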

consumer

package com.soul.kafka.level05;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Iterator;
import java.util.Properties;
import java.util.regex.Pattern;

public class _10KafkaConsumerSerializer {
    public static void main(String[] args) {
        //1. Configure Kafka connection properties
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka01:9092,kafka02:9092,kafka03:9092");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        //Use the custom deserializer class for message values
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ObjectDeserializer.class.getName());
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "group01");

        //2. Create the topic consumer
        KafkaConsumer<String, User> consumer = new KafkaConsumer<>(props);
        //3. Subscribe to every topic whose name starts with "topic"
        consumer.subscribe(Pattern.compile("^topic.*$"));

        while (true) {
            ConsumerRecords<String, User> consumerRecords = consumer.poll(Duration.ofSeconds(1));
            Iterator<ConsumerRecord<String, User>> recordIterator = consumerRecords.iterator();
            while (recordIterator.hasNext()) {
                ConsumerRecord<String, User> record = recordIterator.next();
                String key = record.key();
                User value = record.value();
                long offset = record.offset();
                int partition = record.partition();
                System.out.println("kfkKey:" + key + ", kfkVal:" + value
                        + ", partition:" + partition + ", offset:" + offset);
            }
        }
    }
}
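The poll loop above runs forever, so consumer.close() is never reached. A common refinement is to trigger consumer.wakeup() from a JVM shutdown hook and close the consumer in a finally block. The class below is a sketch of that pattern added for illustration (the class name is an assumption); otherwise it reuses the same configuration as _10KafkaConsumerSerializer.

package com.soul.kafka.level05;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Properties;
import java.util.regex.Pattern;

//Hypothetical variant of _10KafkaConsumerSerializer showing one way to stop the poll loop cleanly
public class _10KafkaConsumerGracefulShutdown {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka01:9092,kafka02:9092,kafka03:9092");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ObjectDeserializer.class.getName());
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "group01");

        KafkaConsumer<String, User> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Pattern.compile("^topic.*$"));

        final Thread mainThread = Thread.currentThread();
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            consumer.wakeup();     //makes the blocked poll() throw WakeupException
            try {
                mainThread.join(); //wait until the consumer has been closed
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }));

        try {
            while (true) {
                ConsumerRecords<String, User> records = consumer.poll(Duration.ofSeconds(1));
                records.forEach(r -> System.out.println("kfkKey:" + r.key() + ", kfkVal:" + r.value()));
            }
        } catch (WakeupException e) {
            //expected during shutdown, nothing to handle
        } finally {
            consumer.close();      //leaves the group and commits offsets if auto-commit is enabled
        }
    }
}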

serializer

package com.soul.kafka.level05;

import org.apache.commons.lang3.SerializationUtils;
import org.apache.kafka.common.serialization.Serializer;

import java.io.Serializable;
import java.util.Map;

public class ObjectSerializer implements Serializer<Object> {

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        System.out.println("configure");
    }

    @Override
    public byte[] serialize(String topic, Object data) {
        //Reuse the object serialization from commons-lang3 (implemented via Java IO streams) rather than reinventing the wheel
        return SerializationUtils.serialize((Serializable) data);
    }

    @Override
    public void close() {
        System.out.println("close");
    }
}
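ObjectSerializer is declared as Serializer<Object> and casts each value to Serializable at runtime. A type-safe variant bound directly to User is sketched below; it is not used by the producer above, and it assumes kafka-clients 2.1+, where configure() and close() have default implementations so only serialize() needs to be overridden.

package com.soul.kafka.level05;

import org.apache.commons.lang3.SerializationUtils;
import org.apache.kafka.common.serialization.Serializer;

//Sketch of a type-safe alternative: binding the generic parameter to User removes the
//runtime cast and makes null handling explicit
public class UserSerializer implements Serializer<User> {

    @Override
    public byte[] serialize(String topic, User data) {
        //Kafka treats a null value as a tombstone, so pass it through unchanged
        return data == null ? null : SerializationUtils.serialize(data);
    }
}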

deserializer

package com.soul.kafka.level05;

import org.apache.commons.lang3.SerializationUtils;
import org.apache.kafka.common.serialization.Deserializer;

import java.util.Map;

public class ObjectDeserializer implements Deserializer<Object> {
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        System.out.println("configure");
    }

    @Override
    public Object deserialize(String topic, byte[] data) {
        //Reuse the object deserialization from commons-lang3
        return SerializationUtils.deserialize(data);
    }

    @Override
    public void close() {
        System.out.println("close");
    }
}
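Before wiring the pair into Kafka, a quick local round trip confirms that a User survives serialization and deserialization. The small check below is added for illustration only; the class name is hypothetical and no broker is involved.

package com.soul.kafka.level05;

import java.util.Date;

//Hypothetical local check: serialize a User with ObjectSerializer and restore it with
//ObjectDeserializer, without going through a broker
public class SerializationRoundTrip {
    public static void main(String[] args) {
        ObjectSerializer serializer = new ObjectSerializer();
        ObjectDeserializer deserializer = new ObjectDeserializer();

        User original = new User(1, "soul-gin1", new Date());
        //The topic argument is not used by either implementation
        byte[] bytes = serializer.serialize("topic02", original);
        User restored = (User) deserializer.deserialize("topic02", bytes);

        System.out.println("original: " + original);
        System.out.println("restored: " + restored);
    }
}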

user

package com.soul.kafka.level05;

import java.io.Serializable;
import java.util.Date;

public class User implements Serializable {

    private Integer id;

    private String name;

    private Date birthDay;

    public User() {
    }

    public User(Integer id, String name, Date birthDay) {
        this.id = id;
        this.name = name;
        this.birthDay = birthDay;
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Date getBirthDay() {
        return birthDay;
    }

    public void setBirthDay(Date birthDay) {
        this.birthDay = birthDay;
    }

    @Override
    public String toString() {
        return "User{" +
                "id=" + id +
                ", name='" + name + '\'' +
                ", birthDay=" + birthDay +
                '}';
    }

}
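Because both ends rely on Java serialization, it is worth pinning a serialVersionUID on User: if the producer and consumer are compiled from slightly different versions of the class, the implicitly generated IDs may differ and deserialization fails with an InvalidClassException. A minimal sketch (the chosen value is arbitrary):

public class User implements Serializable {

    //Fixing the version keeps producer and consumer builds compatible; the value is an assumption
    private static final long serialVersionUID = 1L;

    //... fields, constructors, getters/setters and toString() as above
}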

pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.soul</groupId>
    <artifactId>kafka</artifactId>
    <version>0.0.1</version>
    <name>kafka</name>

    <properties>
        <java.version>1.8</java.version>
    </properties>

    <dependencies>
        <!-- kafka begin -->

        <!--https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients-->
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>2.2.0</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/log4j/log4j -->
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.25</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-log4j12 -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.25</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.9</version>
        </dependency>

        <!-- kafka end -->
        
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>8</source>
                    <target>8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>

log4j.properties

log4j.rootLogger = info,console

log4j.appender.console = org.apache.log4j.ConsoleAppender
log4j.appender.console.Target = System.out
log4j.appender.console.layout = org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern =  %p %d{yyyy-MM-dd HH:mm:ss} %c - %m%n
