The Kafka consume-transform-produce pattern

This article walks through a common Kafka message-processing pattern: a consumer reads records from an input topic, the application transforms them, and a transactional producer republishes the results to another topic. The pattern shows up throughout data pipelines and real-time stream processing, for example for data cleansing and format conversion. By sending the transformed records and committing the consumed offsets inside a single Kafka transaction, the loop achieves exactly-once processing: either both take effect or neither does.

package com.oldwang.transactional;

import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.time.Duration;
import java.util.*;
import java.util.List;

/**
 * @author oldwang
 * Kafka consume-transform-produce pattern example
 */
public class TransactionalConsumerTransformProducer {

    public static final String brokerList = "localhost:9092";
    public static final String inputTopic = "kafka-demo";
    // Output topic name is illustrative; producing back to the input topic would feed the loop its own output
    public static final String outputTopic = "kafka-demo-transformed";

    public static Properties getConsumerProperties() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "groupId");
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        // Read only committed records, so aborted upstream transactions are never consumed
        properties.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        return properties;
    }

    public static Properties getProducerProperties() {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "transactionalId");
        return properties;
    }

    public static void main(String[] args) {
        // Initialize the consumer and the producer
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(getConsumerProperties());
        consumer.subscribe(Collections.singleton(inputTopic));

        KafkaProducer<String, String> producer = new KafkaProducer<>(getProducerProperties());

        // Initialize the transaction; this also fences zombie producers that share the same transactional.id
        producer.initTransactions();

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
            if (!records.isEmpty()) {
                Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();

                // Begin the transaction
                producer.beginTransaction();
                try {
                    for (TopicPartition partition : records.partitions()) {

                        List<ConsumerRecord<String, String>> consumerRecords = records.records(partition);
                        for (ConsumerRecord<String, String> record : consumerRecords) {
                            // Transform the message
                            String value = record.value() + "log";

                            // Consume-transform-produce: republish the transformed record to the output topic
                            ProducerRecord<String, String> producerRecord = new ProducerRecord<>(outputTopic, record.key(), value);
                            producer.send(producerRecord);
                        }
                        // The committed offset is the position of the next record to read, hence last offset + 1
                        long offset = consumerRecords.get(consumerRecords.size() - 1).offset();
                        offsets.put(partition, new OffsetAndMetadata(offset + 1));
                    }
                    // Commit the consumed offsets as part of the transaction
                    // (the ConsumerGroupMetadata overload replaces the deprecated String group-id overload)
                    producer.sendOffsetsToTransaction(offsets, consumer.groupMetadata());
                    // Commit the transaction: produced records and consumed offsets become visible atomically
                    producer.commitTransaction();
                } catch (Exception e) {
                    // Abort the transaction: nothing from this batch becomes visible downstream
                    producer.abortTransaction();
                    // Rewind each partition so the aborted batch is consumed again on the next poll
                    for (TopicPartition partition : records.partitions()) {
                        consumer.seek(partition, records.records(partition).get(0).offset());
                    }
                    e.printStackTrace();
                }
            }
        }
    }
}
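
The exactly-once chain only holds end to end if readers of the output topic skip records from aborted transactions. Below is a minimal sketch of such a downstream consumer; the class name, group id, and output topic name are illustrative, the rest is the standard Kafka client API.

package com.oldwang.transactional;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

/**
 * Downstream consumer of the transformed topic. With isolation.level=read_committed
 * it only sees records from committed transactions.
 */
public class ReadCommittedConsumer {

    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "downstream-group");
        // The key setting: hide records from open or aborted transactions
        properties.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
            consumer.subscribe(Collections.singleton("kafka-demo-transformed"));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("partition=%d offset=%d value=%s%n",
                            record.partition(), record.offset(), record.value());
                }
            }
        }
    }
}

With the default isolation.level of read_uncommitted, the same consumer would also see records from transactions that are later aborted, which breaks the exactly-once guarantee.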
