/**
 * Kafka high-level consumer: fetch messages from the configured topic.
 *
 * @param conditionDTO conditionDTO
 * @return data
 */
public static List<String> customerHighConsumer(KafkaConsumeConditionDTO conditionDTO) {
    // Result list
    List<String> data = new ArrayList<>();
    Properties properties = initPropertie(conditionDTO);
    KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
    // Subscribe the consumer to the topic
    kafkaConsumer.subscribe(Collections.singletonList(conditionDTO.getTopic()));
    log.info("Start consuming (fetching data from Kafka topic " + conditionDTO.getTopic() + ")");
    while (true) {
        try {
            // Poll for records, waiting up to 5000 ms (requires java.time.Duration)
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(5000));
            if (!records.isEmpty()) {
                log.info("Records in this batch: " + records.count());
                for (ConsumerRecord<String, String> record : records) {
                    data.add(record.value());
                    // Persistence logic could be added here
                }
                try {
                    // Asynchronous commit; it is not retried on failure
                    kafkaConsumer.commitAsync();
                } catch (Exception e) {
                    // On failure, fall back to a synchronous commit, which retries internally
                    kafkaConsumer.commitSync();
                }
            } else {
                log.info("Fetched " + data.size() + " records in this run");
                break;
            }
        } catch (Exception e) {
            log.error("Error while consuming from Kafka", e);
            break;
        }
    }
    // Release the consumer's resources before returning
    kafkaConsumer.close();
    return data;
}
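A minimal call sketch follows. The setter names on KafkaConsumeConditionDTO are assumed here and may differ from the real DTO; it also requires java.util.Arrays.

// Usage sketch; setBrokerList/setTopic are assumed setters, adjust to the actual DTO
KafkaConsumeConditionDTO condition = new KafkaConsumeConditionDTO();
condition.setBrokerList(Arrays.asList("10.0.0.1:9092", "10.0.0.2:9092"));
condition.setTopic("demo_topic");
List<String> messages = customerHighConsumer(condition);
log.info("Fetched " + messages.size() + " messages");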
/**
 * Initialize the Kafka consumer configuration.
 *
 * @param conditionDTO conditionDTO
 * @return properties
 */
private static Properties initPropertie(KafkaConsumeConditionDTO conditionDTO) {
    Properties properties = new Properties();
    // Kafka broker host:port pairs, separated by ","
    String brokerIps = String.join(",", conditionDTO.getBrokerList());
    properties.put("bootstrap.servers", brokerIps);
    // Consumer group id
    properties.put("group.id", "kafka_group_nrsg");
    // Disable auto-commit; offsets are committed manually after each batch
    properties.put("enable.auto.commit", "false");
    // Auto-commit interval (only effective when enable.auto.commit is true)
    properties.put("auto.commit.interval.ms", "1000");
    // When no committed offset exists, start from the latest records
    properties.put("auto.offset.reset", "latest");
    // If no heartbeat arrives within this time, the consumer is removed from the group and its partitions are rebalanced
    properties.put("session.timeout.ms", "30000");
    // Maximum time the broker waits before answering a fetch request
    properties.put("fetch.max.wait.ms", "30000");
    properties.put("request.timeout.ms", "30001");
    // Maximum number of records returned by a single poll()
    properties.put("max.poll.records", "10000");
    // Key/value deserializers
    properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    return properties;
}
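For reference, the same configuration can also be expressed with the ConsumerConfig and StringDeserializer constants shipped in kafka-clients, which avoids typos in the raw property keys. This is only a sketch of an alternative to the method above (the name initPropertiesWithConstants is made up here), assuming a 2.x client:

import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

// Same settings as initPropertie, written with ConsumerConfig constants
private static Properties initPropertiesWithConstants(KafkaConsumeConditionDTO conditionDTO) {
    Properties properties = new Properties();
    properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, String.join(",", conditionDTO.getBrokerList()));
    properties.put(ConsumerConfig.GROUP_ID_CONFIG, "kafka_group_nrsg");
    properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
    properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
    properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");
    properties.put(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "30000");
    properties.put(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, "30001");
    properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "10000");
    properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    return properties;
}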