1. Kafka property configuration
Add the Kafka settings below to config.properties. Make sure this file is loaded by a property placeholder (for example <context:property-placeholder location="classpath:config.properties"/>), otherwise the @Value placeholders used in the configuration classes will not resolve.
#kafka (a cluster is configured here; the same settings also work for a single broker)
kafka.bootstrap.servers=10.1.10.78:9092,10.1.10.84:9092,10.1.10.85:9092
session.timeout.ms=30000
enable.auto.commit=true
auto.commit.interval.ms=1000
auto.offset.reset=latest
group.id=user-realtime-baoji
kafka.concurrency=3
kafka.topic=userPoseTopic
2. Adding the Kafka dependency
Add the following dependency to pom.xml; spring-kafka 1.1.1.RELEASE transitively brings in a compatible kafka-clients version, so no separate client dependency is needed.
<!-- kafka dependency *****start******* -->
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
    <version>1.1.1.RELEASE</version>
</dependency>
<!-- kafka dependency *****end******** -->
3. Writing the Kafka consumer configuration class
com.lantaiyuan.ebus.custom.kafka.KafkaConfig
package com.lantaiyuan.ebus.custom.kafka;

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;

@Configuration
@EnableKafka
public class KafkaConfig {

    @Value("${kafka.bootstrap.servers}")
    private String kafkaBootstrapServers;

    @Value("${session.timeout.ms}")
    private Integer sessionTimeoutMs;

    @Value("${enable.auto.commit}")
    private boolean enableAutoCommit;

    @Value("${auto.commit.interval.ms}")
    private Integer autoCommitIntervalMs;

    @Value("${auto.offset.reset}")
    private String autoOffsetReset;

    @Value("${group.id}")
    private String groupId;

    @Value("${kafka.concurrency}")
    private Integer concurrency;

    @Bean
    KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        // listener thread count, taken from the kafka.concurrency property;
        // it should not exceed the partition count of the topic
        factory.setConcurrency(concurrency);
        factory.getContainerProperties().setPollTimeout(3000);
        return factory;
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitIntervalMs);
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sessionTimeoutMs);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
        // String keys and values, matching the StringSerializer pair used by the producer in step 4
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return props;
    }

    // The KafkaConsumer listener (step 7) is registered through component scanning
    // (step 5), so no @Bean for it is declared here; declaring both would create
    // two listener instances in the same consumer group.
}
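For reference, @KafkaListener resolves its container factory by the bean name kafkaListenerContainerFactory, which is exactly the name of the @Bean above, so the listener in step 7 does not need to reference it explicitly. An equivalent explicit form of that listener method would be:

// equivalent to the listener in step 7; containerFactory can be omitted because
// "kafkaListenerContainerFactory" is the default bean name the annotation looks up
@KafkaListener(topics = "${kafka.topic}", containerFactory = "kafkaListenerContainerFactory")
public void listen(ConsumerRecord<String, String> record) {
    // handle the message
}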
4. Writing the Kafka producer configuration class
com.lantaiyuan.ebus.custom.kafka.KafkaProducerConfig
package com.lantaiyuan.ebus.custom.kafka;

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

@Configuration
@EnableKafka
public class KafkaProducerConfig {

    @Value("${kafka.bootstrap.servers}")
    private String brokerAddress;

    @Value("${kafka.topic}")
    private String defaultTopic;

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokerAddress);
        props.put(ProducerConfig.RETRIES_CONFIG, 0);              // no automatic retries; failed sends surface via the send future
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);       // 16 KB per batch
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);            // wait up to 1 ms to fill a batch
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432); // 32 MB send buffer
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return props;
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        KafkaTemplate<String, String> kafkaTemplate = new KafkaTemplate<>(producerFactory());
        // default topic used by sendDefault(), taken from the kafka.topic property
        kafkaTemplate.setDefaultTopic(defaultTopic);
        return kafkaTemplate;
    }
}
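sendDefault() returns a ListenableFuture, and because RETRIES_CONFIG is 0 above, a callback on that future is the only place a failed send becomes visible. A minimal usage sketch; the helper method itself is illustrative, not part of the project:

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;

// illustrative helper: send to the default topic and observe the outcome
public void sendWithCallback(KafkaTemplate<String, String> kafkaTemplate, String data) {
    ListenableFuture<SendResult<String, String>> future = kafkaTemplate.sendDefault(data);
    future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
        @Override
        public void onSuccess(SendResult<String, String> result) {
            // result.getRecordMetadata() carries the topic, partition and offset
        }

        @Override
        public void onFailure(Throwable ex) {
            // with retries disabled, log or re-queue the message here
        }
    });
}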
5. Scanning the Kafka configuration classes
Add the package containing the Kafka configuration classes, com.lantaiyuan.ebus.custom.kafka, to the Spring component scan so that KafkaConfig, KafkaProducerConfig and the KafkaConsumer listener are all picked up:
<!-- component scan (automatic injection) -->
<context:component-scan
base-package="com.lantaiyuan.ebus.common.aop,
com.lantaiyuan.ebus.custom.service,
com.lantaiyuan.ebus.realtime.service,
com.lantaiyuan.ebus.common.util,
com.lantaiyuan.ebus.custom.kafka" />
6. Producing data with kafkaTemplate
com.lantaiyuan.ebus.custom.controller.DataCollectionController
/**
 * @Title: DataCollectionController.java
 * @Package com.lantaiyuan.ebus.custom.controller
 * Company: 深圳市蓝泰源信息技术股份有限公司
 * @author Yuan.Tan
 * @date 2017-02-14 11:22:43
 */
package com.lantaiyuan.ebus.custom.controller;

import javax.annotation.Resource;

import org.lanqiao.ssm.common.model.Json;
import org.lanqiao.ssm.common.web.controller.base.BasicController;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("/dataCollection")
public class DataCollectionController extends BasicController {

    // matches the KafkaTemplate<String, String> bean declared in KafkaProducerConfig
    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    // used below but not declared in the original listing; the type name and
    // injection shown here are assumptions inferred from the field name
    @Resource
    private TraveByCarService traveByCarService;

    /**
     * Receives the user's position, reported every 10 s.
     * @author liuhao
     */
    @GetMapping(value = "/userPos")
    public Json userPos(@RequestParam String datacollection) {
        // publish the position message to Kafka (default topic from kafka.topic)
        kafkaTemplate.sendDefault(datacollection);
        // push boarding/alighting reminders based on the user's position
        traveByCarService.jpush4GoToCarAndLeavingCar(datacollection);
        return setSimpleSuccess();
    }
}
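The consumer in step 7 expects datacollection to be a JSON string carrying userid, citycode, longitude and latitude. A hypothetical payload, built with the fastjson API already used in the project (all field values are made up):

// com.alibaba.fastjson.JSONObject; values are illustrative only
JSONObject pos = new JSONObject();
pos.put("userid", "10001");
pos.put("citycode", "0755");
pos.put("longitude", 114.057868);
pos.put("latitude", 22.543099);
kafkaTemplate.sendDefault(pos.toJSONString());
// wire form: {"userid":"10001","citycode":"0755","longitude":114.057868,"latitude":22.543099}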
7. Writing the Kafka consumer
com.lantaiyuan.ebus.custom.kafka.KafkaConsumer
package com.lantaiyuan.ebus.custom.kafka;

import java.util.concurrent.ConcurrentHashMap;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

import com.alibaba.fastjson.JSONObject;
import com.lantaiyuan.ebus.common.constants.GlobalMap;
import com.lantaiyuan.ebus.custom.model.UserPosRecord;

/***
 * <p>Title: KafkaConsumer</p>
 * <p>Description: consumes user-position and vehicle GPS data from Kafka</p>
 * <p>Company: lty</p>
 * @author liuhao
 * @date 2017-05-05 14:17:41
 */
@Component // a plain component is enough for a listener; @Configuration is not needed
public class KafkaConsumer {

    @KafkaListener(topics = "${kafka.topic}")
    public void listen(ConsumerRecord<String, String> record) {
        String datacollection = record.value();
        JSONObject dataJsonObj = JSONObject.parseObject(datacollection);
        if (StringUtils.isEmpty(dataJsonObj.get("userid"))) { // the user is not logged in
            return;
        }
        String userId = dataJsonObj.get("userid").toString();
        Double longitude = Double.valueOf(dataJsonObj.get("longitude").toString());
        Double latitude = Double.valueOf(dataJsonObj.get("latitude").toString());
        String citycode = dataJsonObj.get("citycode").toString();
        UserPosRecord userPosRecord = new UserPosRecord(citycode, userId, longitude, latitude);
        if (GlobalMap.userPosMap.containsKey(citycode)) {
            // the city is already cached: put() by userId replaces the user's previous position
            GlobalMap.userPosMap.get(citycode).put(userId, userPosRecord);
        } else {
            // first record for this city: create its user map
            ConcurrentHashMap<String, UserPosRecord> cityMap = new ConcurrentHashMap<>();
            cityMap.put(userId, userPosRecord);
            GlobalMap.userPosMap.put(citycode, cityMap);
        }
    }
}
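GlobalMap and UserPosRecord are referenced above but not listed in this article. A minimal sketch of the shape the listener assumes, not the project's actual source:

import java.util.concurrent.ConcurrentHashMap;

// assumed shape: citycode -> (userId -> latest position record)
public final class GlobalMap {
    public static final ConcurrentHashMap<String, ConcurrentHashMap<String, UserPosRecord>> userPosMap =
            new ConcurrentHashMap<>();

    private GlobalMap() {
    }
}

On Java 8, the containsKey/else branch in listen() can also be collapsed into one atomic call, which avoids losing an update when two listener threads see a new city at the same time:

// atomic equivalent of the if/else in listen()
GlobalMap.userPosMap
        .computeIfAbsent(citycode, k -> new ConcurrentHashMap<String, UserPosRecord>())
        .put(userId, userPosRecord);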