In this project, Logstash pulls data from Kafka, applies some formatting, and writes the result to Elasticsearch. Coordinates are stored as the geo_point type, so spatial queries can be run against the data.
The configuration files are as follows:
es.conf
input {
  kafka {
    type => "test"
    auto_offset_reset => "smallest"
    group_id => "mon9"
    topic_id => "db100"
    zk_connect => "192.168.199.6:2181,192.168.199.7:2181,192.168.199.8:2181"
  }
}

filter {
  mutate {
    split => { "message" => "," }
    add_field => {
      "id" => "%{message[1]}"
      "DZBM_s" => "%{message[2]}"
      "DZQC_s" => "%{message[3]}"
      "SJYTMC_s" => "%{message[4]}"
      "FWYT_s" => "%{message[5]}"
      "lon" => "%{message[6]}"
      "lat" => "%{message[7]}"
    }
    remove_field => [ "message", "host" ]
  }
  mutate {
    convert => {
      "lon" => "float"
      "lat" => "float"
    }
  }
  mutate {
    rename => {
      "lon" => "[geo][lon]"
      "lat" => "[geo][lat]"
    }
  }
}

output {
  elasticsearch {
    index => "base_map"
    template => "map_template.json"
    document_type => "node_points"
    hosts => ["192.168.199.10:9200", "192.168.199.11:9200"]
  }
}
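For reference, here is a hypothetical Kafka message matching the comma-separated layout the filter above assumes (all values are made up; the split array is read from index 1, so the value at index 0 is ignored), followed by the document that roughly results after the split, convert and rename steps. Only lon and lat are converted to float; the other extracted fields stay as strings, and Logstash metadata such as @timestamp and type is omitted here:

0,10001,ADDR0001,No.100 Sample Street,residential,apartment,120.1551,30.2741

{
  "id": "10001",
  "DZBM_s": "ADDR0001",
  "DZQC_s": "No.100 Sample Street",
  "SJYTMC_s": "residential",
  "FWYT_s": "apartment",
  "geo": { "lon": 120.1551, "lat": 30.2741 }
}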
map_template.json:
{
  "template": "base_map",
  "order": 1,
  "settings": {
    "number_of_shards": 1
  },
  "mappings": {
    "node_points": {
      "properties": {
        "geo": { "type": "geo_point" }
      }
    }
  }
}
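With geo mapped as geo_point, the index supports spatial searches. A minimal sketch of a geo_distance query against one of the hosts configured above (the center point and the 1km radius are arbitrary example values):

curl -XGET 'http://192.168.199.10:9200/base_map/node_points/_search?pretty' -d '
{
  "query": {
    "bool": {
      "filter": {
        "geo_distance": {
          "distance": "1km",
          "geo": { "lat": 30.2741, "lon": 120.1551 }
        }
      }
    }
  }
}'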
Note:
The path to the mapping template file (the template option in es.conf) must be correct, otherwise the template is not loaded and the geo_point mapping will not be applied; an absolute path is the safest choice.
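One way to confirm that the template and mapping actually took effect is to ask Elasticsearch directly (a sketch, using a host from the output section; the _template and _mapping APIs return the installed templates and the index mapping):

curl -XGET 'http://192.168.199.10:9200/_template?pretty'
curl -XGET 'http://192.168.199.10:9200/base_map/_mapping?pretty'

The geo field should appear with "type": "geo_point"; if it shows up as a plain object of numeric fields instead, the template was not picked up.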