ElasticSearch
安装搭建步骤见博客:Linux安装部署elasticsearch
代码配置
pom.xml
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>6.0.0</version>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>transport</artifactId>
<version>6.0.0</version>
</dependency>
application-default.yml
es:
clusterName: my-application
client:
transport:
sniff: true
host: 127.0.0.1 # 部署es的IP地址(YAML注释须用 # ,不能用 //)
port: 9300
shards: 5
replicas: 1
nodeName: node-1
为什么ElasticSearch.yml是9200端口,但是项目中连接要配置9300?
9200作为Http协议,主要用于外部通讯
9300作为Tcp协议,jar之间就是通过tcp协议通讯
ES集群之间是通过9300进行通讯
kibana连接elasticSearch配置9200
es主键?
es中默认将id作为主键,数据同步至_id
但是可以在数据插入时设置某一字段为主键,new IndexRequest().id(property)
代码操作
/**
 * ElasticSearch TransportClient (TCP, port 9300), injected by Spring.
 * Deprecated in ES 7.x in favor of the high-level REST client — fine for 6.0.0 used here.
 */
@Autowired
private TransportClient client;
/**
 * Checks whether the given index exists in the cluster.
 *
 * @param index index name
 * @return true if the index exists
 */
public boolean isIndexExist(String index) {
    return client.admin()
            .indices()
            .prepareExists(index)
            .execute()
            .actionGet()
            .isExists();
}
/**
* 删除索引
* @param index 索引名称
*/
public boolean deleteIndex(String index) {
return isIndexExist(index)
? client.admin().indices().prepareDelete(index).execute().actionGet().isAcknowledged()
: false;
/**
 * Creates a new index unless it already exists.
 *
 * @param index index name
 * @return true if creation was acknowledged; false if the index already exists
 */
public boolean addIndex(String index) {
    if (isIndexExist(index)) {
        return false;
    }
    return client.admin().indices().prepareCreate(index).execute().actionGet().isAcknowledged();
}
/**
 * Indexes a single document.
 *
 * @param index     index name
 * @param type      document type
 * @param id        document id (stored as _id)
 * @param sourceMap document fields
 * @return true on success; false if indexing threw
 */
public boolean insertData(String index, String type, String id, Map<String, Object> sourceMap) {
    try {
        client.prepareIndex()
                .setIndex(index)
                .setType(type)
                .setId(id)
                .setSource(sourceMap)
                .execute()
                .actionGet();
        return true;
    } catch (Exception e) {
        // Log once with the cause attached; printStackTrace duplicated the output
        // and bypassed the logging configuration. ("inex" typo fixed to "index".)
        logger.error(" es新增数据失败!index: " + index + " type:" + type, e);
        return false;
    }
}
/**
 * Bulk-indexes documents; each map key is a document id, each value its source fields.
 *
 * @param index     index name
 * @param type      document type
 * @param sourceMap id -> document fields
 * @return true if every bulk item succeeded; false if any item failed or the request threw
 */
public boolean batchInsertData(String index, String type, Map<String, Map<String, Object>> sourceMap) {
    try {
        BulkRequestBuilder bulkRequest = client.prepareBulk();
        // Iterate entries instead of keySet()+get(): one lookup per document.
        for (Map.Entry<String, Map<String, Object>> entry : sourceMap.entrySet()) {
            logger.debug("插入数据 JSON : " + JSONObject.fromObject(entry.getValue()));
            bulkRequest.add(client.prepareIndex(index, type, entry.getKey()).setSource(entry.getValue()));
        }
        BulkResponse bulkResponse = bulkRequest.execute().actionGet();
        // hasFailures() is true when any single item in the bulk request failed.
        return !bulkResponse.hasFailures();
    } catch (Exception e) {
        // Log once with the cause attached; printStackTrace duplicated the output.
        // ("inex" typo fixed to "index".)
        logger.error(" es新增数据失败!index: " + index + " type:" + type, e);
        return false;
    }
}
/**
 * Deletes every document matching the query from the index; the index itself survives.
 *
 * @param index index name
 * @return number of documents deleted
 */
public long deleteData(String index) {
    // matchAllQuery() wipes all documents; substitute a custom QueryBuilder
    // in filter(...) to delete only a subset.
    return DeleteByQueryAction.INSTANCE.newRequestBuilder(client)
            .filter(QueryBuilders.matchAllQuery())
            .source(index)
            .get()
            .getDeleted();
}
/**
 * Searches the index for documents in the given date range, sorted by "dDate" desc,
 * and returns the source of the LAST hit in sort order.
 *
 * <p>NOTE(review): the original loop overwrote resultMap on every hit, so only the
 * final hit survived — preserved here, but confirm that keeping a single hit is intended.
 *
 * @param beginDate range start (inclusive)
 * @param endDate   range end (inclusive)
 * @return the last hit's source map; an empty map when there are no hits;
 *         null when the search threw
 */
public Map<String,Object> getData(String beginDate, String endDate) {
    try {
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        searchSourceBuilder.query(getQueryBuilder(beginDate, endDate));
        searchSourceBuilder = ESSearchUtil.ESStor(searchSourceBuilder, "dDate", "desc"); // sort
        SearchHit[] hits = elasticSearchSearchManager.search("indexName", "typeName", searchSourceBuilder);
        if (hits == null || hits.length == 0) {
            return new HashMap<>();
        }
        // Equivalent to the original per-hit overwrite loop: keep only the last hit.
        return hits[hits.length - 1].getSourceAsMap();
    } catch (Exception e) {
        // Was logged at info level, which hid failures; this is an error.
        logger.error("获取数据失败!", e);
        return null;
    }
}
/**
 * Runs a search against the given index/type and returns the hit array.
 * (Referenced above as elasticSearchSearchManager.search.)
 *
 * @param index         index name
 * @param type          document type
 * @param sourceBuilder query/sort/paging definition
 * @return the hits, or null when the response is null or the search threw
 */
public SearchHit[] search(String index, String type, SearchSourceBuilder sourceBuilder) {
    try {
        logger.info("统计ES统计查询语句:" + sourceBuilder.toString());
        SearchResponse response = client.search(new SearchRequest(index).types(type).source(sourceBuilder)).get();
        if (response == null) {
            return null;
        }
        return response.getHits().getHits();
    } catch (Exception e) {
        // Pass the exception as the final argument so the full stack trace is logged;
        // the original "msg" + e concatenation at info level captured only e.toString().
        logger.error("查询数据失败!", e);
        return null;
    }
}
//**************** QueryBuilders *********************//
/**
 * Builds an AND (must) query from the optional parameters; blank parameters are skipped.
 *
 * @param str       value the target field must equal (terms AND-combined)
 * @param beginDate lower bound (inclusive) for the date field
 * @param endDate   upper bound (inclusive) for the date field
 * @return a bool query combining every present condition, or matchAll when none are present
 */
private QueryBuilder getQueryBuilder(String str, String beginDate, String endDate) {
    // The original re-tested "bqb != null" before each clause; the first test was
    // always false (bqb had just been initialized to null) and the rest redundant.
    BoolQueryBuilder bqb = QueryBuilders.boolQuery();
    boolean hasClause = false;
    // field equals str
    if (StringUtils.isNotBlank(str)) {
        bqb.must(QueryBuilders.queryStringQuery(str).field("字段名").defaultOperator(Operator.AND));
        hasClause = true;
    }
    // on/after the begin date
    if (StringUtils.isNotBlank(beginDate)) {
        bqb.must(QueryBuilders.rangeQuery("字段名").gte(beginDate));
        hasClause = true;
    }
    // on/before the end date
    if (StringUtils.isNotBlank(endDate)) {
        bqb.must(QueryBuilders.rangeQuery("字段名").lte(endDate));
        hasClause = true;
    }
    return hasClause ? bqb : QueryBuilders.matchAllQuery();
}
/**
 * Builds an OR (should) query over "logType" for three fixed values,
 * AND-combined (must) with optional date-range bounds.
 *
 * @param beginDate lower bound (inclusive) for the date field
 * @param endDate   upper bound (inclusive) for the date field
 * @return the combined bool query
 */
private QueryBuilder getQueryBuilder(String beginDate, String endDate) {
    // logType == 参数一 OR 参数二 OR 参数三; at least one should-clause must match.
    // minimumShouldMatch(1) was set three times in the original — once suffices.
    BoolQueryBuilder bqb = QueryBuilders.boolQuery()
            .should(QueryBuilders.queryStringQuery("参数一").field("logType").defaultOperator(Operator.OR))
            .should(QueryBuilders.queryStringQuery("参数二").field("logType").defaultOperator(Operator.OR))
            .should(QueryBuilders.queryStringQuery("参数三").field("logType").defaultOperator(Operator.OR))
            .minimumShouldMatch(1);
    // bqb is unconditionally assigned above, so the original "bqb != null" checks
    // and their else branches were dead code.
    // on/after the begin date
    if (StringUtils.isNotBlank(beginDate)) {
        bqb.must(QueryBuilders.rangeQuery("字段名").gte(beginDate));
    }
    // on/before the end date
    if (StringUtils.isNotBlank(endDate)) {
        bqb.must(QueryBuilders.rangeQuery("字段名").lte(endDate));
    }
    return bqb;
}
Kibana
安装部署
wget https://artifacts.elastic.co/downloads/kibana/kibana-6.4.3-linux-x86_64.tar.gz
tar -zxvf kibana-6.4.3-linux-x86_64.tar.gz
cd kibana-6.4.3-linux-x86_64
vim config/kibana.yml
修改配置文件中的三个地方(注释掉的删除注释)
server.port: 5601
server.host: "172.23.4.218" (此处改成服务器的IP)
elasticsearch.url: "http://172.23.4.218:9200" (此处改成es的访问地址)
cd /usr/local/kibana-6.4.3-linux-x86_64/bin
./kibana
浏览器访问:http://172.23.4.218:5601
创建索引
PUT test //test:索引名称
{
"mappings": {
"product": { //product:type名称 ps:es 6.0以下支持多type,6.x只支持单type,7.x开始废弃type,8.x彻底移除
"properties": {
"cId": {"type": "text"},
"cName": {"type": "text"}
}
}
}
}
删除索引
DELETE test //test:索引名称
索引动态添加字段
如果不要求旧数据添加新字段的默认值且数据量不大的情况下可以适用此方法,否则推荐下方的创建临时索引的方法。
PUT indexName/typeName/_mapping
{
"properties": {
"新字段名": {
"type": "keyword"
}
}
}
修改索引字段 / 索引数据迁移
因为es不支持动态对字段属性进行修改,所以只能新建临时索引,将原索引数据迁移至临时索引,将原索引删除重新创建后,再将数据迁移至原索引。
POST /_reindex
{
"source": {
"index": "oldIndexName"
},
"dest": {
"index": "newIndexName",
"type":"newTypeName"
},
"conflicts": "proceed"
}
未完待续