ES Development Environment Sample Code

Java Native API (TransportClient)

  • Single-node ES environment built on an Azure (international edition) virtual machine

pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.gridsum.mediad3</groupId>
    <artifactId>estest</artifactId>
    <version>1.0-SNAPSHOT</version>

    <dependencies>
        <dependency>
            <groupId>org.elasticsearch.client</groupId>
            <artifactId>transport</artifactId>
            <version>6.2.4</version>
        </dependency>
        <dependency>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch</artifactId>
            <version>6.2.4</version>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-io</artifactId>
            <version>1.3.2</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core -->
        <!-- log4j-core is required at runtime for the log4j2.xml configuration below;
             2.9.1 is assumed to match the log4j-api version pulled in by Elasticsearch 6.2.4 -->
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
            <version>2.9.1</version>
        </dependency>
    </dependencies>

</project>

log4j2.xml

<?xml version="1.0" encoding="UTF-8"?>

<Configuration status="warn">
    <Appenders>
        <!-- Console appender configuration -->
        <Console name="Console" target="SYSTEM_OUT">
            <!-- Only log events at or above the configured level are printed to the console (onMatch); everything else is rejected (onMismatch) -->
            <ThresholdFilter level="trace" onMatch="ACCEPT" onMismatch="DENY"/>
            <!-- Log output pattern -->
            <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>
        </Console>

        <!-- This file appender captures all messages; the log is cleared on every run (controlled by the append attribute), which is handy for ad-hoc testing -->
        <!-- append="true" adds messages to the end of the file, "false" overwrites the existing contents; the default is true -->
        <File name="log" fileName="log/test.log" append="false">
            <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>
        </File>

        <!-- A ThresholdFilter selectively outputs events at or above a given level: onMatch="ACCEPT" onMismatch="DENY" means matching events are accepted and everything else is rejected -->
        <File name="ERROR" fileName="logs/error.log">
            <ThresholdFilter level="error" onMatch="ACCEPT" onMismatch="DENY"/>
            <PatternLayout pattern="%d{yyyy.MM.dd 'at' HH:mm:ss z} %-5level %class{36} %L %M - %msg%xEx%n"/>
        </File>

        <!-- This appender logs everything; whenever the file exceeds the configured size, it is rolled into a year-month folder and gzip-compressed as an archive -->
        <RollingFile name="RollingFile" fileName="logs/web.log"
                     filePattern="logs/$${date:yyyy-MM}/web-%d{MM-dd-yyyy}-%i.log.gz">
            <PatternLayout pattern="%d{yyyy-MM-dd 'at' HH:mm:ss z} %-5level %class{36} %L %M - %msg%xEx%n"/>
            <SizeBasedTriggeringPolicy size="2MB"/>
        </RollingFile>
    </Appenders>
    <Loggers>
        <Root level="INFO">
            <AppenderRef ref="Console" />
        </Root>
    </Loggers>
</Configuration>

ESTest.java

package com.gridsum.mediad3;

import java.io.IOException;
import java.net.InetAddress;
import java.util.Date;
import java.text.SimpleDateFormat;

import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;

import org.elasticsearch.action.index.IndexResponse;

import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;

import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;

import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.DocWriteResponse;

import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.SearchHit;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

import org.elasticsearch.action.get.GetResponse;


import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;


public class ESTest {

    private final static Logger logger = LogManager.getLogger(ESTest.class);

    public final static String HOST = "52.231.155.124";

    public final static int PORT = 9300;

    public static TransportClient client;
    private static byte[] LOCK = new byte[0];

    private final static String article="article";
    private final static String content="content";


    public static void main(String[] args) throws Exception {
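        // Demo flow: init the client, drop any existing "article" index, recreate it with a mapping,
        // index documents (single and bulk), run a multi-search, then get/update/delete document "101",
        // and finally drop the index again.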

        initElasticSearchService();
        deleteIndex(article);

        createIndexAndMapping();
        addIndexAndDocument(article,content);
        bulkRequest(article,content);
        multiSearchResponse(article);
        searchById(article,content,"101");
        updateDocument(article,content,"101");
        searchById(article,content,"101");
        deleteById(article,content,"101");
        deleteIndex(article);

    }

    public static void initElasticSearchService() throws Exception {
        synchronized (LOCK) {
            if (client == null) {
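                // cluster.name must match the cluster name configured on the ES server,
                // otherwise the TransportClient rejects the nodes it connects to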
                Settings settings = Settings.builder().put("cluster.name", "my-application")
                //                                      .put("client.transport.sniff", true)
                                                      .build();
                try {
                    client = new PreBuiltTransportClient(settings)
                            .addTransportAddress(new TransportAddress(InetAddress.getByName(HOST), PORT));


                    //logger.info("This is info message.");
                } catch (Exception  e) {
                    throw new Exception("es init failed!", e);
                }
            }
        }
    }

    public static void createIndex(String indexName) {
        client.admin().indices().create(new CreateIndexRequest(indexName)).actionGet();
    }

    public static void createIndex(String index, String type) {
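        // setSource() with no arguments indexes an empty document with an auto-generated id;
        // mainly useful for creating the index/type implicitly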
        client.prepareIndex(index, type).setSource().get();
    }

    public static void createIndexAndMapping() throws IOException {

        CreateIndexRequestBuilder  cib=client.admin().indices().prepareCreate(article);
        XContentBuilder mapping = XContentFactory.jsonBuilder()
                .startObject()
                .startObject("properties") //设置之定义字段
                .startObject("author")
                .field("type","text") //设置数据类型
                .endObject()
                .startObject("title")
                .field("type","text")
                .endObject()
                .startObject("content")
                .field("type","text")
                .endObject()
                .startObject("price")
                .field("type","text")
                .endObject()
                .startObject("view")
                .field("type","text")
                .endObject()
                .startObject("tag")
                .field("type","text")
                .endObject()
                .startObject("date")
                .field("type","date")  //设置Date类型
                .field("format","yyyy-MM-dd HH:mm:ss") //设置Date的格式
                .endObject()
                .endObject()
                .endObject();
        cib.addMapping(content, mapping);
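        // Note: indices created in ES 6.x can have only one mapping type; "content" is that single type here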

        CreateIndexResponse res=cib.execute().actionGet();

        logger.info("----------添加映射成功----------");
        logger.info(res.isAcknowledged());
    }

    public static void addIndexAndDocument(String index, String type) throws Exception{
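        // No explicit id is passed to prepareIndex, so Elasticsearch auto-generates the document id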

        Date time = new Date();

        IndexResponse response = client.prepareIndex(index, type)
                .setSource(XContentFactory.jsonBuilder().startObject()
                        .field("id","447")
                        .field("author","fendo")
                        .field("title","192.138.1.2")
                        .field("content","这是JAVA有关的书籍")
                        .field("price","20")
                        .field("view","100")
                        .field("tag","a,b,c,d,e,f")
                        .field("date",new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(time))
                        .endObject())
                .get();
        logger.info("添加索引成功,版本号:"+response.getVersion());
    }

    public static void bulkRequest(String index,String type) throws Exception {
        BulkRequestBuilder bulkRequest = client.prepareBulk();

        Date time = new Date();

        // either use client#prepare, or use Requests# to directly build index/delete requests
        bulkRequest.add(client.prepareIndex(index, type, "199")
                .setSource(XContentFactory.jsonBuilder()
                        .startObject()
                        .field("id","199")
                        .field("author","fendo")
                        .field("title","BULK")
                        .field("content","这是BULK有关的书籍")
                        .field("price","40")
                        .field("view","300")
                        .field("tag","a,b,c")
                        .field("date",new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(time))
                        .endObject()
                )
        );

        // NOTE: the original listing was truncated at this point; the second bulk item below
        // (document id "101") and the bulk execution are reconstructed to match the ids used in main().
        bulkRequest.add(client.prepareIndex(index, type, "101")
                .setSource(XContentFactory.jsonBuilder()
                        .startObject()
                        .field("id","101")
                        .field("author","fendo")
                        .field("title","BULK2")
                        .field("content","A second book about BULK")
                        .field("price","60")
                        .field("view","200")
                        .field("tag","d,e,f")
                        .field("date",new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(time))
                        .endObject()
                )
        );

        BulkResponse bulkResponse = bulkRequest.get();
        if (bulkResponse.hasFailures()) {
            logger.error("bulk request failed: " + bulkResponse.buildFailureMessage());
        } else {
            logger.info("bulk request succeeded, items: " + bulkResponse.getItems().length);
        }
    }
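    // --------------------------------------------------------------------------------------
    // The methods below were cut off in the original post. These are hedged reconstructions
    // based on the ES 6.2.4 TransportClient API and the calls made in main(); they are not
    // the author's original code. Queries, field values and log messages are illustrative.
    // --------------------------------------------------------------------------------------

    public static void multiSearchResponse(String index) {
        // Combine two searches into a single multi-search request
        SearchRequestBuilder srb1 = client.prepareSearch(index)
                .setQuery(QueryBuilders.queryStringQuery("fendo")).setSize(10);
        SearchRequestBuilder srb2 = client.prepareSearch(index)
                .setQuery(QueryBuilders.matchQuery("title", "BULK")).setSize(10);

        MultiSearchResponse sr = client.prepareMultiSearch().add(srb1).add(srb2).get();

        long totalHits = 0;
        for (MultiSearchResponse.Item item : sr.getResponses()) {
            if (item.isFailure()) {
                logger.error("multi-search item failed: " + item.getFailureMessage());
                continue;
            }
            SearchResponse response = item.getResponse();
            totalHits += response.getHits().getTotalHits();
            for (SearchHit hit : response.getHits().getHits()) {
                logger.info(hit.getSourceAsString());
            }
        }
        logger.info("multi-search total hits: " + totalHits);
    }

    public static void searchById(String index, String type, String id) {
        GetResponse response = client.prepareGet(index, type, id).get();
        if (response.isExists()) {
            logger.info("get " + id + ": " + response.getSourceAsString());
        } else {
            logger.info("document " + id + " not found");
        }
    }

    public static void updateDocument(String index, String type, String id) throws Exception {
        // Partial update: only the fields in the doc are merged into the existing document
        UpdateRequest updateRequest = new UpdateRequest(index, type, id)
                .doc(XContentFactory.jsonBuilder()
                        .startObject()
                        .field("title", "BULK-UPDATED")
                        .endObject());
        UpdateResponse response = client.update(updateRequest).get();
        logger.info("update result: " + response.getResult());
    }

    public static void deleteById(String index, String type, String id) {
        DeleteResponse response = client.prepareDelete(index, type, id).get();
        if (response.getResult() == DocWriteResponse.Result.NOT_FOUND) {
            logger.info("document " + id + " did not exist");
        } else {
            logger.info("delete result: " + response.getResult());
        }
    }

    public static void deleteIndex(String index) {
        // Check existence first so the demo does not fail when the index has not been created yet
        IndicesExistsResponse exists = client.admin().indices()
                .exists(new IndicesExistsRequest(index)).actionGet();
        if (exists.isExists()) {
            DeleteIndexResponse response = client.admin().indices()
                    .prepareDelete(index).execute().actionGet();
            logger.info("index " + index + " deleted: " + response.isAcknowledged());
        } else {
            logger.info("index " + index + " does not exist");
        }
    }
}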