package com.elh.es.participle.matchlength;

import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.HttpHost;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.action.get.*;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.*;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.CountResponse;
import org.elasticsearch.client.core.TermVectorsResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.Avg;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.Max;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.junit.Test;

import java.io.IOException;
import java.util.*;
import java.util.concurrent.TimeUnit;

@Slf4j
public class EsUtilsTest {

    // Shared high-level REST client used by every test; points at a dev node over plain HTTP.
    public static RestHighLevelClient client;

    static {
        client = new RestHighLevelClient(
                RestClient.builder(
                        new HttpHost("192.168.211.132", 9200, "http")));
    }

    /**
     * Index a single document supplied as a raw JSON string.
     * <p>
     * Optional request parameters:
     * request.routing("routing");
     * request.timeout(TimeValue.timeValueSeconds(1));
     * request.timeout("1s");
     * request.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL);
     * request.setRefreshPolicy("wait_for");
     * request.version(2);
     * request.versionType(VersionType.EXTERNAL);
     * request.opType(DocWriteRequest.OpType.CREATE);
     * request.opType("create");
     * request.setPipeline("pipeline");
     */
    @Test
    public void createDocument1() throws IOException {
        // NOTE: since Elasticsearch 6+, an index can hold only one mapping type.
        IndexRequest request = new IndexRequest("posts");
        request.id("3");
        String jsonString = "{" +
                "\"user\":\"kimchy1\"," +
                "\"postDate\":\"2013-01-30\"," +
                "\"message\":\"trying out Elasticsearch\"" +
                "}";
        request.source(jsonString, XContentType.JSON);
        IndexResponse indexResponse = client.index(request, RequestOptions.DEFAULT);
        // FIX: the original `log.info();` had no arguments, which does not compile
        // (SLF4J declares no zero-argument info()). Log the indexing result instead.
        log.info("indexResponse = {}", JSONObject.toJSONString(indexResponse));
    }

    /**
     * Index a single document, providing the document source as a Map.
     *
     * @throws IOException on transport failure
     */
    @Test
    public void createDocument2() throws IOException {
        Map<String, Object> jsonMap = new HashMap<>();
        jsonMap.put("user", "kimchy");
        jsonMap.put("postDate", new Date());
        jsonMap.put("message", "trying2 out Elasticsearch");
        // NOTE(review): the (index, type) constructor is deprecated in ES 7.x because
        // mapping types are being removed; kept here so the request still targets type "doc".
        IndexRequest indexRequest = new IndexRequest("posts", "doc")
                .id("2").source(jsonMap);
        IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
        log.info("indexResponse = {}", JSONObject.toJSONString(indexResponse));
    }


    /**
     * Index a document asynchronously, providing the source as a Map.
     *
     * @throws IOException
     */
//    @Test
//    public void createDocument3() throws IOException {
//        Map<String, Object> jsonMap = new HashMap<>();
//        jsonMap.put("user", "kimchy");
//        jsonMap.put("postDate", new Date());
//        jsonMap.put("message", "trying2 out Elasticsearch");
//        IndexRequest indexRequest = new IndexRequest("posts", "doc")
//                .id("4").source(jsonMap);
//        client.indexAsync(indexRequest, RequestOptions.DEFAULT, new ActionListener() {
//
//            @Override
//            public void onResponse(Object object) {
//                // called on success
//                System.out.println(object);
//            }
//
//            @Override
//            public void onFailure(Exception e) {
//                System.out.println(e.getMessage());
//            }
//        });
//        // keep the JVM alive until the async callback fires
//        System.in.read();
//    }


    /**
     * Fetch a document by id, restricting the returned _source to selected fields.
     *
     * @throws IOException on transport failure
     */
    @Test
    public void get() throws IOException {
        // FIX: a GetRequest needs both an index AND a document id; the original
        // request had no id and would fail validation when executed.
        GetRequest request = new GetRequest("posts", "3");
        // fields to include in the returned _source
        String[] includes = new String[]{"message"};
        String[] excludes = Strings.EMPTY_ARRAY;
        FetchSourceContext fetchSourceContext =
                new FetchSourceContext(true, includes, excludes);
        request.fetchSourceContext(fetchSourceContext);
        GetResponse getResponse = client.get(request, RequestOptions.DEFAULT);
        Map<String, Object> sourceAsMap = getResponse.getSourceAsMap();
        log.info("sourceAsMap = {}", JSONObject.toJSONString(sourceAsMap));
//        String sourceAsString = getResponse.getSourceAsString();
//        Map<String, Object> sourceAsMap = getResponse.getSourceAsMap();
//        byte[] sourceAsBytes = getResponse.getSourceAsBytes();
    }


    /**
     * Delete a single document by id.
     *
     * @throws IOException on transport failure
     */
    @Test
    public void delete() throws IOException {
        DeleteRequest request = new DeleteRequest(
                "posts",    // index
                "2");       // document id
        DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT);
        System.out.println(deleteResponse.status().getStatus());
    }

    /**
     * Partially update document "3" in the "posts" index, replacing its "user" field.
     *
     * @throws IOException on transport failure
     */
    @Test
    public void update() throws IOException {
        UpdateRequest updateRequest = new UpdateRequest("posts", "_doc", "3");
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        builder.field("user", "lpw123");
        builder.endObject();
        // FIX: the builder was constructed but never attached to the request,
        // so the update was sent without a document and failed validation.
        updateRequest.doc(builder);
        UpdateResponse updateResponse = client.update(updateRequest, RequestOptions.DEFAULT);
        GetResult getResult = updateResponse.getGetResult();
        log.info("getResult = {}", JSONObject.toJSONString(getResult));
    }


    /**
     * Retrieve term vectors (tokenization and statistics) for a field of a given
     * document; the statistics returned can be configured on the request.
     *
     * @throws IOException
     */
//    @Test
//    public void term() throws IOException {
//        TermVectorsRequest request = new TermVectorsRequest("posts","1");
//        request.setFields("message");
//        TermVectorsResponse termvectors = client.termvectors(request, RequestOptions.DEFAULT);
//        List<TermVectorsResponse.TermVector> termVectorsList = termvectors.getTermVectorsList();
//        for (TermVectorsResponse.TermVector termVector : termVectorsList) {
//            int docCount = termVector.getFieldStatistics().getDocCount();
//            System.out.println("文档中出现次数:" + docCount);
//            System.out.println("文档中的频率的总和 " + termVector.getFieldStatistics().getSumTotalTermFreq());
//            System.out.println("分词的值"+ termVector.getTerms());
//        }
//    }


    /**
     * Copy all documents from the "posts" index into the "dest" index via the
     * reindex API.
     */
    @Test
    public void reindexTest() throws IOException {
        ReindexRequest request = new ReindexRequest();
        request.setSourceIndices("posts");
        request.setDestIndex("dest");
        BulkByScrollResponse bulkResponse =
                client.reindex(request, RequestOptions.DEFAULT);
        log.info("bulkResponse = {}", JSONObject.toJSONString(bulkResponse.getStatus()));
    }


    /**
     * Execute several index/update/delete operations in a single bulk request and
     * inspect each item of the response according to its operation type.
     *
     * @throws IOException on transport failure
     */
    @Test
    public void batchRequest() throws IOException {
        BulkRequest request = new BulkRequest(); // create the BulkRequest
//        request.add(new DeleteRequest("posts", "3")); // add a delete request to the bulk
//        request.add(new UpdateRequest("posts", "2")
//                .doc(XContentType.JSON,"other", "test")); // add an update request to the bulk
        request.add(new IndexRequest("posts").id("7")
                .source(XContentType.JSON, "field", "foo")); // first index request
        request.add(new IndexRequest("posts").id("8")
                .source(XContentType.JSON, "field", "bar")); // second index request
        request.add(new IndexRequest("posts").id("9")
                .source(XContentType.JSON, "field", "baz"));
        BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT);
        BulkItemResponse[] items = bulkResponse.getItems();
        Arrays.stream(items).forEach(bulkItemResponse -> {
            if (!bulkItemResponse.isFailed()) {
                DocWriteRequest.OpType opType = bulkItemResponse.getOpType();
                switch (opType) {
                    case INDEX:
                        IndexResponse indexResponse = bulkItemResponse.getResponse();
                        log.info("indexResponse = {}", JSONObject.toJSONString(indexResponse));
                        break;
                    case CREATE:
                        IndexResponse createIndexResponse = bulkItemResponse.getResponse();
                        log.info("createIndexResponse = {}", JSONObject.toJSONString(createIndexResponse));
                        break;
                    // FIX: the UPDATE and DELETE cases were swapped in the original,
                    // casting a DeleteResponse to UpdateResponse (and vice versa),
                    // which throws ClassCastException at runtime.
                    case UPDATE:
                        UpdateResponse updateResponse = bulkItemResponse.getResponse();
                        log.info("updateResponse = {}", JSONObject.toJSONString(updateResponse));
                        break;
                    case DELETE:
                        DeleteResponse deleteResponse = bulkItemResponse.getResponse();
                        log.info("deleteResponse = {}", JSONObject.toJSONString(deleteResponse));
                        break;
                    default:
                        break;
                }
            } else {
                log.info("获取操作失败的信息 = {}", JSONObject.toJSONString(bulkItemResponse.getFailure()));
            }
        });
    }


    /**
     * Execute multiple UpdateRequests in parallel within one bulk request.
     *
     * @throws IOException on transport failure
     */
    @Test
    public void batchUpdate() throws IOException {
        BulkRequest bulkRequest = new BulkRequest();
        Map<String, Object> jsonMap = new HashMap<>();
        jsonMap.put("user", "liulei");
        jsonMap.put("postDate", new Date());
        jsonMap.put("message", "liulei out Elasticsearch");
        UpdateRequest updateRequest = new UpdateRequest("posts", "2");
        updateRequest.doc(jsonMap);
        bulkRequest.add(updateRequest);

        Map<String, Object> jsonMap1 = new HashMap<>();
        jsonMap1.put("user", "liulei1");
        jsonMap1.put("postDate", new Date());
        jsonMap1.put("message", "liulei1 out Elasticsearch");
        UpdateRequest updateRequest1 = new UpdateRequest("posts", "3");
        updateRequest1.doc(jsonMap1);
        bulkRequest.add(updateRequest1);
        client.bulk(bulkRequest, RequestOptions.DEFAULT);
    }

    /**
     * Insert two documents (with auto-generated ids) via a single bulk request.
     *
     * @throws IOException on transport failure
     */
    @Test
    public void insertUpdate() throws IOException {
        BulkRequest bulkRequest = new BulkRequest();
        Map<String, Object> jsonMap = new HashMap<>();
        jsonMap.put("user", "liulei");
        jsonMap.put("postDate", new Date());
        jsonMap.put("message", "liulei out Elasticsearch");
        IndexRequest indexRequest1 = new IndexRequest("posts");
        indexRequest1.source(jsonMap);
        bulkRequest.add(indexRequest1);

        Map<String, Object> jsonMap1 = new HashMap<>();
        jsonMap1.put("user", "liulei1");
        jsonMap1.put("postDate", new Date());
        jsonMap1.put("message", "liulei1 out Elasticsearch");
        IndexRequest indexRequest2 = new IndexRequest("posts");
        indexRequest2.source(jsonMap1);
        bulkRequest.add(indexRequest2);
        client.bulk(bulkRequest, RequestOptions.DEFAULT);
    }

    /**
     * Run a match_all search against "posts" and log every part of the
     * SearchResponse: cluster stats, shard stats, profiling, timing and hits.
     */
    @Test
    public void getAllDocTest() throws IOException {
        SearchRequest searchRequest = new SearchRequest("posts");
        SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource();
        searchSourceBuilder.query(QueryBuilders.matchAllQuery());
        searchRequest.source(searchSourceBuilder);
        SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
        log.info("searchResponse = {}", JSONObject.toJSONString(searchResponse));
        // Clusters: when searching across clusters this carries the total,
        // successful and skipped cluster counts.
        SearchResponse.Clusters clusters = searchResponse.getClusters();
        log.info("集群中服务的总数量 = {}", clusters.getTotal());
        log.info("成功的数量 = {}", clusters.getSuccessful());
        log.info("跳过的数量 = {}", clusters.getSkipped());

        // Shard information for the executed search.
        log.info("搜索执行失败的分片数 = {}", searchResponse.getFailedShards());
        log.info("执行搜索的分片总数 = {}", searchResponse.getTotalShards());
        log.info("执行搜索的跳过分片总数 = {}", searchResponse.getSkippedShards());
        log.info("执行搜索成功的分片数 = {}", searchResponse.getSuccessfulShards());
        log.info("执行搜索失败的分片原因集合 = {}", JSONObject.toJSONString(searchResponse.getShardFailures()));

        // If profiling was enabled this holds the per-shard profile results,
        // otherwise it is null.
        log.info("profileResults = {}", JSONObject.toJSONString(searchResponse.getProfileResults()));

        // Whether the search timed out.
        log.info("搜索是否超时 = {}", searchResponse.isTimedOut());

        // Time the search took.
        TimeValue took = searchResponse.getTook();
        log.info("搜索总耗时 = {}", took.getStringRep());

        // The matched documents.
        SearchHits responseHits = searchResponse.getHits();
        log.info("命中的文档总数 = {}", responseHits.getTotalHits());
        log.info("命中文档数据中的分数最大值 = {}", responseHits.getMaxScore());
        log.info("文档数据 = {}", responseHits.getAt(0));
    }

    /**
     * Simple paged term query on the "user" field.
     */
    @Test
    public void simpleSearch() throws IOException {
        SearchRequest searchRequest = new SearchRequest("posts");
        SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource();
        searchSourceBuilder.query(QueryBuilders.termQuery("user", "kimchy1"));
        searchSourceBuilder.from(0);
        searchSourceBuilder.size(10);
        searchSourceBuilder.timeout(new TimeValue(60, TimeUnit.MINUTES));
        searchRequest.source(searchSourceBuilder);
//        sourceBuilder.fetchSource(false);
//        String[] includeFields = new String[] {"title", "innerObject.*"};
//        String[] excludeFields = new String[] {"user"};
//        sourceBuilder.fetchSource(includeFields, excludeFields);
        SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
        Arrays.stream(searchResponse.getHits().getHits()).forEach(searchHit -> {
            log.info("source = {}, highlighter = {}", searchHit.getSourceAsString(), searchHit.getHighlightFields());
        });
    }


    /**
     * Paged match query with a highlighted "user" field.
     */
    @Test
    public void searchMany() throws IOException {
        SearchRequest matchSearchRequest = new SearchRequest("posts");
        SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource();
        // FIX: the match query was built but its result was discarded, so the
        // search silently ran as match_all; it must be set on the source builder.
        searchSourceBuilder.query(QueryBuilders.matchQuery("user", "kimchy1"));
        searchSourceBuilder.from(0);
        searchSourceBuilder.size(10);
        // Register the field to highlight.
        HighlightBuilder highlightBuilder = new HighlightBuilder();
        highlightBuilder.field("user");
        searchSourceBuilder.highlighter(highlightBuilder);
        matchSearchRequest.source(searchSourceBuilder);
        // Execute the search.
        SearchResponse searchResponse = client.search(matchSearchRequest, RequestOptions.DEFAULT);
        Arrays.stream(searchResponse.getHits().getHits()).forEach(searchHit -> {
            log.info("source = {}, highlighter = {}", searchHit.getSourceAsString(), searchHit.getHighlightFields());
        });
    }


    /**
     * SearchResponse response = client.prepareSearch("bank")
     * .setTypes("account")
     * .setQuery(QueryBuilders.termQuery("gender.keyword", "F"))
     * .addAggregation(new ValueCountAggregationBuilder("totalCnt", ValueType.LONG).field("account_number")) // number of female customers
     * .addAggregation(new MinAggregationBuilder("minAge").field("age")) // youngest customer
     * .addAggregation(new MaxAggregationBuilder("maxBalance").field("balance")) // largest account balance
     * .addAggregation(new MinAggregationBuilder("minBanlance").field("balance")) // smallest account balance
     * .get();
     * Aggregations aggregations = response.getAggregations();
     * long totalCnt = ((ValueCount) aggregations.get("totalCnt")).getValue();
     * double minAge = ((Min) aggregations.get("minAge")).getValue();
     * double maxBalance = ((Max) aggregations.get("maxBalance")).getValue();
     * double minBanlance = ((Min) aggregations.get("minBanlance")).getValue();
     *
     * @throws IOException
     */
    @Test
    public void agree() throws IOException {
        SearchRequest searchRequest = new SearchRequest("posts");
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        AvgAggregationBuilder avgAggregationBuilder = AggregationBuilders.avg("average_id")
                .field("_id");
//        TermsAggregationBuilder aggregation = AggregationBuilders.terms("by_company")
////                .field("company.keyword");
//        aggregation.subAggregation(AggregationBuilders.avg("average_age")
//                .field("age"));
        searchSourceBuilder.aggregation(avgAggregationBuilder);
        searchRequest.source(searchSourceBuilder);
        SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
        List<Aggregation> aggregationList = searchResponse.getAggregations().asList();
        // FIX: "average_id" is an avg aggregation; casting its result to Max threw
        // ClassCastException at runtime. Cast to Avg instead.
        Avg average_id = (Avg) searchResponse.getAggregations().asMap().get("average_id");
        System.out.println(average_id.getValue());
    }


    /**
     * Paged term query with highlighting; the highlighted fragments replace the
     * original field value in the returned source maps.
     *
     * @param keyword exact term to match
     * @param page    1-based page number (values below 1 are clamped to 1)
     * @param size    page size
     */
    //    @Test
    public void height(String keyword, int page, int size) throws IOException {

        if (page <= 1) {
            page = 1;
        }
        // Build the search request.
        SearchRequest searchRequest = new SearchRequest("索引名");
        // Build the search source.
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        // Exact-match query on the target field.
        TermQueryBuilder termQueryBuilder = QueryBuilders.termQuery("filed", keyword);
        searchSourceBuilder.query(termQueryBuilder);
        searchSourceBuilder.from((page - 1) * size);
        searchSourceBuilder.size(size);
        searchSourceBuilder.timeout(new TimeValue(60, TimeUnit.SECONDS));
        // Highlighting.
        HighlightBuilder highlightBuilder = new HighlightBuilder();
        // Field to highlight.
        highlightBuilder.field("filed");
        // Must be false if multiple fields should be highlighted.
        highlightBuilder.requireFieldMatch(true);
        highlightBuilder.preTags("<span style='color:red'>");
        highlightBuilder.postTags("</span>");

        // Both settings below are required when highlighting long text fields;
        // otherwise highlights may be truncated or content may be missing.
        highlightBuilder.fragmentSize(800000); // maximum highlight fragment size
        highlightBuilder.numOfFragments(0);    // return the whole field as one fragment
        searchSourceBuilder.highlighter(highlightBuilder);

        searchRequest.source(searchSourceBuilder);
        SearchResponse response = client.search(searchRequest, RequestOptions.DEFAULT);
        List<Map<String, Object>> list = new ArrayList<>();
        for (SearchHit hit : response.getHits().getHits()) {
            Map<String, Object> sourceAsMap = hit.getSourceAsMap();
            // Extract the highlighted fragments.
            // FIX: the highlight was registered under "filed" (see above) but looked
            // up under "field", so the highlighted text was never applied.
            Map<String, HighlightField> highlightFields = hit.getHighlightFields();
            HighlightField field = highlightFields.get("filed");
            if (field != null) {
                Text[] fragments = field.fragments();
                StringBuilder n_field = new StringBuilder();
                for (Text fragment : fragments) {
                    n_field.append(fragment);
                }
                // Replace the original value with the highlighted one.
                sourceAsMap.put("filed", n_field.toString());
            }
            // FIX: add the locally modified map, not a fresh lookup, so the
            // highlighted value is what ends up in the result list.
            list.add(sourceAsMap);
        }

    }

    /**
     * Scroll through a large result set: an initial SearchRequest obtains a
     * scrollId, subsequent SearchScrollRequests page through the data using the
     * previous scrollId, and finally the scroll context is cleared.
     */
    @Test
    public void scroll() throws IOException {
        // Initial SearchRequest to obtain the scrollId; scroll() sets how long
        // the scroll context is kept alive between requests.
        SearchRequest searchRequest = new SearchRequest("doc");
        SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource();
        searchSourceBuilder.query(QueryBuilders.termQuery("user", "kimchy1"));
        searchSourceBuilder.size(1000);
        searchSourceBuilder.timeout(new TimeValue(60, TimeUnit.MINUTES));
        searchRequest.source(searchSourceBuilder);
        searchRequest.scroll(TimeValue.timeValueSeconds(30));

        // Get the scrollId and process the first batch.
        SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
        String scrollId = searchResponse.getScrollId();
        log.info("scrollId = {}", scrollId);
        log.info("当前命中的数据数量 = {}", searchResponse.getHits().getTotalHits());

        // Use the scrollId to fetch the next batch.
        SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
        scrollRequest.scroll(TimeValue.timeValueSeconds(30));
        SearchResponse searchScrollResponse = client.scroll(scrollRequest, RequestOptions.DEFAULT);
        scrollId = searchScrollResponse.getScrollId();
        log.info("scrollId = {}", scrollId);
        log.info("当前命中的数据数量 = {}", searchScrollResponse.getHits().getTotalHits());

        // Repeat until all data is retrieved, then clear the scroll context.
        // Contexts expire on their own, but releasing them promptly is best practice.
        ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
        clearScrollRequest.addScrollId(scrollId);
        ClearScrollResponse clearScrollResponse = client.clearScroll(clearScrollRequest, RequestOptions.DEFAULT);
        log.info("清除结果 = {}", clearScrollResponse.isSucceeded());
    }

    /**
     * Execute several search requests in one round trip.
     *
     * @throws IOException on transport failure
     */
    @Test
    public void multiSearch() throws IOException {
        MultiSearchRequest request = new MultiSearchRequest();
        SearchRequest firstSearchRequest = new SearchRequest();
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        searchSourceBuilder.query(QueryBuilders.matchQuery("user", "kimchy"));
        firstSearchRequest.source(searchSourceBuilder);
        request.add(firstSearchRequest);
        SearchRequest secondSearchRequest = new SearchRequest();
        searchSourceBuilder = new SearchSourceBuilder();
        searchSourceBuilder.query(QueryBuilders.matchQuery("user", "luca"));
        secondSearchRequest.source(searchSourceBuilder);
        request.add(secondSearchRequest);
        MultiSearchResponse response = client.msearch(request, RequestOptions.DEFAULT);
        MultiSearchResponse.Item[] responseResponses = response.getResponses();
        log.info("responses = {}", JSONObject.toJSONString(responseResponses));
    }

    /**
     * Field-capabilities query across several indices.
     *
     * @throws IOException on transport failure
     */
    @Test
    public void filedCapabilitiesRequestTest() throws IOException {
        FieldCapabilitiesRequest fieldCapabilitiesRequest = new FieldCapabilitiesRequest();
        // NOTE(review): fields(...) takes field NAMES; "kimchy" looks like a field
        // value rather than a field name — confirm the intended second field.
        fieldCapabilitiesRequest.fields("user", "kimchy");
        fieldCapabilitiesRequest.indices("post1", "post2");
        FieldCapabilitiesResponse response = client.fieldCaps(fieldCapabilitiesRequest, RequestOptions.DEFAULT);
        Map<String, FieldCapabilities> fieldCapabilitiesMap = response.getField("user");
        fieldCapabilitiesMap.forEach((type, fieldCapabilities) -> {
            log.info("type = {}, fieldCapabilities = {}", type, JSONObject.toJSONString(fieldCapabilities));
        });
    }


    /**
     * Count documents matching a query.
     *
     * @throws IOException on transport failure
     */
    @Test
    public void countRequestTest() throws IOException {
        CountRequest countRequest = new CountRequest("post");
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        searchSourceBuilder.query(QueryBuilders.matchQuery("user", "kimchy"));
        countRequest.source(searchSourceBuilder);
        CountResponse countResponse = client.count(countRequest, RequestOptions.DEFAULT);
        log.info("matchCount = {}", countResponse.getCount());
    }

}