package com.hou.elasticsearch.utils;

import com.alibaba.fastjson.JSONObject;
import com.hou.elasticsearch.bean.Book;
import org.apache.commons.lang.StringUtils;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetRequestBuilder;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.DeleteByQueryAction;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
import org.elasticsearch.search.aggregations.metrics.sum.InternalSum;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.stream.Collectors;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

@Component
public class ElasticSearchUtils {
    private static final Logger LOGGER = LoggerFactory.getLogger(ElasticSearchUtils.class);

    /**
     * In Spring, a static field cannot be @Autowired directly: static fields are
     * initialized by the class loader before the Spring context exists, so the
     * injection would silently fail. We therefore inject instance fields and copy
     * them into the static handles in a @PostConstruct callback.
     */
    @Autowired
    private TransportClient transportClient;

    @Autowired
    private BulkProcessor bulkProcessor;

    // Static handles used by the static utility methods; populated in initBulk().
    private static TransportClient client;

    private static BulkProcessor bulk;

    // Copies the injected instance beans into the static fields once this bean is constructed.
    @PostConstruct
    public void initBulk() {
        bulk = this.bulkProcessor;
        client = this.transportClient;
    }

    /**
     * 创建索引以及映射mapping，并给索引某些字段指定IK分词，以后向该索引中查询时，就会用IK分词。
     *
     * @param indexName 索引名,类似数据库
     * @param esType    类型名,类似表
     * @return mapping 是否创建成功;索引已存在或 mapping 构建失败时返回 false
     */
    public static boolean createIndex(String indexName, String esType) {
        // 索引已存在时重复 prepareCreate 会抛异常;原实现只在"不存在"时打印一句话,没有任何保护
        if (isIndexExist(indexName)) {
            LOGGER.warn("索引[{}]已存在,跳过创建", indexName);
            return false;
        }
        XContentBuilder mapping;
        XContentBuilder settings;
        try {
            //设置信息
            settings = XContentFactory.jsonBuilder()
                    .startObject()
                    .field("number_of_shards", 3)
                    .field("number_of_replicas", 2)
                    /*
                     * 默认情况下索引的refresh_interval为1秒,这意味着数据写1秒后就可以被搜索到,每次索引的 refresh 会产生一个新的 lucene 段,
                     * 这会导致频繁的 segment merge 行为,如果不需要这么高的搜索实时性,应该降低索引refresh 周期
                     */
                    .field("refresh_interval", "30s") //刷新时间
                    .endObject();
            //映射信息:字段名字,类型,分词器等
            mapping = XContentFactory.jsonBuilder()
                    .startObject()
                    .startObject("properties")
                    .startObject("id")
                    .field("type", "text").endObject()
                    .startObject("name")
                    .field("type", "text")
                    .field("store", "yes") //是否存储
                    .field("index", "analyzed") //建立索引并分词 not_analyzed:建立索引不分词  no:不建索引
                    // 存储时使用ik_max_word,搜索时使用ik_smart
                    .field("analyzer", "ik_max_word")
                    .field("search_analyzer", "ik_smart").endObject()
                    .startObject("message")
                    .field("type", "text")
                    .field("analyzer", "ik_max_word")
                    .field("search_analyzer", "ik_smart").endObject()
                    .startObject("price")
                    .field("type", "float").endObject()
                    .startObject("creatDate")
                    .field("type", "date").endObject()
                    .endObject()
                    .endObject();
        } catch (IOException e) {
            // 原实现吞掉异常后继续执行,source(null) 会 NPE;构建失败必须终止
            LOGGER.error("构建索引settings/mapping失败", e);
            return false;
        }
        // index:索引名  type:类型名
        PutMappingRequest putmap = Requests.putMappingRequest(indexName).type(esType).source(mapping);
        //创建索引
        client.admin().indices().prepareCreate(indexName).setSettings(settings).execute().actionGet();
        //为索引添加映射
        AcknowledgedResponse acknowledgedResponse = client.admin().indices().putMapping(putmap).actionGet();
        return acknowledgedResponse.isAcknowledged();
    }

    /**
     * 创建索引,不设置mapping。
     *
     * @param index 索引名
     * @return 创建是否被集群确认;索引已存在时返回 false
     */
    public static boolean createIndex(String index) {
        // 原实现在索引"不存在"时打 error 日志——那恰恰是创建前的正常状态;
        // 真正需要防护的是"已存在"时重复创建(会抛异常)
        if (isIndexExist(index)) {
            LOGGER.warn("索引[{}]已存在,跳过创建", index);
            return false;
        }
        CreateIndexResponse indexResponse = client.admin().indices().prepareCreate(index).execute().actionGet();
        return indexResponse.isAcknowledged();
    }

    /**
     * 删除索引。
     *
     * @param index 索引名
     * @return 删除是否被集群确认;索引不存在时返回 false
     */
    public static boolean deleteIndex(String index) {
        if (!isIndexExist(index)) {
            LOGGER.error("索引不存在");
            return false;
        }
        return client.admin().indices()
                .prepareDelete(index)
                .execute()
                .actionGet()
                .isAcknowledged();
    }

    /**
     * 判断索引是否存在。
     *
     * @param index 索引名
     * @return true 表示索引存在
     */
    public static boolean isIndexExist(String index) {
        // 原实现有一行 System.out 调试残留,且重复调用了 isExists();这里缓存结果并统一走日志
        IndicesExistsResponse indicesExistsResponse = client.admin().indices()
                .exists(new IndicesExistsRequest(index)).actionGet();
        boolean exists = indicesExistsResponse.isExists();
        if (exists) {
            LOGGER.info("Index [" + index + "] is exist!");
        } else {
            LOGGER.info("Index [" + index + "] is not exist!");
        }
        return exists;
    }

    /**
     * 数据添加,指定ID;id已存在时整体覆盖。
     *
     * @param jsonObject 要增加的数据
     * @param index      索引，类似数据库
     * @param type       类型，类似表
     * @param id         数据ID(为空默认生成)
     * @return 写入文档的ID
     */
    public static String addData(JSONObject jsonObject, String index, String type, String id) {
        IndexResponse indexResponse = client.prepareIndex(index, type, id)
                .setSource(jsonObject)
                .get();
        LOGGER.info("addData response status:{},id:{}", indexResponse.status().getStatus(), indexResponse.getId());
        return indexResponse.getId();
    }

    /**
     * 数据添加,自动生成ID(去掉连字符的大写UUID)。
     *
     * @param jsonObject 要增加的数据
     * @param index      索引，类似数据库
     * @param type       类型，类似表
     * @return 写入文档的ID
     */
    public static String addData(JSONObject jsonObject, String index, String type) {
        String generatedId = UUID.randomUUID().toString().replaceAll("-", "").toUpperCase();
        return addData(jsonObject, index, type, generatedId);
    }

    /**
     * 通过ID删除单条数据。
     *
     * @param index 索引，类似数据库
     * @param type  类型，类似表
     * @param id    数据ID
     */
    public static void deleteDataById(String index, String type, String id) {
        DeleteResponse response = client.prepareDelete(index, type, id).execute().actionGet();
        LOGGER.info("deleteDataById response status:{},id:{}", response.status().getStatus(), response.getId());
    }

    /**
     * 通过过滤条件删除:name 匹配 content 且 price 在 [21.0, 25.0) 区间的文档。
     *
     * @param index   索引名
     * @param content name 字段的匹配内容
     */
    public static void deleteByQuery(String index, String content) {
        // 原实现连续调用了两次 filter(...),而 filter 每次都会覆盖之前设置的查询,
        // 导致 name 匹配条件被丢弃;必须合并成一个 bool 查询
        BoolQueryBuilder query = QueryBuilders.boolQuery()
                .must(QueryBuilders.matchQuery("name", content))
                .must(QueryBuilders.rangeQuery("price").from(21.0).to(25.0)
                        .includeLower(true)   // true 包含下界
                        .includeUpper(false)); // false 不包含上界
        long deleted = DeleteByQueryAction.INSTANCE.newRequestBuilder(client)
                .filter(query)
                .source(index) // 指定索引
                .get()
                .getDeleted();
        LOGGER.info("deleteByQuery 删除了{}条数据", deleted);
    }


    /**
     * 通过ID更新数据(部分更新,字段级 merge)。
     *
     * @param jsonObject 要更新的字段数据
     * @param index      索引，类似数据库
     * @param type       类型，类似表
     * @param id         数据ID
     */
    public static void updateDataById(JSONObject jsonObject, String index, String type, String id) {
        UpdateRequest updateRequest = new UpdateRequest();
        updateRequest.index(index).type(type).id(id).doc(jsonObject);
        // 原实现直接丢弃了 client.update 返回的 ActionFuture,更新失败会被静默吞掉;
        // 这里同步等待结果,异常会正常抛出给调用方
        client.update(updateRequest).actionGet();
        LOGGER.info("updateDataById finished,index:{},type:{},id:{}", index, type, id);
    }

    /**
     * 通过ID获取单条数据的 _source。
     *
     * @param index  索引，类似数据库
     * @param type   类型，类似表
     * @param id     数据ID
     * @param fields 需要显示的字段，逗号分隔（缺省为全部字段）
     * @return 文档的 source map;文档不存在时为 null
     */
    public static Map<String, Object> searchDataById(String index, String type, String id, String fields) {
        GetRequestBuilder requestBuilder = client.prepareGet(index, type, id);
        if (StringUtils.isNotEmpty(fields)) {
            // 只取调用方指定的字段
            requestBuilder.setFetchSource(fields.split(","), null);
        }
        GetResponse response = requestBuilder.execute().actionGet();
        return response.getSource();
    }

    /**
     * 批量增加,每累计10万条提交一次。
     *
     * @param index    索引名
     * @param type     类型名
     * @param bookList 待写入的数据
     */
    public static void bulkAddDocument(String index, String type, List<Book> bookList) {
        BulkRequestBuilder bulkRequest = client.prepareBulk();
        for (Book book : bookList) {
            try {
                bulkRequest.add(client.prepareIndex(index, type, book.getId())
                        .setSource(XContentFactory.jsonBuilder()
                                .startObject()
                                .field("id", book.getId())
                                .field("name", book.getName())
                                .field("creatDate", book.getCreatDate())
                                .field("price", book.getPrice())
                                .field("message", book.getMessage())
                                .endObject()));
            } catch (IOException e) {
                // 单条构建失败不影响其余数据
                LOGGER.error("构建索引文档失败,id={}", book.getId(), e);
                continue;
            }
            // 原实现用 Integer.valueOf(id) % 100000 触发提交:非数字id会抛 NumberFormatException,
            // 且提交后未重建 builder,旧请求会被重复发送;这里按累计条数分批并重建 builder
            if (bulkRequest.numberOfActions() >= 100000) {
                executeBulkAndLog(bulkRequest);
                bulkRequest = client.prepareBulk();
            }
        }
        // 空 builder 执行会抛校验异常,只在有剩余请求时提交
        if (bulkRequest.numberOfActions() > 0) {
            executeBulkAndLog(bulkRequest);
        }
    }

    // 提交一批 bulk 请求并记录失败信息
    private static void executeBulkAndLog(BulkRequestBuilder bulkRequest) {
        BulkResponse responses = bulkRequest.execute().actionGet();
        if (responses.hasFailures()) {
            LOGGER.error("bulk error:{}", responses.buildFailureMessage());
        }
        LOGGER.info("insert status:{}", responses.status());
    }


    /**
     * 通过 BulkProcessor 批量增加(由 processor 自行按配置攒批/刷盘)。
     *
     * @param indexName 索引名
     * @param type      类型名
     * @param bookList  待写入的数据
     */
    public static void bulkProcessorAdd(String indexName, String type, List<Book> bookList) {
        bookList.parallelStream().forEach(book -> {
            try {
                XContentBuilder source = XContentFactory.jsonBuilder()
                        .startObject()
                        .field("id", book.getId())
                        .field("name", book.getName())
                        .field("creatDate", book.getCreatDate())
                        .field("price", book.getPrice())
                        .field("message", book.getMessage())
                        .endObject();
                bulk.add(new IndexRequest(indexName, type, book.getId()).source(source));
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
    }

    /**
     * 按ID列表批量删除。
     *
     * @param index 索引名
     * @param type  类型名
     * @param ids   待删除的文档ID列表
     * @return bulk 执行结果
     */
    public static BulkResponse bulkDeleteDocument(String index, String type, List<String> ids) {
        BulkRequestBuilder bulkBuilder = client.prepareBulk();
        for (String id : ids) {
            bulkBuilder.add(client.prepareDelete(index, type, id));
        }
        return bulkBuilder.execute().actionGet();
    }

    /**
     * 查询索引中的前10条数据,按 price 升序。
     *
     * @param indexName 索引名
     * @return 每条命中的 _source JSON 字符串
     */
    public static List<String> searchAll(String indexName) {
        SearchResponse response = client.prepareSearch(indexName)
                .setQuery(matchAllQuery())
                .setFrom(0)
                .setSize(10)
                .addSort("price", SortOrder.ASC)
                .get();
        List<String> results = new ArrayList<>();
        for (SearchHit hit : response.getHits()) {
            results.add(hit.getSourceAsString());
        }
        return results;
    }


    /**
     * 多文档查询,可从不同索引中查询并打印命中的 source。
     *
     * @return 固定返回 null(原接口如此,保持兼容)
     */
    public static String searchMany() {
        MultiGetResponse multiGetItemResponses = client.prepareMultiGet()
                .add("blog", "java", "1")   //添加查询条件
                .add("spring", "boot", "1")
                .get();
        //迭代获取数据
        multiGetItemResponses.forEach(item -> {
            // 子请求失败(如索引不存在)时 getResponse() 为 null,原实现会 NPE
            if (item.isFailed()) {
                LOGGER.warn("multiGet failed,index:{},id:{},reason:{}",
                        item.getIndex(), item.getId(), item.getFailure().getMessage());
                return;
            }
            GetResponse response = item.getResponse();
            if (response.isExists()) {
                Map<String, Object> sourceAsMap = response.getSourceAsMap();
                System.out.println(sourceAsMap);
            }
        });
        return null;
    }

    /**
     * 使用分词查询  高亮 排序 ,并分页
     *
     * @param index          索引名称
     * @param type           类型名称,可传入多个type逗号分隔
     * @param startPage      当前页
     * @param pageSize       每页显示条数
     * @param query          查询条件
     * @param fields         需要显示的字段，逗号分隔（缺省为全部字段）
     * @param sortField      排序字段
     * @param highlightField 高亮字段
     * @return 结果；搜索响应状态非 200 时返回 null
     */
    public static PageBean searchDataPage(String index, String type, int startPage, int pageSize, QueryBuilder query,
                                          String fields, String sortField, String highlightField) {
        SearchRequestBuilder searchRequestBuilder = client.prepareSearch(index);
        if (StringUtils.isNotEmpty(type)) {
            searchRequestBuilder.setTypes(type.split(","));
        }
        // 需要显示的字段，逗号分隔（缺省为全部字段）
        if (StringUtils.isNotEmpty(fields)) {
            searchRequestBuilder.setFetchSource(fields.split(","), null);
        }//排序字段
        if (StringUtils.isNotEmpty(sortField)) {
            searchRequestBuilder.addSort(sortField, SortOrder.ASC);
        }// 高亮（xxx=111,aaa=222）
        if (StringUtils.isNotEmpty(highlightField)) {
            HighlightBuilder highlightBuilder = new HighlightBuilder();
            //设置前缀
            highlightBuilder.preTags("<span style='color:red' >");
            //设置后缀
            highlightBuilder.postTags("</span>");
            // 设置高亮字段
            highlightBuilder.field(highlightField);
            searchRequestBuilder.highlighter(highlightBuilder);
        }
        // 设置是否按查询匹配度排序
        // NOTE(review): setExplain(true) 会为每条命中生成打分解释,有额外开销,生产环境通常应关闭 — 请确认是否有意开启
        searchRequestBuilder.setExplain(true);
        //深度分页
        // NOTE(review): 这里用"页码 startPage"与"文档数阈值 10000 - pageSize"比较,量纲疑似不一致
        // (常见写法是 startPage * pageSize > 10000);请与调用方的分页约定核对
        if (startPage > (10000 - pageSize)) {
            searchRequestBuilder.setQuery(query);
            searchRequestBuilder
                    .setScroll(TimeValue.timeValueMinutes(1))
                    .setSize(10000);
            //打印的内容 可以在 Elasticsearch head 和 Kibana  上执行查询
            LOGGER.info("\n{}", searchRequestBuilder);
            // 执行搜索,返回搜索响应信息
            SearchResponse searchResponse = searchRequestBuilder.execute().actionGet();
            long totalHits = searchResponse.getHits().totalHits;
            if (searchResponse.status().getStatus() == 200) {
                //使用scrollId迭代查询
                // NOTE(review): 下面的 skip 偏移假设首批 10000 条不在 result 中
                // (与 disposeScrollResult 丢弃初始批次的行为耦合);若修复 disposeScrollResult,此公式需同步调整
                List<Map<String, Object>> result = disposeScrollResult(searchResponse, highlightField);
                List<Map<String, Object>> sourceList = result.stream().parallel().skip((startPage - 1 - (10000 / pageSize)) * pageSize).
                        limit(pageSize).collect(Collectors.toList());
                return new PageBean(startPage, pageSize, (int) totalHits, sourceList);
            }
        } else {//浅分页
            searchRequestBuilder.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
            // QUERY_THEN_FETCH    QUERY_AND_FETCH  DFS_QUERY_THEN_FETCH
            // NOTE(review): 下一行的 matchAllQuery 会立刻被 setQuery(query) 覆盖,属于冗余调用
            searchRequestBuilder.setQuery(matchAllQuery());
            searchRequestBuilder.setQuery(query);
            // 分页应用
            // NOTE(review): setFrom(startPage) 把页码直接当作文档偏移量,常见写法是 (startPage - 1) * pageSize;
            // 请确认调用方传入的 startPage 语义后再决定是否修改
            searchRequestBuilder
                    .setFrom(startPage)
                    .setSize(pageSize);
            //打印的内容 可以在 Elasticsearch head 和 Kibana  上执行查询
            LOGGER.info("\n{}", searchRequestBuilder);
            // 执行搜索,返回搜索响应信息
            SearchResponse searchResponse = searchRequestBuilder.execute().actionGet();
            long totalHits = searchResponse.getHits().totalHits;
            long length = searchResponse.getHits().getHits().length;
            LOGGER.debug("共查询到[{}]条数据,处理数据条数[{}]", totalHits, length);
            if (searchResponse.status().getStatus() == 200) {
                // 解析对象
                List<Map<String, Object>> sourceList = setSearchResponse(searchResponse, highlightField);
                return new PageBean(startPage, pageSize, (int) totalHits, sourceList);
            }
        }
        return null;
    }

    /**
     * 深度分页  从当前页为1001开始
     *
     * NOTE(review): 查询条件写死为 matchQuery("name", "名"),该方法目前只能演示用,不是通用分页接口。
     *
     * @param: [indexName, esType, startPage, pageSize, highlightField]
     * @return: com.aqh.utils.EsPage
     * @auther: LHL
     * @date: 2018/10/17 15:07
     */
    public static PageBean deepPageing(String indexName, String esType, int startPage, int pageSize, String highlightField) {
        System.out.println("scroll 模式启动！");
        long begin = System.currentTimeMillis();
        //初始化查询，获取scrollId
        BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
        boolQueryBuilder.must(QueryBuilders.matchQuery("name", "名"));
        //boolQueryBuilder.filter(QueryBuilders.rangeQuery("price").from("1").to("999821"));
        SearchResponse response = client.prepareSearch(indexName)//对应索引
                .setTypes(esType)//对应索引type
                .setQuery(boolQueryBuilder)
                .addSort("price", SortOrder.ASC)
                .setScroll(TimeValue.timeValueMinutes(1))
                // NOTE(review): 注释声称"第一次不返回size条数据",但 prepareSearch + setScroll 的首个响应是携带命中的;
                // 这里之所以"看起来"没有首批数据,是因为 disposeScrollResult 丢弃了初始批次 — 请一并核实
                .setSize(10000) //第一次不返回size条数据
                .highlighter(new HighlightBuilder().preTags("<span style='color:red' >").postTags("</span>").field(highlightField))
                .setExplain(true)
                .execute()
                .actionGet();
        long totalHits = response.getHits().totalHits;
        List<Map<String, Object>> result = disposeScrollResult(response, highlightField);
        // NOTE(review): startPage 较小(约 <= 10000/pageSize)时 skip 的参数为负,会抛 IllegalArgumentException;
        // 方法名约定"从1001页开始",建议增加参数校验
        List<Map<String, Object>> sourceList = result.stream().parallel().skip((startPage - 1 - (10000 / pageSize)) * pageSize).
                limit(pageSize).collect(Collectors.toList());
        long end = System.currentTimeMillis();
        System.out.println("耗时: " + (end - begin) + "ms");
        System.out.println("耗时: " + (end - begin) / 1000 + "s");
        System.out.println("查询" + totalHits + "条数据");
        return new PageBean(startPage, pageSize, (int) totalHits, sourceList);
    }

    /**
     * 处理scroll结果:沿 scrollId 逐批拉取命中并转成 source map 列表,最后清理 scroll 上下文。
     *
     * NOTE(review): 传入 response 中已携带的首批命中从未被收集 —— 循环体先拉取下一批再处理,
     * 初始批次被直接丢弃。两个调用方(searchDataPage 深分页分支、deepPageing)的 skip 偏移公式
     * 似乎依赖这一行为进行了补偿,因此不能只修这里;如要修复需与调用方的偏移计算同步调整。
     *
     * @param: [response, highlightField]
     * @return: java.util.List<java.util.Map   <   java.lang.String   ,   java.lang.Object>>
     * @auther: LHL
     * @date: 2018/10/17 11:17
     */
    private static List<Map<String, Object>> disposeScrollResult(SearchResponse response, String highlightField) {
        List<Map<String, Object>> sourceList = new ArrayList<Map<String, Object>>();
        //使用scrollId迭代查询
        while (response.getHits().getHits().length > 0) {
            String scrollId = response.getScrollId();
            // 先取下一批(当前批次的命中不会进入 sourceList,见上方 NOTE)
            response = client.prepareSearchScroll(scrollId)
                    .setScroll(TimeValue.timeValueMinutes(1))//设置查询context的存活时间
                    .execute()
                    .actionGet();
            SearchHits hits = response.getHits();
            for (SearchHit hit : hits.getHits()) {
                Map<String, Object> resultMap = getResultMap(hit, highlightField);
                sourceList.add(resultMap);
            }
        }
        // 主动释放 scroll 上下文,避免占用集群资源到超时
        ClearScrollRequest request = new ClearScrollRequest();
        request.addScrollId(response.getScrollId());
        client.clearScroll(request);
        return sourceList;
    }

    /**
     * 使用分词查询  排序 高亮
     *
     * @param index          索引名称
     * @param type           类型名称,可传入多个type逗号分隔
     * @param query          查询条件
     * @param size           文档大小限制
     * @param fields         需要显示的字段，逗号分隔（缺省为全部字段）
     * @param sortField      排序字段
     * @param highlightField 高亮字段
     * @return 结果;搜索响应状态非 200 时返回 null
     */
    public static List<Map<String, Object>> searchListData(String index, String type, QueryBuilder query,
                                                           Integer size, String fields, String sortField,
                                                           String highlightField) {
        SearchRequestBuilder requestBuilder = client.prepareSearch(index);
        if (StringUtils.isNotEmpty(type)) {
            requestBuilder.setTypes(type.split(","));
        }
        if (StringUtils.isNotEmpty(highlightField)) {
            // 高亮字段(不设置前后缀标签)
            HighlightBuilder highlight = new HighlightBuilder();
            highlight.field(highlightField);
            requestBuilder.highlighter(highlight);
        }
        requestBuilder.setQuery(query);
        if (StringUtils.isNotEmpty(fields)) {
            requestBuilder.setFetchSource(fields.split(","), null);
        }
        requestBuilder.setFetchSource(true);
        if (StringUtils.isNotEmpty(sortField)) {
            requestBuilder.addSort(sortField, SortOrder.ASC);
        }
        if (size != null && size > 0) {
            requestBuilder.setSize(size);
        }
        // 打印的查询体可以直接在 Elasticsearch head / Kibana 上执行
        LOGGER.info("\n{}", requestBuilder);
        SearchResponse searchResponse = requestBuilder.execute().actionGet();
        long totalHits = searchResponse.getHits().totalHits;
        long length = searchResponse.getHits().getHits().length;
        LOGGER.info("共查询到[{}]条数据,处理数据条数[{}]", totalHits, length);
        if (searchResponse.status().getStatus() != 200) {
            return null;
        }
        // 解析命中(含高亮覆盖)
        return setSearchResponse(searchResponse, highlightField);
    }

    /**
     * 把搜索结果集逐条转为 source map(对高亮字段做覆盖处理)。
     *
     * @param searchResponse 搜索的结果集
     * @param highlightField 高亮字段
     * @return 每条命中对应一个 map
     */
    private static List<Map<String, Object>> setSearchResponse(SearchResponse searchResponse, String highlightField) {
        List<Map<String, Object>> results = new ArrayList<>();
        for (SearchHit hit : searchResponse.getHits()) {
            results.add(getResultMap(hit, highlightField));
        }
        return results;
    }

    /**
     * 把单条命中转为 source map,并用高亮片段覆盖对应字段。
     *
     * @param hit            搜索命中
     * @param highlightField 高亮字段,可为空
     * @return 含 id 的 source map
     */
    private static Map<String, Object> getResultMap(SearchHit hit, String highlightField) {
        Map<String, Object> source = hit.getSourceAsMap();
        source.put("id", hit.getId());
        // 某些命中可能没有该字段的高亮结果(字段未命中高亮),原实现直接取值会 NPE
        if (StringUtils.isNotEmpty(highlightField) && hit.getHighlightFields().containsKey(highlightField)) {
            Text[] fragments = hit.getHighlightFields().get(highlightField).getFragments();
            if (fragments != null) {
                // 原实现只保留最后一个片段,这里把所有高亮片段拼接后覆盖正常结果
                StringBuilder highlighted = new StringBuilder();
                for (Text fragment : fragments) {
                    highlighted.append(fragment.string());
                }
                source.put(highlightField, highlighted.toString());
            }
        }
        return source;
    }

    /**
     * IK分词测试:对示例文本分词并返回各词条(每行一个)。
     *
     * @return 以换行分隔的词条列表
     */
    public static String ik() {
        AnalyzeRequest analyzeRequest = new AnalyzeRequest("entity")
                .text("我和漂亮空姐同居的日子")
                .analyzer("ik_max_word");  //可选: ik_smart  ik_max_word  standard
        List<AnalyzeResponse.AnalyzeToken> tokens = client.admin().indices()
                .analyze(analyzeRequest)
                .actionGet()
                .getTokens();
        StringBuilder stringBuilder = new StringBuilder();
        for (AnalyzeResponse.AnalyzeToken token : tokens) {
            // 原实现写的是 "\\r\\n",输出的是字面反斜杠字符而不是换行,这里改为真正的换行符
            stringBuilder.append(token.getTerm()).append("\r\n");
        }
        return stringBuilder.toString();
    }


    /**
     * 聚合查询,分组统计:单字段 terms 聚合,打印每个分组的名称和文档数。
     * 等价 SQL: select type, count(id) as count from table group by type
     */
    public static void agSearch() {
        // terms 聚合:别名 count,按 type 字段分组
        SearchResponse searchResponse = client.prepareSearch("blog")
                .setTypes("blog")
                .addAggregation(AggregationBuilders.terms("count").field("type"))
                .execute()
                .actionGet();
        Map<String, Aggregation> aggregationMap = searchResponse.getAggregations().getAsMap();
        StringTerms countAgg = (StringTerms) aggregationMap.get("count");
        for (StringTerms.Bucket bucket : countAgg.getBuckets()) {
            System.out.println(bucket.getKey());  //分组名称
            System.out.println(bucket.getDocCount()); //数量
        }
    }

    /**
     * 多字段聚合:先按 team 分组,再在组内按 position 分组并统计数量。
     * 等价 SQL: select team, position, count(*) as pos_count from player group by team, position
     */
    public static void agManySearch() {
        SearchRequestBuilder searchRequestBuilder = client.prepareSearch("blog").setTypes("blog") //指定索引和类型
                //外层聚合:别名 team,按 type 字段分组
                .addAggregation(AggregationBuilders.terms("team").field("type")
                        //指定排序,排序需要在 team 这里指定
                        .order(BucketOrder.aggregation("pos_count", true))//可传一个list,指定多个排序字段
                        //terms 默认只返回10个桶,如需更多需要这里加 size
                        .size(20)
                        //第二个聚合字段,是第一个字段的子聚合对象
                        .subAggregation(AggregationBuilders.terms("pos_count").field("position")));
        SearchResponse searchResponse = searchRequestBuilder.execute().actionGet();
        //遍历结果
        Map<String, Aggregation> aggregationMap = searchResponse.getAggregations().getAsMap();
        // 原实现取的是 get("count"),但上面的聚合别名是 "team",必然返回 null 导致 NPE
        StringTerms terms = (StringTerms) aggregationMap.get("team");
        terms.getBuckets().forEach(teamBucket -> {
            System.out.println(teamBucket.getKey());  //第一个分组名称,比如球队
            //获取第二个分组聚合属性
            StringTerms posTerms = (StringTerms) teamBucket.getAggregations().getAsMap().get("pos_count");
            posTerms.getBuckets().forEach(posBucket -> {
                // 原实现误打印了外层桶(x)的数据;这里输出内层桶:位置名及该位置人数
                System.out.println(posBucket.getKey());
                System.out.println(posBucket.getDocCount());
            });
        });
    }

    /**
     * 统计查询:max/sum 等指标聚合。
     * 等价 SQL: select team, max(age) as maxage, sum(salary) as salaryTotal from player group by team
     */
    public static void test366() {
        SearchRequestBuilder searchRequestBuilder = client.prepareSearch("blog").setTypes("blog") //指定索引和类型
                .addAggregation(AggregationBuilders.terms("team").field("type")
                        // max/sum 都是指标(metrics)聚合,不能互相嵌套:原实现把 sum 挂在 max 下,
                        // ES 会直接拒绝该请求;正确写法是把两者作为 terms 的并列子聚合
                        .subAggregation(AggregationBuilders.max("maxage").field("age"))
                        .subAggregation(AggregationBuilders.sum("salaryTotal").field("salary")));
        SearchResponse searchResponse = searchRequestBuilder.execute().actionGet();
        //遍历结果
        Map<String, Aggregation> aggregationMap = searchResponse.getAggregations().getAsMap();
        StringTerms terms = (StringTerms) aggregationMap.get("team");
        terms.getBuckets().forEach(bucket -> {
            System.out.println(bucket.getKey());  //第一个分组名称,比如球队
            //该分组中的最大年龄
            InternalMax max = (InternalMax) bucket.getAggregations().getAsMap().get("maxage");
            //该分组的总薪水(InternalAvg 为平均值,用法相同)
            InternalSum salarySum = (InternalSum) bucket.getAggregations().getAsMap().get("salaryTotal");
            // 原实现取出结果后从未使用;这里输出统计值
            System.out.println("maxage=" + max.getValue() + ", salaryTotal=" + salarySum.getValue());
        });
    }
}
