package com.elastic.service;

import com.alibaba.fastjson2.JSONObject;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.elastic.documents.AggregationForOne;
import com.elastic.documents.DrillDownForWeb;
import org.apache.lucene.index.Terms;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.CountResponse;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.Cardinality;
import org.elasticsearch.search.aggregations.metrics.ParsedCardinality;
import org.elasticsearch.search.aggregations.metrics.ParsedValueCount;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

import java.io.IOException;
import java.util.*;

public class HotelEsService {

    private Logger logger = LoggerFactory.getLogger(HotelEsService.class);

    @Autowired
    private RestHighLevelClient client;


    /**
     * 1. Count the documents matching a condition, roughly:
     *   select count(1) from myIndex where src = '1.1.1.1';
     *
     * Also demonstrates a cardinality (distinct-count) aggregation on name.keyword.
     *
     * @return number of matching documents (0 if the count query failed)
     * @throws IOException if the cardinality search request fails
     */
    public long query1() throws IOException {

        // Distinct count of name.keyword via a cardinality aggregation.
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        AggregationBuilder aggregation = AggregationBuilders.cardinality("DISTINCT_TOTAL_COUNT").field("name.keyword");
        searchSourceBuilder.aggregation(aggregation);

        SearchRequest request = new SearchRequest();
        // BUGFIX: the source builder was never attached to the request, so the
        // cardinality aggregation was silently ignored by Elasticsearch.
        request.source(searchSourceBuilder);

        SearchResponse search = client.search(request, RequestOptions.DEFAULT);

        Aggregations aggregations = search.getAggregations();

        Cardinality cardinality = aggregations.get("DISTINCT_TOTAL_COUNT");
        logger.info("distinct total count: {}", cardinality.getValue());

        long result = 0;
        // Filter condition; further clauses can be combined on the bool query.
        QueryBuilder qBuilder = QueryBuilders.boolQuery().must(QueryBuilders.termQuery("src.keyword", "1.1.1.1"));

        try {
            result = getAllCount(client, qBuilder);
        } catch (Exception e) {
            logger.error("query1 count failed", e);
        }
        // NOTE: the injected RestHighLevelClient is a shared singleton; closing it
        // here (as the original code did) breaks every subsequent request, so it
        // is intentionally left open.
        return result;
    }

    /**
     * Closes the given Elasticsearch client, tolerating a null reference.
     *
     * @param client the client to close; may be null
     * @throws IOException if closing the underlying connection fails
     */
    private void closeClient(RestHighLevelClient client) throws IOException {
        if (client == null) {
            return;
        }
        client.close();
    }

    /**
     * Counts the documents in "myIndex" that match the given query.
     *
     * @param client       the Elasticsearch client to use
     * @param queryBuilder optional filter; when null, every document is counted
     * @return the matching document count
     * @throws IOException if the count request fails
     */
    private long getAllCount(RestHighLevelClient client, QueryBuilder queryBuilder) throws IOException {
        SearchSourceBuilder source = new SearchSourceBuilder();
        if (queryBuilder != null) {
            source.query(queryBuilder);
        }
        CountRequest countRequest = new CountRequest("myIndex");
        countRequest.source(source);
        return client.count(countRequest, RequestOptions.DEFAULT).getCount();
    }



    /**
     * 2. Count the distinct values of a field, roughly:
     *   select count(distinct src) from myIndex
     *   where date between '2020-01-01 00:00:00' and '2020-01-02 00:00:00';
     *
     * @return approximate distinct count of src.keyword within the range
     *         (0 if the query failed)
     */
    public long query2() {
        // BUGFIX: the range field was an empty string, which makes
        // QueryBuilders.rangeQuery throw IllegalArgumentException. "date"
        // matches the SQL documented above — TODO confirm the real field name.
        String esIndexTime = "date";
        long result = 0;
        QueryBuilder qBuilder = QueryBuilders.boolQuery()
                .must(QueryBuilders.rangeQuery(esIndexTime)
                        .from(0)
                        .to(1));    // filter condition; further clauses can be combined
        try {
            result = getDistinctCount(client, qBuilder, "src.keyword");
        } catch (Exception e) {
            logger.error("query2 distinct count failed", e);
        }
        // NOTE: the injected client is a shared singleton and must stay open;
        // closing it here would break every subsequent request.
        return result;
    }
    /**
     * Returns the approximate number of distinct values of a field.
     *
     * Fast (sub-second even on very large indices) but approximate: beyond the
     * precision threshold the error can reach ~5%, so do not use it where an
     * exact distinct count is required.
     *
     * @param client       the Elasticsearch client to use
     * @param queryBuilder optional filter; may be null
     * @param field        field to deduplicate; keyword fields must be suffixed
     *                     with ".keyword" (e.g. "msg.keyword")
     * @return approximate distinct count
     * @throws IOException if the search request fails
     */
    private long getDistinctCount(RestHighLevelClient client, QueryBuilder queryBuilder, String field) throws IOException {
        // Parameter renamed from "Field" to "field" per Java naming convention.
        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder()
                .aggregation(AggregationBuilders.cardinality("distinct_count")  // aggregation alias
                        .field(field).precisionThreshold(10000))    // approximate distinct count
                .size(0);   // aggregation only — no hits needed
        if (queryBuilder != null) {
            sourceBuilder.query(queryBuilder);          // attach the filter
        }
        SearchRequest searchRequest = new SearchRequest("myIndex");
        searchRequest.source(sourceBuilder);
        Aggregations agg = client.search(searchRequest, RequestOptions.DEFAULT).getAggregations();
        ParsedCardinality types = agg.get("distinct_count");
        return types.getValue();
    }

    /**
     * 3. Single-field group-by aggregation, roughly:
     *   select src, count(src) as count from myIndex
     *   where date between '2020-01-01 00:00:00' and '2020-01-02 00:00:00'
     *   group by src order by count desc limit 15;
     *
     * @return per-bucket aggregation results (empty list on failure — never null)
     */
    public List<AggregationForOne> query3() {
        List<AggregationForOne> result = new ArrayList<>();
        QueryBuilder qBuilder = QueryBuilders.boolQuery()
                .must(QueryBuilders.rangeQuery("date")
                        .from(0)
                        .to(1));    // filter condition; further clauses can be combined
        try {
            List<AggregationForOne> buckets = aggregationForOneFieldByQuery(client, qBuilder, "src.keyword", 15);
            // BUGFIX: guard against a null return so callers always get a list.
            if (buckets != null) {
                result = buckets;
            }
        } catch (Exception e) {
            logger.error("query3 aggregation failed", e);
        }
        // NOTE: the injected client is a shared singleton and must stay open;
        // closing it here would break every subsequent request.
        return result;
    }

    /**
     * Runs a single-field group-by aggregation for the given query.
     * Delegates to {@link #aggregationForOneField}.
     *
     * @param client    the Elasticsearch client to use
     * @param qBuilder  optional filter; may be null
     * @param field     field to group by (keyword fields need ".keyword")
     * @param sizeCount maximum number of buckets to return
     * @return per-bucket aggregation results
     * @throws IOException if the search request fails
     */
    private List<AggregationForOne> aggregationForOneFieldByQuery(RestHighLevelClient client, QueryBuilder qBuilder, String field, int sizeCount) throws IOException {
        // BUGFIX: this was an unimplemented stub returning null, which made
        // query3() return null instead of aggregation results.
        return aggregationForOneField(client, qBuilder, field, sizeCount);
    }

    /**
     * Single-field group-by template; buckets are sorted by count descending.
     * Pattern: filter first, then bucket (terms) aggregation.
     *
     * @param client       the Elasticsearch client (required)
     * @param queryBuilder filter condition; may be null
     * @param field        field to group by (required); keyword fields must be
     *                     suffixed with ".keyword" (e.g. "msg.keyword")
     * @param patterns     optional settings: patterns[0] = number of buckets to
     *                     return (default 10, capped at 10000)
     * @return one AggregationForOne per bucket
     * @throws IOException if the search request fails
     */
    private List<AggregationForOne> aggregationForOneField(RestHighLevelClient client, QueryBuilder queryBuilder, String field, int... patterns) throws IOException {
        int sizeCount = 10;
        if (patterns.length == 1) {
            sizeCount = patterns[0];
        }
        if (sizeCount > 10000) {
            sizeCount = 10000;
        }
        // 1. Build the search request.
        SearchRequest searchRequest = new SearchRequest("myIndex");
        // 2. size(0): aggregation only, no hits returned.
        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(0);
        if (queryBuilder != null) {
            sourceBuilder.query(queryBuilder);
        }
        // Terms (group-by) aggregation, ordered by doc count descending.
        TermsAggregationBuilder aggregation = AggregationBuilders.terms("by_group")   // alias
                .field(field).order(BucketOrder.count(false));

        // Sub-aggregation: value_count of sessionid within each bucket.
        aggregation.subAggregation(AggregationBuilders.count("count")
                        .field("sessionid"))
                .size(sizeCount);            // number of buckets to return
        sourceBuilder.aggregation(aggregation);
        searchRequest.source(sourceBuilder);
        // 3. Execute the request.
        SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
        // 4. BUGFIX: the response handling was commented out, so the method always
        //    returned an empty list. Restored below. The fully-qualified Terms type
        //    is required because this file imports the unrelated
        //    org.apache.lucene.index.Terms.
        List<AggregationForOne> result = new ArrayList<>();
        if (RestStatus.OK.equals(searchResponse.status())) {
            Aggregations aggregations = searchResponse.getAggregations();
            org.elasticsearch.search.aggregations.bucket.terms.Terms byGroup = aggregations.get("by_group");
            for (org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket buck : byGroup.getBuckets()) {
                ParsedValueCount sessionCount = buck.getAggregations().get("count");
                result.add(new AggregationForOne(buck.getKeyAsString(), buck.getDocCount(),
                        buck.getDocCountError(), sessionCount.getValue()));
            }
        }
        return result;
    }

    /**
     * 4. Multi-field group-by via a script-concatenated key, roughly:
     *   select src, dst, msg, count(1) as count from myIndex
     *   where date between '2020-01-01 00:00:00' and '2020-01-02 00:00:00'
     *   group by src, dst, msg order by count desc limit 500;
     *
     * @return one DrillDownForWeb per bucket (empty list on failure)
     */
    public List<DrillDownForWeb> query4() {
        List<DrillDownForWeb> result = new ArrayList<>();
        // BUGFIX: the range field was an empty string, which makes
        // QueryBuilders.rangeQuery throw IllegalArgumentException. "date"
        // matches the SQL documented above — TODO confirm the real field name.
        QueryBuilder qBuilder = QueryBuilders.boolQuery()
                .must(QueryBuilders.rangeQuery("date")
                        .from(0)
                        .to(1));    // filter condition; further clauses can be combined

        try {
            // 1. Build the search request.
            SearchRequest searchRequest = new SearchRequest("myIndex");

            // 2. size(0): aggregation only; 60s search timeout.
            SearchSourceBuilder sourceBuilder = new SearchSourceBuilder()
                    .query(qBuilder)
                    .size(0)
                    .timeout(new TimeValue(60000));

            // Group by "src#dst#msg" via a script key; 500 buckets, count descending.
            // BUGFIX: size() was called twice (Integer.MAX_VALUE then 500); only the
            // last call takes effect, so the redundant first call was removed.
            TermsAggregationBuilder aggregation = AggregationBuilders.terms("by_fngroup")
                    .script(new Script("doc['src.keyword'] +'#'+doc['dst.keyword'] +'#'+ doc['msg.keyword']"))
                    .order(BucketOrder.count(false))
                    .subAggregation(AggregationBuilders.count("count")   // value_count of sessionid per bucket
                            .field("sessionid"))
                    .size(500);
            sourceBuilder.aggregation(aggregation);
            searchRequest.source(sourceBuilder);

            // 3. Execute the request.
            SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);

            // 4. Process the response.
            if (RestStatus.OK.equals(searchResponse.status())) {
                Aggregations aggregations = searchResponse.getAggregations();
                // BUGFIX: this previously resolved to the unrelated
                // org.apache.lucene.index.Terms import and failed with a
                // ClassCastException at runtime; the ES terms-aggregation type is
                // required (fully qualified to avoid the import clash).
                org.elasticsearch.search.aggregations.bucket.terms.Terms byFnGroup =
                        aggregations.get("by_fngroup");
                for (org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket buck : byFnGroup.getBuckets()) {
                    DrillDownForWeb drillDownForWeb = new DrillDownForWeb();
                    // Script keys look like "[src]#[dst]#[msg]"; strip the doc-value brackets.
                    String[] arr = buck.getKeyAsString().split("#");
                    drillDownForWeb.setSrc(arr[0].replace("[", "").replace("]", ""));
                    drillDownForWeb.setDst(arr[1].replace("[", "").replace("]", ""));
                    drillDownForWeb.setMsg(arr[2].replace("[", "").replace("]", ""));
                    // Sub-aggregation: sessionid value_count for this bucket.
                    ParsedValueCount sessionCount = buck.getAggregations().get("count");
                    drillDownForWeb.setCount_backup(sessionCount.getValue());
                    drillDownForWeb.setCount(buck.getDocCount());
                    result.add(drillDownForWeb);
                }
            }
        } catch (Exception e) {
            logger.error("DrillDownDao getOnlyTableDetailsData Exception", e);
        }
        // NOTE: the injected client is a shared singleton and must stay open;
        // closing it here would break every subsequent request.
        return result;
    }

    /**
     * Demo query against the "data_node" index: a should-clause match on
     * data.name = "014", grouped by data.name, then logs both the aggregation
     * buckets and the raw hits.
     */
    public void test() {
        SearchRequest searchRequest = new SearchRequest("data_node");
        searchRequest.types("data_node");
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();

        // Bool query = the SQL WHERE clause; should() is OR, must() is AND.
        BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();
        queryBuilder.should(QueryBuilders.termQuery("data.name", "014"));
        searchSourceBuilder.query(queryBuilder);

        // Terms aggregation = the SQL GROUP BY; nested objects use dot notation,
        // here grouping by the name inside the data object.
        AggregationBuilder termsBuilder = AggregationBuilders.terms("by_data.name").field("data.name");
        searchSourceBuilder.aggregation(termsBuilder);
        searchRequest.source(searchSourceBuilder);

        SearchResponse searchResponse;
        try {
            searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
        } catch (IOException e) {
            // BUGFIX: the response was dereferenced even when search() threw,
            // causing a NullPointerException; bail out instead.
            logger.error("test search failed", e);
            return;
        }

        // Log each terms bucket of the aggregation result (fully-qualified types
        // because the file imports the unrelated org.apache.lucene.index.Terms).
        Aggregations terms = searchResponse.getAggregations();
        for (Aggregation a : terms) {
            org.elasticsearch.search.aggregations.bucket.terms.ParsedTerms parsedTerms =
                    (org.elasticsearch.search.aggregations.bucket.terms.ParsedTerms) a;
            for (org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket bucket : parsedTerms.getBuckets()) {
                logger.info("{}   {}", bucket.getKeyAsString(), bucket.getDocCount());
            }
        }
        // Log the raw matching documents.
        SearchHits searchHits = searchResponse.getHits();
        for (SearchHit hit : searchHits.getHits()) {
            logger.info(hit.getSourceAsString());
        }
    }

    /**
     * Returns the raw hits for all interface calls inside a time range.
     *
     * @param index     index to search
     * @param type      document type of the index
     * @param startTime range start (exclusive) on data.requestTime
     * @param endTime   range end (exclusive) on data.requestTime
     * @return the matching search hits
     * @throws IOException if the search request fails
     */
    public SearchHits getCountByTimeRange(String index, String type, Long startTime, Long endTime) throws IOException {
        BoolQueryBuilder timeFilter = QueryBuilders.boolQuery()
                .must(QueryBuilders.rangeQuery("data.requestTime").gt(startTime).lt(endTime));
        SearchSourceBuilder source = new SearchSourceBuilder().query(timeFilter);

        SearchRequest request = new SearchRequest(index);
        request.types(type);
        request.source(source);

        return client.search(request, RequestOptions.DEFAULT).getHits();
    }

   /**
      * 聚合中设置过滤条件
     * @param index
     * @param type
     * @param startTime
     * @param endTime
     * @return
             */
    public Aggregations getOnDayInvokeAndError(String index, String type, Long startTime, Long endTime, Integer from, Integer size) throws Exception{
        SearchRequest request = new SearchRequest(index);
        request.types(type);
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();

        queryBuilder.must(QueryBuilders.rangeQuery("data.requestTime").gt(startTime).lt(endTime))
                .must(QueryBuilders.termQuery("resource.logsource", "qqzx"));
        searchSourceBuilder.query(queryBuilder);
        //以interfaceCode分组，取interfaceName作为别名
        AggregationBuilder termsBuilder = AggregationBuilders.terms("by_data.interfaceCode").field("data.interfaceCode").field("resource.interfaceName");
        //设置过滤条件
        AggregationBuilder filter = AggregationBuilders.filter("当日异常数量", QueryBuilders.boolQuery().must(QueryBuilders.termQuery("resource.status", "1")));
        termsBuilder.subAggregation(filter);
        searchSourceBuilder.aggregation(termsBuilder).from(from).size(size);

        request.source(searchSourceBuilder);
        SearchResponse searchResponse = client.search(request, RequestOptions.DEFAULT);
        return searchResponse.getAggregations();
    }
    /**
     * Collects distinct interfaceCode values seen in a time range, demonstrating
     * fetchSource include/exclude filtering on the returned documents.
     *
     * @param index     index to search
     * @param type      document type of the index
     * @param startTime range start (exclusive) on data.requestTime
     * @param endTime   range end (exclusive) on data.requestTime
     * @param page      unused; kept for interface compatibility — TODO wire up or drop
     * @return distinct interfaceCode values extracted from the returned hits
     * @throws Exception if the search request fails
     */
    public Set<String> getActiveInterfaceNotRegister(String index, String type, Long startTime, Long endTime, Page page) throws Exception {
        SearchRequest request = new SearchRequest(index);
        request.types(type);
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();

        queryBuilder.must(QueryBuilders.rangeQuery("data.requestTime").gt(startTime).lt(endTime));
        AggregationBuilder termsBuilder = AggregationBuilders.terms("by_data.interfaceCode").field("data.interfaceCode");
        searchSourceBuilder.aggregation(termsBuilder);
        // fetchSource(includes, excludes) restricts which source fields are returned.
        searchSourceBuilder.query(queryBuilder).fetchSource("data.interfaceCode", null).from(0).size(1);

        request.source(searchSourceBuilder);
        SearchResponse searchResponse = client.search(request, RequestOptions.DEFAULT);
        SearchHit[] hits = searchResponse.getHits().getHits();
        Set<String> result = new HashSet<>();
        for (SearchHit hit : hits) {
            String sourceAsString = hit.getSourceAsString();
            JSONObject jsonObject = JSONObject.parseObject(sourceAsString);
            // BUGFIX: guard against hits without a "data" object, which previously
            // threw a NullPointerException on data.getString(...).
            JSONObject data = jsonObject == null ? null : jsonObject.getJSONObject("data");
            if (data != null && data.getString("interfaceCode") != null) {
                result.add(data.getString("interfaceCode"));
            }
        }
        return result;
    }
//    public List selectModuleCount(Map param) throws IOException {
//            // 1、创建search请求
//            //SearchRequest searchRequest = new SearchRequest();
//            SearchRequest searchRequest = new SearchRequest("INDEX_PAGEACCESS");
//            // 2、用SearchSourceBuilder来构造查询请求体 ,请仔细查看它的方法，构造各种查询的方法都在这。
//            SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(0);
//            //加入聚合
//            //字段值项分组聚合
//            TermsAggregationBuilder aggregation = AggregationBuilders.terms("by_fngroup")
//                    .script(new Script("doc['fngroup.keyword'] +'#'+doc['user_id']"))
//                    //.field("fngroup.keyword")
//                    .size(Integer.MAX_VALUE).order(BucketOrder.aggregation("count", true));
//            //计算每组的平均balance指标
//            aggregation.subAggregation(AggregationBuilders.count("count").field("sessionid"));
//            sourceBuilder.aggregation(aggregation);
//            searchRequest.source(sourceBuilder);
//            //3、发送请求
//            SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
//            //4、处理响应
//            //搜索结果状态信息
//            List<Map> result = new ArrayList<>();
//            if (RestStatus.OK.equals(searchResponse.status())) {
//                // 获取聚合结果
//                Aggregations aggregations = searchResponse.getAggregations();
//                Terms byAgeAggregation = aggregations.get("by_fngroup");
//                for (Terms.Bucket buck : byAgeAggregation.getBuckets()) {
//                    Map map = new HashMap();
//                    String[] arr = buck.getKeyAsString().split("#");
//                    map.put("module", arr[0].replace("[", "").replace("]", ""));
//                    map.put("user_id", arr[1].replace("[", "").replace("]", ""));
//                    //取子聚合
//                    ParsedValueCount averageBalance = buck.getAggregations().get("count");
//                    map.put("count", averageBalance.getValue());
//                    result.add(map);
//                }
//            }
//            return result;
//    }


}
