package com.example.monit.service;

import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.elasticsearch._types.FieldValue;
import co.elastic.clients.elasticsearch._types.aggregations.*;
import co.elastic.clients.elasticsearch._types.query_dsl.Query;
import co.elastic.clients.elasticsearch._types.query_dsl.RangeQuery;
import co.elastic.clients.elasticsearch._types.query_dsl.TermQuery;
import co.elastic.clients.elasticsearch._types.query_dsl.TermsQuery;
import co.elastic.clients.elasticsearch.core.SearchRequest;
import co.elastic.clients.elasticsearch.core.SearchResponse;
import co.elastic.clients.elasticsearch.sql.QueryResponse;
import co.elastic.clients.json.JsonData;
import com.example.monit.bean.BasicIndicator;
import com.example.monit.bean.InterfaceIndicator;
import com.example.monit.bean.ResponseRust;
import com.example.monit.dto.QueryInterErrorDto;
import com.example.monit.dto.TotalDto;
import com.example.monit.enums.IndexName;
import com.example.monit.utils.TimeUtils;
import com.example.monit.vo.InterfaceerrorsTotalVo;
import com.example.monit.vo.InterfaceerrorsVo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

import java.io.IOException;
import java.io.StringReader;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Collectors;

@Slf4j
@Service
public class InterfaceErrorsServiceImpl extends BasEsService implements InterfaceErrorsService {

    @Autowired
    ElasticsearchClient client;

    /**
     * Lists interface errors grouped by (url, userID, pageUrl) via the Elasticsearch SQL API,
     * then re-aggregates the rows per pageUrl: distinct user/page counts, total error count
     * and average duration. Result is optionally truncated to {@code querys.size} entries.
     *
     * @param querys filter criteria (app id, main/sub type, time window, optional status code/size)
     * @return ResponseRust wrapping a collection of {@link QueryInterErrorDto}
     * @throws IOException if the Elasticsearch call fails
     */
    @Override
    public ResponseRust getInterfaceErrors(InterfaceerrorsVo querys) throws IOException {
        // NOTE(review): the query is assembled with String.format, so string values (appId)
        // are interpolated into SQL text — prefer ES SQL parameter binding ("params") to
        // rule out injection if appId can ever come from untrusted input.
        String sqlString = "SELECT url,count(url),userID,pageUrl,sum(duration) FROM \"interface_indicator\" ";
        String sqlWhere = "where appId='%s' and mainType=%d and subType=%d and startTime between %d and %d ";
        ArrayList<Object> args = new ArrayList<>();
        args.add(querys.getApp_id());
        args.add(querys.getMain_type());
        args.add(querys.getSub_type());
        args.add(querys.getStart_time().toInstant(ZoneOffset.UTC).toEpochMilli());
        args.add(querys.getEnd_time().toInstant(ZoneOffset.UTC).toEpochMilli());
        if (ObjectUtils.isEmpty(querys.getStatus_code())) {
            // No explicit status code: default to the 400/500 error buckets.
            sqlWhere = sqlWhere.concat("and (statusCode=500 or statusCode=400) ");
        } else {
            sqlWhere = sqlWhere.concat("and statusCode = %d ");
            args.add(querys.getStatus_code());
        }
        // Grouping
        String group = "group by url ,userID,pageUrl ";
        // Ordering
        String order = "order by count(url) desc ";
        // Assemble the final SQL template
        sqlString = sqlString + sqlWhere + group + order;
        // BUG FIX: format with the collected argument list. The original passed a fixed
        // five arguments, so the extra %d appended for status_code was never supplied and
        // String.format threw MissingFormatArgumentException whenever status_code was set.
        String format = String.format(sqlString, args.toArray());
        QueryResponse response = client.sql().query(sql -> sql.query(format));
        List<List<JsonData>> list = response.rows();

        // Re-aggregate the per-(url,user,page) rows, keyed by pageUrl.
        HashMap<String, QueryInterErrorDto> map = new HashMap<>();
        for (List<JsonData> jsonData : list) {
            String url = jsonData.get(0).to(String.class);
            Integer count = jsonData.get(1).to(Integer.class);
            String userID = jsonData.get(2).to(String.class);
            String pageUrl = jsonData.get(3).to(String.class);
            BigDecimal sumValue = jsonData.get(4).to(BigDecimal.class);

            QueryInterErrorDto build = QueryInterErrorDto.builder()
                    .url(url)
                    .userCount(1)
                    .pageCount(1)
                    // BUG FIX: Arrays.asList returns a fixed-size view, so the add() calls
                    // in the merge branch below threw UnsupportedOperationException.
                    // Wrap the seed element in a mutable ArrayList instead.
                    .userList(new ArrayList<>(Collections.singletonList(userID)))
                    .pageList(new ArrayList<>(Collections.singletonList(pageUrl)))
                    .sumAverage(sumValue)
                    .count(count).build();

            QueryInterErrorDto queryDto = map.get(pageUrl);
            if (ObjectUtils.isEmpty(queryDto)) {
                map.put(pageUrl, build);
            } else {
                if (!queryDto.getUserList().contains(userID)) {
                    queryDto.setUserCount(queryDto.getUserCount() + 1);
                    queryDto.getUserList().add(userID);
                }
                if (!queryDto.getPageList().contains(pageUrl)) {
                    queryDto.setPageCount(queryDto.getPageCount() + 1);
                    queryDto.getPageList().add(pageUrl);
                }
                queryDto.setCount(queryDto.getCount() + build.getCount());
                // BUG FIX: accumulate the duration sum as well; the original kept only the
                // first row's sum, so the average below was first-row-sum / total-count.
                queryDto.setSumAverage(queryDto.getSumAverage().add(build.getSumAverage()));
            }
        }

        Collection<QueryInterErrorDto> values = map.values();
        for (QueryInterErrorDto queryDto : values) {
            // compareTo, not equals: BigDecimal.equals is scale-sensitive (0.00 != 0),
            // which would sneak a divide-by-zero-guard bypass past the check.
            if (queryDto.getSumAverage().compareTo(BigDecimal.ZERO) != 0) {
                queryDto.setAverage(queryDto.getSumAverage().divide(new BigDecimal(queryDto.getCount()), 2, RoundingMode.HALF_UP));
            }
        }

        if (!ObjectUtils.isEmpty(querys.getSize())) {
            values = values.stream().limit(querys.getSize()).collect(Collectors.toList());
        }

        return ResponseRust.success(values);
    }

    /**
     * Builds a date-histogram of interface errors (bucketed by the requested granularity)
     * with distinct user and page counts per bucket, and maps the buckets to DTOs.
     *
     * @param querys filter + granularity criteria
     * @return ResponseRust wrapping a list of {@link TotalDto}, one per histogram bucket
     * @throws IOException if the Elasticsearch call fails
     */
    @Override
    public ResponseRust totalInterfaceErrorStatistics(InterfaceerrorsTotalVo querys) throws IOException {
        SearchRequest searchRequest = SearchRequest.of(s ->
                s.index(IndexName.InterfaceIndicator)
                        .query(getQuery(querys))
                        .aggregations("count", getAggregation(querys))
                        // Only the aggregation is needed — skip hit documents.
                        .size(0));

        SearchResponse<InterfaceIndicator> response = client.search(searchRequest, InterfaceIndicator.class);

        Map<String, Aggregate> aggregations = response.aggregations();
        Aggregate count = aggregations.get("count");

        Buckets<HistogramBucket> buckets = count.histogram().buckets();
        List<TotalDto> data = totalData(buckets, querys);
        return ResponseRust.success(data);
    }

    /**
     * Builds the histogram aggregation over {@code startTime} whose interval is derived
     * from the granularity string (e.g. "5m", "1d"; defaults to "1d"), with cardinality
     * sub-aggregations for distinct users and pages per bucket.
     * NOTE(review): assumes granularity matches "&lt;digits&gt;&lt;unit&gt;" with unit in
     * [smhdMy]; malformed input throws NumberFormatException / ArrayIndexOutOfBounds.
     */
    private Aggregation getAggregation(InterfaceerrorsTotalVo querys) {
        if (!StringUtils.hasLength(querys.getGranularity())) {
            querys.setGranularity("1d");
        }
        // "10m" -> numeric part 10, unit part "m".
        int intValue = Integer.parseInt(querys.getGranularity().split("[smhdMy]")[0]);
        String type = querys.getGranularity().split(intValue + "")[1];
        ChronoUnit unit = TimeUtils.getType(type);

        HashMap<String, Aggregation> aggregationHashMap = new HashMap<>(2);
        aggregationHashMap.put("userCount", Aggregation.of(userCount -> userCount.cardinality(e -> e.field("userID"))));
        aggregationHashMap.put("pageCount", Aggregation.of(pageCount -> pageCount.cardinality(e -> e.field("pageUrl"))));

        // Interval in milliseconds; minDocCount(0) keeps empty buckets so the series is gapless.
        return Aggregation.of(agg -> agg.histogram(time -> time.interval((double) unit.getDuration().toMillis() * intValue)
                .minDocCount(0)
                .field("startTime")).aggregations(aggregationHashMap));
    }

    /**
     * Builds the bool/must filter for the statistics query: optional url, status code
     * (defaulting to 400 or 500), app id, main/sub type and the startTime range.
     */
    private Query getQuery(InterfaceerrorsTotalVo querys) {
        LinkedList<Query> list = new LinkedList<>();
        if (StringUtils.hasLength(querys.getUrl())){
            list.add(TermQuery.of(t->t.field("url").value(querys.getUrl()))._toQuery());
        }
        // Status-code filter; when absent, default to 400 and 500.
        if (ObjectUtils.isEmpty(querys.getStatus_code())){
            // Matching against a set of values requires a terms query.
           list.add(TermsQuery.of(t->t.field("statusCode").terms(terms->terms.value(Arrays.asList(FieldValue.of(400L),FieldValue.of(500L)))))._toQuery());
        }else {
            list.add(TermQuery.of(t->t.field("statusCode").value(querys.getStatus_code()))._toQuery());
        }

        list.add(TermQuery.of(t->t.field("appId").value(e->e.stringValue(querys.getApp_id())))._toQuery());
        list.add(TermQuery.of(t->t.field("mainType").value(e->e.longValue(querys.getMain_type())))._toQuery());
        list.add(TermQuery.of(t->t.field("subType").value(e->e.longValue(querys.getSub_type())))._toQuery());
        list.add(RangeQuery.of(r->r.gte(JsonData.of(querys.getStart_time().toInstant(ZoneOffset.UTC).toEpochMilli()))
                .lte(JsonData.of(querys.getEnd_time().toInstant(ZoneOffset.UTC).toEpochMilli()))
                .field("startTime"))._toQuery());

        return  Query.of(query -> query.bool(bool->bool.must(list)));
    }

}
