package com.zshield.queryEs;

import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.zshield.config.KafkaConfig;
import com.zshield.config.QueryEsConfig;
import com.zshield.producer.CustomKafkaProducer;
import com.zshield.util.ESclient;
import org.apache.http.HttpEntity;
import org.apache.kafka.common.protocol.types.Field;
import org.apache.log4j.Logger;
import org.elasticsearch.action.search.*;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Runnable that re-reads every "logstash*" index from Elasticsearch and forwards
 * each matching document to Kafka via a shared {@link CustomKafkaProducer}.
 * Documents whose TIME field is later than the index date plus one day are
 * treated as test data and dropped.
 */
public class QueryEsSendKafka implements Runnable {
    private JsonParser jp;
    private RestHighLevelClient highClient;
    private CustomKafkaProducer producer;
    private static final Logger logger = Logger.getLogger(QueryEsSendKafka.class);

    /**
     * @param producer shared Kafka producer used to forward every matching ES document
     */
    public QueryEsSendKafka(CustomKafkaProducer producer) {
        jp = new JsonParser();
        this.producer = producer;
        highClient = ESclient.getHighClient();
    }

    /**
     * Iterates every logstash index and replays its matching documents to Kafka.
     * An error on one index is logged and does not stop the remaining indexes.
     */
    @Override
    public void run() {
        List<String> indexes = obtainRecomputeIndex();
        for (String index : indexes) {
            // If a log's timestamp is more than one day past the index date, drop it.
            try {
                // NOTE(review): hard-coded offsets assume a fixed index-name layout with
                // the date at characters 14-24 (yyyy?MM?dd) — confirm against the actual
                // index naming scheme; a shorter name throws and skips the index.
                String year = index.substring(14,18);
                String month = index.substring(19,21);
                String day = index.substring(22,24);
                String indexTime = year + "-" + month + "-" + day + "T23:59:59.000";
                LocalDateTime locaTime = LocalDateTime.parse(indexTime);
                String indexPlusOneDay = locaTime.plusDays(1).toString();
                termsQueryAndSendToKafka(index,indexPlusOneDay);
            } catch (Throwable e) {
                logger.error("[this index(" + index + ") do not process] [The reason of error:{" + e + "}]");
            }
        }
    }

    /**
     * Lists all indices via the low-level client ({@code GET /_cat/indices}) and
     * returns the sorted names of those starting with "logstash".
     *
     * @return sorted list of logstash index names; empty if the request failed
     */
    public List<String> obtainRecomputeIndex() {
        String content = null;
        try {
            Response response = highClient.getLowLevelClient().performRequest("GET", "/_cat/indices");
            HttpEntity entity = response.getEntity();
            // Read the whole response body. The previous single read() into a fixed
            // 512-byte buffer truncated the listing as soon as there were more than a
            // handful of indices (and a partial read was not even guaranteed to fill it).
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            byte[] chunk = new byte[4096];
            try (InputStream in = entity.getContent()) {
                int n;
                while ((n = in.read(chunk)) != -1) {
                    buffer.write(chunk, 0, n);
                }
            }
            content = buffer.toString("utf-8");
        } catch (Exception e) {
            KafkaConfig.printErrorLog(logger, e);
        }

        List<String> logstashList = new ArrayList<>();
        if (content == null) {
            // Request failed — previously this fell through and threw an NPE on split().
            return logstashList;
        }
        for (String line : content.split("\n")) {
            // _cat/indices columns: health status index uuid ... — the name is column 3.
            String[] cols = line.split("\\s+");
            if (cols.length > 2 && cols[2].startsWith("logstash")) {
                logstashList.add(cols[2]);
            }
        }
        Collections.sort(logstashList);
        return logstashList;
    }

    /**
     * Scroll-queries one index for documents whose field matches the configured
     * values and sends each one (keyed by SENSOR_ID) to Kafka. Documents whose
     * TIME is on/after {@code indexPlusOneDay} are logged and skipped as test data.
     *
     * @param index           the Elasticsearch index to query
     * @param indexPlusOneDay ISO-8601 cutoff timestamp (index date + 1 day)
     */
    public void termsQueryAndSendToKafka (String index,String indexPlusOneDay) {
        SearchRequest searchRequest = new SearchRequest(index);
        final Scroll scroll = new Scroll(TimeValue.timeValueMinutes(1L));
        searchRequest.scroll(scroll);
        SearchSourceBuilder searchBuilder = new SearchSourceBuilder();
        searchBuilder.size(9000);
        QueryBuilder qb = QueryBuilders.termsQuery(QueryEsConfig.FIELD_NAME, QueryEsConfig.FIRST_FIELD_VALUE,QueryEsConfig.SECOND_FIELD_VALUE,QueryEsConfig.THREE_FIELD_VALUE,QueryEsConfig.FOUR_FIELD_VALUE,QueryEsConfig.FIVE_FIELD_VALUE,QueryEsConfig.SIX_FIELD_VALUE);
        searchBuilder.query(qb);
        searchRequest.source(searchBuilder);

        try {
            SearchResponse searchResponse = highClient.search(searchRequest);
            long total = searchResponse.getHits().totalHits;
            logger.info("<<<It is found in es [" + index + "]:" + total +">>>");
            String scroId = searchResponse.getScrollId();
            SearchHit[] searchHits = searchResponse.getHits().getHits();

            int number = 0;
            long startTime = System.currentTimeMillis();
            long sendStartTime = System.currentTimeMillis();
            while (searchHits != null && searchHits.length > 0) {
                for (SearchHit hit : searchHits) {
                    String jsonData = hit.getSourceAsString();
                    try {
                        JsonObject obj = jp.parse(jsonData).getAsJsonObject();
                        String logTime = obj.get("TIME").getAsString();
                        if (logTime.compareTo(indexPlusOneDay) < 0) {
                            String sensorId = obj.get("SENSOR_ID").getAsString();
                            producer.producerMsg(sensorId,jsonData);
                            number++;
                            if (number % 20000 == 0) {
                                long sendEndTime = System.currentTimeMillis();
                                // Clamp elapsed to >=1 ms so the rate never divides by zero.
                                double sendSpeed = 20000 * 1000.0 / Math.max(1L, sendEndTime - sendStartTime);
                                logger.info("[send to kafka successfully] from " + (number - 20000) + " to " + number + ", sendSpeed: " + String.format("%.2f",sendSpeed));
                                sendStartTime = System.currentTimeMillis();
                            }
                        } else {
                            logger.info("[this is a test data] index:" + index + ",TIME in log:" + logTime);
                        }
                    } catch (Exception e) {
                        logger.error("[log parse exception] [ " + e + " ]" + " [" + jsonData + "]");
                        KafkaConfig.printErrorLog(logger,e);
                    }
                }
                producer.flush();
                try {
                    SearchScrollRequest scrollRequest = new SearchScrollRequest(scroId);
                    scrollRequest.scroll(scroll);
                    searchResponse = highClient.searchScroll(scrollRequest);
                    scroId = searchResponse.getScrollId();
                    searchHits = searchResponse.getHits().getHits();
                } catch (Exception e) {
                    logger.error("[scroll query exception] [" + e + "]");
                    KafkaConfig.printErrorLog(logger,e);
                    // Bail out: leaving searchHits unchanged here would re-send the same
                    // batch forever (the previous code looped infinitely on scroll failure).
                    break;
                }
            }

            try {
                // Release the server-side scroll context explicitly.
                ClearScrollRequest clearRequest = new ClearScrollRequest();
                clearRequest.addScrollId(scroId);
                ClearScrollResponse clearScrollResponse = highClient.clearScroll(clearRequest);
            } catch (Exception e) {
                logger.error("[scroll clear exception] [" + e + "]");
                KafkaConfig.printErrorLog(logger,e);
            }

            long endTime = System.currentTimeMillis();
            double speed = number * 1000.0 / Math.max(1L, endTime - startTime);
            logger.info("[It is found in es[" + index + "]:" + total + ", sent to kafka successfully:" + number + "], overall mean speed:" + String.format("%.2f",speed));
        } catch (Exception e) {
            logger.error("[SearchRequest search failure] [ " + e + " ]");
            KafkaConfig.printErrorLog(logger,e);
        }
    }

}
