package com.zshield.proc;

import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.zshield.config.KafkaConfig;
import com.zshield.entry.*;
import com.zshield.util.ESclient;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.processor.AbstractProcessor;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.PunctuationType;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.log4j.Logger;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.concurrent.TimeUnit;

/**
 * Kafka Streams processor that buffers offline records in the "kvOffLine"
 * state store, merging each incoming record with either the previously
 * buffered value or (on first sight of a doc id) the current Elasticsearch
 * document. A wall-clock punctuator drains the store downstream every
 * 30 minutes.
 *
 * @param <T> concrete entry type used for JSON (de)serialization and merging
 */
public class OfflineQueryESProc<T extends EntryInterface> extends AbstractProcessor<String,String> {
    private ProcessorContext context;
    private KeyValueStore<String, String> kvOffLine;
    // Records merged into the store since the last punctuation flush (logged, then reset).
    private long count = 0;
    private final Gson gson;
    private final Class<T> type;
    private static final Logger logger = Logger.getLogger(OfflineQueryESProc.class);

    public OfflineQueryESProc(Class<T> t) {
        gson = new Gson();
        this.type = t;
    }

    /**
     * Obtains the "kvOffLine" state store and schedules a 30-minute
     * wall-clock punctuation that forwards every buffered entry (with a
     * URL-encoded doc id as key) to the "Generic_query_proc" child node,
     * deletes it from the store, and commits.
     */
    @Override
    public void init(ProcessorContext context) {
        this.context = context;
        // Narrowly-scoped unchecked cast instead of a raw-typed one; the store
        // is registered elsewhere with <String, String> serdes.
        @SuppressWarnings("unchecked")
        KeyValueStore<String, String> store =
                (KeyValueStore<String, String>) context.getStateStore("kvOffLine");
        kvOffLine = store;
        this.context.schedule(30 * 60 * 1000, PunctuationType.WALL_CLOCK_TIME, (timestamp) -> {
            // try-with-resources: the store iterator is Closeable and must be
            // released even if forwarding throws, otherwise it leaks.
            try (KeyValueIterator<String, String> iter = this.kvOffLine.all()) {
                while (iter.hasNext()) {
                    KeyValue<String, String> kv = iter.next();
                    String docId = kv.key;
                    String data = kv.value;
                    try {
                        String encodeEsId = URLEncoder.encode(docId, "UTF-8");
                        context.forward(encodeEsId, data, "Generic_query_proc");
                    } catch (Exception e) {
                        logger.error("[offline docId encode exception] [The reason of error:{" + e + "}]");
                        KafkaConfig.printErrorLog(logger, e);
                    }
                    // NOTE(review): the entry is deleted even when forwarding
                    // failed above — deliberate best-effort delivery, kept as-is.
                    this.kvOffLine.delete(docId);
                }
            }
            logger.info("[context to kafka every 30 minutes count:" + count + "]");
            count = 0;
            this.context.commit();
        });
    }

    /**
     * Merges one incoming record into the offline buffer keyed by doc id.
     * First occurrence: merge against the current ES document if one exists,
     * otherwise store the value verbatim. Later occurrences: merge against
     * the value already buffered in the store.
     */
    @Override
    public void process(String key, String value) {
        T newEntry = gson.fromJson(value, type);
        String docId = newEntry.getDocId();
        String oldLogValue = kvOffLine.get(docId);
        String afterUpdate;
        if (oldLogValue == null) {
            String searchEsResult = searchEs(docId);
            if (searchEsResult != null) {
                T esEntry = gson.fromJson(searchEsResult, type);
                newEntry.update(esEntry);
                afterUpdate = gson.toJson(newEntry);
            } else {
                afterUpdate = value;
            }
        } else {
            T oldEntry = gson.fromJson(oldLogValue, type);
            newEntry.update(oldEntry);
            afterUpdate = gson.toJson(newEntry);
        }
        kvOffLine.put(docId, afterUpdate);
        count++;
    }

    /**
     * Looks up the document with the given id across the
     * "datamap_precompute*" indices. The id is URL-encoded before querying
     * because documents are stored under encoded ids (mirrors the encoding
     * done by the punctuator in {@link #init}).
     *
     * @param docId raw (un-encoded) document id
     * @return the _source JSON of the first hit, or {@code null} when the
     *         document is absent or the search fails
     */
    public String searchEs(String docId) {
        RestHighLevelClient highClient = ESclient.getHighClient();
        SearchRequest searchRequest = new SearchRequest("datamap_precompute*");
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        searchSourceBuilder.timeout(new TimeValue(60, TimeUnit.SECONDS));
        try {
            String esId = URLEncoder.encode(docId, "UTF-8");
            IdsQueryBuilder queryBuilder = QueryBuilders.idsQuery();
            queryBuilder.addIds(esId);
            searchSourceBuilder.query(queryBuilder);
            searchRequest.source(searchSourceBuilder);
            SearchResponse searchResponse = highClient.search(searchRequest);
            SearchHit[] hits = searchResponse.getHits().getHits();
            if (hits != null && hits.length > 0) {
                return hits[0].getSourceAsString();
            }
            return null;
        } catch (Exception e) {
            logger.error("[es search failure offline] [The reason of error:{" + e + "}]");
            KafkaConfig.printErrorLog(logger, e);
            return null;
        }
    }
}
