package com.seari.movedata.main.dataHander;

import com.seari.movedata.utils.*;

import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.Pipeline;

import java.util.*;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

import static org.influxdb.dto.Point.measurement;

/**
 * Worker that drains batches of DI/AI field names from two blocking queues,
 * resolves their current values from a Redis hash (pipelined HGET), and writes
 * the resulting points to InfluxDB in a single batch.
 *
 * <p>Each queued {@code List<String>} carries the Redis hash key as its LAST
 * element; all preceding elements are hash field names. One worker owns one
 * Jedis connection and one InfluxDB connection for its whole lifetime.
 */
public class HistoryDataHandlerWorker implements Runnable {

    private static final Logger logger = LoggerFactory.getLogger(HistoryDataHandlerWorker.class);

    // Connection settings, loaded once per class from commonConfig.properties.
    // username/password are currently unused here — presumably consumed by
    // InfluxDBConnUtils or kept for parity with other workers; verify before removing.
    private static String username = PropertiesUtil.getProperty("commonConfig.properties", "username");
    private static String password = PropertiesUtil.getProperty("commonConfig.properties", "password");
    private static String openurl = PropertiesUtil.getProperty("commonConfig.properties", "openurl");
    private static String database = PropertiesUtil.getProperty("commonConfig.properties", "database");
    private static String measurement = PropertiesUtil.getProperty("commonConfig.properties", "measurement");

    BlockingQueue<List<String>> aiQueue;
    BlockingQueue<List<String>> diQueue;
    Jedis jedis = JedisPoolUtils.getJedis();

    InfluxDBConnUtils influxDB = new InfluxDBConnUtils(openurl, database);

    /**
     * @param aiQueue queue of AI batches (hash fields + trailing hash key)
     * @param diQueue queue of DI batches (hash fields + trailing hash key)
     */
    public HistoryDataHandlerWorker(BlockingQueue<List<String>> aiQueue, BlockingQueue<List<String>> diQueue) {
        this.aiQueue = aiQueue;
        this.diQueue = diQueue;
    }

    @Override
    public void run() {
        try {
            influxDB.influxDbBuild();
            influxDB.createRetentionPolicy();
            process();
        } catch (Exception ex) {
            // Bookkeeping so the starter knows a worker died.
            HistoryDataHandlerStarter.workerCount.decrementAndGet();
            if (ex instanceof InterruptedException) {
                // Restore the interrupt flag for anyone above us.
                Thread.currentThread().interrupt();
            }
            // Log through SLF4J (was ex.printStackTrace()) so the failure
            // lands in the application log with a stack trace.
            logger.error("HistoryDataHandlerWorker terminated unexpectedly", ex);
        } finally {
            // close() alone is sufficient; it returns the connection to the
            // pool (or disconnects a non-pooled one).
            jedis.close();
        }
    }

    /**
     * Main loop: DI batches take priority over AI batches; when both queues
     * are empty after their poll timeouts, back off before retrying.
     */
    public void process() throws Exception {
        while (true) {
            List<String> diDatas = diQueue.poll(3, TimeUnit.SECONDS);
            if (diDatas != null) {
                sendDiData(diDatas);
                continue;
            }
            List<String> aiDatas = aiQueue.poll(3, TimeUnit.SECONDS);
            if (aiDatas != null) {
                sendAiData(aiDatas);
            } else {
                // Message now matches the actual back-off duration (was "3 seconds").
                logger.info("diQueue and aiQueue is null,sleep 5 seconds");
                TimeUnit.SECONDS.sleep(5);
            }
        }
    }

    /**
     * Resolves one DI batch from Redis and writes it to InfluxDB.
     * The last element of {@code diDatas} is the Redis hash key; the list is
     * mutated (key removed), matching the original behavior.
     */
    public void sendDiData(List<String> diDatas) throws Exception {
        sendBatch(diDatas, "sendDiData");
    }

    /**
     * Resolves one AI batch from Redis and writes it to InfluxDB.
     * The last element of {@code aiDatas} is the Redis hash key; the list is
     * mutated (key removed), matching the original behavior.
     */
    public void sendAiData(List<String> aiDatas) throws Exception {
        sendBatch(aiDatas, "sendAiData");
    }

    /**
     * Shared implementation for DI/AI batches (the two methods were identical
     * copy-paste apart from log labels).
     *
     * @param datas hash field names with the hash key as the trailing element
     * @param label caller name used in the batch-size log line
     */
    private void sendBatch(List<String> datas, String label) throws Exception {
        // Trailing element is the Redis hash key for this whole batch.
        String hashKey = datas.remove(datas.size() - 1);

        // Pipeline all HGETs to avoid one round-trip per field.
        Pipeline pipeline = jedis.pipelined();
        for (String field : datas) {
            pipeline.hget(hashKey, field);
        }
        List<Object> results = pipeline.syncAndReturnAll();

        BatchPoints batchPoints = BatchPoints
                .database(database)
                .consistency(InfluxDB.ConsistencyLevel.ALL)
                .build();

        for (int i = 0; i < results.size(); i++) {
            String keyStr = datas.get(i);
            String valueStr = (String) results.get(i);
            if (valueStr == null) {
                // HGET missed (field absent in Redis). addField rejects null
                // values, which previously crashed the worker — skip instead.
                logger.warn("HistoryDataHandlerWorker.{} missing value for {}/{}", label, hashKey, keyStr);
                continue;
            }
            Point point = Point
                    .measurement(measurement)
                    .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                    .tag("TAG_HASH_KEY", hashKey).tag("TAG_KEY", keyStr)
                    .addField("VALUE", valueStr)
                    .build();
            batchPoints.point(point);
        }

        logger.info("HistoryDataHandlerWorker.{}.pp size {}", label, batchPoints.getPoints().size());
        influxDB.write(batchPoints);
    }
}
