package com.seari.main.dataHandler;

import com.seari.bean.DataTable;
import com.seari.common.Const;
import com.seari.service.CommonDataService;
import com.seari.utils.*;
import org.influxdb.InfluxDB;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.Pipeline;

import java.util.*;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

public class HistoryDataHandlerWorker implements Runnable {

    private static final Logger logger = LoggerFactory.getLogger(HistoryDataHandlerWorker.class);

    // Connection/configuration values loaded once from commonConfig.properties.
    // username/password are currently unused by this class; kept for compatibility.
    private static final String username = PropertiesUtil.getProperty("commonConfig.properties", "username");
    private static final String password = PropertiesUtil.getProperty("commonConfig.properties", "password");
    private static final String openurl = PropertiesUtil.getProperty("commonConfig.properties", "openurl");
    private static final String database = PropertiesUtil.getProperty("commonConfig.properties", "database");
    private static final String measurement = PropertiesUtil.getProperty("commonConfig.properties", "measurement");

    // Work queues shared with the producer. Each batch is a list of redis hash
    // fields whose LAST element is the redis hash key those fields belong to.
    BlockingQueue<List<String>> aiQueue;
    BlockingQueue<List<String>> diQueue;

    // Dedicated redis connection for this worker; released in run()'s finally.
    Jedis jedis = JedisPoolUtils.getJedis();

    // Local caches: hashKey -> last-seen field/value snapshot, and
    // (hashKey + field) -> mysql DataTable configuration rows.
    GuavaLocalCacheUtils guavaLocalCacheUtils = new GuavaLocalCacheUtils();
    InfluxDBConnUtils influxDB = new InfluxDBConnUtils(openurl, database);

    public HistoryDataHandlerWorker(BlockingQueue<List<String>> aiQueue, BlockingQueue<List<String>> diQueue) {
        this.aiQueue = aiQueue;
        this.diQueue = diQueue;
    }

    // NOTE(review): this worker is constructed with `new`, so Spring will not
    // inject this field unless the instance is handed to the context some
    // other way — verify how commonDataService is actually populated, else
    // diDataHandler/aiDataHander NPE on the first cache miss.
    @Autowired
    private CommonDataService commonDataService;

    /**
     * Builds the InfluxDB connection, then processes queue batches until the
     * thread dies. On failure the shared worker counter is decremented so the
     * starter can detect a lost worker; the redis connection is always released.
     */
    @Override
    public void run() {
        try {
            influxDB.influxDbBuild();
            process();
        } catch (InterruptedException ex) {
            // Restore the interrupt status for the owning thread/executor.
            Thread.currentThread().interrupt();
            HistoryDataHandlerStarter.workerCount.decrementAndGet();
            logger.error("HistoryDataHandlerWorker interrupted", ex);
        } catch (Exception ex) {
            HistoryDataHandlerStarter.workerCount.decrementAndGet();
            // was ex.printStackTrace(): route through the logger instead.
            logger.error("HistoryDataHandlerWorker terminated unexpectedly", ex);
        } finally {
            jedis.disconnect();
            jedis.close();
        }
    }

    /**
     * Endless poll loop: DI batches take priority over AI batches; when both
     * queues are empty, back off for five seconds before polling again.
     *
     * @throws Exception propagated from redis/influx handling or interruption
     */
    public void process() throws Exception {

        while (true) {
            List<String> diDatas = diQueue.poll(3, TimeUnit.SECONDS);
            if (diDatas != null) {
                sendDiData(diDatas);
                continue;
            }
            List<String> aiDatas = aiQueue.poll(3, TimeUnit.SECONDS);
            if (aiDatas != null) {
                sendAiData(aiDatas);
                continue;
            }
            // Message fixed: it previously claimed 3 seconds while sleeping 5.
            logger.info("diQueue and aiQueue is null, sleep 5 seconds");
            TimeUnit.SECONDS.sleep(5);
        }
    }

    /**
     * Reads the current values of one DI batch from redis (pipelined), pushes
     * every new/changed value through the DI matching chain, refreshes the
     * local snapshot cache and writes the accumulated points to InfluxDB.
     *
     * @param diDatas batch of hash fields; the LAST element is the redis hash key
     * @throws Exception on redis/influx failure
     */
    public void sendDiData(List<String> diDatas) throws Exception {

        String diDataKey = diDatas.remove(diDatas.size() - 1);
        Map<String, Object> localCacheValue =
                (Map<String, Object>) guavaLocalCacheUtils.cache.getIfPresent(diDataKey);
        if (CommonUtils.objectIsNull(localCacheValue)) {
            // First batch for this hash key: start an empty snapshot. The
            // original code called put() on the null reference here and threw
            // a NullPointerException.
            localCacheValue = new HashMap<>();
        }

        BatchPoints batchPoints = BatchPoints
                .database(database)
                .consistency(InfluxDB.ConsistencyLevel.ALL)
                .build();

        // Fetch all field values in a single round trip.
        Pipeline pipeline = jedis.pipelined();
        for (String diData : diDatas) {
            pipeline.hget(diDataKey, diData);
        }
        List<Object> results = pipeline.syncAndReturnAll();

        for (int i = 0; i < results.size(); i++) {
            String keyStr = diDatas.get(i);
            String valueStr = (String) results.get(i);
            String key = diDataKey + keyStr;
            List<DataTable> dataTableList =
                    (List<DataTable>) guavaLocalCacheUtils.cache.getIfPresent(key);
            // Only handle fields whose value is new or changed since the last snapshot.
            if (CommonUtils.objectIsNull(localCacheValue.get(keyStr))
                    || !localCacheValue.get(keyStr).equals(valueStr)) {
                localCacheValue.put(keyStr, valueStr);
                diDataHandler(key, diDataKey, keyStr, dataTableList, valueStr, batchPoints);
            }
        }

        guavaLocalCacheUtils.cache.put(diDataKey, localCacheValue);
        // Log tag fixed: this line previously said "sendAiData" in the DI path.
        logger.info("HistoryDataHandlerWorker.sendDiData.pp size={}", batchPoints.getPoints().size());
        influxDB.write(batchPoints);
    }

    /**
     * Resolves the mysql configuration rows for one DI field (local cache
     * first, then database) and matches the value against them.
     *
     * @param key           local cache key (hash key + field)
     * @param diDataKey     redis hash key
     * @param keyStr        hash field name
     * @param dataTableList cached config rows; may be null/empty on cache miss
     * @param valueStr      current field value read from redis
     * @param batchPoints   accumulator for influx points
     */
    public void diDataHandler(String key, String diDataKey, String keyStr, List<DataTable> dataTableList, String valueStr, BatchPoints batchPoints) {

        if (!CommonUtils.objectIsNull(dataTableList)) {
            diDataMatching(diDataKey, keyStr, dataTableList, valueStr, batchPoints);
            return;
        }
        // Cache miss: load the configuration from mysql and cache it locally
        // before matching.
        List<DataTable> dataTableMatchList = commonDataService.getDataTableListByKey(keyStr);
        if (!CommonUtils.objectIsNull(dataTableMatchList)) {
            guavaLocalCacheUtils.cache.put(key, dataTableMatchList);
            diDataMatching(diDataKey, keyStr, dataTableMatchList, valueStr, batchPoints);
        } else {
            logger.info("HistoryDataHandlerWorker.diDataHandler mysqlTable and localMysqlCache is null");
        }
    }

    /**
     * Matches a DI value against its configuration rows, honouring the
     * per-point "reverse" flag, and appends one influx point per matching row.
     *
     * @param diDataKey     redis hash key
     * @param keyStr        hash field name
     * @param dataTableList configuration rows (non-empty)
     * @param valueStr      current field value
     * @param batchPoints   accumulator for influx points
     */
    public void diDataMatching(String diDataKey, String keyStr, List<DataTable> dataTableList, String valueStr, BatchPoints batchPoints) {

        // DI inversion. Fixed: the original compared the String with ==
        // (reference identity), which is never true for values read from
        // redis, so every "reversed" value collapsed to "0".
        if (Const.DI_REVERSE_YES == dataTableList.get(0).getReverse()) {
            valueStr = "0".equals(valueStr) ? "1" : "0";
        }

        for (DataTable dataTable : dataTableList) {
            // Value matches the mysql DI configuration.
            if (valueStr.equals(dataTable.getBitAddressValueCode())) {
                batchPoints.point(buildPoint(diDataKey, keyStr, dataTable, valueStr));
            }
        }
    }

    /**
     * Reads the current values of one AI batch from redis (pipelined), pushes
     * every new/changed value through the AI matching chain, refreshes the
     * local snapshot cache and writes the accumulated points to InfluxDB.
     *
     * @param aiDatas batch of hash fields; the LAST element is the redis hash key
     * @throws Exception on redis/influx failure
     */
    public void sendAiData(List<String> aiDatas) throws Exception {

        String aiDataKey = aiDatas.remove(aiDatas.size() - 1);
        Map<String, Object> localCacheValue =
                (Map<String, Object>) guavaLocalCacheUtils.cache.getIfPresent(aiDataKey);
        if (CommonUtils.objectIsNull(localCacheValue)) {
            // First batch for this hash key: start an empty snapshot (the
            // original threw a NullPointerException here).
            localCacheValue = new HashMap<>();
        }

        BatchPoints batchPoints = BatchPoints
                .database(database)
                .consistency(InfluxDB.ConsistencyLevel.ALL)
                .build();

        // Fetch all field values in a single round trip.
        Pipeline pipeline = jedis.pipelined();
        for (String aiData : aiDatas) {
            pipeline.hget(aiDataKey, aiData);
        }
        List<Object> results = pipeline.syncAndReturnAll();

        for (int i = 0; i < results.size(); i++) {
            String keyStr = aiDatas.get(i);
            String valueStr = (String) results.get(i);
            String key = aiDataKey + keyStr;
            List<DataTable> dataTableList =
                    (List<DataTable>) guavaLocalCacheUtils.cache.getIfPresent(key);
            // Only handle fields whose value is new or changed since the last snapshot.
            if (CommonUtils.objectIsNull(localCacheValue.get(keyStr))
                    || !localCacheValue.get(keyStr).equals(valueStr)) {
                localCacheValue.put(keyStr, valueStr);
                aiDataHander(key, aiDataKey, keyStr, dataTableList, valueStr, batchPoints);
            }
        }

        guavaLocalCacheUtils.cache.put(aiDataKey, localCacheValue);
        logger.info("HistoryDataHandlerWorker.sendAiData.pp size={}", batchPoints.getPoints().size());
        influxDB.write(batchPoints);
    }

    /**
     * Resolves the mysql configuration rows for one AI field (local cache
     * first, then database) and matches the value against them.
     * (Method name keeps the original spelling for caller compatibility.)
     *
     * @param key           local cache key (hash key + field)
     * @param aiDataKey     redis hash key
     * @param keyStr        hash field name
     * @param dataTableList cached config rows; may be null/empty on cache miss
     * @param valueStr      current field value read from redis
     * @param batchPoints   accumulator for influx points
     */
    public void aiDataHander(String key, String aiDataKey, String keyStr, List<DataTable> dataTableList, String valueStr, BatchPoints batchPoints) {

        if (!CommonUtils.objectIsNull(dataTableList)) {
            aiDataMatching(aiDataKey, keyStr, dataTableList, valueStr, batchPoints);
            return;
        }
        // Cache miss: load the configuration from mysql and cache it locally
        // before matching.
        List<DataTable> dataTableMatchList = commonDataService.getDataTableListByKey(keyStr);
        if (!CommonUtils.objectIsNull(dataTableMatchList)) {
            guavaLocalCacheUtils.cache.put(key, dataTableMatchList);
            aiDataMatching(aiDataKey, keyStr, dataTableMatchList, valueStr, batchPoints);
        } else {
            logger.info("HistoryDataHandlerWorker.aiDataHander mysqlTable and localMysqlCache is null");
        }
    }

    /**
     * Matches an AI value against its configuration rows. Combined keys are
     * skipped (handled elsewhere); split keys are decomposed into bits and
     * each configured bit is matched; plain keys are matched directly.
     *
     * @param aiDataKey     redis hash key
     * @param keyStr        hash field name
     * @param dataTableList configuration rows (non-empty)
     * @param valueStr      current field value
     * @param batchPoints   accumulator for influx points
     */
    public void aiDataMatching(String aiDataKey, String keyStr, List<DataTable> dataTableList, String valueStr, BatchPoints batchPoints) {

        // Combine? AI
        if (Const.REDIS_COMBINE_YES.equals(dataTableList.get(0).getCombine())) {
            logger.info("HistoryDataHandlerWorker.aiDataMatching combine key ");
            return;
        }

        // Split? AI
        if (Const.NEEDSPLIT_YES.equals(dataTableList.get(0).getNeedSplit())) {
            String binaryStr = DataUtil.getHexBinaryStr(Integer.parseInt(valueStr));
            for (DataTable dataTable : dataTableList) {
                // Hoisted the duplicated parseInt of the bit position.
                int bit = Integer.parseInt(dataTable.getBitAddress());
                String value = binaryStr.substring(bit, bit + 1);
                for (DataTable dataTable1 : dataTableList) {
                    // Bit value matches the mysql AI configuration.
                    if (dataTable.getBitAddress().equals(dataTable1.getBitAddress())
                            && value.equals(dataTable1.getBitAddressValueCode())) {
                        batchPoints.point(buildPoint(aiDataKey, keyStr, dataTable, value));
                    }
                }
            }
        } else {
            for (DataTable dataTable : dataTableList) {
                // Unsplit value matches the mysql AI configuration.
                if (valueStr.equals(dataTable.getBitAddressValueCode())) {
                    batchPoints.point(buildPoint(aiDataKey, keyStr, dataTable, valueStr));
                }
            }
        }
    }

    /**
     * Builds one InfluxDB point for a matched configuration row. Extracted
     * from three identical 18-line builder chains; tag/field names are kept
     * byte-identical to preserve the existing measurement schema (including
     * the historical "COMMMENT" spelling).
     *
     * @param hashKey   redis hash key (TAG_HASH_KEY)
     * @param keyStr    hash field name (TAG_KEY)
     * @param dataTable matched configuration row
     * @param value     value to store (raw or single extracted bit)
     * @return the assembled point, stamped with the current wall-clock time
     */
    private Point buildPoint(String hashKey, String keyStr, DataTable dataTable, String value) {
        return Point
                .measurement(measurement)
                .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                .tag("TAG_HASH_KEY", hashKey)
                .tag("TAG_KEY", keyStr)
                .tag("TAG_KEY_BIT_ADDRESS", dataTable.getBitAddress())
                .addField("COMMMENT", dataTable.getComment())
                .addField("STATION_EN", dataTable.getStationEn())
                // NOTE(review): the original wrote getStationEn() into
                // STATION_CN as well — looks like a copy-paste slip; confirm
                // whether DataTable has getStationCn() before changing.
                .addField("STATION_CN", dataTable.getStationEn())
                .addField("ADDRESS", dataTable.getAddress())
                .addField("ACTUAL_ADDRESS", dataTable.getActualAddress())
                .addField("RATIO", dataTable.getRatio())
                .addField("IS_DI", dataTable.getIsDi())
                .addField("SYSTEM_NAME", dataTable.getSystemName())
                .addField("BIT_ADDRESS_EN", dataTable.getBitAddressEn())
                .addField("BIT_ADDRESS_CN", dataTable.getBitAddressCn())
                .addField("VALUE", value)
                .build();
    }
}
