package org.example.kg_back.service.imp;


import com.alibaba.fastjson2.JSON;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.example.kg_back.common.Constants;
import org.example.kg_back.utils.HBasePageModel;
import org.example.kg_back.utils.HQueryFilterUtil;
import org.example.kg_back.utils.HbaseUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

// NOTE(review): @Service is commented out, so this bean is not registered and
// @Autowired/@Scheduled below are never processed — confirm this is intentional.
//@Service
public class TimeTaskService {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    // Resume cursor per source table: the rowKey of the last row fetched by the
    // previous scan, used as the start row of the next paged scan.
    // kg_PlanData (flight-plan data)
    private static byte[] planDataLastRowKey = null;
    // Kg_WarnFlightHistory (flight-instruction alert data)
    private static byte[] warnFlightLastRowKey = null;
    // Kg_ATCDutyInfo (ATC duty-officer data)
    private static byte[] aTCDutyLastRowKey = null;
    // Kg_WarnSimilarHistory (similar-callsign alert data)
    private static byte[] warnSimilarLastRowKey = null;
    // kg_AFTN (AFTN message data)
    private static byte[] aFTNLastRowKey = null;
    // kg_ATC (ATC message data)
    private static byte[] aTCLastRowKey = null;
    // kg_callSaturation (sector call-saturation data)
    private static byte[] callSaturationLastRowKey = null;


    // Date value passed to the per-table substring filter ("yyyyMMdd").
    // NOTE(review): these are never advanced anywhere in this class — presumably
    // updated elsewhere or intentionally fixed; verify.
    // kg_ATC sector data
    private static String atcDataTime = "20180101";
    // kg_AFTN message data
    private static String aftnDataTime = "20180101";
    // kg_callSaturation sector call-saturation data
    private static String callDataTime = "20180101";
    // kg_PlanData flight-plan data
    private static String planDataTime = "20180101";
    // Kg_WarnFlightHistory flight-instruction alert data
    private static String warnFlightDataTime = "20180101";
    // Kg_WarnSimilarHistory similar-callsign alert data
    private static String warnSimilarDataTime = "20180101";
    // Kg_ATCDuty duty-officer data
    private static String atcDutyDataTime = "20180101";

    // Number of rows published per table per scheduled run.
    private static int selectLimit = 30;



    /**
     * Scans one page of rows from an HBase table and publishes each row to the
     * given Kafka topic as a JSON string.
     *
     * @param topic      Kafka topic the rows are sent to
     * @param tableName  HBase table to scan
     * @param lastRowKey start row for this scan; {@code null} on the first call
     * @param date       optional value for a substring filter on {@code family:column};
     *                   no filter is added when {@code null}
     * @param family     column family the substring filter reads
     * @param column     column qualifier the substring filter reads
     * @param <p></p>
     * @return the rowKey of the last row fetched in this batch, or the incoming
     *         {@code lastRowKey} unchanged when the scan returned no rows
     * @throws SQLException declared for callers; this method performs no JDBC work
     */
    private byte[] getDateAndDepositInKafka(String topic, String tableName, byte[] lastRowKey, String date, String family, String column) throws SQLException {

        // Fetch selectLimit + 1 rows: the extra trailing row becomes the start
        // row of the next scan and is deliberately not sent in this batch.
        List<Filter> rowFilters = new ArrayList<Filter>();
        FilterList filterList = new FilterList(rowFilters);
        HBasePageModel pageModel = new HBasePageModel(selectLimit + 1);

        if (date != null) {
            Filter filter = HQueryFilterUtil.newSubStringFilter(family, column, date);
            filterList.addFilter(filter);
        }

        HBasePageModel hbasePageModel = HbaseUtils.scanResultByPageFilter(tableName, lastRowKey, null, filterList, 0, pageModel);
        // Remember the last rowKey of this batch as the next scan's start row.
        int dataSize = hbasePageModel.getList().size();
        if (dataSize > 0) {
            Map<String, String> map = hbasePageModel.getList().get(dataSize - 1);
            String rowkey = map.get("rowKey");
            // Fix: use an explicit charset — the no-arg String.getBytes() depends
            // on the platform default encoding and can corrupt non-ASCII rowkeys.
            lastRowKey = rowkey.getBytes(StandardCharsets.UTF_8);
            // Send every row except the trailing cursor row (it is re-read first
            // by the next scan, assuming the start row is inclusive).
            // NOTE(review): when the final page holds exactly one row, that row is
            // never published and the cursor stops advancing — verify against
            // HbaseUtils.scanResultByPageFilter's start-row semantics.
            for (int i = 0; i < dataSize - 1; i++) {
                kafkaTemplate.send(topic, JSON.toJSONString(hbasePageModel.getList().get(i)));
            }
            System.out.println("发送"+tableName + "此批次最后一个rowkey：" + rowkey);
        }


        return lastRowKey;
    }


    /**
     * Scheduled entry point (every 30 seconds): pages through each source table
     * and forwards the rows to its Kafka topic, saving the resume rowKey per
     * table so the next run continues where this one stopped.
     */
    @Scheduled(cron = "*/30 * * * * *")
    public void timingGetData() {
        System.out.println("定时任务启动获取数据");
        try {
            planDataLastRowKey = getDateAndDepositInKafka(Constants.TASK_PlANDATA, Constants.TABLE_PlANDATA, planDataLastRowKey, planDataTime, Constants.FAMILY_PlANDATA, Constants.COLUMN_PlANDATA);
            warnFlightLastRowKey = getDateAndDepositInKafka(Constants.TASK_WARNFLIGHT, Constants.TABLE_WARNFLIGHT, warnFlightLastRowKey, warnFlightDataTime, Constants.FAMILY_WARNFLIGHT, Constants.COLUMN_WARNFLIGHT);
            aTCDutyLastRowKey = getDateAndDepositInKafka(Constants.TASK_ATCDUTY, Constants.TABLE_ATCDUTY, aTCDutyLastRowKey, atcDutyDataTime, Constants.FAMILY_ATCDUTY, Constants.COLUMN_ATCDUTY);
            warnSimilarLastRowKey = getDateAndDepositInKafka(Constants.TASK_WARNSIMILAR, Constants.TABLE_WARNSIMILAR, warnSimilarLastRowKey, warnSimilarDataTime, Constants.FAMILY_WARNSIMILAR, Constants.COLUMN_WARNSIMILAR);
            aFTNLastRowKey = getDateAndDepositInKafka(Constants.TASK_AFTN, Constants.TABLE_AFTN, aFTNLastRowKey, aftnDataTime, Constants.FAMILY_AFTN, Constants.COLUMN_AFTN);
            aTCLastRowKey = getDateAndDepositInKafka(Constants.TASK_ATC, Constants.TABLE_ATC, aTCLastRowKey, atcDataTime, Constants.FAMILY_ATC, Constants.COLUMN_ATC);
            callSaturationLastRowKey = getDateAndDepositInKafka(Constants.TASK_CALLSATURATION, Constants.TABLE_CALLSATURATION, callSaturationLastRowKey, callDataTime, Constants.FAMILY_CALLSATURATION, Constants.COLUMN_CALLSATURATION);
       } catch (SQLException e) {
            // NOTE(review): a stack trace on stdout is easy to lose in production —
            // prefer an SLF4J logger. The next scheduled run retries from the
            // saved per-table rowKeys, so no extra recovery is done here.
            e.printStackTrace();
        }
    }
}
