//
// Source code recreated from a .class file by IntelliJ IDEA
// (powered by Fernflower decompiler)
//

package com.ehl.capture.kafka;

import com.alibaba.fastjson.JSONArray;
import com.ehl.capture.data.IPassCar;
import com.ehl.capture.data.WorkThread;
import com.ehl.capture.db.Config;
import com.ehl.capture.db.DBProxyZdkk;
import com.ehl.capture.db.RedisUtil;
import com.ehl.tvc.kafka.KafkaConsumerConfig;
import com.ehl.tvc.kafka.KafkaStreamManager;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import redis.clients.jedis.Jedis;

import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;

public class KafkaConsumer {
    public static Logger logger = Logger.getLogger(KafkaConsumer.class);
    // When true, a periodic watchdog task compares the key-checkpoint ("重点卡口")
    // list against Redis to detect checkpoints that have stopped reporting.
    // Read once at class-load time from configuration.
    private static final boolean ZDDD_CHECK_FLAG = Boolean.parseBoolean(Config.getValue("zdkk.redischeck.flag"));

    // Fallback worker-pool size used when the "threadcount" config value is
    // missing or not a valid integer (preserves the previously hard-coded size).
    private static final int DEFAULT_WORKER_THREADS = 100;

    private String connect;            // zookeeper host(:port) of the platform
    private String connectiontimeout;  // connection timeout — currently only logged
    private String groupid;            // kafka consumer group id
    private String tr;                 // topic name to consume
    private String trvalue;            // number of streams to open on the topic
    public static String picpath;      // picture webservice address, shared with worker threads
    private String threadcount;        // configured worker-pool size

    /**
     * Loads all connection/consumer settings from configuration into fields
     * and logs each value for startup diagnostics.
     */
    private void baseMessage() {
        this.connect = Config.getValue("connect");
        this.connectiontimeout = Config.getValue("connectiontimeout");
        this.groupid = Config.getValue("groupid");
        this.tr = Config.getValue("tr");
        this.trvalue = Config.getValue("trvalue");
        picpath = Config.getValue("picpath");
        this.threadcount = Config.getValue("threadcount");

        logger.info("平台机器名或ip加端口=" + this.connect);
        logger.info("超时时间= " + this.connectiontimeout);
        logger.info("平台分组名= " + this.groupid);
        logger.info("查询的表名= " + this.tr);
        logger.info("查询设置的值= " + this.trvalue);
        logger.info("图片webservice地址= " + picpath);
        logger.info("Kafka开启的线程数= " + this.threadcount);
    }

    /**
     * Parses the configured worker-pool size, falling back to
     * {@link #DEFAULT_WORKER_THREADS} when the value is blank or not a number.
     */
    private static int parseWorkerThreads(String configured) {
        try {
            int n = Integer.parseInt(StringUtils.trimToEmpty(configured));
            if (n > 0) {
                return n;
            }
        } catch (NumberFormatException ignored) {
            // fall through to the default below; the configured value was logged already
        }
        logger.warn("threadcount配置无效，使用默认线程数: " + DEFAULT_WORKER_THREADS);
        return DEFAULT_WORKER_THREADS;
    }

    /**
     * Initializes the Kafka high-level consumer: builds the consumer config,
     * opens the streams for the configured topic, optionally starts the
     * key-checkpoint Redis watchdog, then submits one {@link WorkThread} per
     * stream to a fixed worker pool.
     */
    public void kafkaInit() {
        this.baseMessage();
        Map<String, String> props = new HashMap<String, String>();
        props.put("zookeeper.connect", this.connect);
        props.put("group.id", this.groupid);
        props.put("auto.offset.reset", "largest");
        props.put("max.poll.records", "1000");
        ConsumerConfig config = KafkaConsumerConfig.createConfig(props);
        KafkaStreamManager kafkaStreamManager = new KafkaStreamManager(config, this.tr, Integer.parseInt(this.trvalue));
        List<KafkaStream<byte[], byte[]>> list = kafkaStreamManager.createKafkaStreams();
        // FIX: "threadcount" was read from config and logged but never used —
        // the pool size was hard-coded to 100. Honor the configured value.
        ExecutorService executor = Executors.newFixedThreadPool(parseWorkerThreads(this.threadcount));
        String projectClass = Config.getValue("projectClass");

        if (ZDDD_CHECK_FLAG) {
            // Kafka init done => load the key-checkpoint list and start the
            // periodic Redis check; every period it records any checkpoint
            // that has no recent entry in Redis.
            List<String> zdkkList = DBProxyZdkk.getKkList();
            scheduledTask(zdkkList, RedisUtil.getPeriod());
        }

        try {
            // The concrete IPassCar parser is chosen by configuration.
            Class<?> clazz = Class.forName("com.ehl.capture.data." + projectClass);
            IPassCar passcar = (IPassCar) clazz.getDeclaredConstructor().newInstance();

            for (KafkaStream<byte[], byte[]> stream : list) {
                logger.info("活动线程数:" + ((ThreadPoolExecutor) executor).getActiveCount());
                executor.submit(new WorkThread(passcar, stream));
            }
        } catch (Exception e) {
            // Startup failure (bad projectClass, stream error, …) — log with cause.
            logger.error("KafkaConsumer启动失败：", e);
        }
    }


    /**
     * Starts a scheduled task that periodically collects key checkpoints
     * ("重点卡口") considered overdue: those with no "last seen" entry in Redis.
     *
     * @param zdkkList list of key-checkpoint ids to watch
     * @param period   interval between runs, in seconds
     */
    private static void scheduledTask(final List<String> zdkkList, long period) {
        // A single scheduler thread is enough for this one periodic task
        // (was an oversized pool of 16).
        ScheduledExecutorService executorService = Executors.newScheduledThreadPool(1);
        Runnable task = new Runnable() {
            @Override
            public void run() {
                Jedis jedis = null;
                try {
                    logger.info("执行定时任务，检测是否有超时的重点卡口");
                    jedis = RedisUtil.getJedis();
                    List<String> kkOverTimeList = DBProxyZdkk.getKkOverTimeList();
                    logger.info(JSONArray.toJSON(kkOverTimeList));
                    // Compare every watched checkpoint against Redis; a blank
                    // "last seen" value means the checkpoint is overdue.
                    for (String kkbh : zdkkList) {
                        String lastTime = jedis.get(kkbh);
                        if (StringUtils.isBlank(lastTime) && !kkOverTimeList.contains(kkbh)) {
                            kkOverTimeList.add(kkbh);
                        }
                    }
                    // NOTE(review): kkOverTimeList is a local list — the entries
                    // added above are not persisted or reported anywhere in this
                    // method; confirm DBProxyZdkk records them elsewhere.
                } catch (Exception e) {
                    // Keep the scheduler alive across failures, but log the cause.
                    logger.error("重点卡口定时任务执行失败：", e);
                } finally {
                    RedisUtil.closeJedis(jedis);
                }
            }
        };
        long initialDelay = 0; // run the first check immediately
        executorService.scheduleAtFixedRate(task, initialDelay, period, TimeUnit.SECONDS);
    }
}
