/**
 * Copyright © 2017 sdjictec.Co.Ltd. All rights reserved.
 **/
package com.sdjictec.haier;

import com.alibaba.fastjson.JSON;
import com.sdjictec.haier.config.ProFileConfig;
import com.sdjictec.haier.entity.RetailDataMQ;
import com.sdjictec.haier.utils.DBUtills;
import com.sdjictec.haier.utils.EncryptUtil;
import com.sdjictec.haier.utils.IJMSInfo;
import com.sdjictec.haier.utils.ILogic;
import org.apache.commons.cli.*;
import org.apache.kafka.clients.consumer.CommitFailedException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.consumer.OffsetCommitCallback;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.SimpleDateFormat;
import java.util.*;


/**
 * Kafka -> Oracle bridge: consumes encrypted retail messages from a Kerberized
 * Kafka topic, decrypts and filters them, merges each bill entry via business
 * logic, and persists the result into T_RetailDataMQ and T_RetailDataMQ2.
 *
 * <p>Usage: {@code java Kafka2Oracle -reset <earliest|latest>} — the required
 * {@code -reset} option selects the consumer's {@code auto.offset.reset} policy.
 */
public class Kafka2Oracle {

    private static final Logger logger = LoggerFactory.getLogger(Kafka2Oracle.class);

    // NOTE(review): hard-coded decryption key in source control — should be
    // moved to protected configuration (e.g. ProFileConfig) and rotated.
    private static final String DECRYPT_KEY = "kaFkA$Jn,#.l@4#C";

    public static void main(String[] args) {

        String reset = parseResetOption(args);
        logger.info("kafka配置auto.offset.reset:" + reset);

        // Kerberos/SASL configuration files live under the configured conf home.
        System.setProperty("java.security.auth.login.config", ProFileConfig.confhome + "jaas.conf");
        System.setProperty("java.security.krb5.conf", ProFileConfig.confhome + "krb5.conf");

        IJMSInfo ijmsInfo = new IJMSInfo();
        ILogic iLogic = new ILogic();
        DBUtills dbUtills = new DBUtills();

        KafkaConsumer<String, String> consumer = createConsumer(reset);

        // Poll forever; offsets are committed manually after each processed batch.
        while (true) {
            try {
                ConsumerRecords<String, String> records = consumer.poll(100);
                if (records == null || records.isEmpty()) {
                    logger.info("no data");
                    continue;
                }
                logger.info("开始处理kafka数据");
                for (ConsumerRecord<String, String> record : records) {
                    processRecord(record.value(), ijmsInfo, iLogic, dbUtills);
                }
                commitOffsets(consumer);
            } catch (Exception e) {
                // BUGFIX: the original only caught NumberFormatException here, so any
                // other runtime failure (JSON parse, NPE, DB) escaped the loop and
                // killed the process. Log and keep consuming instead.
                logger.error("对kafka该次poll数据处理出错", e);
            }
        }
    }

    /**
     * Parses the required {@code -reset} command-line option.
     *
     * <p>BUGFIX: the original continued with {@code cmd == null} after a
     * ParseException and NPE'd on {@code getOptionValue}; now we print usage
     * and exit instead.
     */
    private static String parseResetOption(String[] args) {
        Options options = new Options();
        Option resetO = new Option("reset", true, "");
        resetO.setRequired(true);
        options.addOption(resetO);
        try {
            CommandLine cmd = new DefaultParser().parse(options, args);
            return cmd.getOptionValue("reset");
        } catch (ParseException e) {
            logger.error("命令行参数解析失败", e);
            new HelpFormatter().printHelp("Kafka2Oracle", options);
            System.exit(1);
            return null; // unreachable
        }
    }

    /**
     * Builds and subscribes the Kafka consumer (SASL_PLAINTEXT + Kerberos).
     *
     * <p>BUGFIX: the original logged a construction failure and then entered
     * the poll loop with a {@code null} consumer; now we exit instead.
     */
    private static KafkaConsumer<String, String> createConsumer(String reset) {
        Properties props = new Properties();
        props.put("bootstrap.servers", ProFileConfig.kafka_servers);
        props.put("group.id", ProFileConfig.kafka_group_id);
        props.put("enable.auto.commit", "false");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Null-safe: anything other than "earliest" falls back to "latest".
        props.put("auto.offset.reset", "earliest".equals(reset) ? "earliest" : "latest");
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.kerberos.service.name", "kafka");
        try {
            KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
            consumer.subscribe(Arrays.asList(ProFileConfig.kafka_topic));
            return consumer;
        } catch (Exception e) {
            logger.error("kafka consumer 失败", e);
            System.exit(1);
            return null; // unreachable
        }
    }

    /**
     * Decrypts, filters, and persists a single kafka message payload.
     *
     * <p>Message envelope: {@code {"data": "<encrypted>"}}; the decrypted body
     * carries {@code flag} (only {@code realtimeFlagFilter} values are kept)
     * and a {@code billData} list of bill entries.
     */
    @SuppressWarnings("unchecked")
    private static void processRecord(String kafkajson, IJMSInfo ijmsInfo, ILogic iLogic,
                                      DBUtills dbUtills) {
        Map<String, Object> mapkafka = JSON.parseObject(kafkajson);
        String data = (String) mapkafka.get("data");

        String strval = decrypt(data);
        // BUGFIX: the original carried an empty string past a failed decrypt,
        // NPE'd on the parsed (null) map and killed the consumer loop.
        if (strval == null || strval.isEmpty()) {
            return;
        }

        throttleIfNearBatchWindow();

        Map<String, Object> mapTypes = JSON.parseObject(strval);
        String flag = (String) mapTypes.get("flag");
        logger.info("flag:" + flag);
        // Only messages whose flag matches the configured filter ("dj") are taken.
        if (flag != null && !flag.equals(ProFileConfig.realtimeFlagFilter)) {
            logger.info("flag不为dj，不取该条，跳出本次循环");
            return;
        }

        List<Map<String, Object>> billDatas = (List<Map<String, Object>>) mapTypes.get("billData");
        if (billDatas == null || billDatas.isEmpty()) {
            logger.info("billDatas 为空跳出本次循环");
            return;
        }

        for (Map<String, Object> billData : billDatas) {
            if (!ijmsInfo.filterFlag(mapTypes, billData)) {
                logger.info("无效数据过滤，跳出本次循环");
                continue;
            }
            // Map the MQ JSON onto the entity, run merge logic, then persist
            // the merged entity into both target tables.
            RetailDataMQ retailDataMQ = ijmsInfo.parseJson(billData);
            RetailDataMQ fromRedis = iLogic.mergeDataInfo(retailDataMQ);
            saveEntity(dbUtills, fromRedis, "mq", "T_RetailDataMQ");
            saveEntity(dbUtills, fromRedis, "mq2", "T_RetailDataMQ2");
        }
    }

    /** Decrypts a payload; returns {@code null} (instead of "") on failure. */
    private static String decrypt(String data) {
        try {
            return EncryptUtil.decode(data, DECRYPT_KEY);
        } catch (Exception e) {
            logger.error("data 解密失败", e);
            return null;
        }
    }

    /**
     * Sleeps 480ms per record during minutes 00-05 of each hour — presumably
     * to ease load while a scheduled job runs (TODO confirm the intent).
     *
     * <p>Replaces a per-record SimpleDateFormat + substring(14,16) minute read
     * with a direct Calendar lookup.
     */
    private static void throttleIfNearBatchWindow() {
        if (Calendar.getInstance().get(Calendar.MINUTE) <= 5) {
            try {
                Thread.sleep(480);
            } catch (InterruptedException e) {
                // BUGFIX: re-assert the interrupt flag instead of swallowing it.
                Thread.currentThread().interrupt();
            }
        }
    }

    /** Persists one entity via {@code DBUtills.jdbcsave}, logging the outcome. */
    private static void saveEntity(DBUtills dbUtills, RetailDataMQ entity, String target,
                                   String tableName) {
        logger.debug("实体类处理后入表" + tableName);
        boolean saved = false;
        try {
            saved = dbUtills.jdbcsave(entity, target);
        } catch (Exception e) {
            // Was logger.debug(e.getMessage()) — lost the stack trace entirely.
            logger.error("实体类处理后入表" + tableName + "异常", e);
        }
        if (saved) {
            logger.debug("实体类处理后入表" + tableName + "成功");
        } else {
            logger.debug("实体类处理后入表" + tableName + "失败");
        }
    }

    /**
     * Commits offsets asynchronously.
     *
     * <p>BUGFIX: {@code commitAsync()} never throws CommitFailedException on the
     * calling thread — the original catch block was dead code and failures were
     * silently dropped. Failures are now reported through the callback.
     */
    private static void commitOffsets(KafkaConsumer<String, String> consumer) {
        logger.debug("消息消费完成，提交offset");
        consumer.commitAsync(new OffsetCommitCallback() {
            @Override
            public void onComplete(Map<TopicPartition, OffsetAndMetadata> offsets,
                                   Exception exception) {
                if (exception != null) {
                    logger.error("消息消费完成，提交offset错误", exception);
                }
            }
        });
    }

}