package net.hn.hnms.biz.upkafka.service;


import net.hn.hnms.biz.upkafka.config.KafkaProperties;
import net.hn.hnms.biz.upkafka.config.LoginUtil;
import net.hn.hnms.biz.upkafka.config.SpringContextUtil;
import net.hn.hnms.biz.upkafka.domain.SynergiaNowPo;
import net.hn.hnms.biz.upkafka.service.impl.SystemServiceImpl;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.ExecutionException;


/**
 * Kafka producer thread that periodically uploads fully-mechanized mining
 * ("ZC") system status messages to a secured (Kerberos/SASL) Kafka cluster.
 *
 * <p>One instance targets one topic and one mine (identified by its mine
 * code). Messages are built as semicolon-delimited text records and sent
 * either asynchronously (fire-and-forget) or synchronously, once per minute.
 */
public class Producer extends Thread {
    private static final Logger LOG = LoggerFactory.getLogger(Producer.class);

    /**
     * Shared timestamp pattern for both the upload time and the data time.
     * {@link DateTimeFormatter} is immutable and thread-safe, so it can be
     * cached, unlike {@code SimpleDateFormat} which had to be re-created on
     * every call.
     */
    private static final DateTimeFormatter TIMESTAMP_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /** Pause between two uploads, in milliseconds (1 minute). */
    private static final long SEND_INTERVAL_MS = 60000L;

    // Kafka producer instance. Kept public for backward compatibility with
    // existing callers that access it directly.
    public final KafkaProducer<String, String> producer;

    // Topic every record produced by this instance is sent to.
    private final String topic;

    // true: fire-and-forget asynchronous send; false: block until the broker
    // acknowledges. Kept public (Boolean) for backward compatibility.
    public final Boolean isAsync;

    // Mine code; used as the record key and as part of the Kafka client id.
    private final String mineCode;

    // ---- Kafka configuration property keys ----

    // Broker address list
    private final static String BOOTSTRAP_SERVER = "bootstrap.servers";

    // Client ID
    private final static String CLIENT_ID = "client.id";

    // Key serializer class
    private final static String KEY_SERIALIZER = "key.serializer";

    // Value serializer class
    private final static String VALUE_SERIALIZER = "value.serializer";

    // Protocol type: currently SASL_PLAINTEXT or PLAINTEXT
    private final static String SECURITY_PROTOCOL = "security.protocol";

    // Kerberos service name
    private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name";

    // Kerberos domain name
    private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name";

    // Partitioner class name
    private final static String PARTITIONER_NAME = "partitioner.class";

    /**
     * Keytab file name of the machine-machine account applied for by the user.
     */
    private static final String USER_KEYTAB_FILE = "user.keytab";

    /**
     * Principal name of the machine-machine account applied for by the user.
     */
    private static final String USER_PRINCIPAL = "ll_mk";

    /**
     * Producer constructor.
     *
     * @param topicName   target topic name
     * @param asyncEnable whether to send in asynchronous mode
     * @param mineCode    mine code, used for the client id and record keys
     */
    public Producer(String topicName, Boolean asyncEnable, String mineCode) {
        Properties props = initProperties(topicName, mineCode);
        this.producer = new KafkaProducer<>(props);
        this.topic = topicName;
        this.isAsync = asyncEnable;
        this.mineCode = mineCode;
    }

    /**
     * Builds the Kafka producer configuration. Every value can be overridden
     * through {@link KafkaProperties}; the literals below are only defaults.
     *
     * @param topicName topic name, combined with the mine code into the client id
     * @param mineCode  mine code, combined with the topic name into the client id
     * @return properties ready to be passed to {@link KafkaProducer}
     */
    public static Properties initProperties(String topicName, String mineCode) {
        Properties props = new Properties();
        KafkaProperties kafkaProc = KafkaProperties.getInstance();

        // Broker address list
        props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER,
                "10.240.98.20:21007,10.240.98.22:21007,10.240.98.21:21007"));
        // Client id is derived from mine code and topic so each producer is
        // identifiable in broker-side metrics and logs.
        props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, mineCode + "_" + topicName));
        // Key serializer class
        props.put(KEY_SERIALIZER,
                kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer"));
        // Value serializer class
        props.put(VALUE_SERIALIZER,
                kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer"));
        // Protocol type: SASL_PLAINTEXT (Kerberos-secured) or PLAINTEXT
        props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT"));
        // Kerberos service name
        props.put(SASL_KERBEROS_SERVICE_NAME, "kafka");
        // Kerberos domain name
        props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hnmy_hadoop.com"));
        // Custom partitioner class
        props.put(PARTITIONER_NAME,
                kafkaProc.getValues(PARTITIONER_NAME, "net.hn.hnms.biz.upkafka.config.SimplePartitioner"));
        return props;
    }

    /**
     * Performs Kerberos security preparation when the cluster runs in
     * security mode.
     *
     * @return {@code true} when authentication succeeded or is not required;
     *         {@code false} when preparation failed and sending must abort
     */
    private static boolean prepareSecurity() {
        if (LoginUtil.isSecurityModel()) {
            try {
                LOG.info("Securitymode start.");
                // NOTE: in security mode the machine-machine account must be
                // the one the user applied for (USER_PRINCIPAL / keytab).
                LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE);
            } catch (IOException e) {
                LOG.error("Security prepare failure.");
                LOG.error("The IOException occured.", e);
                return false;
            }
            LOG.info("Security prepare success.");
        }
        return true;
    }

    /**
     * Builds the ZC-system upload message for one status snapshot.
     *
     * <p>Layout: a header (mine code; mine name; system model; system name;
     * upload time, terminated by {@code ~}) followed by one measuring-point
     * body segment (code; name; value; unit; point state; data time,
     * terminated by {@code ~}) and the {@code ||} end marker.
     *
     * @param zcSystemStatus shearer status snapshot read from the database
     * @return the complete semicolon-delimited message text
     */
    private String buildZcMessage(SynergiaNowPo zcSystemStatus) {
        // Point state of the shearer (coal-cutting machine).
        Integer pointStatus = zcSystemStatus.getPointStatus();
        // Data time of the snapshot, formatted in the system default zone.
        String dataTimeFormat = zcSystemStatus.getDataTime().toInstant()
                .atZone(ZoneId.systemDefault()).format(TIMESTAMP_FORMAT);
        // Upload time = now.
        String formattedDateTime = LocalDateTime.now().format(TIMESTAMP_FORMAT);

        StringBuilder message = new StringBuilder();
        // NOTE(review): the header hard-codes mine code 150781013671 / 灵露煤矿
        // even though this.mineCode is available — presumably intentional for
        // this mine's feed; confirm before parameterizing.
        message.append("150781013671;灵露煤矿;;综采系统;").append(formattedDateTime).append("~");
        // Body: shearer running state (value = pointStatus, state flag = 0).
        message.append(";采煤机运行状态;").append(pointStatus).append(";;0;")
                .append(dataTimeFormat).append("~");
        // Message terminator.
        message.append("||");
        return message.toString();
    }

    /**
     * Sends one record to {@link #topic}, honoring the async/sync mode.
     *
     * @param key     record key (drives partitioning)
     * @param message record value
     */
    private void sendRecord(String key, String message) {
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, key, message);
        if (isAsync) {
            // Asynchronous send: fire-and-forget.
            producer.send(record);
        } else {
            try {
                // Synchronous send: block until the broker acknowledges.
                producer.send(record).get();
                LOG.info("发送成功 : {}", message);
            } catch (InterruptedException ie) {
                // Restore the interrupt flag so callers can observe it.
                Thread.currentThread().interrupt();
                LOG.error("The InterruptedException occured.", ie);
            } catch (ExecutionException ee) {
                LOG.error("The ExecutionException occured.", ee);
            }
        }
    }

    /**
     * Sends one ZC-system message, resolving {@code SystemService} from the
     * Spring container, then pauses for one send interval. Intended to be
     * driven by a scheduler.
     */
    public void sendZCMessage() {
        LOG.info("定时任务执行中...");
        // Security authentication must succeed before any send.
        if (!prepareSecurity()) {
            return;
        }
        // Manually fetch the systemService bean from the Spring container.
        SystemService systemService = SpringContextUtil.getBean(SystemService.class);
        // Fetch the shearer status of the ZC system for this mine.
        SynergiaNowPo zcSystemStatus = systemService.getZCSystemStatus(mineCode);
        // Use the mine code as the record key, consistent with run(); the
        // previous implementation keyed on the entire message text, which
        // defeated keyed partitioning.
        sendRecord(mineCode, buildZcMessage(zcSystemStatus));
        try {
            // Block for one send interval (60000 ms).
            LOG.info("线程阻塞 1 分钟 (60000 毫秒)");
            Thread.sleep(SEND_INTERVAL_MS);
        } catch (InterruptedException e) {
            // Preserve the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Producer thread body: loops forever (until interrupted), fetching the
     * latest ZC-system status and sending it once per minute.
     */
    @Override
    public void run() {
        // An interrupt now cleanly stops the loop; the original while(true)
        // swallowed InterruptedException and could never be shut down.
        while (!Thread.currentThread().isInterrupted()) {
            LOG.info("综采系统数据发送中...");
            SystemService systemService = new SystemServiceImpl();
            // Fetch the shearer status of the ZC system for this mine.
            SynergiaNowPo zcSystemStatus = systemService.getZCSystemStatus(mineCode);
            sendRecord(String.valueOf(mineCode), buildZcMessage(zcSystemStatus));
            try {
                // Block for one send interval (60000 ms).
                Thread.sleep(SEND_INTERVAL_MS);
            } catch (InterruptedException e) {
                // Restore the flag and exit the loop.
                Thread.currentThread().interrupt();
                break;
            }
        }
    }

    /**
     * Releases the underlying Kafka producer. Call when this sender is no
     * longer needed; the original implementation leaked the producer.
     */
    public void close() {
        producer.close();
    }

    public static void main(String[] args) {
        // Security authentication must succeed before starting the sender.
        if (!prepareSecurity()) {
            return;
        }
        // Whether to use asynchronous send mode.
        final boolean asyncEnable = false;
        Producer producerThread = new Producer(KafkaProperties.ZCXT_ZCSS_TOPIC_tmp, asyncEnable, "150781013671");
        producerThread.start();
    }

    /**
     * Callback for asynchronous sends: logs record metadata on success and
     * the failure cause otherwise. Declared static so it does not retain a
     * hidden reference to the enclosing Producer instance.
     */
    static class DemoCallBack implements Callback {
        private final Logger logger = LoggerFactory.getLogger(DemoCallBack.class);

        // Send start time, used to compute the round-trip latency.
        private final long startTime;

        // Record key, echoed in the success log line.
        private final int key;

        // Record value, echoed in the success log line.
        private final String message;

        public DemoCallBack(long startTime, int key, String message) {
            this.startTime = startTime;
            this.key = key;
            this.message = message;
        }

        /**
         * Invoked after the broker has processed the record in asynchronous
         * send mode.
         *
         * @param metadata  record metadata; {@code null} when the send failed
         * @param exception send error; {@code null} when no error occurred
         */
        @Override
        public void onCompletion(RecordMetadata metadata, Exception exception) {
            long elapsedTime = System.currentTimeMillis() - startTime;
            if (metadata != null) {
                logger.info("message({}, {}) sent to partition({}), offset({}) in {} ms",
                        key, message, metadata.partition(), metadata.offset(), elapsedTime);
            } else if (exception != null) {
                logger.error("The Exception occured.", exception);
            }
        }
    }
}