package com.kafka.sync1;

import com.alibaba.fastjson.JSONObject;
import com.db.jdbc.JdbcUtils;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * @author xq
 * @Description: Kafka consumer that replays JSON change records into a relational table.
 * @date 2020/6/9 9:41
 */
public class MyConsumer {

    // Eagerly initialized. The previous declaration was never assigned (the
    // static init block was commented out), so every logger call would have
    // thrown a NullPointerException.
    private static final Logger logger = LoggerFactory.getLogger(MyConsumer.class);

    // Kafka consumer properties, built in init().
    private static Properties properties = null;
    // Set by the shutdown hook so the poll loop can terminate cleanly.
    private static boolean isStop = false;
    // Target table name, read from TARGET_TABLE in init().
    private static String table;
    // Running totals: reported at shutdown / compared against the fail limit.
    private static int successCount;
    private static int failCount = 0;
    // Command-line configuration shared by all static methods.
    private static Properties config;

    public static void main(String[] args) {
        config = CliDefinition.getConfig(args);
        start();
    }

    /**
     * Main work loop: subscribes to the configured topic, turns every polled
     * record into a SQL statement and applies the statements as a JDBC batch,
     * retrying a failed batch up to DB_MAX_RETRY_COUNT times before counting
     * its records as failed.
     *
     * @throws RuntimeException if the database connection cannot be prepared,
     *                          or the total failure count exceeds DB_MAX_FALI_COUNT
     */
    public static void start() {
        init();
        // Register the hook BEFORE the loop (the original registered it after
        // the loop, which never runs while the loop is alive) and use it to
        // request a clean stop.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            isStop = true;
            logger.info("程序结束！成功处理消息数：" + successCount);
        }));
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
        logger.info("consumer start work!");
        // Subscribe to the configured topic.
        consumer.subscribe(Collections.singletonList(config.getProperty(Config.KAFKA_TOPIC.value())));
        Connection conn = JdbcUtils.buildConnection();
        Statement statement;
        try {
            // Commit manually so a failed batch can be rolled back as a unit.
            conn.setAutoCommit(false);
            statement = conn.createStatement();
        } catch (SQLException e) {
            throw new RuntimeException("数据库访问错误:" + e.getMessage(), e);
        }
        int retryCount = Math.max(0, Integer.parseInt(config.getProperty(Config.DB_MAX_RETRY_COUNT.value())));
        int maxFailCount = Integer.parseInt(config.getProperty(Config.DB_MAX_FALI_COUNT.value()));
        List<String> sqlList = new ArrayList<>();
        try {
            while (!isStop) {
                // poll() waits up to 1s; the number of records returned varies.
                ConsumerRecords<String, String> poll = consumer.poll(Duration.ofMillis(1000));
                if (poll.isEmpty()) {
                    continue;
                }
                // Convert each JSON message into a SQL statement.
                for (ConsumerRecord<String, String> record : poll) {
                    Map<String, Object> map = JSONObject.parseObject(record.value());
                    String sql = getSql(map);
                    // Unknown operations yield ""; batching an empty statement
                    // would make the whole batch fail, so skip them.
                    if (!sql.isEmpty()) {
                        sqlList.add(sql);
                    }
                }
                logger.info("poll message size: {}", poll.count());
                executeBatchWithRetry(conn, statement, sqlList, retryCount, maxFailCount, poll);
                sqlList.clear();
            }
        } finally {
            // Release JDBC and Kafka resources even if the loop exits abnormally.
            closeQuietly(statement);
            closeQuietly(conn);
            consumer.close();
        }
    }

    /**
     * Runs the buffered statements as one JDBC batch, committing on success and
     * rolling back + retrying (up to {@code retryCount} extra attempts) on
     * failure. When all attempts fail, the offending records are dumped to the
     * log and counted toward the fail limit.
     */
    private static void executeBatchWithRetry(Connection conn, Statement statement, List<String> sqlList,
                                              int retryCount, int maxFailCount,
                                              ConsumerRecords<String, String> poll) {
        for (int i = 0; i <= retryCount; i++) {
            try {
                for (String s : sqlList) {
                    logger.debug(s);
                    statement.addBatch(s);
                }
                int[] ints = statement.executeBatch();
                conn.commit();
                successCount += ints.length;
                break;
            } catch (SQLException e) {
                // Undo the partially applied batch before retrying, otherwise
                // the next attempt runs on top of half-committed work.
                try {
                    conn.rollback();
                } catch (SQLException re) {
                    logger.error("rollback failed: " + re.getMessage(), re);
                }
                logger.error(String.format("数据库批处理失败，已重做次数：%d, 失败原因：%s", i, e.getMessage()), e);
                if (i == retryCount) {
                    logger.error("数据库批处理失败，已达到最大重做数:" + e.getMessage());
                    // Dump the failed records so they can be replayed from the log.
                    poll.forEach(r -> logger.error(r.toString()));
                    failCount += poll.count();
                    if (failCount > maxFailCount) {
                        throw new RuntimeException("失败的记录总数达到 最大值", e);
                    }
                }
            } finally {
                try {
                    statement.clearBatch();
                } catch (SQLException e) {
                    logger.error("clearBatch failed: " + e.getMessage(), e);
                }
            }
        }
    }

    /** Closes a JDBC resource, logging (not propagating) any failure. */
    private static void closeQuietly(AutoCloseable resource) {
        if (resource == null) {
            return;
        }
        try {
            resource.close();
        } catch (Exception e) {
            logger.error("failed to close resource: " + e.getMessage(), e);
        }
    }

    /**
     * Builds a SQL statement for {@link #table} from a change record. The
     * record's "OPERATION" entry (INSERT/UPDATE/DELETE) selects the statement
     * type and is removed; the remaining non-null entries become column/value
     * pairs. Unknown operations yield an empty string.
     *
     * NOTE(review): values are embedded as quoted literals. Single quotes are
     * doubled so the SQL stays well-formed, but a PreparedStatement would be
     * the proper defense against SQL injection.
     *
     * @param map mutable change record; "OPERATION" is removed as a side effect
     * @return the generated SQL, or an empty string for unknown operations
     */
    public static String getSql(Map map) {
        StringBuilder sb = new StringBuilder();
        Object operation = map.remove("OPERATION");
        if ("INSERT".equals(operation)) {
            StringBuilder cols = new StringBuilder("(");
            StringBuilder vals = new StringBuilder("(");
            map.forEach((key, value) -> {
                if (value != null) {
                    cols.append(key).append(",");
                    vals.append("'").append(escapeSqlValue(value)).append("',");
                }
            });
            // Turn the trailing comma of each list into the closing parenthesis.
            // Assumes at least one non-null column, as the original did.
            cols.replace(cols.length() - 1, cols.length(), ")");
            vals.replace(vals.length() - 1, vals.length(), ")");
            sb.append("INSERT INTO ").append(table).append(cols).append(" values ").append(vals);
        } else if ("UPDATE".equals(operation)) {
            sb.append("UPDATE ").append(table).append(" SET ");
            map.forEach((key, value) -> {
                if (value != null) {
                    sb.append(key).append("=").append("'").append(escapeSqlValue(value)).append("',");
                }
            });
            // Replace the trailing comma with the primary-key WHERE clause.
            sb.deleteCharAt(sb.length() - 1).append(" WHERE ").append(getPrimaryCondition(map));
        } else if ("DELETE".equals(operation)) {
            sb.append("DELETE FROM ").append(table).append(" WHERE ").append(getPrimaryCondition(map));
        }
        return sb.toString();
    }

    /** Doubles single quotes so a value can sit inside a SQL '...' literal. */
    private static String escapeSqlValue(Object value) {
        return value.toString().replace("'", "''");
    }

    /**
     * Builds the {@code pk1= 'v1' and pk2= 'v2'} condition used by UPDATE and
     * DELETE from the comma-separated SOURCE_KEY configuration.
     *
     * @param map change record supplying the key values
     * @return the condition text (without the WHERE keyword)
     * @throws RuntimeException if SOURCE_KEY (-sk) is missing or blank
     */
    public static String getPrimaryCondition(Map map) {
        String primaryKey = config.getProperty(Config.SOURCE_KEY.value());
        if (primaryKey == null || primaryKey.trim().isEmpty()) {
            throw new RuntimeException("primary key为空，请注意 -sk 配置项");
        }
        StringBuilder sb = new StringBuilder();
        for (String k : primaryKey.split(",")) {
            if (sb.length() > 0) {
                sb.append(" and ");
            }
            sb.append(k).append("= '").append(map.get(k)).append("'");
        }
        return sb.toString();
    }

    /**
     * Reads the target table and Kerberos settings from {@link #config} and
     * builds the Kafka consumer {@link #properties}.
     */
    public static void init() {
        table = config.getProperty(Config.TARGET_TABLE.value());
        // Kerberos configuration for the SASL_PLAINTEXT connection below.
        System.setProperty("java.security.krb5.conf", config.getProperty(Config.KRB5_CONF.value()));
        System.setProperty("java.security.auth.login.config", config.getProperty(Config.JAAS_CONF.value()));

        properties = new Properties();

        // Security protocol and Kerberos service name.
        properties.put("security.protocol", "SASL_PLAINTEXT");
        properties.put("sasl.kerberos.service.name", "kafka");

        // Cluster bootstrap servers.
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getProperty(Config.KAFKA_BROKE_URL.value()));

        // Key/value deserializers.
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");

        // Auto-commit offsets every second.
        // NOTE(review): offsets may be committed before a batch reaches the
        // database, so records can be lost on a crash — consider manual commits.
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");

        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // Consumer group.
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, config.getProperty(Config.KAFKA_CONSUMER_GROUP.value()));
        // Maximum number of records a single poll() may return.
        properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 300);
    }

}
