package demo.db.mysql.binlog;

import demo.db.mysql.vo.BinLogMsg;
import demo.vo.common.TimeCost;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * BinLog change-event consumer.
 * <p>
 * Translates {@link BinLogMsg} row events (originally delivered by the Kafka
 * consumer machinery that is currently commented out) into equivalent
 * INSERT / UPDATE / DELETE statements and replays them against the master
 * data source.
 *
 * @author junying.han
 */
@Service
public class BinLogConsumeJob {

    private static final Logger logger = LoggerFactory.getLogger(BinLogConsumeJob.class);

//    @Autowired
//    private ConsumerConnector xmanConsumerConnector;

    /** Master database that replayed statements are executed against. */
    @Autowired
    @Qualifier("master")
    private DataSource masterDataSource;

    /** Kafka topic carrying the binlog messages. */
    @Value("${xman.topic}")
    private String topic;

    /** Consumer group id; also identifies the local data-center. */
    @Value("${xman.group.id}")
    private String groupId;

    /** Group that must NOT consume its own binlog data (NanHui data-center). */
    public static final String NO_CONSUME_GROUP = "xdcsAlert.NanHui";

    /**
     * Columns to skip when replaying UPDATE events.
     * <p>
     * KEY: table name; VALUE: column names excluded from the generated SET clause.
     */
    private Map<String, List<String>> tableIgnoreColumnsMap;

    /**
     * KafkaStream
     */
//    private List<KafkaStream<String, String>> consumerIteratorList;

    /**
     * Initializes the ignore-column map and (when re-enabled) the Kafka
     * message streams. Does nothing for {@link #NO_CONSUME_GROUP}.
     */
    @PostConstruct
    public void init() {
        // The NanHui data-center must not consume its own BIN_LOG data.
        if (NO_CONSUME_GROUP.equals(groupId)) {
            logger.warn("current group {} , skip consume binlog data", groupId);
            return;
        }

        logger.warn("consumeBinLog begin");
        tableIgnoreColumnsMap = new HashMap<>();
        tableIgnoreColumnsMap.put("app",
                Arrays.asList("flow_check", "threshold_max_qps_http", "threshold_min_qps_http", "threshold_max_qps_rpc",
                        "threshold_min_qps_rpc", "qps", "qps_http", "qps_rpc", "qpsBalance", "cmdbSouthMachine",
                        "machine_white_list"));

        // Feeds the commented-out createMessageStreams call below; kept so the
        // Kafka path can be re-enabled without reconstructing it.
        int localConsumerCount = 1;
        Map<String, Integer> topicCountMap = new HashMap<>();
        topicCountMap.put(topic, localConsumerCount);

//        StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
//        StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());

        try {
//            Map<String, List<KafkaStream<String, String>>> consumerMap = xmanConsumerConnector
//                    .createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
//            consumerIteratorList = consumerMap.get(topic);
            logger.warn("启动Kafka监听任务");
        } catch (Exception e) {
            logger.error("启动Kafka监听任务异常", e);
        }
    }

    /**
     * Scheduled consumption loop. Currently a no-op apart from logging because
     * the Kafka stream iteration is commented out.
     */
    @Scheduled(initialDelay = 10 * 1000, fixedDelay = 3 * 1000)
    public void consume() {
        TimeCost cost = new TimeCost();
        logger.debug("kafka consume start");
//        if (consumerIteratorList == null || consumerIteratorList.isEmpty()) {
//            logger.debug("kafka consume end, cost {} ms,consumerIteratorList is empty ", cost.cost());
//            return;
//        }
//        try {
//            for (KafkaStream<String, String> kafkaStream : consumerIteratorList) {
//                ConsumerIterator<String, String> consumerIterator = kafkaStream.iterator();
//                while (hasNext(consumerIterator)) {
//                    MessageAndMetadata<String, String> msgAndMeta = consumerIterator.next();
//                    String msg = msgAndMeta.message();
//                    logger.info("receive msg: {}", msg);
//                    BinLogMsg vo = JSON.parseObject(msg, BinLogMsg.class);
//                    String sql = generateSql(vo);
//                    executeSql(sql);
//                }
//            }
//        } catch (Exception e) {
//            logger.error("consume binLog failed", e);
//        }
        logger.info("kafka consume end, cost {} ms", cost.cost());
    }

    /**
     * Check if there are messages waiting in Kafka, waiting until timeout (10ms by
     * default) for messages to arrive. and catching the timeout exception to return
     * a boolean
     */
//    public static boolean hasNext(ConsumerIterator<String, String> it) {
//        if (it == null) {
//            return false;
//        }
//        try {
//            return it.hasNext();
//        } catch (ConsumerTimeoutException e) {
//            return false;
//        }
//    }

    /**
     * Fetch the next message from the (single) Kafka stream, or null.
     * 
     * @return
     */
//    public MessageAndMetadata<String, String> getNextMessage() {
//        if (consumerIteratorList == null || consumerIteratorList.isEmpty()) {
//            return null;
//        }
//        // it has only a single stream, because there is only one consumer
//        KafkaStream<String, String> stream = consumerIteratorList.get(0);
//        ConsumerIterator<String, String> it = stream.iterator();
//        try {
//            if (it.hasNext()) {
//                return it.next();
//            } else {
//                return null;
//            }
//        } catch (ConsumerTimeoutException e) {
//            return null;
//        }
//    }

    /**
     * Executes a single statement against the master data source.
     * Null/empty SQL is silently ignored; SQL errors are logged, not rethrown.
     *
     * @param sql statement to execute, may be null or empty
     */
    public void executeSql(String sql) {
        if (sql == null || sql.isEmpty()) {
            return;
        }
        logger.debug(sql);
        try (Connection connection = masterDataSource.getConnection();
                Statement statement = connection.createStatement();) {
            int cnt = statement.executeUpdate(sql);
            logger.warn("executeUpdate {} return {}", sql, cnt);
        } catch (SQLException e) {
            logger.error("executeUpdate {} failed", sql, e);
        }
    }

    /**
     * Builds the replay statement for one binlog event.
     *
     * @param binLogMsg decoded binlog row event
     * @return the SQL to execute, or null for DDL / unknown event types
     */
    private String generateSql(BinLogMsg binLogMsg) {
        // DDL is never replayed here; schema changes are handled elsewhere.
        if (binLogMsg == null || binLogMsg.isDdl()) {
            return null;
        }
        if (BinLogMsg.isCreate(binLogMsg.getEventType())) {
            return insertSql(binLogMsg);
        }
        if (BinLogMsg.isDelete(binLogMsg.getEventType())) {
            return deleteSql(binLogMsg);
        }
        if (BinLogMsg.isUpdate(binLogMsg.getEventType())) {
            List<String> ignoreColumns = tableIgnoreColumnsMap.get(binLogMsg.getTableName());
            return updateSql(binLogMsg, ignoreColumns);
        }
        return null;
    }

    /**
     * Builds an INSERT statement from the after-image of the row.
     *
     * @param binLogMsg insert event
     * @return {@code INSERT INTO schema.table(cols) VALUES (vals)}
     */
    private static String insertSql(BinLogMsg binLogMsg) {
        String tableName = binLogMsg.getSchemaName() + "." + binLogMsg.getTableName();
        Map<String, String> afterType = binLogMsg.getAfterType();
        Map<String, String> row = binLogMsg.getRowAfter();

        // Snapshot the entries once so the column list and value list are
        // guaranteed to iterate in the same order.
        List<Entry<String, String>> entryList = row.entrySet().stream().collect(Collectors.toList());

        String columnStr = entryList.stream().map(Entry::getKey).collect(Collectors.joining(","));

        String valueStr = entryList.stream().map(en -> {
            String columnType = afterType.get(en.getKey());
            String columnValue = en.getValue();
            return parseValue(columnType, columnValue);
        }).collect(Collectors.joining(","));

        StringBuilder sql = new StringBuilder("INSERT INTO ").append(tableName).append("(");
        sql.append(columnStr).append(") VALUES (").append(valueStr).append(")");
        return sql.toString();
    }

    /**
     * Renders a column value as a SQL literal.
     * <p>
     * DATETIME values arrive in {@code Date.toString()} form
     * ("EEE MMM dd HH:mm:ss zzz yyyy") and are re-formatted to
     * "yyyy-MM-dd HH:mm:ss". All other values are quoted with backslashes and
     * single quotes escaped, so an embedded quote can neither break nor inject
     * into the generated statement.
     *
     * @param type  column type name, e.g. "DATETIME"; may be null
     * @param value raw column value; null becomes the SQL literal {@code null}
     * @return a quoted SQL literal
     */
    private static String parseValue(String type, String value) {
        if (value == null) {
            return "null";
        }
        if ("DATETIME".equalsIgnoreCase(type)) {
            try {
                // SimpleDateFormat is not thread-safe; a fresh instance per
                // call keeps this method safe without shared state.
                Date parsed = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", Locale.US).parse(value);
                return "'" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(parsed) + "'";
            } catch (ParseException ignored) {
                // Unparseable timestamp: fall through and quote the raw value.
            }
        }
        // Escape backslashes first, then quotes (MySQL string-literal rules).
        return "'" + value.replace("\\", "\\\\").replace("'", "\\'") + "'";
    }

    /**
     * Builds an UPDATE statement from the after-image of the row, skipping the
     * configured per-table ignore columns.
     *
     * @param binLogMsg     update event
     * @param ignoreColumns columns to exclude from the SET clause, may be null
     * @return the UPDATE statement, or null when every column is ignored
     */
    private static String updateSql(BinLogMsg binLogMsg, List<String> ignoreColumns) {
        Map<String, String> afterType = binLogMsg.getAfterType();
        // "id" is the WHERE key and must never appear in the SET clause.
        Set<String> skip = ignoreColumns == null || ignoreColumns.isEmpty() ? new HashSet<>()
                : new HashSet<>(ignoreColumns);
        skip.add("id");
        String tableName = binLogMsg.getSchemaName() + "." + binLogMsg.getTableName();
        Map<String, String> row = binLogMsg.getRowAfter();
        String id = row.get("id");
        String setStr = row.entrySet().stream().filter(en -> !skip.contains(en.getKey())).map(en -> {
            String value = en.getValue();
            String key = en.getKey();
            String type = afterType.get(key);
            return key + "=" + parseValue(type, value);
        }).collect(Collectors.joining(","));
        // Every changed column is ignored: "UPDATE t SET WHERE ..." would be
        // invalid SQL, so emit nothing.
        if (setStr.isEmpty()) {
            logger.debug("all updated columns of {} are ignored, skip", tableName);
            return null;
        }
        // NOTE(review): id is interpolated unquoted — assumes numeric primary
        // keys; confirm against the replicated schemas.
        StringBuilder sql = new StringBuilder("UPDATE ").append(tableName).append(" SET ").append(setStr)
                .append(" WHERE id = ").append(id);
        return sql.toString();
    }

    /**
     * Builds a DELETE statement keyed on the before-image id.
     *
     * @param binLogMsg delete event
     * @return {@code DELETE FROM schema.table WHERE id = ...}
     */
    private static String deleteSql(BinLogMsg binLogMsg) {
        String tableName = binLogMsg.getSchemaName() + "." + binLogMsg.getTableName();
        Map<String, String> row = binLogMsg.getRowBefore();
        String id = row.get("id");
        return String.format("DELETE FROM %s WHERE id = %s", tableName, id);
    }
}
