package com.msl.debezium.kafka.consumer;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.msl.debezium.common.DateUtil;
import com.msl.debezium.dto.DebeziumTestDto;
import com.msl.debezium.service.DebeziumTestService;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.*;

@Component
public class KafkaListeners{

    private static final Logger logger = LoggerFactory.getLogger(KafkaListeners.class);

    private static final String TOPICS = "mysql.kang.*";
    private static final String GROUP_ID = "debezium_cdc";

    /** Fallback table name used when the Debezium "source" block carries no table. */
    private static final String DEFAULT_TABLE_NAME = "user";

    @Autowired
    private DebeziumTestService debeziumService;

    /**
     * Consumes a batch of Debezium CDC records, counts processed rows per source table,
     * upserts those counts via {@link DebeziumTestService}, and commits offsets.
     *
     * @param records  batch of Kafka records whose values are Debezium change-event JSON
     * @param ack      Spring acknowledgment (manual ack path is currently disabled)
     * @param consumer Kafka consumer used for the async offset commit
     */
    //@KafkaListener(groupId = GROUP_ID, topicPattern = TOPICS)
    public void listen(List<ConsumerRecord<String, String>> records, Acknowledgment ack, Consumer<String, String> consumer){

        // Per-table record counts accumulated over this batch.
        Map<String, Long> countsByTable = new HashMap<>();
        for (ConsumerRecord<String, String> record : records){
            logger.info("偏移量为：{},获取到topic为{}的消息!", record.offset(), record.topic());
            // Skip empty/tombstone records; do NOT abort the whole batch (was: return).
            if(StringUtils.isEmpty(record.value())){
                continue;
            }
            JSONObject value = JSON.parseObject(record.value());
            JSONObject payload = value.getJSONObject("payload");
            if(Objects.isNull(payload)){
                // Not a change event we can use; move on to the next record.
                continue;
            }
            // "after" is null for delete events and may be empty for schema messages.
            JSONObject after = payload.getJSONObject("after");
            if(after == null || after.isEmpty()){
                continue;
            }
            Long created_at = after.getLong("created_at");
            Long updated_at = after.getLong("updated_at");
            if(created_at != null && updated_at != null){
                String createdStr = DateUtil.dateTimeToStr(new Date(created_at));
                String updatedStr = DateUtil.dateTimeToStr(new Date(updated_at));
                logger.info("表创建时间为：{}，表修改时间为：{}，写入表数据为：{}", createdStr, updatedStr, JSONObject.toJSONString(after));
            }else{
                logger.info("写入表数据为：{}", JSONObject.toJSONString(after));
            }
            // Resolve the table name from the Debezium "source" block, falling back to
            // the default. (The original condition tested the non-empty default, so the
            // fallback could never apply and a missing "source" block caused an NPE.)
            String tableName = DEFAULT_TABLE_NAME;
            JSONObject source = payload.getJSONObject("source");
            if(source != null && !StringUtils.isEmpty(source.getString("table"))){
                tableName = source.getString("table");
            }
            // Count how many records of this batch belong to the same table.
            countsByTable.merge(tableName, 1L, Long::sum);
        }
        // Timestamp for created/updated fields of the stats rows.
        Date now = new Date();
        for (Map.Entry<String, Long> entry : countsByTable.entrySet()){
            DebeziumTestDto debezium = debeziumService.findByTableName(entry.getKey(), now);
            if(debezium == null){
                // First sighting of this table today: insert with the batch count.
                // (Was: insert with 1, then fall through and add the count again,
                // double-counting and issuing a redundant update.)
                debezium = new DebeziumTestDto();
                debezium.setTableName(entry.getKey());
                debezium.setCountProcessed(entry.getValue());
                debezium.setCreatedAt(now);
                debezium.setUpdatedAt(now);
                debeziumService.addDebeziumTest(debezium);
            }else{
                debezium.setCountProcessed(debezium.getCountProcessed() + entry.getValue());
                debezium.setUpdatedAt(now);
                debeziumService.updateDebeziumTest(debezium);
            }
        }
        //spring方式手动提交偏移量
        //ack.acknowledge();
        //kafka手动提交偏移量，可以使用回调方法，添加入参Consumer consumer即可
        consumer.commitAsync();
        /*consumer.commitAsync(new OffsetCommitCallback() {
            @Override
            public void onComplete(Map<TopicPartition, OffsetAndMetadata> offsets, Exception exception) {
                if(exception == null){
                    logger.info("--- offsets ---" + offsets);
                }else{
                    logger.error("fail to commit offsets {}", offsets, exception);
                }
            }
        });*/
    }

}
