package com.oowoo.binlog2mq.listener;

import com.alibaba.fastjson.JSON;
import com.github.shyiko.mysql.binlog.BinaryLogClient;
import com.github.shyiko.mysql.binlog.event.*;
import com.oowoo.binlog2mq.entity.BinlogDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;

import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/*
 * @Author ZhangBin
 * @Date 2019/12/6 20:38
 * @Description: Listens to MySQL binlog events and publishes the row data of
 *               watched tables to Kafka.
 **/

@Slf4j
@Component
public class BiglogListener implements CommandLineRunner {
    @Value("${mysql.binlog.host}")
    private String binlogHost;
    @Value("${mysql.binlog.port}")
    private int binlogPort;
    @Value("${mysql.binlog.username}")
    private String binlogUsername;
    @Value("${mysql.binlog.password}")
    private String binlogPassword;
    @Value("${mysql.binlog.serverId}")
    private long binlogServerId;
    // Comma-separated list of "database.table" names whose changes we forward.
    @Value("#{'${mysql.binlog.tableList}'.split(',')}")
    private List<String> tableList;
    @Value("${kafka.topic.binlog_blogcore}")
    private String binlogBlogcoreTopic;

    public static final String UPDATE = "update";
    public static final String INSERT = "insert";
    public static final String DELETE = "delete";

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Connects to the MySQL server as a replication client and starts streaming
     * binlog events. {@code BinaryLogClient.connect()} blocks for the lifetime of
     * the connection, which is why this runner is annotated {@code @Async} — the
     * listener loop must not hold up application startup.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the binlog connection fails
     */
    @Async
    @Override
    public void run(String... args) throws Exception {
        // Maps binlog tableId -> "database.table"; populated from TableMapEventData
        // events and consulted by the row-event handlers below.
        HashMap<Long, String> tableMap = new HashMap<>();
        BinaryLogClient client = new BinaryLogClient(binlogHost, binlogPort, binlogUsername, binlogPassword);
        // Unique server id this client registers with (must not collide with other replicas).
        client.setServerId(binlogServerId);
        client.registerEventListener(event -> eventListerAndSendKaFKa(event, tableMap));
        // Blocking call: streams events until the connection is closed.
        client.connect();
    }

    /**
     * Handles one binlog event: records table-id mappings, filters row events
     * down to the tables in {@code tableList}, and publishes each changed row to
     * Kafka as JSON.
     *
     * <p>Kafka is used as a buffer for bursts of changes and to allow other
     * systems to subscribe to the same change stream.
     *
     * @param event    the raw binlog event
     * @param tableMap mutable tableId -> "database.table" mapping, updated here
     *                 when a TableMapEventData event arrives
     */
    public void eventListerAndSendKaFKa(Event event, HashMap<Long, String> tableMap) {
        EventData data = event.getData();
        if (data == null) {
            return;
        }
        log.info("收到binlog日志变化,当前事件类型--->{}", data.getClass().getTypeName());
        if (data instanceof TableMapEventData) {
            // Remember which numeric tableId refers to which table; row events
            // only carry the tableId.
            TableMapEventData tableMapEventData = (TableMapEventData) data;
            tableMap.put(tableMapEventData.getTableId(), tableMapEventData.getDatabase() + "." + tableMapEventData.getTable());
        }
        if (data instanceof UpdateRowsEventData) {
            UpdateRowsEventData updateRowsEventData = (UpdateRowsEventData) data;
            String dataBaseTableName = tableMap.get(updateRowsEventData.getTableId());
            if (isWatchedTable(dataBaseTableName)) {
                String fullName = dataBaseTableName + "." + UPDATE;
                // Update rows come as (before, after) pairs; only the new values matter.
                for (Map.Entry<Serializable[], Serializable[]> row : updateRowsEventData.getRows()) {
                    sendRow(fullName, row.getValue());
                }
            }
        } else if (data instanceof WriteRowsEventData) {
            WriteRowsEventData writeRowsEventData = (WriteRowsEventData) data;
            String dataBaseTableName = tableMap.get(writeRowsEventData.getTableId());
            if (isWatchedTable(dataBaseTableName)) {
                String fullName = dataBaseTableName + "." + INSERT;
                for (Serializable[] row : writeRowsEventData.getRows()) {
                    sendRow(fullName, row);
                }
            }
        } else if (data instanceof DeleteRowsEventData) {
            DeleteRowsEventData deleteRowsEventData = (DeleteRowsEventData) data;
            String dataBaseTableName = tableMap.get(deleteRowsEventData.getTableId());
            if (isWatchedTable(dataBaseTableName)) {
                String fullName = dataBaseTableName + "." + DELETE;
                for (Serializable[] row : deleteRowsEventData.getRows()) {
                    sendRow(fullName, row);
                }
            }
        }
    }

    /** Returns true when the resolved "database.table" name is one we forward. */
    private boolean isWatchedTable(String dataBaseTableName) {
        return dataBaseTableName != null && tableList.contains(dataBaseTableName);
    }

    /** Serializes one changed row as JSON and publishes it to the binlog topic. */
    private void sendRow(String fullName, Serializable[] rowValues) {
        String msg = JSON.toJSONString(new BinlogDto(fullName, rowValues));
        log.info("监听到数据变化--->{}--->{}", fullName, msg);
        kafkaTemplate.send(binlogBlogcoreTopic, msg);
    }
}
