package com.itzx.controller;

import com.alibaba.fastjson.JSONObject;
import com.alibaba.otter.canal.protocol.CanalEntry.*;
import com.itzx.common.Common;
import com.itzx.common.TaskCallBack;
import com.itzx.model.ColumModel;
import com.itzx.server.KafkaServer;
import com.itzx.utils.CanalUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * @Author yyeleven
 */
/**
 * Singleton controller that consumes MySQL binlog entries delivered by Canal,
 * converts each changed row into a JSON payload and publishes it to Kafka.
 *
 * <p>Thread-safety: the singleton is obtained via double-checked locking; the
 * instance field is {@code volatile} so a partially constructed object can
 * never be observed by another thread.
 *
 * @author yyeleven
 */
public class CanalController implements TaskCallBack {
    // volatile is required for correct double-checked locking: without it the
    // JMM allows another thread to see a non-null but partially constructed instance.
    private static volatile CanalController mConnector;
    // Single worker thread that runs the long-lived Canal polling task.
    private static final ExecutorService mPool = Executors.newFixedThreadPool(1);
    private final Logger mLogger;
    // Kafka topic to publish to; set once in start() before the task runs.
    private String mTopic;

    /**
     * Returns the lazily created singleton instance (double-checked locking).
     *
     * @return the shared {@code CanalController} instance
     */
    public static CanalController getInstance() {
        if (null == mConnector) {
            synchronized (CanalController.class) {
                if (null == mConnector) {
                    mConnector = new CanalController();
                }
            }
        }
        return mConnector;
    }

    private CanalController() {
        mLogger = LoggerFactory.getLogger(this.getClass());
    }

    /**
     * Loads the Canal connection settings and submits the polling task to the
     * worker pool. Entries read by the task are delivered to {@link #running(List)}.
     *
     * @return this controller, to allow fluent chaining
     * @throws Exception if the properties file cannot be loaded or a value is invalid
     */
    public CanalController start() throws Exception {
        Properties properties = CanalUtil.loadProperties("/canal-config.properties");
        String ip = properties.getProperty(Common.SERVER_IP_KEY);
        int port = Integer.parseInt(properties.getProperty(Common.SERVER_PORT_KEY));
        String destination = properties.getProperty(Common.DESTINATION_KEY);
        mTopic = properties.getProperty(Common.TOPIC_KEY);
        mPool.execute(new CanalTask(ip, port, destination, this));
        return this;
    }

    /**
     * Callback invoked with a batch of binlog entries. Transaction begin/end
     * markers are skipped; for every changed row of an INSERT/UPDATE/DELETE a
     * JSON message is built and sent to Kafka.
     *
     * <p>Fix over the previous version: the message is now built and published
     * inside the {@code RowData} loop. Previously only the LAST row of a
     * multi-row statement was sent, silently dropping all other rows.
     *
     * @param entrys binlog entries fetched from the Canal server
     */
    public void running(List<Entry> entrys) {
        for (Entry entry : entrys) {
            // Transaction boundary markers carry no row data — skip them.
            EntryType entryType = entry.getEntryType();
            if (entryType == EntryType.TRANSACTIONBEGIN
                    || entryType == EntryType.TRANSACTIONEND) {
                continue;
            }
            // Parse the binlog payload and emit one message per changed row.
            try {
                RowChange rowChange = RowChange.parseFrom(entry.getStoreValue());
                // Which kind of statement produced this change: INSERT, UPDATE or DELETE.
                EventType eventType = rowChange.getEventType();
                mLogger.info("{}-----------------------------", eventType.name());

                for (RowData rowData : rowChange.getRowDatasList()) {
                    JSONObject jsonObject = null;
                    if (eventType == EventType.DELETE) {
                        // For deletes only the pre-image of the row exists.
                        jsonObject = structModel(entry, rowData.getBeforeColumnsList(), eventType.name());
                    } else if (eventType == EventType.INSERT || eventType == EventType.UPDATE) {
                        // For inserts/updates publish the post-image of the row.
                        jsonObject = structModel(entry, rowData.getAfterColumnsList(), eventType.name());
                    }
                    if (null == jsonObject) {
                        // Other event types (DDL, etc.) are not published.
                        continue;
                    }
                    ColumModel model = new ColumModel();
                    model.setDataBase(entry.getHeader().getSchemaName()); // database the change occurred in
                    model.setTableName(entry.getHeader().getTableName()); // table the change occurred in
                    model.setContent(jsonObject.toJSONString());
                    String data = JSONObject.toJSONString(model);
                    mLogger.info(data);
                    // Random UUID key: no partition affinity is required for these messages.
                    KafkaServer.getInstance().sendMsg(mTopic, UUID.randomUUID().toString(), data);
                }
            } catch (Exception e) {
                mLogger.error("parser msg is  error ", e);
                throw new RuntimeException("ERROR ## parser of eromanga-event has an error , data:" + entry.toString(),
                        e);
            }
        }
    }

    /**
     * Builds the JSON payload for one changed row: every column name/value pair
     * plus the binlog execution time and the event-type name.
     *
     * @param entry         the binlog entry the row belongs to (source of executeTime)
     * @param columns       the row's column list (before-image for DELETE, after-image otherwise)
     * @param eventTypeName name of the event type, e.g. {@code "INSERT"}
     * @return the assembled JSON object
     */
    private JSONObject structModel(
            Entry entry, List<Column> columns, String eventTypeName) {
        JSONObject jsonObject = new JSONObject();
        for (Column column : columns) {
            jsonObject.fluentPut(column.getName(), column.getValue());
        }
        jsonObject.fluentPut("executeTime", entry.getHeader().getExecuteTime());
        jsonObject.fluentPut("eventType", eventTypeName);
        return jsonObject;
    }

    /**
     * Failure callback from the Canal task: logs the cause (previously the
     * exception was silently discarded) and shuts down the worker pool.
     *
     * @param e the error that aborted the Canal task
     */
    public void failError(Exception e) {
        mLogger.error("canal task failed, shutting down worker pool", e);
        if (mPool != null && !mPool.isShutdown()) {
            mPool.shutdown();
        }
    }
}
