package org.lk.common.writer;

import cn.hutool.db.Db;
import cn.hutool.db.DbUtil;
import cn.hutool.db.Entity;
import cn.hutool.json.JSONUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.lk.common.build.DataSourceBuild;
import org.lk.common.log.CdcJobHelper;
import org.lk.config.SpringUtil;
import org.lk.entity.CdcJob;
import org.lk.entity.CdcLog;
import org.lk.entity.DataSourceEntity;
import org.lk.mapper.CdcJobMapper;
import org.lk.mapper.CdcLogMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

/**
 * @author: lk
 * @since: 流式写入
 */
/**
 * Flink sink that replays CDC change events (JSON strings produced upstream)
 * against a target JDBC datasource, keeping the owning {@link CdcJob}'s binlog
 * position up to date as records flow through.
 *
 * <p>Lifecycle: {@link #open} builds the target connection, {@link #invoke}
 * applies one event per call, {@link #close} persists the final job/log state
 * and releases the connection.
 */
public class StreamSinkFunction extends RichSinkFunction<String> {
    private static final Logger logger = LoggerFactory.getLogger(StreamSinkFunction.class);

    // CDC job configuration; its savepoint / binlog file name are refreshed on
    // every event and persisted once in close().
    private CdcJob cdcJob;

    // Describes the target datasource the sink writes into.
    private DataSourceEntity dataSourceEntity;

    // Run-log entity; marked successful (code 200) when the sink closes.
    private CdcLog cdcLog;

    // Hutool Db facade and its JDBC connection, created in open().
    private Db use;
    private Connection connection;

    public StreamSinkFunction() {
    }

    public StreamSinkFunction(CdcJob cdcJob, DataSourceEntity dataSourceEntity, CdcLog cdcLog) {
        this.cdcJob = cdcJob;
        this.dataSourceEntity = dataSourceEntity;
        this.cdcLog = cdcLog;
    }

    /**
     * Opens the connection to the target datasource, once per parallel task.
     *
     * @param parameters Flink task configuration (passed through to super)
     * @throws Exception if the datasource cannot be built or connected
     */
    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        DataSource dataSource = DataSourceBuild.build(dataSourceEntity);
        use = DbUtil.use(dataSource);
        connection = use.getConnection();
    }

    /**
     * Processes one change event: records the binlog position ("file"/"pos")
     * on the job, then replays the change on the target database.
     *
     * @param value   the event as a JSON object string
     * @param context Flink sink context (unused)
     * @throws Exception if applying the change to the target fails
     */
    @Override
    public void invoke(String value, Context context) throws Exception {
        HashMap<?, ?> event = JSONUtil.toBean(value, HashMap.class);
        // Binlog positions can exceed Integer range; the value is only ever
        // stringified, so avoid the hard (Integer) cast the old code used.
        Object pos = event.get("pos");
        String file = (String) event.get("file");
        cdcJob.setSavePoint(String.valueOf(pos));
        cdcJob.setBinlogFileName(file);
        // Apply the corresponding insert/update/delete.
        operation(event);
        CdcJobHelper.log("操作数据:{}", value);
    }

    /**
     * Replays a single event on the target table according to its "type":
     * insert/select → insert, update → update by pre-image, delete → delete.
     * Unknown types are ignored.
     *
     * @param event decoded event map with keys "type", "table", "data"
     *              (and "updateBeforeData" for updates)
     * @throws SQLException if the update/delete statement fails
     */
    @SuppressWarnings("unchecked")
    private void operation(Map<?, ?> event) throws SQLException {
        String type = (String) event.get("type");
        String table = (String) event.get("table");
        Map<String, Object> data = (Map<String, Object>) event.get("data");
        Entity record = new Entity();
        record.setTableName(table);
        if ("insert".equals(type) || "select".equals(type)) {
            data.forEach(record::put);
            try {
                use.insert(record);
            } catch (Exception e) {
                // Best-effort insert (e.g. duplicate key during a re-sync):
                // keep the job alive, but never swallow the failure silently.
                logger.warn("insert into {} failed, record skipped", table, e);
            }
        } else if ("update".equals(type)) {
            Map<String, Object> updateBeforeData =
                    (Map<String, Object>) event.get("updateBeforeData");
            data.forEach(record::put);
            // The pre-update image identifies the row(s) to change.
            Entity where = new Entity();
            updateBeforeData.forEach(where::put);
            use.update(record, where);
        } else if ("delete".equals(type)) {
            data.forEach(record::put);
            use.del(record);
        }
    }

    /**
     * Persists the final job state (first sync done, latest binlog position)
     * and marks the run log successful, then releases the JDBC connection.
     * The connection is closed even if the bookkeeping updates throw.
     *
     * @throws Exception if persisting the job/log state fails
     */
    @Override
    public void close() throws Exception {
        super.close();
        try {
            // Record the final savepoint/binlog position on the job.
            CdcJobMapper cdcJobMapper = SpringUtil.getBean(CdcJobMapper.class);
            cdcJob.setHasFirstSync(true);
            cdcJobMapper.updateById(cdcJob);
            // Mark the run log as successful.
            CdcLogMapper logMapper = SpringUtil.getBean(CdcLogMapper.class);
            cdcLog.setHandlerCode(200);
            logMapper.updateById(cdcLog);
        } finally {
            // Guard against open() having failed before these were assigned.
            if (use != null && connection != null) {
                use.closeConnection(connection);
            }
        }
    }
}
