package datax.hook;

import com.alibaba.datax.common.spi.Hook;
import com.alibaba.datax.common.util.Configuration;
import com.alibaba.datax.core.util.container.CoreConstant;
import com.alibaba.fastjson.JSON;
import com.qihoo.finance.govern.common.database.constants.DbType;
import com.qihoo.finance.govern.modules.metadata.enums.JobType;
import com.qihoo.finance.govern.modules.metadata.enums.SyncLogEventType;
import com.qihoo.finance.govern.modules.metadata.mq.SyncLogMsg;
import com.qihoo.finance.govern.modules.metadata.utils.DButils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Date;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * 测试spi加载datax hook实现类
 * 1、datax 目录下新建
 *
 * @author: luozehuan
 * @date: 2022/03/31
 **/
public class SyncLogHook implements Hook {

    // SLF4J convention: one static final logger per class.
    private static final Logger logger = LoggerFactory.getLogger(SyncLogHook.class);

    /** Kafka topic the sync-log message is published to. */
    private static final String SYNCLOG_TOPIC = "topic_data_govern_syncLog";

    @Override
    public String getName() {
        return "syncLogHook";
    }

    /**
     * Invoked by the DataX core after the job runs. When the sync-log switch is
     * enabled and the writer targets GP (Greenplum), builds a {@link SyncLogMsg}
     * from the writer configuration and publishes it to Kafka.
     *
     * @param jobConf the DataX job configuration
     * @param msg     job metrics supplied by the DataX core (unused here)
     */
    @Override
    public void invoke(Configuration jobConf, Map<String, Number> msg) {
        // Decide whether to publish to Kafka; disabled (0) by default.
        boolean syncLogSwitch = jobConf.getInt(CoreConstant.DATAX_JOB_SETTING_SYNCLOGSWITCH, 0) > 0;
        logger.info("HOOK: syncLogSwitch:{}", syncLogSwitch);
        // Writer plugin name, lower-cased for the "gp" substring check below.
        String writerName = StringUtils.lowerCase(jobConf.getString(CoreConstant.DATAX_JOB_CONTENT_WRITER_NAME, ""));
        // Only data synced into the GP data center needs an MQ notification.
        if (syncLogSwitch && writerName.contains("gp")) {
            // Resolve the database name from the writer's JDBC URL.
            String jdbcUrl = jobConf.getString(CoreConstant.DATAX_JOB_CONTENT_WRITER_PARAMETER_JDBCURL, "");
            logger.info("HOOK: writer jdbcUrl:{}", jdbcUrl);
            String dbName = DButils.parseDbNameFromJdbc(jdbcUrl);
            // Resolve schema and table name, expected as "schema.table".
            String schemaTableName = jobConf.getString(CoreConstant.DATAX_JOB_CONTENT_WRITER_PARAMETER_TABLE, "");
            logger.info("HOOK: writer table:{}", schemaTableName);
            String[] schemaTable = StringUtils.split(schemaTableName, ".");
            // Guard against a missing or unqualified table name: the original
            // unconditional [0]/[1] access threw ArrayIndexOutOfBoundsException
            // when the value was empty or lacked a schema prefix.
            if (schemaTable == null || schemaTable.length < 2) {
                logger.warn("HOOK: writer table '{}' is not in 'schema.table' form, skip sync log", schemaTableName);
                return;
            }
            String schemaName = schemaTable[0];
            String tableName = schemaTable[1];
            // Build the sync-log message for the metadata service.
            SyncLogMsg msgObj = new SyncLogMsg();
            msgObj.setEventType(SyncLogEventType.SYNC_DATA.getCode());
            msgObj.setJobType(JobType.DATAX.getCode());
            msgObj.setDbType(DbType.POSTGRE_SQL.getCode());
            msgObj.setDbName(dbName);
            msgObj.setSchemaName(schemaName);
            msgObj.setTableName(tableName);
            msgObj.setTableJson("");
            // Record when this sync completed.
            msgObj.setLatestSyncTime(new Date());
            new SyncLogProducer().send(JSON.toJSONString(msgObj), SYNCLOG_TOPIC);
        }
    }
}
