package cn.cmft.jc.transform;

import cn.cmft.jc.transform.joiner.CollectedTable;
import cn.cmft.jc.transform.joiner.ImpactTask;
import cn.cmft.jc.transform.joiner.ImpactTaskResult;
import cn.cmft.jc.transform.parse.ChangeOperationType;
import cn.cmft.jc.transform.parse.ChangeOptionMessage;
import cn.cmft.jc.transform.parse.ChangeDetail;
import cn.cmft.jc.utils.DruidClient;
import com.alibaba.fastjson2.JSONObject;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Flink map operator that, for each parsed source-database DDL event, queries the
 * big-data platform's metadata for the collection tasks reading from the changed
 * host and computes which of those tasks are impacted by the change.
 */
public class DimTableJoinerAnalyze extends RichMapFunction<ChangeOptionMessage, ImpactTaskResult> {

    private static final Logger logger = LoggerFactory.getLogger(DimTableJoinerAnalyze.class);

    /** step_cfg_val flag value meaning the collection unit is configured in "table mode". */
    private static final String IS_TABLE_MODE = "1";

    /** Enabled (state = 1) collection tasks whose step config references the host (bound as LIKE %host%). */
    private static final String COLLECT_TASK_SQL =
            "select \n" +
            "  a.unit_code,a.step_cfg_val \n" +
            "from \n" +
            "  dataplatform.dacp_datastash_unit_step a\n" +
            "  join dataplatform.dacp_dataflow_job b on a.unit_code = b.job_code\n" +
            "where \n" +
            "  a.step_cfg_val LIKE ?\n" +
            "  and b.state = 1\n";

    private final DruidClient client;

    public DimTableJoinerAnalyze(ParameterTool tool) {
        String driver = tool.get("bigdata.platform.driver");
        String url = tool.get("bigdata.platform.url");
        String username = tool.get("bigdata.platform.username");
        String password = tool.get("bigdata.platform.password");
        client = new DruidClient(driver, url, username, password);
    }

    @Override
    public ImpactTaskResult map(ChangeOptionMessage changeOptionMessage) {
        List<CollectedTable> collectedTables = queryCollectedTable(changeOptionMessage.getHost());
        return getAffectedForCollectionTask(changeOptionMessage, collectedTables);
    }

    /**
     * Queries the collection tasks configured against the given source host.
     *
     * @param ip source database host/ip used to filter step configurations
     * @return collected-table descriptors; empty when the query fails (best-effort, logged)
     */
    private List<CollectedTable> queryCollectedTable(String ip) {
        List<CollectedTable> collectedTables = new ArrayList<>();
        logger.info(MessageFormat.format("查询大数据平台: {0} 数据源下采集任务", ip));
        try {
            collectedTables = client.query(COLLECT_TASK_SQL, new DruidClient.ResultSetMapper<CollectedTable>() {
                @Override
                public CollectedTable map(ResultSet resultSet) throws SQLException {
                    return parseCollectedTable(resultSet);
                }
            }, MessageFormat.format("%{0}%", ip));
        } catch (SQLException e) {
            // Deliberate best-effort: a metadata failure yields an empty task list, not a job crash.
            logger.warn("查询大数据平台元数据表报错: ", e);
        }
        return collectedTables;
    }

    /**
     * Maps one metadata row to a {@link CollectedTable}, extracting database/table/columns
     * from the JSON step configuration when the unit is in table mode.
     *
     * @throws SQLException on column access errors
     */
    private CollectedTable parseCollectedTable(ResultSet resultSet) throws SQLException {
        String unitCode = resultSet.getString("unit_code"); // unit_code is mandatory, never null
        String stepCfgVal = resultSet.getString("step_cfg_val");
        String tbName = "";
        String dbName = "";
        String[] columns = new String[0];
        // FIX: guard against a NULL step config instead of NPE-ing and failing the whole task
        JSONObject stepCfgJson = stepCfgVal == null ? null : JSONObject.parse(stepCfgVal);
        if (stepCfgJson != null && IS_TABLE_MODE.equals(stepCfgJson.getString("isTableMode"))) {
            String tb = stepCfgJson.getString("table");
            // the "table" attribute is sometimes absent from the configuration
            if (tb != null) {
                if (tb.contains(".")) {
                    String[] tbList = tb.split("\\.");
                    dbName = tbList[0];
                    tbName = tbList[1];
                } else {
                    // no database configured, so the db name cannot be determined
                    tbName = tb;
                }
                String column = stepCfgJson.getString("column");
                if (column != null) {
                    columns = column.split(",");
                }
            }
        }
        // TODO: SQL mode (isTableMode != 1) is not handled yet; it would require parsing
        // the SQL statement to extract the databases/tables/columns it touches.
        return new CollectedTable(unitCode, dbName, tbName, columns);
    }

    /**
     * Determines which collection tasks are impacted by a DDL event by matching the
     * event's table/columns against each task's collected table and column set.
     *
     * @param coMsg           parsed DDL event from the source database
     * @param collectedTables collection tasks fetched from the big-data platform
     * @return the event paired with the (possibly empty) list of impacted tasks
     */
    private ImpactTaskResult getAffectedForCollectionTask(ChangeOptionMessage coMsg, List<CollectedTable> collectedTables) {
        String table = coMsg.getTable();
        Map<ChangeOperationType, List<ChangeDetail>> changeOperations = coMsg.getChangeOperations();

        ArrayList<ImpactTask> impactTasks = new ArrayList<>();
        for (CollectedTable collectedTable : collectedTables) {
            String objName = collectedTable.getObjName();
            // A direct table-name match affects the task. (A match that only arises because
            // the table was renamed TO this name is intentionally ignored here.)
            // FIX: Objects.equals avoids an NPE when the event carries no table name.
            if (Objects.equals(table, collectedTable.getTableName())) {
                List<String> collectCol = Arrays.asList(collectedTable.getColumns());

                for (Map.Entry<ChangeOperationType, List<ChangeDetail>> entry : changeOperations.entrySet()) {
                    ChangeOperationType operationType = entry.getKey();
                    List<ChangeDetail> changeDetails = entry.getValue();
                    switch (operationType) {
                        case ADD_COLUMN:
                        case DROP_TABLE:
                            impactTasks.add(new ImpactTask(operationType, changeDetails, objName));
                            break;
                        case RENAME_COLUMN:
                        case DROP_COLUMN:
                        case CHANGE_COLUMN: {
                            // impacted only when the task already collects the old column name
                            List<ChangeDetail> hasImpactList = changeDetails.stream()
                                    .filter(detail -> collectCol.contains(detail.getOldColumnName()))
                                    .collect(Collectors.toList());
                            if (!hasImpactList.isEmpty()) {
                                impactTasks.add(new ImpactTask(operationType, hasImpactList, objName));
                            }
                            break;
                        }
                        default:
                            // other operation types do not affect collection tasks
                            break;
                    }
                }
            } else if (changeOperations.containsKey(ChangeOperationType.RENAME_TABLE)) {
                // For RENAME_TABLE the event's table field holds the NEW name, which cannot match;
                // compare against the OLD table name carried in the change details instead.
                List<ChangeDetail> hasImpact = changeOperations.get(ChangeOperationType.RENAME_TABLE).stream()
                        // FIX: Objects.equals avoids an NPE when a detail lacks the old table name
                        .filter(changeDetail -> Objects.equals(changeDetail.getOldTableName(), collectedTable.getTableName()))
                        .collect(Collectors.toList());

                if (!hasImpact.isEmpty()) {
                    impactTasks.add(new ImpactTask(ChangeOperationType.RENAME_TABLE, hasImpact, objName));
                }
            }
        }
        return new ImpactTaskResult(coMsg, impactTasks);
    }

    @Override
    public void open(Configuration parameters) {
        client.open();
    }

    @Override
    public void close() {
        client.close();
    }
}
