package io.github.bigbird0101.datatransfer.utils;

import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import integration.data.model.dto.IntegrationColumn;
import integration.data.model.dto.batch.SrcParamDTO;
import integration.data.model.dto.batch.TgtParamDTO;
import integration.data.model.entity.TaskInstance;
import io.github.bigbird0101.datatransfer.model.config.*;
import io.github.bigbird0101.datatransfer.model.param.*;
import org.springframework.beans.BeanUtils;

import java.util.ArrayList;
import java.util.List;

import static io.github.bigbird0101.datatransfer.constants.Constant.*;

public class ReaderAndWriterConfigUtils {

    /** Static utility holder — not instantiable. */
    private ReaderAndWriterConfigUtils() {
    }

    /**
     * Builds the complete reader/writer/transformation job configuration for a
     * task instance and serializes it to JSON.
     * <p>
     * The instance's {@code taskParams} JSON is expected to carry four keys:
     * {@code transformation}, {@code jobMapping}, {@code jobSrcJson} and
     * {@code jobTgtJson}.
     *
     * @param taskInstance task instance whose {@code taskParams} holds the
     *                     source/target/column-mapping definitions
     * @return the assembled {@link TaskParameters} as a JSON string
     */
    public static String getReaderWriterConfig(TaskInstance taskInstance) {
        JSONObject param = JSONUtil.parseObj(taskInstance.getTaskParams());
        String transformation = param.getStr("transformation");
        String jobMapping = param.getStr("jobMapping");
        String jobSrcJson = param.getStr("jobSrcJson");
        String jobTgtJson = param.getStr("jobTgtJson");

        SrcParamDTO srcParamDTO = JSONUtil.toBean(jobSrcJson, SrcParamDTO.class);
        TgtParamDTO tgtParamDTO = JSONUtil.toBean(jobTgtJson, TgtParamDTO.class);
        List<IntegrationColumn> mapping = JSONUtil.toList(jobMapping, IntegrationColumn.class);

        // Transformation (conversion function) configuration — optional.
        List<TransformationParameters> transformationList = new ArrayList<>(16);
        if (StrUtil.isNotBlank(transformation)) {
            transformationList = JSONUtil.toList(transformation, TransformationParameters.class);
        }

        // Reader-side column configuration.
        // NOTE: writer-side columns are derived from the raw mapping inside
        // each writer-specific builder, so the previous writerColumn
        // pre-computation here was dead code (and its sink-type fallback
        // ternary was buggy — both branches returned the source type). It has
        // been removed.
        List<ColumnParameters> readerColumn = new ArrayList<>(mapping.size());
        for (IntegrationColumn column : mapping) {
            ColumnParameters readerColumnParameters = new ColumnParameters();
            readerColumnParameters.setName(column.getSrcName());
            readerColumnParameters.setType(column.getSrcDataType());
            readerColumn.add(readerColumnParameters);
        }

        TaskParameters jobParam = new TaskParameters();
        BeanUtils.copyProperties(taskInstance, jobParam);

        jobParam.setReader(getReaderConfig(srcParamDTO, readerColumn));
        jobParam.setWriter(getWriterConfig(tgtParamDTO, mapping));
        jobParam.setTransformation(transformationList);

        // TODO: this is the integration configuration id; when displaying
        // task-instance data it would be better to use the task instance id so
        // the configuration in effect at execution time can be queried.
        jobParam.setTaskId(taskInstance.getIntegrationId());
        jobParam.setJobName(taskInstance.getName());
        jobParam.setIntegrationType(taskInstance.getTaskType());
        return JSONUtil.toJsonStr(jobParam);
    }

    /**
     * Builds the reader-side (source) plugin configuration.
     * <p>
     * For MQ sources, {@code connectionParams} is rewritten so only the
     * selected topics (the configured "tables") are passed to the plugin; MQ
     * and API readers additionally parse their payloads as JSON.
     *
     * @param srcParamDTO  source datasource parameters
     * @param readerColumn columns to read (name/type pairs)
     * @return reader plugin configuration
     */
    public static ReaderConfig getReaderConfig(SrcParamDTO srcParamDTO, List<ColumnParameters> readerColumn) {
        String datasourceType = srcParamDTO.getDatasourceType();
        String readerPluginName = ReaderWriterPluginUtils.getReaderPluginName(datasourceType);

        ReaderParameters readerParameters = new ReaderParameters();
        readerParameters.setColumn(readerColumn);
        readerParameters.setDataBaseType(datasourceType);
        List<String> tables = srcParamDTO.getTables();
        readerParameters.setTables(tables);
        readerParameters.setConnectionParams(srcParamDTO.getConnectionParams());

        if (READER_PLUGIN_MQ.equals(readerPluginName)) {
            // Only pass the topics the user selected; each "table" is a topic.
            JSONObject connectionJson = JSONUtil.parseObj(srcParamDTO.getConnectionParams());
            JSONArray topics = JSONUtil.createArray();
            for (String table : tables) {
                JSONObject topic = JSONUtil.createObj();
                topic.set("name", table);
                topic.set("title", table);
                topics.add(topic);
            }
            connectionJson.set("topics", topics);
            readerParameters.setConnectionParams(connectionJson.toString());
            readerParameters.setParseType("json");
        } else if (READER_PLUGIN_API.equals(readerPluginName)) {
            readerParameters.setParseType("json");
        }

        ReaderConfig readerConfig = new ReaderConfig();
        readerConfig.setName(readerPluginName);
        readerConfig.setParameter(readerParameters);
        return readerConfig;
    }

    /**
     * Builds the writer-side (target) plugin configuration, dispatching to the
     * parameter builder matching the resolved writer plugin. Anything not
     * MongoDB/Elasticsearch/Cassandra/Kafka falls back to the RDBMS writer.
     *
     * @param tgtParamDTO target datasource parameters
     * @param mapping     source-to-sink column mapping
     * @return writer plugin configuration
     */
    public static WriterConfig getWriterConfig(TgtParamDTO tgtParamDTO, List<IntegrationColumn> mapping) {
        String datasourceType = tgtParamDTO.getDatasourceType();
        String writerPluginName = ReaderWriterPluginUtils.getWriterPluginName(datasourceType);

        WriterConfig writerConfig = new WriterConfig();
        writerConfig.setName(writerPluginName);

        WriterParameters writerParameters;
        switch (writerPluginName) {
            case WRITER_PLUGIN_MONGODB:
                writerParameters = getMongoDbWriterParam(tgtParamDTO, mapping);
                break;
            case WRITER_PLUGIN_ELASTICSEARC:
                writerParameters = getEsWriterParam(tgtParamDTO, mapping);
                break;
            case WRITER_PLUGIN_CASSANDRA:
                writerParameters = getCassandraWriterParam(tgtParamDTO, mapping);
                break;
            case WRITER_PLUGIN_KAFKA:
                writerParameters = getKafkaWriterParam(tgtParamDTO, mapping);
                break;
            default:
                writerParameters = getRdbmsWriterParam(tgtParamDTO, mapping);
        }

        writerConfig.setParameter(writerParameters);
        return writerConfig;
    }

    /**
     * Builds Elasticsearch writer parameters. The index name is taken from the
     * target table name; credentials and the single ip/port endpoint come from
     * {@code connectionParams}.
     *
     * @param tgtParamDTO target datasource parameters
     * @param mapping     source-to-sink column mapping
     * @return ES writer parameters
     */
    private static WriterParameters getEsWriterParam(TgtParamDTO tgtParamDTO, List<IntegrationColumn> mapping) {
        ESWriterConfig writerParameters = new ESWriterConfig();

        List<ColumnParameters> writerColumn = new ArrayList<>(mapping.size());
        for (IntegrationColumn column : mapping) {
            String sinkDataType = column.getSinkDataType();
            if (StrUtil.isBlank(sinkDataType)) {
                // No explicit sink type configured: map the source type to an ES type.
                sinkDataType = type2EsType(column.getSrcDataType());
            }
            ColumnParameters wColumnParameters = new ColumnParameters();
            wColumnParameters.setName(column.getSinkName());
            wColumnParameters.setType(sinkDataType);
            writerColumn.add(wColumnParameters);
        }

        JSONObject connectionJson = JSONUtil.parseObj(tgtParamDTO.getConnectionParams());
        String username = connectionJson.getStr("username");
        String password = connectionJson.getStr("password");
        // Index name comes from the target table name, not from connectionParams.
        String index = tgtParamDTO.getTableName();
        // TODO: handle the "settings" entry of connectionParams (currently ignored).

        // connectionParams carries a single ip/port pair; build one endpoint URL.
        List<String> addressList = new ArrayList<>(4);
        String ip = connectionJson.getStr("ip");
        String port = connectionJson.getStr("port");
        addressList.add(String.format("%s%s:%s", HTTP_PREFIX, ip, port));

        writerParameters.setEndpoint(CollectionUtil.join(addressList, ","));
        writerParameters.setIndex(index);
        writerParameters.setUsername(username);
        writerParameters.setPassword(password);
        writerParameters.setColumn(JSONUtil.toJsonStr(writerColumn));
        // Allow concurrent requests against ES.
        writerParameters.setMultiThread(true);
        writerParameters.setBatchSize(1);
        return writerParameters;
    }

    /**
     * Builds Cassandra writer parameters. The table name is taken from the
     * target table name; host/port/keyspace and credentials come from
     * {@code connectionParams}.
     *
     * @param tgtParamDTO target datasource parameters
     * @param mapping     source-to-sink column mapping
     * @return Cassandra writer parameters
     */
    private static WriterParameters getCassandraWriterParam(TgtParamDTO tgtParamDTO, List<IntegrationColumn> mapping) {
        CassandraWriterConfig writerParameters = new CassandraWriterConfig();

        // Cassandra writer only needs the sink column names.
        List<String> writerColumn = new ArrayList<>(mapping.size());
        for (IntegrationColumn column : mapping) {
            writerColumn.add(column.getSinkName());
        }

        // TODO: the connectionParams format has not been finalised.
        JSONObject connectionJson = JSONUtil.parseObj(tgtParamDTO.getConnectionParams());
        String username = connectionJson.getStr("username");
        String password = connectionJson.getStr("password");
        String host = connectionJson.getStr("host");
        Integer port = connectionJson.getInt("port");
        String keyspace = connectionJson.getStr("keyspace");
        String table = tgtParamDTO.getTableName();
        // TODO: handle the "settings" entry of connectionParams (currently ignored).

        writerParameters.setUsername(username);
        writerParameters.setPassword(password);
        writerParameters.setHost(host);
        writerParameters.setPort(port);
        writerParameters.setKeyspace(keyspace);
        writerParameters.setTable(table);
        writerParameters.setColumn(writerColumn);
        writerParameters.setBatchSize(1);
        return writerParameters;
    }

    /**
     * Builds Kafka writer parameters. The topic is taken from the target table
     * name; broker addresses come from the {@code address} array inside
     * {@code connectionParams}.
     *
     * @param tgtParamDTO target datasource parameters
     * @param mapping     source-to-sink column mapping
     * @return Kafka writer parameters
     */
    private static WriterParameters getKafkaWriterParam(TgtParamDTO tgtParamDTO, List<IntegrationColumn> mapping) {
        KafkaWriterConfig writerParameters = new KafkaWriterConfig();

        List<ColumnParameters> writerColumn = new ArrayList<>(mapping.size());
        for (IntegrationColumn column : mapping) {
            String sinkDataType = column.getSinkDataType();
            if (StrUtil.isBlank(sinkDataType)) {
                // No explicit sink type configured: fall back to the source type.
                sinkDataType = column.getSrcDataType();
            }
            ColumnParameters wColumnParameters = new ColumnParameters();
            wColumnParameters.setName(column.getSinkName());
            wColumnParameters.setType(sinkDataType);
            writerColumn.add(wColumnParameters);
        }

        JSONObject connectionJson = JSONUtil.parseObj(tgtParamDTO.getConnectionParams());

        // Assemble "ip:port,ip:port,..." for bootstrap.servers.
        JSONArray addressList = connectionJson.getJSONArray("address");
        List<String> hostList = new ArrayList<>();
        for (Object obj : addressList) {
            JSONObject address = JSONUtil.parseObj(obj);
            hostList.add(String.format("%s:%d", address.getStr("ip"), address.getInt("port")));
        }
        String bootstrapServers = StrUtil.join(",", hostList);

        // TODO: topic should eventually come from the page configuration;
        // for now the target table name is used as the topic.
        String topic = tgtParamDTO.getTableName();
        Integer retries = connectionJson.getInt("retries", 0);
        // TODO: handle the "mqSettings" entry of connectionParams (currently ignored).

        writerParameters.setTopic(topic);
        writerParameters.setRetries(retries);
        writerParameters.setBootstrapServers(bootstrapServers);
        writerParameters.setColumn(writerColumn);
        writerParameters.setBatchSize(1);
        return writerParameters;
    }

    /**
     * Builds relational-database writer parameters (the default writer).
     *
     * @param tgtParamDTO target datasource parameters
     * @param mapping     source-to-sink column mapping
     * @return RDBMS writer parameters
     */
    public static WriterParameters getRdbmsWriterParam(TgtParamDTO tgtParamDTO, List<IntegrationColumn> mapping) {
        RdbmsWriterConfig writerParameters = new RdbmsWriterConfig();

        // RDBMS writer only needs the sink column names.
        List<String> writerColumn = new ArrayList<>(mapping.size());
        for (IntegrationColumn column : mapping) {
            writerColumn.add(column.getSinkName());
        }
        writerParameters.setColumn(writerColumn);
        writerParameters.setWriteMode(tgtParamDTO.getWriteMode());
        writerParameters.setTable(tgtParamDTO.getTableName());
        writerParameters.setConnectionParams(tgtParamDTO.getConnectionParams());
        writerParameters.setDataBaseType(tgtParamDTO.getDatasourceType());
        // TODO: batchSize fixed at 1 — read/write one record at a time, no batching.
        writerParameters.setBatchSize(1);
        return writerParameters;
    }

    /**
     * Builds MongoDB writer parameters.
     *
     * @param tgtParamDTO target datasource parameters
     * @param mapping     source-to-sink column mapping
     * @return MongoDB writer parameters
     */
    public static WriterParameters getMongoDbWriterParam(TgtParamDTO tgtParamDTO, List<IntegrationColumn> mapping) {
        MongoDBWriterConfig writerParameters = new MongoDBWriterConfig();

        List<ColumnParameters> writerColumn = new ArrayList<>(mapping.size());
        for (IntegrationColumn column : mapping) {
            String sinkDataType = column.getSinkDataType();
            if (StrUtil.isBlank(sinkDataType)) {
                // No explicit sink type configured: map the source type to a MongoDB type.
                sinkDataType = type2MongoDbType(column.getSrcDataType());
            }
            ColumnParameters wColumnParameters = new ColumnParameters();
            wColumnParameters.setName(column.getSinkName());
            wColumnParameters.setType(sinkDataType);
            writerColumn.add(wColumnParameters);
        }

        writerParameters.setColumn(writerColumn);
        writerParameters.setWriteMode(tgtParamDTO.getWriteMode());
        // TODO: the connectionParams format has not been finalised.
        // TODO: obtain database, address and collection from the datasource config.
        writerParameters.setDatabase(null);
        writerParameters.setAddress(null);
        writerParameters.setCollection(null);
        writerParameters.setBatchSize(1);
        return writerParameters;
    }

    /**
     * Maps an internal column type to a MongoDB type name.
     * <p>
     * columnType → mongodbType:
     * <pre>
     * BOOL         boolean
     * STRING       string
     * INT, BYTE    long
     * FLOAT        double
     * DATE         date
     * LIST_*       array
     * </pre>
     * Unknown types are passed through unchanged.
     */
    private static String type2MongoDbType(String type) {
        switch (type) {
            case DATA_TYPE_BOOL:
                return "boolean";
            case DATA_TYPE_STRING:
                return "string";
            case DATA_TYPE_INT:
            case DATA_TYPE_BYTE:
                return "long";
            case DATA_TYPE_FLOAT:
                return "double";
            case DATA_TYPE_DATE:
                return "date";
            case DATA_TYPE_LIST_INT:
            case DATA_TYPE_LIST_STRING:
            case DATA_TYPE_LIST_FLOAT:
            case DATA_TYPE_LIST_BOOL:
            case DATA_TYPE_LIST_ANY:
                return "array";
            default:
                return type;
        }
    }

    /**
     * Maps an internal column type to an Elasticsearch field type name.
     * <p>
     * columnType → esType:
     * <pre>
     * BOOL             boolean
     * INT              integer
     * BYTE             byte
     * FLOAT            double
     * DATE             date
     * STRING, LIST_*   text
     * </pre>
     * Unknown types are passed through unchanged.
     */
    private static String type2EsType(String type) {
        switch (type) {
            case DATA_TYPE_BOOL:
                return "boolean";
            case DATA_TYPE_INT:
                return "integer";
            case DATA_TYPE_BYTE:
                return "byte";
            case DATA_TYPE_FLOAT:
                return "double";
            case DATA_TYPE_DATE:
                return "date";
            case DATA_TYPE_STRING:
            case DATA_TYPE_LIST_INT:
            case DATA_TYPE_LIST_STRING:
            case DATA_TYPE_LIST_FLOAT:
            case DATA_TYPE_LIST_BOOL:
            case DATA_TYPE_LIST_ANY:
                return "text";
            default:
                return type;
        }
    }

}
