package org.sxp.common.utils.datax;


import cn.hutool.core.util.NumberUtil;
import cn.hutool.core.util.StrUtil;
import com.google.common.collect.Maps;
import lombok.Data;
import org.sxp.common.dto.datax.*;
import org.sxp.common.enums.DbTypeEnum;
import org.sxp.common.exception.DataAccessException;
import org.sxp.common.utils.datax.reader.*;
import org.sxp.common.utils.datax.writer.*;
import org.sxp.modules.dataworks.dto.SyncDTO;
import org.sxp.common.dto.datax.DataXCore.Transport;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;


/**
 * Builds the DataX job JSON structure (reader, writer, speed/error settings and
 * core transport configuration) from a {@link SyncDTO} sync definition.
 *
 * @author jingwk
 * @ClassName DataxJsonHelper
 * @Version 2.1.1
 * @since 2020/03/14 08:24
 */
@Data
public class DataxJsonHelper {

    /** Key for the bytes-per-second limit in DataX speed maps. */
    public static final String BYTE = "byte";
    /** Key for the records-per-second limit in DataX speed maps. */
    public static final String RECORD = "record";
    /** Key for the channel (parallelism) count in DataX speed maps. */
    public static final String CHANNEL = "channel";
    /** Each channel gets 1/5 of the global record limit. */
    public static final int INT_5 = 5;
    /** Each channel gets 1/10 of the global byte limit. */
    public static final int INT_10 = 10;

    /**
     * Tables to read from. Per the DataX examples multiple tables are supported
     * (not implemented yet — kept as a list for when it is).
     * <p>
     * For the target side, one or more tables may be written; when several are
     * configured they must all share the same schema.
     */
    private List<String> readerTables;
    /**
     * Columns to read.
     */
    private List<String> readerColumns;

    /**
     * Tables to write to.
     */
    private List<String> writerTables;
    /**
     * Columns to write.
     */
    private List<String> writerColumns;

    private Map<String, Object> buildReader;

    private Map<String, Object> buildWriter;

    /**
     * Reverse-builds the reader/writer configuration back onto the DTO.
     * Each side is only processed when its db type is present.
     *
     * @param syncDTO the sync definition to back-fill; mutated in place
     * @return the same {@code syncDTO} instance, for chaining
     */
    public static SyncDTO backBuildDataXJob(SyncDTO syncDTO) {
        if (StrUtil.isNotBlank(syncDTO.getSourceDbType())) {
            initReader(syncDTO).backBuild();
        }
        if (StrUtil.isNotBlank(syncDTO.getTargetDbType())) {
            initWriter(syncDTO).backBuild();
        }
        return syncDTO;
    }

    /**
     * Builds the full DataX job root object for the given sync definition:
     * content (reader + writer), job-level speed/error settings, and the
     * per-channel transport speed.
     *
     * @param syncDTO the sync definition
     * @return the assembled DataX job root, ready for JSON serialization
     * @throws DataAccessException if the source or target db type is unsupported
     */
    public static DataXJobRoot buildDataXJob(SyncDTO syncDTO) {
        IDataxReader reader = initReader(syncDTO);
        IDataxWriter writer = initWriter(syncDTO);

        DataXJobRoot root = new DataXJobRoot();
        DataXJob job = new DataXJob();
        DataXContent content = new DataXContent();
        content.addAll(buildContent(syncDTO, reader, writer));

        DataXSetting setting = new DataXSetting();
        setting.setSpeed(buildSpeedMap(syncDTO));
        if (syncDTO.getErrorLimit() != null) {
            // "errorLimit": dirty-record tolerance before the job fails:
            //   "record"     - absolute count of failed records allowed
            //   "percentage" - failed-record ratio (1.0 == 100%, 0.02 == 2%)
            Map<String, Object> errorLimit = new HashMap<>(4);
            errorLimit.put("record", syncDTO.getErrorLimit());
            errorLimit.put("percentage", "1");
            setting.setErrorLimit(errorLimit);
        }

        job.setSetting(setting);
        job.setContent(content);

        DataXCore core = new DataXCore();
        Transport transport = core.new Transport();
        DataXCore.Transport.Channel channel = transport.new Channel();
        transport.setChannel(channel);
        core.setTransport(transport);
        Map<String, Object> speed = new HashMap<>(4);
        channel.setSpeed(speed);
        // DataX rejects a job that has a global bps/record limit but no
        // per-channel limit, so always populate both per-channel values.
        speed.put(BYTE, perChannelLimit(setting.getSpeed().get(BYTE), INT_10));
        speed.put(RECORD, perChannelLimit(setting.getSpeed().get(RECORD), INT_5));
        root.setCore(core);
        root.setJob(job);
        return root;
    }

    /**
     * Derives a single channel's limit from the job-wide limit.
     *
     * @param total   the global limit (bytes/s or records/s); {@code <= 0} means unlimited
     * @param divisor the share each channel receives (e.g. 10 -> 1/10 of total)
     * @return {@code -1} when there is no global limit; otherwise a 1/divisor
     *         share rounded up, or the total itself when it does not exceed the
     *         divisor (avoids a zero per-channel share)
     */
    private static int perChannelLimit(int total, int divisor) {
        if (total <= 0) {
            // No global limit configured: -1 disables the per-channel limit.
            return -1;
        }
        return total > divisor ? NumberUtil.ceilDiv(total, divisor) : total;
    }

    /**
     * Builds the job-level speed map from the DTO.
     * <p>
     * Byte and record limits are taken from the DTO ({@code -1} when absent or
     * non-positive). The explicit channel count is only honoured when neither
     * byte nor record limit is set, because DataX derives channel parallelism
     * from the limits when they are present.
     *
     * @param syncDTO the sync definition
     * @return an insertion-ordered map with {@code byte}, {@code record},
     *         {@code channel} and {@code batchSize} entries
     */
    private static Map<String, Integer> buildSpeedMap(SyncDTO syncDTO) {
        Map<String, Integer> speedMap = Maps.newLinkedHashMap();
        // syncRate is configured in MB/s; DataX expects bytes/s.
        speedMap.put(BYTE, syncDTO.getSyncRate() != null && syncDTO.getSyncRate() > 0 ? syncDTO.getSyncRate() * 1024 * 1024 : -1);
        speedMap.put(RECORD, syncDTO.getSyncRecord() != null && syncDTO.getSyncRecord() > 0 ? syncDTO.getSyncRecord() : -1);

        if (speedMap.get(BYTE) == -1 && speedMap.get(RECORD) == -1) {
            speedMap.put(CHANNEL, syncDTO.getSyncNum() != null ? syncDTO.getSyncNum() : -1);
        } else {
            speedMap.put(CHANNEL, -1);
        }
        // Batch commit size per write.
        speedMap.put("batchSize", 2048);
        return speedMap;
    }

    /**
     * Assembles the single "content" entry holding the built reader and writer
     * configurations.
     *
     * @param syncDTO the sync definition (reserved for future use)
     * @param reader  the initialized reader
     * @param writer  the initialized writer
     * @return a one-element list containing the reader/writer map
     */
    private static List<Map<String, Object>> buildContent(SyncDTO syncDTO, IDataxReader reader, IDataxWriter writer) {
        Map<String, Object> obj = Maps.newLinkedHashMap();
        obj.put("reader", reader.build());
        obj.put("writer", writer.build());

        List<Map<String, Object>> result = new ArrayList<>();
        result.add(obj);
        return result;
    }

    /**
     * Instantiates the reader implementation matching the DTO's source db type.
     *
     * @param syncDTO the sync definition
     * @return the reader for the source db type
     * @throws DataAccessException when the source db type is unsupported
     */
    private static IDataxReader initReader(SyncDTO syncDTO) {
        IDataxReader reader;
        if (DbTypeEnum.mysql.getCode().equals(syncDTO.getSourceDbType())) {
            reader = new MysqlReader(syncDTO);
        } else if (DbTypeEnum.oracle.getCode().equals(syncDTO.getSourceDbType())) {
            reader = new OracleReader(syncDTO);
        } else if (DbTypeEnum.sqlServer.getCode().equals(syncDTO.getSourceDbType())) {
            reader = new SqlServerReader(syncDTO);
        } else if (DbTypeEnum.dm.getCode().equals(syncDTO.getSourceDbType())) {
            // DM (Dameng) goes through the generic RDBMS reader.
            reader = new RdbmsReader(syncDTO);
        } else if (DbTypeEnum.postgresql.getCode().equals(syncDTO.getSourceDbType())) {
            reader = new PostgresqlReader(syncDTO);
        } else if (DbTypeEnum.oceanbase.getCode().equals(syncDTO.getSourceDbType())) {
            reader = new OceanBaseReader(syncDTO);
        } else if (DbTypeEnum.mongodb.getCode().equals(syncDTO.getSourceDbType())) {
            reader = new MongoDbReader(syncDTO);
        } else if (DbTypeEnum.hive.getCode().equals(syncDTO.getSourceDbType())) {
            reader = new HiveReader(syncDTO);
        } else if (DbTypeEnum.hbase.getCode().equals(syncDTO.getSourceDbType())) {
            reader = new HbaseReader(syncDTO);
        } else {
            throw new DataAccessException("DataX初始化读配置，暂不支持的数据源类型!!!");
        }
        return reader;
    }

    /**
     * Instantiates the writer implementation matching the DTO's target db type.
     *
     * @param syncDTO the sync definition
     * @return the writer for the target db type
     * @throws DataAccessException when the target db type is unsupported
     */
    private static IDataxWriter initWriter(SyncDTO syncDTO) {
        IDataxWriter writer;
        if (DbTypeEnum.mysql.getCode().equals(syncDTO.getTargetDbType())) {
            writer = new MysqlWriter(syncDTO);
        } else if (DbTypeEnum.oracle.getCode().equals(syncDTO.getTargetDbType())) {
            writer = new OracleWriter(syncDTO);
        } else if (DbTypeEnum.sqlServer.getCode().equals(syncDTO.getTargetDbType())) {
            writer = new SqlServerWriter(syncDTO);
        } else if (DbTypeEnum.dm.getCode().equals(syncDTO.getTargetDbType())) {
            // DM (Dameng) goes through the generic RDBMS writer.
            writer = new RdbmsWriter(syncDTO);
        } else if (DbTypeEnum.postgresql.getCode().equals(syncDTO.getTargetDbType())) {
            writer = new PostgresqlWriter(syncDTO);
        } else if (DbTypeEnum.oceanbase.getCode().equals(syncDTO.getTargetDbType())) {
            writer = new OceanBaseWriter(syncDTO);
        } else if (DbTypeEnum.mongodb.getCode().equals(syncDTO.getTargetDbType())) {
            writer = new MongoDbWriter(syncDTO);
        } else if (DbTypeEnum.hive.getCode().equals(syncDTO.getTargetDbType())) {
            writer = new HiveWriter(syncDTO);
        } else if (DbTypeEnum.hbase.getCode().equals(syncDTO.getTargetDbType())) {
            writer = new HbaseWriter(syncDTO);
        } else {
            throw new DataAccessException("DataX初始化写配置，暂不支持的数据源类型!!!");
        }
        return writer;
    }
}
