package avicit.bdp.dcs.json.service.writer;

import avicit.bdp.common.datasource.HiveDataSource;
import avicit.bdp.common.utils.HiveUtils;
import avicit.bdp.common.utils.database.ColumnInfo;
import avicit.bdp.common.utils.database.DBUtils;
import avicit.bdp.common.utils.enums.FileType;
import avicit.bdp.core.constant.Constants;
import avicit.bdp.dcs.datasource.dto.JobDatasource;
import avicit.bdp.dcs.datasource.service.BaseDbService;
import avicit.bdp.dcs.datasource.service.DataSourceUtils;
import avicit.bdp.dcs.job.dto.JobDTO;
import avicit.bdp.dcs.json.service.util.BuildJsonUtil;
import avicit.bdp.dcs.plugin.BaseDataxParam;
import avicit.bdp.dcs.plugin.hdfs.param.HdfsDataxParam;
import avicit.bdp.dcs.semistructure.dto.HiveJsonDto;
import avicit.bdp.dcs.stream.dto.MsgTemplateDto;
import avicit.bdp.dcs.stream.service.MsgTemplateService;
import avicit.bdp.dcs.task.dto.ColumnMappingDto;
import avicit.bdp.dcs.task.dto.StreamColumnMappingDto;
import avicit.bdp.dcs.tools.enums.HiveDataType;
import avicit.bdp.dcs.tools.enums.MeasureParaTypeEnum;
import avicit.bdp.dcs.tools.enums.ProcessMode;
import avicit.bdp.dcs.utils.DatabaseUtil;
import avicit.bdp.dcs.utils.DcsConstants;
import avicit.bdp.dcs.utils.JdbcConstants;

import java.util.*;

import org.apache.commons.collections4.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class HiveWriterBuilder {

  private static final Logger logger = LoggerFactory.getLogger(HiveWriterBuilder.class);

  @Autowired private DataSourceUtils dataSourceUtils;
  @Autowired private BaseDbService baseDbService;
  @Autowired private BuildJsonUtil buildJsonUtil;
  @Autowired private MsgTemplateService msgTemplateService;

  // Keys emitted into the DataX writer JSON.
  // NOTE(review): the "prncipal" misspelling is preserved on purpose — these exact strings are
  // presumably read by the hdfswriter plugin; confirm with the plugin before renaming the values.
  private static final String JDBC_URL = "jdbcUrl";
  private static final String USERNAME = "username";
  private static final String PASSWORD = "password";
  private static final String HIVE_KRB5_PATH = "hive_krb5_path";
  private static final String HIVE_KEYTAB_PATH = "hive_keytab_path";
  private static final String HIVE_PRNCIPAL_NAME = "hive_prncipal_name";
  private static final String HIVE_PRNCIPAL = "hive_prncipal";
  private static final String TYPE = "type";
  private static final String NAME = "name";
  private static final String TASK_TYPE = "1";
  private static final String TABLENAME = "tableName";
  private static final String PATH = "path";
  private static final String DEVICEID = "deviceId";
  // HDFS写入模式 (HDFS write mode)
  private static final String HDFS_WRITE_MODE_APPEND = "append";
  private static final int MAX_FILE_SIZE = 1024;

  /**
   * Builds the HDFS/Hive writer parameters for a structured (RDBMS-to-Hive) collection job.
   *
   * @param jobDTO job definition; its process mode selects how target columns are derived
   * @param hiveJsonDto Hive target description (table, file type, separator, file name)
   * @param srcColumnList source table columns (used when the target table is (re)created)
   * @param columnMap source column name to destination column name mapping
   * @param columnMappingDtoList explicit column mappings (used when the target table is kept)
   * @param writerDatasource target datasource; Hive connection info is applied when its type is HIVE
   * @return the populated {@link HdfsDataxParam}
   * @throws Exception from metadata lookups (table existence, Hive table location, type conversion)
   */
  public BaseDataxParam buildHiveWriterByStructure(
      JobDTO jobDTO,
      HiveJsonDto hiveJsonDto,
      List<ColumnInfo> srcColumnList,
      Map<String, String> columnMap,
      List<ColumnMappingDto> columnMappingDtoList,
      JobDatasource writerDatasource)
      throws Exception {
    // 1、构建Writer
    HdfsDataxParam dataxParam = new HdfsDataxParam();

    if (JdbcConstants.HIVE.equals(writerDatasource.getDbType())) {
      applyHiveDatasourceConfig(dataxParam, (HiveDataSource) writerDatasource.getBaseDataSource());
    }

    // 2、文件类型 (file type)
    dataxParam.setFileType(resolveFileType(hiveJsonDto));

    // 3、根据hive表获取对应hdfs对应存储路径
    // The default warehouse path uses the lower-cased table name here, unlike the
    // semi-structured flow which keeps the original case.
    String hiveTableLocation =
        resolveHiveTableLocation(
            jobDTO,
            writerDatasource,
            hiveJsonDto.getDstTable(),
            hiveJsonDto.getDstTable().toLowerCase());

    // 4、设置path
    dataxParam.setWriterPath(hiveTableLocation);
    dataxParam.setFileName(DBUtils.convertTableName(JdbcConstants.HIVE, hiveJsonDto.getFileName()));
    dataxParam.setWriteMode(HDFS_WRITE_MODE_APPEND);
    dataxParam.setFieldDelimiter(hiveJsonDto.getFileSeparator());

    // 5、设置column
    // Modes that (re)create the table derive name/type from the source schema; modes that keep
    // the table use the explicit column mappings.
    List<Map<String, Object>> writerColumn = new ArrayList<>();
    JobDatasource srcDatasource = dataSourceUtils.convertDatasource(jobDTO.getSrcDatasourceId());

    if (jobDTO.getProcessMode() == ProcessMode.COVER_DST_TABLE.getCode()
        || jobDTO.getProcessMode() == ProcessMode.PRECHECK_ERROR.getCode()) {
      if (CollectionUtils.isNotEmpty(srcColumnList)) {
        for (ColumnInfo columnInfo : srcColumnList) {
          String dstName = columnMap.get(columnInfo.getName());
          if (dstName == null) {
            continue; // unmapped source columns are not written
          }
          Map<String, Object> map = new HashMap<>();
          map.put(NAME, buildJsonUtil.convertColumnName(writerDatasource.getDbType(), dstName));
          map.put(
              TYPE,
              buildJsonUtil.convertDataType(srcDatasource.getDbType(), columnInfo.getType()));
          writerColumn.add(map);
        }
      }
    } else if (jobDTO.getProcessMode() == ProcessMode.RETAIN_DST_TABLE.getCode()
        || jobDTO.getProcessMode() == ProcessMode.CLEAR_DST_TABLE_DATA.getCode()) {
      for (ColumnMappingDto columnMappingDto : columnMappingDtoList) {
        Map<String, Object> map = new HashMap<>();
        // NOTE(review): no null guard on columnMap lookup here, matching prior behavior —
        // mappings are presumably complete in retain/clear modes; verify against callers.
        map.put(
            NAME,
            buildJsonUtil.convertColumnName(
                writerDatasource.getDbType(), columnMap.get(columnMappingDto.getSrcColumnName())));
        map.put(
            TYPE,
            buildJsonUtil.convertDataType(
                srcDatasource.getDbType(), columnMappingDto.getSrcColumnType()));
        writerColumn.add(map);
      }
    }

    dataxParam.setWriterColumn(writerColumn);
    dataxParam.setSourceType(JdbcConstants.HDFS);
    return dataxParam;
  }

  /**
   * Builds the HDFS/Hive writer parameters for a semi-structured collection job.
   *
   * @param jobDTO job definition; the destination datasource id and process mode are read
   * @param hiveJsonDto Hive target description (table, file type, separator, file name)
   * @param columnInfoList parsed source columns (used when the target table is (re)created)
   * @param columnMap source column name to destination column name mapping
   * @param columnMappingDtoList explicit column mappings (used when the target table is kept)
   * @param orderMap column position to source column name; fixes the final column order
   * @return the populated {@link HdfsDataxParam}
   * @throws Exception from datasource conversion and metadata lookups
   */
  public BaseDataxParam buildHiveWriterBySemi(
      JobDTO jobDTO,
      HiveJsonDto hiveJsonDto,
      List<ColumnInfo> columnInfoList,
      Map<String, String> columnMap,
      List<ColumnMappingDto> columnMappingDtoList,
      Map<Integer, String> orderMap)
      throws Exception {

    HdfsDataxParam dataxParam = new HdfsDataxParam();
    // 1、构建Writer
    dataxParam.setFileType(resolveFileType(hiveJsonDto));

    JobDatasource writerDatasource = dataSourceUtils.convertDatasource(jobDTO.getDstDatasourceId());
    applyHiveDatasourceConfig(dataxParam, (HiveDataSource) writerDatasource.getBaseDataSource());

    // 3、根据hive表获取对应hdfs对应存储路径 (original table-name case is kept for the default path)
    String hiveTableLocation =
        resolveHiveTableLocation(
            jobDTO, writerDatasource, hiveJsonDto.getDstTable(), hiveJsonDto.getDstTable());

    // 4、设置path
    dataxParam.setWriterPath(hiveTableLocation);
    dataxParam.setFileName(DBUtils.convertTableName(JdbcConstants.HIVE, hiveJsonDto.getFileName()));
    dataxParam.setWriteMode(HDFS_WRITE_MODE_APPEND);
    dataxParam.setFieldDelimiter(hiveJsonDto.getFileSeparator());

    // 5、设置column
    List<Map<String, Object>> writerColumn = new ArrayList<>();

    if (jobDTO.getProcessMode() == ProcessMode.COVER_DST_TABLE.getCode()
        || jobDTO.getProcessMode() == ProcessMode.PRECHECK_ERROR.getCode()) {
      if (CollectionUtils.isNotEmpty(columnInfoList)) {
        for (ColumnInfo columnInfo : columnInfoList) {
          String dstName = columnMap.get(columnInfo.getName());
          if (dstName == null) {
            continue; // unmapped source columns are not written
          }
          Map<String, Object> map = new HashMap<>(16);
          map.put(NAME, dstName);
          putMeasureNameType(map, columnInfo.getType());
          writerColumn.add(map);
        }
      }
    } else if (jobDTO.getProcessMode() == ProcessMode.RETAIN_DST_TABLE.getCode()
        || jobDTO.getProcessMode() == ProcessMode.CLEAR_DST_TABLE_DATA.getCode()) {
      for (ColumnMappingDto columnMappingDto : columnMappingDtoList) {
        Map<String, Object> map = new HashMap<>();
        map.put(NAME, columnMap.get(columnMappingDto.getSrcColumnName()));
        putMeasureNameType(map, columnMappingDto.getSrcColumnType());
        writerColumn.add(map);
      }
    }

    // Re-order columns following orderMap (position -> source column name).
    List<Map<String, Object>> orderWriterColumn = new ArrayList<>();
    for (String srcColumnName : orderMap.values()) {
      String columnName = columnMap.get(srcColumnName);
      if (columnName == null) {
        // FIX: previously dereferenced without a guard and threw NPE when the mapping was
        // missing (the cover-mode branch above explicitly tolerates missing mappings).
        continue;
      }
      for (Map<String, Object> map : writerColumn) {
        if (columnName.equals(String.valueOf(map.get(NAME)))) {
          orderWriterColumn.add(map);
        }
      }
    }

    dataxParam.setWriterColumn(orderWriterColumn);
    dataxParam.setSourceType(JdbcConstants.HDFS);
    return dataxParam;
  }

  /**
   * Builds the HDFS/Hive writer parameters for a streaming (Kafka-to-Hive) collection job.
   * Always writes ORC and fans out per-table paths, device ids and column lists.
   *
   * @param jobDTO job definition; the process mode selects how column types are derived
   * @param streamColumnMappingDtoList one entry per target table with its column mappings
   * @param writerDatasource target datasource; Hive connection info is applied when its type is HIVE
   * @return the populated {@link HdfsDataxParam}
   * @throws Exception from metadata lookups (table existence, Hive table location)
   */
  public BaseDataxParam buildHiveWriterByStream(
      JobDTO jobDTO,
      List<StreamColumnMappingDto> streamColumnMappingDtoList,
      JobDatasource writerDatasource)
      throws Exception {
    // 1、构建Writer
    HdfsDataxParam dataxParam = new HdfsDataxParam();

    if (JdbcConstants.HIVE.equals(writerDatasource.getDbType())) {
      applyHiveDatasourceConfig(dataxParam, (HiveDataSource) writerDatasource.getBaseDataSource());
    }
    // Streaming jobs always write ORC.
    dataxParam.setFileType(FileType.ORC.getDescp());

    // 3、设置paths — one HDFS location per target table
    List<Map<String, Object>> pathList = new ArrayList<>();
    if (CollectionUtils.isNotEmpty(streamColumnMappingDtoList)) {
      for (StreamColumnMappingDto streamColumnMappingDto : streamColumnMappingDtoList) {
        String tableName = streamColumnMappingDto.getTableName();
        Map<String, Object> map = new HashMap<>();
        map.put(TABLENAME, tableName);
        map.put(PATH, resolveHiveTableLocation(jobDTO, writerDatasource, tableName, tableName));
        pathList.add(map);
      }
    }
    dataxParam.setWriterPaths(pathList);

    dataxParam.setWriteMode(HDFS_WRITE_MODE_APPEND);
    dataxParam.setFieldDelimiter(Constants.COMMA);
    dataxParam.setMaxFileSize(MAX_FILE_SIZE);

    // 4、设置tbl2DeviceMap — table name to device id
    List<Map<String, Object>> tbl2DeviceMapList = new ArrayList<>();
    if (CollectionUtils.isNotEmpty(streamColumnMappingDtoList)) {
      for (StreamColumnMappingDto streamColumnMappingDto : streamColumnMappingDtoList) {
        Map<String, Object> map = new HashMap<>();
        map.put(TABLENAME, streamColumnMappingDto.getTableName());
        MsgTemplateDto msgTemplateDto =
            msgTemplateService.selectByPrimaryKey(streamColumnMappingDto.getTemplateId());
        if (msgTemplateDto != null) {
          map.put(DEVICEID, msgTemplateDto.getDeviceId());
        }
        tbl2DeviceMapList.add(map);
      }
    }
    dataxParam.setTbl2DeviceMap(tbl2DeviceMapList);

    // 5、设置columns — per-table column name/type lists
    List<HdfsDataxParam.HdfsParam> writerColumnList = new ArrayList<>();
    if (CollectionUtils.isNotEmpty(streamColumnMappingDtoList)) {
      for (StreamColumnMappingDto streamColumnMappingDto : streamColumnMappingDtoList) {
        HdfsDataxParam.HdfsParam hdfsParam = new HdfsDataxParam.HdfsParam();
        hdfsParam.setTableName(streamColumnMappingDto.getTableName());
        // 获取设备id
        MsgTemplateDto msgTemplateDto =
            msgTemplateService.selectByPrimaryKey(streamColumnMappingDto.getTemplateId());
        // FIX: guard against a missing template — the lookup above (step 4) already
        // null-checks the same call, this one previously dereferenced unconditionally.
        if (msgTemplateDto != null) {
          hdfsParam.setDeviceId(msgTemplateDto.getDeviceId());
        }

        List<Map<String, Object>> column = new ArrayList<>();
        List<ColumnMappingDto> columnList = streamColumnMappingDto.getColumnList();
        if (CollectionUtils.isNotEmpty(columnList)) {
          for (ColumnMappingDto columnMappingDto : columnList) {
            Map<String, Object> map = new HashMap<>();
            map.put(NAME, columnMappingDto.getDstColumnName());
            if (jobDTO.getProcessMode() == ProcessMode.COVER_DST_TABLE.getCode()
                || jobDTO.getProcessMode() == ProcessMode.PRECHECK_ERROR.getCode()) {
              // 如果是新建表，需要根据模板和iotdb数据类型进行转换
              putStreamMeasureType(map, columnMappingDto.getSrcColumnType());
            } else if (jobDTO.getProcessMode() == ProcessMode.RETAIN_DST_TABLE.getCode()) {
              // 如果是保留表，类型直接用表中的字段类型，不需要转换
              map.put(TYPE, columnMappingDto.getDstColumnType());
            }
            column.add(map);
          }
        }
        hdfsParam.setColumn(column);
        writerColumnList.add(hdfsParam);
      }
    }
    dataxParam.setWriterColumns(writerColumnList);

    dataxParam.setTaskType(TASK_TYPE);
    dataxParam.setSourceType(JdbcConstants.KAFKA);
    return dataxParam;
  }

  /**
   * Applies defaultFS, optional Kerberos settings, and the Hive JDBC connection info from the
   * datasource onto the writer parameters. Shared by all three build flows.
   */
  private void applyHiveDatasourceConfig(HdfsDataxParam dataxParam, HiveDataSource hiveDataSource) {
    dataxParam.setDefaultFS(hiveDataSource.getDefaultFS());

    // Kerberos is enabled when the datasource flag equals 1.
    if (hiveDataSource.getKerberos() != null && hiveDataSource.getKerberos() == 1) {
      dataxParam.setHaveKerberos(true);
      Map<String, Object> kerberosParam = new HashMap<>();
      kerberosParam.put(HIVE_KRB5_PATH, hiveDataSource.getKrb5Path());
      kerberosParam.put(HIVE_KEYTAB_PATH, hiveDataSource.getKeytabPath());
      kerberosParam.put(HIVE_PRNCIPAL_NAME, hiveDataSource.getUser());
      kerberosParam.put(HIVE_PRNCIPAL, hiveDataSource.getPrincipal());
      dataxParam.setKerberosParam(kerberosParam);
      logger.info("kerberosParam={}", kerberosParam);
    }

    // 设置hiveConnection
    Map<String, Object> hiveConnectionMap = new HashMap<>();
    hiveConnectionMap.put(JDBC_URL, hiveDataSource.getJdbcUrl());
    hiveConnectionMap.put(USERNAME, hiveDataSource.getUser());
    hiveConnectionMap.put(PASSWORD, hiveDataSource.getPassword());
    dataxParam.setHiveConnection(hiveConnectionMap);
  }

  /** Maps the numeric file type code on the DTO to its descriptor; unknown codes fall back to TEXT. */
  private String resolveFileType(HiveJsonDto hiveJsonDto) {
    if (hiveJsonDto.getFileType() == FileType.CSV.getCode()) {
      return FileType.CSV.getDescp();
    } else if (hiveJsonDto.getFileType() == FileType.ORC.getCode()) {
      return FileType.ORC.getDescp();
    }
    return FileType.TEXT.getDescp();
  }

  /**
   * Resolves the HDFS location of the target Hive table: an existing table kept by a
   * RETAIN_DST_TABLE job reads its location from the metastore; every other case uses the
   * conventional warehouse path with {@code defaultPathTable} as the directory name.
   */
  private String resolveHiveTableLocation(
      JobDTO jobDTO, JobDatasource writerDatasource, String tableName, String defaultPathTable)
      throws Exception {
    if (baseDbService.isExistTable(writerDatasource.getId(), tableName)
        && jobDTO.getProcessMode() == ProcessMode.RETAIN_DST_TABLE.getCode()) {
      return HiveUtils.getHiveTableLocation(
          DatabaseUtil.getConnection(writerDatasource), tableName);
    }
    return DcsConstants.HIVE_TABLE_LOCATION + defaultPathTable;
  }

  /**
   * Puts the Hive type mapped from a measure enum NAME (exact, case-sensitive match) into the
   * column map; leaves the "type" key absent for unrecognized values, matching prior behavior.
   */
  private void putMeasureNameType(Map<String, Object> map, String type) {
    String hiveType = null;
    if (type.equals(MeasureParaTypeEnum.BOOLEAN.name())) {
      hiveType = HiveDataType.BOOLEAN.getDescp();
    } else if (type.equals(MeasureParaTypeEnum.INTEGER.name())) {
      hiveType = HiveDataType.INT.getDescp();
    } else if (type.equals(MeasureParaTypeEnum.LONG.name())) {
      hiveType = HiveDataType.BIGINT.getDescp();
    } else if (type.equals(MeasureParaTypeEnum.FLOAT.name())) {
      hiveType = HiveDataType.FLOAT.getDescp();
    } else if (type.equals(MeasureParaTypeEnum.DOUBLE.name())) {
      hiveType = HiveDataType.DOUBLE.getDescp();
    } else if (type.equals(MeasureParaTypeEnum.STRING.name())) {
      hiveType = HiveDataType.STRING.getDescp();
    }
    if (hiveType != null) {
      map.put(TYPE, hiveType);
    }
  }

  /**
   * Puts the Hive type mapped from a measure type string (case-insensitive match against
   * {@code MeasureParaTypeEnum.getType()}) into the column map; leaves the "type" key absent for
   * unrecognized values, matching prior behavior.
   */
  private void putStreamMeasureType(Map<String, Object> map, String type) {
    String hiveType = null;
    if (type.equalsIgnoreCase(MeasureParaTypeEnum.STRING.getType())) {
      hiveType = HiveDataType.STRING.getDescp();
    } else if (type.equalsIgnoreCase(MeasureParaTypeEnum.BOOLEAN.getType())) {
      hiveType = HiveDataType.BOOLEAN.getDescp();
    } else if (type.equalsIgnoreCase(MeasureParaTypeEnum.BYTE.getType())) {
      hiveType = HiveDataType.TINYINT.getDescp();
    } else if (type.equalsIgnoreCase(MeasureParaTypeEnum.CHAR.getType())) {
      hiveType = HiveDataType.CHAR.getDescp();
    } else if (type.equalsIgnoreCase(MeasureParaTypeEnum.DATE.getType())) {
      hiveType = HiveDataType.DATE.getDescp();
    } else if (type.equalsIgnoreCase(MeasureParaTypeEnum.DOUBLE.getType())) {
      hiveType = HiveDataType.DOUBLE.getDescp();
    } else if (type.equalsIgnoreCase(MeasureParaTypeEnum.FLOAT.getType())) {
      hiveType = HiveDataType.FLOAT.getDescp();
    } else if (type.equalsIgnoreCase(MeasureParaTypeEnum.INTEGER.getType())) {
      hiveType = HiveDataType.INT.getDescp();
    } else if (type.equalsIgnoreCase(MeasureParaTypeEnum.LONG.getType())) {
      hiveType = HiveDataType.BIGINT.getDescp();
    } else if (type.equalsIgnoreCase(MeasureParaTypeEnum.SHORT.getType())) {
      hiveType = HiveDataType.SMALLINT.getDescp();
    }
    if (hiveType != null) {
      map.put(TYPE, hiveType);
    }
  }
}
