package avicit.bdp.dcs.json.service.reader;

import avicit.bdp.common.datasource.HdfsDataSource;
import avicit.bdp.common.dto.StorageResourceConf;
import avicit.bdp.common.utils.database.ColumnInfo;
import avicit.bdp.common.utils.enums.FileType;
import avicit.bdp.core.constant.Constants;
import avicit.bdp.core.util.json.JSONUtils;
import avicit.bdp.dcs.datasource.dto.JobDatasource;
import avicit.bdp.dcs.plugin.BaseDataxParam;
import avicit.bdp.dcs.plugin.hdfs.param.HdfsDataxParam;
import avicit.bdp.dcs.semistructure.dto.FileTemplateDTO;
import avicit.bdp.dcs.semistructure.service.TemplateFieldService;
import avicit.bdp.dcs.task.dto.ColumnMappingDto;
import avicit.bdp.dcs.task.dto.TaskDTO;
import avicit.bdp.dcs.tools.enums.MeasureParaTypeEnum;
import avicit.bdp.dcs.utils.DataTypeConstants;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Component
public class HdfsReaderBuilder {

  /** Keys used in each reader column descriptor map. */
  private static final String TYPE = "type";
  private static final String NAME = "name";
  private static final String INDEX = "index";

  /**
   * Deliberately missing the closing brace: the full placeholder
   * "${HDFS_FILE_PATH-&lt;taskId&gt;}" is assembled at build time and the "}" is
   * appended after the task id (see buildHdfsReaderByFile). The placeholder is
   * substituted with the real path at run time.
   */
  private static final String HDFS_FILE_PATH = "${HDFS_FILE_PATH";
  private static final String HDFS_KRB5_PATH = "hdfs_krb5_path";
  private static final String HDFS_KEYTAB_PATH = "hdfs_keytab_path";
  // NOTE(review): the key string "hdfs_prncipal_name" is misspelled but must stay
  // as-is — consumers look the value up by this exact key. Only the (private)
  // Java identifier was corrected.
  private static final String HDFS_PRINCIPAL_NAME = "hdfs_prncipal_name";
  private static final String HDFS_CONFIG_PATH = "hdfs_config_path";

  @Autowired private TemplateFieldService templateFieldService;

  /**
   * Builds the HDFS reader-side DataX parameters for a file-collection task
   * (构建文件采集HdfsReader).
   *
   * @param taskDTO the collection task; supplies the task id (used in the runtime
   *     path placeholder), the column mapping JSON and the source template id
   * @param readerDatasource the HDFS data source; supplies defaultFS and the
   *     optional Kerberos configuration
   * @param fileTemplate the file template; supplies file type, sheet/start-line,
   *     separator and quotation settings
   * @return a fully populated {@link HdfsDataxParam}
   * @throws Exception propagated from JSON parsing or template-field lookup
   */
  public BaseDataxParam buildHdfsReaderByFile(
      TaskDTO taskDTO,
      JobDatasource readerDatasource,
      FileTemplateDTO fileTemplate)
      throws Exception {
    HdfsDataxParam dataxParam = new HdfsDataxParam();

    // Reader path is the runtime-substituted placeholder "${HDFS_FILE_PATH-<taskId>}".
    List<String> readerPathList = new ArrayList<>();
    readerPathList.add(HDFS_FILE_PATH + Constants.SUBTRACT_STRING + taskDTO.getId() + "}");
    dataxParam.setReaderPath(readerPathList);
    dataxParam.setDefaultFS(readerDatasource.getJdbcUrl());
    dataxParam.setHaveKerberos(readerDatasource.getKerberos());
    dataxParam.setSheetTab(fileTemplate.getSheetTab());
    dataxParam.setSkipHeader(true);

    String startLine = fileTemplate.getStartLine();
    if (fileTemplate.getType() == FileType.JSON.getCode()) {
      // For JSON templates the "start line" field actually carries a JSON path.
      dataxParam.setJsonPath(startLine);
    } else if (StringUtils.isNotEmpty(startLine)) {
      // startLine is 1-based; skipLines counts the rows before it.
      dataxParam.setSkipLines(Integer.parseInt(startLine) - 1);
    }

    // Kerberos credentials come from either the system-built storage config or
    // the user-supplied HDFS data source.
    if (readerDatasource.getKerberos()) {
      if (readerDatasource.getSystemBuilt() == 1) {
        StorageResourceConf conf = readerDatasource.getStorageResourceConf();
        dataxParam.setKerberosParam(
            buildKerberosParam(
                conf.getKrb5Path(),
                conf.getKeytabPath(),
                conf.getKerberosUser(),
                conf.getConfigPath()));
      } else {
        HdfsDataSource hdfsDataSource = (HdfsDataSource) readerDatasource.getBaseDataSource();
        dataxParam.setKerberosParam(
            buildKerberosParam(
                hdfsDataSource.getKrb5Path(),
                hdfsDataSource.getKeytabPath(),
                hdfsDataSource.getHdfsUser(),
                hdfsDataSource.getFilePath()));
      }
    }

    // Single/double quotation handling only applies when the feature flag is 1.
    if (fileTemplate.getQuotation() != null && fileTemplate.getQuotation() == 1) {
      dataxParam.setQuotation(fileTemplate.getQuotation());
      dataxParam.setSingleQuotation(fileTemplate.getSingleQuotation());
      dataxParam.setDoubleQuotation(fileTemplate.getDoubleQuotation());
    }

    // Resolve the numeric file-type code to its description string; falls back
    // to "" when the code matches no FileType constant (as the original did).
    String readerFileType = "";
    for (FileType fileType : FileType.values()) {
      if (fileType.getCode() == fileTemplate.getType()) {
        readerFileType = fileType.getDescp();
        break;
      }
    }
    dataxParam.setFileType(readerFileType);
    dataxParam.setFieldDelimiter(fileTemplate.getFileSeparator());

    // Build the src -> dst column-name mapping declared on the task.
    Map<String, String> columnMap = new HashMap<>(16);
    List<ColumnMappingDto> columnMappingDtoList =
        JSONUtils.toList(taskDTO.getMappingColumn(), ColumnMappingDto.class);
    if (CollectionUtils.isNotEmpty(columnMappingDtoList)) {
      for (ColumnMappingDto columnMappingDto : columnMappingDtoList) {
        columnMap.put(columnMappingDto.getSrcColumnName(), columnMappingDto.getDstColumnName());
      }
    }

    // Emit reader columns in the order configured on the template, skipping
    // template fields that have no mapping entry. (The original kept an unused
    // running counter here; it was dead code and has been removed.)
    List<Map<String, Object>> columns = new ArrayList<>();
    List<ColumnInfo> columnInfoList =
        templateFieldService.getTemplateColumnListById(taskDTO.getSrcTableName());
    if (CollectionUtils.isNotEmpty(columnInfoList)) {
      for (ColumnInfo columnInfo : columnInfoList) {
        if (columnMap.get(columnInfo.getName()) == null) {
          continue;
        }
        Map<String, Object> column = new HashMap<>(4);
        // getPrecision() carries the field order configured on the template.
        column.put(INDEX, columnInfo.getPrecision());
        column.put(NAME, columnInfo.getName());
        column.put(TYPE, mapColumnType(columnInfo.getType()));
        columns.add(column);
      }
    }
    dataxParam.setReaderColumn(columns);
    return dataxParam;
  }

  /** Assembles the Kerberos parameter map from the four individual config values. */
  private static Map<String, Object> buildKerberosParam(
      Object krb5Path, Object keytabPath, Object principalName, Object configPath) {
    Map<String, Object> kerberosParam = new HashMap<>(8);
    kerberosParam.put(HDFS_KRB5_PATH, krb5Path);
    kerberosParam.put(HDFS_KEYTAB_PATH, keytabPath);
    kerberosParam.put(HDFS_PRINCIPAL_NAME, principalName);
    kerberosParam.put(HDFS_CONFIG_PATH, configPath);
    return kerberosParam;
  }

  /**
   * Maps a template column type (a {@link MeasureParaTypeEnum} name, matched
   * case-insensitively) to the corresponding DataX reader column type.
   * Unrecognized types fall back to STRING, as in the original chain.
   */
  private static String mapColumnType(String columnType) {
    if (MeasureParaTypeEnum.BOOLEAN.name().equalsIgnoreCase(columnType)) {
      return DataTypeConstants.BOOLEAN;
    }
    if (MeasureParaTypeEnum.INTEGER.name().equalsIgnoreCase(columnType)
        || MeasureParaTypeEnum.LONG.name().equalsIgnoreCase(columnType)) {
      return DataTypeConstants.LONG;
    }
    if (MeasureParaTypeEnum.FLOAT.name().equalsIgnoreCase(columnType)
        || MeasureParaTypeEnum.DOUBLE.name().equalsIgnoreCase(columnType)) {
      return DataTypeConstants.DOUBLE;
    }
    return DataTypeConstants.STRING;
  }
}
