package com.bigdata.quality.config.converter;

import com.bigdata.quality.config.enums.DatabaseEnums;
import com.bigdata.quality.config.exception.DatabaseTypeException;

import java.util.concurrent.ConcurrentHashMap;

/**
 * Factory that resolves and caches the {@link ColumnTypeConverter} for a given
 * {@link DatabaseEnums} database type.
 *
 * <p>Converters are shared singletons (each exposed via a static {@code instance} field),
 * so caching the same instance from concurrent callers is safe. Lookup is performed
 * atomically via {@link ConcurrentHashMap#computeIfAbsent}.
 */
public class ConverterFactory {

  // Cache of resolved converters, keyed by database type.
  // computeIfAbsent makes the resolve-and-cache step atomic, unlike the
  // racy get-then-put pattern it replaces.
  private final ConcurrentHashMap<DatabaseEnums, ColumnTypeConverter> converterCache = new ConcurrentHashMap<>();

  /**
   * Returns the column-type converter for the given database type, resolving and
   * caching it on first use.
   *
   * @param databaseEnums the source database type; must not be {@code null}
   * @return the shared converter singleton for that database type
   * @throws DatabaseTypeException if {@code databaseEnums} is {@code null} or has no
   *     registered converter mapping
   */
  public ColumnTypeConverter getColumnTypeConverter(DatabaseEnums databaseEnums) {
    if (databaseEnums == null) {
      throw new DatabaseTypeException("获取ColumnTypeConverter失败，不支持的数据源类型");
    }
    return converterCache.computeIfAbsent(databaseEnums, ConverterFactory::resolveConverter);
  }

  /**
   * Maps a database type to its converter singleton.
   *
   * @throws DatabaseTypeException if no mapping exists for the given type
   */
  private static ColumnTypeConverter resolveConverter(DatabaseEnums databaseEnums) {
    switch (databaseEnums) {
      case MYSQL:
        return MySQLColumnTypeConverter.instance;
      case DORIS:
        return DorisColumnTypeConverter.instance;
      case HIVE2:
        return Hive2ColumnTypeConverter.instance;
      case GREAT_DB:
        return GreatDBColumnTypeConverter.instance;
      case SQL_SERVER:
        return MSSQLColumnTypeConverter.instance;
      case OCEAN_BASE:
        return OBMySQLColumnTypeConverter.instance;
      case POSTGRE_SQL:
        return PGColumnTypeConverter.instance;
      default:
        throw new DatabaseTypeException(String.format("获取ColumnTypeConverter失败，没有该数据源类型[%s]到flink的类型映射", databaseEnums.getCode()));
    }
  }
}
