package com.kingsoft.dc.khaos.module.spark.source

import java.sql.{Date, Timestamp}

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.extender.meta.api.DmTableSplit
import com.kingsoft.dc.khaos.metadata.{Dependency, KhaosStructField}
import com.kingsoft.dc.khaos.module.spark.constants.{MetaDataConstants, MppConstants, MppSqlTypes, SchedulerConstants}
import com.kingsoft.dc.khaos.module.spark.metadata.source.{GreenPlumSourceConfig, MppSourceConfig}
import com.kingsoft.dc.khaos.module.spark.model.cos.CosAccessConfig
import com.kingsoft.dc.khaos.module.spark.request.model.{JdbcConnectEntity, StructFieldEntity}
import com.kingsoft.dc.khaos.module.spark.util.CosApiUtils.{createBucketIfNotExist, getCosAccessAkSk}
import com.kingsoft.dc.khaos.module.spark.util._
import com.kingsoft.dc.khaos.util.Logging
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, Row}
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.{compact, parse, render}

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer

/**
 * Created by yansu on 2019/06/19
 */
/**
 * Source strategy for MPP-family databases (HashData/KDW, GreenPlum, HAWQ).
 *
 * For the HashData flavour, data is extracted by creating a WRITABLE EXTERNAL
 * TABLE backed by COS object storage, inserting the source rows into it (which
 * makes the MPP cluster write the data out to COS), and then reading the staged
 * files back into a Spark DataFrame. GreenPlum and HAWQ are delegated to their
 * dedicated source implementations.
 */
class MppSource extends SourceStrategy with Logging with Serializable {
  override type MapSet = Map[String, String]

  private var _connectEntity: JdbcConnectEntity = null
  private var _cosConfig: CosAccessConfig = new CosAccessConfig
  private var _cosFs: FileSystem = null
  private var _mpp_Config: MppSourceConfig = null
  private var _kc: KhaosContext = null

  // Physical address of the MPP database, resolved from the metadata service.
  private var _host: String = null
  private var _port: String = null
  private var _username: String = null
  private var _password: String = null
  private var _instansename: String = null
  // Quoted ("...") database / table identifiers, set in source().
  private var dbName = ""
  private var tblName = ""

  // Defaults, overridden by loadProperties() from "module.mpp.source.*" config.
  private var _loginTimeout: Int = 6000
  private var _region = "ap-beijing"
  private var _bucket = "1"
  private var _endpoint = "cos.ap-beijing.myqcloud.com"
  private var _delimeter = ";"

  // Temporary COS directory used to stage exported data.
  private val COS_WORK_DIR = "/di/.working_output"
  // Captured once so external-table names and COS paths stay consistent
  // across the whole run.
  private val currTime = System.currentTimeMillis()
  private var _module_id = ""

  // Split-table (sharding) state; populated by getMppExtTableSql().
  private var tblNameList: List[String] = List[String]()
  private var tableSplit: DmTableSplit = null

  // Object-storage type understood by the MPP external-table protocol.
  private val OSS_TYPE = "cos"
  // Storage format of the staged files on COS.
  private val FILE_FORMAT = "orc"

  /** Supported staging-file formats. */
  object FormatEnum {
    val ORC_FORMAT = "orc"
    val CSV_FORMAT = "csv"
    val TEXT_FORMAT = "text"
    val PARQUET_FORMAT = "parquet"
  }

  /** Known MPP source flavours (compared case-insensitively). */
  object SourceMode {
    val HashData = "hashdata" // mpp
    val GreenPlum = "greenplum"
    val HAWQ = "hawq"
  }

  // Field delimiter for staged files; must be an ASCII character.
  private var DELIMETER: String = "|"
  private var _dataSourceType: String = ""

  /** Connection settings passed through to delegated GreenPlum/HAWQ sources. */
  private def physicalConnectConfig: Map[String, String] = Map(
    "host" -> _host,
    "port" -> _port,
    "username" -> _username,
    "password" -> _password,
    "instansename" -> _instansename)

  /**
   * Data extraction entry point.
   *
   * Parses the module config, checks read authorization, resolves the physical
   * data source, and dispatches on the source flavour.
   *
   * @param kc         Khaos runtime context
   * @param module_id  id of this pipeline module
   * @param config     JSON module configuration
   * @param dependence upstream dependency descriptor
   * @return extracted data as a DataFrame
   */
  override def source(kc: KhaosContext,
                      module_id: String,
                      config: String,
                      dependence: Dependency): DataFrame = {
    // Parse the module configuration out of its JSON representation.
    implicit val formats = DefaultFormats
    val mppConfig: MppSourceConfig = parse(config, true).extract[MppSourceConfig]
    this._mpp_Config = mppConfig
    this._kc = kc
    this._module_id = module_id
    // Quote db/table identifiers so mixed-case names survive in PostgreSQL SQL.
    dbName = s"""\"${_mpp_Config.db_name}\""""
    tblName = s"""\"${_mpp_Config.table_name}\""""

    // Authorization check: verify this job may read the source table.
    MetaUtils.checkReadAuth(kc,
      mppConfig.db_name,
      mppConfig.table_name,
      mppConfig.extender.auth.clazz,
      compact(render(mppConfig.extender.auth.params)))

    // Resolve physical connection info and (for HashData) COS/JDBC config.
    init()
    var mppDF: DataFrame = null
    log.info("SourceMode ==> " + _dataSourceType)
    kc.conf.set("dataSourceType", _dataSourceType)
    _dataSourceType.toLowerCase match {
      case SourceMode.HashData =>
        log.info("HashData KDW_for_GreenPlum DataSource Synchronization!")
        try {
          // Stage data MPP -> COS, then read it back into a DataFrame.
          if (read4Mpp()) {
            mppDF = read4Cos()
          } else {
            log.info("mpp内部表导入cos外部表失败!")
          }
        } catch {
          case e: Exception =>
            e.printStackTrace()
            throw new Exception("缓存数据失败,失败信息:" + e.getMessage + "失败原因:" + e.getCause)
        } finally {
          // Best-effort cleanup: drop the temporary external table (IF EXISTS
          // makes this safe even when the SQL batch already dropped it).
          val dropExtTblSql = s"DROP EXTERNAL TABLE IF EXISTS ${dbName}.${getMppTmpExtTableName()}"
          log.info("开始删除外部表!")
          val successFlag = MppUtils.executeSql(getConnectUrl(), _username, _password, dropExtTblSql)
          if (successFlag) {
            log.info("成功删除外部表!")
          }
          // Release the COS file system handle.
          if (_cosFs != null) {
            log.info("关闭文件系统!")
            _cosFs.close()
          }
        }
      case SourceMode.GreenPlum =>
        log.info("GreenPlum DataSource Synchronization!")
        val greenPlumSource = new GreenPlumSourceV2
        mppDF = greenPlumSource.source(kc, module_id, config, dependence, physicalConnectConfig)
      case SourceMode.HAWQ =>
        log.info("KDW_for_HAWQ DataSource Synchronization!")
        val hawqSource = new HAWQSource
        mppDF = hawqSource.source(kc, module_id, config, dependence, physicalConnectConfig)
      case _ => throw new IllegalArgumentException(s"Unknown Data Source: ${_dataSourceType}!")
    }
    mppDF
  }

  /**
   * Initializes metadata, JDBC and COS configuration.
   * COS/JDBC setup is only needed for the HashData flavour.
   */
  def init(): Unit = {
    initMetaData()
    if (_dataSourceType.equalsIgnoreCase(SourceMode.HashData)) {
      loadProperties(_kc)
      initJdbcConnectInfo()
      initCosConfig()
      // Use the delimiter configured for the temporary COS files.
      this.DELIMETER = _cosConfig.getDelimeter
    }
  }

  /**
   * Fetches the MPP physical connection info from the metadata service.
   */
  def initMetaData(): Unit = {
    log.info("初始化物理地址...")
    val className = _mpp_Config.extender.meta.clazz

    val mppConnect = MetaUtils.getMPPMeta(_kc,
      _mpp_Config.db_name,
      _mpp_Config.table_name,
      className,
      compact(render(_mpp_Config.extender.auth.params)), this).getDsMppConnect

    _host = mppConnect.getHost
    _port = mppConnect.getPort
    _username = mppConnect.getUsername
    _password = mppConnect.getPassword
    _instansename = mppConnect.getInstanceName
    _dataSourceType = mppConnect.getSourceMode
  }

  /**
   * Initializes COS access configuration and opens the COS file system.
   */
  def initCosConfig(): Unit = {
    log.info("初始化COS文件系统")
    val cosAccessConfig = getCosAccessAkSk(_kc)
    cosAccessConfig.setRegion(_region)
    cosAccessConfig.setEndPoint(_endpoint)
    // COS bucket naming convention: <bucket>-<appId>.
    cosAccessConfig.setBucket(_bucket + "-" + cosAccessConfig.getAppId)
    cosAccessConfig.setDelimeter(_delimeter)
    _cosConfig = cosAccessConfig
    log.info("开始加载COS文件系统...")
    createBucketIfNotExist(_cosConfig)
    _cosFs = FileSystem.get(addCosFileSystem())
  }

  /**
   * Appends the COS connector settings to the Spark Hadoop configuration.
   *
   * @return the enriched Hadoop configuration
   */
  def addCosFileSystem(): org.apache.hadoop.conf.Configuration = {
    val hadoopConf = HadoopCosUtils.appendCosHadoopConfigs(_kc.sparkSession.sparkContext.hadoopConfiguration, _cosConfig)
    _kc.sparkSession.sparkContext.hadoopConfiguration.addResource(hadoopConf)
    _kc.sparkSession.sparkContext.hadoopConfiguration
  }

  /**
   * Builds the JDBC connection entity for the source table.
   */
  def initJdbcConnectInfo(): Unit = {
    log.info("初始化JDBC")
    this._connectEntity = new JdbcConnectEntity(getConnectUrl(),
      _username,
      _password,
      dbName,
      tblName
    )
  }

  /**
   * Builds the JDBC url. MPP flavours all speak the PostgreSQL protocol.
   *
   * @return jdbc:postgresql://host:port/instance
   */
  def getConnectUrl(): String = {
    val url = s"jdbc:postgresql://${_host}:${_port}/${_instansename}"
    log.info("url: " + url)
    url.trim
  }

  /**
   * Builds the quoted temporary external-table name:
   * <table>_<job instance id>_<timestamp>, unique per run.
   *
   * @return quoted external table name
   */
  def getMppTmpExtTableName() = {
    val idArr = _kc.conf.getString("job.inst.id")
    s"""\"${_mpp_Config.table_name}_${idArr}_${currTime}\""""
  }

  /**
   * Maps source-table SQL column types to Spark DataTypes, ordered by the
   * column index so the resulting StructType matches the row layout.
   *
   * @param columnNameEntity column name -> field metadata
   * @return list of (column index, (column name, Spark DataType)) sorted by index
   */
  def fieldTypeToSchema(columnNameEntity: mutable.HashMap[String, StructFieldEntity]): List[(Int, (String, DataType))] = {
    val columnNameAndDataType = new mutable.HashMap[Int, (String, DataType)]
    log.info("源表字段数: " + columnNameEntity.size)
    for ((columnName, field) <- columnNameEntity) {
      val dfType: DataType = field.getFieldType.toLowerCase match {
        case MppSqlTypes.CHAR | MppSqlTypes.VARCHAR | MppSqlTypes.LONGVARCHAR | MppSqlTypes.BPCHAR =>
          StringType
        case MppSqlTypes.INT | MppSqlTypes.INT2 | MppSqlTypes.INT4 | MppSqlTypes.INT8 | MppSqlTypes.TINYINT | MppSqlTypes.SMALLINT | MppSqlTypes.INTEGER =>
          IntegerType
        case MppSqlTypes.BOOLEAN =>
          BooleanType
        case MppSqlTypes.FLOAT | MppSqlTypes.FLOAT8FLOAT | MppSqlTypes.FLOAT8 =>
          FloatType
        case MppSqlTypes.DOUBLE =>
          DoubleType
        case MppSqlTypes.DECIMAL | MppSqlTypes.NUMERIC =>
          // All decimals become Double: the MPP external-table file format
          // does not support decimal.
          DoubleType
        case MppSqlTypes.DATE | MppSqlTypes.DATETIME =>
          DateType
        case MppSqlTypes.TIMESTAMP | MppSqlTypes.TIMESTAMPTZ =>
          TimestampType
        case MppSqlTypes.TIME_WITH_TIMEZONE | MppSqlTypes.TIME_WITHOUT_TIMEZONE =>
          // DataFrame has no time-of-day type; carried as String and converted
          // back on the sink side.
          StringType
        case MppSqlTypes.SERIAL =>
          IntegerType
        case MppSqlTypes.BIGSERIAL =>
          LongType
        case _ =>
          StringType
      }
      // Keyed by column index so sorting restores the physical column order.
      columnNameAndDataType += field.getFieldIndex -> (columnName -> dfType)
    }
    val res: List[(Int, (String, DataType))] = columnNameAndDataType.toList.sortBy(_._1)
    log.info("构建有序DataType成功!")
    res
  }

  /**
   * Builds the SQL batch that stages the source table on COS: create writable
   * external table, insert the (optionally filtered) rows, drop the external
   * table. Handles enum/date/business table-split strategies.
   *
   * @param connectEntity   jdbc connection entity (unused here, kept for API compatibility)
   * @param dataformat      staging file format
   * @param delimiter       field delimiter
   * @param accessKeyId     COS access key
   * @param serectAccessKey COS secret key
   * @param appId           COS app id
   * @param location        COS staging path
   * @param ossType         object storage type, "cos"
   * @param filter          optional SQL WHERE predicate
   * @return semicolon-separated SQL statements
   */
  def getMppExtTableSql(connectEntity: JdbcConnectEntity,
                        dataformat: String,
                        delimiter: String,
                        accessKeyId: String,
                        serectAccessKey: String,
                        appId: String,
                        location: String,
                        ossType: String,
                        filter: String): String = {

    // Query the table-split (sharding) configuration; null means not split.
    tableSplit = TableSplitUtils.getTableSplit(_kc,
      _mpp_Config.db_name,
      _mpp_Config.table_name,
      _mpp_Config.extender.meta.clazz,
      compact(render(_mpp_Config.extender.meta.params)))

    if (tableSplit == null) { // table splitting not enabled
      val extTbl = getMppTmpExtTableName()
      val createTableDDL = s"CREATE WRITABLE EXTERNAL TABLE ${dbName}.${extTbl} (LIKE ${dbName}.${tblName} )LOCATION ('oss://${location} oss_type=${ossType} cos_appid=${appId} access_key_id=${accessKeyId} secret_access_key=${serectAccessKey}') FORMAT '${dataformat}' (DELIMITER '${delimiter}')"
      val copySql =
        if (filter != null && filter.nonEmpty)
          s"INSERT INTO ${dbName}.${extTbl} (SELECT * FROM ${dbName}.${tblName} where ${filter})"
        else
          s"INSERT INTO ${dbName}.${extTbl} (SELECT * FROM ${dbName}.${tblName})"
      val dropExtTblSql = s"DROP EXTERNAL TABLE IF EXISTS ${dbName}.${extTbl}"
      createTableDDL + ";" + copySql + ";" + dropExtTblSql
    } else { // splitting enabled: one statement group per physical sub-table
      tableSplit.getStrategyType match {
        case TableSplitUtils.StrategyTypeEnum.CUSTOM_ENUM => // enum-based split
          val splitValues = tableSplit.getStrategyValue.split(",").toList
          // Resolve the physical sub-table names matching the enum values.
          tblNameList = TableSplitUtils.getRealTable(_kc,
            _mpp_Config.db_name,
            _mpp_Config.table_name,
            _mpp_Config.extender.meta.clazz,
            compact(render(_mpp_Config.extender.meta.params)), this, tableSplit, "in", splitValues)
          generateSQLstatements(
            dataformat,
            delimiter,
            accessKeyId,
            serectAccessKey,
            appId,
            location,
            ossType,
            tblNameList,
            filter)
        case TableSplitUtils.StrategyTypeEnum.DATETIME => // date-based split
          val jobBizDate: String = _kc.conf.getString(SchedulerConstants.BIZ_DATE)
          // Truncate the business date to the split granularity.
          val splitTime = tableSplit.getStrategyValue match {
            case TableSplitUtils.StrategyValueEnum.year => jobBizDate.substring(0, 4)
            case TableSplitUtils.StrategyValueEnum.month => jobBizDate.substring(0, 6)
            case TableSplitUtils.StrategyValueEnum.day => jobBizDate
            case other => throw new IllegalArgumentException(s"Unknown datetime split granularity: ${other}!")
          }
          val splitValues: List[String] = List[String](splitTime)
          // Resolve the physical sub-table name for the business date.
          tblNameList = TableSplitUtils.getRealTable(_kc,
            _mpp_Config.db_name,
            _mpp_Config.table_name,
            _mpp_Config.extender.meta.clazz,
            compact(render(_mpp_Config.extender.meta.params)), this, tableSplit, "=", splitValues)
          generateSQLstatements(
            dataformat,
            delimiter,
            accessKeyId,
            serectAccessKey,
            appId,
            location,
            ossType,
            tblNameList,
            filter)
        case TableSplitUtils.StrategyTypeEnum.BUSSINESS => // business split
          var realTable = _mpp_Config.table_name
          if (_mpp_Config.sub_table.isDefined) {
            if (_mpp_Config.sub_table.get.on_off == "true") {
              // Physical name convention: <table>___<suffix>; a suffix
              // containing "___" would make the name ambiguous.
              val suffixValue = _mpp_Config.sub_table.get.suffix.trim
              if (suffixValue.contains("___")) {
                throw new IllegalArgumentException("业务分表后缀禁止包含三个连续下划线!")
              }
              if (suffixValue.nonEmpty) {
                realTable = realTable + "___" + suffixValue
              } else {
                throw new IllegalArgumentException("未正确填写业务分表后缀!")
              }
            } else {
              throw new IllegalArgumentException("未开启业务分表开关!")
            }
          }
          tblNameList = List[String](realTable)
          generateSQLstatements(
            dataformat,
            delimiter,
            accessKeyId,
            serectAccessKey,
            appId,
            location,
            ossType,
            tblNameList,
            filter)
        case other =>
          throw new IllegalArgumentException(s"Unknown table split strategy: ${other}!")
      }
    }
  }

  /**
   * Generates the create/insert/drop SQL batch for a list of physical
   * sub-tables. Each sub-table gets its own external table and COS sub-path.
   *
   * @param tblList physical sub-table names
   * @param filter  optional SQL WHERE predicate
   * @return semicolon-separated SQL statements
   */
  def generateSQLstatements(dataformat: String,
                            delimiter: String,
                            accessKeyId: String,
                            serectAccessKey: String,
                            appId: String,
                            location: String,
                            ossType: String,
                            tblList: scala.List[String],
                            filter: String): String = {
    val statements = new ArrayBuffer[String]
    for (tbl <- tblList) {
      // Unique external-table name: <sub-table>_<job instance id>_<timestamp>.
      val tblEXT = s"""\"${tbl}_${_kc.conf.getString("job.inst.id")}_${currTime}\""""
      val tbName = s"""\"${tbl}\""""
      val createTableDDL = s"CREATE WRITABLE EXTERNAL TABLE ${dbName}.${tblEXT} (LIKE ${dbName}.${tbName} )LOCATION ('oss://${location + "/" + tbl} oss_type=${ossType} cos_appid=${appId} access_key_id=${accessKeyId} secret_access_key=${serectAccessKey}') FORMAT '${dataformat}' (DELIMITER '${delimiter}')"
      val copySql =
        if (filter != null && filter.nonEmpty)
          s"INSERT INTO ${dbName}.${tblEXT} (SELECT * FROM ${dbName}.${tbName} where ${_mpp_Config.filter})"
        else
          s"INSERT INTO ${dbName}.${tblEXT} (SELECT * FROM ${dbName}.${tbName})"
      val dropExtTblSql = s"DROP EXTERNAL TABLE IF EXISTS ${dbName}.${tblEXT}"
      statements += createTableDDL + ";" + copySql + ";" + dropExtTblSql
    }
    statements.mkString(";")
  }

  /**
   * Builds the COS staging location for this run.
   *
   * @param workDir base staging directory, e.g. /di/.working_output
   * @return endpoint/bucket/workDir/jobId/moduleId/timestamp
   */
  def getLocation(workDir: String): String = {
    val bucket = _cosConfig.getBucket
    val endpoint = _cosConfig.getEndPoint
    val jobID = _kc.conf.getString("job.inst.id")
    val location =
      if (workDir.startsWith("/"))
        s"${endpoint}/${bucket}${workDir}/${jobID}/${_module_id}/${currTime}"
      else
        s"${endpoint}/${bucket}/${workDir}/${jobID}/${_module_id}/${currTime}"
    log.info(s"LOCATION: ${location}")
    location
  }

  /**
   * Executes the staging SQL batch (MPP internal table -> COS external table).
   *
   * @return true when the batch executed successfully
   */
  def read4Mpp(): Boolean = {
    val url = getConnectUrl()

    if (DELIMETER == null || DELIMETER.isEmpty) {
      // Fall back to the default ASCII delimiter.
      log.info("use delimeter \"|\"")
      DELIMETER = "|"
    }
    log.info("DELIMETER ==> " + DELIMETER)
    val totalSQL = getMppExtTableSql(_connectEntity, FILE_FORMAT, DELIMETER, _cosConfig.getAccessKey, _cosConfig.getSecretKey, _cosConfig.getAppId, getLocation(COS_WORK_DIR), OSS_TYPE, _mpp_Config.filter)
    // Submit the whole batch in one shot.
    log.info("totalSQL ==> " + totalSQL)
    val res = MppUtils.executeSql(url, _username, _password, totalSQL)
    if (res) {
      log.info("数据映射到外部表成功!")
    }
    res
  }

  /**
   * Loads "module.mpp.source.*" settings; falls back to the defaults
   * (best-effort: a failure is logged, not propagated).
   */
  def loadProperties(kc: KhaosContext): Unit = {
    try {
      val mppProperties: Map[String, String] = kc.conf.getAllWithPrefix("module.mpp.source.").toMap
      log.info("MPPSource Properties")
      mppProperties.foreach { case (k, v) => log.info(k + "   " + v) }
      _loginTimeout = mppProperties.getOrElse(MppConstants.MODULE_MPP_SOURCE_JDBC_CONNECT_TIMEOUT, MppConstants.DEFAULT_CONNECT_TIMEOUT).toInt

      _bucket = mppProperties.getOrElse(MppConstants.MODULE_MPP_SOURCE_TMP_COS_BUCKET, "none")
      _endpoint = mppProperties.getOrElse(MppConstants.MODULE_MPP_SOURCE_TMP_COS_ENDPOINT, "none")
      _region = mppProperties.getOrElse(MppConstants.MODULE_MPP_SOURCE_TMP_COS_REGION, "none")
      _delimeter = mppProperties.getOrElse(MppConstants.MODULE_MPP_SOURCE_TMP_COS_DELIMETER, "none")

    } catch {
      case e: Exception =>
        log.error("未读取到MPP配置! 改用默认配置")
    }
  }

  /**
   * Builds the StructType of the source table from its JDBC metadata,
   * honouring table splitting (uses the first physical sub-table's schema).
   */
  private def buildSourceSchema(): StructType = {
    val tableSchema: mutable.HashMap[String, StructFieldEntity] =
      if (tableSplit != null) {
        // Split tables: read the metadata of a real physical sub-table.
        _connectEntity.getMppTrueTableSchema(tblNameList.head, _loginTimeout)
      } else {
        _connectEntity.getMppTableSchema(_loginTimeout)
      }
    // (index, (name, DataType)) sorted by index, e.g. (1, ("id", IntegerType)).
    val ordered = fieldTypeToSchema(tableSchema)
    StructType(ordered.map { case (_, (name, dataType)) => StructField(name, dataType, true) })
  }

  /**
   * Parses one timestamp value from a staged file.
   *
   * Values exported with a timezone offset (e.g. "2019-01-01 00:00:00+08")
   * are truncated to the local part, since Timestamp.valueOf rejects offsets.
   * NOTE: the original code tested contains("\\+") — a literal backslash-plus
   * that never occurs — so offset-suffixed values crashed; fixed to "+".
   */
  private def parseTimestamp(raw: String): Timestamp = {
    if (raw.contains("+")) {
      val parts = raw.split("\\+", -1)
      if (parts.length > 1) Timestamp.valueOf(parts(0)) else Timestamp.valueOf(raw)
    } else {
      Timestamp.valueOf(raw)
    }
  }

  /**
   * Converts one split CSV line into a Row following the schema.
   * The NULL marker is mapped to null uniformly for every column type
   * (the original applied it inconsistently, skipping fallback types).
   */
  private def convertRow(attributes: Array[String], schema: StructType): Row = {
    val values = new ArrayBuffer[Any](schema.fields.length)
    // Metadata schema and file columns have the same order and count.
    for (i <- schema.fields.indices) {
      val raw = attributes(i)
      val value: Any =
        if (MetaDataConstants.NULL == raw) null
        else schema.fields(i).dataType match {
          case StringType => raw
          case IntegerType => raw.toInt
          case DateType => Date.valueOf(raw)
          case TimestampType => parseTimestamp(raw)
          case FloatType => raw.toFloat
          case DoubleType => raw.toDouble
          case _ => raw // unknown types carried as String
        }
      values += value
    }
    Row.fromSeq(values)
  }

  /**
   * Reads the staged files back from COS into a DataFrame (mpp -> cos -> df).
   * When no data was staged (empty source), returns an empty DataFrame that
   * still carries the source schema.
   *
   * @return DataFrame handed to the downstream module
   */
  def read4Cos(): DataFrame = {
    log.info("开始读取COS中的文件...")
    val baseDir =
      if (COS_WORK_DIR.endsWith("/"))
        COS_WORK_DIR + _kc.conf.getString("job.inst.id") + "/" + _module_id + "/" + currTime
      else
        COS_WORK_DIR + "/" + _kc.conf.getString("job.inst.id") + "/" + _module_id + "/" + currTime
    val cosReadPath = baseDir + "/*"
    log.info("cosReadPath: " + cosReadPath)

    var resDataFrame: DataFrame = null

    FILE_FORMAT match {
      case FormatEnum.ORC_FORMAT =>
        if (_cosFs.exists(new Path(baseDir + "/"))) {
          resDataFrame = _kc.sparkSession.read.orc(cosReadPath)
          log.info("读取cos数据成功!")
        } else {
          // Nothing was staged: build an empty DataFrame with the real schema.
          val schema = buildSourceSchema()
          val emptyRDD = _kc.sparkSession.emptyDataFrame.rdd
          resDataFrame = _kc.sparkSession.createDataFrame(emptyRDD, schema)
          log.warn("源表数据不存在!!!")
        }
      case FormatEnum.CSV_FORMAT =>
        val cosRDD: RDD[String] = _kc.sparkSession.sparkContext.textFile(cosReadPath)
        if (cosRDD.isEmpty()) {
          throw new Exception(s"没有读取到COS源文件数据......")
        }
        val schema = buildSourceSchema()

        if (DELIMETER == null || DELIMETER.isEmpty) {
          log.info("use delimeter \"|\"")
          DELIMETER = "|"
        }
        // Copy to a local so the RDD closure does not capture the var.
        val delimeter: String = DELIMETER
        log.info("DELIMETER ==> " + delimeter)
        // Escape the delimiter (split takes a regex); -1 keeps trailing fields.
        val rowRDD = cosRDD.filter(_.trim.nonEmpty)
          .map(_.split("\\" + delimeter, -1))
          .map(convertRow(_, schema))
        resDataFrame = _kc.sparkSession.createDataFrame(rowRDD, schema)
        log.info("创建DataFrame成功!")
      case other =>
        throw new IllegalArgumentException(s"暂不支持读取 ${other} 类型文件格式!")
    }
    resDataFrame
  }

  /**
   * Returns the upstream schema declared in the module configuration.
   * NOTE(review): depends on _dataSourceType set by source()/init(); called
   * beforehand it yields an empty list — confirm intended call order.
   */
  override def schema(kc: KhaosContext,
                      config: String,
                      dependence: Dependency): List[KhaosStructField] = {
    implicit val formats = DefaultFormats
    val fieldSchema = ArrayBuffer[KhaosStructField]()
    if (_dataSourceType.equalsIgnoreCase(SourceMode.HashData)) {
      val mppInfo = parse(config, true).extract[MppSourceConfig]
      for (ef <- mppInfo.extract_fields) {
        fieldSchema += KhaosStructField(ef.field, ef.data_type)
      }
    } else if (_dataSourceType.equalsIgnoreCase(SourceMode.GreenPlum)) {
      val gpInfo = parse(config, true).extract[GreenPlumSourceConfig]
      for (ef <- gpInfo.extract_fields) {
        fieldSchema += KhaosStructField(ef.field, ef.data_type)
      }
    }
    fieldSchema.toList
  }
}
