package com.kingsoft.dc.khaos.module.spark.util

import java.util
import java.util.{ArrayList, List}
import com.alibaba.fastjson.{JSON, JSONObject}
import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.extender.exception.MetaException
import com.kingsoft.dc.khaos.extender.meta.api.{AuthRequest, DatasourceListResult, DmRequest, TablePartitionsResult}
import com.kingsoft.dc.khaos.extender.meta.model.MetaParams
import com.kingsoft.dc.khaos.extender.meta.model.MetaParams.TABLE_TYPE
import com.kingsoft.dc.khaos.extender.meta.model.auth.DmAuth
import com.kingsoft.dc.khaos.extender.meta.model.col.DmTableColumn
import com.kingsoft.dc.khaos.extender.meta.model.db.DmDatabase
import com.kingsoft.dc.khaos.extender.meta.model.ds.{HAWQConnect, SqlServerConnect, _}
import com.kingsoft.dc.khaos.extender.meta.model.table.DmTable
import com.kingsoft.dc.khaos.extender.meta.utils.DmMetaUtils
import com.kingsoft.dc.khaos.module.spark.constants.DmInterfaceEnum.DmInterfaceEnum
import com.kingsoft.dc.khaos.module.spark.constants.{DmInterfaceEnum, MetaDataConstants, SchedulerConstants}
import com.kingsoft.dc.khaos.module.spark.model.MetaDataEntity
import com.kingsoft.dc.khaos.module.spark.model.cos.CosAccessConfig
import com.kingsoft.dc.khaos.module.spark.model.ks3.Ks3AccessConfig
import com.kingsoft.dc.khaos.module.spark.sink.SinkStrategy
import com.kingsoft.dc.khaos.module.spark.source.SourceStrategy
import com.kingsoft.dc.khaos.util.Logging
import org.apache.commons.lang3.StringUtils
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.parse

import scala.collection.mutable.ArrayBuffer


/**
 * Created by jing on 19/7/2.
 */
object MetaUtils extends Logging {

  /**
   * Builds a DmRequest aimed at the meta rights-query URL, carrying the
   * auth-server address and project id read from the scheduler configuration.
   */
  def buildAuthRequest(kc: KhaosContext): DmRequest = {
    val auth = new AuthRequest()
    auth.setAuthUrl(kc.conf.getString(SchedulerConstants.AUTH_SERVER_KEY))
    auth.setAuthProjectId(kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0))

    val req = new DmRequest()
    req.setUrl(kc.conf.getString(SchedulerConstants.META_RIGHT_QUERY_URL_KEY))
    req.setAuthRequest(auth)
    req
  }

  /**
   * Builds a DmRequest aimed at the meta query URL, carrying the
   * auth-server address and project id read from the scheduler configuration.
   */
  def buildMetaRequest(kc: KhaosContext): DmRequest = {
    val auth = new AuthRequest()
    auth.setAuthUrl(kc.conf.getString(SchedulerConstants.AUTH_SERVER_KEY))
    auth.setAuthProjectId(kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0))

    val req = new DmRequest()
    req.setUrl(kc.conf.getString(SchedulerConstants.META_QUERY_URL_KEY))
    req.setAuthRequest(auth)
    req
  }

  /** Verifies read access: checks the "select" privilege on dbName.tblName. */
  def checkReadAuth(kc: KhaosContext, dbName: String, tblName: String, className: String, authParam: String): Boolean = {
    val privileges = new ArrayList[String]()
    privileges.add("select")
    checkAuth(kc, dbName, tblName, className, authParam, privileges)
  }

  /** Verifies write access: checks both "select" and "insert" privileges on dbName.tblName. */
  def checkWriteAuth(kc: KhaosContext, dbName: String, tblName: String, className: String, authParam: String): Boolean = {
    val privileges = new ArrayList[String]()
    privileges.add("select")
    privileges.add("insert")
    checkAuth(kc, dbName, tblName, className, authParam, privileges)
  }

  /**
   * Unified authorization check against the project auth service.
   *
   * @param kc        context carrying scheduler configuration
   * @param dbName    database name
   * @param tblName   table name
   * @param className fully-qualified name of the auth implementation class
   * @param authParam auth parameters as a JSON string
   * @param pivlist   privileges to verify, e.g. List("insert", "select")
   * @return true when every requested privilege is granted
   * @throws Exception when the privilege check fails
   */
  def checkAuth(kc: KhaosContext, dbName: String, tblName: String, className: String, authParam: String, pivlist: ArrayList[String]): Boolean = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val projectId = kc.conf.getString(SchedulerConstants.PROJECT_ID)

    val request = buildAuthRequest(kc)
    val metaParams = new MetaParams(authParam)
    metaParams.buildAuthParams(runEnv, projectId, dbName, tblName, pivlist)
    request.setMetaParams(metaParams)

    val auth: DmAuth = DmMetaUtils.checkProjectAuth(className, request, classOf[DmAuth])
    // getTotalPrivs is true only when all requested privileges are held.
    val checkResult = auth.getTotalPrivs
    if (!checkResult) {
      throw new Exception("权限校验失败..." + request.getMetaParams.getPrivSet)
    }
    checkResult
  }

  /**
   * Resolves the datasource connection info for the given database and table.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @return the Connect object of the resolved datasource
   */
  def getDsConnect(kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String): Connect = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val projectId = kc.conf.getString(SchedulerConstants.PROJECT_ID)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // Resolve the physical address of the datasource.
    val authRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    //metaParams.setJobProjectName(jobProjectName)
    metaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
    authRequest.setMetaParams(metaParams)

    // NOTE(review): nosql table params are layered on top of the read-datasource
    // params above, *before* the datasource lookup — looks intentional but confirm
    // against the DmMetaUtils implementation.
    metaParams.buildNosqlTableParams(runEnv, dbName, tblName)
    val ds = DmMetaUtils.getDatasource(className, authRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)
    //    val table = DmMetaUtils.getTable(className, authRequest, classOf[DmTable])
    //    val columns = DmMetaUtils.getColumns(className, authRequest, classOf[List[DmTableColumn]])
    ds.getConnect
  }

  /** Fetches the COS connection for the named datasource via the meta service. */
  def getCosMetaByDsName(kc: KhaosContext, dsName: String): CosConnect = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    val params = new MetaParams()
    params.setJobProjectName(jobProjectName)
    params.buildWriteDatasourceParams(runEnv, dsName)

    val request = buildMetaRequest(kc)
    request.setMetaParams(params)

    val datasource = DmMetaUtils.getDatasourceByName(MetaDataConstants.COS_DM_META_IMPL_CLASS, request).asInstanceOf[DmDatasource]
    checkNullException(datasource, DmInterfaceEnum.DS, jobProjectName, runEnv, s"dsName=$dsName")
    datasource.getConnect.asInstanceOf[CosConnect]
  }

  /**
   * Looks up an Oracle datasource by name via the meta service.
   *
   * @param kc             context carrying scheduler configuration
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string (must carry the ds name)
   * @return the Oracle connection of the resolved datasource
   */
  def getOracleDatasourceByName(kc: KhaosContext, className: String, metaParamsJson: String): OracleConnect = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getString(SchedulerConstants.PROJECT_ID)

    // vals, not vars: neither reference is ever reassigned.
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId.toInt)
    metaParams.buildWriteDatasourceParams(runEnv, null, null)
    metaRequest.setMetaParams(metaParams)

    val ds: DmDatasource = DmMetaUtils.getDatasourceByName(className, metaRequest)
    ds.getConnect.asInstanceOf[OracleConnect]
  }

  /**
   * Looks up a MySQL datasource by name via the meta service.
   *
   * @param kc             context carrying scheduler configuration
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string (must carry the ds name)
   * @return the resolved DmDatasource (connection not unwrapped, unlike the Oracle variant)
   */
  def getMysqlDatasourceByName(kc: KhaosContext, className: String, metaParamsJson: String): DmDatasource = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getString(SchedulerConstants.PROJECT_ID)

    // vals, not vars: neither reference is ever reassigned.
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId.toInt)
    metaParams.buildWriteDatasourceParams(runEnv, null, null)
    metaRequest.setMetaParams(metaParams)

    DmMetaUtils.getDatasourceByName(className, metaRequest)
  }

  /**
   * Fetches Oracle datasource connection, table and column metadata.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @param classType      strategy instance; SourceStrategy builds read params, SinkStrategy write params
   * @tparam T strategy type
   * @return MetaDataEntity populated with connection, table and columns
   */
  def getOracleMeta[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getString(SchedulerConstants.PROJECT_ID)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // Resolve the physical datasource address (vals: never reassigned).
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId.toInt)

    // Source reads, sink writes; any other strategy leaves the params untouched.
    classType match {
      case _: SourceStrategy => metaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy   => metaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _                 =>
    }
    metaRequest.setMetaParams(metaParams)

    val ds = DmMetaUtils.getDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    // metaParams is shared with metaRequest, so mutating it updates the request.
    metaParams.buildTableParams(runEnv, dbName, tblName)
    val table = DmMetaUtils.getTable(className, metaRequest, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, metaRequest, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val mde = new MetaDataEntity
    mde.setDsOracleConnect(ds.getConnect.asInstanceOf[OracleConnect])
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde
  }

  /**
   * Resolves an Oracle sharding datasource connection by datasource name.
   *
   * @param kc        context carrying scheduler configuration
   * @param dsName    datasource name
   * @param dsId      datasource id
   * @param dataType  datasource type tag placed into the request JSON
   * @param className meta implementation class name
   * @param classType strategy instance (currently unused by this method's logic)
   * @tparam T strategy type
   * @return MetaDataEntity carrying only the Oracle connection
   */
  def getOracleShardingMeta[T](kc: KhaosContext, dsName: String, dsId: String, dataType: String, className: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getString(SchedulerConstants.PROJECT_ID)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)
    // NOTE(review): values are interpolated without JSON escaping — assumes
    // ds/project names contain no quotes or backslashes; confirm upstream validation.
    val metaParamsJson =
      s"""
         |{
         |  "ds_type": "$dataType",
         |  "ds_id": "$dsId",
         |  "ds_name": "$dsName",
         |  "project_id": "$jobProjectId",
         |  "project_name": "$jobProjectName"
         |}
         |""".stripMargin

    // Resolve the physical datasource address (vals: never reassigned).
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId.toInt)
    metaParams.buildReadDatasourceParams(runEnv, dsName)
    metaRequest.setMetaParams(metaParams)

    val ds = DmMetaUtils.getDatasourceByName(className, metaRequest).asInstanceOf[DmDatasource]
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv)

    val mde = new MetaDataEntity
    mde.setDsOracleConnect(ds.getConnect.asInstanceOf[OracleConnect])
    mde
  }

  /**
   * Resolves an unmanaged Oracle datasource connection.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @param classType      strategy instance; SourceStrategy builds read params, SinkStrategy write params
   * @tparam T strategy type
   * @return MetaDataEntity carrying only the Oracle connection
   */
  def getOracleDs[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getString(SchedulerConstants.PROJECT_ID)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // Resolve the physical datasource address (vals: never reassigned).
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId.toInt)
    metaParams.setJobProjectName(jobProjectName)

    // Source reads, sink writes; any other strategy leaves the params untouched.
    classType match {
      case _: SourceStrategy => metaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy   => metaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _                 =>
    }
    metaRequest.setMetaParams(metaParams)

    // Unmanaged datasources are queried with a serialized request payload.
    val requestJson: String = generateQueryUnmanagedDataSourceJson(metaRequest)
    metaRequest.setParams(requestJson)
    val ds = DmMetaUtils.getUnmanagedDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    val mde = new MetaDataEntity
    mde.setDsOracleConnect(ds.getConnect.asInstanceOf[OracleConnect])
    mde
  }

  /**
   * Fetches SQL Server datasource connection, table and column metadata.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @param classType      strategy instance; SourceStrategy builds read params, SinkStrategy write params
   * @tparam T strategy type
   * @return MetaDataEntity populated with connection, table and columns
   */
  def getSQLServerMeta[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getString(SchedulerConstants.PROJECT_ID)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // Resolve the physical datasource address (vals: never reassigned).
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId.toInt)

    // Source reads, sink writes; any other strategy leaves the params untouched.
    classType match {
      case _: SourceStrategy => metaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy   => metaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _                 =>
    }
    metaRequest.setMetaParams(metaParams)

    val ds = DmMetaUtils.getDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    // Table params use the SQL Server table type explicitly.
    metaParams.buildTableParams(runEnv, dbName, tblName, TABLE_TYPE.SQLSERVER)
    val table = DmMetaUtils.getTable(className, metaRequest, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, metaRequest, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val mde = new MetaDataEntity
    mde.setDsSqlServerConnect(ds.getConnect.asInstanceOf[SqlServerConnect])
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde
  }

  /**
   * Fetches MySQL datasource connection, table and column metadata.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @param classType      strategy instance; SourceStrategy builds read params, SinkStrategy write params
   * @tparam T strategy type
   * @return MetaDataEntity populated with connection, table and columns
   */
  def getMysqlMeta[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getString(SchedulerConstants.PROJECT_ID)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // Resolve the physical datasource address (vals: never reassigned).
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId.toInt)

    // Source reads, sink writes; any other strategy leaves the params untouched.
    classType match {
      case _: SourceStrategy => metaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy   => metaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _                 =>
    }
    metaRequest.setMetaParams(metaParams)

    val ds = DmMetaUtils.getDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    // metaParams is shared with metaRequest, so mutating it updates the request.
    metaParams.buildTableParams(runEnv, dbName, tblName)
    val table = DmMetaUtils.getTable(className, metaRequest, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, metaRequest, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val mde = new MetaDataEntity
    mde.setDsMysqlConnect(ds.getConnect.asInstanceOf[MysqlConnect])
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde
  }

  /**
   * Resolves a MySQL sharding datasource connection by datasource name.
   *
   * @param kc        context carrying scheduler configuration
   * @param dsName    datasource name
   * @param dsId      datasource id
   * @param dataType  datasource type tag placed into the request JSON
   * @param className meta implementation class name
   * @param classType strategy instance (currently unused by this method's logic)
   * @tparam T strategy type
   * @return MetaDataEntity carrying only the MySQL connection
   */
  def getMysqlShardingMeta[T](kc: KhaosContext, dsName: String, dsId: String, dataType: String, className: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getString(SchedulerConstants.PROJECT_ID)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)
    // NOTE(review): values are interpolated without JSON escaping — assumes
    // ds/project names contain no quotes or backslashes; confirm upstream validation.
    val metaParamsJson =
      s"""
         |{
         |  "ds_type": "$dataType",
         |  "ds_id": "$dsId",
         |  "ds_name": "$dsName",
         |  "project_id": "$jobProjectId",
         |  "project_name": "$jobProjectName"
         |}
         |""".stripMargin

    // Resolve the physical datasource address (vals: never reassigned).
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId.toInt)
    metaParams.buildReadDatasourceParams(runEnv, dsName)
    metaRequest.setMetaParams(metaParams)

    val ds = DmMetaUtils.getDatasourceByName(className, metaRequest).asInstanceOf[DmDatasource]
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv)

    val mde = new MetaDataEntity
    mde.setDsMysqlConnect(ds.getConnect.asInstanceOf[MysqlConnect])
    mde
  }

  /**
   * Resolves an unmanaged MySQL datasource connection.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @param classType      strategy instance; SourceStrategy builds read params, SinkStrategy write params
   * @tparam T strategy type
   * @return MetaDataEntity carrying only the MySQL connection
   */
  def getMysqlDs[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getString(SchedulerConstants.PROJECT_ID)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // Resolve the physical datasource address (vals: never reassigned).
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId.toInt)
    metaParams.setJobProjectName(jobProjectName)

    // Source reads, sink writes; any other strategy leaves the params untouched.
    classType match {
      case _: SourceStrategy => metaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy   => metaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _                 =>
    }
    metaRequest.setMetaParams(metaParams)

    // Unmanaged datasources are queried with a serialized request payload.
    val requestJson: String = generateQueryUnmanagedDataSourceJson(metaRequest)
    metaRequest.setParams(requestJson)
    val ds = DmMetaUtils.getUnmanagedDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    val mde = new MetaDataEntity
    mde.setDsMysqlConnect(ds.getConnect.asInstanceOf[MysqlConnect])
    mde
  }

  /**
   * Extracts partition column names from the meta-service partition results.
   *
   * @param partitions partition descriptors returned by the meta service; may be null
   * @return partition column names, empty when partitions is null or empty
   */
  def getPartitionNames(partitions: util.List[TablePartitionsResult]): Array[String] = {
    val partitionNames = ArrayBuffer[String]()
    // Bug fix: the previous version logged partitions.size() *before* the
    // null check, throwing an NPE whenever the list was null.
    if (null != partitions) {
      logInfo(s"hive table partitions size: ${partitions.size()}")
      for (i <- 0 until partitions.size()) {
        partitionNames += partitions.get(i).getPartitionColName
      }
    } else {
      logInfo("hive table partitions size: 0 (null list)")
    }
    logInfo(s"hive table partitions:${partitionNames.mkString("[", ",", "]")}")
    partitionNames.toArray
  }

  /**
   * Assembles Hive metadata for a job.
   *
   * For a built-in (default) datasource only sink jobs need columns/partitions;
   * for an external datasource the physical Hive connection is resolved as well.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @param classType      strategy instance; SourceStrategy builds read params, SinkStrategy write params
   * @tparam T strategy type
   * @return populated MetaDataEntity
   */
  def getHiveMeta[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)
    val mde = new MetaDataEntity

    // Resolve the physical datasource address (vals: never reassigned).
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId)

    // Source reads, sink writes; any other strategy leaves the params untouched.
    classType match {
      case _: SourceStrategy => metaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy   => metaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _                 =>
    }

    metaRequest.setMetaParams(metaParams)
    val dsList = DmMetaUtils.getDatasourceList(className, metaRequest, classOf[DatasourceListResult])
    // Type 0 marks a built-in (default) datasource.
    mde.setDefaultDs(0.equals(dsList.getType))

    if (mde.getDefaultDs) {
      // Built-in datasource: only the sink side needs table metadata.
      if (classType.isInstanceOf[SinkStrategy]) {
        metaParams.buildTableParams(runEnv, dbName, tblName)
        metaRequest.setMetaParams(metaParams)
        val columns = DmMetaUtils.getColumns(className, metaRequest, classOf[List[DmTableColumn]])
        checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

        val partitions = DmMetaUtils.getTablePartitons(className, metaRequest, classOf[List[TablePartitionsResult]])
        mde.setTablePartition(getPartitionNames(partitions))
        mde.setColumnEntiy(columns)
      }
      mde
    } else {
      // External datasource: resolve the physical Hive connection.
      val dmds = DmMetaUtils.getDatasource(className, metaRequest, classOf[DmDatasource])
      checkNullException(dmds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)
      val hiveConnect = dmds.getConnect.asInstanceOf[HiveConnect]

      // Build table params, then fetch column and partition metadata.
      metaParams.buildTableParams(runEnv, dbName, tblName)
      metaRequest.setMetaParams(metaParams)
      val columns = DmMetaUtils.getColumns(className, metaRequest, classOf[List[DmTableColumn]])
      checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

      val partitions = DmMetaUtils.getTablePartitons(className, metaRequest, classOf[List[TablePartitionsResult]])
      mde.setTablePartition(getPartitionNames(partitions))

      mde.setDsHiveConnect(hiveConnect)
      mde.setColumnEntiy(columns)
      mde
    }
  }

  /**
   * Fetches MPP datasource connection, table and column metadata.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @param classType      strategy instance; SourceStrategy builds read params, SinkStrategy write params
   * @tparam T strategy type
   * @return MetaDataEntity populated with connection, table and columns
   */
  def getMPPMeta[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // Resolve the physical datasource address.
    val metaRequest = buildMetaRequest(kc)
    val dsMetaParams = new MetaParams(metaParamsJson)
    dsMetaParams.setJobProjectId(jobProjectId)

    // Source reads, sink writes; any other strategy leaves the params untouched.
    classType match {
      case _: SourceStrategy => dsMetaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy   => dsMetaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _                 =>
    }
    metaRequest.setMetaParams(dsMetaParams)

    val ds = DmMetaUtils.getDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    // MPP uses its own table-param builder (not the generic buildTableParams).
    dsMetaParams.buildMppTableParams(runEnv, dbName, tblName)
    metaRequest.setMetaParams(dsMetaParams)

    val table = DmMetaUtils.getTable(className, metaRequest, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, metaRequest, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val mde = new MetaDataEntity
    mde.setDsMppConnect(ds.getConnect.asInstanceOf[MppConnect])
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde
  }

  /**
   * Fetches Elasticsearch datasource connection, table (index) and column metadata.
   *
   * Always builds *write* datasource params — this method is used on the sink side.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         database (index namespace) name
   * @param tblName        table (index/type) name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @return MetaDataEntity populated with connection, table, columns and default-ds flag
   */
  def getESMeta(kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // Resolve the physical datasource address.
    val authRequest: DmRequest = buildMetaRequest(kc)
    val dsMetaParams = new MetaParams(metaParamsJson)
    dsMetaParams.setJobProjectId(jobProjectId)

    dsMetaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
    authRequest.setMetaParams(dsMetaParams)

    val ds: DmDatasource = DmMetaUtils.getDatasource(className, authRequest, classOf[DmDatasource])
    log.info(s"ds is $ds, className is $className, authRequest is ${authRequest.toString}")
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    // ES tables use nosql table params.
    dsMetaParams.buildNosqlTableParams(runEnv, dbName, tblName)
    authRequest.setMetaParams(dsMetaParams)

    val table = DmMetaUtils.getTable(className, authRequest, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, authRequest, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val mde = new MetaDataEntity
    val dslist = DmMetaUtils.getDatasourceList(className, authRequest, classOf[DatasourceListResult])
    // Type 0 marks a built-in (default) datasource.
    mde.setDefaultDs(0.equals(dslist.getType))
    mde.setDatasourceEntiy(ds)
    mde.setDsEsConnect(ds.getConnect.asInstanceOf[ESConnect])
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde
  }


  /**
   * Fetches HBase datasource connection, table and column metadata.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         namespace name
   * @param tblName        table name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @param classType      strategy instance; SourceStrategy builds read params, SinkStrategy write params
   * @tparam T strategy type
   * @return MetaDataEntity populated with connection, table, columns and default-ds flag
   */
  def getHBaseMeta[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // Resolve the physical datasource address (vals: never reassigned).
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId)

    // Source reads, sink writes; any other strategy leaves the params untouched.
    classType match {
      case _: SourceStrategy => metaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy   => metaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _                 =>
    }
    metaRequest.setMetaParams(metaParams)

    val ds = DmMetaUtils.getDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)
    val dslist = DmMetaUtils.getDatasourceList(className, metaRequest, classOf[DatasourceListResult])
    // Datasource flag: type 0 = built-in, 1 = external. Compute once and reuse.
    val isDefaultDs = 0.equals(dslist.getType)
    ds.setDefaultDs(isDefaultDs)

    // HBase tables use nosql table params.
    metaParams.buildNosqlTableParams(runEnv, dbName, tblName)
    metaRequest.setMetaParams(metaParams)
    val table = DmMetaUtils.getTable(className, metaRequest, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, metaRequest, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val mde = new MetaDataEntity
    mde.setDsHBaseConnect(ds.getConnect.asInstanceOf[HbaseConnect])
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde.setDatasourceEntiy(ds)
    mde.setDefaultDs(isDefaultDs)
    mde
  }


  /**
   * Fetches Phoenix datasource connection, table and column metadata.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         schema name
   * @param tblName        table name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @param classType      strategy instance; SourceStrategy builds read params, SinkStrategy write params
   * @tparam T strategy type
   * @return MetaDataEntity populated with connection, table, columns and default-ds flag
   */
  def getPhoenixMeta[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // Resolve the physical datasource address (vals: never reassigned).
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId)

    // Source reads, sink writes; any other strategy leaves the params untouched.
    classType match {
      case _: SourceStrategy => metaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy   => metaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _                 =>
    }
    metaRequest.setMetaParams(metaParams)

    val ds = DmMetaUtils.getDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)
    val dslist = DmMetaUtils.getDatasourceList(className, metaRequest, classOf[DatasourceListResult])
    // Datasource flag: type 0 = built-in, 1 = external. Compute once and reuse.
    val isDefaultDs = 0.equals(dslist.getType)
    ds.setDefaultDs(isDefaultDs)

    // Phoenix tables use nosql table params.
    metaParams.buildNosqlTableParams(runEnv, dbName, tblName)
    metaRequest.setMetaParams(metaParams)
    val table = DmMetaUtils.getTable(className, metaRequest, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, metaRequest, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val mde = new MetaDataEntity
    mde.setDsPhoenixConnect(ds.getConnect.asInstanceOf[PhoenixConnect])
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde.setDatasourceEntiy(ds)
    mde.setDefaultDs(isDefaultDs)
    mde
  }


  /**
   * Resolves an unmanaged Redis datasource connection.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         database name
   * @param tblName        table (key space) name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @param classType      strategy instance; SourceStrategy builds read params, SinkStrategy write params
   * @tparam T strategy type
   * @return MetaDataEntity carrying the Redis connection and datasource entity
   */
  def getRedisMeta[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // Resolve the physical datasource address (vals: never reassigned).
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    // NOTE(review): sibling methods call setJobProjectId here; this one uses
    // setProjectId — looks deliberate for the unmanaged flow, but confirm.
    metaParams.setProjectId(jobProjectId)

    // Source reads, sink writes; any other strategy leaves the params untouched.
    classType match {
      case _: SourceStrategy => metaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy   => metaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _                 =>
    }
    metaRequest.setMetaParams(metaParams)

    // Unmanaged datasources are queried with a serialized request payload.
    val requestJson: String = generateQueryUnmanagedDataSourceJson(metaRequest)
    metaRequest.setParams(requestJson)
    val ds = DmMetaUtils.getUnmanagedDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    val mde = new MetaDataEntity
    mde.setDsRedisConnect(ds.getConnect.asInstanceOf[RedisConnect])
    mde.setDatasourceEntiy(ds)
    mde
  }


  /**
   * Fetches InfluxDB datasource connection, measurement and column metadata.
   *
   * @param kc             context carrying scheduler configuration
   * @param dbName         database name
   * @param tblName        measurement name
   * @param className      meta implementation class name
   * @param metaParamsJson meta parameters as a JSON string
   * @param classType      strategy instance; SourceStrategy builds read params, SinkStrategy write params
   * @tparam T strategy type
   * @return MetaDataEntity populated with connection, table and columns
   */
  def getInfluxMeta[T](kc: KhaosContext,
                       dbName: String,
                       tblName: String,
                       className: String,
                       metaParamsJson: String,
                       classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // Resolve the physical datasource address.
    val authRequest = buildMetaRequest(kc)
    val dsMetaParams = new MetaParams(metaParamsJson)
    dsMetaParams.setJobProjectId(jobProjectId)

    // Source reads, sink writes; any other strategy leaves the params untouched.
    classType match {
      case _: SourceStrategy => dsMetaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy   => dsMetaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _                 =>
    }
    authRequest.setMetaParams(dsMetaParams)
    val ds = DmMetaUtils.getDatasource(className, authRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    // Influx uses its own table-param builder.
    dsMetaParams.buildInfluxParams(runEnv, dbName, tblName)
    authRequest.setMetaParams(dsMetaParams)
    val table = DmMetaUtils.getTable(className, authRequest, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, authRequest, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val mde = new MetaDataEntity
    mde.setDsInfluxConnect(ds.getConnect.asInstanceOf[InfluxConnect])
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde
  }

  /**
   * Builds the GreenPlum metadata entity (datasource connection, table and columns)
   * for the given db/table by querying the meta service.
   *
   * Fix: removed the unused `projectId` local; replaced the isInstanceOf chain
   * with a pattern match.
   *
   * @param kc             khaos context holding scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      extender class name used to resolve the meta implementation
   * @param metaParamsJson raw meta-params json passed from the front end
   * @param classType      strategy instance: SourceStrategy => read params, SinkStrategy => write params
   * @tparam T strategy type
   * @return populated MetaDataEntity
   */
  def getGreenPlumMeta[T](kc: KhaosContext,
                          dbName: String,
                          tblName: String,
                          className: String,
                          metaParamsJson: String,
                          classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    // project id comes from the scheduler, not from the front-end json
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // resolve the physical datasource address
    val metaRequest = buildMetaRequest(kc)
    val dsmetaParams = new MetaParams(metaParamsJson)
    dsmetaParams.setJobProjectId(jobProjectId)

    classType match {
      case _: SourceStrategy => dsmetaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy => dsmetaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _ => // other strategy types carry no datasource params
    }
    metaRequest.setMetaParams(dsmetaParams)
    val ds = DmMetaUtils.getDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    dsmetaParams.buildTableParams(runEnv, dbName, tblName)
    metaRequest.setMetaParams(dsmetaParams)
    val table = DmMetaUtils.getTable(className, metaRequest, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, metaRequest, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val gpConnect = ds.getConnect.asInstanceOf[GreenPlumConnect]
    val mde = new MetaDataEntity
    mde.setDsGreenPlumConnect(gpConnect)
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde
  }

  /**
   * Builds the HAWQ metadata entity (datasource connection, table and columns)
   * for the given db/table by querying the meta service.
   *
   * Fix: removed the unused `projectId` local; replaced the isInstanceOf chain
   * with a pattern match.
   *
   * NOTE(review): unlike the sibling *Meta helpers, this one also sets the job
   * project name on the params — kept as-is; confirm whether that is intentional.
   *
   * @param kc             khaos context holding scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      extender class name used to resolve the meta implementation
   * @param metaParamsJson raw meta-params json passed from the front end
   * @param classType      strategy instance: SourceStrategy => read params, SinkStrategy => write params
   * @tparam T strategy type
   * @return populated MetaDataEntity
   */
  def getHAWQMeta[T](kc: KhaosContext,
                     dbName: String,
                     tblName: String,
                     className: String,
                     metaParamsJson: String,
                     classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    // project id comes from the scheduler, not from the front-end json
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // resolve the physical datasource address
    val metaRequest = buildMetaRequest(kc)
    val dsmetaParams = new MetaParams(metaParamsJson)
    dsmetaParams.setJobProjectId(jobProjectId)
    dsmetaParams.setJobProjectName(jobProjectName)

    classType match {
      case _: SourceStrategy => dsmetaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy => dsmetaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _ => // other strategy types carry no datasource params
    }
    metaRequest.setMetaParams(dsmetaParams)
    val ds = DmMetaUtils.getDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    dsmetaParams.buildTableParams(runEnv, dbName, tblName)
    metaRequest.setMetaParams(dsmetaParams)
    val table = DmMetaUtils.getTable(className, metaRequest, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, metaRequest, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val hawqConnect = ds.getConnect.asInstanceOf[HAWQConnect]
    val mde = new MetaDataEntity
    mde.setDsHAWQConnect(hawqConnect)
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde
  }

  /**
   * Builds the PostgreSQL metadata entity (datasource connection, table and columns)
   * for the given db/table by querying the meta service.
   *
   * Fix: the project id is now read with `getInt(PROJECT_ID, 0)` like every other
   * *Meta helper; the previous `getString(PROJECT_ID).toInt` threw when the
   * scheduler did not pass project_id. Also `var` locals became `val`.
   *
   * @param kc             khaos context holding scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      extender class name used to resolve the meta implementation
   * @param metaParamsJson raw meta-params json passed from the front end
   * @param classType      strategy instance: SourceStrategy => read params, SinkStrategy => write params
   * @tparam T strategy type
   * @return populated MetaDataEntity
   */
  def getPGSqlMeta[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    // consistent with the sibling helpers: default to 0 when the scheduler passes no project id
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // resolve the physical datasource address
    val metaRequest = buildMetaRequest(kc)
    val metaParams = new MetaParams(metaParamsJson)
    metaParams.setJobProjectId(jobProjectId)

    classType match {
      case _: SourceStrategy => metaParams.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy => metaParams.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _ => // other strategy types carry no datasource params
    }
    metaRequest.setMetaParams(metaParams)
    val ds = DmMetaUtils.getDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    // metaParams is the same object instance already attached to metaRequest,
    // so mutating it here is visible to the subsequent table/column lookups
    metaParams.buildTableParams(runEnv, dbName, tblName, TABLE_TYPE.PGSQL)
    val table = DmMetaUtils.getTable(className, metaRequest, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, metaRequest, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val pgsqlConnect = ds.getConnect.asInstanceOf[PGSqlConnect]

    val mde = new MetaDataEntity
    mde.setDsPGSqlConnect(pgsqlConnect)
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde
  }

  /**
   * Builds the Kafka metadata entity (datasource connection, topic "table" and
   * columns) for the given db/topic by querying the meta service.
   *
   * Fixes: removed the unused `projectId` local; simplified the redundant
   * `if (...) true else false` on the default-datasource flag.
   *
   * @param kc             khaos context holding scheduler configuration
   * @param db_name        database name
   * @param topic_name     kafka topic name
   * @param className      extender class name used to resolve the meta implementation
   * @param metaParamsJson raw meta-params json passed from the front end
   * @param classType      strategy instance: SourceStrategy => read params, SinkStrategy => write params
   * @tparam T strategy type
   * @return populated MetaDataEntity
   */
  def getKafkaMeta[T](kc: KhaosContext,
                      db_name: String,
                      topic_name: String,
                      className: String,
                      metaParamsJson: String,
                      classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    // project id comes from the scheduler, not from the front-end json
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // resolve the physical datasource address
    val metaRequest = buildMetaRequest(kc)
    val dsmetaParams = new MetaParams(metaParamsJson)
    dsmetaParams.setJobProjectId(jobProjectId)

    classType match {
      case _: SourceStrategy => dsmetaParams.buildReadDatasourceParams(runEnv, db_name, topic_name)
      case _: SinkStrategy => dsmetaParams.buildWriteDatasourceParams(runEnv, db_name, topic_name)
      case _ => // other strategy types carry no datasource params
    }
    metaRequest.setMetaParams(dsmetaParams)
    val ds = DmMetaUtils.getDatasource(className, metaRequest, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, db_name, topic_name)

    dsmetaParams.buildTopicParams(runEnv, topic_name)
    metaRequest.setMetaParams(dsmetaParams)
    val table = DmMetaUtils.getTable(className, metaRequest, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, db_name, topic_name)

    val columns = DmMetaUtils.getColumns(className, metaRequest, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, db_name, topic_name)

    val kafkaConnect = ds.getConnect.asInstanceOf[KafkaConnect]
    val mde = new MetaDataEntity

    // type 0 marks the platform-default datasource
    val dslist = DmMetaUtils.getDatasourceList(className, metaRequest, classOf[DatasourceListResult])
    mde.setDefaultDs(0.equals(dslist.getType))
    mde.setDatasourceEntiy(ds)
    mde.setDsKafkaConnect(kafkaConnect)
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde
  }

  /**
   * Builds the request json body used to query an unmanaged datasource
   * (single ds id/name, read-and-write privilege, type 1).
   */
  def generateQueryUnmanagedDataSourceJson(metaRequest: DmRequest): String = {
    val metaParams = metaRequest.getMetaParams

    val dsIds = new util.ArrayList[Int]()
    dsIds.add(metaParams.getDsId)
    val dsNames = new util.ArrayList[String]()
    dsNames.add(metaParams.getDsName)

    val body: JSONObject = new JSONObject()
    body.put("env", metaParams.getEnv)
    body.put("projectId", metaParams.getProjectId)
    body.put("type", 1)
    body.put("privilege", "readAndWrite")
    body.put("dsIds", dsIds)
    body.put("dsNames", dsNames)
    body.toJSONString
  }


  /**
   * Assembles the COS metadata entity: datasource connection, database, table,
   * columns, input/output paths, file delimiter and access configuration.
   *
   * @param kc             khaos context holding scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      extender class name used to resolve the meta implementation
   * @param metaParamsJson raw meta-params json passed from the front end
   * @param classType      strategy instance: SourceStrategy => read params, SinkStrategy => write params
   * @tparam T strategy type
   * @return populated MetaDataEntity
   */
  def getCosMeta[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // resolve the physical datasource address
    val request = buildMetaRequest(kc)
    val params = new MetaParams(metaParamsJson)
    params.setJobProjectId(jobProjectId)

    classType match {
      case _: SourceStrategy => params.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy => params.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _ => // other strategy types carry no datasource params
    }
    request.setMetaParams(params)
    val ds = DmMetaUtils.getDatasource(className, request, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    params.buildFileTableParams(runEnv, dbName, tblName)
    request.setMetaParams(params)
    val db = DmMetaUtils.getDatabase(className, request, classOf[DmDatabase])
    checkNullException(db, DmInterfaceEnum.DB, jobProjectName, runEnv, dbName, tblName)

    val table = DmMetaUtils.getTable(className, request, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, request, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val cosConnect = ds.getConnect.asInstanceOf[CosConnect]

    // assemble the entity
    val mde = new MetaDataEntity
    mde.setDsCosConnect(cosConnect)
    mde.setDbEntiy(db)
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde.setCosInputPath(getInputPath(kc, table))
    mde.setCosOutputPath(getCosFileDir(table))
    mde.setCosFileDelimiter(getDelimiter(table))
    mde.setCosAccessConfig(getCosAccessConfig(cosConnect, runEnv, db))
    mde
  }

  /**
   * Appends a trailing wildcard plus the file-format extension (taken from the
   * table's metadata params) to the given path.
   *
   * @param table    table metadata carrying the file format
   * @param filePath path prefix to extend
   * @return path + "*" + extension
   * @throws Exception when the configured file format is not supported
   */
  def addFileExtension(table: DmTable, filePath: String): String = {
    val base = filePath + "*" // wildcard match
    val fileFormat = MetaUtils.getFileFormat(table).trim.toLowerCase
    val extension = fileFormat match {
      case "csv" => ".csv"
      case "orc" => ".orc"
      case "json" => ".json"
      case "txt" => ".txt"
      case _ => throw new Exception(s"=>>> 不支持的文件类型！ type=$fileFormat")
    }
    base + extension
  }

  /**
   * Resolves the COS input path. Prefers the path passed by the scheduler
   * (cos_file_path); when that is absent, falls back to the metadata directory
   * plus a wildcarded "<bizDate>_<batchNo>" file-name pattern.
   */
  private def getInputPath(kc: KhaosContext, table: DmTable): String = {
    val objectKeySuffix = getCosDynamicsPath(kc)
    val cospath = kc.conf.getString(SchedulerConstants.COS_FILE_PATH, "")
    var tableCosPath = getCosFileDir(table)

    // the upload path is normally supplied by the scheduler; empty => use metadata path
    val schedulerPathMissing = cospath == null || cospath.equals("") || cospath.equals("\"\"")
    if (schedulerPathMissing) {
      logInfo("=>>> scheduler cos_file_path args is Empty and get dm filePath！")
      if (!tableCosPath.trim.startsWith("/")) {
        tableCosPath = "/" + tableCosPath
      }
      // wildcard around the ${bizDate}_${batchNo} suffix to match the uploaded file
      val pattern =
        if (tableCosPath.endsWith("/")) tableCosPath + "*" + objectKeySuffix + "*"
        else tableCosPath + "/*" + objectKeySuffix + "*"
      addFileExtension(table, pattern)
    } else if (!cospath.trim.startsWith("/")) {
      // event-triggered ingestion; not rooted at "/": strip the data-exchange interface name
      cospath.substring(cospath.indexOf("/"))
    } else {
      cospath
    }
  }

  /**
   * Builds the COS access configuration from the datasource connection.
   * The bucket name follows the "<dbName>-<env>-<appId>" convention and the
   * endpoint is "cos.<region>.<endpoint>".
   */
  private def getCosAccessConfig(cosConnect: CosConnect, env: String, db: DmDatabase): CosAccessConfig = {
    val appId = cosConnect.getAppId
    val config = new CosAccessConfig
    config.setAccessKey(cosConnect.getAccessKeyId)
    config.setSecretKey(cosConnect.getAccessKeySecret)
    config.setRegion(cosConnect.getRegion)
    config.setBucket(db.getDbName + "-" + env + "-" + appId)
    config.setEndPoint("cos." + cosConnect.getRegion + "." + cosConnect.getEndpoint)
    config.setAppId(appId)
    logInfo(s"cos info:bucketName=>[${config.getBucket}] appId=>[$appId] region=>[${config.getRegion}]   endpoint=>[${config.getEndPoint}]")
    config
  }

  /**
   * Legacy overload kept for interface compatibility: `kc` and `table` were
   * never used. Delegates to the three-argument variant so the bucket/endpoint
   * logic (and its log line) lives in exactly one place.
   */
  private def getCosAccessConfig(kc: KhaosContext, cosConnect: CosConnect, env: String, db: DmDatabase, table: DmTable): CosAccessConfig =
    getCosAccessConfig(cosConnect, env, db)

  /**
   * Builds the dynamic file-name suffix "<bizDate>_<batchNo>" from the
   * scheduler-supplied batch number and business date.
   */
  def getCosDynamicsPath(kc: KhaosContext): String = {
    val batchNo = kc.conf.getString(SchedulerConstants.BATCH_NO)
    val bizDate = kc.conf.getString(SchedulerConstants.BIZ_DATE)
    s"${bizDate}_$batchNo"
  }

  /**
   * Reads the CSV delimiter from the table params. When fixed-length mode is
   * off and the configured delimiter is blank (or the key is absent entirely),
   * falls back to the default separator.
   */
  private def getDelimiter(table: DmTable): String = {
    val mapParams = parseToMap(table.getParams)
    mapParams.get(MetaDataConstants.CSV_DELIMITER) match {
      case Some(configured) =>
        // a blank delimiter is only kept when fixed-length mode is enabled
        if (!isfixedLength(table) && StringUtils.isBlank(configured)) MetaDataConstants.DATA_SEPARATOR
        else configured
      case None =>
        MetaDataConstants.DATA_SEPARATOR
    }
  }

  /**
   * Reads the file format from the table params; empty string when the key is
   * not configured.
   */
  def getFileFormat(table: DmTable): String =
    parseToMap(table.getParams).getOrElse(MetaDataConstants.FILE_FORMAT, "")

  /**
   * Reads the table character set from the table params; defaults to UTF-8
   * when the key is not configured.
   */
  def getTableCharacterSet(table: DmTable): String = {
    val mapParams = parseToMap(table.getParams)
    logInfo(s"=>> table params:$mapParams")
    mapParams.getOrElse(MetaDataConstants.TABLE_CHARSET, "UTF-8")
  }


  /**
   * Whether fixed-length mode is enabled for this table: true only when the
   * FIELD_LENGTH param is present and equals "true" (case-insensitive).
   */
  def isfixedLength(table: DmTable): Boolean =
    parseToMap(table.getParams)
      .get(MetaDataConstants.FIELD_LENGTH)
      .exists(value => "true".equalsIgnoreCase(value))


  /**
   * Reads the COS output directory from the table params; defaults to "/" when
   * the RELATIVE_LOCATION value is missing or blank.
   */
  def getCosFileDir(table: DmTable): String = {
    val mapParams = parseToMap(table.getParams)
    val outputDir = mapParams.get(MetaDataConstants.RELATIVE_LOCATION) match {
      case Some(location) =>
        if (location.trim.isEmpty) "/" else location
      case None =>
        logWarning("数据目录为空！")
        "/"
    }
    // collapse "//" into "/" so COS paths stay uniform
    outputDir.replaceAll("//", "/")
  }


  /**
   * Resolves the real bucket name for the given environment (test/online) from
   * the table params; empty string when not configured for that environment.
   * e.g. bucket="21202F2938212B3E22272626252E434D"
   */
  private def getBucketName(env: String, table: DmTable): String = {
    val mapParams = parseToMap(table.getParams)
    val bucketName =
      if (env.equalsIgnoreCase(MetaDataConstants.ENV_TEST))
        mapParams.getOrElse(MetaDataConstants.TEST_BUCKET_NAME, "")
      else if (env.equalsIgnoreCase(MetaDataConstants.ENV_ONLINE))
        mapParams.getOrElse(MetaDataConstants.ONLINE_BUCKET_NAME, "")
      else
        ""
    logInfo(s"bucketName:==>[$bucketName]")
    bucketName
  }

  /**
   * Converts a list of {pKey, pValue} entries (table/db params returned by the
   * meta service) into an immutable map keyed by pKey.
   */
  private def parseToMap(params: util.List[util.Map[String, AnyRef]]) = {

    import scala.collection.JavaConverters._

    params.asScala.foldLeft(scala.collection.immutable.HashMap.empty[String, String]) { (acc, entry) =>
      acc + (entry.get("pKey").toString -> entry.get("pValue").toString)
    }
  }


  /**
   * Debug/scratch helper that probes dependency-related fields (ds_id, ds_type,
   * ds_name, project_id) out of a job json string via json4s.
   *
   * NOTE(review): this looks like unfinished work-in-progress code — the return
   * type is Unit (the intended DbInfo is commented out), every extracted value
   * is unused, `projectId2` duplicates `projectId`, the `try` around `meta` has
   * no catch clause, and the trailing `null` is discarded. Confirm whether this
   * can be removed or should be completed.
   *
   * @param mid job json string (presumably the full strategy/job definition — TODO confirm)
   */
  //add by 20190802
  def getDependenceInfo(mid: String /*,kconf:KhaosConf*/): /*DbInfo*/ Unit = {

    implicit val formats = DefaultFormats
    // json4s parse; second arg presumably useBigDecimalForDouble — confirm against json4s version
    val parser = parse(mid, true)

    // try-without-catch: only extracts the subtree; result is never used
    val meta = try {
      (parser \ "strategy" \ "config" \ "extender" \ "meta")
    }

    // debug output of the raw input (labelled "meta" but prints the whole json)
    println("meta=" + mid)

    // each extraction falls back to the sentinel "null-" on any failure
    val dsId = try {
      (parser \ "meta" \ "params" \ "ds_id").extract[String]
    } catch {
      case ex: Exception => "null-"
    }

    val dsType = try {
      (parser \ "meta" \ "params" \ "ds_type").extract[String]
    } catch {
      case ex: Exception => "null-"
    }

    val dsName = try {
      (parser \ "meta" \ "params" \ "ds_name").extract[String]
    } catch {
      case ex: Exception => "null-"
    }

    val projectId = try {
      (parser \ "meta" \ "params" \ "project_id").extract[String]
    } catch {
      case ex: Exception => "null-"
    }

    // NOTE(review): identical to projectId above — duplicate extraction
    val projectId2 = try {
      (parser \ "meta" \ "params" \ "project_id").extract[String]
    } catch {
      case ex: Exception => "null-"
    }

    // discarded; the method returns Unit
    null
  }

  /**
   * Returns a copy of the meta-params json with "project_id" overwritten by the
   * scheduler-supplied project id (the front-end value is not trusted).
   */
  def changeProId(metaParamsJson: String, projectId: String): String = {
    val metaMap: util.Map[String, Object] = JSON.parseObject(metaParamsJson, classOf[util.Map[String, Object]])
    metaMap.put("project_id", projectId)
    new JSONObject(metaMap).toJSONString
  }


  /**
   * Assembles the KS3 metadata entity: datasource connection, database, table,
   * columns, input/output paths, file delimiter and access configuration.
   *
   * @param kc             khaos context
   * @param dbName         database name
   * @param tblName        table name
   * @param className      extender class name
   * @param metaParamsJson meta-params json
   * @param classType      strategy instance: SourceStrategy => read params, SinkStrategy => write params
   * @return populated MetaDataEntity
   */
  def getKs3Meta[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv: String = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId: Int = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName: String = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // resolve the physical datasource address
    val request: DmRequest = buildMetaRequest(kc)
    val params = new MetaParams(metaParamsJson)
    params.setJobProjectId(jobProjectId)

    classType match {
      case _: SourceStrategy => params.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy => params.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _ => // other strategy types carry no datasource params
    }
    request.setMetaParams(params)
    val ds: DmDatasource = DmMetaUtils.getDatasource(className, request, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    params.buildFileTableParams(runEnv, dbName, tblName)
    request.setMetaParams(params)
    val db: DmDatabase = DmMetaUtils.getDatabase(className, request, classOf[DmDatabase])
    checkNullException(db, DmInterfaceEnum.DB, jobProjectName, runEnv, dbName, tblName)

    val table: DmTable = DmMetaUtils.getTable(className, request, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns: util.List[DmTableColumn] = DmMetaUtils.getColumns(className, request, classOf[util.List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val ks3Connect: Ks3Connect = ds.getConnect.asInstanceOf[Ks3Connect]

    // assemble the entity
    val mde = new MetaDataEntity
    mde.setDsKs3Connect(ks3Connect)
    mde.setDbEntiy(db)
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde.setKs3InputPath(getKs3InputPath(kc, table))
    mde.setKs3OutputPath(getKs3FileDir(table))
    mde.setKs3FileDelimiter(getKs3Delimiter(table))
    mde.setKs3AccessConfig(getKs3AccessConfig(ks3Connect, runEnv, db))
    mde
  }

  /**
   * Reads the KS3 CSV delimiter from the table params. When fixed-length mode
   * is off and the configured delimiter is blank (or the key is absent),
   * falls back to the default KS3 separator.
   */
  private def getKs3Delimiter(table: DmTable): String = {
    val mapParams = parseToMap(table.getParams)
    mapParams.get(MetaDataConstants.KS3_CSV_DELIMITER) match {
      case Some(configured) =>
        // a blank delimiter is only kept when fixed-length mode is enabled
        if (!isfixedLength4Ks3(table) && StringUtils.isBlank(configured)) MetaDataConstants.KS3_DATA_SEPARATOR
        else configured
      case None =>
        MetaDataConstants.KS3_DATA_SEPARATOR
    }
  }

  /**
   * Resolves the KS3 input path. Prefers the path passed by the scheduler
   * (ks3_file_path); when that is absent, falls back to the metadata directory
   * plus a wildcarded "<bizDate>_<batchNo>" file-name pattern.
   */
  private def getKs3InputPath(kc: KhaosContext, table: DmTable): String = {
    val objectKeySuffix = getKs3DynamicsPath(kc)
    val ks3Path = kc.conf.getString(SchedulerConstants.KS3_FILE_PATH, "")
    var tableKs3Path = getKs3FileDir(table)

    // the upload path is normally supplied by the scheduler; empty => use metadata path
    val schedulerPathMissing = ks3Path == null || ks3Path.equals("") || ks3Path.equals("\"\"")
    if (schedulerPathMissing) {
      logInfo("=>>> scheduler ks3_file_path args is Empty and get dm filePath！")
      if (!tableKs3Path.trim.startsWith("/")) {
        tableKs3Path = "/" + tableKs3Path
      }
      // wildcard around the ${bizDate}_${batchNo} suffix to match the uploaded file
      val pattern =
        if (tableKs3Path.endsWith("/")) tableKs3Path + "*" + objectKeySuffix + "*"
        else tableKs3Path + "/*" + objectKeySuffix + "*"
      addKs3FileExtension(table, pattern)
    } else if (!ks3Path.trim.startsWith("/")) {
      // event-triggered ingestion; not rooted at "/": strip the data-exchange interface name
      ks3Path.substring(ks3Path.indexOf("/"))
    } else {
      ks3Path
    }
  }

  /**
   * Builds the KS3 access configuration from the datasource connection.
   * The bucket name follows the "<dbName>-<env>" convention.
   */
  private def getKs3AccessConfig(ks3Connect: Ks3Connect, env: String, db: DmDatabase): Ks3AccessConfig = {
    val config = new Ks3AccessConfig
    config.setAccessKey(ks3Connect.getAccessKeyId)
    config.setSecretKey(ks3Connect.getAccessKeySecret)
    config.setEndPoint(ks3Connect.getEndpoint)
    config.setBucket(db.getDbName + "-" + env)
    logInfo(s"ks3 info:bucketName=>[${config.getBucket}]  endpoint=>[${config.getEndPoint}]")
    config
  }

  /**
   * Builds the dynamic file-name suffix "<bizDate>_<batchNo>" from the
   * scheduler-supplied batch number and business date.
   */
  def getKs3DynamicsPath(kc: KhaosContext): String = {
    val batchNo = kc.conf.getString(SchedulerConstants.BATCH_NO)
    val bizDate = kc.conf.getString(SchedulerConstants.BIZ_DATE)
    s"${bizDate}_$batchNo"
  }

  /**
   * Reads the KS3 output directory from the table params; defaults to "/" when
   * the KS3_RELATIVE_LOCATION value is missing or blank.
   */
  def getKs3FileDir(table: DmTable): String = {
    val mapParams = parseToMap(table.getParams)
    val outputDir = mapParams.get(MetaDataConstants.KS3_RELATIVE_LOCATION) match {
      case Some(location) =>
        if (location.trim.isEmpty) "/" else location
      case None =>
        logWarning("数据目录为空！")
        "/"
    }
    // collapse "//" into "/" so object-store paths stay uniform
    outputDir.replaceAll("//", "/")
  }

  /**
   * Appends a trailing wildcard plus the KS3 file-format extension (taken from
   * the table's metadata params) to the given path.
   *
   * @param table    table metadata carrying the file format
   * @param filePath path prefix to extend
   * @return path + "*" + extension
   * @throws Exception when the configured file format is not supported
   */
  def addKs3FileExtension(table: DmTable, filePath: String): String = {
    val base = filePath + "*" // wildcard match
    val fileFormat = MetaUtils.getKs3FileFormat(table).trim.toLowerCase
    val extension = fileFormat match {
      case "csv" => ".csv"
      case "json" => ".json"
      case "txt" => ".txt"
      case _ => throw new Exception(s"=>>> 不支持的文件类型！ type=$fileFormat")
    }
    base + extension
  }

  /**
   * Reads the KS3 file format from the table params; empty string when the key
   * is not configured.
   */
  def getKs3FileFormat(table: DmTable): String =
    parseToMap(table.getParams).getOrElse(MetaDataConstants.KS3_FILE_FORMAT, "")

  /**
   * Whether fixed-length mode is enabled for this KS3 table: true only when the
   * KS3_FIELD_LENGTH param is present and equals "true" (case-insensitive).
   */
  def isfixedLength4Ks3(table: DmTable): Boolean =
    parseToMap(table.getParams)
      .get(MetaDataConstants.KS3_FIELD_LENGTH)
      .exists(value => "true".equalsIgnoreCase(value))

  /**
   * Assembles the HDFS metadata entity: datasource connection, database, table,
   * columns, input/output paths and file delimiter.
   *
   * @param kc             khaos context holding scheduler configuration
   * @param dbName         database name
   * @param tblName        table name
   * @param className      extender class name used to resolve the meta implementation
   * @param metaParamsJson raw meta-params json passed from the front end
   * @param classType      strategy instance: SourceStrategy => read params, SinkStrategy => write params
   * @tparam T strategy type
   * @return populated MetaDataEntity
   */
  def getHdfsMeta[T](kc: KhaosContext, dbName: String, tblName: String, className: String, metaParamsJson: String, classType: T): MetaDataEntity = {
    val runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    val jobProjectId = kc.conf.getInt(SchedulerConstants.PROJECT_ID, 0)
    val jobProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)

    // resolve the physical datasource address
    val request = buildMetaRequest(kc)
    val params = new MetaParams(metaParamsJson)
    params.setJobProjectId(jobProjectId)

    classType match {
      case _: SourceStrategy => params.buildReadDatasourceParams(runEnv, dbName, tblName)
      case _: SinkStrategy => params.buildWriteDatasourceParams(runEnv, dbName, tblName)
      case _ => // other strategy types carry no datasource params
    }
    request.setMetaParams(params)
    val ds = DmMetaUtils.getDatasource(className, request, classOf[DmDatasource])
    checkNullException(ds, DmInterfaceEnum.DS, jobProjectName, runEnv, dbName, tblName)

    params.buildHDFSTableParams(runEnv, dbName, tblName)
    request.setMetaParams(params)
    val db = DmMetaUtils.getDatabase(className, request, classOf[DmDatabase])
    checkNullException(db, DmInterfaceEnum.DB, jobProjectName, runEnv, dbName, tblName)

    val table = DmMetaUtils.getTable(className, request, classOf[DmTable])
    checkNullException(table, DmInterfaceEnum.TABLE, jobProjectName, runEnv, dbName, tblName)

    val columns = DmMetaUtils.getColumns(className, request, classOf[List[DmTableColumn]])
    checkNullException(columns, DmInterfaceEnum.COLUMNS, jobProjectName, runEnv, dbName, tblName)

    val hdfsConnect = ds.getConnect.asInstanceOf[HdfsConnect]

    // assemble the entity
    val mde = new MetaDataEntity
    mde.setDsHdfsConnect(hdfsConnect)
    mde.setDbEntiy(db)
    mde.setTableEntiy(table)
    mde.setColumnEntiy(columns)
    mde.setHdfsInputPath(getHdfsInputPath(kc, table))
    mde.setHdfsOutputPath(getHDFSFileDir(table))
    mde.setHdfsFileDelimiter(getHdfsDelimiter(table))
    mde
  }


  /**
   * Resolves the HDFS input path. Prefers the path passed by the scheduler
   * (hdfs_file_path); when that is absent, falls back to the metadata
   * directory (prefixed with "/" when needed).
   */
  private def getHdfsInputPath(kc: KhaosContext, table: DmTable): String = {
    val hdfspath = kc.conf.getString(SchedulerConstants.HDFS_FILE_PATH, "")
    val tableHdfsPath = getHDFSFileDir(table)

    // the upload path is normally supplied by the scheduler; empty => use metadata path
    val schedulerPathMissing = hdfspath == null || hdfspath.equals("") || hdfspath.equals("\"\"")
    if (schedulerPathMissing) {
      logInfo("=>>> scheduler hdfs_file_path args is Empty and get dm filePath！")
      if (tableHdfsPath.trim.startsWith("/")) tableHdfsPath
      else "/" + tableHdfsPath
    } else if (!hdfspath.trim.startsWith("/")) {
      // event-triggered ingestion; not rooted at "/": strip the data-exchange interface name
      hdfspath.substring(hdfspath.indexOf("/"))
    } else {
      hdfspath
    }
  }

  /**
   * Look up the file character set declared in the table's metadata params.
   *
   * @param table table metadata whose params may carry a charset entry
   * @return the declared charset, or "UTF-8" when none is present
   */
  def getHdfsCharacterSet(table: DmTable): String = {
    val mapParams = parseToMap(table.getParams)
    logInfo(s"=>> table params:$mapParams")
    // Absent key falls back to the UTF-8 default.
    mapParams.getOrElse(MetaDataConstants.HDFS_TABLE_CHARSET, "UTF-8")
  }

  /**
   * Build the dynamic file-name fragment from scheduler arguments.
   *
   * @param kc khaos context carrying BIZ_DATE and BATCH_NO
   * @return the "bizDate_batchNo" fragment used to match collected files
   */
  def getHdfsDynamicsPath(kc: KhaosContext): String = {
    val bizDate = kc.conf.getString(SchedulerConstants.BIZ_DATE)
    val batchNo = kc.conf.getString(SchedulerConstants.BATCH_NO)
    s"${bizDate}_$batchNo"
  }

  /**
   * Look up the file format declared in the table's metadata params.
   *
   * @param table table metadata whose params may carry a file-format entry
   * @return the declared format, or "" when none is present
   */
  def getHdfsFileFormat(table: DmTable): String = {
    val mapParams = parseToMap(table.getParams)
    // NOTE(review): the previous code mentioned an "others" default, but it
    // was unreachable (only used when the key was already present), so the
    // effective absent-key default is "" — confirm that is intended.
    mapParams.getOrElse(MetaDataConstants.HDFS_FILE_FORMAT, "")
  }


  /**
   * Resolve the field delimiter for an HDFS file from the table's metadata.
   *
   * When the delimiter key exists but is blank and fixed-length mode is NOT
   * enabled, the platform default separator is used; with fixed-length mode
   * enabled, a blank delimiter is returned as-is.
   *
   * @param table table metadata whose params may carry a delimiter entry
   * @return the delimiter to use when parsing the file
   */
  private def getHdfsDelimiter(table: DmTable): String = {
    val mapParams = parseToMap(table.getParams)
    mapParams.get(MetaDataConstants.HDFS_CSV_DELIMITER) match {
      case Some(delimiter) =>
        // Fixed-length disabled + blank metadata delimiter -> default separator.
        if (!isfixedHdfsLength(table) && StringUtils.isBlank(delimiter)) {
          MetaDataConstants.HDFS_DATA_SEPARATOR
        } else {
          delimiter
        }
      case None =>
        // No delimiter key at all -> default separator.
        MetaDataConstants.HDFS_DATA_SEPARATOR
    }
  }


  /**
   * Check whether fixed-length field mode is enabled in the table's metadata.
   *
   * @param table table metadata whose params may carry the fixed-length switch
   * @return true only when the switch key exists and equals "true" (case-insensitive)
   */
  def isfixedHdfsLength(table: DmTable): Boolean = {
    val mapParams = parseToMap(table.getParams)
    // Missing key counts as "not fixed-length".
    mapParams.get(MetaDataConstants.FIELD_LENGTH).exists(v => "true".equalsIgnoreCase(v))
  }


  /**
   * Read the HDFS folder path (directory) from the data-management metadata.
   *
   * @param table table metadata whose params may carry the folder location
   * @return the configured directory, or "/" when missing/blank; doubled
   *         slashes are collapsed to keep HDFS paths uniform
   */
  def getHDFSFileDir(table: DmTable): String = {
    val mapParams = parseToMap(table.getParams)
    val outputDir = mapParams.get(MetaDataConstants.HDFS_FOLDER_PATH) match {
      case Some(location) =>
        logInfo("folderLocation==>" + location.trim)
        // Blank location falls back to the root directory.
        if (location.trim.isEmpty) "/" else location
      case None =>
        logWarning("数据目录为空！")
        "/"
    }
    // Collapse "//" into "/": HDFS treats them differently and upstream
    // metadata is inconsistent about trailing/leading slashes.
    outputDir.replaceAll("//", "/")
  }

  /**
   * Fail fast when a data-management interface returned null.
   *
   * @param _object       the interface result to check
   * @param interfaceType which interface was called (AUTH, DS, DSLIST, DB, TABLE, COLUMNS)
   * @param projectName   job project name, for the error message
   * @param runEnv        run environment, for the error message
   * @param db            database name, for the error message
   * @param table         table name, for the error message
   * @tparam T result type of the interface call
   * @throws Exception when `_object` is null
   */
  def checkNullException[T](_object: T, interfaceType: DmInterfaceEnum, projectName: String = "", runEnv: String = "", db: String = "", table: String = ""): Unit = {
    if (_object == null) {
      // Build the message only on the failure path.
      val exceptionMessage = s"请求数据管理${interfaceType}元数据接口异常或者接口返回结果为NULL！请求参数：project=$projectName env=$runEnv dbName=$db table=$table"
      throw new Exception(exceptionMessage)
    }
  }
}

