package com.kingsoft.dc.khaos.module.spark.source

import java.text.SimpleDateFormat
import java.util
import java.util.{Date, Properties}

import com.ksyun.bigdata.di.AnnotationUtils

//import com.alibaba.druid.DbType
import com.alibaba.druid.sql.SQLUtils
import com.alibaba.druid.sql.ast.SQLStatement
import com.alibaba.druid.sql.dialect.hive.visitor.HiveSchemaStatVisitor
import com.alibaba.druid.sql.dialect.mysql.visitor.MySqlSchemaStatVisitor
import com.alibaba.druid.sql.dialect.oracle.visitor.OracleSchemaStatVisitor
import com.alibaba.druid.sql.dialect.postgresql.visitor.PGSchemaStatVisitor
import com.alibaba.druid.stat.TableStat
import com.alibaba.druid.util.JdbcConstants
import com.google.common.base.Splitter
import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.metadata.{Dependency, KhaosStructField}
import com.kingsoft.dc.khaos.module.spark.constants.{ColumnType, SqlConstants}
import com.kingsoft.dc.khaos.module.spark.metadata.source.{ExtractFieldInfo, SqlExtendedAuthParams, SqlSourceConfig}
import com.kingsoft.dc.khaos.module.spark.util.MetaUtils
import com.kingsoft.dc.khaos.util.Logging
import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.types.{DataType, DataTypes}
import org.apache.spark.sql.{Column, DataFrame}
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.parse

import java.util.regex.Pattern
import scala.collection.mutable
import scala.collection.mutable.{ArrayBuffer, ListBuffer}

/**
 * create by yansu on 2020/03/17 11:44
 */
class SqlSource extends SourceStrategy with Logging with Serializable {
  // Khaos runtime context; injected by source() before any helper method runs.
  private var _kc: KhaosContext = null
  // Parsed module configuration, cached for use by init()/source().
  private var _sql_config: SqlSourceConfig = null
  // MySQL JDBC driver class, loaded from "module.sql.source." properties.
  private var _mysql_jdbc_driver = ""
  // Extra MySQL JDBC URL parameters (e.g. "?useSSL=false"), may be empty.
  private var _mysql_jdbc_url_param = ""
  // Extended auth parameters (ds_id / ds_type / ds_name) extracted from the config.
  private var params: SqlExtendedAuthParams = null

  // Supported data source type identifiers.
  object DsTypeEnum {
    val GREENPLUM = "GreenPlum"
    val RULERDB = "RulerDB"
    val KDW = "KDW"
    val HIVE = "hive"
    val MYSQL = "mysql"
    val ORACLE = "oracle"
  }

  /**
   * Data extraction entry point: parses the module config, normalizes the SQL,
   * authenticates every table it references, executes it on the configured
   * data source and returns the result restricted to the configured extract
   * fields, cast to their configured data types.
   *
   * @param kc         Khaos runtime context (Spark session, job configuration).
   * @param module_id  module instance id (part of the strategy interface, unused here).
   * @param config     JSON-encoded SqlSourceConfig.
   * @param dependence upstream dependency descriptor (unused here).
   * @return DataFrame with exactly the configured extract fields.
   */
  override def source(kc: KhaosContext,
                      module_id: String,
                      config: String,
                      dependence: Dependency): DataFrame = {
    implicit val formats = DefaultFormats
    val sql_config: SqlSourceConfig = parse(config, true).extract[SqlSourceConfig]
    this._sql_config = sql_config
    this._kc = kc

    // Normalize the SQL (strip comments, forbid DML/DDL, expand time macros).
    val new_sql = standardSQL(_sql_config.sql)
    params = _sql_config.extender.auth.params.extract[SqlExtendedAuthParams]
    val metaParamsStr =
      s"""
         |{"ds_id":"${params.ds_id}","ds_type":"${params.ds_type}","ds_name":"${params.ds_name}"}
              """.stripMargin
    // Resolve the db.table names referenced by the SQL (needed for auth).
    val dbAndTableNameList = resolveDBAndTableOfSQL(params.ds_type, new_sql)

    // Authenticate access and fetch connection metadata for the data source.
    val map = init(dbAndTableNameList, params.ds_type, metaParamsStr)
    // Execute the SQL; JDBC sources wrap it as a sub-query aliased tmp_spark_sql.
    val res: DataFrame = params.ds_type match {
      case DsTypeEnum.MYSQL =>
        readFromMysql(s"(${new_sql}) tmp_spark_sql", map)
      case DsTypeEnum.ORACLE =>
        readFromOracle(s"(${new_sql}) tmp_spark_sql", map)
      // GreenPlum, RulerDB and KDW all speak the PostgreSQL protocol.
      case DsTypeEnum.GREENPLUM | DsTypeEnum.RULERDB | DsTypeEnum.KDW =>
        readFromMPP(s"(${new_sql}) tmp_spark_sql", map)
      case DsTypeEnum.HIVE =>
        readFromHive(new_sql, map)
      case _ =>
        throw new IllegalArgumentException("暂不支持该类型数据源!")
    }
    import org.apache.spark.sql.functions._
    // Keep only the configured extract fields, in configured order.
    val colArr = _sql_config.extract_fields.map(ef => col(ef.field))
    val projected = res.select(colArr: _*)
    convertDataType(_sql_config.extract_fields, projected)
  }

  /**
   * Normalizes the user-supplied SQL before execution:
   *  - strips full-line "--" comments,
   *  - rejects blank SQL and SQL containing write keywords (read-only source),
   *  - when the SQL carries a leading "offset=...,business=..." statement
   *    (separated by ';'), computes the business time from the job schedule
   *    and substitutes it into the SQL via String.format placeholders.
   *
   * @param sql raw SQL from the module config; may be null or empty.
   * @return the normalized, ready-to-run SQL.
   * @throws IllegalArgumentException if the SQL is blank or contains forbidden keywords.
   */
  def standardSQL(sql: String): String = {
    log.info("originSQL  ==> " + sql)
    //TODO comment-handling logic for the sqlSource operator

    // Drop full-line "--" comments; keep everything else verbatim.
    // Option(sql) guards against a null input, which previously slipped past
    // the broken `equals(null)` check and blew up with an NPE instead of the
    // intended IllegalArgumentException.
    val extractSQL = Option(sql).getOrElse("").split("[\n]", -1).filter(line => {
      !line.trim().startsWith("--")
    }).mkString("\n")
    log.info("extractSQL  ==> " + extractSQL)
    val lower_sql = extractSQL.toLowerCase
    log.info("standarSQL  ==> " + lower_sql)
    if (StringUtils.isBlank(lower_sql)) {
      throw new IllegalArgumentException("SQL为空,请重新填写!")
    } else if (lower_sql.contains("create") ||
      lower_sql.contains("drop") ||
      lower_sql.contains("alter") ||
      lower_sql.contains("delete") ||
      lower_sql.contains("truncate ") ||
      lower_sql.contains("insert") ||
      lower_sql.contains("update ") ||
      lower_sql.contains("copy")) {
      // NOTE(review): substring match, so e.g. a column named "created_at"
      // is also rejected — kept as-is to preserve existing behavior.
      throw new IllegalArgumentException("sql中含有非法字段!")
    }

    var standar_sql = extractSQL
    var offset = ""
    var business = ""
    if (standar_sql.contains(";")) {
      // Trim a single trailing ';' so the splitter gets no empty tail.
      if (standar_sql.endsWith(";")) {
        standar_sql = standar_sql.substring(0, standar_sql.length - 1)
      }
      val sqlArrays = AnnotationUtils.removeCommentGetMultiStatement(standar_sql)
      if (sqlArrays.size == 1) {
        standar_sql = sqlArrays(0)
      } else {
        // First statement is a parameter header like "offset=15,business=normal_d".
        val args = sqlArrays(0).replace(" ", "")
        standar_sql = sqlArrays(1)
        val holder = Splitter.on(",").trimResults.withKeyValueSeparator("=").split(s"${args}")
        offset = holder.get("offset")
        business = holder.get("business")
        // Job schedule date + time (yyyyMMddHHmm), floored to a 5-minute boundary.
        val day = _kc.conf.getString("job.biz.date")
        var time = day + _kc.conf.getString("job.biz.time").replace(":", "").substring(0, 4)
        val timel = time.toLong / 5 * 5
        time = timel.toString
        val dateFormat = new SimpleDateFormat("yyyyMMddHHmm")
        var timeInMillis = 0L
        if (!StringUtils.isEmpty(time)) {
          if (time.length == 12) timeInMillis = dateFormat.parse(time).getTime
          else throw new RuntimeException("时间必须是12位字符串")
        }
        var offsetLong = 0L
        try {
          if (offset == "") {
            offset = "15"
          }
          offsetLong = offset.toLong
        } catch {
          case e: Exception =>
            // Malformed offset falls back to the 15-unit default.
            offsetLong = 15
        }

        // Business time = schedule time minus the offset, in the unit implied
        // by the business mode: day (normal_d), hour (normal_h) or minute (normal_m).
        if (business.equalsIgnoreCase("normal_d")) {
          timeInMillis = timeInMillis - 60 * 1000 * 60 * 24 * offsetLong
        } else if (business.equalsIgnoreCase("normal_h")) {
          timeInMillis = timeInMillis - 60 * 1000 * 60 * offsetLong
        } else if (business.equalsIgnoreCase("normal_m")) {
          timeInMillis = timeInMillis - 60 * 1000 * offsetLong
        }
        // Re-floor to a 5-minute boundary after applying the offset.
        timeInMillis = timeInMillis / 60 / 5 / 1000 * 60 * 5 * 1000
        val formattedTime = dateFormat.format(new Date(timeInMillis))
        // Substitute yyMMdd / HH / mm into the SQL's format placeholders.
        standar_sql = String.format(standar_sql,
          formattedTime.subSequence(2, 8),
          formattedTime.subSequence(8, 10),
          formattedTime.subSequence(10, 12))

      }
    }
    // Was println; route through the logger like the rest of this class.
    log.info("offset:" + offset + " " + " business:" + business)
    standar_sql
  }


  /**
   * Splits a qualified table reference ("db.table") into database and table
   * name, normalizing per data source dialect:
   *  - MySQL: with more than one '.', the last segment is the table name and
   *    everything before it is the database name;
   *  - Hive: plain "db.table" split;
   *  - others (PostgreSQL family): surrounding double quotes are stripped.
   *
   * @param dbtb qualified name; expected to contain at least one '.'.
   * @return map with keys "dbname" and "tablename".
   */
  def getRegexDbTb(dbtb: String): mutable.HashMap[String, String] = {
    val dbTbMap = new mutable.HashMap[String, String]()
    // Split once instead of re-splitting for every field access.
    val parts = dbtb.split("\\.")
    var dbname = ""
    var tablename = ""
    if (params.ds_type.equals(DsTypeEnum.MYSQL)) {
      if (parts.length > 2) {
        // Database name itself contains dots: last segment is the table name.
        tablename = parts(parts.length - 1)
        dbname = dbtb.substring(0, dbtb.length - tablename.length - 1)
      } else {
        dbname = parts(0)
        tablename = parts(1)
      }
    } else if (params.ds_type.equals(DsTypeEnum.HIVE)) {
      dbname = parts(0)
      tablename = parts(1)
    } else {
      // PostgreSQL-family identifiers may be double-quoted.
      dbname = parts(0).stripSuffix("\"").stripPrefix("\"")
      tablename = parts(1).stripSuffix("\"").stripPrefix("\"")
    }
    dbTbMap.put("dbname", dbname)
    dbTbMap.put("tablename", tablename)
    dbTbMap
  }

  /**
   * Authenticates read access for every table referenced by the SQL, then
   * loads the connection metadata of the data source into a config map whose
   * keys are prefixed per source family (mysql_/oracle_/mpp_/hive...).
   *
   * NOTE(review): the `params` parameter (a JSON meta string) shadows the
   * `params: SqlExtendedAuthParams` field of this class — kept to preserve
   * the existing signature.
   *
   * @param dbAndTableNameList qualified "db.table" names extracted from the SQL.
   * @param ds_type data source type; one of DsTypeEnum.
   * @param params  JSON string carrying ds_id/ds_type/ds_name for the meta service.
   * @return mutable map of connection properties for the source type.
   */
  def init(dbAndTableNameList: List[String], ds_type: String, params: String) = {
    loadProperties(_kc)

    var db_name = ""
    var table_name = ""

    // Authenticate each referenced table. The last resolved db/table pair is
    // reused below when fetching the connection metadata (existing behavior).
    for (elem <- dbAndTableNameList) {
      log.info("dbAndTable===> " + elem)
      val dbTbMap = getRegexDbTb(elem)
      db_name = dbTbMap.getOrElse("dbname", "")
      table_name = dbTbMap.getOrElse("tablename", "")
      MetaUtils.checkReadAuth(_kc,
        db_name,
        table_name,
        _sql_config.extender.auth.clazz,
        params)
    }

    val map_config = new mutable.HashMap[String, String]()
    ds_type match {
      case DsTypeEnum.MYSQL =>
        val mysqlConnect = MetaUtils.getMysqlMeta(_kc,
          db_name,
          table_name,
          "com.kingsoft.dc.khaos.extender.meta.impl.MysqlDmMetaImpl",
          params,
          this).getDsMysqlConnect()
        map_config.put("mysql_host", mysqlConnect.getHost)
        map_config.put("mysql_port", mysqlConnect.getPort)
        map_config.put("mysql_username", mysqlConnect.getUserName)
        map_config.put("mysql_password", mysqlConnect.getPassWord)
        map_config.put("mysql_jdbc_driver", _mysql_jdbc_driver)
        map_config.put("mysql_jdbc_param", _mysql_jdbc_url_param)
      case DsTypeEnum.ORACLE =>
        val oracleConnect = MetaUtils.getOracleMeta(_kc,
          db_name,
          table_name,
          "com.kingsoft.dc.khaos.extender.meta.impl.OracleDmMetaImpl",
          params,
          this).getDsOracleConnect()
        map_config.put("oracle_host", oracleConnect.getHost)
        map_config.put("oracle_connectType", oracleConnect.getConnectType)
        map_config.put("oracle_username", oracleConnect.getUsername)
        map_config.put("oracle_password", oracleConnect.getPassword)
        map_config.put("oracle_instanceName", oracleConnect.getInstanceName)
      // KDW, GreenPlum and RulerDB share the MPP (PostgreSQL-protocol) metadata;
      // the three previously duplicated case bodies were byte-identical.
      case DsTypeEnum.KDW | DsTypeEnum.GREENPLUM | DsTypeEnum.RULERDB =>
        val mppConnect = MetaUtils.getMPPMeta(_kc,
          db_name,
          table_name,
          "com.kingsoft.dc.khaos.extender.meta.impl.MppDmMetaImpl",
          params,
          this).getDsMppConnect()
        map_config.put("mpp_host", mppConnect.getHost)
        map_config.put("mpp_port", mppConnect.getPort)
        map_config.put("mpp_username", mppConnect.getUsername)
        map_config.put("mpp_password", mppConnect.getPassword)
        map_config.put("mpp_instanceName", mppConnect.getInstanceName)
        map_config.put("mpp_sourceMode", mppConnect.getSourceMode)
      case DsTypeEnum.HIVE =>
        val hiveDefault = MetaUtils.getHiveMeta(_kc,
          db_name,
          table_name,
          "com.kingsoft.dc.khaos.extender.meta.impl.HiveDmMetaImpl",
          params,
          this).getDefaultDs().toString
        map_config.put("hiveDefault", hiveDefault)
      case _ =>
        throw new IllegalArgumentException("暂不支持该数据源类型!")
    }
    map_config
  }

  /**
   * Reads the given (sub)query from MySQL over JDBC using the connection
   * details carried in the config map produced by init().
   */
  def readFromMysql(sql: String, mysql_config: mutable.HashMap[String, String]): DataFrame = {
    val host = mysql_config("mysql_host")
    val port = mysql_config("mysql_port")
    val urlParams = mysql_config("mysql_jdbc_param")
    val url = s"jdbc:mysql://$host:$port$urlParams"

    val connProps = new Properties
    connProps.put("driver", mysql_config("mysql_jdbc_driver"))
    connProps.put("user", mysql_config("mysql_username"))
    connProps.put("password", mysql_config("mysql_password"))
    _kc.sparkSession.read.jdbc(url, sql, connProps)
  }

  /**
   * Builds the Oracle JDBC URL for the given connect type. `host` already
   * carries "ip:port" (comma-separated "ip:port" pairs for RAC).
   *
   * @param connectType one of "SID", "ServiceName", "RAC".
   * @return the thin-driver JDBC URL.
   * @throws IllegalArgumentException for an unrecognized connect type
   *         (previously returned null, which failed later with an opaque error).
   */
  def getOracleConnectTypeURL(host: String, instanceName: String, connectType: String): String = {
    connectType match {
      case "SID" =>
        s"jdbc:oracle:thin:@$host:$instanceName"
      case "ServiceName" =>
        s"jdbc:oracle:thin:@//$host/$instanceName"
      case "RAC" =>
        // One (ADDRESS=(PROTOCOL=TCP)(HOST=x)(PORT=y)) entry per "ip:port" pair.
        val address = host.split(",").map { pair =>
          s"(ADDRESS=(PROTOCOL=TCP)(HOST=${pair.split(":")(0)})(PORT=${pair.split(":")(1)}))"
        }.mkString
        s"jdbc:oracle:thin:@(DESCRIPTION=(ADDRESS_LIST=${address})(LOAD_BALANCE=yes)(FAILOVER=ON)(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=$instanceName)))"
      case other =>
        throw new IllegalArgumentException(s"unsupported oracle connect type: $other")
    }
  }

  /**
   * Reads the given (sub)query from Oracle over JDBC. The username is wrapped
   * in double quotes to preserve its exact case, and the osuser session
   * property is truncated to 30 characters to work around an ojdbc6 limit.
   */
  def readFromOracle(sql: String, oracle_config: mutable.HashMap[String, String]): DataFrame = {
    val ip = oracle_config("oracle_host")
    val connectType = oracle_config("oracle_connectType")
    // Quoted so Oracle treats the user name as case-sensitive.
    val userName = s"""\"${oracle_config("oracle_username")}\""""
    log.info("userName===>" + userName)
    // SECURITY: the plaintext password is deliberately no longer logged.
    val passWord = oracle_config("oracle_password")
    val instanceName = oracle_config("oracle_instanceName")

    val url = getOracleConnectTypeURL(ip, instanceName, connectType)
    val driver = "oracle.jdbc.driver.OracleDriver"
    val prop = new Properties
    prop.put("driver", driver)
    prop.put("user", userName)
    prop.put("password", passWord)
    // ojdbc6 rejects OS user names longer than 30 characters.
    val osuser = System.getProperty("user.name")
    if (osuser.length > 30)
      prop.put("oracle.jdbc.v$session.osuser", osuser.substring(0, 30))
    _kc.sparkSession.read.jdbc(url, sql, prop)
  }

  /**
   * Runs the SQL through the built-in Hive support of the SparkSession;
   * external Hive data sources are rejected.
   */
  def readFromHive(sql: String, hive_config: mutable.HashMap[String, String]): DataFrame = {
    val isDefaultDs = hive_config("hiveDefault").toBoolean
    if (!isDefaultDs) {
      throw new IllegalArgumentException("暂不支持外部HIVE数据源!")
    }
    _kc.sparkSession.sqlContext.sql(sql)
  }

  /**
   * Reads the given (sub)query from an MPP source (KDW / GreenPlum / RulerDB)
   * over the PostgreSQL JDBC protocol.
   */
  def readFromMPP(sql: String,
                  mpp_config: mutable.HashMap[String, String]): DataFrame = {
    val url = s"jdbc:postgresql://${mpp_config("mpp_host")}:${mpp_config("mpp_port")}/${mpp_config("mpp_instanceName")}"

    val connProps = new Properties
    connProps.put("driver", "org.postgresql.Driver")
    connProps.put("user", mpp_config("mpp_username"))
    connProps.put("password", mpp_config("mpp_password"))
    _kc.sparkSession.read.jdbc(url, sql, connProps)
  }

  /**
   * Parses the SQL with the Druid parser and returns every table it references
   * as a qualified name. The dialect and stat visitor are selected by data
   * source type; only MySQL names are additionally stripped of backtick and
   * double-quote quoting (preserving the original per-dialect behavior).
   *
   * NOTE(review): an unknown dsType previously failed with a bare MatchError;
   * it now raises a descriptive IllegalArgumentException, consistent with the
   * other dispatch points in this class.
   */
  def resolveDBAndTableOfSQL(dsType: String, sql: String) = {
    import com.alibaba.druid.sql.visitor.SchemaStatVisitor

    // Parses `sql` under `dialect`, feeds every statement through `visitor`
    // and collects the referenced table names; the previous implementation
    // duplicated this 15-line body once per source type.
    def collect(dialect: String, visitor: SchemaStatVisitor, stripQuotes: Boolean): List[String] = {
      val dbAndTableName = new ListBuffer[String]()
      val stmtList: util.List[SQLStatement] = SQLUtils.parseStatements(sql, dialect)
      val stmtIterator = stmtList.iterator()
      while (stmtIterator.hasNext) {
        stmtIterator.next().accept(visitor)
      }
      val nameIterator: util.Iterator[TableStat.Name] = visitor.getTables().keySet().iterator()
      while (nameIterator.hasNext) {
        val name = nameIterator.next().getName
        dbAndTableName += (if (stripQuotes) name.replaceAll("`", "").replaceAll("\"", "") else name)
      }
      dbAndTableName.toList
    }

    dsType match {
      case DsTypeEnum.MYSQL =>
        collect(JdbcConstants.MYSQL, new MySqlSchemaStatVisitor(), stripQuotes = true)
      case DsTypeEnum.ORACLE =>
        collect(JdbcConstants.ORACLE, new OracleSchemaStatVisitor(), stripQuotes = false)
      // The PostgreSQL-protocol family shares one dialect and visitor.
      case DsTypeEnum.KDW | DsTypeEnum.GREENPLUM | DsTypeEnum.RULERDB =>
        collect(JdbcConstants.POSTGRESQL, new PGSchemaStatVisitor(), stripQuotes = false)
      case DsTypeEnum.HIVE =>
        collect(JdbcConstants.HIVE, new HiveSchemaStatVisitor(), stripQuotes = false)
      case _ =>
        throw new IllegalArgumentException("暂不支持该类型数据源!")
    }
  }

  /**
   * Casts each configured field of the DataFrame to the Spark SQL type that
   * getDataType() maps its declared data_type to.
   */
  def convertDataType(sinkSchema: List[ExtractFieldInfo], data: DataFrame): DataFrame = {
    sinkSchema.foldLeft(data) { (df, fieldInfo) =>
      df.withColumn(fieldInfo.field, df.col(fieldInfo.field).cast(getDataType(fieldInfo.data_type)))
    }
  }

  /**
   * Maps a Khaos column type name to the corresponding Spark SQL DataType;
   * unrecognized types map to NullType.
   */
  def getDataType(dataType: String): DataType = dataType match {
    case ColumnType.STRING => DataTypes.StringType
    case ColumnType.NUMBER => DataTypes.LongType
    case ColumnType.DATE => DataTypes.DateType
    case ColumnType.DECIMAL => DataTypes.DoubleType
    case ColumnType.TIME => DataTypes.StringType
    case ColumnType.DATETIME => DataTypes.TimestampType
    case _ => DataTypes.NullType
  }

  /**
   * Loads the "module.sql.source.*" properties from the context configuration
   * and caches the MySQL JDBC driver class and extra URL parameters.
   */
  def loadProperties(kc: KhaosContext): Unit = {
    val sourceProps: Map[String, String] = kc.conf.getAllWithPrefix("module.sql.source.").toMap
    sourceProps.foreach { case (key, value) => log.info(key + "   " + value) }
    _mysql_jdbc_driver = sourceProps.getOrElse(SqlConstants.MODULE_SQL_SOURCE_MYSQL_JDBC_DRIVER, "com.mysql.cj.jdbc.Driver")
    _mysql_jdbc_url_param = sourceProps.getOrElse(SqlConstants.MODULE_SQL_SOURCE_MYSQL_JDBC_URL_PARAM, "")
    log.info("loadProperties sql source properties driver=" + _mysql_jdbc_driver + " param=" + _mysql_jdbc_url_param)
  }


  /** Builds this module's output schema directly from the configured extract fields. */
  override def schema(dc: KhaosContext, config: String, dependence: Dependency) = {
    implicit val formats = DefaultFormats
    val sqlInfo = parse(config, true).extract[SqlSourceConfig]
    sqlInfo.extract_fields.map(ef => KhaosStructField(ef.field, ef.data_type))
  }
}
