package com.kingsoft.dc.khaos.module.spark.util

import java.sql.Connection
import java.sql.DriverManager.getConnection

import com.kingsoft.dc.khaos.util.Logging
import org.apache.spark.sql.DataFrame

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer


/**
 * Created by zoujing on 19/4/1.
 */
object MppUtils extends Logging {

  /**
   * Executes a semicolon-separated string of SQL statements as one JDBC batch
   * against a PostgreSQL-compatible database.
   *
   * @param url      JDBC connection URL
   * @param user     database user
   * @param password database password
   * @param sql      one or more SQL statements separated by ';'
   * @return true when the whole batch executed without error
   * @throws Exception rethrown after logging when loading the driver, connecting,
   *                   or executing the batch fails
   */
  def executeSql(url: String, user: String, password: String, sql: String): Boolean = {
    var conn: Connection = null
    try {
      Class.forName("org.postgresql.Driver")
      conn = getConnection(url, user, password)
      val st = conn.createStatement
      // limit = -1 keeps trailing empty segments, matching the original split behavior
      sql.split(";", -1).foreach(st.addBatch)
      st.executeBatch()
      st.close()
      true
    } catch {
      case e: Exception =>
        // Log and propagate; a `false` after `throw` would be unreachable.
        e.printStackTrace()
        throw e
    } finally {
      // Release the connection even when executeBatch throws (was leaked before).
      if (conn != null) conn.close()
    }
  }

  //  def excuteSql(url: String, user: String, password: String, sql: String): Boolean = {
  //    var conn: Connection = null
  //    try {
  //      Class.forName("org.postgresql.Driver")
  //      conn = getConnection(url, user, password)
  //      val st = conn.createStatement
  //      val rs = st.execute(sql)
  //      st.close
  //      conn.close
  //      true
  //    } catch {
  //      case e: Exception =>
  //        e.printStackTrace()
  //        throw e
  //        false
  //    }
  //  }


  /**
   * Builds, for every table in `tblAndDF`, the three HAWQ statements needed to
   * load data through an ORC external table: CREATE EXTERNAL TABLE, INSERT
   * from the external table into the internal one, and DROP EXTERNAL TABLE.
   *
   * @param tblAndDF         internal table name -> its DataFrame (only the name is used here)
   * @param schema           target schema
   * @param tblColStr        external-table column names and types
   * @param location         HDFS data path (appended to `ip`)
   * @param sourceColExpList column expressions selected from the external table
   * @param destColNameList  column names of the internal (destination) table
   * @param extFlag          suffix appended to form the external-table name
   * @param ip               HDFS namenode prefix for the LOCATION clause
   * @return all statements joined with ';'
   */
  def getHAWQExternalTable(tblAndDF: mutable.HashMap[String, DataFrame],
                           schema: String,
                           tblColStr: String,
                           location: String,
                           sourceColExpList: List[String],
                           destColNameList: List[String],
                           extFlag: String, ip: String): String = {
    val sqlArr = new ArrayBuffer[String]

    for (nameAndDF <- tblAndDF) {
      // Internal (destination) table name, double-quoted
      val tblName = "\"" + nameAndDF._1 + "\""
      // External table name: strip a trailing quote from the raw name before suffixing
      val tblNameEXT = "\"" + nameAndDF._1.stripSuffix("\"") + "_" + extFlag + "\""
      // `location` is expected to already carry the path; `ip` supplies the namenode prefix
      val hdfsPath = ip + location
      log.info("hdfs path: " + hdfsPath)
      // External table over the ORC files written by Spark
      val createTableDDL = s"CREATE EXTERNAL TABLE ${schema}.${tblNameEXT} (${tblColStr}) LOCATION ('${hdfsPath}') FORMAT 'orc'"
      // Copy rows from the external table into the internal one
      val colNames = destColNameList.mkString(",")
      val sourceColExps = sourceColExpList.mkString(",")
      val ddl = s"INSERT INTO ${schema}.${tblName} (${colNames}) SELECT ${sourceColExps} FROM ${schema}.${tblNameEXT}"

      // Clean up the temporary external table
      val delExternalTable = s"DROP EXTERNAL TABLE IF EXISTS ${schema}.${tblNameEXT}"

      sqlArr += createTableDDL + ";" + ddl + ";" + delExternalTable
    }
    sqlArr.mkString(";")
  }

  /**
   * Joins the given statements with ';' and executes them in a single JDBC call.
   *
   * @param url      JDBC connection URL
   * @param user     database user
   * @param password database password
   * @param sqls     SQL statements to run, joined with ';'
   * @return true when execution succeeded
   * @throws Exception rethrown after logging on any failure
   */
  def executeSqls(url: String, user: String, password: String, sqls: List[String]): Boolean = {
    var conn: Connection = null
    try {
      Class.forName("org.postgresql.Driver")
      conn = getConnection(url, user, password)
      val st = conn.createStatement
      // The boolean result of execute() is irrelevant here; we only care about success.
      st.execute(sqls.mkString(";"))
      st.close()
      true
    } catch {
      case e: Exception =>
        // Log and propagate; the old trailing `false` after `throw` was unreachable.
        e.printStackTrace()
        throw e
    } finally {
      // Close the connection on every path (was leaked when execute() threw).
      if (conn != null) conn.close()
    }
  }

  /**
   * Executes the given statements as one JDBC batch inside a single transaction
   * (autocommit off, explicit commit on success, rollback on failure).
   *
   * @param url      JDBC connection URL
   * @param user     database user
   * @param password database password
   * @param sqls     SQL statements to add to the batch
   * @return true when the batch committed successfully
   * @throws Exception rethrown after logging and rolling back on any failure
   */
  def executeBatchs(url: String, user: String, password: String, sqls: List[String]): Boolean = {
    var conn: Connection = null
    try {
      Class.forName("org.postgresql.Driver")
      conn = getConnection(url, user, password)
      val st = conn.createStatement
      conn.setAutoCommit(false)
      sqls.foreach(st.addBatch)
      val rs = st.executeBatch()
      println("rs==> " + rs.mkString(","))

      st.clearBatch()
      conn.commit()
      st.close()
      true
    } catch {
      case e: Exception =>
        e.printStackTrace()
        // Autocommit is off: roll back the open transaction before propagating.
        if (conn != null) conn.rollback()
        throw e
    } finally {
      // Close the connection on every path (was leaked when the batch threw).
      if (conn != null) conn.close()
    }
  }

  /**
   * Builds, for every table in `tblAndDF`, the three statements needed to load
   * data from object storage through a readable external table: CREATE READABLE
   * EXTERNAL TABLE, INSERT into the internal table, and DROP EXTERNAL TABLE.
   *
   * @param tblAndDF         internal table name -> its DataFrame (only the name is used here)
   * @param schema           MPP schema (the database name shown on the front end)
   * @param tblColStr        external-table column names and types
   * @param dataformat       data format of the staged files (currently csv)
   * @param delimiter        field delimiter, "|" by default
   * @param accessKeyId      COS access key
   * @param serectAccessKey  COS secret key
   * @param appId            COS application id
   * @param location         temporary staging path
   * @param ossType          object-storage type
   * @param destTable        (unused, kept for signature compatibility)
   * @param sourceColExpList column expressions of the external table
   * @param destColNameList  column names of the internal (source) table
   * @param extFlag          suffix appended to form the external-table name
   * @return all statements joined with ';' (create, insert, drop per table)
   */
  def getOssExternalTable(tblAndDF: mutable.HashMap[String, DataFrame],
                          schema: String,
                          tblColStr: String,
                          dataformat: String,
                          delimiter: String,
                          accessKeyId: String,
                          serectAccessKey: String,
                          appId: String,
                          location: String,
                          ossType: String,
                          destTable: String,
                          sourceColExpList: List[String],
                          destColNameList: List[String],
                          extFlag: String): String = {
    // Column lists are identical for every table, so build them once up front.
    val insertCols = destColNameList.mkString(",")
    val selectCols = sourceColExpList.mkString(",")

    val perTableSql = tblAndDF.map { case (innerName, _) =>
      // External-table name = internal name + suffix flag
      val extName = innerName + "_" + extFlag

      // Readable external table pointing at the staged files in object storage
      val createExt = s"CREATE READABLE EXTERNAL TABLE ${schema}.${extName} (${tblColStr}) LOCATION ('oss://${location + "/" + innerName} oss_type=${ossType} cos_appid=${appId} access_key_id=${accessKeyId} secret_access_key=${serectAccessKey}') FORMAT '${dataformat}' (DELIMITER '${delimiter}')"

      // Copy the staged rows into the internal table
      val insertSql = s"INSERT INTO ${schema}.${innerName} (${insertCols}) SELECT ${selectCols} FROM ${schema}.${extName}"

      // Drop the temporary external table afterwards
      val dropExt = s"DROP EXTERNAL TABLE IF EXISTS ${schema}.${extName}"

      createExt + ";" + insertSql + ";" + dropExt
    }
    perTableSql.mkString(";")
  }

  //  /**
  //    * 并行拷贝源表数据至目标表
  //    * @param destTable
  //    * @param sourceTable
  //    */
  //  def getCopyTableDDL(sourceTable: String, destTable: String,  colNameList: List[String]): String = {
  //    val colNames = colNameList.mkString(",")
  //    val ddl = s"INSERT INTO ${destTable} (${colNames}) SELECT ${colNames} FROM ${sourceTable}"
  //    logInfo("ddl:" + ddl)
  //    ddl
  //  }

  /**
   * Builds an INSERT ... SELECT statement that copies rows from the source
   * table into the destination (external) table within the same schema.
   *
   * @param schema           database schema
   * @param sourceTable      source table name
   * @param destTable        destination (external) table name
   * @param sourceColExpList column expressions read from the source table
   * @param destColNameList  column names written in the destination table
   * @return the generated INSERT statement
   */
  def getCopyTableDDL(schema: String, sourceTable: String, destTable: String, sourceColExpList: List[String], destColNameList: List[String]): String = {
    val targetColumns = destColNameList.mkString(",")
    val selectedExprs = sourceColExpList.mkString(",")
    val statement =
      s"INSERT INTO ${schema}.${destTable} (${targetColumns}) SELECT ${selectedExprs} FROM ${schema}.${sourceTable}"
    logInfo("ddl:" + statement)
    statement
  }
}
