//package com.wyt.spark.clickhouse.reference
//
//import java.sql.{Date, PreparedStatement, Timestamp}
//
//import ru.yandex.clickhouse.ClickHouseDataSource
//import ru.yandex.clickhouse.settings.ClickHouseProperties
////import ru.yandex.clickhouse.{ClickHouseConnection, ClickHouseDataSource}
//
//
////import com.github.housepower.jdbc.ClickHouseConnection
//
//import com.github.housepower.jdbc.ClickHouseConnection
//import com.tgou.data.stanford.market.clickhouse.utils.CkMode.CkMode
//import org.apache.spark.sql.functions.lit
//import org.apache.spark.sql.types._
//import org.apache.spark.sql.{DataFrame, SparkSession}
//
//import scala.collection.mutable.ArrayBuffer
//
///**
//  * 基于ClickHouse Native JDBC的Sink类
//  * @author zhangdongwei
//  * @create 2020-08-01-21:53
//  */
//@Deprecated
//object ClickHouseNJSink {
//
//  /**
//    *
//    * @param dataframe   dataFrame
//    * @param dbName      Ck数据库名
//    * @param tableName   Ck表名
//    * @param clusterName Ck集群名
//    * @param ckUrl       Ck连接url
//    * @param mode        OVERWRITE或者APPEND
//    * @param props       分区字段,索引字段信息
//    */
//  def sink2Ck(spark: SparkSession,
//              dataframe: DataFrame,
//              dfPartitions: Int,
//              dbName: String,
//              tableName: String,
//              clusterName: String,
//              ckUrl: String,
//              mode: CkMode,
//              props: Map[String, Object] = Map()): Unit = {
//
//    //为df添加ck_date字段
//    val df = if(props.contains("ck_date")){
//      dataframe.withColumn("ck_date", lit(props.getOrElse("ck_date","").toString))
//    } else {
//      dataframe
//    }
//
//    val distributedTableName = tableName + "_distributed"
//    val fields = df.schema.fields.map(t => new Tuple2[String,String](t.name, dataTypeToString(t.dataType)))
//    val partitionColumns: Array[String] = props.get("partitionColumns").getOrElse(Array[String]()).asInstanceOf[Array[String]]
//    val indexColumns: Array[String] = props.get("indexColumns").getOrElse(Array[String]()).asInstanceOf[Array[String]]
//
//    var connection: ClickHouseConnection = ClickHouseNJUtils.getCkConnection(ckUrl)
//    try{
//      //第一步：创建表（不存在），包括本地表、分布式表
//      ClickHouseNJUtils.createReplicatedMergeTreeTable(connection,
//        clusterName,
//        dbName,
//        tableName,
//        fields,
//        partitionColumns,
//        indexColumns)
//      ClickHouseNJUtils.createDistributedTable(connection,clusterName,dbName,tableName)
//      //第二步 处理数据（针对APPEND、OVERWRITE）
//      if (mode == CkMode.OVERWRITE) {
//        // 清空表（针对OVERWRITE模式）
//        ClickHouseNJUtils.truncateTable(connection, clusterName, dbName, tableName)
//        println(s"表${dbName}.${tableName}清空完毕...")
//      }else if(mode == CkMode.APPEND && props.contains("append_delete_condition")) {
//        // 清理数据
//        val condition: String = props.getOrElse("append_delete_condition", " 1 = 0 ").toString
//        val sql = s"ALTER TABLE $dbName.$tableName ON CLUSTER ${clusterName} DELETE WHERE ${condition}"
//        ClickHouseNJUtils.alterTable(connection,clusterName,dbName,tableName,sql)
//        println(s"表${dbName}.${tableName}执行${sql}操作完毕...")
//      }
//    }catch {
//      case e: Exception => throw new RuntimeException(e)
//    }
//    finally {
//      ClickHouseNJUtils.closeCkConnection(connection)
//    }
//
//    //第三步：分批数据插入，默认一次插入量为30w，可自定义修改
//    val batchSize: Int = props.getOrElse("batchSize", "300000").toString.toInt
//    df.coalesce(dfPartitions).foreachPartition(rows => {
//
//      var connection1: ClickHouseConnection = null
//      var pstmt1: PreparedStatement = null
//
//      try {
//        connection1 = ClickHouseNJUtils.getCkConnection(ckUrl)
//
//        val sqlText = s"INSERT INTO $dbName.$distributedTableName VALUES (${Array.fill(fields.length)("?").mkString(",")})"
//        pstmt1 = connection1.prepareStatement(sqlText)
//
//        var count = 0
//        rows.foreach(
//          line => {
//            count += 1
//            var indexField = 0
//            line.schema.fields.foreach(field => {
//              // 处理字段类型
//              field.dataType match {
//                case IntegerType => pstmt1.setInt(indexField + 1, if (line.getAs[AnyRef](indexField) != null) line.getInt(indexField) else 0)
//                case LongType => pstmt1.setLong(indexField + 1, if (line.getAs[AnyRef](indexField) != null) line.getLong(indexField) else 0)
//                /*case DoubleType => pstmt.setDouble(indexField + 1, line.getDouble(indexField))
//                case FloatType => pstmt.setFloat(indexField + 1, line.getFloat(indexField))
//                case ShortType => pstmt.setInt(indexField + 1, line.getShort(indexField))
//                case ByteType => pstmt.setInt(indexField + 1, line.getByte(indexField))*/
//                // ck不支持boolean
//                case BooleanType => pstmt1.setInt(indexField + 1, if (line.getBoolean(indexField)) 1 else 0)
//                case StringType => pstmt1.setString(indexField + 1, if (line.getString(indexField) != null) line.getString(indexField) else "")
//                case TimestampType => pstmt1.setTimestamp(indexField + 1,
//                  if (line.getAs[java.sql.Timestamp](indexField) == null) new Timestamp(0) else line.getAs[java.sql.Timestamp](indexField))
//                case DateType => pstmt1.setDate(indexField + 1, if (line.getAs[java.sql.Date](indexField) == null) new Date(0) else new Date(line.getAs[java.sql.Date](indexField).getTime+24*60*60*1000))
//                // 该驱动支持的Decimal精度不能超过18,定义表时需要注意
//                case t: DecimalType => pstmt1.setBigDecimal(indexField + 1, if (line.getDecimal(indexField) != null) line.getDecimal(indexField) else new java.math.BigDecimal(0))
//                // 处理Array类型
//                case ArrayType(et, _) =>
//                  var typeName = "String"
//                  var value = line.getSeq[AnyRef](indexField)
//                  if (value == null) {
//                    value = Seq[AnyRef]()
//                  }
//                  if (et.equals(IntegerType)) {
//                    typeName = "Int"
//                  } else if (et.equals(StringType)) {
//                    typeName = "String"
//                  } else if (et.equals(LongType)) {
//                    typeName = "Long"
//                  } else {
//                    throw new IllegalArgumentException(
//                      s"Not Support Array ElementType ${et.typeName}")
//                  }
//
//                  val arr = connection1.createArrayOf(
//                    typeName,
//                    value.toArray)
//                  pstmt1.setArray(indexField + 1, arr)
//                case _ => throw new IllegalArgumentException(
//                  s"Not Support DataType ${field.dataType.typeName}")
//              }
//              indexField += 1
//            })
//            // 批量写入
//            pstmt1.addBatch()
//            if (count >= batchSize) {
//              pstmt1.executeBatch()
//              Thread.sleep(1000)
//              count = 0
//            }
//          }
//        )
//        if (count > 0) {
//          pstmt1.executeBatch()
//          Thread.sleep(1000)
//        }
//        connection1.commit()
//      } catch {
//        case e: Exception => throw new RuntimeException(e)
//      }
//      finally {
//        ClickHouseNJUtils.closePrepareStatement(pstmt1)
//        ClickHouseNJUtils.closeCkConnection(connection1)
//      }
//      println("当前分区插入数据完毕...")
//    })
//  }
//
//  /**
//    *
//    * @param dataframe   dataFrame
//    * @param dbName      Ck数据库名
//    * @param tableName   Ck表名
//    * @param clusterName Ck集群名
//    * @param ckUrl       Ck连接url
//    * @param mode        OVERWRITE或者APPEND
//    * @param props       分区字段,索引字段信息
//    */
//  def sink2CkWithJdbc(spark: SparkSession,
//                      dataframe: DataFrame,
//                      dfPartitions: Int,
//                      dbName: String,
//                      tableName: String,
//                      clusterName: String,
//                      ckUrl: String,
//                      mode: CkMode,
//                      props: Map[String, Object] = Map()): Unit = {
//
//    //为df添加ck_date字段
//    val df = if(props.contains("ck_date")){
//      dataframe.withColumn("ck_date", lit(props.getOrElse("ck_date","").toString))
//    } else {
//      dataframe
//    }
//
//    val distributedTableName = tableName + "_distributed"
//    val fields = df.schema.fields.map(t => new Tuple2[String,String](t.name, dataTypeToString(t.dataType)))
//    val partitionColumns: Array[String] = props.get("partitionColumns").getOrElse(Array[String]()).asInstanceOf[Array[String]]
//    val indexColumns: Array[String] = props.get("indexColumns").getOrElse(Array[String]()).asInstanceOf[Array[String]]
//
//    val driver = "ru.yandex.clickhouse.ClickHouseDriver"
//    val user = "default"
//    val password = ""
//    val url = "jdbc:clickhouse://hnode22:8123/default"
//    Class.forName(driver) // com.github.housepower.jdbc.ClickHouseDriver  ru.yandex.clickhouse.ClickHouseDriver
//    var connection: ru.yandex.clickhouse.ClickHouseConnection = null
//    var pstmt: PreparedStatement = null
////    val properties = new ClickHouseProperties()
////    properties.setUser(user)
////    properties.setPassword(password)
////    properties.setDatabase(dbName)
//
//    try{
////      connection = new ClickHouseDataSource(url, properties).getConnection()
//
//      //第一步：创建表（不存在），包括本地表、分布式表
////      ClickHouseUtils.createReplicatedMergeTreeTable(connection,
////        clusterName,
////        dbName,
////        tableName,
////        fields,
////        partitionColumns,
////        indexColumns)
////      ClickHouseUtils.createDistributedTable(connection,clusterName,dbName,tableName)
////      //第二步 处理数据（针对APPEND、OVERWRITE）
////      if (mode == CkMode.OVERWRITE) {
////        // 清空表（针对OVERWRITE模式）
////        ClickHouseUtils.truncateTable(connection, clusterName, dbName, tableName)
////        println(s"表${dbName}.${tableName}清空完毕...")
////      }else if(mode == CkMode.APPEND && props.contains("append_delete_condition")) {
////        // 清理数据
////        val condition: String = props.getOrElse("append_delete_condition", " 1 = 0 ").toString
////        val sql = s"ALTER TABLE $dbName.$tableName ON CLUSTER ${clusterName} DELETE WHERE ${condition}"
////        ClickHouseUtils.alterTable(connection,clusterName,dbName,tableName,sql)
////        println(s"表${dbName}.${tableName}执行${sql}操作完毕...")
////      }
//    }catch {
//      case e: Exception => throw new RuntimeException(e)
//    }
//    finally {
////      ClickHouseUtils.closeCkConnection(connection)
////      connection.close()
//    }
//
//    //第三步：分批数据插入，默认一次插入量为30w，可自定义修改
//    val batchSize: Int = props.getOrElse("batchSize", "100000").toString.toInt
//    df.coalesce(dfPartitions).foreachPartition(rows => {
//
//
//      var connection1: ru.yandex.clickhouse.ClickHouseConnection = null
//      var pstmt1: PreparedStatement = null
//
//      val properties = new ClickHouseProperties()
//      properties.setUser(user)
//      properties.setPassword(password)
//      properties.setDatabase(dbName)
//
//      try {
//        connection1 = new ClickHouseDataSource(url, properties).getConnection()
//
//        val sqlText = s"INSERT INTO $dbName.$distributedTableName VALUES (${Array.fill(fields.length)("?").mkString(",")})"
//        pstmt1 = connection1.prepareStatement(sqlText)
//
//        var count = 0
//        rows.foreach(
//          line => {
//            count += 1
//            var indexField = 0
//            line.schema.fields.foreach(field => {
//              // 处理字段类型
//              field.dataType match {
//                case IntegerType => pstmt1.setInt(indexField + 1, if (line.getAs[AnyRef](indexField) != null) line.getInt(indexField) else 0)
//                case LongType => pstmt1.setLong(indexField + 1, if (line.getAs[AnyRef](indexField) != null) line.getLong(indexField) else 0)
//                /*case DoubleType => pstmt.setDouble(indexField + 1, line.getDouble(indexField))
//                case FloatType => pstmt.setFloat(indexField + 1, line.getFloat(indexField))
//                case ShortType => pstmt.setInt(indexField + 1, line.getShort(indexField))
//                case ByteType => pstmt.setInt(indexField + 1, line.getByte(indexField))*/
//                // ck不支持boolean
//                case BooleanType => pstmt1.setInt(indexField + 1, if (line.getBoolean(indexField)) 1 else 0)
//                case StringType => pstmt1.setString(indexField + 1, if (line.getString(indexField) != null) line.getString(indexField) else "")
//                case TimestampType => pstmt1.setTimestamp(indexField + 1,
//                  if (line.getAs[java.sql.Timestamp](indexField) == null) new Timestamp(0) else line.getAs[java.sql.Timestamp](indexField))
//                case DateType => pstmt1.setDate(indexField + 1, if (line.getAs[java.sql.Date](indexField) == null) new Date(0) else new Date(line.getAs[java.sql.Date](indexField).getTime+24*60*60*1000))
//                // 该驱动支持的Decimal精度不能超过18,定义表时需要注意
//                case t: DecimalType => pstmt1.setBigDecimal(indexField + 1, if (line.getDecimal(indexField) != null) line.getDecimal(indexField) else new java.math.BigDecimal(0))
//                // 处理Array类型
//                case ArrayType(et, _) =>
//                  var typeName = "String"
//                  var value = line.getSeq[AnyRef](indexField)
//                  if (value == null) {
//                    value = Seq[AnyRef]()
//                  }
//                  if (et.equals(IntegerType)) {
//                    typeName = "Int"
//                  } else if (et.equals(StringType)) {
//                    typeName = "String"
//                  } else if (et.equals(LongType)) {
//                    typeName = "Long"
//                  } else {
//                    throw new IllegalArgumentException(
//                      s"Not Support Array ElementType ${et.typeName}")
//                  }
//
//                  val arr = connection1.createArrayOf(
//                    typeName,
//                    value.toArray)
//                  pstmt1.setArray(indexField + 1, arr)
//                case _ => throw new IllegalArgumentException(
//                  s"Not Support DataType ${field.dataType.typeName}")
//              }
//              indexField += 1
//            })
//            // 批量写入
//            pstmt1.addBatch()
//            if (count >= batchSize) {
//              pstmt1.executeBatch()
//              count = 0
//            }
//          }
//        )
//        if (count > 0) {
//          pstmt1.executeBatch()
//        }
//        connection1.commit()
//      } catch {
//        case e: Exception => throw new RuntimeException(e)
//      }
//      finally {
//        ClickHouseNJUtils.closePrepareStatement(pstmt1)
////        ClickHouseUtils.closeCkConnection(connection1)
//        connection1.close()
//      }
//      println("当前分区插入数据完毕...")
//    })
//  }
//
//  /**
//    * 获取创建ck表的字段语句
//    *
//    * @param df DataFrame
//    */
//  def getCreateFields(fields: Array[StructField]): String = {
//    var arr = ArrayBuffer[String]()
//    for (field <- fields) {
//      arr += (field.name + " " + dataTypeToString(field.dataType))
//    }
//    arr.mkString(",\n")
//  }
//
//  //todo 需要不断完善
//  def dataTypeToString(dataType: DataType): String = {
//    dataType match {
//      case LongType => "Int64"
//      case IntegerType => "Int32"
//      case ShortType => "Int16"
//      case BooleanType => "Int8" //boolean转成字节类型1,0
//      case StringType => "String"
//      case t: DecimalType => "Decimal(18, 3)"
//      case TimestampType => "DateTime"
//      case DateType => "Date"
//      case ArrayType(et, _) =>
//        et match {
//          case IntegerType => "Array(Int32)"
//          case LongType => "Array(Int64)"
//          case StringType => "Array(String)"
//          case _ => throw new IllegalArgumentException(
//            s"Not Support ArrayType ElementType ${et.typeName}")
//        }
//      case _ => throw new IllegalArgumentException(
//        s"Not Support DataType ${dataType.typeName}")
//    }
//
//  }
//
//}
