package spark

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.{SaveMode, SparkSession}

import java.util.Properties

class ClickHouseWriter {

  /** Appends the rows of a DataFrame to a ClickHouse table over JDBC.
    *
    * Connection settings default to a local ClickHouse instance (`dm` database,
    * native-protocol port 9000, as used by the housepower JDBC driver) but can
    * be overridden per call without breaking existing callers.
    *
    * @param tableName name of the target table
    * @param df        DataFrame whose rows are appended to the table
    * @param url       JDBC connection URL
    * @param user      database user name
    * @param password  database password
    */
  def writeToClickHouse(
      tableName: String,
      df: DataFrame,
      url: String = "jdbc:clickhouse://127.0.0.1:9000/dm",
      user: String = "default",
      password: String = "root"
  ): Unit = {
    val props = new Properties()
    props.setProperty("user", user)
    props.setProperty("password", password)
    // Flush rows to ClickHouse in batches of 1000 per JDBC round-trip.
    // (Original comment claimed 10,000, which contradicted the value set here.)
    props.setProperty("batchsize", "1000")
    props.setProperty("driver", "com.github.housepower.jdbc.ClickHouseDriver")

    // Append mode: existing rows in the target table are preserved.
    df.write.mode(SaveMode.Append).jdbc(url, tableName, props)
  }

}
