package com.atguigu.userprofile.app

import java.util.Properties

import com.atguigu.userprofile.util.{MyClickhouseUtil, MyPropertiesUtil}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object TaskTransformChApp {

  /**
   * Exports the merged user-tag wide table for one task date from Hive to ClickHouse.
   *
   * Steps:
   *   1. Read the Hive table `user_tag_merge_<yyyyMMdd>` into a DataFrame.
   *   2. Recreate the matching ClickHouse table (drop + create, every column as String).
   *   3. Append the DataFrame into ClickHouse over JDBC.
   *
   * @param args expects the task date in args(1) formatted as "yyyy-MM-dd"
   *             (args(0) presumably carries a task id — confirm with the scheduler).
   */
  def main(args: Array[String]): Unit = {

    val sparkConf: SparkConf = new SparkConf().setAppName("task_transform_ch_app")
      //.setMaster("local[*]") // uncomment for local debugging
    val sparkSession: SparkSession =
      SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()

    // Externalized configuration: source Hive db, target ClickHouse db and JDBC URL.
    val properties: Properties = MyPropertiesUtil.load("config.properties")
    val upDBName: String = properties.getProperty("user-profile.dbname")
    val clickhouseURL: String = properties.getProperty("clickhouse.url")
    val upDBNameCH: String = properties.getProperty("user-profile.dbname.ch")

    // Task date arrives as "yyyy-MM-dd"; the table suffix uses "yyyyMMdd".
    val taskDateOrigin = args(1)
    val taskDate = taskDateOrigin.replace("-", "")
    val tableName = s"user_tag_merge_$taskDate"
    // Fully qualified ClickHouse table name so that drop, create and insert all
    // hit the same database regardless of the default db encoded in the JDBC URL.
    // (Previously only the CREATE was qualified; the DROP and the JDBC write used
    // the bare name and could target a different database.)
    val chTableName = s"$upDBNameCH.$tableName"

    // 1. Load the merged tag table for this task date from Hive.
    sparkSession.sql("use " + upDBName)
    val tagMergeDF: DataFrame = sparkSession.sql(s"select * from $tableName")

    // 2. Recreate the ClickHouse wide table: drop any table left over from a
    //    previous run, then create it with every column typed as String.
    MyClickhouseUtil.executeSql(s"drop table if exists $chTableName")

    val colSql: String = tagMergeDF.columns.map(_ + " String").mkString(",")
    val createTableSql =
      s"create table $chTableName ( $colSql ) engine= MergeTree()  order by uid "
    println(createTableSql)
    MyClickhouseUtil.executeSql(createTableSql)

    // 3. Write the DataFrame into ClickHouse via JDBC.
    tagMergeDF.write.mode(SaveMode.Append)
      .option("batchsize", "1000")
      .option("isolationLevel", "NONE") // ClickHouse has no transactions; disable them
      .option("numPartitions", "4")     // number of parallel JDBC writers
      .option("driver", "ru.yandex.clickhouse.ClickHouseDriver")
      .jdbc(clickhouseURL, chTableName, new Properties())

    // Release the Spark application's resources once the export completes.
    sparkSession.stop()
  }

}
