package com.atguigu.userprofile.app

import java.util.Properties

import com.atguigu.userprofile.common.bean.TagInfo
import com.atguigu.userprofile.common.dao.TagInfoDAO
import com.atguigu.userprofile.common.util.{MyClickhouseUtil, MyPropertiesUtil}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object TaskExportApp {

  // 1. Read the daily merge table from Hive (as a DataFrame).
  // 2. Write it to ClickHouse: Spark -> JDBC -> DB (SQL).
  //    Prerequisite: (re)create the daily target table from the driver first.

  /**
   * Exports one day's merged user-tag wide table from Hive into ClickHouse.
   *
   * Flow:
   *   1. Read `user_tag_merge_yyyyMMdd` from Hive into a DataFrame.
   *   2. Drop + recreate the matching ClickHouse table (idempotent reruns,
   *      no stale rows from a previous export survive).
   *   3. Append the DataFrame to ClickHouse over JDBC.
   *
   * @param args args(0) = task id (kept for the CLI contract; not used here),
   *             args(1) = task date in "yyyy-MM-dd" format
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a clear message instead of ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "usage: TaskExportApp <taskId> <taskDate yyyy-MM-dd>")
    val taskId: String = args(0) // currently unused; part of the submit contract
    val taskDate: String = args(1)

    // Master is supplied by spark-submit; enable local[*] only for debugging.
    val sparkConf: SparkConf = new SparkConf().setAppName("task_export_app") //.setMaster("local[*]")
    val sparkSession: SparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()

    try {
      val properties: Properties = MyPropertiesUtil.load("config.properties")
      val upName: String = properties.getProperty("user-profile.dbname")
      val clickhouseURL: String = properties.getProperty("clickhouse.url")

      // Daily table name, e.g. user_tag_merge_20230101.
      val tableName: String = "user_tag_merge_" + taskDate.replace("-", "")

      // 1. Read the whole daily merge table from Hive.
      val dataFrame: DataFrame = sparkSession.sql(s"select * from $upName.$tableName")

      // Every exported column is declared as String in ClickHouse; the wide
      // table's tag values are all stored as strings.
      val columnsSQL: String = dataFrame.columns.map(colName => s" $colName String").mkString(",")

      // 2. DDL runs on the driver. Dropping first makes the job idempotent
      //    (clears any residue from an earlier run of the same day).
      val dropTableSQL = s"drop table if exists $upName.$tableName"
      val createTableSQL =
        s"""
           |create table  $upName.$tableName
           |($columnsSQL) engine=MergeTree
           |order by uid
         """.stripMargin

      println(dropTableSQL)
      MyClickhouseUtil.executeSql(dropTableSQL)
      println(createTableSQL)
      MyClickhouseUtil.executeSql(createTableSQL)

      // 3. DML runs on the executors.
      // NOTE(review): the write targets the bare tableName while the DDL used
      // the qualified $upName.$tableName — this assumes the default database in
      // clickhouseURL is $upName; confirm against the configured URL.
      dataFrame.write.mode(SaveMode.Append)
        .option("batchsize", "100")       // rows per JDBC batch insert
        .option("isolationLevel", "NONE") // ClickHouse: no transactions
        .option("numPartitions", "8")     // write concurrency; usually the total executor core count
        .option("driver", "ru.yandex.clickhouse.ClickHouseDriver")
        .jdbc(clickhouseURL, tableName, new Properties())
    } finally {
      // Always release the Spark application (YARN container etc.), even when
      // the export fails part-way through.
      sparkSession.stop()
    }
  }

}
