package com.atguigu.userprofile.app

import com.atguigu.userprofile.bean.TagInfo
import com.atguigu.userprofile.dao.TagInfoDao
import com.atguigu.userprofile.util.{ClickhouseUtil, MyPropertiesUtil}
import java.util.Properties
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}


object TaskExportCkApp {

  /**
   * Exports the merged user-tag wide table for one business date from Hive into ClickHouse.
   *
   * Usage: TaskExportCkApp &lt;taskId&gt; &lt;taskDate yyyy-MM-dd&gt;
   *
   * Steps:
   *   1. Drop & recreate the daily target table in ClickHouse (schema built from all enabled tags).
   *   2. Read the Hive wide table for that date.
   *   3. Append the DataFrame into ClickHouse over JDBC.
   */
  def main(args: Array[String]): Unit = {

    // Fail fast with a clear message instead of a bare ArrayIndexOutOfBoundsException.
    require(
      args.length >= 2,
      s"Usage: TaskExportCkApp <taskId> <taskDate yyyy-MM-dd>, got ${args.mkString("[", ", ", "]")}"
    )
    val taskId: String = args(0)   // passed by the scheduler; unused here but keeps arg positions stable
    val taskDate: String = args(1)

    // 0. Spark environment (master is supplied by spark-submit in production)
    val sparkConf: SparkConf = new SparkConf().setAppName("task_export_app")
            //.setMaster("local[*]")
    val sparkSession: SparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()

    try {
      // Daily table name, e.g. user_tag_merge_20240101
      val tableName = s"user_tag_merge_${taskDate.replace("-","")}"

      val properties: Properties = MyPropertiesUtil.load("config.properties")
      val upName: String = properties.getProperty("user-profile.dbname")
      val clickhouseURL: String = properties.getProperty("clickhouse.url")

      // 1. (Re)create the target table in ClickHouse:
      //    one lowercase String column per enabled tag, plus the uid key.
      val tagInfoList: List[TagInfo] = TagInfoDao.getTagListWithOn()
      val tagCodeSQL: String = tagInfoList
        .map(tagInfo => s"${tagInfo.tagCode.toLowerCase} String")
        .mkString(",")

      val createTableSQL =
        s"""
          |  create table if not exists $upName.$tableName(
          |  uid UInt64,
          |  $tagCodeSQL )
          |  engine=MergeTree
          |  order by uid
          |""".stripMargin

      // Drop first so a re-run of the same date rebuilds the table from scratch.
      val dropTabSQL = s"drop table if exists $upName.$tableName "

      println(dropTabSQL)
      println(createTableSQL)

      ClickhouseUtil.executeSql(dropTabSQL)
      ClickhouseUtil.executeSql(createTableSQL)

      // 2. Read the Hive wide table.
      // NOTE(review): reads from $upName.$tableName — the same db/table name pair used for the
      // ClickHouse target; confirm the Hive wide table really lives in the user-profile db.
      val sql = s"select * from $upName.$tableName"
      val dataFrame: DataFrame = sparkSession.sql(sql)

      // 3. Append the DataFrame into ClickHouse over JDBC.
      // NOTE(review): the JDBC target is the bare table name — assumes the default database in
      // clickhouseURL is $upName (the db the table was created in); verify the URL.
      dataFrame.write.mode(SaveMode.Append)                         // append into the freshly created table
              .option("batchsize", "200")                           // rows per JDBC batch insert
              .option("isolationLevel", "NONE")                     // ClickHouse has no transactions
              .option("numPartitions", "12")                        // cap concurrent JDBC connections
              .option("driver", "ru.yandex.clickhouse.ClickHouseDriver")
              .jdbc(clickhouseURL, tableName, new Properties())
    } finally {
      // Release cluster resources even when the export fails.
      sparkSession.stop()
    }
  }

}
