package com.hkbigdata.app

import java.util.Properties

import com.hkbigdata.userprofile.common.dao.TagInfoDAO
import com.hkbigdata.userprofile.common.util.{MyClickHouseUtil, MyPropertiesUtil}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Daily export of the Hive user-tag wide table into ClickHouse.
 *
 * @author liuanbo (2194550857@qq.com)
 * @since 2024-04-24
 */
// 1. Create the target table in ClickHouse — one table per day.
//
// 2. Read the data to be inserted from the Hive wide table.
//    Because Hive and ClickHouse are heterogeneous databases, a plain
//    "insert ... select" across them is not possible.
//
// 3. Write the data into ClickHouse: Spark pushes the DataFrame into the
//    target table over JDBC.
object TaskHiveExportCk {

  /**
   * Exports the daily user-tag wide table from Hive into ClickHouse.
   *
   * Steps:
   *   1. Build and execute a `CREATE TABLE` in ClickHouse. One table is
   *      created per day, so no PARTITION BY clause is needed.
   *   2. Read the matching wide table from Hive via Spark SQL.
   *   3. Write the DataFrame into ClickHouse over JDBC (heterogeneous
   *      source/target, so a cross-database `insert ... select` is impossible).
   *
   * @param args args(1) must be the business date in yyyy-MM-dd format
   *             (args(0) is reserved for the caller's task identifier).
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a clear message instead of a raw ArrayIndexOutOfBoundsException.
    require(args.length >= 2,
      "expected the business date (yyyy-MM-dd) as the second program argument")
    val date = args(1)
    // The date is interpolated into DDL/DML below — validate its shape so a
    // bad argument cannot yield a malformed table name or injected SQL.
    require(date.matches("""\d{4}-\d{2}-\d{2}"""),
      s"date must be in yyyy-MM-dd format, got: $date")

    System.setProperty("HADOOP_USER_NAME", "hkbigdata")

    val conf = new SparkConf()
      .setAppName("task-sql")
      .setMaster("local[*]") // NOTE(review): hard-coded local master — confirm this is dev-only

    val sparkSession = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()
    try {
      // e.g. user_tag_merge_20240424 — one table per day.
      val tablename = s"user_tag_merge_${date.replace("-", "")}"

      // The tag definitions drive the ClickHouse schema: one String column per tag.
      val infoes = TagInfoDAO.getTaskTagMergeOntask()
      val columns = infoes.map(_.tagCode.toLowerCase + " String").mkString(",")

      // Daily MergeTree table keyed/ordered by uid; no partitioning needed
      // because the table itself is per-day.
      val createsql =
        s"""
           |create table if not exists ${tablename}  ( uid UInt64, $columns )
           |   engine = MergeTree
           |   primary key  uid
           |order by  uid
           |""".stripMargin

      println(columns)
      MyClickHouseUtil.executeSql(createsql)

      // config.properties provides, e.g.:
      //   hdfs-store.path=hdfs://hadoop102:8020/user_profile
      //   data-warehouse.dbname=gmall
      //   user-profile.dbname=user_profile
      val properties: Properties = MyPropertiesUtil.load("config.properties")
      val updbname = properties.getProperty("user-profile.dbname")

      val frame = sparkSession.sql(s"select * from $updbname.$tablename")
      frame.show(100, false) // debug preview; truncate=false to see full tag values

      frame.write
        .mode(SaveMode.Append)
        .option("batchsize", "100")
        .option("isolationLevel", "NONE") // ClickHouse has no transactions
        .option("numPartitions", "4") // write concurrency
        .option("driver", "ru.yandex.clickhouse.ClickHouseDriver")
        .jdbc("jdbc:clickhouse://192.168.6.113:8123/user_profile", tablename, new Properties())
    } finally {
      // Fix: the session was never stopped, leaking the SparkContext on exit.
      sparkSession.stop()
    }
  }
}
