package com.hkbigdata.userprofile.app

import com.hkbigdata.userprofile.common.bean.TagInfo
import com.hkbigdata.userprofile.common.dao.{TagInfoDAO, TaskInfoDAO}
import com.hkbigdata.userprofile.common.util.MyPropertiesUtil
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

import java.util.Properties

/**
 * @author Clown
 */

/**
 * 1、要组合哪些标签页？ show tables （不准确）
 * 最好查询 tag_info join task_info
 * 获得启用状态的标签表
 * 2、建立宽表
 * 表名   user_tag_merge_20210609
 * 字段   uid, <tag_code1>, <tag_code2>
 * 考虑到每天的标签数是不一样的，那么宽表的字段也不一样，所以不要使用固定的表，而是每天建一张新表，每天的字段可以不同
 *
 * 3、合并数据进宽表
 * 利用 pivot 进行行转列
 */
object TaskTagMergeApp {
  /**
   * Merges all enabled tag tables of one task date into a single wide table
   * `user_tag_merge_<yyyyMMdd>` with schema (uid, <tag_code1>, <tag_code2>, ...),
   * using UNION ALL to stack (uid, tag_code, tag_value) rows and PIVOT to turn
   * them into columns. Because the set of enabled tags can change every day,
   * a fresh table is created per taskDate instead of reusing a fixed one.
   *
   * args(0) = taskId, args(1) = taskDate in yyyy-MM-dd format.
   */
  def main(args: Array[String]): Unit = {
    // Must be set before the SparkSession touches HDFS / Hive.
    System.setProperty("HADOOP_USER_NAME", "hkbigdata")

    // Fail fast with a usable message instead of ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "usage: TaskTagMergeApp <taskId> <taskDate yyyy-MM-dd>")
    val taskId: String = args(0)
    val taskDate: String = args(1)
    println(taskId)
    println(taskDate)

    val conf: SparkConf = new SparkConf().setAppName("task merge")
//      .setMaster("local[*]")
    val session: SparkSession = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()

    try {
      // 1. Tag tables currently in enabled state (tag_info join task_info).
      val tagInfoes: List[TagInfo] = TagInfoDAO.getTaskTagMergeOnTask()
      tagInfoes.foreach(println)

      // 2. Create today's wide table: one string column per enabled tag code.
      val tableName: String = s"user_tag_merge_${taskDate.replace("-", "")}"
      val columns: String =
        tagInfoes.map((tagInfo: TagInfo) => s" ${tagInfo.tagCode.toLowerCase()} string").mkString(",")

      val properties: Properties = MyPropertiesUtil.load("config.properties")
      val userProfileDBName: String = properties.getProperty("user-profile.dbname")
      val hdfsPath: String = properties.getProperty("hdfs-store.path")

      val createSql: String =
        s"""
           |  create table if not exists `$userProfileDBName`.`$tableName` ( uid string, $columns)
           |  comment '${tableName}'
           |  ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t'
           |  LOCATION '$hdfsPath/$userProfileDBName/$tableName'
           |""".stripMargin

      println(createSql)
      session.sql(createSql)

      // 3. Merge data into the wide table. One sub-select per tag table,
      //    stacked with UNION ALL; PIVOT then rotates rows into columns.
      val unionSql: String = tagInfoes.map { data =>
        val table = data.tagCode.toLowerCase()
        s"select uid, '$table' as tag_code, tag_value from $table group by uid, tag_code, tag_value"
      }.mkString(" union all ")

      // Quoted tag-code list consumed by the PIVOT ... IN (...) clause.
      val pivotInSql: String = tagInfoes.map(data => s"'${data.tagCode.toLowerCase()}'").mkString(",")

      val selectSql: String = s"select * from ($unionSql)" +
        s" pivot ( concat_ws(',', collect_list(tag_value))  for tag_code in ($pivotInSql))"

      // "table" keyword kept explicit: required by Hive's parser and by some
      // older Spark versions; optional (and harmless) in current Spark SQL.
      val insertSql: String = s"insert overwrite table $userProfileDBName.$tableName " + selectSql

      println(insertSql)
      session.sql("use " + userProfileDBName)
      session.sql(insertSql)
    } finally {
      // Always release cluster resources, even when a SQL step throws.
      session.stop()
    }
  }
}
















