package com.atguigu.userprofile.app

import com.atguigu.userprofile.common.bean.TagInfo
import com.atguigu.userprofile.common.const.ConstCode
import com.atguigu.userprofile.common.dao.TagInfoDAO
import com.atguigu.userprofile.common.util.ClickhouseUtil
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ListBuffer

object TaskBitmapApp {

  // Task 4: materialize tag bitmaps in ClickHouse with dynamically assembled
  // "insert ... select" statements.
  //   1. Fetch every tag that is switched on for computation.
  //   2. Split the tags into four groups by tag value type.
  //   3. Build and execute one SQL statement per group (one target table each).

  /**
   * Job entry point.
   *
   * @param args args(0) = task id (currently unused; kept so the scheduler can
   *             invoke every task with the same argument convention),
   *             args(1) = business date in yyyy-MM-dd form
   */
  def main(args: Array[String]): Unit = {
    // The actual work is plain JDBC against ClickHouse, but the job is wrapped
    // as a Spark application so it can share the pipeline's management,
    // monitoring, scheduling and deployment machinery.
    val sparkConf: SparkConf = new SparkConf().setAppName("task_bitmap_app")
    val sparkContext = new SparkContext(sparkConf)

    val taskId: String = args(0)
    val busiDate: String = args(1)

    /**
     * Clears the `busiDate` partition of `targetTableName` and, when this
     * value type has at least one tag, re-inserts freshly aggregated bitmaps
     * from the wide merge table. Generated SQL shape:
     *
     *   insert into <target>
     *   select tc.1 tag_code, tc.2 tag_value, groupBitmapState(cast(uid as UInt64)), '<busiDate>'
     *   from ( select uid, arrayJoin([('tag_code_a', tag_code_a), ...]) tc
     *          from user_tag_merge_<yyyymmdd> ) tv
     *   where tc.2 <> '' and tc.2 is not null
     *   group by tc.1, tc.2
     *
     * @param tagInfos        tags whose values live in `targetTableName`
     * @param targetTableName ClickHouse bitmap table for this value type
     * @param busiDate        business date (yyyy-MM-dd), used as the dt partition value
     */
    def insertBitmap(tagInfos: Seq[TagInfo], targetTableName: String, busiDate: String): Unit = {
      // Idempotency: drop any rows previously written for this date before
      // re-inserting, so reruns do not duplicate data.
      val dropPartitionSQL = s" alter table $targetTableName delete  where dt='$busiDate'"
      println(dropPartitionSQL)
      ClickhouseUtil.executeSql(dropPartitionSQL)

      if (tagInfos.nonEmpty) {
        // One ('tag_code', tag_code) tuple per tag. The second element is a
        // column reference: the merge table's columns are the lower-cased tag
        // codes, so arrayJoin explodes each uid row into (code, value) pairs.
        val tagCodeSQL = tagInfos.map { tagInfo =>
          val code = tagInfo.tagCode.toLowerCase
          s"('$code',$code)"
        }.mkString(",")
        // Source table name embeds the date without dashes, e.g. user_tag_merge_20200101.
        val sourceTableName = s"user_tag_merge_${busiDate.replace("-", "")}"
        val sql =
          s"""
             |
             |      insert into  $targetTableName
             |      select  tc.1 tag_code , tc.2 tag_value , groupBitmapState( cast ( uid  as UInt64)) ,'$busiDate'
             |     from ( select  uid , arrayJoin( [$tagCodeSQL] ) tc from  ${sourceTableName}
             |      )  tv
             |      where  tc.2 <>''  and tc.2 is not null
             |      group by  tc.1 ,tc.2
             |
             |""".stripMargin

        println(sql)
        ClickhouseUtil.executeSql(sql)
      }
    }

    // 1. All tags currently enabled for computation.
    val tagInfoList: List[TagInfo] = TagInfoDAO.getTagInfoWithOnList()

    // 2. Partition by value type in a single pass (replaces four mutable
    //    buffers and an if/else-if chain). Unknown value types are grouped
    //    too but never looked up, matching the original's silent drop; the
    //    default keeps lookups total for types with no tags.
    val tagsByType: Map[String, List[TagInfo]] =
      tagInfoList.groupBy(_.tagValueType).withDefaultValue(Nil)

    // 3. One statement per value type / target table. An empty group still
    //    runs the partition cleanup inside insertBitmap, as before.
    insertBitmap(tagsByType(ConstCode.TAG_VALUE_TYPE_STRING), "user_tag_value_string", busiDate)
    insertBitmap(tagsByType(ConstCode.TAG_VALUE_TYPE_LONG), "user_tag_value_long", busiDate)
    insertBitmap(tagsByType(ConstCode.TAG_VALUE_TYPE_DECIMAL), "user_tag_value_decimal", busiDate)
    insertBitmap(tagsByType(ConstCode.TAG_VALUE_TYPE_DATE), "user_tag_value_date", busiDate)
  }

}
