package com.itcj.dmp.tags

import com.itcj.dmp.casename.{Tags, tagcase}
import com.itcj.dmp.utils.KuduHelper
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{Dataset, SparkSession}

/**
 * Daily user-tag pipeline entry point: builds today's tags, merges them with
 * the (decayed) historical tags, runs unified user identification, and
 * persists the result to a date-stamped Kudu table.
 */
object TagRunner {
  // Kudu table names derived from today's and yesterday's run dates.
  val todayTableName = "ODS_" + KuduHelper.today()
  val yesterdayTableName = "ODS_" + KuduHelper.yesterday()
  val todayTagsTableName = "ODS_Tags" + KuduHelper.today()
  val yesterdayTagsTableName = "ODS_Tags" + KuduHelper.yesterday()

  def main(args: Array[String]): Unit = {
    // 1. Initialize the SparkSession (loadConfig() is supplied by SparkConfigHelper).
    import com.itcj.dmp.utils.SparkConfigHelper._
    val sparkSession: SparkSession = SparkSession.builder()
      .master("local[2]")
      .appName("TagRunner")
      .loadConfig()
      .getOrCreate()

    try {
      // 2. Build today's user tags.
      val todayTags: Dataset[tagcase] = MakeTags.makeTodayTag(sparkSession)
      todayTags.show()

      // 3. Load historical (decayed) tags; absent on the first ever run.
      val historyOp: Option[Dataset[tagcase]] = HistoryTags.getHistoryTag(sparkSession)

      // 4. Merge today's tags with history, falling back to today's tags only.
      //    Option.fold replaces the original var/null + if/else pattern.
      val unionDs: Dataset[tagcase] = historyOp.fold(todayTags)(todayTags.union)
      unionDs.show()

      // 5. Unified user identification (collapse tags onto one mainId per user).
      val tags: Dataset[Tags] = MergeTags.process(unionDs, sparkSession)
      tags.show()

      // 6. Persist to Kudu. Kudu primary-key columns must be NOT NULL, so build
      //    a fresh schema with mainId marked non-nullable instead of mutating
      //    the array returned by tags.schema in place — that mutation is not
      //    guaranteed to be visible on a subsequent schema access.
      import com.itcj.dmp.utils.KuduHelper._
      val kuduSchema = StructType(
        tags.schema.fields.map { f =>
          if (f.name == "mainId") f.copy(nullable = false) else f
        }
      )
      sparkSession.createKuduTable(todayTagsTableName, kuduSchema, List("mainId"))
      tags.saveKuduTable(todayTagsTableName)
    } finally {
      // Always release Spark resources, even if a pipeline stage fails.
      sparkSession.stop()
    }
  }
}
