package com.atguigu.userprofile.app

import java.util.Properties

import com.atguigu.userprofile.ConstCode
import com.atguigu.userprofile.bean.{TagInfo, TaskInfo, TaskTagRule}
import com.atguigu.userprofile.dao.{TagInfoDao, TaskInfoDao, TaskTagRuleDao}
import com.atguigu.userprofile.util.{MyPropertiesUtil, MySqlUtil}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object TaskSqlApp {

  // Workflow:
  // 1. Read the definitions from MySQL: tag_info / task_info / task_tag_rule
  // 2. Create the profile-DB table for this tag (one Hive table per tag)
  // 3. Build the SELECT statement from the SQL stored in task_info
  // 4. Combine INSERT OVERWRITE + SELECT to write into the tag table

  /**
   * Entry point. Expects two arguments appended by the scheduler to the
   * spark-submit command line:
   *   args(0) = task_id   — which task (and therefore which tag) to run
   *   args(1) = task_date — the partition date; also substituted for "$dt"
   *                         placeholders inside the task's SQL
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "usage: TaskSqlApp <task_id> <task_date>")
    val taskId: String = args(0)
    val taskDate: String = args(1)

    // 0. Spark environment (master is supplied by spark-submit, e.g. yarn).
    val sparkConf: SparkConf = new SparkConf().setAppName("task_sql_app") //.setMaster("local[*]")
    val sparkSession: SparkSession =
      SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()

    // 1. Load this task's definitions from MySQL via the DAO helpers.
    val tagInfo: TagInfo = TagInfoDao.getTagInfoByTask(taskId)
    val taskInfo: TaskInfo = TaskInfoDao.getTaskInfo(taskId)
    val taskTagRuleList: List[TaskTagRule] = TaskTagRuleDao.getTaskTagRuleListByTaskId(taskId)

    println(tagInfo)
    println(taskInfo)
    println(taskTagRuleList)

    // 2. Create the tag table in the profile DB:
    //    create table $upName.$tableName (uid string, tag_value $tagValueType)
    //    comment '$tableCmt' partitioned by (dt string) location '$hdfsPath/...'
    val tableName = tagInfo.tagCode.toLowerCase()
    val tableCmt = tagInfo.tagName
    val properties: Properties = MyPropertiesUtil.load("config.properties")
    val hdfsPath: String = properties.getProperty("hdfs-store.path")
    val dwName: String = properties.getProperty("data-warehouse.dbname")
    val upName: String = properties.getProperty("user-profile.dbname")

    // Map the logical tag value type to a Hive column type.
    // Default case turns an unknown type code into a descriptive error
    // instead of a bare scala.MatchError.
    val tagValueType: String = tagInfo.tagValueType match {
      case ConstCode.TAG_VALUE_TYPE_STRING  => "STRING"
      case ConstCode.TAG_VALUE_TYPE_LONG    => "BIGINT"
      case ConstCode.TAG_VALUE_TYPE_DECIMAL => "DECIMAL(16,2)"
      case ConstCode.TAG_VALUE_TYPE_DATE    => "STRING"
      case other =>
        throw new IllegalArgumentException(
          s"unsupported tag value type '$other' for tag ${tagInfo.tagCode}")
    }

    val createTableSQL =
      s"""
         |    create table if not exists  $upName.$tableName (  uid  string ,tag_value $tagValueType  )
         |      comment  '$tableCmt'
         |      ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t'
         |      partitioned by  (dt string)
         |      location  '$hdfsPath/$upName/$tableName'
       """.stripMargin

    println(createTableSQL)
    sparkSession.sql(createTableSQL)

    // 3. Build the tag-value expression.
    //    With level-4 sub-tags: map query_value via CASE WHEN.
    //    Without (continuous-value tags): use the query result directly.
    // NOTE(review): rule values are interpolated into the SQL unescaped;
    // they come from our own metadata tables, but a quote inside
    // queryValue/subTagValue would break the statement — consider escaping.
    val tagValueSQL: String =
      if (taskTagRuleList.nonEmpty) {
        val caseWhenList: List[String] = taskTagRuleList.map { taskTagRule =>
          s"when '${taskTagRule.queryValue}' then  '${taskTagRule.subTagValue}'"
        }
        "case  query_value " + caseWhenList.mkString(" ") + " end "
      } else {
        " query_value"
      }

    // Substitute the task date for any "$dt" placeholder in the stored SQL.
    val taskSQL: String = taskInfo.taskSql.replace("$dt", taskDate)

    val selectSQL =
      s"""  select uid , $tagValueSQL as tag_value from  (
         |    ${taskSQL} ) tagsql
       """.stripMargin
    println(selectSQL)

    // 4. INSERT OVERWRITE + SELECT into the tag table's date partition.
    val insertSQL =
      s"""
         |insert overwrite table  $upName.$tableName  partition ( dt ='$taskDate')
         | $selectSQL
       """.stripMargin

    println(insertSQL)
    // The stored task SQL references warehouse tables by bare name, so switch
    // to the data-warehouse database before running the insert.
    sparkSession.sql(s" use  $dwName")
    sparkSession.sql(insertSQL)
  }

}
