package com.atguigu.userprofile.app

import com.atguigu.userprofile.bean.{TagInfo, TaskInfo, TaskTagRule}
import com.atguigu.userprofile.constant.CodeConst.{TAG_VALUE_TYPE_DATE, TAG_VALUE_TYPE_DECIMAL, TAG_VALUE_TYPE_LONG, TAG_VALUE_TYPE_STRING}
import com.atguigu.userprofile.dao.{TagInfoDAO, TaskInfoDAO, TaskTagRuleDAO}
import com.atguigu.userprofile.util.{MyPropertiesUtil, MysqlUtil}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

import java.util.Properties

object TaskSqlApp {

  /**
   * Spark entry point that computes a user-profile tag for one business date.
   *
   * Pipeline:
   *   1. Load the tag definition (`tag_info`), task definition (`task_info`)
   *      and query-value → tag-value mapping rules (`task_tag_rule`) from MySQL.
   *   2. Create the target Hive table (if absent) from the tag definition.
   *   3. Build a SELECT that maps the task SQL's `query_value` to the final tag value.
   *   4. Overwrite the `dt` partition of the tag table with the result.
   *
   * @param args args(0) = task id, args(1) = business date (used as the dt partition
   *             and substituted for the `$dt` placeholder in the task SQL)
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "usage: TaskSqlApp <taskId> <busiDate>")
    val taskId: String = args(0)
    val busiDate: String = args(1)

    val sparConf = new SparkConf().setAppName("task_sql_app") //.setMaster("local[*]")
    val sparkSession: SparkSession =
      SparkSession.builder().config(sparConf).enableHiveSupport().getOrCreate()

    // 1. Read tag definition, task definition and value-mapping rules via JDBC DAOs.
    val tagInfo: TagInfo = TagInfoDAO.getTagInfoByTaskId(taskId)
    val taskInfo: TaskInfo = TaskInfoDAO.getTaskInfo(taskId)
    val taskTagRuleList: List[TaskTagRule] = TaskTagRuleDAO.getTaskTagRuleList(taskId)

    println(tagInfo)
    println(taskInfo)
    println(taskTagRuleList)

    // 2. Auto-create the target table from the tag definition:
    //    table name   = tag code (lowercased)
    //    columns      = (uid string, tag_value <derived from tagValueType>)
    //    partitioning = one partition per day: partitioned by (dt string)
    //    comment      = the tag's display name
    //    format       = uncompressed plain text, for easy downstream export
    //    location     = <hdfsPath>/<dbName>/<tableName>
    val tableName = tagInfo.tagCode.toLowerCase

    // Map the tag value type code to a Hive column type.
    // Fix: an unknown code used to throw a bare MatchError; fail with a clear message.
    val fieldType: String = tagInfo.tagValueType match {
      case TAG_VALUE_TYPE_LONG    => "bigint"
      case TAG_VALUE_TYPE_DECIMAL => "decimal"
      case TAG_VALUE_TYPE_STRING  => "string"
      case TAG_VALUE_TYPE_DATE    => "string"
      case other =>
        throw new IllegalArgumentException(s"unsupported tag value type: $other")
    }

    // Fix: escape single quotes so a tag name containing ' cannot break the DDL.
    val comment = tagInfo.tagName.replace("'", "\\'")

    val properties: Properties = MyPropertiesUtil.load("config.properties")
    val hdfsPath: String = properties.getProperty("hdfs-store.path")
    val upDbName: String = properties.getProperty("user-profile.dbname")
    val dwDbName: String = properties.getProperty("data-warehouse.dbname")

    val createTableSQL =
      s"""
         |create table if not exists $upDbName.$tableName
         |(uid string ,  tag_value   $fieldType )
         | partitioned by (dt string)
         |comment '$comment'
         | ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t'
         | location '$hdfsPath/$upDbName/$tableName'
         |""".stripMargin

    println(createTableSQL)
    sparkSession.sql(createTableSQL)

    // 3. Convert the query value into the final tag value.
    //    Compatibility case 1: with no mapping rules, pass query_value through
    //    unchanged; otherwise build a CASE WHEN over the configured mappings.
    //    NOTE(review): rule values come from MySQL config and are interpolated
    //    into SQL unescaped — assumed trusted operator input; verify upstream.
    val tagValueSQL: String =
      if (taskTagRuleList.nonEmpty) {
        val whenThenSQL = taskTagRuleList
          .map(rule => s" when '${rule.queryValue}' then '${rule.subTagValue}' ")
          .mkString(" ")
        s"case query_value $whenThenSQL end as  tag_value"
      } else {
        " query_value as tag_value"
      }

    // Compatibility case 2: substitute the $dt placeholder with the business date.
    val taskSQL: String = taskInfo.taskSql.replace("$dt", busiDate)

    val selectSQL = s" select uid, $tagValueSQL from ($taskSQL) tt"

    println(selectSQL)

    // 4. Overwrite this business date's partition with the computed tags.
    val insertSQL =
      s" insert overwrite table  ${upDbName}.${tableName} partition (dt='$busiDate') $selectSQL"

    println(insertSQL)
    // The task SQL references unqualified warehouse tables, so switch DB first.
    sparkSession.sql(s"use $dwDbName")
    sparkSession.sql(insertSQL)

  }

}
