package com.atguigu.userprofile.app

import java.util.Properties

import com.atguigu.userprofile.bean.{TagInfo, TaskInfo, TaskTagRule}
import com.atguigu.userprofile.constants.CodeConst
import com.atguigu.userprofile.dao.{TagInfoDAO, TaskInfoDAO, TaskTagRuleDAO}
import com.atguigu.userprofile.util.{MyPropertiesUtil, MySqlUtil}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object TaskSqlApp {

  // Workflow:
  //   1. Load the SQL-tag task definition from MySQL by task_id:
  //      1.1 tag definition            (tag_info)
  //      1.2 task definition + SQL     (task_info)
  //      1.3 query-value -> sub-tag-value mapping (task_tag_rule)
  //   2. Run a SELECT against the Hive warehouse to extract the tag values.
  //   3. Write the result into the per-tag table in the user-profile database
  //      (one table per tag; created on first run).
  def main(args: Array[String]): Unit = {

    // Fail fast with a clear message instead of ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "usage: TaskSqlApp <taskId> <taskDate>")
    val taskId: String = args(0)
    val taskDate: String = args(1)

    val sparkConf: SparkConf = new SparkConf().setAppName("task_sql_app") //.setMaster("local[*]")
    val sparkSession: SparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()

    try {
      // 1. Load task metadata from MySQL via the DAO layer.
      val tagInfo: TagInfo = TagInfoDAO.getTagInfoByTaskID(taskId)
      println(tagInfo)
      val taskInfo: TaskInfo = TaskInfoDAO.getTaskInfoById(taskId)
      println(taskInfo)
      val taskTagRuleList: List[TaskTagRule] = TaskTagRuleDAO.getTaskTagRuleList(taskId)
      println(taskTagRuleList)

      // 2. Create the per-tag table if it does not exist yet:
      //      name    = tag code, lower-cased
      //      columns = uid string, tag_value <derived from tagValueType>
      //      partitioned by dt (the run date)
      val tableName = tagInfo.tagCode.toLowerCase()

      // Map the tag's declared value-type code to a Hive column type.
      // An unknown code is a configuration error: report it explicitly
      // instead of surfacing an opaque scala.MatchError.
      val tagValueTypeSQL = tagInfo.tagValueType match {
        case CodeConst.TAG_VALUE_TYPE_STRING  => "string"
        case CodeConst.TAG_VALUE_TYPE_LONG    => "bigint"
        case CodeConst.TAG_VALUE_TYPE_DECIMAL => "decimal(16,2)"
        case CodeConst.TAG_VALUE_TYPE_DATE    => "string"
        case other =>
          throw new IllegalArgumentException(s"unsupported tag value type: $other for task $taskId")
      }

      val properties: Properties = MyPropertiesUtil.load("config.properties")
      val hdfsStorePath: String = properties.getProperty("hdfs-store.path")
      val dwDBname: String = properties.getProperty("data-warehouse.dbname")
      val upDBname: String = properties.getProperty("user-profile.dbname")

      val createTableSQL =
        s"""
           |    create  table  if not exists  $upDBname.$tableName
           |    (uid string , tag_value    $tagValueTypeSQL  )
           |    partitioned by (dt string)
           |     comment '${tagInfo.tagName}'
           |     ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t'
           |    location '$hdfsStorePath/$upDBname/$tableName'
           |
         """.stripMargin

      println(createTableSQL)
      sparkSession.sql(createTableSQL)

      // 3. Build the extraction SELECT. When four-level (sub-tag) rules exist,
      //    translate the raw query_value to the sub-tag value with a CASE
      //    expression; otherwise pass query_value through unchanged.
      val tagValueSQL =
        if (taskTagRuleList.nonEmpty) {
          // " when '<query>' then '<subTag>' when ... "
          val whenThenSQL = taskTagRuleList
            .map(rule => s" when '${rule.queryValue}'   then  '${rule.subTagValue}' ")
            .mkString(" ")
          s"case query_value $whenThenSQL  end  tag_value"
        } else {
          s" query_value as tag_value"
        }

      // Substitute the $dt placeholder in the stored task SQL with the run date.
      val taskSql: String = taskInfo.taskSql.replace("$dt", taskDate)

      // The derived table gets an alias ("tv"): Hive SQL requires one, and it
      // is harmless for Spark's own parser.
      val selectSQL =
        s"""
           | select uid ,   $tagValueSQL
           |  from  (
           |     $taskSql
           | ) tv
         """.stripMargin

      println(selectSQL)

      // Run inside the warehouse database so unqualified table names in the
      // task SQL resolve there.
      val useDBSql = s" use $dwDBname"
      println(useDBSql)
      sparkSession.sql(useDBSql)

      // Overwrite the partition for this run date so re-runs are idempotent.
      val insertSQL = s"insert overwrite table $upDBname.$tableName partition(dt='$taskDate') $selectSQL"
      println(insertSQL)
      sparkSession.sql(insertSQL)
    } finally {
      // Release the Spark application's resources even when a step fails.
      sparkSession.stop()
    }
  }

}
