package com.hkbigdata.userprofile.app

import com.hkbigdata.userprofile.common.bean.{TagInfo, TaskInfo, TaskTagRule}
import com.hkbigdata.userprofile.common.constcode.ConstCode
import com.hkbigdata.userprofile.common.dao.{TagInfoDAO, TaskInfoDAO, TaskTagRuleDAO}
import com.hkbigdata.userprofile.common.util.{MyPropertiesUtil, MySqlUtil}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

import java.util.Properties

/**
 * @author Clown
 */
object TaskTagSqlApp {

  /**
   * Batch entry point, launched via spark-submit with two trailing arguments:
   *   args(0) — task id identifying the tag task in MySQL
   *   args(1) — business date of the data to process, normally the previous
   *             day, e.g. "2021-06-08"
   *
   * Flow: load the task / tag / mapping-rule metadata from MySQL, create the
   * tag table in Hive if it does not exist (one table per tag code), then run
   * the task SQL and overwrite this date's partition with the result.
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage hint instead of an ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "Usage: TaskTagSqlApp <taskId> <taskDate(yyyy-MM-dd)>")
    val taskId: String = args(0)
    val taskDate: String = args(1)

    val properties: Properties = MyPropertiesUtil.load("config.properties")
    val userProfileDbName = properties.getProperty("user-profile.dbname")
    val wareHouseDbName = properties.getProperty("data-warehouse.dbname")
    val hdfsStorePath = properties.getProperty("hdfs-store.path")

    // Run HDFS operations as this user regardless of the OS account.
    System.setProperty("HADOOP_USER_NAME", "hkbigdata")

    // Initialize Spark with Hive support; the master is supplied by spark-submit.
    val sparkConf: SparkConf = new SparkConf().setAppName("tag_sql_app")
    val sparkSession: SparkSession =
      SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()

    // Load the task definition, the tag definition and the value-mapping rules.
    val taskInfo: TaskInfo = TaskInfoDAO.getTaskInfo(taskId)
    val tagInfo: TagInfo = TagInfoDAO.getTagInfoByTaskId(taskId)
    val taskTagRules: List[TaskTagRule] = TaskTagRuleDAO.getTaskTagRuleListByTaskId(taskId)

    // The tag code doubles as the Hive table name; the tag's value type picks
    // the Hive column type. A default case gives a meaningful failure instead
    // of a bare MatchError when an unknown type code appears.
    val tableName: String = tagInfo.tagCode
    val tagValueType: String = tagInfo.tagValueType match {
      case ConstCode.TAG_VALUE_TYPE_STRING => "STRING"
      case ConstCode.TAG_VALUE_TYPE_LONG => "BIGINT"
      case ConstCode.TAG_VALUE_TYPE_DECIMAL => "DECIMAL(16,2)"
      case ConstCode.TAG_VALUE_TYPE_DATE => "STRING"
      case other =>
        throw new IllegalArgumentException(s"Unsupported tag value type: $other")
    }

    // Create the tag table if it does not exist, partitioned by business date.
    val createTableSql =
      s"""create  table if not exists $userProfileDbName.$tableName
         | ( uid string, tag_value  $tagValueType )
         | comment '${tagInfo.tagName}' PARTITIONED BY (`dt` STRING)
         |  ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t'
         |  LOCATION '$hdfsStorePath/$userProfileDbName/${tableName.toLowerCase}/'""".stripMargin

    sparkSession.sql(" use " + userProfileDbName)
    sparkSession.sql(createTableSql)
    println(createTableSql)

    // Substitute the business-date placeholder into the stored task SQL.
    val taskSql: String = taskInfo.taskSql.replace("${dt}", taskDate)
    println(taskSql)

    // Map the raw query_value to a sub-tag value via CASE WHEN when mapping
    // rules exist; otherwise pass query_value through unchanged.
    // NOTE(review): rule values are interpolated directly into the SQL text —
    // safe only as long as the rule table is trusted; a quote in a rule value
    // would break (or inject into) the statement.
    val caseWhenSql: String =
      if (taskTagRules.nonEmpty) {
        val whenClauses: List[String] = taskTagRules.map { rule =>
          s" when '${rule.queryValue}' then '${rule.subTagValue}' "
        }
        " case query_value " + whenClauses.mkString(" ") + " end as tag_value "
      } else {
        "query_value as tag_value"
      }

    val selectSql: String = s"select uid, $caseWhenSql from ($taskSql) as ts"
    println(selectSql)

    // "insert overwrite TABLE" — the TABLE keyword is required by Hive and by
    // Spark before 3.0, and accepted everywhere. Overwriting only this date's
    // partition keeps reruns idempotent.
    val insertSql: String =
      s"insert overwrite table $userProfileDbName.$tableName partition(dt='$taskDate') " + selectSql

    sparkSession.sql("use " + wareHouseDbName)
    sparkSession.sql(insertSql)
    println(insertSql)

    // Release the Spark application's resources.
    sparkSession.stop()
  }
}
