package com.atguigu.userprofile.app

import com.atguigu.userprofile.bean.{TagInfo, TaskInfo, TaskTagRule}
import com.atguigu.userprofile.dao.{TagInfoDao, TaskInfoDao, TaskTagRuleDao}
import com.atguigu.userprofile.util.ConstCode._
import com.atguigu.userprofile.util.MyPropertiesUtil
import java.util.Properties
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession


/**
 * @Author: cpw
 * @Date: 2021/8/1 15:24
 * @Version 1.0
 */
object TaskSqlApp {

    /**
     * Entry point for one tag-computation task.
     *
     * Pipeline:
     *   1. Load the tag / task / rule definitions from MySQL via the DAO layer.
     *   2. Create the tag table in the user-profile Hive database if it does
     *      not exist yet (one table per tag, partitioned by dt).
     *   3. Build a SELECT that maps query values to sub-tag values
     *      (CASE WHEN for discrete tags, pass-through for continuous tags).
     *   4. INSERT OVERWRITE the task-date partition of the tag table.
     *
     * @param args args(0) = task id, args(1) = task date (used as the dt
     *             partition value and substituted for "$dt" in the task SQL)
     */
    def main(args: Array[String]): Unit = {

        // Fail fast with a clear usage message instead of an opaque
        // ArrayIndexOutOfBoundsException when arguments are missing.
        require(args.length >= 2, "Usage: TaskSqlApp <taskId> <taskDate>")
        val taskId: String = args(0)
        val taskDate: String = args(1)

        // 0. Spark runtime environment. Hive support is required for the
        //    DDL/DML executed below. Master is supplied by spark-submit.
        val sparkConf: SparkConf = new SparkConf().setAppName("task_sql_app")
                //.setMaster("local[*]")
        val sparkSession: SparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()

        try {
            // 1. Read the task definitions from MySQL: tag_info / task_info / task_tag_rule.
            val tagInfo: TagInfo = TagInfoDao.getTagInfoByTask(taskId)
            val taskInfo: TaskInfo = TaskInfoDao.getTaskInfo(taskId)
            val taskTagRuleList: List[TaskTagRule] = TaskTagRuleDao.getTaskTagRuleListByTaskId(taskId)

            println(tagInfo)
            println(taskInfo)
            println(taskTagRuleList)

            // 2. Prepare the table in the user-profile database: one table per
            //    tag, named after the tag code.
            val tableName = tagInfo.tagCode.toLowerCase()
            val tableCmt: String = tagInfo.tagName

            val properties: Properties = MyPropertiesUtil.load("config.properties")
            val hdfsPath: String = properties.getProperty("hdfs-store.path")
            val dwName: String = properties.getProperty("data-warehouse.dbname")
            val upName: String = properties.getProperty("user-profile.dbname")

            // Map the tag's declared value type to the Hive column type.
            // An explicit failing case replaces the bare MatchError a new,
            // unmapped type code would otherwise raise.
            val tagValueType: String = tagInfo.tagValueType match {
                case TAG_VALUE_TYPE_STRING => "STRING"
                case TAG_VALUE_TYPE_LONG => "BIGINT"
                case TAG_VALUE_TYPE_DECIMAL => "DECIMAL(16,2)"
                case TAG_VALUE_TYPE_DATE => "STRING"
                case other =>
                    throw new IllegalArgumentException(
                        s"Unsupported tag value type '$other' for tag ${tagInfo.tagCode}")
            }

            // TODO: add a DROP step so that a rerun with changed columns
            //       recreates the table ("if not exists" will not update it).
            val createTableSQL =
                s"""
                  | create table if not exists $upName.$tableName
                  | (uid string, tag_value $tagValueType)
                  | comment '$tableCmt'
                  | row format delimited fields terminated by '\\t'
                  | partitioned by  (dt string)
                  | location '$hdfsPath/$upName/$tableName'
                  |""".stripMargin

            println(createTableSQL)
            sparkSession.sql(createTableSQL)

            // 3. Build the tag-value expression.
            //    Discrete tags (rules present): case query_value when 'M' then '男' ... end
            //    Continuous tags (no level-4 sub-tags): use the query result directly.
            val tagValueSQL: String =
                if (taskTagRuleList.nonEmpty) {
                    val whenClauses: List[String] = taskTagRuleList.map { taskTagRule =>
                        s"when '${taskTagRule.queryValue}' then '${taskTagRule.subTagValue}'"
                    }
                    "case query_value " + whenClauses.mkString(" ") + " end"
                } else {
                    "query_value"
                }

            // Substitute the task date into the user-defined SQL where requested.
            val taskSQL: String = taskInfo.taskSql.replace("$dt", taskDate)

            val selectSQL =
                s"""
                  | select uid, $tagValueSQL as tag_value from (
                  | ${taskSQL} ) tagsql
                  |""".stripMargin

            println(selectSQL)

            // 4. Overwrite the task-date partition of the tag table.
            val insertSQL =
                s"""
                  | insert overwrite table $upName.$tableName partition (dt = '$taskDate')
                  | $selectSQL
                  |""".stripMargin

            println(insertSQL)
            // Switch to the data-warehouse DB first: the user-defined taskSQL
            // may reference tables without a database prefix.
            sparkSession.sql(s"use $dwName")
            sparkSession.sql(insertSQL)
        } finally {
            // Release the Spark application's resources even if a step fails.
            sparkSession.stop()
        }
    }

}
