package com.atguigu.userprofile.task;

import com.atguigu.userprofile.bean.TagInfo;
import com.atguigu.userprofile.bean.TaskInfo;
import com.atguigu.userprofile.bean.TaskTagRule;
import com.atguigu.userprofile.constant.ConstCode;
import com.atguigu.userprofile.dao.TagInfoDao;
import com.atguigu.userprofile.dao.TaskInfoDao;
import com.atguigu.userprofile.dao.TaskTagRuleDao;
import com.atguigu.userprofile.util.MyPropsUtil;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

import java.util.List;
import java.util.stream.Collectors;

/**
 * Spark SQL task that computes statistic-type and rule-type tags.
 *
 * <p>Steps:
 * <ol>
 *   <li>Read the tag task id and the business date from the command line.</li>
 *   <li>Load the tag definition, the task definition and the tag value mapping rules by task id.</li>
 *   <li>Create the tag table in the user-profile database (one table per tag).</li>
 *   <li>Build the SQL: insert (user-profile db) ... select (data-warehouse db).</li>
 *   <li>Set up the SparkSQL environment and execute the SQL.</li>
 * </ol>
 */
public class TaskSql {

    // Spark-submit entry class; never instantiated.
    private TaskSql() {
    }

    /**
     * Entry point, launched via spark-submit.
     *
     * <p>Contract: {@code spark-submit [options] <app jar> <taskId> <busiDate>}.
     *
     * @param args args[0] = tag task id, args[1] = business date (used as the dt partition value)
     */
    public static void main(String[] args) {
        // 1. Read the task id and business date; fail fast with a clear message
        // instead of an ArrayIndexOutOfBoundsException on a misconfigured submit.
        if (args.length < 2) {
            throw new IllegalArgumentException(
                    "Expected 2 arguments: <taskId> <busiDate>, but got " + args.length);
        }
        String taskId = args[0];
        String busiDate = args[1];

        // 2. Load tag definition, task definition and value-mapping rules by task id.
        TagInfo tagInfo = TagInfoDao.selectTagInfoByTaskId(taskId);
        TaskInfo taskInfo = TaskInfoDao.selectTaskInfoById(taskId);
        List<TaskTagRule> taskTagRules = TaskTagRuleDao.selectTaskTagRulesByTaskId(taskId);

        // 3. Create the tag table in the user-profile database.
        // One table per tag code, three columns: uid (who), tag_value (value),
        // dt (which day — the partition column).
        String dwDbName = MyPropsUtil.get(ConstCode.DATA_WAREHOUSE_DBNAME);
        String upDbName = MyPropsUtil.get(ConstCode.USER_PROFILE_DBNAME);
        String hdfsPath = MyPropsUtil.get(ConstCode.HDFS_STORE_PATH);
        String tableName = tagInfo.getTagCode().toLowerCase();
        String columnType = toHiveColumnType(tagInfo.getTagValueType());

        String createTableSql = buildCreateTableSql(upDbName, tableName, columnType, hdfsPath);
        System.out.println("createTable ==> " + createTableSql);

        // 4. Build the insert(user-profile db) ... select(data-warehouse db) statement.
        String insertSql =
                buildInsertSql(upDbName, tableName, busiDate, taskInfo.getTaskSql(), taskTagRules);
        System.out.println("insertSql ==> " + insertSql);

        // 5. Set up the SparkSQL environment and execute the SQL.
        SparkConf sparkConf = new SparkConf().setAppName("task_sql_app"); // .setMaster("local[*]") for local debugging
        SparkSession sparkSession =
                SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate();
        try {
            // Create the target table (idempotent: "if not exists").
            sparkSession.sql(createTableSql);
            // Switch to the warehouse db so the inner query's unqualified tables resolve there.
            sparkSession.sql("use " + dwDbName);
            // insert ... select
            sparkSession.sql(insertSql);
        } finally {
            // Always release the session, even if a statement fails.
            sparkSession.stop();
        }
    }

    /**
     * Maps a tag value type code to its Hive column type:
     * long =&gt; bigint, decimal =&gt; decimal(16,2), string/date =&gt; string.
     *
     * @param tagValueType tag value type code from {@link ConstCode}
     * @return the Hive column type for the tag_value column
     * @throws IllegalArgumentException on an unknown type (the original code silently
     *         produced a DDL containing the literal "null" column type)
     */
    private static String toHiveColumnType(String tagValueType) {
        switch (tagValueType) {
            case ConstCode.TAG_VALUE_TYPE_LONG:
                return "bigint";
            case ConstCode.TAG_VALUE_TYPE_DECIMAL:
                return "decimal(16,2)";
            case ConstCode.TAG_VALUE_TYPE_STRING:
            case ConstCode.TAG_VALUE_TYPE_DATE:
                return "string";
            default:
                throw new IllegalArgumentException("Unsupported tag value type: " + tagValueType);
        }
    }

    /**
     * Builds the idempotent CREATE TABLE DDL for the tag table:
     * uid string, tag_value &lt;columnType&gt;, partitioned by dt,
     * tab-delimited text, stored at &lt;hdfsPath&gt;/&lt;upDbName&gt;/&lt;tableName&gt;.
     */
    private static String buildCreateTableSql(
            String upDbName, String tableName, String columnType, String hdfsPath) {
        return " create table if not exists " + upDbName + "." + tableName +
                " (" +
                " uid  string ," +
                " tag_value " + columnType +
                " )" +
                " partitioned by (dt string)" +
                " row format delimited fields terminated by '\\t'" +
                " location '" + hdfsPath + "/" + upDbName + "/" + tableName + "'";
    }

    /**
     * Builds the insert-overwrite statement that maps each query_value produced by the
     * task's query to its sub-tag value via a CASE expression, e.g.:
     *
     * <pre>
     *   insert overwrite table up_db.tag_x partition(dt='...')
     *   select uid, case query_value when 'M' then '男' ... end as tag_value
     *   from ( &lt;taskSql&gt; ) tq
     * </pre>
     *
     * @param taskSql inner query against the warehouse db; must project uid and query_value
     */
    private static String buildInsertSql(String upDbName, String tableName, String busiDate,
                                         String taskSql, List<TaskTagRule> taskTagRules) {
        // NOTE(review): if taskTagRules is empty this yields "case query_value  end",
        // which is invalid SQL — assumes every task has at least one rule; confirm upstream.
        // Turn each rule into a " when '<queryValue>' then '<subTagValue>' " fragment.
        String whenThenSql = taskTagRules.stream()
                .map(rule -> " when '" + rule.getQueryValue() + "' then '" + rule.getSubTagValue() + "' ")
                .collect(Collectors.joining(" "));
        String caseSql = "case query_value " + whenThenSql + " end as tag_value ";
        // Alias the subquery ("tq"): Hive requires it, and the design comment shows it;
        // the original code omitted the alias.
        String selectSql =
                " select " +
                " uid ,  " + caseSql +
                " from ( " + taskSql + " ) tq";
        System.out.println("selectSql ==> " + selectSql);
        return " insert overwrite table " + upDbName + "." + tableName
                + " partition(dt='" + busiDate + "') " + selectSql;
    }
}
