package com.atguigu.userprofile.task;

import com.atguigu.userprofile.bean.TagInfo;
import com.atguigu.userprofile.bean.TaskInfo;
import com.atguigu.userprofile.bean.TaskTagRule;
import com.atguigu.userprofile.constant.ConstCode;
import com.atguigu.userprofile.dao.TagInfoDao;
import com.atguigu.userprofile.dao.TaskInfoDao;
import com.atguigu.userprofile.dao.TaskTagRuleDao;
import com.atguigu.userprofile.util.MyPropsUtil;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

import java.util.List;
import java.util.stream.Collectors;

/**
 * 通用Sql任务， 负责计算统计性和规则型的标签.
 *
 * 任务步骤：
 *
 * 1. 明确计算哪个标签， 通过获取到taskId
 *
 * 2. 通过taskId,从Mysql中读取 标签信息 、 任务信息 、 规则信息
 *
 * 3. 动态创建标签表， 用于存储计算好的标签结果
 *
 * 4. 组织SQL:  insert(画像) .... select(数仓) ...
 *
 * 5. 准备SparkSql环境， 执行SQL
 */
public class TaskSql {

    public static void main(String[] args) {
        // 1. Determine which tag to compute, via the taskId app argument.
        //    spark-submit [options] <java app jar> [app arguments]
        //    The profile management platform passes taskId and busiDate to the remote
        //    submitter, which builds the spark-submit command and forwards both values
        //    here as application arguments.
        if (args.length < 2) {
            // Fail fast with a usage hint instead of an opaque ArrayIndexOutOfBoundsException.
            throw new IllegalArgumentException(
                    "Usage: TaskSql <taskId> <busiDate> ; expected 2 arguments but got " + args.length);
        }
        String taskId = args[0];
        String busiDate = args[1];

        // 2. Load tag info, task info and level-4 rule list from MySQL by taskId.
        TagInfo tagInfo = TagInfoDao.selectTagInfoByTaskId(taskId);
        TaskInfo taskInfo = TaskInfoDao.selectTaskInfoByTaskId(taskId);
        List<TaskTagRule> taskTagRules = TaskTagRuleDao.selectTaskTagRuleListByTaskId(taskId);

        // 3. Dynamically create the tag result table.
        //    One table per tag; each day's result goes into a dt partition of the same table:
        //      create table if not exists [upDbName].[tableName]
        //      ( uid string , tag_value [columnType] )
        //      partitioned by ( dt string )
        //      row format delimited fields terminated by '\t'
        //      location '[hdfsPath]/[upDbName]/[tableName]'
        String upDbName = MyPropsUtil.get(ConstCode.UP_DBNAME);      // profile database name
        String dwDbName = MyPropsUtil.get(ConstCode.DW_DBNAME);      // warehouse database name
        String tableName = tagInfo.getTagCode().toLowerCase();       // tag table name = lower-cased tag code
        String columnType = getColumnType(tagInfo.getTagValueType());// Hive type of tag_value column
        String hdfsPath = MyPropsUtil.get(ConstCode.HDFS_STORE_PATH);// HDFS root for tag tables

        String createTable = buildCreateTableSql(upDbName, tableName, columnType, hdfsPath);
        System.out.println("createTable : " + createTable);

        // 4. Build the SQL:  insert (profile db) .... select (warehouse) ...
        //    Example for a gender tag with level-4 sub-tags:
        //      insert overwrite table [upDbName].[tableName] partition(dt = '[busiDate]')
        //      select uid , case query_value when 'M' then '男' when 'F' then '女' ... end as tag_value
        //      from ( <taskSql with $dt substituted> )
        //    Without level-4 sub-tags the query_value is written through unchanged.
        String insertSelectSql =
                buildInsertSelectSql(taskInfo.getTaskSql(), taskTagRules, upDbName, tableName, busiDate);
        System.out.println("insertSelectSql : " + insertSelectSql);

        // 5. Set up the SparkSQL environment (Hive support) and execute the statements.
        SparkConf sparkConf = new SparkConf().setAppName("task_sql_app");//.setMaster("local[*]");
        SparkSession sparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate();
        try {
            sparkSession.sql(createTable);
            // Switch to the warehouse database so the taskSql's unqualified table names resolve.
            sparkSession.sql("use " + dwDbName);
            sparkSession.sql(insertSelectSql);
        } finally {
            // Release the Spark context even if one of the SQL statements fails.
            sparkSession.stop();
        }
    }

    /**
     * Builds the CREATE TABLE IF NOT EXISTS DDL for the tag result table
     * (uid + tag_value, partitioned by dt, tab-delimited text at a fixed HDFS location).
     */
    private static String buildCreateTableSql(String upDbName, String tableName,
                                              String columnType, String hdfsPath) {
        return " create table if not exists " + upDbName + "." + tableName +
                " ( " +
                " uid string , tag_value " + columnType +
                " ) " +
                " partitioned by ( dt string ) " +
                " row format delimited fields terminated by '\\t' " +
                " location '" + hdfsPath + "/" + upDbName + "/" + tableName + "'";
    }

    /**
     * Builds the full "insert overwrite ... select ..." statement.
     *
     * @param taskSql      warehouse query producing (uid, query_value); may contain a $dt placeholder
     * @param taskTagRules level-4 sub-tag mappings; empty or null means query_value is the tag value
     * @param upDbName     profile database name
     * @param tableName    tag result table name
     * @param busiDate     business date, used for both $dt substitution and the target partition
     */
    private static String buildInsertSelectSql(String taskSql, List<TaskTagRule> taskTagRules,
                                               String upDbName, String tableName, String busiDate) {
        // Substitute the business date for the $dt placeholder in the warehouse query.
        String selectFromDw = taskSql.replace("$dt", busiDate);

        String caseSql;
        if (taskTagRules == null || taskTagRules.isEmpty()) {
            // No level-4 sub-tags: pass the queried value through as the tag value.
            caseSql = " query_value ";
        } else {
            // Level-4 sub-tags: map each rule to "when '<query>' then '<subTag>'".
            // Values are escaped so a stray single quote cannot break the generated SQL.
            String whenThenSql = taskTagRules.stream()
                    .map(rule -> "when '" + escapeSqlLiteral(rule.getQueryValue())
                            + "' then '" + escapeSqlLiteral(rule.getSubTagValue()) + "'")
                    .collect(Collectors.joining(" "));
            caseSql = " case query_value " + whenThenSql + " end as tag_value ";
        }

        String insertSql = " insert overwrite  table " + upDbName + "." + tableName
                + " partition(dt = '" + busiDate + "') ";
        String selectSql = " select uid , " + caseSql + " from ( " + selectFromDw + " )";
        return insertSql + selectSql;
    }

    /**
     * Escapes single quotes for safe embedding inside a Hive single-quoted string literal.
     * A null value is rendered as an empty literal.
     */
    private static String escapeSqlLiteral(String value) {
        return value == null ? "" : value.replace("'", "\\'");
    }

    /**
     * Maps the tag value type code to the Hive column type of the tag_value column.
     *
     * @throws IllegalArgumentException if the type code is unknown (previously this
     *         returned null and produced broken DDL far from the root cause)
     */
    private static String getColumnType(String tagValueType) {
        switch (tagValueType) {
            case ConstCode.TAG_VALUE_TYPE_LONG:
                return "bigint";
            case ConstCode.TAG_VALUE_TYPE_DECIMAL:
                return "decimal(16,2)";
            case ConstCode.TAG_VALUE_TYPE_STRING:
            case ConstCode.TAG_VALUE_TYPE_DATE:
                return "string";
            default:
                throw new IllegalArgumentException("Unsupported tag value type: " + tagValueType);
        }
    }
}
