package com.atguigu.userprofile.task;

import com.atguigu.userprofile.bean.TagInfo;
import com.atguigu.userprofile.bean.TaskInfo;
import com.atguigu.userprofile.bean.TaskTagRule;
import com.atguigu.userprofile.constant.ConstCode;
import com.atguigu.userprofile.dao.TagInfoDao;
import com.atguigu.userprofile.dao.TaskInfoDao;
import com.atguigu.userprofile.dao.TaskTagRuleDao;
import com.atguigu.userprofile.util.MyPropsUtil;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

import java.util.List;
import java.util.stream.Collectors;

/**
 * Batch task #1: materializes statistical and rule-based tags via SQL.
 *
 * Steps:
 *   1. Read the two app arguments: taskId, busiDate.
 *   2. Query MySQL by task id for tag_info, task_info and task_tag_rule.
 *   3. Dynamically create the tag table.
 *   4. Assemble the "insert ... select ..." statement.
 *   5. Execute the SQL through Spark with Hive support.
 */
public class TaskSql {

    /**
     * Entry point. The scheduling platform hands two pieces of task info to the
     * remote submitter, which forwards them as application arguments on
     * spark-submit ({@code spark-submit [options] <app jar> [app arguments]}).
     * Convention: args[0] = task id, args[1] = business date.
     */
    public static void main(String[] args) {
        // 1. Validate and read the two required parameters.
        if (args == null || args.length < 2) {
            throw new IllegalArgumentException(
                    "Expected two arguments: <taskId> <busiDate>");
        }
        String taskId = args[0];
        String busiDate = args[1];

        // 2. Load task metadata from MySQL (tag_info, task_info, task_tag_rule)
        //    through the shared JDBC query helpers in the common module.
        TagInfo tagInfo = TagInfoDao.selectTagInfoByTaskId(taskId);
        TaskInfo taskInfo = TaskInfoDao.selectTaskInfoById(taskId);
        List<TaskTagRule> taskTagRules = TaskTagRuleDao.selectTaskTagRulesByTaskId(taskId);

        String dwDbName = MyPropsUtil.get(ConstCode.DW_DBNAME);
        String upDbName = MyPropsUtil.get(ConstCode.UP_DBNAME);
        String tableName = tagInfo.getTagCode().toLowerCase(); // tag code doubles as the table name
        String tagColumnType = tagValueTypeToTagColumnType(tagInfo.getTagValueType());
        String hdfsPath = MyPropsUtil.get(ConstCode.HDFS_STORE_PATH);

        // 3. Dynamically create the tag table, partitioned by dt so each day's
        //    data lands in its own partition. Schema: uid, tag_value, dt.
        String createTable = buildCreateTableSql(upDbName, tableName, tagColumnType, hdfsPath);
        System.out.println("createTable : " + createTable);

        // 4. Assemble the "insert ... select ..." from the task SQL plus tag rules.
        String insertSelectSql = buildInsertSelectSql(
                upDbName, tableName, busiDate, taskInfo.getTaskSql(), taskTagRules);
        System.out.println("insertSelectSql : " + insertSelectSql);

        // 5. Execute the statements through Spark SQL with Hive support.
        SparkConf sparkConf = new SparkConf().setAppName("task_sql_app").setMaster("local[*]");
        SparkSession sparkSession =
                SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate();
        try {
            sparkSession.sql(createTable);
            sparkSession.sql("use " + dwDbName);
            sparkSession.sql(insertSelectSql);
        } finally {
            // Release cluster resources even if one of the statements fails.
            sparkSession.stop();
        }
    }

    /**
     * Builds the DDL for the tag table:
     * <pre>
     *   create table if not exists [upDbName].[tableName]
     *   ( uid string , tag_value [tagColumnType] )
     *   partitioned by (dt string)
     *   row format delimited fields terminated by '\t'
     *   location '[hdfsPath]/[upDbName]/[tableName]'
     * </pre>
     */
    private static String buildCreateTableSql(String upDbName, String tableName,
                                              String tagColumnType, String hdfsPath) {
        return " create table if not exists " + upDbName + "." + tableName +
               " ( " +
               " uid string , tag_value " + tagColumnType +
               " ) " +
               " partitioned by (dt string ) " +
               " row format delimited fields terminated by '\\t' " +
               " location '" + hdfsPath + "/" + upDbName + "/" + tableName + "'";
    }

    /**
     * Wraps the task SQL (which must yield columns uid and query_value) in a CASE
     * expression mapping each query_value to its sub-tag value, then writes the
     * result into the partition for the given business date.
     *
     * Example (gender tag):
     * <pre>
     *   insert overwrite table [upDbName].[tableName] partition (dt='[busiDate]')
     *   select uid , case query_value when 'M' then '...' ... end as tag_value
     *   from ( [taskSql] ) t
     * </pre>
     */
    private static String buildInsertSelectSql(String upDbName, String tableName, String busiDate,
                                               String taskSql, List<TaskTagRule> taskTagRules) {
        // when '<queryValue>' then '<subTagValue>' ... , one clause per rule
        String whenThenSql = taskTagRules.stream()
                .map(rule -> "when '" + rule.getQueryValue() + "' then '" + rule.getSubTagValue() + "'")
                .collect(Collectors.joining(" "));

        String caseSql = "case query_value " + whenThenSql + " end as tag_value";

        // Derived tables require an alias in Hive and Spark < 2.4 — hence the trailing "t".
        String selectSql = " select uid , " + caseSql + " from ( " + taskSql + " ) t ";

        // The "table" keyword keeps the statement valid under strict Hive syntax too.
        return "insert overwrite table " + upDbName + "." + tableName +
               " partition(dt='" + busiDate + "') " + selectSql;
    }

    /**
     * Maps a tag's declared value type to the Hive column type used for tag_value.
     *
     * @param tagValueType one of the ConstCode.TAG_VALUE_TYPE_* constants
     * @return the Hive column type ("bigint", "decimal(12,2)" or "string")
     * @throws IllegalArgumentException for an unknown type (previously this fell
     *         through and returned null, which would splice the literal "null"
     *         into the CREATE TABLE statement and produce invalid DDL)
     */
    private static String tagValueTypeToTagColumnType(String tagValueType) {
        switch (tagValueType) {
            case ConstCode.TAG_VALUE_TYPE_LONG:
                return "bigint";
            case ConstCode.TAG_VALUE_TYPE_DECIMAL:
                return "decimal(12,2)";
            case ConstCode.TAG_VALUE_TYPE_STRING:
                // deliberate fall-through: string and date both store as string
            case ConstCode.TAG_VALUE_TYPE_DATE:
                return "string";
            default:
                throw new IllegalArgumentException(
                        "Unsupported tag value type: " + tagValueType);
        }
    }

}
