package com.atguigu.userprofile.task;
import com.atguigu.userprofile.MysqlUtil.MyPropsUtil;
import com.atguigu.userprofile.bean.Taginfo;
import com.atguigu.userprofile.bean.TaskTagRule;
import com.atguigu.userprofile.bean.Taskinfo;
import com.atguigu.userprofile.constant.ConstCode;
import com.atguigu.userprofile.dao.TaginfoDao;
import com.atguigu.userprofile.dao.TaskTagRuleDao;
import com.atguigu.userprofile.dao.TaskinfoDao;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

import java.lang.reflect.InvocationTargetException;
import java.sql.SQLException;
import java.util.List;
import java.util.stream.Collectors;


/**
 * 任务步骤
 * 1.获取外部传入的参数 taskId busidate
 * spark-submit [options] <app jar>[app arguments]
 *
 * 2.根据任务id到mysql中查询任务相关的信息,标签信息(tag_info)、 任务信息(task_info) 、 规则信息(task_tag_rule)
 *
 * 3.动态创建标签表
 *
 * 4.组织: insert(标签表)..........select(数仓表)
 *
 * 5.通过 Spark SQL 来执行处理好的 insert...select 语句
 * */
public class TaskSql {

    /**
     * Entry point. Computes a user tag and writes it into a dynamically created
     * Hive tag table.
     *
     * Steps:
     *   1. Read external arguments taskId / busiDate (forwarded by the profile
     *      platform's remote submitter as spark-submit app arguments).
     *   2. Load task metadata from MySQL: tag_info, task_info, task_tag_rule.
     *   3. Build the DDL for the tag table (named after the tag code).
     *   4. Build an INSERT OVERWRITE ... SELECT from the task's query SQL and
     *      its value-mapping rules.
     *   5. Execute both statements through Spark SQL.
     */
    public static void main(String[] args) throws SQLException, NoSuchFieldException,
            ClassNotFoundException, InvocationTargetException, InstantiationException,
            IllegalAccessException {
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // 1. External arguments. Fail fast with a usage message instead of an
        //    opaque ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            throw new IllegalArgumentException(
                    "Usage: TaskSql <taskId> <busiDate> (got " + args.length + " argument(s))");
        }
        String taskId = args[0];
        String busiDate = args[1];

        // 2. Task-related metadata from MySQL (common DAO module).
        Taginfo taginfo = TaginfoDao.selectTagInfoByTaskId(taskId);
        Taskinfo taskinfo = TaskinfoDao.selectTaskInfoById(taskId);
        List<TaskTagRule> taskTagRules = TaskTagRuleDao.selectTaskTagRulesByTaskId(taskId);

        String dwDbName = MyPropsUtil.get(ConstCode.DW_DBNAME);      // warehouse db
        String upDbName = MyPropsUtil.get(ConstCode.UP_DBNAME);      // profile db
        String hdfsPath = MyPropsUtil.get(ConstCode.HDFS_STORE_PATH);
        // The tag code doubles as the tag table name.
        String tableName = taginfo.getTagCode().toLowerCase();

        // 3. DDL for the tag table; column type is derived from the tag's value type.
        String createTableSql = buildCreateTableSql(
                upDbName, tableName, toHiveColumnType(taginfo.getTagValueType()), hdfsPath);
        System.out.println("createTable: " + createTableSql);

        // 4. INSERT OVERWRITE ... SELECT over the task's query SQL.
        String insertSelectSql = buildInsertSelectSql(
                taskinfo.getTaskSql(), taskTagRules, upDbName, tableName, busiDate);
        System.out.println("insertSelectSql: " + insertSelectSql);

        // 5. Execute through Spark SQL.
        // NOTE(review): the hardcoded local[*] master overrides any --master option
        // given to spark-submit; remove setMaster for cluster deployment — confirm.
        SparkConf sparkConf = new SparkConf().setAppName("task_sql_app").setMaster("local[*]");
        SparkSession sparkSession =
                SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate();
        try {
            sparkSession.sql(createTableSql);
            // The task SQL references warehouse tables by bare name, so switch to the DW db.
            sparkSession.sql("use " + dwDbName);
            sparkSession.sql(insertSelectSql);
        } finally {
            // Always release the Spark context, even if a statement fails.
            sparkSession.stop();
        }
    }

    /**
     * Maps the tag's declared value type to the Hive type of the tag_value column.
     *
     * @param tagValueType the tag value type code from tag_info
     * @return the Hive column type
     * @throws IllegalArgumentException for an unknown type code (previously the
     *         switch fell through silently and emitted "tag_value null" in the DDL)
     */
    private static String toHiveColumnType(String tagValueType) {
        switch (tagValueType) {
            case ConstCode.TAG_VALUE_TYPE_BIGINT:
                return "bigint";
            case ConstCode.TAG_VALUE_TYPE_DECIMAL:
                return "decimal(16,2)";
            case ConstCode.TAG_VALUE_TYPE_STRING:
            case ConstCode.TAG_VALUE_TYPE_DATE:
                // Dates are stored as plain strings in the tag table.
                return "string";
            default:
                throw new IllegalArgumentException(
                        "Unsupported tag value type: " + tagValueType);
        }
    }

    /**
     * Builds the CREATE TABLE IF NOT EXISTS statement for the tag table:
     * (uid string, tag_value &lt;type&gt;), partitioned by dt, tab-delimited,
     * located under &lt;hdfsPath&gt;/&lt;upDbName&gt;/&lt;tableName&gt;.
     */
    private static String buildCreateTableSql(
            String upDbName, String tableName, String tagColumnType, String hdfsPath) {
        return " create table if not exists " + upDbName + "." + tableName
                + " ( "
                + " uid string ,"
                + " tag_value " + tagColumnType
                + " ) "
                + " partitioned by ( dt string) "
                + " row format delimited fields terminated by '\\t' "
                + " location '" + hdfsPath + "/" + upDbName + "/" + tableName + "'";
    }

    /**
     * Wraps the task's query SQL in a subquery and projects (uid, tag_value).
     * When mapping rules exist, query_value is translated through a
     * CASE ... WHEN expression (one WHEN/THEN per rule); values matching no rule
     * become NULL since no ELSE branch is emitted. With no rules, query_value is
     * used as-is.
     *
     * NOTE(review): the statement is assembled by string concatenation from task
     * metadata; this is trusted platform configuration, not end-user input — keep
     * it that way.
     *
     * @param taskSql  the configured query SQL, may contain the $dt placeholder
     * @param rules    query_value -&gt; sub-tag-value mappings (may be empty)
     * @param upDbName profile database name
     * @param tableName tag table name
     * @param busiDate business date, used as the dt partition value and to
     *                 substitute $dt inside the task SQL
     */
    private static String buildInsertSelectSql(String taskSql, List<TaskTagRule> rules,
            String upDbName, String tableName, String busiDate) {
        String datedSql = taskSql.replace("$dt", busiDate);

        String tagValueExpr;
        if (rules.isEmpty()) {
            tagValueExpr = "t.query_value as tag_value ";
        } else {
            String whenThen = rules.stream()
                    .map(r -> "when '" + r.getQueryValue() + "' then '" + r.getSubTagValue() + "'")
                    .collect(Collectors.joining(" "));
            // Space before "end" is required: previously the last THEN literal and
            // the END keyword were fused (e.g. ...then 'X'end), producing fragile SQL.
            tagValueExpr = "case t.query_value " + whenThen + " end AS tag_value";
        }

        String selectSql = "select t.uid ," + tagValueExpr + " from ( " + datedSql + ") t";
        return "insert overwrite table " + upDbName + "." + tableName
                + " partition (dt='" + busiDate + "') " + selectSql;
    }
}
