package com.atguigu.userprofile.task;

import com.atguigu.userprofile.beans.TagInfo;
import com.atguigu.userprofile.beans.TaskInfo;
import com.atguigu.userprofile.beans.TaskTagRule;
import com.atguigu.userprofile.constant.ConstCode;
import com.atguigu.userprofile.dao.TagInfoDao;
import com.atguigu.userprofile.dao.TaskInfoDao;
import com.atguigu.userprofile.dao.TaskTagRuleDao;
import com.atguigu.userprofile.utils.MyPropsUtil;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

import java.util.List;
import java.util.stream.Collectors;

/**
 * @Classname TaskSql
 * @Date 2022/10/2 13:44
 * @Created by arun
 * <p>
 * 1. Read the task id (taskId) and the business date for this run.
 * Convention: the first argument is the task id, the second is the business date.
 * spark-submit [options] &lt;app jar&gt; [app arguments]
 * <p>
 * 2. Load, by task id: the tag info, the task info, and the tag mapping rules
 * (the correspondence between SQL query values and tag values).
 * 3. Create the tag table (if it does not exist).
 * 4. Build the insert...select... SQL.
 * 5. Execute the SQL.
 */
public class TaskSql {

    public static void main(String[] args) {
        // 1. External arguments (contract: args[0] = task id, args[1] = business date).
        // Fail fast with a usable message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            throw new IllegalArgumentException(
                    "Usage: TaskSql <taskId> <businessDate> (got " + args.length + " argument(s))");
        }
        String taskId = args[0];
        String businessDate = args[1];

        // 2. Load tag metadata, task definition and value-mapping rules for this task.
        TagInfo tagInfo = TagInfoDao.selectTagInfoByTaskId(taskId);
        TaskInfo taskInfo = TaskInfoDao.selectTaskInfoById(taskId);
        List<TaskTagRule> taskTagRules = TaskTagRuleDao.selectTaskTagRulesByTaskId(taskId);

        // User-profile database (where tag tables live).
        String upDbName = MyPropsUtil.get(ConstCode.USER_PROFILE_DBNAME);
        // Data-warehouse database (where the task SQL's source tables live).
        String dwDbName = MyPropsUtil.get(ConstCode.DATA_WAREHOUSE_DBNAME);
        // Tag table name, derived from tag_info.tag_code.
        String tableName = tagInfo.getTagCode().toLowerCase();

        // 3. Create the tag table if it does not exist yet.
        String createTable = buildCreateTableSql(upDbName, tableName, tagInfo.getTagValueType());
        System.out.println("createSql===>" + createTable);

        // 4. Build the insert...select... that maps raw query values to tag values.
        String insertSql =
                buildInsertSql(upDbName, tableName, businessDate, taskInfo.getTaskSql(), taskTagRules);
        System.out.println("insertSql ===> " + insertSql);

        // 5. Execute the statements on the cluster.
        SparkConf conf = new SparkConf().setAppName("task_sql_app");//.setMaster("local[*]");
        SparkSession sparkSession =
                SparkSession.builder().config(conf).enableHiveSupport().getOrCreate();
        try {
            sparkSession.sql(createTable);
            // The task SQL references warehouse tables with unqualified names,
            // so switch the current database before running the insert.
            sparkSession.sql("use " + dwDbName);
            sparkSession.sql(insertSql);
        } finally {
            // Release cluster resources even if one of the statements fails.
            sparkSession.stop();
        }
    }

    /**
     * Maps a tag_info value-type code to a Hive column type.
     *
     * @param tagValueType value-type code from tag_info (see ConstCode)
     * @return the Hive column type for tag_value
     * @throws IllegalArgumentException if the type code is not recognised
     *         (previously this fell through silently and produced broken
     *         "tag_value null" DDL)
     */
    private static String toColumnType(String tagValueType) {
        switch (tagValueType) {
            case ConstCode.TAG_VALUE_TYPE_LONG:
                // "bigint" is valid in both Hive and Spark SQL; bare "long" is a
                // Spark-only alias and is rejected by Hive DDL.
                return "bigint";
            case ConstCode.TAG_VALUE_TYPE_DECIMAL:
                return "decimal(16,2)";
            case ConstCode.TAG_VALUE_TYPE_STRING:
            case ConstCode.TAG_VALUE_TYPE_DATE:
                return "string";
            default:
                throw new IllegalArgumentException("Unsupported tag value type: " + tagValueType);
        }
    }

    /**
     * Builds the CREATE TABLE IF NOT EXISTS DDL for a tag table:
     * (uid string, tag_value &lt;type&gt;) partitioned by dt, tab-delimited,
     * located at &lt;hdfsPath&gt;/&lt;upDbName&gt;/&lt;tableName&gt;.
     */
    private static String buildCreateTableSql(String upDbName, String tableName, String tagValueType) {
        String columnType = toColumnType(tagValueType);
        String hdfsPath = MyPropsUtil.get(ConstCode.HDFS_STORE_PATH);
        return " create table if not exists " + upDbName + "." + tableName +
                " ( " +
                " uid string , tag_value " + columnType +
                " ) " +
                " partitioned by ( dt string ) " +
                " row format delimited fields terminated by '\\t' " +
                " location '" + hdfsPath + "/" + upDbName + "/" + tableName + "'";
    }

    /**
     * Builds the INSERT OVERWRITE statement that wraps the raw task SQL and maps
     * each query_value to its tag value via a CASE expression.
     * <p>
     * NOTE(review): query/tag values come from internal metadata tables and are
     * concatenated into the SQL text; they are assumed not to contain quotes.
     */
    private static String buildInsertSql(String upDbName, String tableName, String businessDate,
                                         String taskSql, List<TaskTagRule> taskTagRules) {
        // One CASE branch per mapping rule: when '<queryValue>' then '<subTagValue>'
        String whenThenSql = taskTagRules.stream()
                .map(rule -> " when '" + rule.getQueryValue() + "' then '" + rule.getSubTagValue() + "'")
                .collect(Collectors.joining(" "));

        String selectSql = " select uid , case query_value " + whenThenSql + " end as tag_value " +
                " from ( " + taskSql + " ) t1";

        // The "table" keyword is mandatory in Hive DML (optional in Spark SQL),
        // so keep it for compatibility.
        return "insert overwrite table " + upDbName + "." + tableName +
                " partition(dt='" + businessDate + "') " + selectSql;
    }
}
