package com.atguigu.userprofile.app;
//1. Accept external arguments: taskId, busiDate
//2. Query the related MySQL tables (tag_info, task_info, task_tag_rule) by taskId
//3. Extract the table name, column name, column type, etc. from those tables
//4. Dynamically build the CREATE TABLE statement
//5. Dynamically build the SELECT statement
//6. Dynamically build the INSERT statement
//7. Execute

import com.atguigu.userprofile.bean.TagInfo;
import com.atguigu.userprofile.bean.TaskInfo;
import com.atguigu.userprofile.bean.TaskTagRule;
import com.atguigu.userprofile.constants.ConstCodes;
import com.atguigu.userprofile.dao.TagInfoDAO;
import com.atguigu.userprofile.dao.TaskInfoDAO;
import com.atguigu.userprofile.dao.TaskTagRuleDAO;
import com.atguigu.userprofile.util.MyPropertiesUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

import java.util.List;
import java.util.Properties;
import java.util.stream.Collectors;

/**
 * Spark batch job that materializes one user-profile tag table.
 *
 * <p>Flow: read tag/task metadata for the given taskId from MySQL, dynamically
 * build a Hive CREATE TABLE statement and an INSERT ... SELECT statement that
 * maps raw query values to tag values via CASE WHEN, then execute both through
 * a Hive-enabled SparkSession.
 *
 * <p>Usage: {@code TaskSqlApp <taskId> <busiDate>} where busiDate is the
 * partition date (dt) to overwrite.
 */
public class TaskSqlApp {

    public static void main(String[] args) {

        // 1. Accept external arguments: taskId and busiDate.
        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            throw new IllegalArgumentException("Usage: TaskSqlApp <taskId> <busiDate>");
        }
        String taskId = args[0];
        String busiDate = args[1];

        // Create the Spark environment.
        // NOTE: setMaster must remain commented out when packaging for cluster deployment.
        SparkConf sparkConf = new SparkConf().setAppName("TaskSqlApp");//.setMaster("local[*]");
        SparkSession sparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate();

        try {
            // Load settings from the configuration file.
            Properties properties = MyPropertiesUtil.load("config.properties");
            // User-profile database name (where the tag table is created).
            String upDatabaseName = properties.getProperty("user-profile.dbname");
            // HDFS storage root path for tag tables.
            String hdfsPath = properties.getProperty("hdfs-store.path");
            // Data-warehouse database name (where the task SQL reads from).
            String dwDBName = properties.getProperty("data-warehouse.dbname");

            // 2. Query MySQL metadata (tag_info, task_info, task_tag_rule) by taskId.
            TagInfo tagInfo = TagInfoDAO.getTagInfoByTaskId(taskId);
            TaskInfo taskInfo = TaskInfoDAO.getTaskInfo(taskId);
            List<TaskTagRule> taskTagRuleList = TaskTagRuleDAO.getTaskTagRuleList(taskId);

            // 3. Derive table name (tag_code), column type and table comment from metadata.
            String tableName = tagInfo.getTagCode().toLowerCase();

            // Map the tag value type code to a Hive column type; throws on unknown codes
            // instead of silently emitting "null" into the DDL.
            String tagValueType = toHiveType(tagInfo.getTagValueType());

            // Tag name doubles as the Hive table comment.
            String comment = tagInfo.getTagName();

            // Business SQL defined for this task; used as the inner subquery.
            String taskSql = taskInfo.getTaskSql();

            // Build the CASE WHEN fragment mapping raw query values to sub-tag values.
            // NOTE(review): queryValue/subTagValue are concatenated into SQL verbatim;
            // assumed to come from trusted internal metadata tables — verify, and beware
            // of values containing single quotes.
            List<String> whenThenList = taskTagRuleList.stream()
                    .map(rule -> "when '" + rule.getQueryValue() + "' then '" + rule.getSubTagValue() + "'")
                    .collect(Collectors.toList());
            String whenThenSQL = StringUtils.join(whenThenList, " ");

            // 4. Dynamically build the CREATE TABLE statement, e.g.:
            //   create table if not exists user_profile.tg_person_base_gender
            //     (uid String, tag_value string)
            //   partitioned by (dt String)
            //   comment 'gender'
            //   ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
            //   location 'hdfs://hadoop102:8020/user_profile/user_profile/tg_person_base_gender'
            String createSQL = "  create table if not exists " + upDatabaseName + "." + tableName + "\n" +
                    "            (uid String,tag_value " + tagValueType + ")\n" +
                    "           partitioned by (dt String)\n" +
                    "            comment '" + comment + "'\n" +
                    "            ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t'\n" +
                    "            location '" + hdfsPath + "/" + upDatabaseName + "/" + tableName + "'";

            // Run in the data-warehouse database so the task SQL can reference its
            // tables unqualified. Was hard-coded "use gmall"; dwDBName is the config
            // value loaded above for exactly this purpose.
            sparkSession.sql("use " + dwDBName);

            sparkSession.sql(createSQL);

            // 5. Dynamically build the SELECT statement, e.g.:
            //   select uid,
            //     case query_value when 'F' then 'M' ... end as query_value
            //   from ( <taskSql> ) ti
            String querySQL = "select\n" +
                    " uid,\n" +
                    " case query_value " + whenThenSQL + "  end as query_value\n" +
                    " from\n" +
                    " (" + taskSql + "\n" +
                    "  )ti";

            System.out.println(querySQL);

            // 6. Dynamically build the INSERT statement, overwriting the busiDate partition.
            String insertSQL = "insert overwrite table " + upDatabaseName + "." + tableName
                    + " partition(dt='" + busiDate + "') " + querySQL;
            System.out.println(insertSQL);

            // 7. Execute.
            sparkSession.sql(insertSQL);
        } finally {
            // Release the Spark application's resources even if a statement fails.
            sparkSession.stop();
        }
    }

    /**
     * Maps a tag value type code (see {@link ConstCodes}) to the Hive column type
     * used for the tag_value column.
     *
     * @param valueType the tag value type code from tag_info
     * @return the Hive column type name
     * @throws IllegalArgumentException if the code is not a known tag value type
     */
    private static String toHiveType(String valueType) {
        if (ConstCodes.TAG_VALUE_TYPE_LONG.equals(valueType)) {
            return "Bigint";
        } else if (ConstCodes.TAG_VALUE_TYPE_DECIMAL.equals(valueType)) {
            return "decimal";
        } else if (ConstCodes.TAG_VALUE_TYPE_STRING.equals(valueType)) {
            return "String";
        } else if (ConstCodes.TAG_VALUE_TYPE_DATE.equals(valueType)) {
            // Dates are stored as strings in the tag table.
            return "String";
        }
        throw new IllegalArgumentException("Unknown tag value type: " + valueType);
    }
}
