package com.atguigu.upp.app;

import com.atguigu.upp.bean.TagInfo;
import com.atguigu.upp.bean.TaskInfo;
import com.atguigu.upp.bean.TaskTagRule;
import com.atguigu.upp.service.MySQLDBService;
import com.atguigu.upp.utils.TagValueTypeConstant;
import com.atguigu.upp.utils.UPPUtil;
import lombok.extern.log4j.Log4j;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.spark.sql.SparkSession;

import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Created by Smexy on 2022/11/30
 *
 *  ① Bind the metadata beans
 *  ② Query the metadata of the task to be computed
 *  ③ Generate the CREATE TABLE statement
 *  ④ Generate and execute the INSERT statement
 */
@Log4j
public class SqlTaskExecuteApp
{
    /**
     * Driver entry point. Reads the task id and business date from the command
     * line, loads the task metadata from MySQL, then creates the Hive tag table
     * and populates its partition for the given date via Spark SQL.
     *
     * @param args args[0] = task id, args[1] = business date (yyyy-MM-dd)
     * @throws IOException if the MyBatis config cannot be read
     */
    public static void main(String[] args) throws IOException {

        // Alternative to exporting HADOOP_USER_NAME in the environment
        // (restart IDEA with admin rights) — pick one of the two approaches:
        //System.setProperty("HADOOP_USER_NAME","atguigu");

        // ① Receive and validate arguments. Fail fast with a usage message
        //    instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            throw new IllegalArgumentException(
                "Usage: SqlTaskExecuteApp <taskId> <doDate(yyyy-MM-dd)>");
        }
        String taskId = args[0];
        String doDate = args[1];

       /*String taskId = "1";
        String doDate = "2020-06-14";*/

        // ② Query this task's metadata (task_info, tag_info, task_tag_rule) by task id.
        SqlSessionFactory sqlSessionFactory = UPPUtil.createSqlSessionFactory("mysql_config.xml");
        MySQLDBService mySQLDBService = new MySQLDBService(sqlSessionFactory.openSession());

        // Parse once instead of three times; also surfaces a bad taskId early.
        Long id = Long.valueOf(taskId);
        TaskInfo taskInfo = mySQLDBService.getTaskByTaskId(id);
        TagInfo tagInfo = mySQLDBService.getTagInfoByTaskId(id);
        List<TaskTagRule> rules = mySQLDBService.getRulesByTaskId(id);

        // ③ Generate the DDL and the insert statement.
        String createTableSql = createTable(tagInfo);
        String insertSql = generateInsertSql(taskInfo, tagInfo, rules, doDate);

        // ④ Execute the SQL; always stop the session so Spark resources are
        //    released even when a statement fails.
        SparkSession sparkSession = UPPUtil.createSparkSession("SqlTaskExecuteApp");
        try {
            sparkSession.sql(createTableSql);
            sparkSession.sql(insertSql);
        } finally {
            sparkSession.stop();
        }
    }


    /*
insert into table upp220730.TAG_POPULATION_ATTRIBUTES_NATURE_GENDER partition (dt='2020-06-14')
select
    uid,
     case tagValue
         when 'M' then '男性'
         when 'F' then '女性'
         when 'U' then '未知'
      end tagValue
from (
         task_info.task_sql
     ) tmp



     */
    /**
     * Builds the INSERT OVERWRITE statement that writes (uid, tagValue) for the
     * given business date partition, wrapping the task's own SQL as a subquery.
     *
     * @param taskInfo task metadata holding the raw task SQL
     * @param tagInfo  tag metadata (unused here; kept for signature stability)
     * @param rules    optional mappings from query values to sub-tag values
     * @param do_date  business date used both as the partition and to substitute
     *                 the "do_date" placeholder inside the task SQL
     * @return the complete INSERT statement
     */
    private static String generateInsertSql(TaskInfo taskInfo,TagInfo tagInfo,List<TaskTagRule> rules,String do_date){

        String template = " insert overwrite table %s.%s partition (dt='%s') " +
                          " select  uid, %s from ( %s )tmp ";

        // Target database name
        String dbName = UPPUtil.getProperty("updbname");
        // Target table name is the tag code
        String table = tagInfo.getTagCode();

        /*
                tagValue expression:
                  rules present -> map the SQL result onto the level-4 tag values
                  no rules      -> write the computed value through unchanged
         */
        String tagValue;
        if (!rules.isEmpty()){
            // Map the computed value onto the configured sub-tag values.
            String whenStr = rules.stream()
                                  .map(rule -> String.format(" when '%s' then '%s' ", rule.getQueryValue(), rule.getSubTagValue()))
                                  .collect(Collectors.joining(" "));

            tagValue = " case tagValue " + whenStr + " end tagValue ";
        }else{
            tagValue = "tagValue";
        }

        // Substitute the business date placeholder and drop semicolons so the
        // task SQL can be embedded as a subquery.
        // NOTE(review): replace(";", " ") strips EVERY semicolon, including any
        // inside string literals of the task SQL — confirm task SQL never needs one.
        String taskSql = taskInfo.getTaskSql().replace("do_date", do_date).replace(";", " ");

        String insertSql = String.format(template, dbName, table, do_date, tagValue, taskSql);

        // Informational trace of the generated SQL — was mistakenly logged at ERROR level.
        log.info(insertSql);

        return insertSql;

    }

    /*
create table if not exists upp220730.TAG_POPULATION_ATTRIBUTES_NATURE_GENDER(
    uid string,
    tagValue string
)
comment "性别"
partitioned by (dt string)
row format delimited fields terminated by "\t"
location "/upp220730/TAG_POPULATION_ATTRIBUTES_NATURE_GENDER";

     */
    /**
     * Builds the CREATE TABLE IF NOT EXISTS statement for the tag table:
     * columns (uid string, tagValue &lt;mapped type&gt;), partitioned by dt,
     * tab-delimited text, located under the configured HDFS parent path.
     *
     * @param tagInfo tag metadata supplying code, name, and value type
     * @return the complete DDL statement
     * @throws IllegalArgumentException if the tag value type is unrecognized
     */
    private static String createTable(TagInfo tagInfo){

        // The '\t' below embeds a literal TAB character into the DDL, which
        // Hive accepts as the field delimiter.
        String template ="create table if not exists %s.%s(  uid string,  tagValue %s )  "  +
                         "comment '%s'  partitioned by (dt string) row format delimited fields terminated by '\t' " +
                          "location '%s/%s' ";


        // Database name and the HDFS parent directory that holds the table data
        String dbName = UPPUtil.getProperty("updbname");
        String parentPath = UPPUtil.getProperty("hdfsPath");

        // Table name is the tag code
        String table = tagInfo.getTagCode();

        // Map the tag's value type onto the Hive column type. Fail fast on an
        // unknown type instead of silently generating invalid DDL.
        String tagValueType;
        switch (tagInfo.getTagValueType()){
            case TagValueTypeConstant.TAG_VALUE_TYPE_LONG: tagValueType="bigint"; break;
            case TagValueTypeConstant.TAG_VALUE_TYPE_DECIMAL: tagValueType="decimal(16)"; break;
            case TagValueTypeConstant.TAG_VALUE_TYPE_STRING: tagValueType="string"; break;
            case TagValueTypeConstant.TAG_VALUE_TYPE_DATE: tagValueType="string"; break;
            default:
                throw new IllegalArgumentException(
                    "Unsupported tag value type: " + tagInfo.getTagValueType());
        }

        String createTableSql = String.format(template, dbName, table, tagValueType, tagInfo.getTagName(), parentPath, table);

        // Informational trace of the generated DDL
        log.info(createTableSql);

        return createTableSql;

    }
}
