package com.atguigu.upp.apps;

import com.atguigu.upp.utils.PropertiesUtil;
import com.atguigu.upp.utils.SqlTaskExecuteUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.sql.SparkSession;

import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;

/**
 * Created by Smexy on 2022/9/13
 */
public class MergeHiveWideTableApp
{

    /**
     * Scheduler entry point. The scheduler always passes exactly two arguments:
     * {@code args[0]} is the task id and {@code args[1]} is the business date
     * (format {@code yyyy-MM-dd}).
     *
     * <p>The job generates and executes two SQL statements via Spark:
     * <ol>
     *   <li>a {@code CREATE TABLE IF NOT EXISTS} for the daily tag wide table, and</li>
     *   <li>an {@code INSERT OVERWRITE} that pivots every individual tag table
     *       into that wide table (one column per tag).</li>
     * </ol>
     *
     * @throws IllegalArgumentException if fewer than two arguments are supplied
     */
    public static void main(String[] args) {

        // Fail fast with a clear message instead of ArrayIndexOutOfBoundsException
        // when the scheduler is misconfigured.
        if (args.length < 2) {
            throw new IllegalArgumentException(
                    "Expected two arguments: <taskId> <businessDate>, got " + args.length);
        }

        // taskId is part of the fixed scheduler contract; it is not used by this
        // particular job but is parsed to validate the invocation.
        Integer taskId = Integer.parseInt(args[0]);
        String do_date = args[1];

        SparkSession sparkSession = SqlTaskExecuteUtil.createSparkSession("MergeHiveWideTableApp");

        try {
            // Names of the tag tables that must be merged into the wide table.
            List<String> tableNames = SqlTaskExecuteUtil.queryTagTableNameToMerge(sparkSession);

            // DDL for the wide table, then the pivoting INSERT statement.
            String tableSql = createTableSql(do_date, tableNames);
            String insertSql = createInsertSql(do_date, tableNames);

            sparkSession.sql(tableSql);
            sparkSession.sql(insertSql);
        } finally {
            // Release Spark resources even when a statement fails.
            sparkSession.stop();
        }
    }

    /*
    Shape of the generated INSERT (example):

    Aggregated column: tag_value
    Pivot column:      tag_code (the tag name, generated per branch)
    Group-by column:   uid

    insert overwrite table <db>.<table>
    select *
    from (
        select uid, tag_value, 'TAG_A' tag_code from <db>.tag_a where dt='<date>'
        union all
        select uid, tag_value, 'TAG_B' tag_code from <db>.tag_b where dt='<date>'
    ) t1
    pivot (
        min(tag_value)
        for tag_code in ('TAG_A', 'TAG_B')
    )
    */

    /**
     * Builds the {@code INSERT OVERWRITE ... PIVOT} statement that merges all tag
     * tables for the given business date into the daily wide table.
     *
     * @param do_date    business date in {@code yyyy-MM-dd} format; selects the
     *                   {@code dt} partition of every tag table
     * @param tableNames names of the tag tables to merge; each becomes one pivot
     *                   branch and one column in the wide table
     * @return the complete SQL insert statement
     */
    private static String createInsertSql(String do_date, List<String> tableNames) {

        String template = "insert overwrite table %s.%s " +
                          "select * from  ( %s ) t1 pivot ( min(tag_value) for tag_code in ( %s  )   )";

        // Resolve target database and the date-suffixed wide table name.
        String updbname = PropertiesUtil.getValue("updbname");
        String upwideprefix = PropertiesUtil.getValue("upwideprefix");
        String tableName = upwideprefix + do_date.replace('-', '_');

        // One SELECT per tag table; the tag name (uppercased) doubles as the
        // pivot key. Locale.ROOT keeps the uppercasing locale-independent
        // (e.g. avoids the Turkish dotted-I problem corrupting identifiers).
        String unionTemplate = " select uid, tag_value ,'%s' tag_code  from  %s.%s where dt='%s'";
        List<String> tagSql = tableNames.stream()
                .map(tagName -> String.format(unionTemplate,
                        tagName.toUpperCase(Locale.ROOT), updbname, tagName, do_date))
                .collect(Collectors.toList());
        String queryTableSql = StringUtils.join(tagSql, " union all ");

        // Comma-separated quoted tag codes for the PIVOT ... IN (...) clause;
        // must match the tag_code literals generated above exactly.
        List<String> tagNameSql = tableNames.stream()
                .map(tag -> "'" + tag.toUpperCase(Locale.ROOT) + "'")
                .collect(Collectors.toList());
        String inSql = StringUtils.join(tagNameSql, ',');

        String insertSql = String.format(template, updbname, tableName, queryTableSql, inSql);

        System.out.println("Insert statement: " + insertSql);

        return insertSql;
    }

    /**
     * Builds the {@code CREATE TABLE IF NOT EXISTS} statement for the daily wide
     * table: one {@code uid} column plus one string column per tag table, e.g.
     *
     * <pre>
     *   tag_person_nature_period string,
     *   tag_person_nature_gender string
     * </pre>
     *
     * @param do_date    business date in {@code yyyy-MM-dd} format; becomes part
     *                   of the table name (dashes replaced with underscores)
     * @param tableNames tag table names; each becomes a string column
     * @return the complete SQL create statement
     */
    private static String createTableSql(String do_date, List<String> tableNames) {

        // "\\t" yields a literal \t in the SQL text, which Hive interprets as
        // the tab field delimiter.
        String template = "create table  if not exists %s.%s ( uid string, %s ) " +
                          "comment '标签宽表' " +
                          "row format delimited fields terminated by '\\t' " +
                          "location '%s/%s'";

        // Resolve target database and the date-suffixed wide table name.
        String updbname = PropertiesUtil.getValue("updbname");
        String upwideprefix = PropertiesUtil.getValue("upwideprefix");
        String tableName = upwideprefix + do_date.replace('-', '_');

        // Column list: every tag table contributes one "name string" column.
        List<String> names = tableNames.stream()
                .map(name -> name + " string")
                .collect(Collectors.toList());
        String tagColumns = StringUtils.join(names, ',');

        String hdfsPath = PropertiesUtil.getValue("hdfsPath");

        String sql = String.format(template, updbname, tableName, tagColumns, hdfsPath, tableName);

        System.out.println("Create-table statement: " + sql);

        return sql;
    }

}
