package com.atguigu.userprofile.app;

import com.atguigu.userprofile.MysqlUtil.MyPropsUtil;
import com.atguigu.userprofile.bean.Taginfo;
import com.atguigu.userprofile.constant.ConstCode;
import com.atguigu.userprofile.dao.TaginfoDao;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;


import java.util.List;
import java.util.stream.Collectors;

public class TaskMerge {
    // Goal: merge all per-tag tables produced by the upstream tagging task into a
    // single wide tag table (one row per uid, one column per tag).
    //
    // Steps:
    //   1. Read external arguments: taskId, busiDate
    //   2. Look up the enabled tag definitions (each tag code doubles as its table name)
    //   3. Drop and re-create the wide table for this business date
    //   4. Build the insert...select (union all of per-tag tables, then pivot)
    //   5. Execute all statements through a Hive-enabled SparkSession

    public static void main(String[] args) {
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // 1. External arguments. Fail fast with a usable message instead of an
        //    ArrayIndexOutOfBoundsException when the scheduler misconfigures the job.
        if (args.length < 2) {
            throw new IllegalArgumentException("usage: TaskMerge <taskId> <busiDate(yyyy-MM-dd)>");
        }
        String taskId = args[0];   // currently unused, kept for scheduler compatibility
        String busiDate = args[1];

        // 2. Enabled tag definitions. An empty list would produce malformed SQL
        //    ("( uid string , )", empty pivot list), so refuse to continue.
        List<Taginfo> taginfos = TaginfoDao.selectTagInfosByTaskWithStatusEnable();
        if (taginfos == null || taginfos.isEmpty()) {
            throw new IllegalStateException("no enabled tags found; nothing to merge for " + busiDate);
        }

        String upDbName = MyPropsUtil.get(ConstCode.UP_DBNAME);
        String tableName = "up_merge_" + busiDate.replace("-", "");
        String hdfsPath = MyPropsUtil.get(ConstCode.HDFS_STORE_PATH);

        // 3/4. Assemble the three statements up front so they can be logged together.
        String dropTable = "drop table if exists " + upDbName + "." + tableName;
        String createTable = buildCreateTable(upDbName, tableName, hdfsPath, taginfos);
        String insertSelect = buildInsertSelect(upDbName, tableName, taginfos);
        System.out.println("createTable: " + createTable);
        System.out.println("insertselect:" + insertSelect);

        // 5. Execute. NOTE: app name keeps the historical "marge" spelling on purpose —
        //    external monitoring may filter on it; rename in coordination with ops.
        SparkConf sparkConf = new SparkConf().setAppName("task_marge_app");//.setMaster("local[*]");
        SparkSession session = SparkSession.builder().enableHiveSupport().config(sparkConf).getOrCreate();
        try {
            session.sql(dropTable);
            session.sql(createTable);
            session.sql(insertSelect);
        } finally {
            // Always release the driver/executors, even if a statement fails.
            session.stop();
        }
    }

    /**
     * Builds the CREATE TABLE DDL for the wide tag table: a "uid" string column plus
     * one string column per enabled tag code, stored as tab-delimited text under
     * {hdfsPath}/{upDbName}/{tableName}.
     */
    private static String buildCreateTable(String upDbName, String tableName,
                                           String hdfsPath, List<Taginfo> taginfos) {
        String tagColumns = taginfos.stream()
                .map(taginfo -> taginfo.getTagCode().toLowerCase() + " string")
                .collect(Collectors.joining(" , "));
        return "create table if not exists " + upDbName + "." + tableName +
                " ( uid string , " + tagColumns + " )" +
                // keep a space between "by" and the delimiter literal — the original
                // emitted "by'\t'" which is fragile for the Hive parser
                " row format delimited fields terminated by '\\t' " +
                " location '" + hdfsPath + "/" + upDbName + "/" + tableName + "'";
    }

    /**
     * Builds the INSERT OVERWRITE statement: UNION ALL of every per-tag table as
     * (uid, tag_code, tag_value) rows, then PIVOT tag_code back into one column
     * per tag, taking max(tag_value) per uid.
     */
    private static String buildInsertSelect(String upDbName, String tableName,
                                            List<Taginfo> taginfos) {
        String unionSql = taginfos.stream()
                .map(taginfo -> {
                    String tagCode = taginfo.getTagCode().toLowerCase();
                    return " select uid , '" + tagCode + "' as tag_code , tag_value from "
                            + upDbName + "." + tagCode;
                })
                .collect(Collectors.joining(" union all"));
        String pivotIn = taginfos.stream()
                .map(taginfo -> "'" + taginfo.getTagCode().toLowerCase() + "'")
                .collect(Collectors.joining(","));
        String selectSql = " select * from (" + unionSql
                + ") pivot ( max(tag_value) as tag_values for tag_code in (" + pivotIn + "))";
        return " insert overwrite table " + upDbName + "." + tableName + selectSql;
    }
}
