package com.atguigu.userprofile.app;

import com.atguigu.userprofile.bean.TagInfo;
import com.atguigu.userprofile.dao.TagInfoDAO;
import com.atguigu.userprofile.util.MyPropertiesUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

import java.util.List;
import java.util.Properties;
import java.util.stream.Collectors;

public class TaskMergeApp {

    // Daily tag-merge Spark job.
    //
    // 1. Read the currently enabled tag list from MySQL.
    // 2. (Re)create the daily wide table. Because tags can be enabled/disabled
    //    day to day, the table schema is unstable — it is rebuilt per business
    //    date by generated DDL:
    //      create table if not exists $tableName
    //      (uid string, $fieldListSQL)
    //      comment '...'
    //      ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
    //      location '<root>/<db>/<table>'
    // 3. Read:
    //    3.1 union each per-tag table into one tall table (uid, tag_code, tag_value)
    //    3.2 pivot the tall table (tag_code -> column) into the wide shape
    // 4. Write: insert overwrite into the wide table.
    //
    // Usage: TaskMergeApp <taskId> <busiDate>   (busiDate format: yyyy-MM-dd)
    public static void main(String[] args) {
        if (args.length < 2) {
            System.err.println("Usage: TaskMergeApp <taskId> <busiDate(yyyy-MM-dd)>");
            System.exit(1);
        }
        String taskId = args[0];   // currently unused by the merge itself; kept for CLI contract
        String busiDate = args[1];

        // 0. Spark environment (Hive support required for warehouse tables).
        SparkConf sparkConf = new SparkConf().setAppName("task_merge_app");//.setMaster("local[*]");

        // SparkSession is Closeable — ensure it is released even on failure.
        try (SparkSession sparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()) {

            // 1. Enabled tags for today, loaded from MySQL.
            List<TagInfo> tagInfoList = TagInfoDAO.getTagInfoListWithOn();
            if (tagInfoList == null || tagInfoList.isEmpty()) {
                // With zero tags the generated DDL/DML would be syntactically invalid
                // ("(uid string , )", empty pivot list) — nothing to do.
                System.err.println("No enabled tags found; nothing to merge for " + busiDate);
                return;
            }

            // Lower-cased tag codes drive the table columns, the union branches
            // and the pivot column list.
            List<String> tagCodes = tagInfoList.stream()
                    .map(tagInfo -> tagInfo.getTagCode().toLowerCase())
                    .collect(Collectors.toList());

            Properties properties = MyPropertiesUtil.load("config.properties");
            String hdfsPath = properties.getProperty("hdfs-store.path");
            String upName = properties.getProperty("user-profile.dbname");

            // 2. Drop and recreate the daily wide table (schema follows today's tags).
            String tableName = "user_tag_merge_" + busiDate.replace("-", "");

            String dropTableSQL = buildDropTableSQL(upName, tableName);
            System.out.println(dropTableSQL);
            sparkSession.sql(dropTableSQL);

            String createTableSQL = buildCreateTableSQL(upName, tableName, tagCodes, hdfsPath);
            System.out.println(createTableSQL);
            sparkSession.sql(createTableSQL);

            // 3 + 4. Pivot the per-tag tables into the wide shape and overwrite.
            String insertSQL = buildInsertSQL(upName, tableName, tagCodes, busiDate);
            sparkSession.sql("use " + upName);
            System.out.println(insertSQL);
            sparkSession.sql(insertSQL);
        }
    }

    // DDL: drop table if exists <db>.<table>
    private static String buildDropTableSQL(String dbName, String tableName) {
        return "drop table  if exists " + dbName + "." + tableName;
    }

    // DDL: create the wide table with one string column per tag code,
    // tab-delimited, located under <hdfsPath>/<db>/<table>.
    private static String buildCreateTableSQL(String dbName, String tableName, List<String> tagCodes, String hdfsPath) {
        // e.g. "tg_person_base_gender string,tg_person_base_agegroup string"
        String fieldListSQL = tagCodes.stream()
                .map(code -> code + " string")
                .collect(Collectors.joining(","));
        return "   create table if not exists " + dbName + "." + tableName + "\n" +
                "       (uid string , " + fieldListSQL + " )\n" +
                "       comment '标签宽表'\n" +
                "          ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t'\n" +
                "             location  '" + hdfsPath + "/" + dbName + "/" + tableName + "'";
    }

    // DML: insert overwrite table <db>.<table> select * from (tall union) pivot(...)
    private static String buildInsertSQL(String dbName, String tableName, List<String> tagCodes, String busiDate) {
        // 3.1 Tall table: one SELECT per tag table for the business date, e.g.
        //   select uid, 'tg_x' as tag_code, tag_value from tg_x where dt='...'
        //   union all ...
        String unionAllSQL = tagCodes.stream()
                .map(code -> "select uid,  '" + code + "' as tag_code ,tag_value from " + dbName + "." + code + " where dt ='" + busiDate + "'")
                .collect(Collectors.joining(" union all "));
        System.out.println(unionAllSQL);

        // 3.2 Pivot tall -> wide: one column per tag_code, max(tag_value) per uid.
        String tagCodeSQL = tagCodes.stream()
                .map(code -> "'" + code + "'")
                .collect(Collectors.joining(","));
        String selectSQL = "select * from (" + unionAllSQL + ")  " +
                "pivot ( max(tag_value)  for tag_code  in (" + tagCodeSQL + ")  ) ";

        return "insert overwrite table " + dbName + "." + tableName + " " + selectSQL;
    }
}
