package com.atguigu.upp.app;

import com.atguigu.upp.bean.TagInfo;
import com.atguigu.upp.bean.TagValueTypeConstant;
import com.atguigu.upp.service.ClickhouseService;
import com.atguigu.upp.service.MysqlDBService;
import com.atguigu.upp.service.UPPUtil;

import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.spark.sql.SparkSession;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;

/**
 * Created by Smexy on 2023/1/6
 *
 *  Flow:
 *  1) Query the data from the per-day wide table
 *  2) Use ClickHouse functions to build the bitmaps
 *  3) Write into the target tables
 *          Rows are routed into different tables according to the stored tag
 *          value's type. Reason: ClickHouse type checking is very strict — a
 *          date must be stored as a Date column; a String cannot be used in
 *          date arithmetic.
 *
 *          4 target tables, each with tagCode, tagValue, us (bitmap), partitioned by doDate.
 */
public class WriteBitmapApp
{
    /**
     * Entry point. Looks up which tags were computed today, partitions them by
     * tag value type, and writes each group's bitmaps into its type-specific
     * ClickHouse table for the given partition date.
     *
     * @param args args[0] = taskId (read but currently unused by this app),
     *             args[1] = doDate, the partition date in yyyy-MM-dd format
     * @throws IOException if a MyBatis config file cannot be read
     */
    public static void main(String[] args) throws IOException {

        // Fail fast with a usable message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            throw new IllegalArgumentException("Usage: WriteBitmapApp <taskId> <doDate>");
        }
        String taskId = args[0]; // NOTE(review): taskId is never used below — confirm whether it should filter the tag query
        String doDate = args[1];

        // Query which tags were computed today (metadata lives in MySQL).
        // NOTE(review): the SqlSessions opened here are never closed. Tolerable for
        // a run-once batch job, but try-with-resources on SqlSession would be cleaner.
        SqlSessionFactory mysqlSSF = UPPUtil.getSqlSessionFactoryByConfig("mysql_config.xml");
        MysqlDBService mysqlDBService = new MysqlDBService(mysqlSSF.openSession());
        List<TagInfo> tags = mysqlDBService.getTagInfoTodayExecute();

        // Read from the Hive wide table, write into ClickHouse.
        SqlSessionFactory ckSSF = UPPUtil.getSqlSessionFactoryByConfig("ck_config.xml");
        ClickhouseService clickhouseService = new ClickhouseService(ckSSF.openSession());

        // Partition today's tags into four groups by tag value type. ClickHouse
        // type checking is strict (e.g. a Date column cannot hold a String), so
        // each value type is written to its own target table.
        List<TagInfo> stringTags  = new ArrayList<>();
        List<TagInfo> bigintTags  = new ArrayList<>();
        List<TagInfo> decimalTags = new ArrayList<>();
        List<TagInfo> dateTags    = new ArrayList<>();

        for (TagInfo tag : tags) {
            switch (tag.getTagValueType()) {
                case TagValueTypeConstant.TAG_VALUE_TYPE_LONG:    bigintTags.add(tag);  break;
                case TagValueTypeConstant.TAG_VALUE_TYPE_DECIMAL: decimalTags.add(tag); break;
                case TagValueTypeConstant.TAG_VALUE_TYPE_STRING:  stringTags.add(tag);  break;
                case TagValueTypeConstant.TAG_VALUE_TYPE_DATE:    dateTags.add(tag);    break;
                default:
                    // Unknown value types have no matching target table and are
                    // skipped, preserving the original (silent) behavior.
                    break;
            }
        }

        // One write per type-specific table; empty groups are skipped inside.
        writeBitmap(clickhouseService, stringTags,  "user_tag_value_string",  doDate);
        writeBitmap(clickhouseService, bigintTags,  "user_tag_value_long",    doDate);
        writeBitmap(clickhouseService, decimalTags, "user_tag_value_decimal", doDate);
        writeBitmap(clickhouseService, dateTags,    "user_tag_value_date",    doDate);

        // spark-submit can only launch applications that create a SparkContext,
        // so a SparkSession is created even though no Spark job runs here.
        SparkSession sparkSession = UPPUtil.getSparkSession("WriteBitmapApp");
        // Stop the context so Spark's non-daemon threads do not keep the JVM alive.
        sparkSession.stop();
    }

    /**
     * Deletes any rows already written for {@code doDate} in {@code table}, then
     * inserts bitmaps for the given tags, sourcing from the per-day wide table.
     *
     * @param clickhouseService ClickHouse access layer
     * @param tags              tags of a single value type computed today; no-op when empty
     * @param table             target ClickHouse table for that value type
     * @param doDate            partition date, yyyy-MM-dd
     */
    private static void writeBitmap(ClickhouseService clickhouseService, List<TagInfo> tags, String table, String doDate) {

        // Nothing of this value type was computed today — nothing to write.
        if (tags.isEmpty()) {
            return;
        }

        // Idempotency: drop rows already written today before re-inserting.
        clickhouseService.dropBitmapToday(doDate, table);

        // Source wide table name, e.g. <upwideprefix>2020_06_14.
        String source = UPPUtil.getPropertyValue("upwideprefix") + doDate.replace("-", "_");

        // Build the "('tag_code',tag_code),..." fragment consumed by the INSERT SQL.
        // Locale.ROOT keeps lowercasing independent of the JVM default locale
        // (avoids the Turkish dotless-i problem in generated identifiers).
        String tagSql = tags.stream()
                .map(tag -> {
                    String code = tag.getTagCode().toLowerCase(Locale.ROOT);
                    return "('" + code + "'," + code + ")";
                })
                .collect(Collectors.joining(","));

        clickhouseService.insertBitmapToCk(table, doDate, tagSql, source);
    }
}
