package com.atguigu.upp.app;

import com.atguigu.upp.bean.TagInfo;
import com.atguigu.upp.service.CKDBService;
import com.atguigu.upp.service.MysqlDBService;
import com.atguigu.upp.utils.DBUtil;
import com.atguigu.upp.utils.PropertiesUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.io.IOException;
import java.util.List;
import java.util.Locale;
import java.util.Properties;
import java.util.stream.Collectors;

/**
 * Created by Smexy on 2022/10/16
 */
public class ExportWideTableToCK
{
    /**
     * Entry point for the daily export job.
     *
     * @param args args[0] = taskId (currently unused beyond parsing/validation),
     *             args[1] = processing date in {@code yyyy-MM-dd} form
     * @throws IOException if a MyBatis configuration file cannot be read
     */
    public static void main(String[] args) throws IOException {

        // Parsed for argument validation; taskId is not otherwise used here.
        Integer taskId = Integer.valueOf(args[0]);
        String doDate = args[1];

        SqlSessionFactory ckFactory = DBUtil.getSqlSessionFactory("mybatis_ck.xml");
        SqlSessionFactory mysqlFactory = DBUtil.getSqlSessionFactory("mybatis_mysql.xml");

        // try-with-resources: SqlSession is Closeable; the original code leaked both sessions.
        try (SqlSession ckSession = ckFactory.openSession();
             SqlSession mysqlSession = mysqlFactory.openSession()) {

            CKDBService ckDBService = new CKDBService(ckSession);
            MysqlDBService mysqlDBService = new MysqlDBService(mysqlSession);

            // Query which tags are enabled for today's computation.
            List<TagInfo> tags = mysqlDBService.getTagInfoNameEnableToday();

            SparkSession sparkSession = DBUtil.getSparkSession("ExportWideTableToCK");
            try {
                // Export the wide table for this date to ClickHouse.
                exportDataToCK(tags, doDate, ckDBService, sparkSession);
            } finally {
                // Release Spark resources even if the export fails.
                sparkSession.stop();
            }
        }
    }

    /**
     * Drops and re-creates the wide table in ClickHouse (for idempotent re-runs),
     * then bulk-loads the matching Hive table into it via Spark JDBC.
     *
     * @param tags         tags enabled for this run; one ClickHouse column is created per tag code
     * @param doDate       processing date ({@code yyyy-MM-dd}); becomes part of the table name
     * @param ckDBService  service used to issue DROP/CREATE DDL against ClickHouse
     * @param sparkSession session used to read from Hive and write over JDBC
     */
    private static void exportDataToCK(List<TagInfo> tags, String doDate, CKDBService ckDBService, SparkSession sparkSession) {

        // Resolve database and table names from configuration.
        String db = PropertiesUtil.getProperty("updbname");
        String table = PropertiesUtil.getProperty("upwideprefix") + doDate.replace('-', '_');

        // Build the dynamic column DDL: one "<tag_code>  String" entry per enabled tag.
        // Locale.ROOT keeps lowercasing locale-independent (avoids the Turkish-I problem).
        List<String> columns = tags
            .stream()
            .map(tagInfo -> tagInfo.getTagCode().toLowerCase(Locale.ROOT).concat("  String "))
            .collect(Collectors.toList());

        String columnStr = StringUtils.join(columns, ",");

        // Drop first so re-running the job for the same date is idempotent.
        ckDBService.dropWideTable(table);

        // Re-create the table with today's column set.
        ckDBService.createWideTable(table, columnStr);

        // Read the whole wide table for this date from Hive.
        String sql = "select * from %s.%s";
        Dataset<Row> result = sparkSession.sql(String.format(sql, db, table));

        // Append into the pre-created ClickHouse table over JDBC.
        result
            .write()
            .mode(SaveMode.Append)   // table already exists; Append avoids Spark auto-creating it
            .option("driver", PropertiesUtil.getProperty("ck.jdbc.driver.name"))
            .option("batchsize", 500)           // write in batches of 500 rows
            .option("isolationLevel", "NONE")   // ClickHouse has no transactions
            .option("numPartitions", "4")       // number of parallel JDBC writers
            .jdbc(PropertiesUtil.getProperty("ck.jdbc.url"), table, new Properties());
    }
}
