package com.atguigu.upp.app;

import com.atguigu.upp.bean.TagInfo;
import com.atguigu.upp.service.CKDBService;
import com.atguigu.upp.service.MysqlDBService;
import com.atguigu.upp.utils.PropertiesUtil;
import com.atguigu.upp.utils.UPPUtil;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import javax.xml.crypto.Data;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
import java.util.Properties;
import java.util.stream.Collectors;

/**
 * Created by Smexy on 2022/11/12
 *
 *  使用Spark查询hive中的宽表，再写入到CK。
 *      写入要保证幂等性。
 *          ①写入之前，先删表
 *          ②再建表
 *          ③再写入
 *
 */
/**
 * Copies the Hive wide table for a given execution date into ClickHouse via Spark.
 *
 * The write is made idempotent by:
 *   1) dropping the target CK table,
 *   2) re-creating it from today's tag metadata,
 *   3) appending the Hive data.
 *
 * Created by Smexy on 2022/11/12
 */
public class CopyWideTableToCK
{
    /**
     * Entry point.
     *
     * @param args args[0] = taskId (currently unused), args[1] = execution date (yyyy-MM-dd)
     * @throws IOException if reading the property/config files fails
     */
    public static void main(String[] args) throws IOException {

        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
        if (args == null || args.length < 2) {
            throw new IllegalArgumentException("Usage: CopyWideTableToCK <taskId> <doDate>");
        }
        String taskId = args[0];
        String doDate = args[1];

        // Session facades: MySQL holds the tag metadata, CK is the write target.
        SqlSessionFactory sqlSessionFactory1 = UPPUtil.getSqlSessionFactoryByConfig("config.xml");
        SqlSessionFactory sqlSessionFactory2 = UPPUtil.getSqlSessionFactoryByConfig("ckconfig.xml");
        MysqlDBService mysqlDBService = new MysqlDBService(sqlSessionFactory1.openSession());
        CKDBService ckdbService = new CKDBService(sqlSessionFactory2.openSession());

        SparkSession sparkSession = UPPUtil.getSparkSession("MergeWideTableApp");
        try {
            // Tags scheduled for execution today define the CK table's columns.
            List<TagInfo> tags = mysqlDBService.getTagInfoTodayExecute();

            // (Re)create the CK table and copy the Hive data into it.
            copyDataToCK(doDate, tags, ckdbService, sparkSession);
        } finally {
            // Always release the Spark context, even if the copy fails;
            // otherwise the application may keep cluster resources alive.
            sparkSession.stop();
        }
    }

    /**
     * Drops and re-creates the ClickHouse wide table for {@code doDate}, then
     * appends the matching Hive table's rows to it over JDBC.
     *
     * @param doDate       execution date (yyyy-MM-dd); dashes become underscores in the table name
     * @param tags         tag metadata driving the CK column list; must be non-empty
     * @param ckdbService  ClickHouse DDL service (drop/create table)
     * @param sparkSession active Spark session used to read Hive and write CK
     * @throws IllegalArgumentException if {@code tags} is null or empty (would yield broken DDL)
     */
    public static void copyDataToCK(String doDate,List<TagInfo> tags,CKDBService ckdbService,SparkSession sparkSession){

        // An empty tag list would produce "CREATE TABLE ... ()" — reject it explicitly.
        if (tags == null || tags.isEmpty()) {
            throw new IllegalArgumentException("No tags to execute for date " + doDate);
        }

        String dbName = PropertiesUtil.getProperty("updbname");
        String tableName = PropertiesUtil.getProperty("upwideprefix") + doDate.replace("-","_");

        // Drop the CK table first so re-runs of the same date are idempotent.
        ckdbService.dropTable(tableName);

        // Build the column DDL fragment: "TAG1 String,TAG2 String,...".
        // Locale.ROOT keeps the uppercasing deterministic regardless of the JVM's
        // default locale (e.g. the Turkish dotless-i problem).
        String columnStr = tags.stream()
                               .map(t -> t.getTagCode().toUpperCase(Locale.ROOT).concat(" String"))
                               .collect(Collectors.joining(","));

        // Create the CK table with today's columns.
        ckdbService.createTable(tableName,columnStr);

        // Read the whole Hive wide table for this date.
        Dataset<Row> data = sparkSession.sql(String.format("select * from %s.%s", dbName, tableName));

        // JDBC user/password would go here; none are configured —
        // presumably CK accepts unauthenticated writes. TODO confirm.
        Properties properties = new Properties();
        // Append into the freshly created (empty) CK table.
        data.write()
            .mode(SaveMode.Append)  // table was just created, so Append == full load
            .option("driver",PropertiesUtil.getProperty("ck.jdbc.driver.name"))
            .option("batchsize",500)
            .option("isolationLevel","NONE")   // CK has no transactions; disable them
            .option("numPartitions", "4") // cap write-side parallelism
            .jdbc(PropertiesUtil.getProperty("ck.jdbc.url"),tableName,properties);

    }
}
