package com.atguigu.upp.utils;

import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;

import java.io.IOException;
import java.io.InputStream;
import java.util.List;

/**
 * Created by Smexy on 2022/9/9
 *
 *  Encapsulates the (static) utility methods needed by the SQL-task computation.
 */
public final class SqlTaskExecuteUtil
{
    // Utility class: static helpers only — prevent instantiation.
    private SqlTaskExecuteUtil() {
    }

    /**
     * Queries which tag tables should be merged today.
     * <p>
     * Runs {@code SHOW TABLES IN <updbname>} and keeps every table whose name does
     * NOT start with the wide-table prefix — the wide table is the merge OUTPUT,
     * so it must be excluded from the input list.
     *
     * @param sparkSession an active session (with Hive support) used to run the query
     * @return names of the tag tables to merge, wide tables excluded
     */
    public static List<String> queryTagTableNameToMerge(SparkSession sparkSession){

        // Database holding the user-profile tag tables (from project properties).
        String updbname = PropertiesUtil.getValue("updbname");
        // Name prefix shared by the wide (merged) tables.
        String upwideprefix = PropertiesUtil.getValue("upwideprefix");

        String sql = "show tables in " + updbname ;
        Dataset<Row> ds = sparkSession.sql(sql);

        // A Dataset supports RDD-style operators. SHOW TABLES yields rows of
        // (database, tableName, isTemporary), so column index 1 is the table name.
        List<String> tableNames = ds.map((MapFunction<Row, String>) value -> value.getString(1), Encoders.STRING())
            // Drop the wide tables; only tag tables remain.
            .filter((String table) -> !table.startsWith(upwideprefix))
            .collectAsList();

        System.out.println("要合并的标签表:"+tableNames);

        return tableNames;

    }

    /**
     * Builds a MyBatis {@link SqlSessionFactory} from the given configuration resource.
     *
     * @param config classpath location of the MyBatis XML configuration file
     * @return a freshly built {@code SqlSessionFactory}
     * @throws IOException if the configuration resource cannot be read
     */
    public static SqlSessionFactory createSqlSessionFactory(String config) throws IOException {
        // try-with-resources guarantees the config stream is closed even if the
        // builder throws while parsing the configuration.
        try (InputStream inputStream = Resources.getResourceAsStream(config)) {
            return new SqlSessionFactoryBuilder().build(inputStream);
        }
    }

    /**
     * Builds a Hive-enabled {@link SparkSession} configured from project properties.
     *
     * @param appName the Spark application name to register
     * @return a (possibly shared) SparkSession obtained via {@code getOrCreate()}
     */
    public static SparkSession createSparkSession(String appName){

        SparkConf sparkConf = new SparkConf()
            .setAppName(appName)
            // By default Spark creates databases/tables locally; point the
            // warehouse at HDFS so DDL lands there instead.
            .set("spark.sql.warehouse.dir",PropertiesUtil.getValue("hiveWarehouse"))
            .setMaster(PropertiesUtil.getValue("masterUrl"));


        SparkSession sparkSession = SparkSession.builder()
            .config(sparkConf)
            .enableHiveSupport()
            .getOrCreate();

        return sparkSession;

    }
}
