package com.atguigu.upp.utils;

import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.apache.spark.sql.SparkSession;

import java.io.IOException;
import java.io.InputStream;

/**
 * Utility class providing factory methods for a MyBatis {@link SqlSessionFactory}
 * and a Hive-enabled Spark {@link SparkSession}.
 *
 * Created by Smexy on 2022/10/13
 */
public class DBUtil
{
    // Utility class — not meant to be instantiated.
    private DBUtil() {
    }

    /**
     * Builds a MyBatis {@code SqlSessionFactory} from the given configuration resource.
     *
     * @param config classpath location of the MyBatis XML configuration file
     * @return a newly built {@code SqlSessionFactory}
     * @throws IOException if the configuration resource cannot be opened or read
     */
    public static SqlSessionFactory getSqlSessionFactory(String config) throws IOException {
        // try-with-resources guarantees the config stream is closed even if build() throws.
        try (InputStream inputStream = Resources.getResourceAsStream(config)) {
            return new SqlSessionFactoryBuilder().build(inputStream);
        }
    }

    /**
     * Creates (or reuses) a Hive-enabled {@code SparkSession}.
     *
     * The master URL and warehouse directory are read from the application
     * properties via {@code PropertiesUtil} ("masterUrl" and "hiveWarehouse").
     *
     * @param appName name to assign to the Spark application
     * @return the shared {@code SparkSession} instance
     */
    public static SparkSession getSparkSession(String appName){

        SparkSession sparkSession = SparkSession.builder()
                                            .master(PropertiesUtil.getProperty("masterUrl"))
                                            .appName(appName)
                                            // By default Spark creates the Hive warehouse locally;
                                            // redirect it to the configured HDFS path instead.
                                            .config("spark.sql.warehouse.dir", PropertiesUtil.getProperty("hiveWarehouse"))
                                            .enableHiveSupport()
                                            .getOrCreate();

        return sparkSession;

    }
}
