package cn.ltgodm.house.util;

import cn.hutool.core.util.StrUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Properties;

/**
 * Utility component wrapping a shared {@link SparkSession}: reads CSV data from
 * HDFS into DataFrames and persists DataFrames into MySQL via JDBC. Also lazily
 * exposes a singleton Hadoop {@link FileSystem} handle for the same HDFS cluster.
 *
 * <p>Thread-safety: {@link #getHdfsFileSystem()} uses double-checked locking;
 * the cached field is {@code volatile} so the published reference is safe under
 * the Java Memory Model. The class is a Spring singleton, so the class-level
 * lock and the instance field refer to the same cached handle.
 *
 * @author ltgodm
 * @date 2024-06-23 13:13:51
 */
@Component
public class SparkUtil {
    // NOTE(review): these properties name the HDFS NameNode host/port even though
    // the keys are "spark.*" — confirm against the application config.
    @Value("${spark.host}")
    private String SPARK_HOST;
    @Value("${spark.port}")
    private String SPARK_PORT;
    @Value("${spring.datasource.url}")
    private String MYSQL_URL;
    @Value("${spring.datasource.username}")
    private String MYSQL_USERNAME;
    @Value("${spring.datasource.password}")
    private String MYSQL_PASSWORD;
    @Value("${spring.datasource.driver-class-name}")
    private String MYSQL_DRIVER;
    @Resource
    private SparkSession sparkSession;

    // Lazily initialized HDFS handle. MUST be volatile: double-checked locking
    // without volatile allows another thread to observe a partially constructed
    // FileSystem reference.
    private volatile FileSystem fileSystem = null;

    /**
     * Returns the injected shared {@link SparkSession}.
     *
     * @return the Spring-managed SparkSession
     */
    public SparkSession getSparkSession() {
        return sparkSession;
    }

    /**
     * Loads a headered CSV file from HDFS into a DataFrame, dropping any row
     * that contains a null value.
     *
     * @param filePath path of the CSV file relative to the HDFS root
     * @return the cleaned DataFrame (rows with nulls removed)
     */
    public Dataset<Row> getDataFrame(String filePath) {
        return sparkSession.read().option("header", "true").csv(StrUtil.format("hdfs://{}:{}/{}", SPARK_HOST, SPARK_PORT, filePath)).na().drop();
    }

    /**
     * Writes the given DataFrame into MySQL via JDBC, replacing the target
     * table if it already exists ({@link SaveMode#Overwrite}).
     *
     * @param dataFrame the data to persist
     * @param tableName the destination MySQL table name
     */
    public void genMysqlTable(Dataset<Row> dataFrame, String tableName) {
        Properties prop = new Properties();
        prop.setProperty("driver", MYSQL_DRIVER);
        prop.setProperty("user", MYSQL_USERNAME);
        prop.setProperty("password", MYSQL_PASSWORD);
        dataFrame.write().mode(SaveMode.Overwrite).jdbc(MYSQL_URL, tableName, prop);
        System.out.println("------数据成功保存至数据库中------");
    }

    /**
     * Returns the lazily-initialized singleton HDFS {@link FileSystem}, using
     * double-checked locking on a {@code volatile} field.
     *
     * @return the cached (or newly created) FileSystem, never {@code null}
     * @throws IllegalStateException if the FileSystem cannot be obtained; the
     *         underlying cause is preserved instead of being swallowed
     */
    public FileSystem getHdfsFileSystem() {
        FileSystem fs = fileSystem; // single volatile read on the fast path
        if (fs == null) {
            synchronized (SparkUtil.class) {
                fs = fileSystem; // re-check under the lock
                if (fs == null) {
                    String defaultFs = StrUtil.format("hdfs://{}:{}", SPARK_HOST, SPARK_PORT);
                    try {
                        Configuration configuration = new Configuration();
                        configuration.set("fs.defaultFS", defaultFs);
                        fs = FileSystem.get(configuration);
                        fileSystem = fs; // publish only the fully constructed instance
                    } catch (Exception e) {
                        // Fail fast with the cause attached rather than printing the
                        // stack trace and returning null (which previously pushed an
                        // NPE to some distant caller).
                        throw new IllegalStateException("Failed to obtain HDFS FileSystem for " + defaultFs, e);
                    }
                }
            }
        }
        return fs;
    }

}
