package org.example.test;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @author lwc
 * @description: TODO
 * @date 2023/12/5 11:21
 */
/**
 * One-shot loader: reads a local CSV file into a Spark DataFrame and appends
 * its rows into an existing Hive table via {@code INSERT INTO ... SELECT}.
 *
 * <p>Usage: {@code ReadFile [csvPath] [hiveTableName]} — both arguments are
 * optional and fall back to the defaults below.
 *
 * @author lwc
 * @date 2023/12/5 11:21
 */
public class ReadFile {

    /** Default CSV file path, used when no CLI argument is supplied. */
    private static final String DEFAULT_CSV_PATH =
            "/Users/lwc/Documents/临时/公司/许继/光伏电站/南昌数据中台/csv表数据/e_mp_read_day.csv";

    /** Default fully-qualified Hive target table (database.table). */
    private static final String DEFAULT_HIVE_TABLE = "ods_amr20_hbase.e_mp_read_day";

    /**
     * Entry point. Builds a local Spark session with Hive support, loads the
     * CSV, shows a sample, and inserts all rows into the Hive table.
     *
     * @param args optional: {@code args[0]} = CSV file path,
     *             {@code args[1]} = Hive table name
     */
    public static void main(String[] args) {
        // Optional CLI overrides; zero-arg invocation keeps the original behavior.
        String csvFilePath = args.length > 0 ? args[0] : DEFAULT_CSV_PATH;
        String hiveTableName = args.length > 1 ? args[1] : DEFAULT_HIVE_TABLE;

        // Configure Spark; local[*] runs on all cores of this machine.
        SparkConf sparkConf = new SparkConf()
                .setAppName("Spark HBase Example")
                .setMaster("local[*]");
        SparkSession spark = SparkSession.builder()
                .appName("HBase to MySQL Sync")
                .config(sparkConf)
                .enableHiveSupport()
                .getOrCreate();
        try {
            // Read the CSV file into a DataFrame.
            // NOTE(review): no header/schema options are set, so Spark infers
            // string columns named _c0, _c1, ...; the INSERT below then relies
            // on positional matching with the Hive table's column order —
            // confirm the CSV has no header row and the column order matches.
            Dataset<Row> csvDataFrame = spark.read().csv(csvFilePath);

            // Register as a temp view so it can be referenced from Spark SQL.
            csvDataFrame.createOrReplaceTempView("temp_table");
            csvDataFrame.show();

            // Append every row into the Hive table. The table name comes from a
            // trusted constant or operator-supplied CLI argument, not end-user
            // input, so string concatenation is acceptable here.
            String hiveInsertQuery =
                    "INSERT INTO TABLE " + hiveTableName + " SELECT * FROM temp_table";
            spark.sql(hiveInsertQuery);
        } finally {
            // Always release the Spark context, even when the load fails;
            // previously a failure before this line leaked the session.
            spark.stop();
        }
    }
}
