package core.sql.多数据源;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public class Spark02_JSON {
    /**
     * Demonstrates Spark SQL JSON I/O: reads a JSON file into a {@code Dataset<Row>},
     * prints it, then writes it back out as JSON.
     *
     * <p>Note: the original code set {@code .option("header","true")} on both the read
     * and the write; {@code header} (and {@code sep}) are CSV-only options that the
     * JSON source ignores, so they have been removed. The original in-file comment
     * also incorrectly described the input as CSV — it is JSON.
     */
    public static void main(String[] args) {
        SparkSession sparkSession = SparkSession
                .builder()
                .master("local[*]") // run locally with as many worker threads as cores
                .appName("Spark02_JSON")
                .getOrCreate();

        try {
            // Read JSON data; schema is inferred from the file contents.
            Dataset<Row> rowDataset = sparkSession.read()
                    .json("data/user_sql.json");

            rowDataset.show();

            // Write back as JSON; ErrorIfExists fails fast if data/output already exists.
            rowDataset.write()
                    .mode(SaveMode.ErrorIfExists)
                    .json("data/output");

        } catch (Exception e) {
            e.printStackTrace(); // report the failure but still fall through to cleanup
        } finally {
            sparkSession.close(); // release Spark resources even on failure
        }
    }
}
