package core.sql.多数据源;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.util.Properties;

/**
 * Copies the {@code t_ds_user} table from the {@code dolphinscheduler} MySQL
 * database into {@code t_ds_user_test} in the {@code test} database on the
 * same server, using Spark's JDBC data source.
 */
public class Spark04_MYSQL {

    /**
     * JDBC URL template; the single %s placeholder is the database name.
     * Both reads and writes target the same MySQL server, so the host/port
     * and connection options are kept in one place.
     */
    private static final String JDBC_URL_TEMPLATE =
            "jdbc:mysql://125.72.54.118:8100/%s?useSSL=false&serverTimezone=Asia/Shanghai";

    public static void main(String[] args) {
        // SECURITY NOTE(review): credentials are hard-coded in source. They
        // should be supplied via environment variables or a config file and
        // rotated, since this file exposes them to anyone with repo access.
        Properties connectionProperties = new Properties();
        connectionProperties.setProperty("user", "root");
        connectionProperties.setProperty("password", "mediway-cdc123");

        // SparkSession implements Closeable; try-with-resources guarantees
        // the session is stopped even if the read or write throws.
        try (SparkSession spark = SparkSession
                .builder()
                .master("local[*]")
                .appName("Spark04_MYSQL")
                .getOrCreate()) {

            // Read the full t_ds_user table over JDBC.
            // (The original comment said "CSV" — this is a JDBC table read.)
            Dataset<Row> dataSet = spark.read()
                    .jdbc(String.format(JDBC_URL_TEMPLATE, "dolphinscheduler"),
                            "t_ds_user",
                            connectionProperties);

            dataSet.show();

            // Write the rows into the test database. The default save mode is
            // ErrorIfExists, so this throws if t_ds_user_test already exists —
            // same behavior as the original code.
            dataSet.write()
                    .jdbc(String.format(JDBC_URL_TEMPLATE, "test"),
                            "t_ds_user_test",
                            connectionProperties);

        } catch (Exception e) {
            // Boundary catch for a standalone driver: report and fall through
            // so the JVM exits after cleanup.
            e.printStackTrace();
        }
    }
}
