package com.ada.spark.datasource

import java.util.Properties

import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}

object JdbcTest2 {
    /**
     * Demo entry point: reads the `rddtable` table from MySQL via JDBC,
     * prints it, and writes it back out as `rddtable2` (overwriting any
     * existing table). Uses the Properties-based `jdbc` overloads.
     */
    def main(args: Array[String]): Unit = {
        // Spark configuration: app name plus a local master using all cores (demo setup)
        val conf: SparkConf = new SparkConf().setAppName("JdbcTest2").setMaster("local[*]")

        // Create the SparkSession entry point for Spark SQL
        val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

        try {
            // JDBC connection credentials.
            // NOTE(review): credentials are hard-coded for this demo; in real code
            // load them from configuration or the environment instead.
            val connectionProperties = new Properties()
            connectionProperties.put("user", "root")
            connectionProperties.put("password", "888888")

            // Load data from MySQL, variant two: the (url, table, Properties) overload
            val jdbcDF = spark.read
                .jdbc("jdbc:mysql://hadoop121:3306/rdd", "rddtable", connectionProperties)

            jdbcDF.show()

            // Write the DataFrame back to MySQL, variant two; Overwrite replaces
            // `rddtable2` if it already exists
            jdbcDF.write
                .mode(SaveMode.Overwrite)
                .jdbc("jdbc:mysql://hadoop121:3306/rdd", "rddtable2", connectionProperties)
        } finally {
            // Bug fix: the original never stopped the SparkSession, leaking the
            // underlying SparkContext (threads, UI server, temp dirs) — ensure
            // cleanup even if the JDBC read/write throws.
            spark.stop()
        }
    }
}
