package com.atguigu.sql

import java.util.Properties

import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}

object TestSpark_JDBC {

  /**
   * Demonstrates two equivalent ways of reading from and writing to MySQL
   * via Spark SQL's JDBC data source:
   *   1. the generic `format("jdbc")` + `option(...)` API, and
   *   2. the `DataFrameReader.jdbc` / `DataFrameWriter.jdbc` shorthand
   *      taking a `java.util.Properties` object.
   */
  def main(args: Array[String]): Unit = {

    // Build the SparkSession directly; it creates and manages its own
    // SparkContext (exposed as spark.sparkContext), so there is no need
    // to instantiate a SparkContext manually beforehand.
    val conf: SparkConf = new SparkConf().setAppName("JDBC").setMaster("local[*]")
    val spark: SparkSession = SparkSession
      .builder()
      .config(conf)
      .getOrCreate()
    spark.sparkContext.setLogLevel("ERROR")

    // NOTE(review): connection details and credentials are hard-coded for the
    // demo; in real code they belong in configuration / environment variables.
    val url   = "jdbc:mysql://192.168.8.62:3306/sec_center"
    val table = "sys_role"

    // Load data from MySQL — approach 1: generic format("jdbc") with options.
    val jdbcDF = spark.read
      .format("jdbc")
      .option("url", url)
      .option("dbtable", table)
      .option("user", "root")
      .option("password", "1234")
      .load()

    // Write data to MySQL — approach 1.
    // BUG FIX: the default SaveMode is ErrorIfExists, which throws an
    // AnalysisException here because the target table already exists
    // (we just read from it above). Append to the existing table instead.
    jdbcDF.write
      .format("jdbc")
      .option("url", url)
      .option("dbtable", table)
      .option("user", "root")
      .option("password", "1234")
      .mode("append")
      .save()

    // Load data from MySQL — approach 2: read.jdbc with a Properties object.
    val connectionProperties = new Properties()
    connectionProperties.put("user", "root")
    connectionProperties.put("password", "1234")
    val jdbcDF2 = spark.read
      .jdbc(url, table, connectionProperties)

    // Write data to MySQL — approach 2 (same existing-table caveat as above,
    // hence the explicit append mode).
    jdbcDF2.write
      .mode("append")
      .jdbc(url, table, connectionProperties)

    // Release Spark resources (was missing — the app would otherwise rely on
    // JVM shutdown to tear down the context).
    spark.stop()
  }
}
