package com.king.spark.sql

import java.util.Properties

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._

import scala.collection.mutable


/**
 * Demonstrates three equivalent ways to read a MySQL table through the
 * Spark SQL JDBC data source, and one way to append rows back:
 *   1. `format("jdbc")` with individual `.option(...)` calls,
 *   2. `format("jdbc")` with an options `Map`,
 *   3. the `spark.read.jdbc(url, table, properties)` shortcut.
 *
 * NOTE(review): credentials and the JDBC URL are hard-coded in three places
 * below — fine for a tutorial, but move them to configuration before any
 * real use.
 */
object SparkSQL08_read_write {
  def main(args: Array[String]): Unit = {
    // The new entry point for Spark SQL: SparkSession.

    val sparkConf: SparkConf = new SparkConf().setAppName("SparkSQL").setMaster("local[*]")

    val spark: SparkSession =
          SparkSession.builder()
            //.appName("SparkSQL")
            //.master("local[*]")
            .config(sparkConf)
            .getOrCreate()

    // Never reassigned — keep it a val.
    val sc = spark.sparkContext
    // Import the implicits whether or not every one is used (needed for .toDF below).
    import spark.implicits._

    // --- Read style 1: format("jdbc") + per-key options ---------------------
    val dfMysql: DataFrame = spark.read
      .format("jdbc")
      .option("url", "jdbc:mysql://hadoop102:3306/bigdata210701")
      //.option("driver","com.mysql.jdbc.Driver")
      .option("user", "root")
      .option("password", "123456")
      .option("dbtable", "user")
      .load()

    dfMysql.show()

    println("-------------------------------------")

    // --- Read style 2: same options supplied as a single Map ----------------
    val map: mutable.Map[String, String] = mutable.Map[String,String]()
    map.put("url","jdbc:mysql://hadoop102:3306/bigdata210701")
    map.put("user","root")
    map.put("password","123456")
    map.put("dbtable","user")

    val dfMysql2: DataFrame = spark.read
      .format("jdbc")
      .options(map)
      .load()
    dfMysql2.show()

    println("-------------------------------------")

    // --- Read style 3: the jdbc(url, table, properties) shortcut ------------
    val properties = new Properties()
    properties.put("user","root")
    properties.put("password","123456")
    val dfMysql3: DataFrame = spark.read.jdbc("jdbc:mysql://hadoop102:3306/bigdata210701","user",properties)
    dfMysql3.show()


    // --- Write back to MySQL: append two rows into the `user` table ---------
    val rdd: RDD[(Int, String, Int)] = sc.makeRDD(
      List(
        (1002, "lisi", 40),
        (1003, "wangwu", 50)
      )
    )

    // toDF requires spark.implicits._ (imported above); column names must
    // match the target table's schema for the append to succeed.
    val df: DataFrame = rdd.toDF("id","name","age")
    df.write.mode(SaveMode.Append).jdbc("jdbc:mysql://hadoop102:3306/bigdata210701","user",properties)


    spark.stop()

  }
}
