package com.shujia.sql

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * Example: reading a CSV file of students and writing it to MySQL via the
 * Spark JDBC data source. Demonstrates both the read and write sides of the
 * JDBC connector (the read side is left commented out for reference).
 */
object Code06JDBC {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = GetSpark.spark("orc")

    // Reading from MySQL through the JDBC data source (reference example):
    //    spark
    //      .read
    //      .format("jdbc")
    //      .option("url","jdbc:mysql://master:3306/stu?useSSL=false")
    //      .option("user","root")
    //      .option("password","123456")
    //      .option("dbtable","stu")
    //      .load()
    //      .show()

    // Load the students file as CSV with an explicit schema (no header row).
    val stuDataFrame: DataFrame = spark.read
      .format("csv")
      .option("sep", ",")
      .schema("id String,name String,age int,gender String,clazz String")
      .load("spark_code/data/students.txt")

    stuDataFrame.show(10)

    stuDataFrame
      .write
      .format("jdbc")
      // NOTE: the correct MySQL Connector/J properties are
      // useUnicode=true&characterEncoding=utf-8 ("characterUnicode" is not a
      // recognized parameter and was silently ignored).
      .option("url", "jdbc:mysql://master:3306/stu?useSSL=false&useUnicode=true&characterEncoding=utf-8")
      .option("user", "root")
      .option("password", "123456")
      .option("dbtable", "students")
      // SaveMode.Overwrite alone would DROP and recreate the target table,
      // losing its original schema (indexes, column types, etc.).
      // With "truncate" = true, Spark issues TRUNCATE TABLE instead of DROP,
      // so the existing table structure is preserved. The truncate option
      // only takes effect when SaveMode is Overwrite.
      .option("truncate", "true")
      .mode(SaveMode.Overwrite)
      .save()

    // Release the SparkSession's resources once the job is done.
    spark.stop()
  }
}
