package com.galeno.sparksql02

import org.apache.spark.sql.{SaveMode, SparkSession}

import java.util.Properties

/**
 * @Title: ShuchuDataSet
 * @Description: Demo that builds a small DataFrame and writes it out to a
 *               MySQL table via JDBC (file-based sinks kept as commented examples).
 * @author galeno
 * @date 2021/9/6 18:01
 */
object ShuchuDataSet {

  /**
   * Builds a small in-memory DataFrame (id, name, age) and appends it to the
   * `teachers` table of a MySQL database via JDBC.
   *
   * Connection settings (url, user, password) are read from a
   * `mysql.properties` file that must be present on the classpath.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local")
      .appName("兵者,国之大事,死生之地,存亡之道,不可不察也!")
      .getOrCreate()

    try {
      val rdd = spark.sparkContext.makeRDD(Seq(
        (1, "aa", 18),
        (2, "bb", 19),
        (3, "cc", 20),
        (4, "dd", 21)
      ))
      import spark.implicits._
      // Name the tuple columns so the JDBC sink produces a usable schema.
      val df = rdd.toDF("id", "name", "age")

      // Alternative file-based sinks, kept for reference:
      //    df.show()
      //    df.write.parquet("data/dataparquet")
      //    df.selectExpr("concat_ws(',',id,name,age)").write.text("data/datatext")
      //    df.write.csv("data/datacsv")

      // Load JDBC connection settings from the classpath; fail fast with a
      // clear message instead of an NPE if the file is missing, and always
      // close the stream (the original leaked it).
      val properties = new Properties()
      val stream = this.getClass.getClassLoader.getResourceAsStream("mysql.properties")
      require(stream != null, "mysql.properties not found on the classpath")
      try properties.load(stream) finally stream.close()

      val url = properties.getProperty("url")
      val user = properties.getProperty("user")
      // SECURITY: never print the password — log only non-sensitive settings.
      println(s"Connecting to $url as user '$user'")

      // Append the rows to the `teachers` table. Other save modes shown below.
      df.write.mode(SaveMode.Append).jdbc(url, "teachers", properties)
      //    df.write.mode(SaveMode.Overwrite).jdbc(url,"teachers",properties)
      //    df.write.mode(SaveMode.ErrorIfExists).jdbc(url,"teachers",properties)
      //    df.write.mode(SaveMode.Ignore).jdbc(url,"teachers",properties)
    } finally {
      // Always release the local Spark context, even if the write fails.
      spark.stop()
    }
  }

}
