package com.wu.spark

import org.apache.spark.sql.SparkSession

/**
  * spark
  */
/**
  * Demo: read a MySQL table through the Spark JDBC data source, then write it
  * out as parquet, json, and a managed Spark SQL table, and read the parquet
  * copy back for display.
  */
object Sparksqmysql {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("Sparksqmysql")
      .master("local[2]")
      .getOrCreate()

    try {
      runJdbcDataSource(spark)
    } finally {
      // Always release the session, even if the job fails part-way through.
      spark.close()
    }
  }

  /**
    * Loads the `students` table over JDBC and writes it to several sinks.
    *
    * @param spark the active session used for both reads and writes
    */
  private def runJdbcDataSource(spark: SparkSession): Unit = {
    // Base directory for all file outputs (keeps the three paths consistent).
    val outputBase = "E://ideaWorkSpace2018.4.23//Spark-sql//Sources"

    // SECURITY NOTE(review): credentials are hard-coded; in real code load them
    // from configuration or a secrets store instead of the source file.
    val df = spark.read
      .format("jdbc")
      .option("url", "jdbc:mysql://119.29.254.170:3306/Test")
      .option("user", "root")
      .option("password", "wudl5566")
      .option("dbtable", "students")
      .load()

    // Build the projection once instead of repeating the select three times.
    val students = df.select("id", "name", "age", "telephone")

    // mode("overwrite") so re-running the job does not fail with
    // "path already exists" — consistent with the saveAsTable call below.
    students.write.mode("overwrite").format("parquet").save(s"$outputBase//mysqlFile")
    students.write.mode("overwrite").format("json").save(s"$outputBase//mysqlFile2")

    students.write.mode("overwrite").saveAsTable("user_student")
    val df2 = spark.sql("select * from user_student")
    df2.show()
    df2.write.mode("overwrite").format("json").save(s"$outputBase//mysqlFile3")

    println("-----------------------------------------------------------------------------------")

    // Read the parquet output back (parquet is the default format for load)
    // and print it without truncating long values.
    val df3 = spark.read.load(s"$outputBase//mysqlFile")
    df3.show(false)
  }

  // Backward-compatible alias for the original (misspelled) method name,
  // in case anything else references it.
  private def runjdbcDataSource(spark: SparkSession): Unit = runJdbcDataSource(spark)
}
