package day02

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
  * Demonstrates reading from and writing to MySQL via Spark SQL's JDBC
  * data source: load a table, query it in both DSL and SQL style, then
  * append rows from a local JSON file into another MySQL table.
  *
  * @author wsl
  */
object MySQL {
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSQLTest")
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    try {
      // --- Read path: load the `wsl` table over JDBC ---
      // NOTE(review): credentials are hard-coded in source. Move them to
      // configuration / a secrets store before this leaves a sandbox.
      val df: DataFrame = spark.read.format("jdbc")
        .option("url", "jdbc:mysql://172.31.0.81:3306/bigdata")
        .option("driver", "com.mysql.jdbc.Driver")
        .option("user", "matrix")
        .option("password", "7xNetworks@C0M")
        .option("dbtable", "wsl")
        .load()

      // show() prints 20 rows by default; request up to 500 here.
      df.show(500)

      // 1. DSL style query
      df.select("id", "name").where("id <= 100").show()

      // 2. SQL style: register a temp view, then query it with plain SQL.
      df.createOrReplaceTempView("wsl")
      spark.sql("select id, name from wsl").show()

      // --- Write path: read JSON and append it to a MySQL table ---
      val df2: DataFrame = spark.read.json("sparksql/input/user.json")
      df2.write
        .format("jdbc")
        .option("url", "jdbc:mysql://hadoop102:3306/test")
        // Fix: the driver option was missing on the write path (the read
        // path sets it); without it the save can fail with
        // "No suitable driver" depending on the classpath.
        .option("driver", "com.mysql.jdbc.Driver")
        .option("user", "root")
        .option("password", "123456")
        .option("dbtable", "user_info2")
        .mode(SaveMode.Append) // default mode is ErrorIfExists
        .save()
    } finally {
      // Fix: stop the session even when a read/write throws, so the local
      // Spark context and its threads are always released.
      spark.stop()
    }
  }

}
