package spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Demo of Spark SQL DataFrame sources: reading a MySQL table over JDBC
 * and reading a local JSON file.
 *
 * @author sjw
 * @date 2020/11/19 13:48
 */
object DateFrame {

  def main(args: Array[String]): Unit = {
    // jsonFile()
    connectMySQL()
  }

  /**
   * Reads the `user` table from a MySQL database via the JDBC data source
   * and prints its contents to stdout.
   *
   * NOTE(review): connection URL and credentials are hard-coded — fine for a
   * demo, but move them to configuration/environment for any real deployment.
   */
  def connectMySQL(): Unit = {
    val spark = initSparkSession("MySQL", "local")
    try {
      val jdbcDataFrame = spark.read.format("jdbc")
        .option("url", "jdbc:mysql://192.168.10.120:3306/app?useUnicode=true&characterEncoding=UTF8&useSSL=false&serverTimezone=UTC")
        .option("driver", "com.mysql.cj.jdbc.Driver")
        .option("dbtable", "user")
        .option("user", "root")
        .option("password", "123456")
        .load()

      jdbcDataFrame.show()
    } finally {
      // Always release the session's resources, even if the read fails.
      spark.stop()
    }
  }

  /**
   * Loads a JSON file into a DataFrame, prints its inferred schema and rows,
   * then shows a projection of `name` and `age + 1`.
   */
  def jsonFile(): Unit = {
    val spark = initSparkSession("json", "local")
    try {
      val jsonDataFrame = spark.read.json("G:\\SparkData\\json\\user.json")
      jsonDataFrame.printSchema()
      jsonDataFrame.show()
      jsonDataFrame.select(jsonDataFrame("name"), jsonDataFrame("age") + 1).show()
    } finally {
      // Always release the session's resources, even if the read fails.
      spark.stop()
    }
  }

  /**
   * Builds (or reuses) a [[SparkSession]] with the given settings.
   *
   * @param appName name shown in the Spark UI
   * @param url     Spark master URL (e.g. "local", "local[*]", "spark://host:7077")
   * @return the shared SparkSession for this JVM
   */
  def initSparkSession(appName: String, url: String): SparkSession = {
    val sparkConf = new SparkConf().setAppName(appName).setMaster(url)
    SparkSession.builder().config(sparkConf).getOrCreate()
  }

}
