package com.bw.sparksql1.job3

import java.util.Properties

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * Reads the `customers` table from a MySQL database over JDBC
  * and prints its row count.
  */
object Job10 {
    def main(args: Array[String]): Unit = {
      // Silence Spark's verbose INFO logging; surface errors only.
      Logger.getLogger("org").setLevel(Level.ERROR)
      val spark = SparkSession
        .builder()
        .master("local")
        .appName("Spark SQL basic example")
        .getOrCreate()
      try {
        // serverTimezone is required by MySQL Connector/J 8.x when the
        // server's zone name is not recognized by the JVM.
        val url = "jdbc:mysql://192.168.134.130:3306/shopping_db?serverTimezone=GMT"
        // Credentials only: the URL is passed separately to spark.read.jdbc,
        // so duplicating it in the Properties is unnecessary.
        val properties = new Properties()
        properties.setProperty("user", "root")
        properties.setProperty("password", "hadoop")
        val df: DataFrame = spark.read.jdbc(url, "customers", properties)
        println(df.count())
      } finally {
        // Always release the SparkSession, even if the JDBC read fails.
        spark.stop()
      }
    }
}