package chapter6
import org.apache.spark.sql.{DataFrame, SparkSession}
import java.util.Properties

object SparkReadMySQL {

  /** Reads a MySQL table into a DataFrame using Spark's JDBC data source.
    *
    * The load is lazy: no rows are fetched until an action (e.g. `show`,
    * `count`) is invoked on the returned DataFrame.
    *
    * @param sparkSession active SparkSession used to build the reader
    * @param url          JDBC connection URL, e.g. "jdbc:mysql://host:3306/db"
    * @param tableName    table name (or parenthesized subquery with alias) to read
    * @param user         database user name
    * @param password     database password
    * @return DataFrame backed by the JDBC source
    */
  def readMySQLTable(sparkSession: SparkSession, url: String, tableName: String, user: String, password: String): DataFrame = {
    // Fix: the original ignored every parameter and used hard-coded
    // connection details; the caller-supplied values are now used.
    sparkSession.read
      .format("jdbc")
      .option("url", url)
      .option("dbtable", tableName)
      .option("user", user)
      .option("password", password)
      .load()
  }

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("MySQLSparkConnector")
      .master("local[*]")
      .getOrCreate()

    // MySQL connection settings ("spark" is the database name in the URL)
    val jdbcUrl = "jdbc:mysql://192.168.152.121:3306/spark"
    val jdbcUser = "root"
    val jdbcPassword = "123456"
    val jdbcTable = "student"

    // Read the MySQL table into a DataFrame
    val df = readMySQLTable(spark, jdbcUrl, jdbcTable, jdbcUser, jdbcPassword)

    // Trigger the read and display the rows; aggregation/analysis on df
    // would go here.
    df.show()

    // Stop the SparkSession
    spark.stop()
  }
}
