package sparkSQL.study

import java.util.Properties

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object JdbcConnect {

  /**
   * Demonstrates three equivalent ways of reading MySQL tables through
   * Spark's JDBC data source:
   *   1. `load()` with individual `option()` calls,
   *   2. `load()` with all options supplied at once via a `Map`,
   *   3. the `jdbc()` convenience method with a `java.util.Properties` bag.
   *
   * Each variant prints the resulting DataFrame with `show`. Requires a
   * MySQL server at 127.0.0.1:3306 with a `market` database and the
   * MySQL Connector/J driver on the classpath.
   */
  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("jdbc...")
    val sparkSession = SparkSession.builder().config(sparkConf).getOrCreate()

    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x
    // class; with Connector/J 8+ it is "com.mysql.cj.jdbc.Driver" — confirm
    // which connector version is on the classpath before upgrading.
    try {
      // 1. Read via load() with individual option() calls.
      sparkSession.read
        .format("jdbc")
        .option("url", "jdbc:mysql://127.0.0.1:3306/market")
        .option("driver", "com.mysql.jdbc.Driver")
        .option("user", "root")
        .option("password", "123456")
        .option("dbtable", "goods")
        .load().show

      // 2. Read via load() passing all options at once; here the credentials
      //    are embedded in the JDBC URL instead of separate options.
      sparkSession.read.format("jdbc")
        .options(Map("url" -> "jdbc:mysql://127.0.0.1:3306/market?user=root&password=123456",
                     "dbtable" -> "goods", "driver" -> "com.mysql.jdbc.Driver"))
        .load().show

      // 3. Read via the jdbc() convenience method with a Properties bag.
      val properties = new Properties()
      properties.setProperty("user", "root")
      properties.setProperty("password", "123456")
      val dataFrame = sparkSession.read.jdbc("jdbc:mysql://127.0.0.1:3306/market", "orders", properties)
      dataFrame.show
    } finally {
      // Always release the SparkSession, even when one of the reads throws
      // (e.g. the database is unreachable) — otherwise the local Spark
      // context leaks.
      sparkSession.stop()
    }

  }

}
