package analyse.sparksql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * 2020-08-06
 * Reads MySQL data with Spark SQL over JDBC.
 * @author yangyang
 * Verified working.
 */
object SparkToMysql {
  def main(args: Array[String]): Unit = {

    // Build the SparkConf. The original chained two setMaster calls
    // ("local" then "local[2]"); only the last one takes effect, so we
    // keep a single explicit setting of two local worker threads.
    val conf = new SparkConf().setAppName("SQL").setMaster("local[2]")
    val spark = SparkSession.builder().config(conf).getOrCreate()

    // Ensure the session is closed even if the JDBC read or query fails.
    try {
      // JDBC URL; the charset parameters avoid mojibake for non-ASCII data.
      // NOTE(review): no database name in the URL — the table is fully
      // qualified below as "data.user_info" instead.
      val url = "jdbc:mysql://localhost:3306/?useUnicode=true&characterEncoding=utf8"

      // Load table data.user_info via the Spark JDBC data source.
      // NOTE(review): credentials are hard-coded in source; move them to
      // configuration or environment variables for anything beyond a demo.
      val jdbcDF = spark.read.format("jdbc").options(
        Map(
          "url" -> url,
          "user" -> "root",
          "password" -> "Yang@1219",
          "dbtable" -> "data.user_info")).load()

      // Connection properties for the (currently commented-out) JDBC write
      // below; kept so the write can be re-enabled without changes.
      val prop = new java.util.Properties
      prop.setProperty("user", "root")
      prop.setProperty("password", "Yang@1219")

      // Register the DataFrame as a temp view so it is queryable via SQL.
      jdbcDF.createOrReplaceTempView("user_info")
      // Sample the first 10 rows of the table.
      val bank = spark.sql("select * from user_info limit 10 ")
      // bank.write.mode(SaveMode.Overwrite).jdbc(url, "data.Accept_NumberSQL", prop)

      bank.show()
    } finally {
      spark.close()
    }
  }
}
