package Demo2

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.HashMap

/**
  * Created on 2017/11/20.
  * Scala example of using Spark SQL to read a MySQL table over JDBC.
  */
object SparkSQL_ScalaJDBC {
  /**
    * Entry point: reads the `int_list` table from a local MySQL database
    * through Spark's JDBC data source, registers it as a temporary table,
    * and prints its contents with a SQL query.
    *
    * Fixes over the original: the SparkContext is now always stopped via
    * try/finally (it previously leaked), and the JDBC options use an
    * immutable Map literal instead of a mutable HashMap mutated with put().
    */
  def main(args: Array[String]): Unit = {
    // "spark.testing.memory" is raised so the local driver passes Spark's
    // minimum-memory sanity check even with a small JVM heap.
    val conf = new SparkConf()
      .setMaster("local[3]")
      .setAppName("SparkSQL_ScalaJDBC")
      .set("spark.testing.memory", "2147480000")
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)

      // JDBC connection options for the `int_list` table in the local
      // `cppds` MySQL database. Values are unchanged from the original.
      val jdbcOptions = Map(
        "url"      -> "jdbc:mysql://localhost:3306/cppds",
        "dbtable"  -> "int_list",
        "user"     -> "root",
        "password" -> "root"
      )

      val intListDF = sqlContext.read.format("jdbc").options(jdbcOptions).load()

      // NOTE(review): registerTempTable is deprecated since Spark 2.0 —
      // kept for compatibility with this file's SQLContext-era API; on
      // Spark 2.x+ prefer createOrReplaceTempView.
      intListDF.registerTempTable("int_list")
      sqlContext.sql("select * from int_list").show()
    } finally {
      // Release cluster/driver resources even if the JDBC read fails.
      sc.stop()
    }
  }
}
