package com.ada.spark.mysql

import java.sql.DriverManager

import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * 支持通过Java JDBC访问关系型数据库。需要通过JdbcRDD进行
  */
/**
  * Demonstrates reading from a relational database over plain Java JDBC
  * using Spark's [[org.apache.spark.rdd.JdbcRDD]].
  *
  * The SQL must contain exactly two `?` placeholders, which JdbcRDD binds
  * to the partition's lower/upper bound of the key range.
  */
object MysqlRDD {

    def main(args: Array[String]): Unit = {

        // Spark configuration: local mode, using all available cores.
        val conf = new SparkConf().setMaster("local[*]").setAppName("MysqlRDD")
        val sc: SparkContext = new SparkContext(conf)

        // JDBC connection parameters.
        // NOTE(review): credentials are hard-coded; consider externalizing
        // them (args/config) before using this outside of a demo.
        val jdbcDriver = "com.mysql.jdbc.Driver"
        val jdbcUrl = "jdbc:mysql://hadoop121:3306/rddoperator"
        val user = "root"
        val password = "888888"

        // Connection factory: runs on the executors; JdbcRDD closes the
        // connection itself when the partition is done.
        val getConnection = () => {
            Class.forName(jdbcDriver)
            DriverManager.getConnection(jdbcUrl, user, password)
        }

        // Read rows with id in [1, 10] as a single partition, mapping each
        // ResultSet row to an (id, name) pair.
        val jdbcRdd = new JdbcRDD(
            sc,
            getConnection,
            "select * from rddtable where id>=? and id<=?;",
            1,
            10,
            1,
            rs => (rs.getInt(1), rs.getString(2))
        )

        // Trigger the job and show the results.
        println(jdbcRdd.count())
        jdbcRdd.foreach(println)

        sc.stop()
    }
}