package com.doit.day01

import org.apache.spark.rdd.{JdbcRDD, RDD}
import org.apache.spark.{SparkConf, SparkContext}

import java.sql.{DriverManager, ResultSet}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
object Demo05MakeRDDJDBC {

  /**
   * Demonstrates building an RDD directly from a JDBC source via [[JdbcRDD]].
   *
   * JdbcRDD constructor arguments, in order:
   *   1. sc            - the SparkContext
   *   2. getConnection - zero-arg function producing a fresh JDBC connection
   *                      (invoked once per partition; JdbcRDD closes it)
   *   3. sql           - query text; must contain exactly two '?' placeholders
   *                      that JdbcRDD fills with each partition's bounds
   *   4. lowerBound    - inclusive lower bound of the partition key range
   *   5. upperBound    - inclusive upper bound of the partition key range
   *   6. numPartitions - parallelism (here 1)
   *   7. mapRow        - function mapping each ResultSet row to a record
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = SparkContext.getOrCreate(conf)

    try {
      // NOTE(review): credentials are hard-coded for the demo; externalize
      // (config / environment) in real code.
      val getConnection = () =>
        DriverManager.getConnection("jdbc:mysql://localhost:3306/doe47", "root", "root")

      // The two '?' placeholders receive each partition's [lower, upper] uid bounds.
      val sql =
        """
          |select
          |uid ,
          |username ,
          |email ,
          |password
          |from
          |ums_user
          |where  uid >= ?  and  uid <= ?
          |""".stripMargin

      // Row mapper: turns one ResultSet row into a 4-tuple.
      val processResult = (rs: ResultSet) => {
        val uid = rs.getInt("uid")
        val userName = rs.getString("username")
        val email = rs.getString("email")
        val password = rs.getString("password")
        (uid, userName, email, password)
      }

      val res: JdbcRDD[(Int, String, String, String)] =
        new JdbcRDD(sc, getConnection, sql, 0, 100, 1, processResult)

      res.foreach(println)
    } finally {
      // Bug fix: the original never stopped the SparkContext, leaving the
      // local Spark runtime (UI, scheduler threads) alive at program exit.
      sc.stop()
    }
  }
}
