package com.study.spark.scala.rdd

import java.sql.DriverManager

import org.apache.spark.rdd.{JdbcRDD, RDD}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Jdbc RDD使用示例
  * @author stephen
  * @create 2019-03-15 15:18
  * @since 1.0.0
  */
object JdbcRDDDemo {

  /**
    * Connection factory handed to [[org.apache.spark.rdd.JdbcRDD]].
    *
    * It must be a `() => Connection` thunk rather than a live connection:
    * the function is serialized to the executors and invoked once per
    * partition; JdbcRDD closes the connection it obtains after reading.
    */
  val conn = () => {
    DriverManager.getConnection("jdbc:mysql://localhost:3306/test", "root", "123456")
  }

  /**
    * Reads rows 1..10 of the `logs` table through a partitioned [[JdbcRDD]]
    * and prints them locally.
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("Jdbc RDD Demo")
    val sc = new SparkContext(conf)

    try {
      // JdbcRDD splits [lowerBound, upperBound] = [1, 10] into numPartitions
      // INCLUSIVE sub-ranges (here [1,5] and [6,10]), so the query must bound
      // the second placeholder with `<=`. Using `id < ?` would silently drop
      // the last row of every partition (ids 5 and 10 in this setup).
      val jdbcRDD: RDD[(Int, String, Int)] = new JdbcRDD(
        sc,
        conn,
        "select * from logs where id >= ? and id <= ? ",
        1L,  // lowerBound: minimum id, inclusive
        10L, // upperBound: maximum id, inclusive
        2,   // numPartitions: the id range is split evenly across partitions
        rs => {
          // Map each JDBC result-set row to a tuple; columns are 1-based.
          val id: Int = rs.getInt(1)
          val name: String = rs.getString(2)
          val age: Int = rs.getInt(3)
          (id, name, age)
        }
      )
      val result = jdbcRDD.collect()

      println(result.toBuffer)
    } finally {
      // Always release the local SparkContext and its resources.
      sc.stop()
    }
  }
}
