package com.atguigu.sparkcore.rdd.file.mysql

import com.atguigu.sparkcore.util.{MySparkContextUtil, MysqlUtil}
import org.apache.spark.rdd.JdbcRDD

/**
 * Description: reads data from MySQL via JdbcRDD.
 * Author: 剧情再美终是戏
 * Mail: 13286520398@163.com
 * Date: created 2020/1/9 9:20
 * Modified by:
 * Version: 1.0
 */
object RMysql {

  /**
   * Entry point: loads rows from the MySQL `user` table through a [[JdbcRDD]],
   * tags each record with the index of the partition that produced it,
   * and prints the result.
   *
   * @param args forwarded to the project SparkContext factory
   */
  def main(args: Array[String]): Unit = {

    // Obtain a SparkContext from the project helper.
    val sc = MySparkContextUtil.get(args)

    // Bounded query: JdbcRDD fills the two '?' placeholders with the
    // lower/upper key bounds computed for each partition.
    val query = "select * from user where ? <= id and id <= ?"

    // Scan ids in [1, 10] across 2 partitions; map each row to (id, name).
    // JdbcRDD closes the connection itself when a partition completes.
    val userRdd = new JdbcRDD(
      sc,
      () => MysqlUtil.getConnection("/rdd"),
      query,
      1,
      10,
      2,
      rs => (rs.getInt(1), rs.getString(2))
    )

    // Pair every record with its partition index, then print.
    val indexed = userRdd.mapPartitionsWithIndex { (partIdx, rows) =>
      rows.map(row => (partIdx, row))
    }
    indexed.foreach(println)

    // Release SparkContext resources.
    MySparkContextUtil.close(sc)

  }

}
