package com.lhc.rdddemo

import com.lhc.DBUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ListBuffer

object MapAndMapPartitiionApp {

  /**
   * Demo comparing per-record (`map`) vs per-partition (`mapPartitions`)
   * resource acquisition: `myMap` opens one DB connection per element, while
   * `myMapPartition` opens one per partition — far cheaper for expensive
   * resources such as connections.
   */
  def main(args: Array[String]): Unit = {

    val session = SparkSession.builder()
      .appName("MapAndMapPartitiionApp")
      .master("local[2]")
      .getOrCreate()

    val sparkContext = session.sparkContext

    // Build 100 sample records "stu:1" .. "stu:100" (no mutable buffer needed).
    val students = (1 to 100).map("stu:" + _)
    val stuRDD = sparkContext.parallelize(students)

    //stuRDD.foreach(println)
    //myMap(stuRDD)
    println(stuRDD.partitions.size)
    println(stuRDD.getNumPartitions)
    myMapPartition(stuRDD)

    // Fix: release Spark resources when the demo finishes.
    session.stop()
  }

  /**
   * Processes the RDD with ONE connection per partition.
   *
   * NOTE(fix): the original returned the connection to the pool before the
   * lazy partition iterator was consumed, so any business logic using the
   * connection lazily would have seen it already released. We materialize the
   * partition while the connection is held and release it in `finally`.
   *
   * @param rdd input records; each element is printed downstream
   */
  def myMapPartition(rdd: RDD[String]): Unit = {
    rdd.mapPartitions { partition =>
      val connection = DBUtils.getConnection()
      try {
        println("connection:" + connection)
        // TODO business logic using `connection` goes here, while it is open.
        // Materialize so the work happens before the connection is returned.
        partition.toList.iterator
      } finally {
        DBUtils.retrunConnection(connection)
      }
    }.foreach(println)
  }

  /**
   * Processes the RDD with one connection PER ELEMENT (the anti-pattern this
   * demo contrasts against `myMapPartition`).
   *
   * NOTE(fix): the original lambda ended with `retrunConnection(...)`, so the
   * mapped RDD contained `Unit` and `foreach(println)` printed `()` for every
   * record. We now return the element itself, and release the connection in
   * `finally` so it is returned even if the business logic throws.
   *
   * @param rdd input records; each element is printed downstream
   */
  def myMap(rdd: RDD[String]): Unit = {
    rdd.map { x =>
      val connection = DBUtils.getConnection()
      try {
        println("connection:" + connection)
        // TODO business logic using `connection` goes here.
        x
      } finally {
        DBUtils.retrunConnection(connection)
      }
    }.foreach(println)
  }
}
