package spark.rddKnowledge

import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession

/**
  * @author pinker on 2018/6/10
  */
/**
  * Small demo of basic RDD transformations (`map`, `flatMap`,
  * `mapPartitionsWithIndex`) and lineage inspection via `toDebugString`,
  * run against a local Spark master.
  */
object rddAPI {
  // Scratch directory for Spark shuffle/spill files.
  val localPath = "D:/spark/temp/"
  // Warehouse directory for Spark SQL managed tables.
  val hivePath = "D:/spark/hive/"

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("rddName")
      .master("local[*]")
      .config("spark.local.dir", localPath)
      .config("spark.sql.warehouse.dir", hivePath)
      .getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("ERROR")
    try {
      //    rddMethod1(sc)
    } finally {
      // Always release the local Spark context and its temp resources,
      // even if a demo method throws.
      spark.stop()
    }
  }

  /**
    * Reads this source file as a text RDD, applies `map` and `flatMap`,
    * prints each RDD's lineage, then dumps each partition's contents
    * tagged with its partition index.
    *
    * @param sc the active SparkContext
    */
  private def rddMethod1(sc: SparkContext) = {
    val rdd = sc.textFile("src/main/scala/spark/rddKnowledge/rddAPI.scala", 4)
    println(rdd.toDebugString)
    val mapRDD = rdd.map(str => str + "&")
    println(mapRDD.toDebugString)
    val flatMapRDD = mapRDD.flatMap(str => List(str, "hs"))
    println(flatMapRDD.toDebugString)
    val mapPartitionRDD = flatMapRDD.mapPartitionsWithIndex((partitionIndex, iter) => {
      // BUG FIX: the original printed `iter.mkString("-")` and then returned
      // `iter`. `mkString` exhausts the iterator, so the returned iterator was
      // empty and `collect()` produced no data. Materialize the partition
      // first, print the snapshot, then hand back a fresh iterator.
      val elements = iter.toList
      println(partitionIndex + "||" + elements.mkString("-"))
      elements.iterator
    })
    mapPartitionRDD.collect()
  }
}
