package rdd

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates three ways of constructing a pair RDD (`RDD[(K, V)]`):
 *   1. loading lines from a text file and mapping each word to `(word, 1)`,
 *   2. parallelizing an in-memory collection and mapping to `(word, 1)`,
 *   3. deriving keys from existing elements with `keyBy`.
 *
 * Runs locally on all available cores; no actions are triggered, so the
 * transformations above are only defined, never executed.
 */
object RDD_PairRDDCreate {
  def main(args: Array[String]): Unit = {
    // Local-mode configuration: use every available core.
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("RDD_PairRDDCreate")
    val sc = new SparkContext(conf)

    // Way 1: load from a file, split lines on whitespace, pair each word with 1.
    val linesRDD = sc.textFile("data/word.txt")
    val wordCountPairs: RDD[(String, Int)] =
      linesRDD.flatMap(_.split("\\s+")).map((_, 1))

    // Way 2: parallelize an in-memory sequence, then map to (word, 1).
    val parallelWords: RDD[String] = sc.parallelize(Seq("Hadoop", "Spark", "Hive", "Spark"))
    val parallelPairs: RDD[(String, Int)] = parallelWords.map((_, 1))

    // Way 3: keyBy derives the key (string length) from each element,
    // yielding (length, color) pairs across 2 partitions.
    val colors = sc.parallelize(List("black", "blue", "white", "green", "grey"), 2)
    val colorsByLength: RDD[(Int, String)] = colors.keyBy(_.length)

    sc.stop()
  }
}
