package cn.jly.bigdata.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{Dependency, SparkConf, SparkContext}

/**
 * @author lanyangji
 * @date 2019/11/26 10:58
 */
object Spark04_lineage {

  /**
   * Demonstrates RDD lineage and dependency inspection on a word-count
   * pipeline: `textFile` -> `flatMap`/`map` (narrow dependencies) ->
   * `reduceByKey` (shuffle dependency).
   *
   * Reads text files from the relative directory "input", prints each
   * RDD's lineage (`toDebugString`) and its direct `dependencies`, then
   * prints the word counts.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val sc: SparkContext = new SparkContext(new SparkConf().setMaster("local[*]").setAppName("test-02"))

    // Ensure the SparkContext is released even if any stage below throws.
    try {
      val textRdd: RDD[String] = sc.textFile("input")

      val wordAndOneRdd: RDD[(String, Int)] = textRdd.flatMap(_.split(" ")).map((_, 1))

      // Print the lineage of wordAndOneRdd
      val wordAndOne_lineage: String = wordAndOneRdd.toDebugString
      println(s"wordAndOne_lineage = $wordAndOne_lineage")
      println()
      // Print the direct dependencies of wordAndOneRdd (narrow: OneToOneDependency)
      val wordAndOneRdd_dependencies: Seq[Dependency[_]] = wordAndOneRdd.dependencies
      wordAndOneRdd_dependencies.foreach(println)
      println()

      val wordAndCountRdd: RDD[(String, Int)] = wordAndOneRdd.reduceByKey(_ + _)

      // Print the lineage of wordAndCountRdd (includes the shuffle stage)
      println(s"wordAndCount_lineage = ${wordAndCountRdd.toDebugString}")
      println()
      // Print the dependencies of wordAndCountRdd (wide: ShuffleDependency)
      val wordAndCountRdd_dependencies: Seq[Dependency[_]] = wordAndCountRdd.dependencies
      wordAndCountRdd_dependencies.foreach(println)
      println()

      // Trigger the job and print the (word, count) results.
      wordAndCountRdd.foreach(println)
    } finally {
      // Fix: the original never stopped the context, leaking local Spark resources.
      sc.stop()
    }
  }
}
