package com.bigdata.core.transformations

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * flatMapValues
 * (K, V) -> (K, V)
 * Operates on a key-value RDD: for each key, a single value can be
 * expanded into multiple values, yielding one (K, V) pair per produced value.
 */
object Demo24_flatMapValues {

  /**
   * Demo entry point: builds a small (name, age) pair RDD, appends a token to
   * each value with `mapValues`, then splits each value back into multiple
   * (key, token) pairs with `flatMapValues`, printing both stages.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("flatMapValues")
      .setMaster("local")
    val sc = new SparkContext(conf)
    // Use the canonical upper-case level name documented by SparkContext.setLogLevel.
    sc.setLogLevel("ERROR")

    try {
      // Sample (name, age) pairs ("zhangsna" typo fixed to "zhangsan").
      val infos: RDD[(String, String)] = sc.parallelize(
        List(("zhangsan", "18"), ("lisi", "20"), ("wangwu", "30")))

      // mapValues: (K, V) -> (K, V') — transforms only the value, key untouched.
      val mvRdd: RDD[(String, String)] = infos.mapValues(v => s"$v spark")
      mvRdd.foreach(println)

      println("=" * 100)

      // flatMapValues: one value expands into many — each whitespace-separated
      // token becomes its own (key, token) pair.
      val fmRdd: RDD[(String, String)] = mvRdd.flatMapValues(_.split(" "))
      fmRdd.foreach(println)
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}
