package cn.huq.day03

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}


object FlatMapValuesDemo {

  /** Demonstrates `RDD.flatMapValues`: for a pair RDD, it transforms each value
    * into a collection and flattens the result while keeping the original key,
    * e.g. ("scala", "1,2,3") => ("scala", 1), ("scala", 2), ("scala", 3).
    */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("FlatMapValuesDemo").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      val arr: Array[(String, String)] = Array(("scala", "1,2,3"), ("hadoop", "1,2,3"), ("flink", "2,3,4"), ("spark", "3"))
      val rdd: RDD[(String, String)] = sc.parallelize(arr, 3)

      // Split each comma-separated value and pair every number with its key.
      // Equivalent hand-rolled form:
      //   rdd.flatMap { case (word, nums) => nums.split(",").map(n => (word, n.toInt)) }
      val flatMapRDD: RDD[(String, Int)] = rdd.flatMapValues(_.split(",").map(_.toInt))

      println(flatMapRDD.collect().toBuffer)
    } finally {
      // Always release the SparkContext; without this the local Spark runtime
      // (and its threads) keeps the JVM alive after main returns.
      sc.stop()
    }
  }

}
