package cn.whuc.spark.operator

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates two equivalent ways to flatten an RDD that mixes scalars and
 * lists, e.g. List(List(1, 2), 3, List(4, 5)) => 1, 2, 3, 4, 5:
 *   A) map (normalize every element to a List) followed by flatMap, and
 *   B) a single flatMap with a pattern match (the idiomatic one-step form).
 */
object Demo_flatMap {
  def main(args: Array[String]): Unit = {

    // 1. Create the SparkContext (local mode, all available cores).
    val sc: SparkContext = new SparkContext(
      new SparkConf()
        .setMaster("local[*]")
        // FIX: the app name was a single blank space, which renders as an
        // empty title in the Spark UI — use a meaningful name instead.
        .setAppName("Demo_flatMap")
    )

    // 2. Build a heterogeneous RDD: two nested lists and one bare scalar.
    val rdd1: RDD[Any] = sc.makeRDD(List(List(1, 2), 3, List(4, 5)))

    // Approach A: first normalize every element to a List with map,
    // then flatten the lists with flatMap.
    val value1: RDD[Any] = rdd1.map {
      case item: List[_] => item       // already a list — keep as is
      case item          => List(item) // scalar — wrap in a one-element list
    }.flatMap { e =>
      // NOTE: this println executes on the executors (inside the task), not
      // on the driver — in local[*] mode it still shows on the console.
      println("~~~~")
      e.toList
    }

    // Approach B: flatMap with a pattern match does both steps at once.
    val value: RDD[Any] = rdd1.flatMap {
      case item: List[_] => item
      case item          => List(item)
    }

    value1.collect().foreach(println)
    // FIX: `value` was computed but never used (dead code) — collect and
    // print it too so both approaches are actually demonstrated.
    value.collect().foreach(println)

    // 3. Stop the context to release local executor resources.
    sc.stop()
  }
}
