package com.dxf.bigdata.D05_spark_again

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object FlatMap {

  /**
   * Demonstrates `RDD.flatMap`:
   *  1. flattening a heterogeneous nested list (`List(List(1, 3), List(2, 4, List(5, 6)))`)
   *     with two one-level flatten passes, and
   *  2. flattening a homogeneous `List[List[Int]]` with `identity`.
   *
   * Runs locally (`local[*]`) and prints the flattened elements to stdout.
   */
  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("app")
    val sc = new SparkContext(sparkConf)

    try {
      // The second outer element contains a nested sub-list, so a single
      // flatten pass is not enough to fully flatten this structure.
      val list: List[List[Any]] = List(List(1, 3), List(2, 4, List(5, 6)))

      // Each pass flattens exactly one level: lists are spliced in as-is,
      // while plain elements are wrapped in a singleton list so flatMap
      // re-emits them unchanged. After two passes: 1, 3, 2, 4, 5, 6.
      val value = sc.makeRDD(list)
        .flatMap {
          case inner: List[Any] => inner
          case other            => List(other)
        }
        .flatMap {
          case inner: List[Any] => inner
          case other            => List(other)
        }

      value.collect().foreach(println)

      // A homogeneous List[List[Int]] flattens in one pass with identity.
      val array2: List[List[Int]] = List(List(1, 2, 3, 5))
      val listRdd: RDD[List[Int]] = sc.makeRDD(array2)

      val value1: RDD[Int] = listRdd.flatMap(identity)
      value1.collect().foreach(println)
    } finally {
      // Always release the SparkContext so the local runtime shuts down
      // even if a job above throws.
      sc.stop()
    }
  }

}
