package com.atguigu.bigdata.test

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author: yqb
 * @Date: 2022/6/5 21:00 
 * @Description: Demo
 * @Version: 1.0
 * */
object Need04 {
    /**
     * Flattens List(List(1,2), 3, List(4,5)) into 1, 2, 3, 4, 5 using flatMap.
     *
     * Each element is pattern-matched: nested lists are returned as-is (flatMap
     * splices their elements into the result), while scalar elements are wrapped
     * in a singleton List so they survive the one-level flattening.
     *
     * @param args command-line arguments (unused)
     */
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("Need04")
        val context = new SparkContext(conf)

        try {
            // RDD[Any] because the source list mixes Int and List[Int] elements.
            val mixed: RDD[Any] = context.makeRDD(
                List(List(1, 2), 3, List(4, 5))
            )

            // NOTE: the List[_] match is type-erased at runtime; it only checks
            // "is a List", which is exactly what this flattening exercise needs.
            val flattened: RDD[Any] = mixed.flatMap {
                case list: List[_] => list
                case other         => List(other)
            }

            flattened.collect().foreach(println)
        } finally {
            // Always release the SparkContext, even if the job above throws.
            context.stop()
        }
    }

}
