package com.shujia.core

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates the RDD `flatMap` transformation.
  *
  * `flatMap` first maps each input element, then flattens the results:
  * one input record may produce zero or more output records.
  */
object Demo3Flatmap {
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setMaster("local") // run locally in a single JVM
      .setAppName("flatMap")

    val sc = new SparkContext(conf)

    try {
      // Create an RDD by parallelizing a local Scala collection.
      val list = List("java,scala", "spark,python,hadoop")
      val rdd = sc.parallelize(list)

      // Split each comma-separated line into words; flatMap flattens the
      // per-line Array[String] into a single RDD of words. No .toList is
      // needed — flatMap accepts the array returned by split directly.
      val wordsRDD = rdd.flatMap(line => line.split(","))

      wordsRDD.foreach(println)
    } finally {
      // Always stop the SparkContext so the application releases its
      // resources (executors, UI port, temp dirs) even if the job fails.
      sc.stop()
    }
  }

}
