package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo8FlatMap {
  /**
    * Demonstrates the `flatMap` transformation on an RDD of comma-separated
    * strings: each input line is split into words and the resulting sequences
    * are flattened into a single RDD of words, which is then printed.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      // App name now matches the operator being demonstrated
      // (was "filter" — a copy-paste leftover from another demo).
      .setAppName("flatMap")
      .setMaster("local")

    val sc = new SparkContext(conf)

    try {
      val rdd1: RDD[String] = sc.parallelize(List("java,spark,scala,hadoop", "hadoop,hive,hbase"))

      /**
        * flatMap operator: passes each element of the RDD to the function one
        * at a time; the function must return a sequence. flatMap then flattens
        * the returned sequences into a single new RDD.
        */
      val rdd2: RDD[String] = rdd1.flatMap(_.split(","))

      rdd2.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above fails.
      sc.stop()
    }
  }

}
