package cn.darksoul3.spark.operator

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object FlatMap {
  def main(args: Array[String]): Unit = {
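    // Run in local mode using all available cores; the app name is what
    // shows up in the Spark UI.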
    val conf = new SparkConf().setAppName("flat map").setMaster("local[*]")
    val sc = new SparkContext(conf)

    val strings = Array("hadoop spark", "java scala")

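    // Turn the local array into a distributed dataset, one element per line.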
    val lines: RDD[String] = sc.parallelize(strings)

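    // flatMap applies the function to every element and flattens the results:
    // each line splits into words, so two lines become four words.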
    val flatWord: RDD[String] = lines.flatMap(x => x.split("\\s+"))

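    // collect() materializes the whole RDD on the driver; fine for a small
    // demo, but avoid it on large datasets.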
    val words = flatWord.collect()

    println(words.toBuffer)
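
    // For contrast, a minimal sketch (the `nested` name is illustrative, not
    // part of the original): map applies the same split but keeps the per-line
    // nesting, yielding RDD[Array[String]] instead of a flattened RDD[String].
    val nested: RDD[Array[String]] = lines.map(x => x.split("\\s+"))
    println(nested.collect().map(_.toBuffer).toBuffer)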

    // Stop the context so the application releases its resources and exits cleanly.
    sc.stop()
  }
}
