package cn.doitedu.day01

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object T07_FlatMapDemo {

  /**
   * Demonstrates `RDD.flatMap`: each input line is split into words and the
   * per-line word arrays are flattened into a single `RDD[String]`.
   *
   * Run locally; prints the flattened words to stdout.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("FlatMapDemo")
      .setMaster("local[5]") // setMaster must be removed when submitting to a cluster

    val sc = new SparkContext(conf)

    try {
      val arr = Array(
        "spark hive flink",
        "hive hive flink",
        "hive spark flink",
        "hive spark flink"
      )
      // Distribute the local array across 2 partitions.
      val rdd1: RDD[String] = sc.makeRDD(arr, 2)
      // flatMap: one line in -> many words out, flattened into one RDD.
      val rdd2: RDD[String] = rdd1.flatMap(_.split(" "))

      // collect() brings the results back to the driver; print them so the
      // demo actually shows its output (the original discarded this value).
      rdd2.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above fails.
      sc.stop()
    }

  }

}
