package chapter04

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Minimal Spark RDD demo: parallelizes an array of comma-separated strings,
 * flat-maps each string into individual words, and prints every word.
 *
 * Note: `foreach(println)` runs on the executors; in `local[*]` mode the
 * output appears on this console, but element order is not deterministic.
 */
object Demo {

  def main(args: Array[String]): Unit = {

    val config: SparkConf = new SparkConf().setMaster("local[*]").setAppName("WordCount1")

    val sc: SparkContext = new SparkContext(config)

    // Stop the context even if the job throws, so resources are released.
    try {
      // Input data: each element is a comma-separated list of words.
      val arr = Array(
        "spark,hive,flink",
        "hive,hive,flink",
        "hive,spark,flink",
        "hive,spark,flink"
      )
      // Create an RDD with 2 partitions.
      val rdd1 = sc.makeRDD(arr, 2)
      // Flatten: split each line on "," so the RDD holds single words.
      val rdd2 = rdd1.flatMap(_.split(","))

      rdd2.foreach(println)
    } finally {
      // Bug fix: the original never stopped the SparkContext, leaking it.
      sc.stop()
    }
  }

}
