package com.shujia.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo4FlatMap {

  /**
    * Demonstrates RDD `flatMap`:
    * `map` produces exactly one output element per input element, while
    * `flatMap` produces zero or more — the per-element collections are
    * flattened into a single output RDD.
    */
  def main(args: Array[String]): Unit = {

    // Spark configuration object.
    // App name fixed to "flatMap" (was "map" — copy-paste leftover from the map demo).
    val conf = new SparkConf().setAppName("flatMap").setMaster("local")

    // SparkContext: the entry point for building and running RDD jobs.
    val sc = new SparkContext(conf)

    try {
      val list = List("java,shujia,python", "hadoop,spark,kafka")

      // Convert the local Scala collection into an RDD.
      val rdd1: RDD[String] = sc.parallelize(list)

      /**
        * flatMap is a lazy transformation; conceptually two steps:
        * 1. map:  split each line on "," -> Array[String]
        * 2. flat: merge the per-line arrays into one RDD of words
        */
      val flatMapRDD: RDD[String] = rdd1.flatMap(_.split(","))

      // foreach is an action: it triggers the job and prints each word
      // (output appears on the executor; with local master, in this console).
      flatMapRDD.foreach(println)
    } finally {
      // Release cluster resources — the original leaked the SparkContext.
      sc.stop()
    }
  }
}
