package spark_core.operate_transform.singlevalue;

import java.util.Arrays;
import java.util.Objects;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

/**
 * Demo of the {@code flatMap} operator: unlike {@code map}, the lambda returns an
 * {@link java.util.Iterator}, so each input line can expand into zero or more output elements.
 *
 * @author shihb
 * @date 2020/1/7 12:04
 */
public class FlatMapDemo {

  public static void main(String[] args) {
    // Local deployment mode using all available cores.
    SparkConf sparkConf = new SparkConf().setMaster("local[*]").setAppName("mark rdd");
    // Java-friendly Spark context wrapping the Scala SparkContext.
    JavaSparkContext sc = new JavaSparkContext(sparkConf);
    try {
      // Fail fast with a clear message if the "words" classpath resource is missing;
      // a bare getResource(...).getPath() would throw an uninformative NPE instead.
      String wordsPath =
          Objects.requireNonNull(
                  FlatMapDemo.class.getClassLoader().getResource("words"),
                  "classpath resource 'words' not found")
              .getPath();
      // Read the file as one RDD element per line, split into 2 partitions.
      JavaRDD<String> fileRdd = sc.textFile(wordsPath, 2);
      // Split each line on spaces; flatMap flattens the per-line word lists into one RDD.
      JavaRDD<String> wordRdd = fileRdd.flatMap(s -> Arrays.asList(s.split(" ")).iterator());

      wordRdd.collect().forEach(System.out::println);
    } finally {
      // Always release the Spark context, even if the job above throws.
      sc.stop();
    }
  }
}
