package com.guchenbo.spark.demo;

import com.google.common.collect.Lists;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;
import java.util.List;

/**
 * @author guchenbo
 */
public class WordCount {

  /**
   * Runs two small Spark demos in local mode: summing an in-memory list of
   * numeric strings, and a classic word count over an external text file.
   *
   * @param args unused command-line arguments
   */
  public static void main(String[] args) {
    // Initialize Spark; "local" runs everything in this JVM (demo only).
    SparkConf conf = new SparkConf().setAppName("demo-java").setMaster("local");
    // try-with-resources: JavaSparkContext is Closeable, so the context is
    // stopped even if a job throws — the original leaked it on any failure.
    try (JavaSparkContext sc = new JavaSparkContext(conf)) {

      // Demo 1: RDD from an in-memory collection — parse each string and sum.
      List<String> list = Lists.newArrayList("1", "2", "3", "4", "5");
      JavaRDD<String> rdd = sc.parallelize(list);
      int sum = rdd.map(Integer::valueOf).reduce(Integer::sum);
      System.out.println(sum);

      // Demo 2: RDD from an external file "words" — split lines on single
      // spaces, pair each word with 1, then sum the counts per word.
      JavaRDD<String> text = sc.textFile("words");
      text = text.flatMap(s -> Arrays.asList(s.split(" ")).iterator());
      JavaPairRDD<String, Integer> rdd1 = text.mapToPair(s -> new Tuple2<>(s, 1))
          .reduceByKey(Integer::sum);
      // collect() pulls all results to the driver — fine for a tiny demo,
      // unsafe for large datasets.
      List<Tuple2<String, Integer>> list1 = rdd1.collect();
      System.out.println(list1);
    }
  }

}
