package spark01;

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.storage.StorageLevel;

import scala.Tuple2;

public class WordCountByJava8 {

	/**
	 * Demo entry point: reads this source file as text, prints each line's
	 * length and the total length, then runs a word count over the same
	 * lines and prints each word with its occurrence count, most frequent
	 * first. Runs on a local single-threaded Spark master.
	 */
	public static void main(String[] args) {
		SparkConf conf = new SparkConf()//
				.setAppName("WordCounter")//
				.setMaster("local");

		String fileName = "src/main/java/spark01/WordCountByJava8.java";

		// try-with-resources: JavaSparkContext is Closeable, so the context
		// is released even if a job throws (the original bare sc.close()
		// was skipped on any earlier exception).
		try (JavaSparkContext sc = new JavaSparkContext(conf)) {
			JavaRDD<String> lines = sc.textFile(fileName, 1);

			// Java 8 lambdas replace the original anonymous
			// Function/Function2 classes, matching the class name.
			JavaRDD<Integer> lineLengths = lines.map(s -> {
				System.out.println("每行长度" + s.length());
				return s.length();
			});

			// FIX: persist BEFORE the first action. The original called
			// persist() after reduce(), by which point the RDD had already
			// been computed and discarded, so nothing was cached for reuse.
			lineLengths.persist(StorageLevel.MEMORY_ONLY());

			int totalLength = lineLengths.reduce((a, b) -> a + b);
			System.out.println(totalLength);

			// Word count (previously dead commented-out code): split each
			// line on spaces, sum counts per word, sort descending by
			// count, and print one line per word.
			lines.flatMap(line -> Arrays.asList(line.split(" ")).iterator())//
					.mapToPair(word -> new Tuple2<>(word, 1))//
					.reduceByKey((e, acc) -> e + acc, 1)//
					.map(e -> new Tuple2<>(e._1, e._2))//
					.sortBy(e -> e._2, false, 1)//
					.foreach(e -> System.out.println("【" + e._1 + "】出现了" + e._2 + "次"));
		}
	}
}