package rdds.spark.examples;

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import scala.Tuple2;


/**
 * Counts the occurrences of each word in a text file and prints the
 * words sorted by frequency in descending order.
 *
 * @author domky
 *
 */
public class WordCount {

	public static void main(String[] args) {

		SparkConf sparkConf = new SparkConf()//
				.setAppName("WordCounter")//
				.setMaster("local");

		String fileName = "src/main/java/rdds/spark/examples/WordCount.java";

		// try-with-resources guarantees the context is stopped even if a job
		// throws; the original explicit close() was skipped on failure.
		try (JavaSparkContext sc = new JavaSparkContext(sparkConf)) {

			JavaRDD<String> lines = sc.textFile(fileName, 1);

			// Split on runs of whitespace (not a single space) and drop empty
			// tokens, otherwise consecutive spaces yield "" counted as a word
			// and tab-separated words are never split.
			JavaRDD<String> words = lines
					.flatMap(line -> Arrays.asList(line.split("\\s+")).iterator())
					.filter(word -> !word.isEmpty());

			JavaPairRDD<String, Integer> wordCounts = words
					.mapToPair(word -> new Tuple2<>(word, 1))
					.reduceByKey(Integer::sum, 1);

			// JavaPairRDD has no sortBy, so convert to a plain RDD of tuples
			// before sorting by count (descending) with a single partition.
			wordCounts.map(e -> new Tuple2<>(e._1, e._2))
					.sortBy(e -> e._2, false, 1)
					.foreach(e -> {
						System.out.println("【" + e._1 + "】出现了" + e._2 + "次");
					});
		}
	}

}
