package com.spark.zhou.demo;

import com.sun.javafx.scene.control.skin.VirtualFlow;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * @Description: Spark RDD word-count demo — reads a local text file, counts
 *               word frequencies, and prints them sorted by count descending.
 * @Author: ZhOu
 * @Date: 2018/5/10
 */
public class WordCount {

    /**
     * Runs a word-count job: reads a text file, splits lines on single spaces,
     * counts occurrences per word, and prints "word\tcount" sorted by count
     * descending.
     *
     * @param args optional; args[0] overrides the input file path
     *             (defaults to the original hard-coded "D:/test.txt")
     */
    public static void main(String[] args) {
        // Backward-compatible generalization: allow the input path on the CLI.
        String inputPath = args.length > 0 ? args[0] : "D:/test.txt";

        SparkConf sparkConf = new SparkConf()
                .setAppName("WordCount")
                .set("spark.executor.memory", "450m")
                // BUG FIX: the correct config key is "spark.files" — a bare
                // "files" key is stored in the conf but ignored by Spark, so
                // yarn-site.xml was never actually distributed.
                .set("spark.files", "D:\\WorkSpace\\gitee\\HadoopPro\\spark-pro\\src\\main\\resources\\yarn-site.xml")
                .setMaster("local")
                .setJars(new String[]{"D:\\WorkSpace\\gitee\\HadoopPro\\spark-pro\\target\\spark-pro-1.0-SNAPSHOT.jar"});

        // JavaSparkContext implements Closeable; try-with-resources guarantees
        // the context is stopped even when a job throws (the original skipped
        // context.stop() on any exception).
        try (JavaSparkContext context = new JavaSparkContext(sparkConf)) {
            context.textFile(inputPath)
                    // one record per whitespace-separated token
                    .flatMap(line -> Arrays.asList(line.split(" ")).iterator())
                    // (word, 1) pairs
                    .mapToPair(word -> new Tuple2<>(word, 1))
                    // sum the 1s per word
                    .reduceByKey(Integer::sum)
                    // swap to (count, word) so sortByKey orders by frequency
                    .mapToPair(pair -> new Tuple2<>(pair._2, pair._1))
                    .sortByKey(false)
                    // swap back to (word, count) for printing
                    .mapToPair(pair -> new Tuple2<>(pair._2, pair._1))
                    .foreach(pair -> System.out.println(pair._1 + "\t" + pair._2));
        }
    }
}
