package main.java.demo;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Iterator;

/**
 * WordCountMain
 *
 * @author zhangyimin
 * @date 2018-09-26 下午5:25
 * @version 1.0
 */
public class WordCountMain {

    // Scala equivalent of this job, for reference:
    //   sc.textFile(input)
    //     .flatMap(_.split(" "))
    //     .map((_, 1))
    //     .reduceByKey(_ + _)
    //     .saveAsTextFile(output)

    /**
     * Spark word-count driver.
     *
     * <p>Reads a text file, splits each line on single spaces, counts the
     * occurrences of each word, and writes the (word, count) pairs as text.
     *
     * @param args args[0] = input path (e.g. an HDFS URI),
     *             args[1] = output directory (must not already exist)
     */
    public static void main(String[] args) {
        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException when paths are missing.
        if (args.length < 2) {
            System.err.println("Usage: WordCountMain <inputPath> <outputPath>");
            System.exit(1);
        }

        SparkConf conf = new SparkConf();
        conf.setAppName("myWC");
        // To run in local mode, call conf.setMaster("local");
        // leave the master unset when submitting to a cluster.

        JavaSparkContext sparkContext = new JavaSparkContext(conf);

        // One partition is requested explicitly; Spark may still use more
        // for large inputs depending on the input format's splits.
        JavaRDD<String> lines = sparkContext.textFile(args[0], 1);

        // Split each line into words. Split exactly once; the Spark 2.x
        // FlatMapFunction contract returns an Iterator.
        JavaRDD<String> words = lines.flatMap(
                line -> Arrays.asList(line.split(" ")).iterator());

        // Map each word to a (word, 1) pair, e.g. "I" -> ("I", 1).
        JavaPairRDD<String, Integer> pairs = words.mapToPair(
                word -> new Tuple2<>(word, 1));

        // Sum the counts per word: reduceByKey(_ + _).
        JavaPairRDD<String, Integer> counts = pairs.reduceByKey(Integer::sum);

        // saveAsTextFile is an action: it triggers the computation and
        // writes the results to the output directory.
        counts.saveAsTextFile(args[1]);

        // Release cluster resources.
        sparkContext.stop();
    }

}
