package com.whoami.sparkjava;

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

import scala.Tuple2;

/**
 * Word-count Spark job: reads a text file, splits each line on whitespace,
 * counts the occurrences of every token, and writes the (word, count) pairs
 * to a timestamped output directory.
 *
 * @author tzp
 * @since 2020/11/29
 */
public class FirstJob {

    /** Input file read when no command-line argument is supplied. */
    private static final String DEFAULT_INPUT =
            "/Users/tzp/Documents/private/cnm/CodeAccumulate/id-magic/imeigenerator/src/main/scala/com/whoami/idmagic/apprank/Sample1.scala";

    /** Base directory under which the timestamped output directory is created. */
    private static final String DEFAULT_OUTPUT_BASE =
            "/Users/tzp/Documents/private/cnm/CodeAccumulate/id-magic/sparkjava/target/";

    /**
     * Filesystem-safe timestamp for the output directory name.
     * LocalDateTime.toString() contains ':' characters, which are illegal in
     * paths on some filesystems; this pattern avoids them.
     */
    private static final DateTimeFormatter OUTPUT_STAMP =
            DateTimeFormatter.ofPattern("yyyyMMdd-HHmmss");

    /**
     * Entry point.
     *
     * @param args optional overrides for the previously hard-coded paths:
     *             {@code args[0]} = input file, {@code args[1]} = output base
     *             directory. Defaults preserve the original behavior.
     */
    public static void main(String[] args) {
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
        String outputBase = args.length > 1 ? args[1] : DEFAULT_OUTPUT_BASE;

        SparkConf conf = new SparkConf().setMaster("local").setAppName("My App");

        // JavaSparkContext implements Closeable; the original never closed it.
        // try-with-resources guarantees the context is stopped on every path.
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            JavaRDD<String> lines = sc.textFile(inputPath);

            // Split on single whitespace characters ("\\s", not "\\s+") to
            // preserve the original tokenization (runs of spaces yield empty
            // tokens, exactly as before).
            JavaRDD<String> words = lines
                    .flatMap(s -> Arrays.asList(s.split("\\s")).iterator());

            // NOTE(review): the original also built an unused JavaPairRDD via
            // groupBy with a Function that returned null for every key; that
            // dead code (and a commented-out accumulator call) was removed.

            // Classic word count: emit (word, 1) pairs, then sum per key.
            JavaPairRDD<String, Integer> counts = words
                    .mapToPair(s -> new Tuple2<>(s, 1))
                    .reduceByKey(Integer::sum);

            counts.saveAsTextFile(outputBase + LocalDateTime.now().format(OUTPUT_STAMP));
        }
    }
}
