package spark_java;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.regex.Pattern;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;

/**
 * Interactive Spark word-count driver.
 *
 * <p>Reads file paths from stdin in a loop; for each path it runs a Spark job
 * that splits every line on single spaces and prints the total word count.
 * Typing {@code exit} (or closing stdin) stops the Spark context and exits.
 */
public class App
{
    /** Single-space splitter, compiled once rather than per line. */
    private static final Pattern SPACE = Pattern.compile(" ");

    /**
     * Entry point: starts a local Spark context, then loops reading file
     * paths from stdin and printing each file's word count.
     *
     * @param args unused
     * @throws Exception on I/O errors reading stdin or running the Spark job
     */
    public static void main(String[] args) throws Exception {
        SparkConf conf = new SparkConf().setMaster("local").setAppName(
                "JavaWordCount");
        JavaSparkContext ctx = new JavaSparkContext(conf);

        // try-with-resources closes the stdin reader; explicit UTF-8 avoids
        // depending on the platform-default charset (pre-Java 18 behavior).
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
                System.in, StandardCharsets.UTF_8))) {
            System.out.println("Enter FilePath:");
            System.out.println("e.g. D:/systemInfo.log");

            while (true) {
                System.out.print("> "); // prompt on the same line, not println
                String filePath = reader.readLine();
                // readLine() returns null at EOF; treat it like "exit" so we
                // don't NPE on equalsIgnoreCase.
                if (filePath == null || filePath.equalsIgnoreCase("exit")) {
                    // BUG FIX: the original called ctx.stop() here but never
                    // left the loop, so the next iteration used a stopped
                    // context. Break out; ctx.stop() runs in the finally below.
                    break;
                }
                if (filePath.isEmpty()) {
                    continue; // ignore blank input instead of textFile("")
                }
                JavaRDD<String> lines = ctx.textFile(filePath, 1);
                // Spark 1.x FlatMapFunction contract: call() returns Iterable.
                JavaRDD<String> words = lines
                        .flatMap(new FlatMapFunction<String, String>() {
                            @Override
                            public Iterable<String> call(String s) {
                                return Arrays.asList(SPACE.split(s));
                            }
                        });
                System.out.println("wordCount:" + words.count());
            }
        } finally {
            // Always release Spark resources, even if a job throws.
            ctx.stop();
        }
    }
}
