package SparkExercises;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class SparkLianXi {
    /**
     * Exercise: given a plain text file words.txt where every line is an
     * English sentence, write a Spark program that counts the distinct
     * words (split on single spaces; each token counts as a word) that
     * contain the substring "bb", printing the words and their total to
     * the console.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        SparkConf sparkConf = new SparkConf().setAppName("LianXi").setMaster("local");
        // try-with-resources: JavaSparkContext implements Closeable, so the
        // context is stopped even if a job fails (the original leaked it).
        try (JavaSparkContext sc = new JavaSparkContext(sparkConf)) {
            JavaRDD<String> lines = sc.textFile("./data/words.txt");

            // Split each line into words on single spaces.
            JavaRDD<String> words = lines.flatMap(
                    (FlatMapFunction<String, String>) s -> Arrays.asList(s.split(" ")).iterator());

            // Deduplicate first, then keep only words containing "bb".
            JavaRDD<String> bbWords = words.distinct()
                    .filter((Function<String, Boolean>) s -> s.contains("bb"));

            // Collect once and reuse the local list for both printing and the
            // count — calling count() on the RDD would launch a second Spark
            // job that re-reads and re-processes the whole file.
            List<String> result = bbWords.collect();
            for (String word : result) {
                System.out.println(word);
            }
            System.out.println(result.size());
        }
    }
}
