package com.atguigu.bigdata.spark.core.rdd.serial;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

import java.io.Serializable;
import java.util.Arrays;

/**
 * Demonstrates Java closure serialization in Spark Core: objects referenced
 * from an RDD operation's closure are serialized and shipped to executors,
 * so the {@link Search} helper must implement {@link Serializable}.
 */
public class Spark01_RDD_Serial_JAVA {
    public static void main(String[] args) {
        // 1. Build the Spark configuration (local mode, all available cores).
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("sparkCore");

        // 2. Create the JavaSparkContext.
        JavaSparkContext sc = new JavaSparkContext(conf);

        JavaRDD<String> rdd = sc.parallelize(Arrays.asList("hello word", "hello spark", "hive", "java"));

        Search search = new Search("h");

        // println(Object) already invokes toString(); the explicit call was redundant.
        System.out.println(search.getMatch2(rdd).collect());

        sc.stop();
    }

    /**
     * Holds a query string and filters RDDs by it.
     *
     * <p>Serializable so that, if an instance is captured inside a closure,
     * Spark can serialize it and send it to the executors.
     */
    static class Search implements Serializable {
        private static final long serialVersionUID = 1L;

        private String s;

        public Search(String s) {
            this.s = s;
        }

        /**
         * Returns the elements of {@code rdd} that contain the query string.
         *
         * <p>The field is copied into a local variable first so the lambda's
         * closure captures only the {@code String}, not {@code this} — avoiding
         * serializing the whole {@code Search} instance with every task.
         *
         * @param rdd input RDD of strings
         * @return RDD containing only the matching elements
         */
        public JavaRDD<String> getMatch2(JavaRDD<String> rdd) {
            final String query = s; // capture just the String, not the enclosing object
            return rdd.filter(v1 -> v1.contains(query));
        }

        public String getS() {
            return s;
        }

        public void setS(String s) {
            this.s = s;
        }
    }
}
