package FastLearningSpark.SparkSQL;

import org.apache.commons.lang.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;

import java.util.Arrays;


/**
 * <pre>
 * Created with IntelliJ IDEA.
 * User: zhengzhi
 * Date: 2017/1/11
 * Time: 11:48
 * To change this template use File | Settings | File Templates.
 * </pre>
 *
 * @author Administrator
 */
/**
 * Demo of core Spark RDD transformations and actions from the Java API:
 * {@code filter}, {@code map}, {@code flatMap}, {@code reduce}, plus the
 * actions {@code collect}, {@code count}, {@code take}, and {@code first}.
 *
 * <p>Three styles of implementing Spark's function interfaces are shown:
 * a named local class, an anonymous inner class, and a simplified anonymous
 * class (the original comment claimed a lambda, but the code predates Java 8
 * syntax here, so an anonymous class is used consistently).</p>
 */
public class Test {

    /**
     * Entry point.
     *
     * @param args optional; {@code args[0]} overrides the input text file path
     *             (defaults to {@code D:/README.md} for backward compatibility)
     */
    public static void main(String[] args) {

        SparkConf conf = new SparkConf().setAppName("Spark Test written by Java").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            JavaRDD<String> lines = sc.parallelize(Arrays.asList("pandas", "I like pandas"));
            // collect() materializes the entire RDD on the driver.
            for (String str : lines.collect()) {
                System.out.println(str);
            }

            // Input path is now configurable; the previous hard-coded default is kept.
            String inputPath = (args.length > 0) ? args[0] : "D:/README.md";
            JavaRDD<String> inputRDD = sc.textFile(inputPath);

            // filter() keeps only the elements for which the function returns true.
            /**
             * Style 1: implement the function interface with a named local class.
             */
            class ContainSpark implements Function<String, Boolean> {
                public Boolean call(String x) {
                    return x.contains("Spark");
                }
            }
            JavaRDD<String> sparkRdd = inputRDD.filter(new ContainSpark());
            System.out.println("Input had " + sparkRdd.count() + " concerning lines");
            for (String line : sparkRdd.take(7)) {
                System.out.println(line);
            }

            /**
             * Style 2: implement the function interface with an anonymous inner class.
             */
            JavaRDD<String> examplesRdd = inputRDD.filter(
                    new Function<String, Boolean>() {
                        public Boolean call(String x) {
                            return x.contains("Examples");
                        }
                    });
            System.out.println("Input had " + examplesRdd.count() + " concerning lines");
            System.out.println("Here are 2 examples:");
            for (String line : examplesRdd.take(2)) {
                System.out.println(line);
            }

            /**
             * Style 3: anonymous inner class with the predicate returned directly
             * (the original if/else returning true/false was redundant).
             */
            JavaRDD<String> forRdd = inputRDD.filter(new Function<String, Boolean>() {
                public Boolean call(String s) throws Exception {
                    return s.contains("for");
                }
            });
            System.out.println("Input had " + forRdd.count() + " concerning lines");
            for (String line : forRdd.take(2)) {
                System.out.println(line);
            }

            // map() applies a function to every element and returns an RDD of the results.
            JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4));
            JavaRDD<Integer> result = rdd.map(new Function<Integer, Integer>() {
                public Integer call(Integer x) {
                    return x * x;
                }
            });
            System.out.println(StringUtils.join(result.collect(), ","));

            // flatMap() applies a function returning a sequence to each element and
            // flattens the results; a classic use is splitting strings into words.
            JavaRDD<String> lines2 = sc.parallelize(Arrays.asList("hello world", "hi"));
            JavaRDD<String> words = lines2.flatMap(new FlatMapFunction<String, String>() {
                public Iterable<String> call(String s) throws Exception {
                    return Arrays.asList(s.split(" "));
                }
            });
            System.out.println(words.first()); // prints "hello"

            // Action: reduce() combines two elements into one of the same type;
            // here it sums the RDD. The result was previously computed but never used.
            Integer sum = rdd.reduce(new Function2<Integer, Integer, Integer>() {
                public Integer call(Integer x, Integer y) throws Exception {
                    return x + y;
                }
            });
            System.out.println("Sum of 1..4 is " + sum);
        } finally {
            // Release the SparkContext; previously it was never stopped.
            sc.stop();
        }
    }
}
