package spark;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;

import com.google.gson.Gson;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.function.Consumer;

public class SparkDemo {

    /**
     * Entry point. Runs the demo named by the first command-line argument;
     * with no arguments it prints usage (previously main was empty and every
     * demo method was unreachable dead code).
     */
    public static void main(String[] args) {
        if (args.length == 0) {
            System.err.println("Usage: SparkDemo <map|filter|flatMap|mapPartitions>");
            return;
        }
        switch (args[0]) {
            case "map":
                map();
                break;
            case "filter":
                filter();
                break;
            case "flatMap":
                flatMap();
                break;
            case "mapPartitions":
                mapPartitions();
                break;
            default:
                System.err.println("Unknown demo: " + args[0]);
        }
    }

    /**
     * map: transforms each element of the source RDD into exactly one new
     * element by applying a user-defined function f (here: JSON string ->
     * {@link User} via Gson).
     *
     * Expected output:
     * id: 1 name: xl1 pwd: xl123 sex:2
     * id: 2 name: xl2 pwd: xl123 sex:1
     * id: 3 name: xl3 pwd: xl123 sex:2
     */
    private static void map() {
        SparkConf conf = new SparkConf()
                .setAppName(SparkDemo.class.getSimpleName())
                .setMaster("local");

        // try-with-resources: JavaSparkContext implements Closeable, so the
        // context is released even if a job throws (the original leaked it on error).
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            List<String> datas = Arrays.asList(
                    "{'id':1,'name':'xl1','pwd':'xl123','sex':2}",
                    "{'id':2,'name':'xl2','pwd':'xl123','sex':1}",
                    "{'id':3,'name':'xl3','pwd':'xl123','sex':2}");

            // Gson is created inside the lambda: the closure must be serializable
            // and Gson instances are cheap and not shareable across executors.
            JavaRDD<User> mapRDD = sc.parallelize(datas)
                    .map(v -> new Gson().fromJson(v, User.class));

            mapRDD.foreach(user -> System.out.println("id: " + user.id
                    + " name: " + user.name
                    + " pwd: " + user.pwd
                    + " sex:" + user.sex));
        }
    }

    /** Plain data holder; public fields are populated reflectively by Gson. */
    public static class User {
        public int id;
        public String name;
        public String pwd;
        public int sex;
    }

    /**
     * filter: applies predicate f to every element; elements for which f
     * returns true are kept in the resulting RDD, the rest are dropped.
     *
     * Expected output:
     * 3
     * 7
     * 4
     * 5
     * 8
     */
    private static void filter() {
        SparkConf conf = new SparkConf()
                .setAppName(SparkDemo.class.getSimpleName())
                .setMaster("local");

        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            List<Integer> datas = Arrays.asList(1, 2, 3, 7, 4, 5, 8);

            JavaRDD<Integer> filterRDD = sc.parallelize(datas)
                    .filter(v -> v >= 3);

            filterRDD.foreach(v -> System.out.println(v));
        }
    }

    /**
     * flatMap: like map, but each input element may produce zero or more
     * output elements (here: each comma-separated line is split into fields).
     *
     * Expected output (one token per line):
     * aa
     * bb
     * cc
     * cxf
     * spring
     * struts2
     * java
     * C++
     * javaScript
     */
    static void flatMap() {
        SparkConf conf = new SparkConf()
                .setAppName(SparkDemo.class.getSimpleName())
                .setMaster("local");

        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            List<String> data = Arrays.asList(
                    "aa,bb,cc",
                    "cxf,spring,struts2",
                    "java,C++,javaScript");

            JavaRDD<String> flatMapData = sc.parallelize(data)
                    .flatMap(line -> Arrays.asList(line.split(",")).iterator());

            flatMapData.foreach(s -> System.out.println(s));
        }
    }

    /**
     * mapPartitions: like map, but f is applied to a whole partition at a
     * time, i.e. f has type Iterator&lt;T&gt; =&gt; Iterator&lt;U&gt;.
     * preservesPartitioning (false here, the default) controls whether the
     * input RDD's partitioner is kept.
     *
     * Bug fix: the original labelled each line "分区索引" (partition index) but
     * actually printed a per-element counter within the partition. This
     * version uses mapPartitionsWithIndex, which supplies the real partition
     * index, so with 10 names split into 3 partitions the output is:
     * 分区索引:0  张三1 / 李四1 / 王五1
     * 分区索引:1  张三2 / 李四2 / 王五2
     * 分区索引:2  张三3 / 李四3 / 王五3 / 张三4
     */
    static void mapPartitions() {
        SparkConf conf = new SparkConf()
                .setAppName(SparkDemo.class.getSimpleName())
                .setMaster("local");

        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            List<String> names = Arrays.asList("张三1", "李四1", "王五1", "张三2", "李四2",
                    "王五2", "张三3", "李四3", "王五3", "张三4");

            JavaRDD<String> labelled = sc.parallelize(names, 3)
                    .mapPartitionsWithIndex((partitionIndex, it) -> {
                        List<String> out = new ArrayList<>();
                        while (it.hasNext()) {
                            out.add("分区索引:" + partitionIndex + "\t" + it.next());
                        }
                        return out.iterator();
                    }, false);

            // collect() pulls the distributed result back into driver memory.
            labelled.collect().forEach(System.out::println);
        }
    }

}
