package com.xiaojiezhu.spark.setup3;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.broadcast.Broadcast;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;

public class JavaBroadcast {

    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("app");

        // try-with-resources: JavaSparkContext is AutoCloseable. The original
        // never stopped the context, leaking the local Spark backend on exit.
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            // Ship the user table to every executor once, instead of
            // serializing it into each task's closure.
            Broadcast<Map<String, String>> broadcast = sc.broadcast(getUser());

            JavaRDD<String> rdd = sc.parallelize(Arrays.asList("admin", "zxj"));

            // Look up each user's password in the broadcast map.
            // Typed as JavaRDD<String> — the original erased it to Object.
            JavaRDD<String> passwords = rdd.map(name -> broadcast.getValue().get(name));

            passwords.collect().forEach(System.out::println);
        }
    }

    /**
     * Builds the lookup table to broadcast. Any object can be used as a
     * broadcast variable as long as it implements Serializable
     * (HashMap does).
     *
     * @return map of user name to password
     */
    public static Map<String, String> getUser() {
        System.out.println("loading users");
        Map<String, String> data = new HashMap<>();
        data.put("admin", "123");
        data.put("zxj", "234");
        return data;
    }
}
