package org.example.com.atguigu.day04;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.broadcast.Broadcast;
import org.junit.Test;

import java.io.Serializable;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class Test1114 implements Serializable {

    /**
     * Demonstrates a Spark broadcast variable: the site-name-to-URL lookup map
     * is broadcast once per executor instead of being serialized into every
     * task closure, and each RDD element is mapped to its URL.
     *
     * <p>The class implements {@link Serializable} so the map closure that
     * captures {@code broadcast} can be shipped to executors.
     */
    @Test
    public void test1() {
        SparkConf conf = new SparkConf()
                .setMaster("local[4]")
                .setAppName("com.atguigu.day01.$01_RddCreate");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            JavaRDD<String> rdd1 = sc.parallelize(Arrays.asList("pdd", "atguigu", "jd", "tb"));

            Map<String, String> map = new HashMap<>();
            map.put("pdd", "www.pdd.com");
            map.put("atguigu", "www.atguigu.com");
            map.put("jd", "www.jd.com");
            map.put("tb", "www.tb.com");

            // Broadcast the lookup map so each executor receives a single
            // read-only copy rather than one copy per task.
            Broadcast<Map<String, String>> broadcast = sc.broadcast(map);

            // Executor-side: read the broadcast value and look up the URL.
            // (Spark's Function is a serializable functional interface, so a
            // lambda can replace the anonymous inner class.)
            JavaRDD<String> rdd2 = rdd1.map(v1 -> broadcast.value().get(v1));

            System.out.println(rdd2.collect());
        } finally {
            // JavaSparkContext is Closeable; stop it even if the job throws,
            // otherwise its threads/UI port/temp dirs leak and a later context
            // in the same JVM cannot be created.
            sc.close();
        }
    }
}
