package Demo1;

import org.apache.avro.generic.GenericData;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.broadcast.Broadcast;

import java.util.Arrays;
import java.util.Iterator;

/**
 * Created by lenovo on 2017/8/15.
 */
/**
 * Spark broadcast-variable demo: broadcasts an integer multiplier, maps it
 * over a small RDD of 1..10, and prints each product partition-by-partition.
 *
 * <p>Runs locally ({@code local[2]}); intended as an example, not production code.
 */
public class test3 {
    public static void main(String[] args){
        SparkConf conf=new SparkConf();
        conf.setAppName("test3").setMaster("local[2]");
        JavaSparkContext jContext = new JavaSparkContext(conf);

        try {
            // Broadcast the multiplier once per executor instead of shipping
            // it inside every task closure.
            Integer num = 2;
            final Broadcast<Integer> broadcast1 = jContext.broadcast(num);

            // Parameterized types (original used raw JavaRDD/Iterator,
            // which compiles with unchecked warnings and loses type safety).
            JavaRDD<Integer> parallelize =
                    jContext.parallelize(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10));

            // Multiply every element by the broadcast value.
            JavaRDD<Integer> total = parallelize.map(new Function<Integer, Integer>() {
                private static final long serialVersionUID = 1L;

                @Override
                public Integer call(Integer v1) throws Exception {
                    return v1 * broadcast1.value();
                }
            });

            // Print results; foreachPartition handles one partition's
            // iterator per task, avoiding a per-element function call.
            total.foreachPartition(new VoidFunction<Iterator<Integer>>() {
                private static final long serialVersionUID = 1L;

                @Override
                public void call(Iterator<Integer> t) throws Exception {
                    while (t.hasNext()) {
                        System.out.println(t.next());
                    }
                }
            });
        } finally {
            // Original never stopped the context, leaking Spark resources
            // (threads, UI port, temp dirs) until JVM exit.
            jContext.stop();
        }
    }
}
