package edu.zju.gis.dbfg.model.tile;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.broadcast.Broadcast;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class Example {

    // Fixed: logger was keyed to VectorTileClipper (copy-paste bug); it must
    // name this class so log lines are attributed correctly.
    private static final Logger logger = LoggerFactory.getLogger(Example.class);

    /**
     * Minimal Spark demo: parallelizes four strings, emits one marker value
     * per partition via {@code mapPartitions}, then sums the markers with
     * {@code reduce}. The printed result therefore equals the number of
     * partitions of the RDD, repeated for three identical runs.
     *
     * @param args unused
     */
    public static void main(String[] args) {

        // Setup environment
        SparkConf conf = new SparkConf();
        // JavaSparkContext is Closeable; try-with-resources guarantees the
        // context is stopped even if a job throws (was previously leaked).
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {

            List<String> input = new ArrayList<>();
            input.add("1");
            input.add("2");
            input.add("3");
            input.add("4");

            // The RDD is immutable and loop-invariant — create it once
            // instead of re-parallelizing the same input on every iteration.
            JavaRDD<String> testNum = jsc.parallelize(input);

            for (int i = 0; i < 3; i++) {
                // Emit a single 1 per partition; the partition's records are
                // drained but their values are deliberately ignored.
                JavaRDD<Integer> result = testNum.mapPartitions(in -> {
                    List<Integer> markers = new ArrayList<>();
                    markers.add(1);
                    while (in.hasNext()) {
                        in.next();
                    }
                    return markers.iterator();
                });

                // Sum the per-partition markers: r == partition count.
                int r = result.reduce(Integer::sum);

                System.out.println("r = " + r);
            }
        }
    }
}
