package org.shj.spark.operator;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function2;

/**
 * repartition 算子，用于将任意RDD的partition 增加或减少
 * 建议使用场景：
 * 使用Spark SQL 从 Hive中查询数据时候，Spark SQL会根据Hive对应的
 * hdfs文件的block的数量决定加载出来的RDD的partition有多少个
 * 这里默认的partition的数量是根本无法设置的
 * 
 * 有些时候，自动设置的partition的数量过少了，为了进行优化，可以提高
 * 并行度，就是对RDD使用repartition算子
 * @author Administrator
 *
 */
public class RepartitionOperator {

	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setMaster("local").setAppName("RepartitionOperator");
		JavaSparkContext sc = new JavaSparkContext(conf);
		
		List<String> list = Arrays.asList("zhuyin01","zhuyin02","zhuyin03","zhuyin04","zhuyin05","zhuyin06",
				"zhuyin07","zhuyin08","zhuyin09","zhuyin10","zhuyin11","zhuyin12");
		
		JavaRDD<String> staffRDD = sc.parallelize(list, 3);
		JavaRDD<String> staffRDD2 = staffRDD.mapPartitionsWithIndex(new Function2<Integer, Iterator<String>, Iterator<String>>(){
			private static final long serialVersionUID = 4023120648900012732L;

			public Iterator<String> call(Integer index, Iterator<String> it) throws Exception {
				List<String> list = new LinkedList<String>();
				while(it.hasNext()){
					list.add("Dept[" + index +"] " + it.next());
				}
				return list.iterator();
			}
		}, true);
		
		JavaRDD<String> staffRDD3 = staffRDD2.repartition(6);
		
		JavaRDD<String> staffRDD4 = staffRDD3.mapPartitionsWithIndex(new Function2<Integer, Iterator<String>, Iterator<String>>(){
			private static final long serialVersionUID = 4023120648900012732L;

			public Iterator<String> call(Integer index, Iterator<String> it) throws Exception {
				List<String> list = new LinkedList<String>();
				while(it.hasNext()){
					list.add("Dept[" + index +"] " + it.next());
				}
				return list.iterator();
			}
		}, true);
		
		for(String staff : staffRDD4.collect()){
			System.out.println(staff);
		}
		
		sc.close();

	}

}
