package cn.lsh.spark;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class Pai {

	/**
	 * Estimates π by Monte Carlo sampling: draws random points in the square
	 * [-1,1]×[-1,1] and counts the fraction that land inside the unit circle.
	 * That fraction converges to π/4, so π ≈ 4 · hits / samples.
	 *
	 * @param args command-line arguments (unused)
	 */
	public static void main(String[] args) {
		SparkConf conf = new SparkConf();
		conf.setMaster("local").setAppName("test");

		int num = 10000000;
		// JavaSparkContext is Closeable; try-with-resources guarantees the
		// context is stopped even if the job throws.
		try (JavaSparkContext sparkContext = new JavaSparkContext(conf)) {
			List<Integer> list = IntStream.range(0, num).boxed().collect(Collectors.toList());
			// parallelize() distributes the collection across the cluster; the
			// second argument is the number of partitions (parallel tasks).
			long count = sparkContext.parallelize(list, 10).map(i -> {
						// ThreadLocalRandom avoids serializing one shared Random
						// into every task closure and is safe under concurrency.
						float x = ThreadLocalRandom.current().nextFloat() * 2 - 1;
						float y = ThreadLocalRandom.current().nextFloat() * 2 - 1;
						// inside the unit circle iff x² + y² <= 1
						return (x * x + y * y) <= 1 ? 1 : 0;
					}).filter(i -> i == 1).count();
			// divide() with no rounding mode throws ArithmeticException on a
			// non-terminating decimal expansion — almost certain here — so an
			// explicit scale and RoundingMode are required.
			System.out.println("π = " + BigDecimal.valueOf(count * 4)
					.divide(BigDecimal.valueOf(num), 10, RoundingMode.HALF_UP));
		}
	}
}
