from random import Random, random

from pyspark.context import SparkContext

# Local Spark context using all available cores; appName identifies the job in the UI.
sc = SparkContext(master='local[*]', appName='demo18_pi')

# Number of random samples to draw for the Monte Carlo estimate.
num = 1000000

# Build a large RDD of sample indices; each element will drive one random point.
rdd = sc.parallelize(range(num))


# Generate a random point
def random_point_fun(i):
    """Return a uniformly random (x, y) point in the square [-1, 1] x [-1, 1].

    The argument *i* (the RDD element) is ignored; the RDD only drives
    how many samples are drawn.
    """
    # Use the module-level random() instead of constructing (and re-seeding)
    # a fresh Random() instance on every call — same distribution, no
    # per-element object creation in the hot map path.
    x = random() * 2 - 1
    y = random() * 2 - 1
    return x, y


points_rdd = rdd.map(random_point_fun)

# Count the points that fall inside the unit circle (x^2 + y^2 <= 1).
yuan_point_count = points_rdd.filter(lambda point: point[0] ** 2 + point[1] ** 2 <= 1).count()

# Monte Carlo estimate: (points in circle / total points) approximates
# the area ratio pi/4, so multiply by 4 to recover pi.
PI = yuan_point_count / num * 4

print(f"PI is :{PI}")

# Stop the context to release driver/executor resources cleanly;
# the original script leaked the SparkContext.
sc.stop()
