from pyspark import SparkConf
from pyspark import SparkContext

if __name__ == '__main__':
    # Minimal PySpark demo: basic RDD actions on the numbers 1..10.
    conf = SparkConf().setMaster("local[1]").setAppName("spark01")
    sc = SparkContext(conf=conf)

    try:
        rdd = sc.parallelize(range(1, 11))

        # Sum via reduce: 1 + 2 + ... + 10 = 55.
        result = rdd.reduce(lambda x, y: x + y)
        print(result)

        print(rdd.count())        # number of elements: 10
        print(rdd.take(2))        # first 2 elements in partition order
        print(rdd.takeOrdered(2)) # 2 smallest elements

        # NOTE: foreach runs on the executors, so these prints only show up
        # on the driver console because the master is local[1]; on a real
        # cluster they would land in executor logs instead.
        rdd.foreach(lambda x: print(x * x))
    finally:
        # Always release the SparkContext so the driver JVM shuts down cleanly.
        sc.stop()
