from pyspark import SparkConf, SparkContext

if __name__ == '__main__':
    # Local-mode Spark driver: demonstrates sorting an RDD and taking the top n.
    conf = SparkConf().setAppName("test").setMaster("local[*]")
    sc = SparkContext(conf=conf)

    # Single-partition RDD of sample integers.
    rdd = sc.parallelize([3, 1, 3, 5, 6, 7], 1)

    # Print each element scaled by 10 (side effect only).
    # NOTE: foreach() is an action executed for its side effects and always
    # returns None — assigning its result (as the original code did) yields
    # None, so the result must not be captured.
    rdd.foreach(lambda x: print(x * 10))

    # Sort descending and take the top n elements, as the original comment
    # intended; top(n) returns the n largest items as a Python list.
    result = rdd.top(3)
    print(result)  # [7, 6, 5]

    sc.stop()
