from pyspark import SparkConf, SparkContext
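# Optionally point PySpark at a specific Python interpreter before creating the
# SparkContext (the interpreter path is intentionally left blank here):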
# import os
# os.environ['PYSPARK_PYTHON'] = ''

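# Run Spark locally using all available cores, with the application name "test".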
conf = SparkConf().setMaster("local[*]").setAppName("test")
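# The SparkContext is the entry point for creating and operating on RDDs.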
sc = SparkContext(conf=conf)

# parallelize() distributes the local list into an RDD, map() doubles each element,
# and collect() gathers the results back to the driver as a plain Python list.
result = sc.parallelize([1, 2, 3, 4, 5, 6, 7, 8]).map(lambda x: x * 2).collect()

print(result)
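
# Shut down the SparkContext and release its resources.
sc.stop()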
