# coding:utf-8
from pyspark import SparkConf,SparkContext
import os
os.environ['JAVA_HOME'] = '/server/jdk'

if __name__ == '__main__':
    # Bug fix: the original chained .setAppName('test').setAppName('local[*]'),
    # which overwrote the app name with a master URL and never set the master.
    # 'local[*]' is a master URL (run locally with all cores) and belongs in setMaster.
    conf = SparkConf().setAppName('test').setMaster('local[*]')
    sc = SparkContext(conf=conf)

    try:
        # Build an RDD of 6 integers across 3 partitions.
        rdd = sc.parallelize([1, 2, 3, 4, 5, 6], 3)

        # Define a named function to pass as the operator's function body.
        def add(data):
            return data * 10

        print(rdd.map(add).collect())

        # A simpler approach is an anonymous lambda expression.
        print(rdd.map(lambda x: x * 10).collect())
    finally:
        # Release the JVM-side resources even if a job above fails.
        sc.stop()

