# coding:utf-8
from pyspark import SparkConf, SparkContext
import os

# Point PySpark at the JDK installation; the path is environment-specific
os.environ['JAVA_HOME'] = '/server/jdk'

if __name__ == '__main__':
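    # 'local[*]' runs Spark locally, using all available CPU cores as the master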
    conf = SparkConf().setAppName('test').setMaster('local[*]')
    sc = SparkContext(conf=conf)

    # Build an RDD from a local Python list
    rdd = sc.parallelize([1, 2, 3, 4, 5, 6])

    # Use the filter operator to keep only the odd numbers
    result = rdd.filter(lambda x: x % 2 == 1)
    print(result.collect())  # [1, 3, 5]
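
    # Stop the SparkContext to release resources once the job finishes
    sc.stop()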