# coding:utf-8
from pyspark import SparkConf, SparkContext
import os

# Point PySpark at the JVM install it should use when launching the JVM backend.
os.environ['JAVA_HOME'] = '/server/jdk'

if __name__ == '__main__':
    # Demonstrates RDD intersection on a local Spark cluster.
    #
    # BUG FIX: the original chained .setAppName('test').setAppName('local[*]'),
    # which overwrote the app name with a master URL and never configured a
    # master at all. 'local[*]' is a master URL and belongs in setMaster().
    conf = SparkConf().setAppName('test').setMaster('local[*]')
    sc = SparkContext(conf=conf)

    rdd1 = sc.parallelize([('a', 1), ('a', 3)])
    rdd2 = sc.parallelize([('a', 1), ('b', 3)])

    # intersection returns a new RDD containing only the elements present in
    # both rdd1 and rdd2 — here the single pair ('a', 1).
    rdd3 = rdd1.intersection(rdd2)
    print(rdd3.collect())

    # Release the SparkContext and its resources explicitly.
    sc.stop()