# RDD input: creating RDDs from Python collections and from a text file

from pyspark import SparkConf,SparkContext

if __name__ == '__main__':
    # Start a local Spark context using all available cores.
    conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
    sc = SparkContext(conf=conf)

    # Demonstrate parallelize() over several Python container types.
    # Note how each type is flattened into RDD elements:
    #   list  -> elements as-is
    #   tuple -> elements as-is
    #   str   -> one element per character
    #   set   -> elements as-is (unordered)
    #   dict  -> only the KEYS are kept; values are dropped
    sample_inputs = [
        [1, 2, 3, 4, 5],
        (11, 22, 33, 44, 55),
        "abcdefg",
        {6, 7, 8, 9, 10},
        {"x": 3, "y": 4},
    ]
    for data in sample_inputs:
        print(sc.parallelize(data).collect())
    # Expected output:
    #   [1, 2, 3, 4, 5]
    #   [11, 22, 33, 44, 55]
    #   ['a', 'b', 'c', 'd', 'e', 'f', 'g']
    #   [6, 7, 8, 9, 10]
    #   ['x', 'y']

    # Read a text file: each line of the file becomes one RDD element.
    file_rdd = sc.textFile("data/hello.txt")
    print(file_rdd.collect())

    sc.stop()