from pyspark import SparkConf, SparkContext

if __name__ == '__main__':
    # Minimal local-mode Spark job: distribute a small list of ints and
    # persist it as text files, once to a local path and once to HDFS.
    conf = SparkConf().setAppName("test").setMaster("local[*]")
    sc = SparkContext(conf=conf)
    try:
        # 6 elements over 3 partitions -> 3 part-* files per output directory.
        rdd = sc.parallelize([3, 1, 3, 5, 6, 7], 3)

        # NOTE(review): saveAsTextFile raises if the target directory already
        # exists — delete previous output before re-running this script.
        rdd.saveAsTextFile("../target/output/out2")
        rdd.saveAsTextFile("hdfs://11.50.138.178:8020/tmp/hc/pydemo/01_RDD/output/out2")
    finally:
        # Fix: the original never stopped the SparkContext, leaking the
        # driver's resources; always release it, even if a save fails.
        sc.stop()
