from pyspark import SparkConf, SparkContext


def get_spark_context() -> SparkContext:
    """
    Build and return a SparkContext running in local mode.

    Uses all available local cores (`local[*]`) and names the
    application 'test_spark'.
    """
    config = SparkConf()
    config.setMaster('local[*]')
    config.setAppName('test_spark')
    return SparkContext(conf=config)


def get_rdd_by_python():
    """Demonstrate building RDDs from native Python containers.

    Creates one RDD per container type (str, list, tuple, set, dict)
    and prints the collected contents of each. Note: parallelizing a
    dict keeps only its keys, and a str is split into characters.
    """
    sc = get_spark_context()
    samples = [
        'success',               # string -> one element per character
        ['a', 'b', 'c'],         # list
        (1, 2, 3),               # tuple
        {12, 22, 33},            # set (unordered)
        {'k1': 'v1', 'k2': 'v2'},  # dict -> keys only
    ]
    for data in samples:
        print(sc.parallelize(data).collect())
    sc.stop()


def get_rdd_by_file():
    """Build an RDD from a text file (one element per line) and print it.

    The file path is hard-coded to a local Windows path; adjust as needed.
    """
    sc = get_spark_context()
    try:
        rdd = sc.textFile('E:/PythonBasicKnowledge/12 pyspark使用/一个普通文本文件.txt')
        print(rdd.collect())
    finally:
        # Fix: the original never stopped the context, leaking the Spark
        # session (get_rdd_by_python does stop it — keep them consistent).
        sc.stop()


# Guard the demo calls so importing this module has no side effects;
# only direct execution runs the examples.
if __name__ == '__main__':
    # get_rdd_by_python()
    get_rdd_by_file()
