from pyspark import SparkConf, SparkContext, StorageLevel

if __name__ == '__main__':
    # Build the SparkConf object: local mode, using all available cores.
    conf = SparkConf().setAppName("test").setMaster("local[*]")
    # Build the SparkContext execution-environment entry point.
    sc = SparkContext(conf=conf)

    # Student roster: (student_id, name, age).
    stu_info_list = [(1, "张大仙", 11), (2, "王晓晓", 12), (3, "张甜甜", 13), (4, "王大力", 14)]

    # Score records: (student_id, subject, score).
    score_info_list_rdd = sc.parallelize([
        (1, "语文", 99),
        (2, "语文", 98),
        (3, "语文", 97),
        (4, "语文", 96),
        (1, "数学", 89),
        (2, "数学", 88),
        (3, "数学", 87),
        (4, "数学", 86),
        (1, "编程", 79),
        (2, "编程", 78),
        (3, "编程", 77),
        (4, "编程", 76)
    ])

    # Build the id -> name lookup ONCE, instead of scanning the whole
    # student list for every score record (was O(len(roster)) per record).
    # NOTE(review): on a real cluster this closure is shipped to executors;
    # for larger rosters prefer sc.broadcast(id_to_name) — confirm intent.
    id_to_name = {sid: name for sid, name, _age in stu_info_list}

    def map_func(data):
        """Map a (student_id, subject, score) record to (name, subject, score).

        Unknown ids map to "" — same fallback behavior as the original
        linear search, which left name empty when no roster entry matched.
        """
        sid, subject, score = data
        return (id_to_name.get(sid, ""), subject, score)

    ret = score_info_list_rdd.map(map_func).collect()

    print(ret)

    # Release the SparkContext and its backing JVM resources (was missing).
    sc.stop()
