from pyspark.sql import SparkSession

if __name__ == '__main__':
    # Build the SparkSession object.
    # BUG FIX: the original passed "local[*]" to appName(); that string is a
    # master URL, not an application name. Use master() for the cluster URL
    # and appName() for the human-readable program name.
    spark = SparkSession.builder. \
        master("local[*]"). \
        appName("create_df_from_rdd"). \
        config("spark.sql.shuffle.partitions", "4"). \
        getOrCreate()
    # master    -> where to run (local mode, all cores)
    # appName   -> sets the program name shown in the Spark UI
    # config    -> sets common properties; any Spark conf can be set this way
    # getOrCreate -> finally creates (or reuses) the SparkSession object

    # Get the SparkContext from the SparkSession.
    sc = spark.sparkContext

    # First build an RDD of (id, name, score) tuples.
    # Explicit type conversion is needed: values read from a text file are
    # strings, and DataFrame column types are inferred from the RDD elements.
    rdd = sc.textFile("../../data/sql/stu_score.txt"). \
        map(lambda line: line.split(",")). \
        map(lambda x: (int(x[0]), x[1], int(x[2])))

    # Build a DataFrame on top of the RDD; schema supplies only column names,
    # column types are inferred from the tuple elements.
    df = spark.createDataFrame(rdd, schema=['id', 'name', 'score'])
    df.printSchema()
    df.show(truncate=False)

    # Register a temporary view so the DataFrame can be queried with SQL.
    df.createTempView("score")

    spark.sql("select * from score where score < 99").show()

    # Release Spark resources when done.
    spark.stop()
