# -*- coding: utf-8 -*-
from pyspark.sql import SparkSession


if __name__ == '__main__':
    # Demo of basic Spark SQL usage: build a DataFrame from a CSV-like text
    # file via an RDD, run DataFrame-API queries, then the same data through
    # session-scoped and global temporary views.
    spark = SparkSession.builder.appName("sql_learn").master("local[*]").getOrCreate()
    try:
        # Each input line is expected to be "id,name,age".  Drop blank lines
        # first so int(arr[0]) cannot fail on an empty split result (e.g. a
        # stray empty line in the data file).
        rdd = spark.sparkContext.textFile("../data/student.txt")
        df = rdd.filter(lambda line: line.strip()) \
            .map(lambda line: line.split(",")) \
            .map(lambda arr: (int(arr[0]), arr[1], int(arr[2]))) \
            .toDF("id: int, name: string, age: int")
        df.printSchema()
        df.select("name").show()
        # Column arithmetic: select name together with a derived age + 1 column.
        df.select(df['name'], df['age'] + 1).show()
        df.filter(df['age'] > 21).show()
        df.groupBy("age").count().show()
        # Session-scoped temp view: visible only within this SparkSession.
        df.createOrReplaceTempView("stu")
        spark.sql("SELECT * FROM stu").show()
        df.createGlobalTempView("stu")
        # Global temporary view is tied to a system preserved database `global_temp`
        spark.sql("SELECT * FROM global_temp.stu").show()
        # Global temporary view is cross-session
        spark.newSession().sql("SELECT * FROM global_temp.stu").show()
    finally:
        # Always release the Spark application, even if a query above raised.
        spark.stop()
