from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StringType, IntegerType

if __name__ == '__main__':
    # Build a local SparkSession; parenthesized chain instead of backslash continuations.
    spark = (
        SparkSession.builder
        .appName("test")
        .master("local[*]")
        .getOrCreate()
    )

    sc = spark.sparkContext

    # Build a DataFrame from an RDD: each line is "name,age"; strip fields so
    # stray whitespace around the age does not break int() conversion.
    rdd = (
        sc.textFile("../data/input/sql/people.txt")
        .map(lambda line: line.split(","))
        .map(lambda parts: (parts[0].strip(), int(parts[1].strip())))
    )

    # Explicit schema: name is nullable, age is required.
    schema = StructType() \
        .add("name", StringType(), nullable=True) \
        .add("age", IntegerType(), nullable=False)
    df2 = rdd.toDF(schema).coalesce(1)  # coalesce(1) -> single output file
    df2.printSchema()
    df2.show()

    # Write the DataFrame out as parquet, replacing any previous run's output.
    df2.write.mode("overwrite").parquet("../target/people.parquet")

    # Read the parquet back; schema is embedded in the parquet files,
    # so no .schema(...) or CSV-style options are needed here.
    df = spark.read.format("parquet") \
        .load("../target/people.parquet")
    df.printSchema()
    df.show()

    # Release the driver/JVM resources instead of leaking the session.
    spark.stop()
