import string

from pyspark.sql import SparkSession
from pyspark.sql.types import StringType, StructType, IntegerType

if __name__ == '__main__':
    # Build a local Spark session; 2 shuffle partitions keeps the demo fast.
    spark = (
        SparkSession.builder
        .appName("test")
        .master("local[*]")
        .config("spark.sql.shuffle.partitions", 2)
        .getOrCreate()
    )

    sc = spark.sparkContext

    # TODO Requirement: given the numbers 1, 2, 3, pass in only the number and
    # return the letter at that ordinal position, combined with the number
    # into a dict. E.g. passing 1 returns {"num": 1, "letters": "a"}.
    rdd = sc.parallelize([[1], [2], [3]])
    df = rdd.toDF(["num"])

    def process(data):
        """Map a 1-based number to its letter and pair them in a dict.

        E.g. process(1) -> {"num": 1, "letters": "a"}.

        :param data: 1-based position of the desired letter.
        :return: dict with keys "num" (the input) and "letters" (the letter).
        """
        # BUG FIX: the original indexed string.ascii_letters[data], which is
        # off by one — 1 mapped to "b". The requirement states 1 -> "a",
        # so index with data - 1.
        return {"num": data, "letters": string.ascii_letters[data - 1]}


    """
    UDF的返回值是字典的话，需要用StructType来接收
    """
    # udf2 = spark.udf.register("udf1", process, StructType([
    #     StructField("num", IntegerType()),
    #     StructField("letters", StringType())
    # ]))
    udf2 = spark.udf.register("udf1", process, StructType().add("num", IntegerType()).add("letters", StringType()))

    df.selectExpr("udf1(num)").show()
    df.select(udf2(df["num"])).show()
