import string

from pyspark.sql import SparkSession, functions as F
from pyspark.sql.types import StructType, IntegerType, StringType, StructField

if __name__ == '__main__':
    # Build the SparkSession object.
    # FIX: the original passed "local[*]" to appName(); that string is a
    # master URL, so it belongs in master() — and the app gets a real name.
    spark = SparkSession.builder. \
        appName("struct_udf_demo"). \
        master("local[*]"). \
        config("spark.sql.shuffle.partitions", "2"). \
        getOrCreate()
    # appName sets the application name shown in the Spark UI
    # config sets common properties; getOrCreate builds (or reuses) the session

    # Get the SparkContext from the SparkSession
    sc = spark.sparkContext

    # Single-column data -> DataFrame with one int column named "num"
    rdd = sc.parallelize([[1], [2], [3]])

    df = rdd.toDF(['num'])


    # TODO registration style 1: spark.udf.register (usable from SQL and DSL)

    def my_udf(num):
        """Map an int to a struct {num: num * 10, letter_str: ascii letter}.

        NOTE(review): assumes 0 <= num < 52 (len(string.ascii_letters));
        larger values raise IndexError inside the executor — confirm the
        input range if reused beyond this demo.
        """
        return {"num": num * 10, "letter_str": string.ascii_letters[num]}

    # A StructType schema describes the dict-shaped return value of the UDF
    struct_udf = StructType().add("num", IntegerType()).add("letter_str", StringType())

    # register() exposes the UDF to SQL under "my_udf" and returns a
    # wrapper callable for DSL use
    my_udf2 = spark.udf.register("my_udf", my_udf, struct_udf)

    # SQL style
    # df.selectExpr("my_udf(num)").show()

    # DSL style
    df.select(my_udf2(df['num'])).show()


    # TODO registration style 2: F.udf (DSL-only registration)

    # FIX: wrap the plain Python function my_udf, not the already-registered
    # wrapper my_udf2 — F.udf expects an ordinary callable.
    my_udf3 = F.udf(my_udf, struct_udf)
    df.select(my_udf3(df['num'])).show()

    # Release local Spark resources when the demo finishes
    spark.stop()