import os
from decimal import Decimal, ROUND_HALF_UP, getcontext, localcontext

import pandas as pd

from pyspark import SparkConf, SparkContext
from pyspark.sql import SparkSession
from pyspark.sql import functions as F
# Pin the remote execution environment so driver and executors resolve the
# same Spark installation and the same Python interpreter (avoids problems
# caused by multiple coexisting versions on the cluster nodes).
os.environ['SPARK_HOME'] = '/export/server/spark'
os.environ["PYSPARK_PYTHON"] = "/root/anaconda3/bin/python"
os.environ["PYSPARK_DRIVER_PYTHON"] = "/root/anaconda3/bin/python"

# IDE shortcut: type 'main' then Enter to expand this entry-point guard
if __name__ == '__main__':
    # Initialize the SparkSession (entry point for the DataFrame / SQL APIs).
    spark = SparkSession.builder.appName("UDAFExample").getOrCreate()


    @F.pandas_udf('decimal(17,12)')
    def udaf_lx(lx: pd.Series, qx: pd.Series) -> Decimal:
        """Grouped-aggregate UDF: fold a survivor column through mortality rates.

        Starting from the first value of ``lx``, repeatedly multiplies by
        ``(1 - qx)`` of the *previous* row, quantizing to 12 decimal places
        (ROUND_HALF_UP) after each step.  The result equals
        ``lx[0] * prod(1 - qx[i] for i in 0..n-2)``.

        NOTE(review): this assumes rows arrive in the intended order within
        each group — pandas grouped-agg UDFs do not guarantee row ordering
        unless the query enforces it; confirm upstream.

        :param lx: survivor counts per row of the group (first value is the seed)
        :param qx: per-row rates; all but the last row's rate are applied
        :return: the folded survivor value, as a Decimal for decimal(17,12)
        """
        # Empty group: keep the original fallback value.
        if len(lx) == 0:
            return Decimal(0)

        # Use a local context instead of mutating the process-wide decimal
        # context.  28 significant digits (the Decimal default) comfortably
        # covers decimal(17,12); the previous global ``prec = 12`` could not
        # even represent 17 significant digits and leaked into all other
        # Decimal arithmetic in this process.
        with localcontext() as ctx:
            ctx.prec = 28
            twelve_places = Decimal('0.000000000000')
            # .iloc guarantees positional access regardless of the Series index.
            tmp_lx = Decimal(lx.iloc[0])
            # Apply every rate except the last row's (matches the original
            # one-step-behind fold, where qx[i] is applied at iteration i+1).
            for q in qx.iloc[:-1]:
                tmp_lx = (tmp_lx * (1 - Decimal(q))).quantize(
                    twelve_places, rounding=ROUND_HALF_UP)
            return tmp_lx


    # Register the aggregate so SQL queries can call it as udaf_lx(lx, qx).
    spark.udf.register('udaf_lx', udaf_lx)
