import time

from pyspark.sql import DataFrame, functions as ft
from pyspark.sql.types import StructField, StructType, StringType, IntegerType

from session.LocalSpark import spark


def map1(row, delay: float = 1.0):
    """Identity mapper that simulates per-row processing cost.

    Used via ``rdd.map`` to make each row take measurable time, so the
    two column-tagging strategies can be timed against each other.

    Args:
        row: the input row; returned unchanged.
        delay: seconds to sleep per row. Defaults to 1.0, matching the
            original hard-coded cost; pass 0 to disable in tests.

    Returns:
        The row, unmodified.
    """
    print("map1=====")
    time.sleep(delay)
    return row


def filter(df: DataFrame):
    """Strategy 1: split the frame, tag each half, and union them back.

    Rows with ``idx == 1`` get ``col1 = 1``; all other rows get a NULL
    ``col1``. The combined result is shown (side effect only, no return).

    NOTE: this function shadows the ``filter`` builtin; renaming would
    change the public interface, so it is kept as-is.
    """
    df1 = df.filter(ft.col("idx") == 1)
    # df.subtract(df1) is not usable as the complement: subtract has set
    # semantics and would silently drop duplicate rows (data loss).
    # Bug fix: `idx != 1` alone evaluates to NULL for NULL idx, so rows
    # with a NULL idx were dropped from BOTH halves. Keep them explicitly.
    df2 = df.filter((ft.col("idx") != 1) | ft.col("idx").isNull())

    df1 = df1.withColumn("col1", ft.lit(1))
    df2 = df2.withColumn("col1", ft.lit(None).cast(IntegerType()))

    # Both halves share the same column order, so positional union is safe.
    df_all = df1.union(df2)
    df_all.show()


def when_else(df: DataFrame):
    """Strategy 2: tag ``col1`` in a single pass with when/otherwise.

    Rows with ``idx == 1`` get ``col1`` computed by a UDF (here simply 1);
    all other rows get a NULL ``col1``. The result is shown (side effect
    only, no return).
    """
    def add_col1(row) -> int:
        # Runs on the executors; receives one struct-of-all-columns per row.
        print(row['idx'])
        return 1

    udf_add_col1 = ft.udf(add_col1, IntegerType())
    # Bug fix: the original called add_col1(df) eagerly on the driver at
    # plan-build time (printing a Column repr once), bypassing the UDF
    # entirely. Route through the registered UDF with the full row struct.
    df_all = df.withColumn(
        "col1",
        ft.when(df.idx == 1, udf_add_col1(ft.struct(*df.columns)))
        .otherwise(ft.lit(None).cast(IntegerType())),
    )
    df_all.show()


if __name__ == '__main__':
    t0 = time.time()

    schema = StructType([
        StructField("sku_id", StringType()),
        StructField("shop_id", StringType()),
        StructField("idx", IntegerType()),
    ])
    df: DataFrame = spark.createDataFrame(
        [
            ["sku1", "shop1", 1],
            ["sku2", "shop1", 2],
            ["sku2", "shop1", 2],
        ],
        schema,
    )
    # Route every row through map1 to simulate per-row processing cost.
    df = df.rdd.map(map1).toDF(df.schema)

    # Task: for rows with idx == 1, set col1 = 1 (NULL elsewhere).
    # Option 1: split with filter, tag each half, union back.
    # filter(df)

    # Option 2: single pass with when/otherwise.
    when_else(df)

    print(time.time() - t0)
