package com.czk.java;

import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.expressions.UserDefinedFunction;
import org.apache.spark.sql.types.DataTypes;
import static org.apache.spark.sql.functions.udf;
/**
 * @Author:ChenZhangKun
 * @Date: 2021/12/14 18:57
 */
/**
 * Demonstrates defining and registering scalar Java UDFs with Spark SQL:
 * a zero-argument non-deterministic UDF, one- and two-argument UDFs, and
 * a UDF used in a WHERE clause.
 */
public class JavaUserDefinedScalar {
    public static void main(String[] args) {

        // $example on:udf_scalar$
        SparkSession spark = SparkSession
                .builder()
                .master("local[*]")
                .appName("Java Spark SQL UDF scalar example")
                .getOrCreate();

        // Define and register a zero-argument non-deterministic UDF.
        // UDFs are deterministic by default, i.e. assumed to produce the same
        // result for the same input. asNondeterministic() returns a NEW
        // UserDefinedFunction rather than mutating the receiver, so the call
        // must be chained (the original code discarded its return value and
        // registered a still-deterministic UDF, allowing the optimizer to
        // constant-fold or reuse results of random()).
        UserDefinedFunction random = udf(
                () -> Math.random(), DataTypes.DoubleType
        ).asNondeterministic();
        // Register the random function
        spark.udf().register("random", random);
        spark.sql("SELECT random()").show();
        // +-------+
        // |UDF()  |
        // +-------+
        // |xxxxxxx|
        // +-------+

        // Define and register a one-argument UDF (plus-one function)
        spark.udf().register("plusOne",
                (UDF1<Integer, Integer>) x -> x + 1, DataTypes.IntegerType);
        spark.sql("SELECT plusOne(5)").show();
        // +----------+
        // |plusOne(5)|
        // +----------+
        // |         6|
        // +----------+

        // Define and register a two-argument UDF (string length + offset)
        UserDefinedFunction strLen = udf(
                (String s, Integer x) -> s.length() + x, DataTypes.IntegerType
        );
        spark.udf().register("strLen", strLen);
        spark.sql("SELECT strLen('test', 1)").show();
        // +------------+
        // |UDF(test, 1)|
        // +------------+
        // |           5|
        // +------------+

        // UDF used in a WHERE clause to filter rows
        spark.udf().register("oneArgFilter",
                (UDF1<Long, Boolean>) x -> x > 5, DataTypes.BooleanType);
        spark.range(1, 10).createOrReplaceTempView("test");
        spark.sql("SELECT * FROM test WHERE oneArgFilter(id)").show();
        // +---+
        // | id|
        // +---+
        // |  6|
        // |  7|
        // |  8|
        // |  9|
        // +---+

        // $example off:udf_scalar$
        spark.stop();
    }
}
