package com.xxxx.sqlTest;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.api.java.UDF2;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;
import org.stringtemplate.v4.ST;

import java.util.Arrays;


/**
 * @program: day0316
 * @description: Demonstrates registering and invoking Spark SQL UDFs from Java.
 * @author: CoreDao
 * @create: 2021-03-17 17:08
 **/

public class UDFTest {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("usf").master("local").getOrCreate();
        try {
            Dataset<Row> json = spark.read().json("src/main/resources/data/json");

            registerPersonView(spark);
            registerUdfs(spark);

            // Name length plus age for every person in the text-file-backed view.
            spark.sql("select name,age,nameLenPlusAge(name,age) as udf from person").show();

            // Same UDF machinery against the JSON-backed view.
            json.createOrReplaceTempView("people");
            spark.sql("select name,age,addPrefix(name) as NewName from people").show();
        } finally {
            // Always release the session, even when a read or query above throws.
            spark.stop();
        }
    }

    /**
     * Parses person.txt (one "id,name,age" record per line) into Rows and
     * registers the result as the temporary view "person".
     */
    private static void registerPersonView(SparkSession spark) {
        JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
        JavaRDD<String> lines = jsc.textFile("src/main/resources/data/person.txt");

        JavaRDD<Row> rows = lines.map(line -> {
            String[] fields = line.split(",");
            // parseInt: the schema declares IntegerType; avoid an extra boxing round-trip.
            return RowFactory.create(fields[0], fields[1], Integer.parseInt(fields[2]));
        });

        StructType schema = DataTypes.createStructType(Arrays.asList(
                DataTypes.createStructField("id", DataTypes.StringType, true),
                DataTypes.createStructField("name", DataTypes.StringType, true),
                DataTypes.createStructField("age", DataTypes.IntegerType, true)));
        spark.createDataFrame(rows, schema).createOrReplaceTempView("person");
    }

    /**
     * Registers the UDFs used by the queries in {@code main}:
     * <ul>
     *   <li>{@code nameLenPlusAge(name, age)} — length of the name plus the age;
     *       returns null when either input is null (all schema fields are nullable,
     *       so the original unguarded {@code name.length() + age} could NPE).</li>
     *   <li>{@code addPrefix(name)} — prefixes the name with "Name is ".</li>
     * </ul>
     * Note: types inferred from source data may not match the UDF parameter types
     * (e.g. JSON numbers are read as LongType, not IntegerType) — keep the view
     * schema and the UDF signatures aligned.
     */
    private static void registerUdfs(SparkSession spark) {
        spark.udf().register("nameLenPlusAge",
                (UDF2<String, Integer, Integer>) (name, age) ->
                        (name == null || age == null) ? null : name.length() + age,
                DataTypes.IntegerType);

        spark.udf().register("addPrefix",
                (UDF1<String, String>) s -> "Name is " + s,
                DataTypes.StringType);
    }
}
