package spark_sql;

import bean.User;
import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.types.DataTypes;


/**
 * @author shihb
 * @date 2020/1/10 18:13
 * 用户自定义函数
 */
/**
 * Demonstrates registering and using a user-defined function (UDF) in Spark SQL.
 *
 * <p>Builds a small {@code user} temp view from an in-memory list of {@code User}
 * beans, registers {@code addage} (adds 1 to an integer column), and runs a
 * SQL query that applies it.
 *
 * @author shihb
 * @date 2020/1/10 18:13
 */
public class UdfDemo {

  public static void main(String[] args) {
    // Local-mode Spark configuration; give the app a real name so it is
    // identifiable in the Spark UI (was previously registered as "").
    SparkConf sparkConf = new SparkConf().setMaster("local[*]").setAppName("UdfDemo");
    // Spark context for building the demo RDD.
    JavaSparkContext jsc = new JavaSparkContext(sparkConf);
    // Use the documented builder API instead of the internal SparkSession
    // constructor; it reuses the SparkContext created above.
    SparkSession spark = SparkSession.builder().sparkContext(jsc.sc()).getOrCreate();

    // Build an RDD of User beans with a single partition (enough for a demo).
    JavaRDD<User> rdd = jsc.parallelize(Arrays.asList(
        new User(1, "zhangsan", 20),
        new User(2, "lisi", 30),
        new User(3, "wangwu", 40)
        ), 1);

    // Convert the RDD to a typed Dataset and expose it to SQL as "user".
    Encoder<User> userEncoder = Encoders.bean(User.class);
    Dataset<User> dataset = spark.createDataset(rdd.rdd(), userEncoder);
    dataset.createOrReplaceTempView("user");

    // Create the UDF instance.
    AddAgeFunction addAgeFunction = new AddAgeFunction();
    // Register it under the SQL name "addage" with an Integer return type.
    spark.udf().register("addage", addAgeFunction, DataTypes.IntegerType);
    // Apply the UDF in a SQL query.
    Dataset<Row> result = spark.sql("select name,addage(age) from user");
    result.show();
    // Release resources.
    spark.stop();
  }

}

/**
 * Spark SQL UDF that increments an integer age by one.
 *
 * <p>SQL NULL arrives here as a Java {@code null}; the original
 * {@code age + 1} auto-unboxed and threw a {@link NullPointerException}
 * in that case. NULL is now propagated, matching standard SQL semantics.
 */
class AddAgeFunction implements UDF1<Integer, Integer> {

  @Override
  public Integer call(Integer age) throws Exception {
    // Propagate SQL NULL instead of NPE-ing on unboxing.
    if (age == null) {
      return null;
    }
    return age + 1;
  }
}