package cn.jly.bigdata.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * @author lanyangji
 * @date 2019/11/30 17:14
 */
/**
 * Demonstrates registering and invoking Spark SQL user-defined functions (UDFs)
 * over a JSON-backed DataFrame exposed as a temporary view.
 */
object SparkSql03_UDF {

  def main(args: Array[String]): Unit = {

    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSql03_UDF")
    val spark: SparkSession = SparkSession.builder().config(sparkConf).getOrCreate()

    try {
      // Bring Spark's implicit conversions (Encoders, $-notation) into scope
      import spark.implicits._

      // Load the sample dataset; assumes input/people.json exists relative to the working directory
      val userDF: DataFrame = spark.read.json("input/people.json")

      userDF.show()

      // Register UDFs callable from SQL by name
      spark.udf.register("addSuffix", (x: String) => x + ".suffix")
      spark.udf.register("addPrefix", (x: String) => "Prefix." + x)
      spark.udf.register("ageAndOne", (x: Int) => x + 1)

      userDF.createOrReplaceTempView("persons")
      spark.sql("select addSuffix(name) as nameWithSuffix, age from persons").show()
      // Fix: pass `name`, not `age` — the alias `nameWithPrefix` shows the intended column
      spark.sql("select addPrefix(name) as nameWithPrefix from persons").show()
      spark.sql("select name, ageAndOne(age) from persons").show()

      // On a type mismatch Spark inserts an implicit cast first: ageAndOne(cast(name as int)).
      // When the cast fails, the UDF receives null and the result column is null.
      spark.sql("select name, ageAndOne(name) from persons").show()
    } finally {
      // Release Spark resources even if one of the queries above throws
      spark.close()
    }
  }
}
