package day02

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * @author wsl
 * @version 2022-10-12
 *          Demonstrates registering Spark SQL UDFs. Note: SparkSession internally
 *          wraps a SparkContext, so no separate SparkContext is needed.
 */
object UDF {

  /**
   * Entry point: reads a JSON file of users, registers two SQL UDFs,
   * and shows the raw and UDF-transformed results.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    // Local mode using all available cores.
    val conf: SparkConf = new SparkConf().setAppName("spark sql").setMaster("local[*]")
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    // Ensure the session is stopped even if reading or querying throws
    // (e.g. the input file is missing); otherwise the SparkContext leaks.
    try {
      val df: DataFrame = spark.read.json("sparksql/input/user.json")

      df.createOrReplaceTempView("user")
      spark.sql("select name,age from user").show()

      // Requirement 1: prefix the name column with "name:".
      spark.udf.register("addName", (name: String) => s"name:$name")

      // Requirement 2: double the age column.
      spark.udf.register("doubleAge", (age: Long) => age * 2)

      spark.sql("select addName(name),doubleAge(age) from user").show()
    } finally {
      spark.stop()
    }
  }
}

