package com.yanggu.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

//Custom UDF -> one-in, one-out: takes one column value and returns a transformed value
object SparkSQL02_UDF {

  /** Demo entry point: reads a JSON file, registers a one-in/one-out UDF,
    * and applies it in a SQL query over a temp view.
    */
  def main(args: Array[String]): Unit = {
    // 1. Build the Spark configuration.
    //    App name fixed to match this object (was copy-pasted as "SparkSQL01_Demo").
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL02_UDF")

    // 2. Create the SparkSession.
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()

    // RDD => DataFrame => Dataset conversions need these implicits in scope.
    // Note: `spark` is the SparkSession value above, not a package name.
    import spark.implicits._

    // Assumes input/test.json exists with `username` and `age` fields — verify against the data file.
    val dataFrame = spark.read.json("input/test.json")

    // Register the UDF. The parameter type must be explicit: the original
    // `"Name: " + _` placeholder lambda has no inferable parameter type and
    // does not compile against Spark's `register` overloads.
    spark.udf.register("addName", (name: String) => s"Name: $name")

    dataFrame.createOrReplaceTempView("people")

    spark.sql(
      """
        |SELECT addName(username), age FROM people
        |""".stripMargin)
      .show() // side-effecting, so keep the parentheses

    spark.stop() // side-effecting, so keep the parentheses
  }

}
