package com.zhang.sparksql_1

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
 * @title:
 * @author: zhang
 * @date: 2021/12/10 19:48 
 */
object SparkSQl_UDF_02 {

  /**
   * Demonstrates registering and using a SQL UDF:
   * reads `datas/user.json` into a DataFrame, registers a `prefixName`
   * UDF that prepends "Name:" to a string, and runs a SQL query over
   * a temp view using that UDF.
   */
  def main(args: Array[String]): Unit = {
    // Build the SparkSession entry point (local mode, all available cores).
    val conf = new SparkConf().setMaster("local[*]").setAppName("spark-sql")
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    // try/finally guarantees spark.stop() even if reading or querying
    // throws, so the local SparkContext does not leak.
    try {
      // Load the JSON source and expose it as a temp view for SQL below.
      val df: DataFrame = spark.read.json("datas/user.json")
      df.createOrReplaceTempView("user")

      // Register a UDF that prefixes every username with "Name:".
      // s-interpolation produces the same output as the original
      // "Name:" + name concatenation.
      spark.udf.register("prefixName", (name: String) => s"Name:$name")

      spark.sql("select age,prefixName(username) from user").show()
    } finally {
      // Release Spark resources.
      spark.stop()
    }
  }

  // NOTE(review): unused in this file — presumably kept for a later
  // typed-Dataset example; confirm before removing.
  case class User(id: Int, name: String, age: Int)

}
