package com.niit.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Demonstrates registering and using a user-defined function (UDF) in Spark SQL.
 *
 * Loads `input/user.json` into a DataFrame, registers it as a temporary view,
 * defines a UDF that prefixes a username with "Name:", and shows the query result.
 */
object Spark_SQL_UDF {

  def main(args: Array[String]): Unit = {
    // Build a local SparkSession; local[*] uses all available cores.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSql")
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()

    // Step 1: load the JSON data and expose it to SQL as the temp view "user".
    val df = spark.read.json("input/user.json")
    df.createOrReplaceTempView("user")

    // Register a UDF named "prefixName" that prepends "Name:" to a string column.
    // UDFs are the way to apply custom per-row transformations inside SQL queries.
    spark.udf.register("prefixName", (name: String) => s"Name:$name")

    spark.sql("select age,prefixName(username) from user").show()
    /* Expected output:
    +---+--------------------+
    |age|prefixName(username)|
    +---+--------------------+
    | 30|       Name:zhangsan|
    | 40|           Name:lisi|
    | 50|         Name:wangwu|
    +---+--------------------+
     */

    spark.close()
  }

}
