package com.atbeijing.bigdata.spark.mytest.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Demonstrates registering and using a user-defined function (UDF) in Spark SQL.
 *
 * Builds a small in-memory DataFrame of (id, name, age) rows, registers an
 * `addAge` UDF that adds a fixed offset to the age column, and runs a SQL
 * query against a temporary view to apply it.
 */
object Spark_UDF {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("UDF")
    val ss: SparkSession = SparkSession.builder().config(conf).getOrCreate()
    // Needed for the rdd.toDF(...) conversion below.
    import ss.implicits._

    val rdd1 = ss.sparkContext.makeRDD(
      List(
        (1, "zhangsan", 30),
        (2, "lisi", 40),
        (3, "wangwu", 50)
      )
    )

    val df = rdd1.toDF("id", "name", "age")

    // Create a temporary view so the DataFrame can be queried via SQL.
    df.createOrReplaceTempView("Student")

    // Register a custom function with Spark so it can be referenced in SQL.
    // The UDF is applied to each row of the query result.
    // NOTE: the `age` column is an Int, so the UDF takes Int directly instead
    // of accepting a String and calling .toInt (which relied on an implicit
    // cast and could throw NumberFormatException on non-numeric input).
    ss.udf.register("addAge", (age: Int) => age + 35)

    // Use the UDF in SQL; alias the result so the output column has a
    // readable name instead of an auto-generated one like `UDF:addAge(age)`.
    val df1: DataFrame = ss.sql("select id, name, addAge(age) as age from Student")
    df1.show()

    // Release the session's resources (the original leaked the SparkSession).
    ss.stop()
  }
}
