package com.atguigu.sparksql.day01.udf

import org.apache.spark.sql.{SparkSession, functions}

/**
 * Author atguigu
 * Date 2020/11/3 15:17
 */
/**
 * Demo of registering/using Spark aggregators two ways:
 *  - an untyped UDAF (`MyAvg`) registered for SQL under the name "myAvg"
 *  - a typed `Aggregator` (`MyAvg_2`) applied as a column on a typed Dataset
 *
 * The input JSON path can be supplied as the first CLI argument;
 * it defaults to the original demo path "c:/person.json".
 */
object UdtfDemo {
    def main(args: Array[String]): Unit = {
        // Generalized: take the input path from args when provided.
        val inputPath = args.headOption.getOrElse("c:/person.json")

        val spark: SparkSession = SparkSession
            .builder()
            .master("local[*]")
            .appName("UdtfDemo")
            .getOrCreate()
        import spark.implicits._

        // Ensure the SparkSession is released even if the job body throws.
        try {
            // Register the untyped UDAF so it is callable from SQL (see examples below).
            spark.udf.register("myAvg", functions.udaf(new MyAvg))

            val df = spark.read.json(inputPath)
            // SQL-style usage of the registered UDAF:
            // df.createOrReplaceTempView("person")
            // spark.sql("select name, myAvg(age) from person group by name").show()
            // spark.sql("select myAvg(age) from person").show()

            // Typed Dataset API: convert rows to Person and apply the typed Aggregator.
            val ds = df.as[Person]
            // .filter("age is not null")

            val averageAge = MyAvg_2.toColumn.name("average_age")
            val result = ds.select(averageAge) // select average_age over the whole Dataset
            result.show()
        } finally {
            spark.close()
        }
    }
}
