package com.offcn.bigdata.spark.sql.p3

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

/**
 * Demonstrates registering a user-defined aggregate function (UDAF) in
 * Spark SQL and comparing its result with the built-in avg aggregate.
 *
 * @author BigData-LGW
 * @version 1.0
 * @since 2020/12/10
 */
object SparkSQLUDAF {
    /**
     * Entry point: builds a local SparkSession, registers the custom
     * average UDAF under the SQL name "myAvg", and runs a grouped query
     * comparing it against the built-in avg.
     *
     * @param args optional; args(0) may override the input JSON path.
     *             Defaults to the original hard-coded local path, so
     *             existing invocations behave unchanged.
     */
    def main(args: Array[String]): Unit = {
        // Keep Spark's own logging at INFO, as the original did.
        Logger.getLogger("org.apache.spark").setLevel(Level.INFO)

        val spark = SparkSession.builder()
            .appName("SparkSQLUDAF")
            .master("local[*]")
            .getOrCreate()

        // Ensure the session is stopped even when the job fails
        // (e.g. the input file is missing) — otherwise it leaks.
        try {
            // Make the custom aggregate callable from SQL text as myAvg(...).
            spark.udf.register("myAvg", new MyAvgUDAF)

            // Allow the input path to be supplied on the command line;
            // fall back to the original default for backward compatibility.
            val inputPath = if (args.nonEmpty) args(0) else "file:/F:/people.json"
            val df = spark.read.json(inputPath)
            df.createOrReplaceTempView("person")

            // Same expression through both aggregates so the custom UDAF's
            // output can be eyeballed against the built-in avg per province.
            val sql =
                """
                  |select
                  |province,
                  |round(avg(height),1) avg_height,
                  |round(myAvg(height),1) my_avg_height
                  |from person
                  |group by province
                  |""".stripMargin
            // show() keeps parentheses: it is a side-effecting action.
            spark.sql(sql).show()
        } finally {
            spark.stop()
        }
    }
}
