//package com.kingjw.sql
//
//import org.apache.spark.rdd.RDD
//import org.apache.spark.sql.expressions.{MutableAggregationBuffer, UserDefinedAggregateFunction}
//import org.apache.spark.sql.types.{DataType, DoubleType, IntegerType, LongType, StructField, StructType}
//import org.apache.spark.sql.{DataFrame, Dataset, Encoder, Encoders, Row, SparkSession, TypedColumn, functions}
//import org.apache.spark.{SparkConf, SparkContext}
//import org.apache.spark.sql.expressions.Aggregator
//
///**
// *
// * @Package: com.kingjw.sql
// * @ClassName: SqlTest
// * @Author: 王坚伟
// * @CreateTime: 2021/11/11 11:11
// * @Description: Demonstrates Spark SQL UDF registration and three UDAF
// *               implementations: an untyped UserDefinedAggregateFunction,
// *               a typed Aggregator used as a TypedColumn, and a typed
// *               Aggregator registered for SQL via functions.udaf.
// *               NOTE: this entire file is currently commented out (disabled).
// */
//// Demo entry point: builds a local SparkSession, loads input/user.json into a
//// DataFrame/Dataset, registers a scalar UDF, and exercises two typed
//// Aggregator-based average-age UDAFs.
//object SqlTest {
//  def main(args: Array[String]): Unit = {
//    val conf: SparkConf = new SparkConf()
//      .setAppName("SerDemo")
//      .setMaster("local[*]")
//    val spark = SparkSession.builder().config(conf).getOrCreate()
//
//    // Required for Dataset encoders, e.g. df.as[User01] below.
//    import spark.implicits._
//
//    val df: DataFrame = spark.read.json("input/user.json")
//
//
//    df.createOrReplaceTempView("user")
//    spark.sql("select * from user").show()
//
//    // Scalar UDF that prefixes a name with "Name:".
//    // NOTE(review): registered but never invoked by any query below.
//    spark.udf.register("prefixName",(name:String)=>{
//      "Name:"+name
//    })
//
//    val ds = df.as[User01]
//
//    // Approach 1: use the typed Aggregator directly as a TypedColumn on a Dataset.
//    val myAveragUDAF1: MyAveragUDAF1 = new MyAveragUDAF1
//    val column: TypedColumn[User01, Double] = myAveragUDAF1.toColumn
//    ds.select(column).show()
//
//    // Approach 2: wrap the typed Aggregator with functions.udaf and register it
//    // under the name "avgAge" so it can be called from SQL.
//    val myAveragUDAF3: MyAvgAgeUDAF3 = new MyAvgAgeUDAF3
//    spark.udf.register("avgAge",functions.udaf(myAveragUDAF3))
//    spark.sql("select avgAge(age) from user").show()
//
//    spark.close()
//
//  }
//
//}
//
//// Average-age UDAF written against the classic untyped API.
//// NOTE(review): UserDefinedAggregateFunction is deprecated since Spark 3.0 in
//// favour of Aggregator + functions.udaf (the approach used by MyAvgAgeUDAF3).
//// NOTE(review): this class is not referenced anywhere in SqlTest.main.
//class MyAveragUDAF extends UserDefinedAggregateFunction {
//  // Input: one IntegerType column ("age").
//  override def inputSchema: StructType = {
//    StructType(Array(StructField("age",IntegerType)))
//
//  }
//
//  // Intermediate state: running sum (slot 0) and row count (slot 1), both Long.
//  override def bufferSchema: StructType = {
//    new StructType(Array(new StructField("sum",LongType),StructField("count",LongType)))
//  }
//
//  // Final result type of the aggregation.
//  override def dataType: DataType = DoubleType
//
//  // The same input always produces the same output.
//  override def deterministic: Boolean = true
//
//  // Zero-initialise both buffer slots.
//  override def initialize(buffer: MutableAggregationBuffer): Unit = {
//    buffer(0) = 0L
//    buffer(1) = 0L
//
//  }
//
//  // Fold one input row into the buffer; null ages are skipped entirely
//  // (neither sum nor count is incremented).
//  override def update(buffer: MutableAggregationBuffer, input: Row): Unit = {
//    if(!input.isNullAt(0)){
//      buffer(0) = buffer.getLong(0) + input.getInt(0)
//      buffer(1) = buffer.getLong(1) + 1
//    }
//  }
//
//  // Combine two partial buffers (cross-partition merge).
//  override def merge(buffer1: MutableAggregationBuffer, buffer2: Row): Unit = {
//    buffer1(0) = buffer1.getLong(0) + buffer2.getLong(0)
//    buffer1(1) = buffer1.getLong(1) + buffer2.getLong(1)
//  }
//
//  // sum / count. NOTE(review): yields NaN when no non-null rows were seen (count = 0).
//  override def evaluate(buffer: Row): Any = buffer.getLong(0).toDouble/buffer.getLong(1).toDouble
//}
//
//// Input record type (the typed Aggregator's IN parameter).
//case class User01(username:String,age:Long)
//// Mutable aggregation buffer type (the typed Aggregator's BUF parameter).
//case class AgeBuffer(var sum:Long, var count:Long)
//
///**
// * Typed average-age aggregator, usable as a TypedColumn on Dataset[User01].
// * 1. Extend org.apache.spark.sql.expressions.Aggregator with type parameters:
// *    IN:  input type  = User01
// *    BUF: buffer type = AgeBuffer
// *    OUT: output type = Double
// * 2. Override the abstract methods below.
// */
//class MyAveragUDAF1 extends Aggregator[User01,AgeBuffer,Double] {
//  // Identity element: zero sum, zero count.
//  override def zero: AgeBuffer = {
//    AgeBuffer(0L,0L)
//  }
//
//  // Fold one input record into the buffer (mutates and returns b).
//  override def reduce(b: AgeBuffer, a: User01): AgeBuffer = {
//    b.sum = b.sum + a.age
//    b.count = b.count + 1
//    b
//  }
//
//  // Combine two partial buffers (mutates and returns b1).
//  override def merge(b1: AgeBuffer, b2: AgeBuffer): AgeBuffer = {
//    b1.sum = b1.sum + b2.sum
//    b1.count = b1.count + b2.count
//    b1
//  }
//
//  // Final result: sum / count.
//  // NOTE(review): yields NaN when count is 0 (empty Dataset).
//  override def finish(reduction: AgeBuffer): Double = {
//    reduction.sum.toDouble/reduction.count
//  }
//
//  // Encoder for the case-class buffer.
//  override def bufferEncoder: Encoder[AgeBuffer] = {
//    Encoders.product
//  }
//
//  // Encoder for the primitive Double output.
//  override def outputEncoder: Encoder[Double] = {
//    Encoders.scalaDouble
//  }
//}
//
//// Aggregation buffer: sum = total of ages folded in so far, cnt = row count.
//case class Buff( var sum:Long, var cnt:Long )
//// Typed average aggregator over a raw Long age column; registered for SQL
//// usage via functions.udaf("avgAge") in SqlTest.main (approach 2).
//class MyAvgAgeUDAF3 extends Aggregator[Long, Buff, Double]{
//  // Identity element: zero sum, zero count.
//  override def zero: Buff = Buff(0,0)
//
//  // Fold one age value into the buffer (mutates and returns b).
//  override def reduce(b: Buff, a: Long): Buff = {
//    b.sum += a
//    b.cnt += 1
//    b
//  }
//
//  // Combine two partial buffers (mutates and returns b1).
//  override def merge(b1: Buff, b2: Buff): Buff = {
//    b1.sum += b2.sum
//    b1.cnt += b2.cnt
//    b1
//  }
//  // Final result: sum / cnt. NOTE(review): yields NaN when cnt is 0.
//  override def finish(reduction: Buff): Double = {
//    reduction.sum.toDouble/reduction.cnt
//  }
//
//  // Encoder for the case-class buffer.
//  override def bufferEncoder: Encoder[Buff] = Encoders.product
//
//  // Encoder for the primitive Double output.
//  override def outputEncoder: Encoder[Double] = Encoders.scalaDouble
//}