package zuoye12

import org.apache.spark.SparkConf
import org.apache.spark.sql.{Encoder, Encoders, SparkSession, functions}
import org.apache.spark.sql.expressions.Aggregator


object Sparksql1 {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("app").setMaster("local[*]")

    val session = SparkSession.builder().config(conf).getOrCreate()
    import session.implicits._

    // One student per line: "id name age sex subject score", space-separated.
    val lines = session.sparkContext.textFile("src/main/java/zuoye12/student.txt")

    // NOTE(review): a line with a field count other than 6 throws MatchError here
    // (same as the original) — confirm the input file is always well-formed.
    lines.map { line =>
      line.split(" ") match {
        case Array(id, name, age, sex, subject, score) => (id, name, age, sex, subject, score)
      }
    }.toDF("id", "name", "age", "sex", "km", "chengji").createOrReplaceTempView("student")

    // UDF: mask the second character of a name, e.g. "王小明" -> "王*明".
    // Fix: the original called name.substring(2, ...) unconditionally, which threw
    // StringIndexOutOfBoundsException for names shorter than 2 characters (and NPE'd
    // on null); such names are now returned unchanged. Behavior for length >= 2 is
    // identical (substring(2) == substring(2, name.length)).
    session.udf.register("strlen", (name: String) => {
      if (name == null || name.length < 2) name
      else name.substring(0, 1) + "*" + name.substring(2)
    })

    session.sql("select strlen(name) from student").show()

    // Average score of male students, via the custom Aggregator registered as a UDAF.
    val udaf = new MyAveragUDAF1
    session.udf.register("he", functions.udaf(udaf))

    session.sql("select he(chengji) from student where sex == '男'").show()

    // Percentage of distinct female students among all distinct students.
    session.sql(
      """
        |with
        |s1 as (select 1 as n1,count(distinct name) c1 from student where sex="女"),
        |s2 as (select 1 as n2,count(distinct name) c2 from student )
        |select c1,c2,concat(s1.c1/s2.c2*100,"%") from s1 join s2 on s1.n1 = s2.n2
        |""".stripMargin
    ).show()

    // Fix: the original never released the session; stop it so local[*] threads
    // and the UI/port are cleaned up when main exits.
    session.stop()
  }


  // Aggregation buffer: running sum of scores and number of rows folded in so far.
  case class AgeBuffer(var sum: Long, var count: Long)

  /**
   * Type-safe average aggregator (`Aggregator[IN, BUF, OUT]` with IN = Long,
   * BUF = AgeBuffer, OUT = Double), registered in SQL as a UDAF via
   * `functions.udaf` and invoked as `he(...)` in `main`.
   */
  class MyAveragUDAF1 extends Aggregator[Long, AgeBuffer, Double] {

    /** Neutral element: nothing summed, nothing counted. */
    override def zero: AgeBuffer = AgeBuffer(0L, 0L)

    /** Fold one input value into a partition-local buffer. */
    override def reduce(b: AgeBuffer, a: Long): AgeBuffer =
      AgeBuffer(b.sum + a, b.count + 1L)

    /** Combine two partial buffers (one per partition). */
    override def merge(b1: AgeBuffer, b2: AgeBuffer): AgeBuffer =
      AgeBuffer(b1.sum + b2.sum, b1.count + b2.count)

    /**
     * Final result: the mean of all folded values.
     * NOTE(review): an empty group leaves count == 0, so this yields Double.NaN
     * (0.0 / 0) — same as the original; confirm that is acceptable downstream.
     */
    override def finish(buff: AgeBuffer): Double =
      buff.sum.toDouble / buff.count

    /** Product encoder serializes the case-class buffer between tasks. */
    override def bufferEncoder: Encoder[AgeBuffer] = Encoders.product

    override def outputEncoder: Encoder[Double] = Encoders.scalaDouble
  }

}
