package com.king.spark.rdd.persist

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.expressions.Aggregator
import org.apache.spark.sql.{DataFrame, Dataset, Encoder, Encoders, Row, SparkSession, functions}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 * Demonstrates conversions between RDD, DataFrame and Dataset,
 * and registering UDF/UDAF functions for Spark SQL.
 *
 * @author wdl
 * @since 2022/11/22 16:34
 */
object Spark_RDD_persist {

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("WordCount")

    // Build one SparkSession and reuse its underlying SparkContext instead of
    // constructing a second entry point from the same conf.
    val session: SparkSession = SparkSession.builder().config(sparkConf).getOrCreate()
    val sparkContext: SparkContext = session.sparkContext
    import session.implicits._

    val value: RDD[(Int, String, Int)] = sparkContext.makeRDD(List((1001, "wdl", 28), (1002, "lxm", 27)))

    println("------------RDD 转 DF------------")
    // RDD -> DataFrame: column names are supplied explicitly.
    val frame: DataFrame = value.toDF("id", "name", "age")
    frame.show()

    println("------------RDD 转 DS------------")
    // RDD -> Dataset: map each tuple into the typed User case class first.
    val value2: Dataset[User] = value.map(v => User(v._1, v._2, v._3)).toDS()
    value2.show()

    println("------------DF 转 DS------------")
    // DataFrame -> Dataset: `as[User]` binds columns to User fields by name.
    val value1: Dataset[User] = frame.as[User]
    value1.show()

    println("------------DF 转 RDD------------")
    // DataFrame -> RDD: rows come back untyped, so fields are read by position.
    val rdd1: RDD[Row] = frame.rdd
    rdd1.map(v => (v.getInt(0), v.getString(1), v.getInt(2))).foreach(println)

    println("------------DS 转 RDD------------")
    // Dataset -> RDD keeps the element type (User).
    val rdd: RDD[User] = value1.rdd
    rdd.foreach(println)

    println("------------建表查询------------")
    // Register a plain UDF that prefixes a name.
    session.udf.register("prefixName", (name: String) => "prefix : " + name)

    // Register the strongly typed aggregator as a SQL-callable UDAF.
    session.udf.register("avgAge", functions.udaf(new My_Avg_Agg))

    // Create a temporary view over the DataFrame and query it with SQL.
    frame.createTempView("user")
    session.sql(" select avgAge(age) from user ").show()

    session.stop()
  }

  /** Typed row used for the RDD/Dataset conversions above. */
  case class User(id: Int, name: String, age: Int)

  /**
   * Strongly typed average aggregator.
   *
   * Buffer layout: `_1` = running sum of ages, `_2` = element count.
   */
  class My_Avg_Agg extends Aggregator[Int, (Int, Int), Double] {
    override def zero: (Int, Int) = (0, 0)

    // BUG FIX: the original returned (b._2 + a, b._1 + 1), swapping the sum
    // and count components on every element and corrupting both.
    override def reduce(b: (Int, Int), a: Int): (Int, Int) =
      (b._1 + a, b._2 + 1)

    // BUG FIX: the original combined counts as b2._2 + b2._2, dropping b1's
    // count and double-counting b2's.
    override def merge(b1: (Int, Int), b2: (Int, Int)): (Int, Int) =
      (b1._1 + b2._1, b1._2 + b2._2)

    // NOTE(review): yields NaN (0 / 0.0) if the buffer is still empty at
    // finish time — Double division does not throw. Confirm whether an
    // empty-input result other than NaN is desired.
    override def finish(reduction: (Int, Int)): Double =
      reduction._1 / reduction._2.toDouble

    override def bufferEncoder: Encoder[(Int, Int)] = Encoders.tuple(Encoders.scalaInt, Encoders.scalaInt)

    override def outputEncoder: Encoder[Double] = Encoders.scalaDouble
  }
}
