package hbase.app

import com.google.gson.JsonParser
import org.apache.spark.sql.{Column, SparkSession}
import org.apache.spark.sql.functions.col
import scala.collection.JavaConversions._
import scala.collection.mutable

/**
  * Catalog-driven Top-N example: runs a SQL aggregation over a temp view and
  * sorts the result by a JSON-configured list of sort fields.
  *
  * Created on 2019-08-16 09:57.
  */
object SparkTopN {

  /**
    * Drives a Spark SQL query from a JSON "catalog": the catalog supplies the
    * SQL text, an optional row cap ("topn"), and an ordered list of sort
    * fields. Each sort entry may carry its own direction ("type": asc/desc);
    * entries without one fall back to the catalog-level "default_order"
    * (defaulting to "asc" when that is absent too).
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("SparkTopN")
      .master("local[*]")
      .getOrCreate()
    try {
      // Sample data: duplicated (name, id) pairs so count(*) varies per group.
      val list = List(
        ("xiaohe", "123"),
        ("xiaohe", "123"),
        ("xiaohe", "123"),
        ("xiaojiang", "456"),
        ("xiaojiang", "456"),
        ("xiaojiang", "456"),
        ("xiaohong", "789"),
        ("xiaojuan", "567"))

      val rdd = spark.sparkContext.makeRDD(list)
      import spark.implicits._
      val df = rdd.toDF("name", "id")
      df.createOrReplaceTempView("test")

      // Plain triple-quoted literal: no interpolation, so no `s` prefix needed.
      val catalog =
        """
          |{
          |	"sql": "select name, id, count(*) cnt from test group by name,id",
          |  "default_order" : "desc",
          |  "topn":10,
          |	"sort": [
          |   {
          |		"field": "cnt",
          |		"type": "desc"
          |	 },
          |   {
          |		"field": "id",
          |		"type": "asc"
          |	 },
          |   {
          |		"field": "name",
          |		"type": "desc"
          |	 }
          | ]
          |}
""".stripMargin

      // NOTE(review): `new JsonParser().parse` is deprecated in recent Gson
      // releases in favor of the static JsonParser.parseString — confirm the
      // Gson version on the classpath before switching.
      val catalogObj = new JsonParser().parse(catalog).getAsJsonObject
      val sql = catalogObj.get("sql").getAsString

      // Catalog-wide fallback direction for sort entries that omit "type".
      val defaultOrder =
        if (catalogObj.has("default_order")) catalogObj.get("default_order").getAsString
        else "asc"

      // Walk the "sort" array by index rather than via the deprecated
      // scala.collection.JavaConversions implicit wrapping, and build the
      // Column list immutably instead of mutating a (deprecated) MutableList.
      val sortArray = catalogObj.getAsJsonArray("sort")
      val sortColumns: Seq[Column] = (0 until sortArray.size).map { i =>
        val fieldObj = sortArray.get(i).getAsJsonObject
        val fieldName = fieldObj.get("field").getAsString
        val order =
          if (fieldObj.has("type")) fieldObj.get("type").getAsString
          else defaultOrder
        order match {
          case "desc" => col(fieldName).desc
          case "asc"  => col(fieldName).asc
          case _      => col(fieldName) // unknown direction: keep Spark's default
        }
      }

      val sorted = spark.sql(sql).sort(sortColumns: _*)
      // The catalog's "topn" was previously parsed but never applied; honor it
      // here. Absent "topn" means no row cap.
      val limited =
        if (catalogObj.has("topn")) sorted.limit(catalogObj.get("topn").getAsInt)
        else sorted
      limited.show()
    } finally {
      // Always release the local Spark context, even if the query fails.
      spark.stop()
    }
  }
}
