package gbench.tartarus.hipponoe.finance.acct

import java.util.concurrent.Executors

import gbench.common.tree.LittleTree.IRecord.REC
import gbench.common.tree.LittleTree.Json.json2obj
import gbench.common.tree.LittleTree.{IRecord, Json}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.util.AccumulatorV2

import scala.collection.JavaConverters._

/**
 * Local Kafka setup (Windows) needed before running this app:
 *
 * set kafka_home=D:\sliced\develop\kafka_2.13-2.5.0\
 *
 * # maven
 * # zookeeper
 * %kafka_home%bin/windows/zookeeper-server-start %kafka_home%config/zookeeper.properties
 *
 * # kafka server
 * %kafka_home%bin/windows/kafka-server-start %kafka_home%config/server.properties
 *
 */
object AccountingApp {

  // Kafka consumer configuration for the direct stream below.
  // NOTE(review): auto-commit is disabled but offsets are never committed
  // manually either, and "auto.offset.reset" is "earliest" — restarts will
  // replay the topic from the beginning; confirm that is intended.
  val kafkaParams: collection.mutable.Map[String, Object] = collection.mutable.Map(
    "bootstrap.servers" -> "127.0.0.1:9092",
    "key.deserializer" -> classOf[StringDeserializer],
    "value.deserializer" -> classOf[StringDeserializer],
    "group.id" -> "1",
    "auto.offset.reset" -> "earliest",
    "enable.auto.commit" -> (false: java.lang.Boolean)
  )

  val topics: Array[String] = Array("erp-accounting2", "topicB") // message topics to subscribe to

  /**
   * Trial balance: consumes accounting vouchers from Kafka, tags each journal
   * entry with its owning entity, groups by (entityid, acctid) and folds the
   * signed amounts into the shared `acct_balance` accumulator, while a side
   * thread prints the running totals every 5 seconds. Blocks until the
   * streaming context terminates.
   *
   * http://spark.apache.org/docs/latest/streaming-kafka-0-10-integration.html
   */
  def trialBalance(): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("acctApp")
    val ssc = new StreamingContext(conf, Seconds(1)) // one micro-batch per second
    val stream = KafkaUtils.createDirectStream[String, String](ssc, PreferConsistent, Subscribe[String, String](topics, kafkaParams))
    val sc = ssc.sparkContext

    sc.register(acct_balance) // register the accumulator with Spark before the job starts

    // Reporting thread: periodically dumps the accumulator contents to stdout.
    val pool = Executors.newFixedThreadPool(1) // single-thread pool for the reporter
    pool.execute(() => { // start the reporting thread
      while (true) {
        Thread.sleep(5000)
        println("\n----------------------acct_balance--------------------------------")
        acct_balance.value.foreach(k => {
          println(k)
        })
        println("-------------------------------------------------------------------")
      } // while
    }) // Executors

    // Grouped accounting of the journal entries.
    stream.map(record => (record.key, record.value))
      .foreachRDD((rdd, _) => { // rdd, time
        rdd.flatMap({ case (_, value) => // key, value
          if (value != null && Json.isJson(value)) {
            val rec = json2obj(value, classOf[IRecord])
            // presumably "entries" holds the voucher's journal lines as a JSON
            // array — TODO confirm the IRecord.lla contract
            val ll: java.util.List[IRecord] = rec.lla("entries", (e: Object) => json2obj(e, classOf[IRecord]))
            val tt: collection.mutable.Buffer[IRecord] = ll.asScala

            // tag each entry with the owning entity id ("-" when absent)
            tt.map(e => e.set("entityid", rec.get("entityid", "-")))
          } else {
            List() // skip null or non-JSON payloads
          }
        }).groupBy(e => (e.str("entityid"), e.str("acctid"))).map(e => {
          // Fold each group into (signed amount sum, entry count):
          // drcr == 0 is treated as debit (+amount), anything else as credit (-amount).
          val line = e._2.foldLeft((0d, 0L))((acc, r) => (acc._1 + (if (r.dbl("drcr") == 0) 1 else -1) * r.dbl("amount"), acc._2 + 1))
          val rb = IRecord.rb("entityid,acctid,sum,count")
          val r = rb.build(e._1._1, e._1._2, line._1.asInstanceOf[Object], line._2)
          acct_balance.add(r) // fold the group result into the accumulator
        }).collect()
      })

    ssc.start()
    ssc.awaitTermination()
    pool.shutdown(); // stop the reporting thread (only reached after the context terminates)
  }

  /**
   * One-shot Kafka read via the structured API. NOTE(review): this uses
   * `spark.read` (batch), not `readStream` — it reads whatever is currently
   * in the topic once and prints each row, despite the streaming-integration
   * link below; confirm whether a continuous stream was intended.
   *
   * http://spark.apache.org/docs/latest/structured-streaming-kafka-integration.html
   */
  def trialBalance2(): Unit = {

    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("trialBalance2")
      .getOrCreate()

    val df = spark
      .read
      .format("kafka")
      .option("kafka.bootstrap.servers", kafkaParams("bootstrap.servers").asInstanceOf[String])
      .option("subscribe", topics(0))
      //.option("startingOffsets", """{"topic1":{"0":23,"1":-2},"topic2":{"0":-2}}""")
      //.option("endingOffsets", """{"topic1":{"0":50,"1":-1},"topic2":{"0":-1}}""")
      .load()

    df.selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)").foreach(row => {
      println(row)
    })
  }


  /**
   * Entry point: prints two sample records, then runs the streaming trial
   * balance (which blocks until termination).
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    println(IRecord.rb("name,sex").get("zhangsan", "boy"))
    println(REC("name", "lisi", "address", "shanghai"))
    this.trialBalance()
  }

  /**
   * Accessor for the shared trial-balance accumulator.
   *
   * @return the accumulator holding the running per-account balances
   */
  def getAcctBalance: AcctAccumlator = {
    acct_balance
  }

  val acct_balance = new AcctAccumlator() // accumulator backing the trial balance
}

/**
 * Trial-balance accumulator: maintains a map from (entityid, acctid) to an
 * IRecord carrying the running "sum" and "count" for that account.
 */
class AcctAccumlator extends AccumulatorV2[IRecord, collection.mutable.Map[(Long, Double), IRecord]] {

  /**
   * Backing store: (entityid, acctid) -> accumulated record.
   */
  private val container: collection.mutable.Map[(Long, Double), IRecord] = collection.mutable.Map()

  /**
   * @return true when nothing has been accumulated yet
   */
  override def isZero: Boolean = this.container.isEmpty

  /**
   * Snapshots this accumulator into a new instance.
   *
   * Locks `this` (the source) while copying. The original locked the freshly
   * created instance, which protects nothing — no other thread can see it
   * yet; Spark's own CollectionAccumulator.copy locks the source it reads.
   *
   * @return a new accumulator holding a copy of the current entries
   */
  override def copy(): AccumulatorV2[IRecord, collection.mutable.Map[(Long, Double), IRecord]] = {
    val _new = new AcctAccumlator
    this.synchronized {
      _new.container ++= this.container
    }
    _new
  }

  /**
   * Clears all accumulated entries.
   */
  override def reset(): Unit = container.clear()

  /**
   * Folds one record into the balance table: first record for an account is
   * stored as-is, later records have their "sum" and "count" added onto the
   * stored one.
   *
   * @param rec record with fields (entityid, acctid, sum, count)
   */
  override def add(rec: IRecord): Unit = {
    // The type ascription unboxes the lng/dbl results; the original's
    // asInstanceOf casts were redundant for that purpose.
    val k: (Long, Double) = (rec.lng("entityid"), rec.dbl("acctid"))
    container.get(k) match {
      case None =>
        container.put(k, rec)
      case Some(r) =>
        // computeIfPresent mutates the stored record in place, so re-putting
        // the same reference (as the original did) was a no-op and is dropped.
        r.computeIfPresent("sum", (d: Double) => d + rec.dbl("sum"))
        r.computeIfPresent("count", (d: Long) => d + rec.lng("count"))
    }
  }

  /**
   * Merges another accumulator's entries into this one (called by Spark on
   * the driver). Missing fields default to 0 via the two-argument getters.
   *
   * @param other a partial accumulator produced by a task
   */
  override def merge(other: AccumulatorV2[IRecord, collection.mutable.Map[(Long, Double), IRecord]]): Unit = {
    val c1 = container
    val c2 = other.value
    c2.foreach({ case ((entityid, acctid), r2) =>
      val r1 = c1.getOrElse((entityid, acctid), REC()) // empty record when this side has no entry yet
      val rb = IRecord.rb("entityid,acctid,sum,count")
      val merged = rb.get(entityid, acctid,
        r1.dbl("sum", 0d) + r2.dbl("sum", 0d),
        r1.lng("count", 0L) + r2.lng("count", 0L)
      )
      c1.put((entityid, acctid), merged)
    })
  }

  /**
   * Exposes the balance table as a java.util.Map whose keys are
   * (entityid, acctid) records.
   *
   * @return a new HashMap view of the current contents
   */
  def asJavaMap: java.util.Map[IRecord, IRecord] = {
    val javaMap = new java.util.HashMap[IRecord, IRecord]() // javaMap
    val key_rb = IRecord.rb("entityid,acctid") // key record builder
    this.value.foreach(k => javaMap.put(key_rb.get(k._1._1, k._1._2), k._2))
    javaMap
  }

  /**
   * @return the underlying mutable map of accumulated balances
   */
  override def value: collection.mutable.Map[(Long, Double), IRecord] = container
}