package com.atguigu.qzpoint.acc

import java.sql.ResultSet

import com.atguigu.util.{DataSourceUtil, QueryCallback, SqlProxy}
import org.apache.spark.util.AccumulatorV2

import scala.collection.mutable

/**
 * description ：Accumulator for the number of user registrations, keyed by registration source.
 * author      ：剧情再美终是戏
 * mail        : 13286520398@163.com
 * date        ：Created in 2020/3/11 20:01
 * modified By ：
 * version:    : 1.0
 */
class RegNumAcc extends AccumulatorV2[(String, Int), mutable.Map[String, Int]] {

  // Lazily-built singleton used by getInstance to seed counts from MySQL.
  private var instanceAcc: RegNumAcc = null
  // Backing store: reg_source -> running registration count.
  private var map: mutable.Map[String, Int] = mutable.Map[String, Int]()

  /**
   * Spark zero-check: the accumulator is zero only when it holds no counts.
   * (Bug fix: previously hard-coded to `true`, which made Spark treat a
   * populated accumulator as zero.)
   */
  override def isZero: Boolean = map.isEmpty

  /**
   * Returns an independent copy carrying the current counts, per the
   * AccumulatorV2 contract. (Bug fix: previously returned a shared,
   * MySQL-seeded singleton via getInstance, so "copies" aliased one
   * instance and `copyAndReset` could not yield a zero value.)
   */
  override def copy(): RegNumAcc = {
    val acc = new RegNumAcc()
    acc.map ++= this.map
    acc
  }

  /**
   * Lazily builds a singleton accumulator seeded with historical counts from
   * the MySQL `reg_num` table (merged on top of the optional `map` argument).
   * On subsequent calls the singleton's counts are cleared for reuse.
   * NOTE(review): this deliberately shares one instance across callers —
   * confirm that is intended before relying on it from multiple threads.
   *
   * @param map optional initial counts to pre-load; may be null
   * @return the shared, seeded accumulator instance
   */
  def getInstance(map: mutable.Map[String, Int]): RegNumAcc = {
    synchronized(
      if (null == instanceAcc) {
        instanceAcc = new RegNumAcc()
        if (null != map) {
          instanceAcc.map ++= map
        }
        // Seed from the registration table so counts survive application restarts.
        val connection = DataSourceUtil.getConnection()
        try {
          SqlProxy.executeQuery(connection, "SELECT reg_source,reg_num from reg_num", null, new QueryCallback {
            override def process(rs: ResultSet): Unit = {
              while (rs.next()) {
                val key = rs.getString("reg_source")
                instanceAcc.map.put(key, instanceAcc.map.getOrElse(key, 0) + rs.getInt("reg_num"))
              }
            }
          })
        } finally {
          // Always release the connection, even if the query throws.
          DataSourceUtil.closeResource(null, null, connection)
        }
      } else {
        instanceAcc.map.clear() // After the first initialization, just clear for reuse.
      }
    )
    instanceAcc
  }

  /**
   * Clears all accumulated counts. (Bug fix: previously called `map.empty`,
   * which creates and discards a fresh map — the accumulator was never reset.)
   */
  override def reset(): Unit = map.clear()

  /** Adds `v._2` registrations to source `v._1`. */
  override def add(v: (String, Int)): Unit = {
    map.put(v._1, map.getOrElse(v._1, 0) + v._2)
  }

  /**
   * Merges another RegNumAcc's per-source counts into this one.
   * Unrelated accumulator types are ignored instead of throwing a MatchError.
   */
  override def merge(other: AccumulatorV2[(String, Int), mutable.Map[String, Int]]): Unit = other match {
    case reg: RegNumAcc =>
      for ((source, cnt) <- reg.map) {
        map.put(source, map.getOrElse(source, 0) + cnt)
      }
    case _ => // ignore unknown accumulator types
  }

  /** Current per-source registration counts (live view of the backing map). */
  override def value: mutable.Map[String, Int] = map
}
