package com.fulu.game.bigdata.realtime.sink

import java.io.IOException
import java.util.concurrent.atomic.AtomicLong
import java.util.concurrent.{Executors, ScheduledExecutorService, ScheduledFuture, TimeUnit}

import com.fulu.game.bigdata.realtime.entity.Sink
import com.fulu.game.bigdata.realtime.utils.TimeUtils
import org.apache.flink.configuration.Configuration
import org.apache.flink.runtime.util.ExecutorThreadFactory
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.types.Row
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName, TableNotFoundException}
import org.apache.hadoop.hbase.client.{BufferedMutator, BufferedMutatorParams, Connection, ConnectionFactory, Put}
import org.apache.hadoop.hbase.util.Bytes
import org.slf4j.{Logger, LoggerFactory}

/**
 * Flink sink that writes upsert-stream rows to HBase through a buffered mutator.
 *
 * Input elements are `(flag, row)` pairs: `flag == true` means upsert (write all
 * field values), `flag == false` means retraction (blank out the field columns).
 * Row field 0 is used as the row key; remaining fields map to columns of
 * `sink.getHbaseFamily` named after `sink.fields`.
 *
 * Buffered mutations are flushed when the pending count reaches
 * `bufferFlushMaxMutations` or on a fixed schedule every
 * `bufferFlushIntervalMillis` milliseconds.
 */
class HbaseSink(sink: Sink) extends RichSinkFunction[(Boolean, Row)] {

  private val LOG: Logger = LoggerFactory.getLogger(classOf[HbaseSink])

  // Target HBase table name, taken from the sink configuration.
  private val hTableName: String = sink.getSinkTable

  // Flush thresholds: client-side write buffer size, max buffered mutations,
  // and the periodic flush interval. Never reassigned, hence vals.
  private val bufferFlushMaxSizeInBytes: Long = 1024L
  private val bufferFlushMaxMutations: Long = 10L
  private val bufferFlushIntervalMillis: Long = 1000L

  private var connection: Connection = null
  private var mutator: BufferedMutator = null

  private var executor: ScheduledExecutorService = null
  private var scheduledFuture: ScheduledFuture[_] = null
  private var numPendingRequests: AtomicLong = null

  // Written by the task thread in close(), read by the flusher thread:
  // must be volatile so the flusher observes shutdown promptly.
  @volatile private var closed: Boolean = false

  /**
   * Opens the HBase connection and buffered mutator, and starts the periodic
   * flusher thread when a positive flush interval is configured.
   *
   * @throws RuntimeException if the table is missing or the connection fails.
   */
  override def open(parameters: Configuration): Unit = {
    LOG.info("start open ...")
    val config: org.apache.hadoop.conf.Configuration = HBaseConfiguration.create()
    try {
      this.numPendingRequests = new AtomicLong(0)
      if (null == connection) {
        this.connection = ConnectionFactory.createConnection(config)
      }
      // create a parameter instance, set the table name and write buffer size.
      val params: BufferedMutatorParams =
        new BufferedMutatorParams(TableName.valueOf(hTableName)).writeBufferSize(bufferFlushMaxSizeInBytes)
      this.mutator = connection.getBufferedMutator(params)
      if (bufferFlushIntervalMillis > 0) {
        this.executor = Executors.newScheduledThreadPool(1, new ExecutorThreadFactory("hbase-upsert-sink-flusher"))
        this.scheduledFuture = this.executor.scheduleWithFixedDelay(() => {
          try {
            if (!closed) {
              flush()
            }
          } catch {
            case e: Exception =>
              // Periodic flush is best-effort; a failed attempt is logged and
              // retried on the next tick or the next count-triggered flush.
              LOG.error("write data to hbase error, skip..", e)
          }
        }, bufferFlushIntervalMillis, bufferFlushIntervalMillis, TimeUnit.MILLISECONDS)
      }
    } catch {
      case tnfe: TableNotFoundException =>
        LOG.error("The table " + hTableName + " not found ", tnfe)
        throw new RuntimeException("HBase table '" + hTableName + "' not found.", tnfe)
      case ioe: IOException =>
        LOG.error("Exception while creating connection to HBase.", ioe)
        throw new RuntimeException("Cannot create connection to HBase.", ioe)
    }
    LOG.info("end open.")
  }

  /**
   * Buffers one mutation per record and flushes once the pending count
   * reaches the configured maximum.
   */
  override def invoke(value: (Boolean, Row), context: SinkFunction.Context[_]): Unit = {
    val (isUpsert, data) = value
    val put = if (isUpsert) buildUpsertPut(data) else buildRetractPut(data)
    // BUG FIX: the original called mutator.mutate(put) inside the field loop,
    // buffering the same Put reference once per column. Mutate exactly once
    // per record instead.
    mutator.mutate(put)
    // flush when the buffered number of mutations reaches the configured max.
    if (bufferFlushMaxMutations > 0 && numPendingRequests.incrementAndGet >= bufferFlushMaxMutations) {
      flush()
    }
  }

  /** Builds the upsert Put: row key from field 0 (optionally suffixed with the
   *  current hour), one column per remaining field, nulls written as "". */
  private def buildUpsertPut(data: Row): Put = {
    val currentHour = TimeUtils.currentHour()
    val key =
      if (sink.hbaseFillCurrentHour) String.valueOf(data.getField(0)) + currentHour
      else String.valueOf(data.getField(0))
    val put = new Put(Bytes.toBytes(key))
    if (sink.hbaseFillCurrentHour) {
      put.addColumn(Bytes.toBytes(sink.getHbaseFamily), Bytes.toBytes("hour"), Bytes.toBytes(String.valueOf(currentHour)))
    }
    for (i <- 1 until sink.fields.size()) {
      val fieldValue = if (data.getField(i) == null) "" else data.getField(i)
      put.addColumn(Bytes.toBytes(sink.getHbaseFamily), Bytes.toBytes(sink.fields.get(i).getName), Bytes.toBytes(String.valueOf(fieldValue)))
    }
    put
  }

  /** Builds the retraction Put: blanks every field column of the row key. */
  private def buildRetractPut(data: Row): Put = {
    val put = new Put(Bytes.toBytes(String.valueOf(data.getField(0))))
    for (i <- 1 until sink.fields.size()) {
      put.addColumn(Bytes.toBytes(sink.getHbaseFamily), Bytes.toBytes(sink.fields.get(i).getName), Bytes.toBytes(""))
    }
    put
  }

  private def flush(): Unit = {
    // BufferedMutator is thread-safe; called from both the task thread and
    // the scheduled flusher thread.
    mutator.flush()
    numPendingRequests.set(0)
  }

  override def close(): Unit = {
    closed = true
    // BUG FIX: cancel the periodic flusher BEFORE tearing down the mutator;
    // the original closed the mutator first, letting a late flush() tick race
    // against a closed/nulled mutator.
    if (scheduledFuture != null) {
      scheduledFuture.cancel(false)
      if (executor != null) {
        executor.shutdownNow()
      }
    }
    if (mutator != null) {
      // BufferedMutator.close() flushes any remaining buffered mutations.
      try mutator.close()
      catch {
        case e: IOException =>
          LOG.warn("Exception occurs while closing HBase BufferedMutator.", e)
      }
      this.mutator = null
    }
    if (connection != null) {
      try connection.close()
      catch {
        case e: IOException =>
          LOG.warn("Exception occurs while closing HBase Connection.", e)
      }
      this.connection = null
    }
  }

}
