package com.toutiao.vssue

import java.util.Properties

import java.util.concurrent.{Executors,LinkedBlockingQueue,TimeUnit}

import kafka.consumer.{Consumer,ConsumerConfig,KafkaStream}

import scala.collection.JavaConversions._
import scala.concurrent.duration._

import scala.util.hashing.MurmurHash3

import com.toutiao.model.KvDB

import org.apache.log4j.Logger
import org.apache.log4j.PropertyConfigurator

/**
 * Pulls messages from a Kafka topic and hands them out in batches with
 * at-least-once delivery: every message is written to a durable ack store
 * (`KvDB`) BEFORE it is placed on the in-memory queue, and is only removed
 * from that store when the caller invokes `ack`. On restart, `run`
 * re-enqueues whatever is still in the ack store, so a crash between
 * hand-out and `ack` causes re-delivery, never loss.
 *
 * NOTE(review): messages are keyed by `MurmurHash3.stringHash`, an Int —
 * two distinct messages with the same hash overwrite each other in both
 * the ack store and the returned batch map. Presumably acceptable for this
 * workload; confirm before relying on exact-once hand-out.
 *
 * @param zookeeper   ZooKeeper connect string for the Kafka cluster
 * @param topic       topic to consume
 * @param groupId     Kafka consumer group id
 * @param locationAck location of the KvDB used as the durable ack store
 */
class ConsumerStore(
      val zookeeper:String,
      val topic:String,
      val groupId:String,
      val locationAck:String
    ) {

  private val _log = Logger.getLogger(this.getClass.getName)

  _log.info("zk "+zookeeper)
  _log.info("tp "+topic)
  _log.info("gr "+groupId)

  private val prop = new Properties
  prop.put("zookeeper.connect",zookeeper)
  prop.put("group.id",groupId)

  // start from the earliest available offset when the group has no committed offset
  prop.put("auto.offset.reset","smallest")

  // durable record of in-flight messages; entries are cleared by `ack`
  val ackClient = new KvDB(locationAck)

  val streamNum = 8+1
  val queueSize = 1024
  val queueTimeout: FiniteDuration = 2.milliseconds

  private val consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(prop))

  // FIX: `run` submits `streamNum` stream readers PLUS one recovery task.
  // With a pool of exactly `streamNum` threads, one stream reader was
  // queued behind the recovery task; size the pool for all of them.
  private val executor = Executors.newFixedThreadPool(streamNum + 1)

  // bounded hand-off queue: `put` blocks when full, providing backpressure
  // against the Kafka readers
  private val queue = new LinkedBlockingQueue[(Long,String)](queueSize)

  // FIX: Integer.valueOf instead of the deprecated `new Integer(...)` constructor
  private val topicCountMap = Map(topic -> Integer.valueOf(streamNum))
  private val consumerMap = consumer.createMessageStreams(topicCountMap)
  private val streams = consumerMap.get(topic)

  /**
   * Drains one Kafka stream until it is exhausted: each message is keyed
   * by its MurmurHash3 hash, persisted to the ack store, then enqueued.
   * Blocks on `queue.put` when the queue is full.
   */
  private def consume(stream: KafkaStream[Array[Byte],Array[Byte]]):Unit = {
    val iterator = stream.iterator()
    while (iterator.hasNext) {
      val message = iterator.next.message
      val msg = new String(message)
      val i = MurmurHash3.stringHash(msg)
      // persist first: a crash between these two calls can only cause
      // re-delivery after restart, never a lost message
      ackClient.put(i,msg)
      queue.put((i,msg))
    }
  }

  /**
   * Starts consumption: first a one-off task that re-enqueues every entry
   * still in the ack store (tasks handed out but never acked before the
   * last shutdown), then one reader per Kafka stream.
   */
  def run: Unit = {
    executor.submit(new Runnable {
      override def run():Unit = {
        val xs = ackClient.getDB
        xs.foreach(queue.put)
        _log.info(s"get ${xs.size} pending tasks")
      }
    })

    streams.zipWithIndex.foreach {
      case (stream,streamId) => executor.submit(new Runnable {
        override def run():Unit = PromiseDone.promiseDone{
          _log.info(s"stream:$streamId start load data from kafka")
          consume(stream)
        }
      })
    }
  }

  /**
   * Takes up to `batchNum` tasks from the queue, waiting at most
   * `queueTimeout` per slot. May return fewer than `batchNum` entries:
   * timed-out polls yield null (filtered out) and duplicate hash keys
   * collapse when the pairs are folded into the result Map.
   */
  def get(batchNum: Int):Map[Long,String] = (1 to batchNum).map(_ =>
     queue.poll(queueTimeout.toMillis, TimeUnit.MILLISECONDS)).filter(_ != null).toMap

  /** Marks the given tasks as done by removing them from the ack store. */
  def ack(taskIds:Array[Long]): Unit = PromiseDone.promiseDone {
    taskIds.foreach(ackClient.remove)
  }

}

//import org.apache.log4j.PropertyConfigurator
//
//object TestTest extends App {
//  
//  val log = Logger.getLogger(this.getClass.getName)
//  
//  PropertyConfigurator.configure("log4j.properties")
//  
//  val zk = "10.4.30.135:2185,10.4.29.122:2185,10.4.29.211:2185,10.4.31.26:2185,10.4.31.55:2185/kafka-crawl"
//  val gi = "sim_title"
//  val tp = "crawl_article_sim_cluster"
//  
//  val x = new ConsumerStore(zk,tp,gi,"ack.db") // NOTE: current constructor also requires the locationAck argument
//  x.run
//  Thread.sleep(1000*10)
//  val xs = x.get(10)  
//  
//  log.info(xs.size)
//  xs.foreach(s => {
//    log.info(s)
//  })
//}