package com.zxq.mall.realtime.util




import org.apache.kafka.common.TopicPartition
import org.apache.spark.streaming.kafka010.{ OffsetRange}
import redis.clients.jedis.Jedis



object OffsetManagerUtil {

  /**
   * Reads the saved Kafka offsets for a topic / consumer-group pair from Redis.
   *
   * Offsets live in a Redis hash keyed "offset:&lt;topic&gt;:&lt;groupId&gt;", mapping
   * partition id (string) -> offset (string).
   *
   * @param topicName Kafka topic name
   * @param groupId   Kafka consumer group id
   * @return map of TopicPartition -> offset; empty when nothing has been saved yet
   */
  def getOffset(topicName: String, groupId: String): Map[TopicPartition, Long] = {

    val offsetKey: String = "offset:" + topicName + ":" + groupId

    val jedis: Jedis = MyRedisUtils.getJedisClient

    // Fix: close the connection even when hgetAll throws; previously the
    // client leaked from the pool on any Redis error.
    val offsetMap: java.util.Map[String, String] =
      try {
        jedis.hgetAll(offsetKey)
      } finally {
        jedis.close()
      }

    import scala.collection.JavaConverters._
    val kafkaOffsetMap: Map[TopicPartition, Long] =
      offsetMap.asScala.map {
        case (partitionId, offset) =>
          println("读取分区偏移量：" + partitionId + ":" + offset)
          // Hash field is the partition id, value is the committed offset.
          (new TopicPartition(topicName, partitionId.toInt), offset.toLong)
      }.toMap
    kafkaOffsetMap
  }

  /**
   * Persists the end ("until") offsets of the processed ranges into Redis so
   * the next start-up can resume from them via [[getOffset]].
   *
   * No-op when `offsetRanges` is null or empty.
   *
   * @param topicName    Kafka topic name
   * @param groupId      Kafka consumer group id
   * @param offsetRanges offset ranges of the batch just processed
   */
  def saveOffset(topicName: String, groupId: String, offsetRanges: Array[OffsetRange]): Unit = {

    // Fix: was `offsetRanges.size > 0`; also removed the dead
    // `offsetMap != null` check on a freshly-constructed HashMap.
    if (offsetRanges != null && offsetRanges.nonEmpty) {

      val offsetMap: java.util.HashMap[String, String] = new java.util.HashMap[String, String]()

      for (offset <- offsetRanges) {
        // Store untilOffset: it is the position the next read should begin at.
        offsetMap.put(offset.partition.toString, offset.untilOffset.toString)

        println("保存分区:" + offset.partition + ":" + offset.fromOffset +
          "--->" + offset.untilOffset)
      }

      val offsetKey: String = "offset:" + topicName + ":" + groupId

      val jedis: Jedis = MyRedisUtils.getJedisClient
      // Fix: close the connection even when hmset throws.
      try {
        jedis.hmset(offsetKey, offsetMap)
      } finally {
        jedis.close()
      }
    }
  }


}
