package com.atguigu.gmall.realtime.util

import java.util

import org.apache.kafka.common.TopicPartition
import org.apache.spark.streaming.kafka010.OffsetRange
import redis.clients.jedis.Jedis

object OffsetManager {

  /**
   * Reads the saved consumer offsets for a topic/consumer-group from Redis.
   *
   * Redis layout: a hash at key `offset:<topic>:<groupId>`, where each
   * field is a partition id and each value is the offset, both stored as
   * strings. Written via HSET, read via HGETALL; no expiration is set.
   *
   * @param topic   Kafka topic name
   * @param groupId Kafka consumer group id
   * @return map of TopicPartition -> committed offset; empty when nothing
   *         has been stored yet for this topic/group
   */
  def getOffset(topic: String, groupId: String): Map[TopicPartition, Long] = {
    val jedisClient: Jedis = RedisUtil.getJedisClient
    val offsetKey = s"offset:$topic:$groupId"
    // try/finally so the connection is returned to the pool even when
    // the Redis read throws (the original leaked the client on failure).
    val offsetMapFromRedis: util.Map[String, String] =
      try jedisClient.hgetAll(offsetKey)
      finally jedisClient.close()

    // Convert the Java map returned by Jedis into the structure the
    // Kafka direct stream expects (TopicPartition -> Long).
    import scala.collection.JavaConverters._
    offsetMapFromRedis.asScala.map {
      case (partitionId, offset) =>
        new TopicPartition(topic, partitionId.toInt) -> offset.toLong
    }.toMap
  }

  /**
   * Persists the end offsets of the given ranges into Redis so the next
   * batch can resume from them.
   *
   * @param topic        Kafka topic name
   * @param groupId      Kafka consumer group id
   * @param offsetRanges offset ranges of the batch just processed; each
   *                     range's `untilOffset` (exclusive end) is saved
   *                     under its partition id
   */
  def saveOffset(topic: String, groupId: String, offsetRanges: Array[OffsetRange]): Unit = {
    // Build the hash payload: partition id -> untilOffset, as strings.
    val offsetMap: util.Map[String, String] = new util.HashMap[String, String]()
    for (offsetRange <- offsetRanges) {
      offsetMap.put(offsetRange.partition.toString, offsetRange.untilOffset.toString)
    }

    // Redis rejects HSET with an empty field map (wrong number of
    // arguments), so skip the round-trip entirely when there is nothing
    // to save (e.g. an empty batch).
    if (!offsetMap.isEmpty) {
      val jedisClient: Jedis = RedisUtil.getJedisClient
      val offsetKey = s"offset:$topic:$groupId"
      println(s"写入偏移量结束点：$offsetMap")
      // try/finally so the connection is returned to the pool even when
      // the write fails.
      try jedisClient.hset(offsetKey, offsetMap)
      finally jedisClient.close()
    }
  }
}
