package com.atguigu.gmall.realtime.util

import java.util

import org.apache.kafka.common.TopicPartition
import org.apache.spark.streaming.kafka010.OffsetRange
import redis.clients.jedis.Jedis



object OffsetManager {

  /**
   * Reads the stored consumer offsets for a topic/group from Redis.
   *
   * Redis layout: hash at key `offset:[topic]:[groupId]`,
   * field = partition id, value = offset. No TTL is set.
   * Written with HSET, read back with HGETALL.
   *
   * @param topic   Kafka topic name
   * @param groupId Kafka consumer group id
   * @return map from TopicPartition to its stored offset; empty when nothing is stored yet
   */
  def getOffset(topic: String, groupId: String): Map[TopicPartition, Long] = {
    val jedisClient: Jedis = RedisUtil.getJedisClient
    val offsetKey = s"offset:$topic:$groupId"
    // Read inside try/finally so the connection is returned to the pool even
    // when the Redis call throws (the original leaked it on error).
    val offsetMapFromRedis: util.Map[String, String] =
      try {
        jedisClient.hgetAll(offsetKey)
      } finally {
        jedisClient.close()
      }
    println(s"加载偏移量：$offsetMapFromRedis")

    // Convert the Redis hash (partitionId -> offset, both strings) into the
    // structure Kafka's consumer API expects.
    import scala.collection.JavaConverters._
    offsetMapFromRedis.asScala.map { case (partitionId, offset) =>
      new TopicPartition(topic, partitionId.toInt) -> offset.toLong
    }.toMap
  }

  /**
   * Persists the end offsets (`untilOffset`) of the processed ranges to Redis.
   *
   * Same layout as [[getOffset]]: hash `offset:[topic]:[groupId]`,
   * field = partition id, value = end offset. No TTL is set.
   *
   * @param topic        Kafka topic name
   * @param groupId      Kafka consumer group id
   * @param offsetRanges offset ranges of the batch just processed; a null or
   *                     empty array is a no-op (HSET with zero field/value
   *                     pairs is a Redis protocol error)
   */
  def saveOffset(topic: String, groupId: String, offsetRanges: Array[OffsetRange]): Unit = {
    if (offsetRanges != null && offsetRanges.nonEmpty) {
      val offsetKey = s"offset:$topic:$groupId"

      // Build the hash payload: one field per partition, value = end offset.
      val offsetMap: util.Map[String, String] = new util.HashMap[String, String]()
      for (offsetRange <- offsetRanges) {
        offsetMap.put(offsetRange.partition.toString, offsetRange.untilOffset.toString)
      }
      println(s"写入偏移量结束点： $offsetMap")

      // Acquire the connection only after we know there is something to write,
      // and release it even if the write fails (the original leaked it on error).
      val jedisClient: Jedis = RedisUtil.getJedisClient
      try {
        jedisClient.hset(offsetKey, offsetMap)
      } finally {
        jedisClient.close()
      }
    }
  }

}
