package com.atguigu.gmall.realtime.util

import java.util

import org.apache.kafka.common.TopicPartition
import org.apache.spark.streaming.kafka010.OffsetRange
import redis.clients.jedis.Jedis

import scala.collection.mutable

object OffsetManager {

  /** Builds the Redis key under which offsets for (topic, groupId) are stored. */
  private def buildOffsetKey(topic: String, groupId: String): String =
    "offset:" + topic + ":" + groupId

  /**
   * Loads the committed Kafka offsets for the given topic / consumer group from Redis.
   *
   * Redis layout: type = hash, key = offset:[topic]:[groupId],
   * field = partition id, value = offset.
   * Written with hset/hmset, read with hgetAll; no TTL — offsets are kept permanently.
   *
   * @param topic   Kafka topic name
   * @param groupId Kafka consumer group id
   * @return map of TopicPartition -> committed offset; empty if nothing stored yet
   */
  def getOffset(topic: String, groupId: String): Map[TopicPartition, Long] = {
    val jedis: Jedis = RedisUtil.getJedisClient
    try {
      val offsetMap: util.Map[String, String] = jedis.hgetAll(buildOffsetKey(topic, groupId))
      // Convert the Java hash into a Scala Map keyed by TopicPartition.
      import scala.collection.JavaConverters._
      val offsetMapForKafka: Map[TopicPartition, Long] = offsetMap.asScala.map {
        case (partitionId, offset) =>
          new TopicPartition(topic, partitionId.toInt) -> offset.toLong
      }.toMap
      println("加载redis中的偏移量：" + offsetMapForKafka)
      offsetMapForKafka
    } finally {
      // Return the connection to the pool even if a Redis call throws.
      jedis.close()
    }
  }

  /**
   * Persists the end offsets ("untilOffset") of a processed batch to Redis.
   *
   * Skips the write entirely for an empty batch: writing an empty map to a
   * Redis hash is a Jedis error, and there is nothing to commit anyway.
   *
   * @param topic        Kafka topic name
   * @param groupId      Kafka consumer group id
   * @param offsetRanges per-partition offset ranges of the batch just processed
   */
  def saveOffset(topic: String, groupId: String, offsetRanges: Array[OffsetRange]): Unit = {
    // Guard: an idle batch has no ranges; hset with an empty map would throw.
    if (offsetRanges.nonEmpty) {
      // Re-shape the ranges into the Map[String, String] form Redis hashes expect.
      val redisOffsetMap = new util.HashMap[String, String]()
      for (offsetRange <- offsetRanges) {
        redisOffsetMap.put(offsetRange.partition.toString, offsetRange.untilOffset.toString)
      }

      val jedis: Jedis = RedisUtil.getJedisClient
      try {
        jedis.hset(buildOffsetKey(topic, groupId), redisOffsetMap)
        println("向redis写入偏移量：" + redisOffsetMap)
      } finally {
        // Always release the connection, even on write failure.
        jedis.close()
      }
    }
  }
}
