package com.atguigu.gmall.realtime.util

import java.util

import org.apache.kafka.common.TopicPartition
import org.apache.spark.streaming.kafka010.OffsetRange
import redis.clients.jedis.Jedis

import scala.collection.mutable

object OffsetManager {

  /** Builds the Redis key for a (topic, consumer-group) pair.
    * Layout: hash at "OFFSET:topic:groupId", field = partition id, value = offset.
    * Written with HSET, read with HGETALL, no expiry.
    */
  private def offsetKey(topic: String, groupId: String): String = s"OFFSET:$topic:$groupId"

  /** Reads the last committed offsets for `topic` / `groupId` from Redis.
    *
    * @param topic   Kafka topic name
    * @param groupId Kafka consumer-group id
    * @return map of TopicPartition -> offset; empty when nothing has been saved yet
    *         (Jedis `hgetAll` returns an empty map for a missing key)
    */
  def getOffset(topic: String, groupId: String): Map[TopicPartition, Long] = {
    val jedisClient: Jedis = RedisUtil.getJedisClient
    try {
      val offsetMapFromRedis: util.Map[String, String] = jedisClient.hgetAll(offsetKey(topic, groupId))
      import scala.collection.JavaConverters._
      val offsetMap: Map[TopicPartition, Long] = offsetMapFromRedis.asScala.map { case (partitionId, offset) =>
        // fields are partition ids, values are offsets; toInt/toLong can throw on
        // corrupt data, hence the finally below so the connection is still released
        new TopicPartition(topic, partitionId.toInt) -> offset.toLong
      }.toMap
      println(s"已读取偏移量 $offsetMap")
      offsetMap
    } finally {
      // always return the connection to the pool, even when hgetAll or parsing fails
      jedisClient.close()
    }
  }

  /** Persists the end offsets of a processed batch to Redis.
    *
    * Each OffsetRange's `untilOffset` (exclusive end of the consumed range) is stored
    * per partition, overwriting any previous value for that partition.
    *
    * @param topic        Kafka topic name
    * @param groupId      Kafka consumer-group id
    * @param offsetRanges offset ranges of the batch just processed; empty batches are skipped
    */
  def saveOffset(topic: String, groupId: String, offsetRanges: Array[OffsetRange]): Unit = {
    // Guard: Jedis hset(key, map) throws on an empty field map, and an empty batch
    // has nothing to commit anyway — also avoids borrowing a connection for nothing.
    if (offsetRanges != null && offsetRanges.nonEmpty) {
      // Jedis hset requires a java.util.Map, so build one directly
      val offsetMap = new util.HashMap[String, String]()
      for (offsetRange <- offsetRanges) {
        offsetMap.put(offsetRange.partition.toString, offsetRange.untilOffset.toString)
      }
      val jedisClient: Jedis = RedisUtil.getJedisClient
      try {
        println(s"写入偏移量 $offsetMap")
        jedisClient.hset(offsetKey(topic, groupId), offsetMap)
      } finally {
        // release the connection even when hset fails
        jedisClient.close()
      }
    }
  }

}
