package com.atguigu.gmall.realtime.util

import java.util

import org.apache.kafka.common.TopicPartition
import org.apache.spark.streaming.kafka010.OffsetRange
import redis.clients.jedis.Jedis

import scala.collection.mutable

object OffsetManager {

  /** Ad-hoc smoke test: read back whatever offsets are stored for TOPIC1 / GROUP101. */
  def main(args: Array[String]): Unit = {
    val partitionMap: Map[TopicPartition, Long] = getOffset("TOPIC1", "GROUP101")
    println(partitionMap)
  }

  /**
   * Reads the saved Kafka offsets for one topic + consumer group from Redis.
   *
   * Storage layout: a Redis hash at key `OFFSET:{topic}:{groupId}`,
   * field = partition number, value = offset (both stored as strings).
   * Read API: HGETALL; write API: HSET (see [[saveOffset]]). No expiration is set.
   *
   * @param topic   Kafka topic name
   * @param groupId Kafka consumer group id
   * @return map of TopicPartition -> offset; empty when nothing has been saved yet
   */
  def getOffset(topic: String, groupId: String): Map[TopicPartition, Long] = {
    val jedis: Jedis = RedisUtil.getJedisClient
    try {
      val offsetKey = s"OFFSET:$topic:$groupId"
      val offsetMapFromRedis: util.Map[String, String] = jedis.hgetAll(offsetKey)
      import scala.collection.JavaConverters._
      // toInt/toLong throw NumberFormatException on corrupt hash entries;
      // the finally below still returns the connection to the pool in that case.
      val offsetMap: Map[TopicPartition, Long] = offsetMapFromRedis.asScala.map { case (partition, offset) =>
        (new TopicPartition(topic, partition.toInt), offset.toLong)
      }.toMap
      println(s"读取偏移量： $offsetMap")
      offsetMap
    } finally {
      jedis.close() // always release the pooled connection, even on failure
    }
  }

  /**
   * Persists the end offsets of a processed batch to Redis.
   *
   * Writes each range's `untilOffset` (exclusive upper bound, i.e. the next
   * offset to consume) into the hash `OFFSET:{topic}:{groupId}` keyed by
   * partition number. A null/empty range array is a no-op: Jedis rejects an
   * HSET with an empty field map.
   *
   * @param topic        Kafka topic name
   * @param groupId      Kafka consumer group id
   * @param offsetRanges per-partition offset ranges of the batch just processed
   */
  def saveOffset(topic: String, groupId: String, offsetRanges: Array[OffsetRange]): Unit = {
    if (offsetRanges != null && offsetRanges.nonEmpty) {
      val offsetMap = new util.HashMap[String, String]()
      for (offsetRange <- offsetRanges) {
        offsetMap.put(offsetRange.partition.toString, offsetRange.untilOffset.toString)
      }
      val offsetKey = s"OFFSET:$topic:$groupId"
      val jedis: Jedis = RedisUtil.getJedisClient
      try {
        println(s"提交偏移量： $offsetMap")
        jedis.hset(offsetKey, offsetMap)
      } finally {
        jedis.close() // always release the pooled connection, even if HSET fails
      }
    }
  }
}
