package com.haozhen.stream.homework

/**
  * @author haozhen
  * @email haozh@ync1.com
  * @date 2021/1/15  23:56
  *      Stores and retrieves Kafka consumer offsets in Redis.
  */
object RedisUtils {

  import org.apache.kafka.common.TopicPartition
  import org.apache.spark.streaming.kafka010.OffsetRange
  import redis.clients.jedis.{Jedis, JedisPool, JedisPoolConfig}

  import scala.collection.JavaConverters._

  // Redis connection settings.
  private val host = "127.0.0.1"
  private val port = 6379
  // Pool connection timeout in milliseconds (was a magic literal at the pool call site).
  private val timeoutMs = 1000

  private val config = new JedisPoolConfig
  config.setMaxIdle(5)
  config.setMaxTotal(10)

  private val pool = new JedisPool(config, host, port, timeoutMs)

  /** Borrows a connection from the pool; the caller MUST close it to return it to the pool. */
  private def getRedisConnection: Jedis = pool.getResource

  private val topicPrefix = "kafka:topic"

  /** Redis hash key for a (topic, consumer-group) pair: `kafka:topic:<topic>:<groupId>`. */
  private def getKey(topic: String, groupId: String): String = s"$topicPrefix:$topic:$groupId"

  /**
    * Reads the saved offsets for the given topics and consumer group from Redis.
    *
    * Each topic's offsets live in one Redis hash (key = [[getKey]]), whose fields are
    * partition numbers and whose values are the committed offsets, both stored as strings.
    *
    * NOTE(review): the method name keeps the original "Form" spelling ("From" was surely
    * intended) to stay source-compatible with existing callers.
    *
    * @param topics  topics whose offsets should be loaded
    * @param groupId Kafka consumer group id
    * @return a map from TopicPartition to the stored offset; topics with no stored
    *         hash simply contribute no entries
    */
  def getOffsetsFormRedis(topics: Array[String], groupId: String): Map[TopicPartition, Long] = {
    val connection = getRedisConnection
    try {
      topics.flatMap { topic =>
        val key = getKey(topic, groupId)
        connection.hgetAll(key).asScala.map {
          case (partition, offset) => new TopicPartition(topic, partition.toInt) -> offset.toLong
        }
      }.toMap
    } finally {
      // Return the connection to the pool even when hgetAll or the string->number
      // parsing throws; otherwise repeated failures would exhaust the pool (maxTotal = 10).
      connection.close()
    }
  }

  /**
    * Persists the given offset ranges for a consumer group to Redis.
    *
    * Ranges are grouped by topic and written with one HMSET per topic:
    * field = partition number, value = untilOffset (the next offset to consume).
    *
    * @param offsets offset ranges from the just-processed Kafka RDD/batch
    * @param groupId Kafka consumer group id
    */
  def saveOffsetsToRedis(offsets: Array[OffsetRange], groupId: String): Unit = {
    val connection = getRedisConnection
    try {
      offsets
        .map(range => (range.topic, (range.partition.toString, range.untilOffset.toString)))
        .groupBy(_._1)
        .foreach { case (topic, entries) =>
          val key = getKey(topic, groupId)
          val fields: java.util.Map[String, String] = entries.map(_._2).toMap.asJava
          connection.hmset(key, fields)
        }
    } finally {
      // Always release the pooled connection, even when a write fails.
      connection.close()
    }
  }

}
