import java.util

import org.apache.kafka.common.TopicPartition
import org.apache.spark.streaming.kafka010.OffsetRange
import redis.clients.jedis.{Jedis, JedisPool, JedisPoolConfig}

import scala.collection.mutable

/**
 * Persists and restores Kafka consumer offsets in Redis, keyed per (topic, groupId).
 *
 * Each topic's offsets are stored in a single Redis hash whose key is
 * "kafka:topic:<topic>,<groupId>"; hash fields are partition numbers and
 * values are the untilOffset, both as strings.
 */
object OffsetWithRedisUtils {
  val redisHost = "linux145"
  val redisPort = 6379

  // Connection-pool configuration for Redis.
  private val config = new JedisPoolConfig
  // Maximum number of idle connections kept in the pool.
  config.setMaxIdle(5)
  // Maximum total connections the pool may hand out.
  config.setMaxTotal(10)

  // 10000 ms connection timeout.
  private val pool = new JedisPool(config, redisHost, redisPort, 10000)

  /** Borrows a connection from the pool; caller must close() it to return it. */
  private def getRedisConnection: Jedis = pool.getResource

  private val topicPrefix = "kafka:topic"

  /** Redis hash key for a (topic, groupId) pair. */
  private def getKey(topic: String, groupId: String) = s"$topicPrefix:$topic,$groupId"

  /**
   * Reads the saved offsets for the given topics and consumer group from Redis.
   *
   * @param topics  Kafka topics to look up
   * @param groupId consumer group the offsets were saved under
   * @return map of TopicPartition -> offset; topics with no saved state
   *         contribute no entries (their Redis hash is empty)
   */
  def getOffsetsFromRedis(topics: Array[String], groupId: String): Map[TopicPartition, Long] = {
    val jedis: Jedis = getRedisConnection
    // close() in finally so the pooled connection is returned even when
    // hgetAll fails or a stored field/value is not parseable as a number.
    try {
      import scala.collection.JavaConverters._
      topics.flatMap { topic =>
        jedis.hgetAll(getKey(topic, groupId))
          .asScala
          .map { case (partition, offset) =>
            new TopicPartition(topic, partition.toInt) -> offset.toLong
          }
      }.toMap
    } finally {
      jedis.close()
    }
  }

  /**
   * Saves the given offset ranges to Redis, one hash per topic.
   *
   * @param offsets offset ranges produced by the current batch; untilOffset
   *                is stored so the next run resumes after this batch
   * @param groupId consumer group to save the offsets under
   */
  def saveOffsetsToRedis(offsets: Array[OffsetRange], groupId: String): Unit = {
    val jedis: Jedis = getRedisConnection
    try {
      import scala.collection.JavaConverters._
      offsets
        .map(range => (range.topic, (range.partition.toString, range.untilOffset.toString)))
        .groupBy(_._1)
        // foreach, not map: hmset is called only for its side effect.
        .foreach { case (topic, buffer) =>
          val key = getKey(topic, groupId)
          val fields: util.Map[String, String] = buffer.map(_._2).toMap.asJava
          jedis.hmset(key, fields)
        }
    } finally {
      // Always return the connection to the pool, even if hmset throws.
      jedis.close()
    }
  }
}
