package streaming

import java.util

import org.apache.kafka.common.TopicPartition
import org.apache.spark.streaming.kafka010.OffsetRange
import redis.clients.jedis.{Jedis, JedisPool, JedisPoolConfig}

object OffsetWithRedisUtils {

  // Redis connection parameters
  private val redisHost = "linux121"
  private val redisPort = 6379

  // Connection-pool configuration
  private val config = new JedisPoolConfig
  // Maximum number of idle connections kept in the pool
  config.setMaxIdle(5)
  // Maximum total connections the pool will hand out
  config.setMaxTotal(10)

  // Pool with a 10-second connection timeout
  private val pool = new JedisPool(config, redisHost, redisPort, 10000)

  private def getRedisConnection: Jedis = pool.getResource

  private val topicPrefix = "kafka:topic"

  // Redis hash key layout: kafka:topic:<topic>:<groupid>
  private def getKey(topic: String, groupid: String): String = s"$topicPrefix:$topic:$groupid"

  /**
   * Reads the committed offsets for the given topics and consumer group from Redis.
   *
   * Each topic is stored as one Redis hash (key built by [[getKey]]) whose fields
   * are partition numbers and whose values are offsets, both as strings.
   *
   * @param topics  Kafka topics to look up
   * @param groupid consumer-group id used in the Redis key
   * @return map from TopicPartition to its last saved offset; topics with no
   *         saved state contribute no entries
   */
  def getOffsetsFromRedis(topics: Array[String], groupid: String): Map[TopicPartition, Long] = {
    val jedis: Jedis = getRedisConnection
    try {
      import scala.collection.JavaConverters._
      topics.flatMap { topic =>
        val key: String = getKey(topic, groupid)
        jedis.hgetAll(key).asScala.map {
          case (partition, offset) =>
            new TopicPartition(topic, partition.toInt) -> offset.toLong
        }
      }.toMap
    } finally {
      // Always return the connection to the pool, even if Redis access fails;
      // otherwise a failed read leaks a pooled connection (pool max is 10).
      jedis.close()
    }
  }

  /**
   * Persists the given offset ranges to Redis, one hash per topic
   * (field = partition number, value = untilOffset).
   *
   * @param offset  offset ranges produced by the current micro-batch
   * @param groupid consumer-group id used in the Redis key
   */
  def saveOffsetsToRedis(offset: Array[OffsetRange], groupid: String): Unit = {
    val jedis: Jedis = getRedisConnection
    try {
      import scala.collection.JavaConverters._
      offset
        .map(range => (range.topic, (range.partition.toString, range.untilOffset.toString)))
        .groupBy(_._1)
        .foreach {
          case (topic, buffer) =>
            val key: String = getKey(topic, groupid)
            val maps: util.Map[String, String] = buffer.map(_._2).toMap.asJava
            jedis.hmset(key, maps)
        }
    } finally {
      // Return the connection to the pool even when hmset throws
      jedis.close()
    }
  }

}
