package com.yongche.kafka

import kafka.common.TopicAndPartition
import org.apache.spark.streaming.kafka.OffsetRange

import scala.util.parsing.json.{JSON, JSONArray, JSONObject}

/**
  * Created by zhangzhikuan on 16/1/28.
  */
object OffsetJson {

  /**
    * Parses a JSON string (as produced by [[toJson]]) back into a map of
    * Kafka topic/partition -> offset, suitable for resuming a direct stream.
    *
    * Expects a JSON array of objects, each carrying at least the keys
    * "index" (partition number), "untilOffset" and "topic".
    *
    * @param jsonStr JSON array string to parse
    * @return map from TopicAndPartition to the offset to resume from
    * @throws RuntimeException if the string cannot be parsed or a required
    *                          key is missing (error messages kept as-is for
    *                          compatibility with existing log scraping)
    */
  def fromJson(jsonStr: String): Map[TopicAndPartition, Long] = {
    val parsed = JSON.parseFull(jsonStr).getOrElse {
      val err = s"解析JSON##${jsonStr}##失败"
      println(err)
      throw new RuntimeException(err)
    }
    val entries = parsed.asInstanceOf[List[Map[String, Any]]]

    // Extracts a required key from one JSON object, converting the raw value;
    // fails fast with the given message when the key is absent. Replaces the
    // three copy-pasted Some/None match blocks of the original.
    def required[A](m: Map[String, Any], key: String, errMsg: String)(convert: Any => A): A =
      m.get(key) match {
        case Some(value) => convert(value)
        case None        => throw new RuntimeException(errMsg)
      }

    // Build the result immutably instead of via a mutable.Map + put + toMap.
    entries.map { m =>
      // scala.util.parsing.json parses every JSON number as Double,
      // hence the Double -> Int/Long narrowing below.
      val index       = required(m, "index", "index不存在")(_.asInstanceOf[Double].toInt)
      val untilOffset = required(m, "untilOffset", "offset不存在")(_.asInstanceOf[Double].toLong)
      val topic       = required(m, "topic", "topic不存在")(_.asInstanceOf[String])

      // The previous run's untilOffset becomes the next run's fromOffset.
      TopicAndPartition(topic, index) -> untilOffset
    }.toMap
  }

  /**
    * Serializes the offset ranges of a processed batch into a JSON array
    * string, one object per range. "index" carries the partition number;
    * both fromOffset and untilOffset are recorded, though only
    * "untilOffset" is read back by [[fromJson]].
    *
    * @param offsetRanges ranges consumed by the last Spark Streaming batch
    * @return JSON array string representation
    */
  def toJson(offsetRanges: Array[OffsetRange]): String = {
    JSONArray(
      offsetRanges.map { range =>
        JSONObject(Map(
          "index"       -> range.partition,
          "untilOffset" -> range.untilOffset,
          "fromOffset"  -> range.fromOffset,
          "topic"       -> range.topic
        ))
      }.toList
    ).toString()
  }
}
