package com.feidee.fd.sml.algorithm.forecast


import java.{lang, util}

import com.feidee.fd.sml.algorithm.util.ToolClass
import com.feidee.fd.tensorflow.example._
import com.feidee.fd.tensorflow.framework.{DataType, TensorProto, TensorShapeProto}
import com.feidee.fd.tensorflow.wapper.TFGrpcPredict
import com.feidee.fdspark.transformer.FieldInfo
import com.google.protobuf.ByteString
import org.apache.spark.sql.{DataFrame, SparkSession}
import tensorflow.serving._

import scala.collection.JavaConversions


/**
  * TensorFlow-Serving based forecaster: converts raw row maps into serialized
  * tf.Example protos, sends them to a TF-Serving instance over gRPC and decodes
  * the classification ("classes"/"scores") or regression ("outputs") tensors.
  *
  * @param hosts     TF-Serving hosts (the first one is only used in log messages)
  * @param port      TF-Serving gRPC port
  * @param modelName name of the served model
  * @param version   model version to query
  * @param fieldInfo per-field metadata: head = field type (see [[FieldInfo]]),
  *                  last = default value used when the field is missing or null
  * @Author: xiongjun
  * @Date: 2019/5/16 10:47
  */
class TensorflowForecast(val hosts: Array[String], val port: Int, val modelName: String, val version: Int, val fieldInfo: Map[String, Array[String]]) extends Forecast[Any] {
  val grpcPredict: TFGrpcPredict = new TFGrpcPredict(hosts, port, modelName, version)

  /** Builds a bytes-list Feature from UTF-8 encoded string values. */
  private def bytesFeature(values: Seq[String]): Feature = {
    val builder = BytesList.newBuilder
    values.foreach(v => builder.addValue(ByteString.copyFromUtf8(v)))
    Feature.newBuilder.setBytesList(builder.build).build
  }

  /** Builds an int64-list Feature. */
  private def int64Feature(values: Seq[Long]): Feature = {
    val builder = Int64List.newBuilder
    values.foreach(v => builder.addValue(v))
    Feature.newBuilder.setInt64List(builder.build).build
  }

  /** Builds a float-list Feature. */
  private def floatFeature(values: Seq[Float]): Feature = {
    val builder = FloatList.newBuilder
    values.foreach(v => builder.addValue(v))
    Feature.newBuilder.setFloatList(builder.build).build
  }

  /**
    * Converts one raw row into tf.Example features according to `fieldInfo`.
    * A missing key or a null value falls back to the field's declared default
    * (the last element of the field's info array).
    *
    * @throws Exception when a field declares an unknown type
    */
  def mapToFeatureMap(dataMap: Map[String, Any]): Map[String, Feature] = {
    fieldInfo.map { case (field, info) =>
      // Option(...) folds both "key absent" and "value == null" into None.
      val raw = Option(dataMap.getOrElse(field, null))
      def orDefault: String = raw.map(_.toString).getOrElse(info.last)
      val feature = info.head match {
        case FieldInfo.STRING =>
          bytesFeature(Seq(orDefault))
        case FieldInfo.DOUBLE =>
          floatFeature(Seq(orDefault.toFloat))
        case FieldInfo.INT =>
          // Int64List holds 64-bit values; parse as Long (superset of the old toInt).
          int64Feature(Seq(orDefault.toLong))
        case FieldInfo.SEQUENCE =>
          bytesFeature(orDefault.split(","))
        case FieldInfo.SEQUENCE_INT =>
          int64Feature(orDefault.split(",").map(_.toLong))
        case FieldInfo.SEQUENCE_FLOAT =>
          floatFeature(orDefault.split(",").map(_.toFloat))
        case FieldInfo.ARRAY =>
          // Accepts either a comma-separated string or an actual sequence.
          raw match {
            case Some(s: String)   => bytesFeature(s.split(","))
            case Some(seq: Seq[_]) => bytesFeature(seq.map(_.toString))
            case None              => bytesFeature(info.last.split(","))
            case Some(other) =>
              // Was a blind asInstanceOf cast that threw ClassCastException; fail explicitly instead.
              throw new Exception(s"$field: unsupported ARRAY value type ${other.getClass.getName}")
          }
        case _ =>
          throw new Exception(s"$field field info error,please check $modelName $version's field info")
      }
      field -> feature
    }
  }

  /** Wraps a single row into a tf.Example proto. */
  def createExample(data: Map[String, Any]): Example = {
    val features = Features.newBuilder()
      .putAllFeature(JavaConversions.mapAsJavaMap(mapToFeatureMap(data)))
      .build()
    Example.newBuilder().setFeatures(features).build()
  }

  /** Serializes all rows into tf.Examples and builds the gRPC predict request. */
  private def buildRequest(dataMaps: Seq[Map[String, Any]]): Predict.PredictRequest = {
    val examples = JavaConversions.seqAsJavaList(dataMaps.map(data => createExample(data).toByteString))
    val shape = TensorShapeProto.newBuilder()
      .addDim(TensorShapeProto.Dim.newBuilder().setSize(examples.size()).build())
      .build()
    val tensor = TensorProto.newBuilder
      .addAllStringVal(examples)
      .setDtype(DataType.DT_STRING)
      .setTensorShape(shape)
      .build
    Predict.PredictRequest.newBuilder()
      .setModelSpec(grpcPredict.getModelSpec)
      .putInputs("inputs", tensor)
      .build
  }

  /**
    * Runs classification against TF-Serving and returns, per input row, the
    * topn labels mapped to their probabilities (rounded to 8 decimal places).
    * Returns an empty array when the serving response is null.
    *
    * @param dataMaps  rows to score
    * @param topn      number of labels to keep per row
    * @param preModels optional preprocessing models; only [[MleapForecast]]
    *                  instances are applied, in declaration order
    */
  def predictProbabilitiesWithTF(dataMaps: Seq[Map[String, Any]], topn: Int, preModels: Array[Forecast[Any]]): Array[Map[String, Double]] = {
    val start = System.currentTimeMillis()
    var rows = dataMaps
    if (preModels != null) {
      preModels.foreach {
        case mleap: MleapForecast => rows = mleap.predict(rows)
        case _                    => // other pre-model types are not applied here
      }
    }
    // Single-threaded request/response round trip.
    val response = grpcPredict.predict(buildRequest(rows))

    val result = if (response != null) {
      val outputs = response.getOutputsMap
      val classTensor = outputs.get("classes")
      // Number of labels per row, taken from the second tensor dimension.
      val labelNum = classTensor.getTensorShape.getDim(1).getSize.toInt

      // Some models omit string labels; synthesize "0".."labelNum-1" per row.
      var labelBytes = classTensor.getStringValList
      if (labelBytes == null || labelBytes.isEmpty) {
        val synthesized = new util.ArrayList[ByteString]()
        for (_ <- dataMaps.indices; label <- 0 until labelNum) {
          synthesized.add(ByteString.copyFromUtf8(label.toString))
        }
        labelBytes = synthesized
      }

      val scores = outputs.get("scores").getFloatValList

      // Chop the flat label/score lists into one segment per input row.
      val labelRows = JavaConversions.asScalaBuffer(labelBytes).sliding(labelNum, labelNum)
      val scoreRows = JavaConversions.asScalaBuffer(scores).sliding(labelNum, labelNum)

      labelRows.zip(scoreRows).map { case (labels, probs) =>
        // One (label -> probability) entry per class for this row.
        val rowMap = labels.zip(probs).map { case (label, prob) =>
          label.toStringUtf8 -> prob.toDouble
        }.toMap
        val top = new ToolClass().sortMapValAndTakeTopn(rowMap, topn)
        top.map { case (label, prob) => label -> prob.formatted("%.8f").toDouble }
      }.toArray
    } else {
      Array[Map[String, Double]]()
    }

    val singlePredictTime = System.currentTimeMillis() - start
    logInfo(s"${hosts.head}:$port single thread predict total time: $singlePredictTime ms")
    result
  }

  // Spark session / model paths are unused by the TF gRPC path.
  override val spark: SparkSession = null

  override val paths: Seq[String] = null

  override def load(): Array[Any] = ???

  /**
    * Returns the topn predicted labels and their probabilities from an
    * already-predicted DataFrame. Not implemented for the TF-Serving path.
    *
    * @param prediction already-predicted raw DataFrame
    * @param topn       as it indicates
    */
  override def computeProbability(prediction: DataFrame, topn: Int): DataFrame = ???

  /**
    * Predicts with the served model and returns probability maps.
    *
    * @param maps rows to score
    * @param topn number of labels to keep per row
    */
  override def predictProbabilityMaps(maps: Seq[Map[String, Any]], topn: Int, preModels: Array[Forecast[Any]]): Array[Map[String, Double]] = {
    predictProbabilitiesWithTF(maps, topn, preModels)
  }

  /**
    * Runs regression against TF-Serving and returns one real value per row
    * (the "outputs" tensor). Returns an empty array on a null response.
    *
    * @param dataMaps rows to score
    */
  override def getPredictionMaps(dataMaps: Seq[Map[String, Any]]): Array[Double] = {
    val start = System.currentTimeMillis()
    // Single-threaded request/response round trip (shares buildRequest with classification).
    val response = grpcPredict.predict(buildRequest(dataMaps))

    val result = if (response != null) {
      val floats = response.getOutputsMap.get("outputs").getFloatValList
      JavaConversions.asScalaBuffer(floats).map(_.toDouble).toArray
    } else {
      logWarning("预测结果为空--->null")
      Array[Double]()
    }

    val singlePredictTime = System.currentTimeMillis() - start
    logInfo(s"${hosts.head}:$port single thread predict total time: $singlePredictTime ms")
    result
  }

  // override def getPredictionJavaList(javaMaps: util.List[util.Map[String, Any]]): util.List[lang.Double] = ???

  /** Default prediction entry point: top 5 labels, no preprocessing models. */
  override def predict(maps: Seq[Map[String, Any]]): Array[Map[String, Any]] = {
    predictProbabilitiesWithTF(maps, 5, null).map(_.map { case (k, v) => k -> (v: Any) })
  }
}