package org.apache.spark.ml.classification

import com.feidee.fdhadoop.hdfs.HdfsUtils
import com.feidee.fdspark.transformer.ps.{PSColEliminator, PSMetaStorage}
import com.feidee.fdspark.transformer.{ColEliminator, MetaStorage}
import com.tencent.angel.sona.ml.PipelineModel
import com.tencent.angel.sona.ml.classification.CusAngelClassifierModel
import com.tencent.angel.sona.ml.feature.{IndexToString, StringIndexerModel}
import org.apache.hadoop.fs.Path
import org.apache.spark.ml
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.{Transformer, linalg}
import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ArrayBuffer

/**
  * Converts an Angel/PS [[com.tencent.angel.sona.ml.PipelineModel]] (PSMetaStorage,
  * StringIndexerModel, CusAngelClassifierModel, IndexToString, PSColEliminator stages)
  * into an equivalent native Spark ML [[org.apache.spark.ml.PipelineModel]].
  *
  * Lives in package org.apache.spark.ml.classification to reach the
  * private[ml] constructors of LogisticRegressionModel / LinearSVCModel.
  *
  * @author xiongjun
  * @date 2019/11/13 10:22
  * @reviewer
  */
object ConvertPSModel2Spark {

  /**
    * Locates the serialized CusAngelClassification stage directory under a saved
    * pipeline's `stages` folder.
    *
    * @param path root path of the saved PS pipeline model
    * @return HDFS path of the first stage whose directory name contains
    *         "CusAngelClassification"
    * @throws IllegalArgumentException if no such stage directory exists
    */
  def getModelPath(path: String): String = {
    val stagesPath = path + "/stages"
    // Note: rename lambda param so it does not shadow the `path` argument.
    val candidates = HdfsUtils.list(stagesPath).filter(p => p.getName.contains("CusAngelClassification"))
    require(candidates.nonEmpty, s"no CusAngelClassification stage found under $stagesPath")
    candidates.head.toString
  }

  /** Backward-compatible alias for the original (typo'd) method name. */
  @deprecated("use getModelPath instead", "2019-11")
  def getMoldePath(path: String): String = getModelPath(path)

  /**
    * Converts every stage of a PS pipeline into its Spark ML counterpart and
    * assembles them into a Spark `PipelineModel`.
    *
    * Stage mapping: PSMetaStorage -> MetaStorage, StringIndexerModel (sona) ->
    * StringIndexerModel (spark), CusAngelClassifierModel -> LogisticRegressionModel
    * or LinearSVCModel (chosen by the "modelName" parameter of the MetaStorage
    * stage), IndexToString (sona) -> IndexToString (spark), PSColEliminator ->
    * ColEliminator. Unrecognized stages and unknown model names are silently
    * skipped (preserves original behavior).
    *
    * @param spark         active SparkSession used to read the Angel weight files
    * @param path          root path of the saved PS pipeline model
    * @param pipelineModel the loaded PS pipeline model to convert
    * @return a native Spark PipelineModel with uid "" containing the converted stages
    */
  def convertPipeline(spark: SparkSession, path: String, pipelineModel: PipelineModel): ml.PipelineModel = {
    val stageBuff = ArrayBuffer[Transformer]()
    pipelineModel.stages.foreach {
      case meta: PSMetaStorage =>
        val metaStorage = new MetaStorage()
        metaStorage.setFields(meta.getFields)
          .setModelType(meta.getModelType)
          .setParameters(meta.getParameters)
        stageBuff.append(metaStorage)
      case stringIndexer: StringIndexerModel =>
        val indexerModel = new org.apache.spark.ml.feature.StringIndexerModel(stringIndexer.uid, stringIndexer.labels)
        indexerModel.setInputCol(stringIndexer.getInputCol)
          .setOutputCol(stringIndexer.getOutputCol)
          .setHandleInvalid(stringIndexer.getHandleInvalid)
        stageBuff.append(indexerModel)
      case _: CusAngelClassifierModel =>
        // The MetaStorage stage must have been converted before the classifier;
        // look it up explicitly instead of blindly casting stageBuff.head.
        val meta = stageBuff.collectFirst { case m: MetaStorage => m }.getOrElse(
          throw new IllegalStateException("MetaStorage stage must precede the classifier stage"))
        meta.getParameters.getOrElse("modelName", "") match {
          case "LR"  => stageBuff.append(convertLR(spark, getModelPath(path), meta))
          case "SVC" => stageBuff.append(convertSVC(spark, getModelPath(path)))
          case _     => // unknown model names are skipped, as before
        }
      case idx2str: IndexToString =>
        val idxToStr = new org.apache.spark.ml.feature.IndexToString()
        idxToStr.setInputCol(idx2str.getInputCol)
          .setOutputCol(idx2str.getOutputCol)
          .setLabels(idx2str.getLabels)
        stageBuff.append(idxToStr)
      case colEli: PSColEliminator =>
        val colEliminator = new ColEliminator()
        colEliminator.setDrops(colEli.getDrops)
        stageBuff.append(colEliminator)
      case _ => // other stage types are ignored, as before
    }
    // Use the Array[Transformer] constructor directly instead of building a
    // java.util.ArrayList through the deprecated JavaConversions implicits.
    new ml.PipelineModel("", stageBuff.toArray)
  }

  /**
    * Reads all "index,value" text part-files directly under `dir` (skipping
    * files ending in "meta") and returns the values ordered by index.
    *
    * Sorting by index makes the result deterministic even when the data is
    * split across several part-files (the original code only sorted weights,
    * not biases).
    */
  private def readIndexedValues(spark: SparkSession, dir: String): Array[Double] = {
    val partFiles = HdfsUtils.listFiles(dir).toArray[String](Array[String]())
    partFiles.filter(p => !p.endsWith("meta")).flatMap { partPath =>
      spark.sparkContext.textFile(partPath).map { line =>
        val indexAndValue = line.split(",")
        (indexAndValue(0).toInt, indexAndValue(1).toDouble)
      }.collect()
    }.sortBy(_._1).map(_._2)
  }

  /**
    * Parses the Angel model directory into (weights, biases).
    *
    * Directories whose name ends with "_weight" are treated as the weight
    * vector; every other non-.json directory is treated as the bias vector
    * (assumption carried over from the original code — TODO confirm the Angel
    * layout never contains a third kind of directory).
    *
    * @return (coefficients, intercept); either element is null if the
    *         corresponding directory was not found
    */
  def parseWeightAndBias(spark: SparkSession, angelModelPath: String): (Array[Double], Array[Double]) = {
    var coefficients: Array[Double] = null
    var intercept: Array[Double] = null
    HdfsUtils.list(angelModelPath).filter(p => !p.getName.endsWith(".json")).foreach { fpath =>
      if (fpath.getName.endsWith("_weight")) {
        coefficients = readIndexedValues(spark, fpath.toString)
      } else {
        intercept = readIndexedValues(spark, fpath.toString)
      }
    }
    (coefficients, intercept)
  }

  /**
    * Builds a Spark `LinearSVCModel` from the Angel SVC model saved at `path`,
    * copying the column names from the saved classifier.
    *
    * @throws IllegalArgumentException if weights or bias could not be read
    */
  def convertSVC(spark: SparkSession, path: String): LinearSVCModel = {
    val classifierModel = CusAngelClassifierModel.load(path)
    val angelModelPath = new Path(path, "angel").toString
    val (weight, bias) = parseWeightAndBias(spark, angelModelPath)
    require(weight != null && weight.nonEmpty && bias != null && bias.nonEmpty, "weight or bias error")
    val coefficients: linalg.Vector = Vectors.dense(weight)
    val linearSVCModel = new LinearSVCModel("svc", coefficients, bias.head)
    linearSVCModel.setFeaturesCol(classifierModel.getFeaturesCol)
      .setPredictionCol(classifierModel.getPredictionCol)
      .setRawPredictionCol(classifierModel.getRawPredictionCol)
    linearSVCModel
  }

  /**
    * Builds a Spark binary `LogisticRegressionModel` from the Angel LR model
    * saved at `path`, copying column names from the saved classifier. The
    * probability column name is taken from the MetaStorage parameters
    * ("probabilityCol", default "probability") — this was the effective final
    * value in the original code too, which set and then overwrote the column.
    *
    * @throws IllegalArgumentException if weights or bias could not be read
    */
  def convertLR(spark: SparkSession, path: String, meta: MetaStorage): LogisticRegressionModel = {
    val classifierModel = CusAngelClassifierModel.load(path)
    val angelModelPath = new Path(path, "angel").toString
    val (weight, bias) = parseWeightAndBias(spark, angelModelPath)
    require(weight != null && weight.nonEmpty && bias != null && bias.nonEmpty, "weight or bias error")
    val coefficient = Vectors.dense(weight)
    val lrModel = new LogisticRegressionModel("logreg", coefficient, bias.head)
    lrModel.setFeaturesCol(classifierModel.getFeaturesCol)
      .setPredictionCol(classifierModel.getPredictionCol)
      .setRawPredictionCol(classifierModel.getRawPredictionCol)
      .setThreshold(0.5)
    val probabilityColParam = lrModel.getParam("probabilityCol")
    lrModel.set(probabilityColParam, meta.getParameters.getOrElse("probabilityCol", "probability"))
    lrModel
  }
}
