package com.diven.spark.ml.learn.transforms

import java.io.FileInputStream
import java.util

import javax.xml.transform.stream.StreamResult
import org.dmg.pmml.FieldName
import org.jpmml.evaluator.{FieldValue, InputField, ModelEvaluatorFactory}
import org.jpmml.model.filters.ImportFilter
import org.jpmml.model.{JAXBUtil, SAXUtil}

import scala.util.Random

/**
 * Loads a PMML model exported from Spark ML, echoes it to stdout, and
 * exercises the model evaluator with random integer inputs.
 *
 * Expects `./spark-pmml-model-003.pmml` to exist in the working directory.
 */
object PmmlLoad {

  def main(args: Array[String]): Unit = {

    // Open the model file and ensure the stream is closed even if parsing
    // fails (the original leaked the FileInputStream on any exception).
    val pmmlStream = new FileInputStream("./spark-pmml-model-003.pmml")
    val pmml =
      try {
        // ImportFilter upgrades older PMML schema versions during SAX parsing.
        val source = SAXUtil.createFilteredSource(pmmlStream, new ImportFilter())
        JAXBUtil.unmarshalPMML(source)
      } finally {
        pmmlStream.close()
      }

    // Echo the parsed model back to stdout for visual inspection.
    JAXBUtil.marshalPMML(pmml, new StreamResult(System.out))

    val modelEvaluatorFactory = ModelEvaluatorFactory.newInstance()
    val modelEvaluator = modelEvaluatorFactory.newModelEvaluator(pmml)
    // Active fields are the model's required input features.
    println(modelEvaluator.getActiveFields().toString())
    println()

    // NOTE(review): `1 until 100` performs 99 evaluations, not 100 — kept
    // as-is since the original bound may be intentional; confirm with author.
    for (i <- 1 until 100) {
      // LinkedHashMap preserves field order, matching the model's schema.
      val arguments = new util.LinkedHashMap[FieldName, FieldValue]()
      for (item <- modelEvaluator.getActiveFields.toArray) {
        val inputField = item.asInstanceOf[InputField]
        // prepare() validates/converts the raw value per the field's datatype.
        val activeValue = inputField.prepare(Random.nextInt(100))
        arguments.put(inputField.getFieldName, activeValue)
      }
      val results = modelEvaluator.evaluate(arguments)
      println(results)
    }
  }
}
