package com.htiiot.sparkpredict.tool

import java.time.{LocalDate, LocalDateTime, LocalTime, ZoneOffset}
import com.htiiot.sparkpredict.bean.PredictData
import org.apache.spark.ml.linalg.{DenseVector, Vectors}
import org.apache.spark.ml.regression.LinearRegressionModel
import org.apache.spark.sql.SparkSession
import scala.collection.mutable.ArrayBuffer

/**
  * @author :chensi
  */
object LinearMetricPredict {

  // Cached regression model, reloaded at most once per calendar day.
  // Access is guarded by `synchronized` on this object.
  var model: LinearRegressionModel = null
  // Date string (yyyy-MM-dd) of the last model load; drives the daily refresh.
  var time: String = LocalDate.now().toString
  val spark = SparkSession.builder().master("local[2]").appName("predict")
    .config("spark.sql.warehouse.dir","/tmp/predict")
    .getOrCreate()
  import spark.implicits._

  /**
    * Predicts the metric value for each of the next 7 days (today inclusive).
    *
    * The persisted model is loaded lazily and cached; it is refreshed from
    * `path` once per calendar day.
    *
    * @param path     filesystem path of the saved [[LinearRegressionModel]]
    * @param metricId identifier of the metric, used as the first feature
    * @return one [[PredictData]] per day, carrying the day's midnight
    *         (UTC+8) epoch-second timestamp and the predicted value
    */
  def predict(path: String, metricId: Int): Array[PredictData] = {
    if (model == null || !(LocalDate.now.toString.equals(time))) {
      synchronized {
        // Re-check under the lock so concurrent callers don't reload twice.
        if (model == null || !(LocalDate.now.toString.equals(time))) {
          model = LinearRegressionModel.load(path)
          // BUG FIX: the original stored LocalTime.now here, which can never
          // equal LocalDate.now in the staleness check above, so the model
          // was reloaded from disk on every call. Store the date instead so
          // the daily cache actually works.
          time = LocalDate.now.toString
        }
      }
    }
    // Midnight today in UTC+8 as epoch seconds; one feature row per day.
    val start = LocalDateTime.of(LocalDate.now(), LocalTime.MIN)
      .toEpochSecond(ZoneOffset.ofHours(8))
    val features = (0 to 6).map(i => (metricId.toDouble, (start + i * 86400L).toDouble))
    val trainData = spark.sparkContext.makeRDD(features)
      .map { case (id, ts) => Vectors.dense(id, ts) }
      .zipWithIndex()
      .toDF("features", "idx")
    val frame = model.transform(trainData)
    frame.collect().map { row =>
      val prediction = row.getAs[Double]("prediction")
      val feats = row.getAs[DenseVector]("features")
      // feats(1) is the epoch-second timestamp encoded above for this day.
      new PredictData(feats.values(1).toLong, prediction)
    }
  }

}
