package LogServer.dao

import LogServer.constants.{PropertiesAssemblyJar, PropertiesMongo, PropertiesNodejs, PropertiesStreaming}
import LogServer.spark.mllib.Kmeans
import LogServer.spark.sql.SqlContext
import LogServer.utils.{DateUtil, HttpRequest}
import com.mongodb.DBObject
import com.mongodb.casbah.commons.{Imports, MongoDBObject}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.DStream

import scala.util.parsing.json.{JSON, JSONObject}

/**
  * Created by root on 17-2-21.
  */
class VmhostDao() extends Serializable {

  /**
    * Consumes each RDD of the incoming JSON metric stream, derives extra
    * fields (memory usage rate, k-means prediction, accumulated host memory
    * usage) and hands the record to [[dBObjectHandler]] for persistence and
    * push to the front end.
    *
    * @param dstream               stream of JSON strings with vm/host metrics
    * @param propertiesMongoObject mongo connection properties
    * @param sc                    shared SparkContext; @transient so it is not
    *                              serialized into the foreachRDD closure
    */
  def save(dstream: DStream[String], propertiesMongoObject: PropertiesMongo, @transient sc: SparkContext): Unit = {
    dstream.foreachRDD(rdd => {
      val sqlContext = new SQLContext(sc)
      if (!rdd.isEmpty()) {
        val rdd2DF = sqlContext.read.json(rdd)

        val time = DateUtil.getCurrent_time()

        val host_mem = rdd2DF.select("host_mem").first().getAs[Long]("host_mem")
        val host_cpu = rdd2DF.select("host_cpu").first().getAs[Long]("host_cpu")
        val vm_mem = rdd2DF.select("vm_mem").first().getAs[Long]("vm_mem")
        val host_id = rdd2DF.select("host_id").first().getAs[Long]("host_id")

        // Look up the memory already in use on this host and add the new
        // vm's share. (Calls our own private helper directly instead of
        // allocating a fresh VmhostDao as the original did.)
        val host_mem_used_last = getHostMemUsed(sqlContext, host_id)
        val host_mem_used = host_mem_used_last + vm_mem
        println("str: " + host_mem_used)

        val vm_cpu_num = rdd2DF.select("vm_cpu_num").first().getAs[Long]("vm_cpu_num")
        val vm_id = rdd2DF.select("vm_id").first().getAs[Long]("vm_id")
        val vm_cpu = rdd2DF.select("vm_cpu").first().getAs[Long]("vm_cpu")
        // Percentage of the host's memory consumed by this vm, 1 decimal.
        val vm_mem_rate = "%1.1f".format(vm_mem * 100.0 / host_mem)

        var predicted = 0
        // MLlib: classify the vm with the pre-trained k-means model, if any.
        val model = Kmeans.getModel(sc)
        model match {
          case Some(m) =>
            predicted = Kmeans.predict(m, vm_cpu, vm_mem_rate.toDouble)
            println("predicted : " + predicted)
          case None => // no trained model available yet: keep default 0
        }

        // JSON payload pushed to the front end through the node.js API.
        val json =
          s"""{"time":"$time","host_mem":"$host_mem","host_cpu":"$host_cpu","vm_mem":"$vm_mem","host_id":"$host_id",
             |"vm_cpu_num":"$vm_cpu_num","vm_id":"$vm_id","vm_cpu":"$vm_cpu","vm_mem_rate":"$vm_mem_rate",
             |"predicted":"$predicted","host_mem_used":"$host_mem_used"}""".stripMargin
        val dBObject: Imports.DBObject = MongoDBObject(
          "time" -> time,
          "host_mem" -> host_mem,
          "host_cpu" -> host_cpu,
          "vm_mem" -> vm_mem,
          "host_id" -> host_id,
          "vm_cpu_num" -> vm_cpu_num,
          "vm_id" -> vm_id,
          "vm_cpu" -> vm_cpu,
          "vm_mem_rate" -> vm_mem_rate,
          "host_mem_used" -> host_mem_used
        )
        dBObjectHandler(propertiesMongoObject, json, dBObject)
      }
    })
  }

  /**
    * Handles a finished record:
    * (1) saves it to the mongo "vmhost" collection;
    * (2) POSTs it to the node.js API so the websocket front end refreshes.
    *
    * A failed POST is logged and otherwise ignored — the mongo write has
    * already succeeded and the push is best-effort.
    *
    * @param propertiesMongoObject mongo connection properties
    * @param json                  JSON payload for the front-end API
    * @param dBObject              same record as a mongo document
    */
  private def dBObjectHandler(propertiesMongoObject: PropertiesMongo, json: String, dBObject: DBObject): Unit = {
    // (1) persist to mongo
    val coll = MongoConnector.getConnection(propertiesMongoObject, "vmhost")
    coll.save(dBObject)
    // (2) push to the front end over HTTP (websocket update happens server-side)
    val request = new HttpRequest()
    val data = json
    println(data)
    try {
      val ip = PropertiesNodejs.getUrl()
      val url = "http://" + ip + "/log/vmhost"
      val result = request.post(url, data)
      println(result)
    } catch {
      case e: Exception =>
        println(e.getMessage)
    }
  }

  /**
    * Looks up the memory already in use on the given host from the mongo-backed
    * "vmhost" table (most recent record first).
    *
    * @param ctx     caller's SQLContext — reused so we stay on the same SparkContext
    * @param host_id host to look up
    * @return accumulated used memory; 0 means no matching record was found
    */
  private def getHostMemUsed(ctx: SQLContext, host_id: Long): Long = {
    val df = SqlContext.loadCollection(ctx, "vmhost")
    df.registerTempTable("vmhost")
    var host_mem_used: Long = 0
    try {
      // BUGFIX: the original concatenation was missing spaces and produced
      // "... host_id =65order by time desc" — an invalid query whose parse
      // error was then silently swallowed, so this method always returned 0.
      val result = ctx.sql(
        "select * from vmhost where host_id = " + host_id + " order by time desc")
      if (result.count() > 0) {
        // BUGFIX: read the accumulated total written by save(), not the last
        // vm's own memory ("vm_mem"), so usage genuinely accumulates.
        host_mem_used = result.first().getAs("host_mem_used")
      }
    } catch {
      case e: Exception =>
        // Log instead of swallowing silently; the 0 default still means "no match".
        println(e.getMessage)
    }
    host_mem_used
  }
}

object VmhostDao {
  /**
    * Ad-hoc smoke test: queries the memory already used on a fixed host id
    * against the mongo-backed "vmhost" collection and prints the result.
    * Requires a reachable Spark master and mongo instance.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("LogServer").setMaster("spark://master:7077").
      set("spark.executor.memory", "500m").setJars(PropertiesAssemblyJar.getAssemblyJars())
    val timespan = PropertiesStreaming.getTimespan()
    val ssc = new StreamingContext(conf, Seconds(timespan))
    try {
      val ctx = new SQLContext(ssc.sparkContext)
      val vmhostDao = new VmhostDao()
      val host_id = 65
      val str = vmhostDao.getHostMemUsed(ctx, host_id)
      println("str: " + str)
    } finally {
      // BUGFIX: release the SparkContext on exit — the original leaked it.
      ssc.sparkContext.stop()
    }
  }
}
