package com.central.controller

import java.util

import com.central.service._
import com.central.utils.LoadESData
import com.typesafe.config.Config
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.scalactic.Accumulation.withGood
import org.scalactic._
import spark.jobserver.api.{JobEnvironment, SingleProblem, SparkJob, ValidationProblem}

import scala.util.{Failure, Success, Try}

/**
 * Spark JobServer entry point for the "follow" (伴随/同行) analysis job.
 *
 * `validate` parses the request's "body" config into a
 * [[FollowingSiteConditionEntity]] plus a threshold, and `runJob` hands the
 * validated tuple to [[FollowService.followAll]].
 */
object FollowController extends SparkJob {

  /** (session, jobId, query condition, threshold). */
  override type JobData = (SparkSession, String, FollowingSiteConditionEntity, Int)

  /** The service performs its own output/side effects; nothing is returned. */
  override type JobOutput = Unit

  /** Executes the follow analysis with the data already validated by [[validate]]. */
  override def runJob(sc: SparkContext, runtime: JobEnvironment, data: JobData): JobOutput = {
    new FollowService().followAll(data._1, data._2, data._3, data._4)
  }

  /**
   * Validates the incoming request configuration.
   *
   * Reads the "body" sub-config and extracts the job id, identity fields,
   * time window, slice, device numbers and activity-time list; any parse
   * failure (missing "body", missing key, wrong type) is reported as a single
   * [[ValidationProblem]].
   */
  override def validate(sc: SparkContext, runtime: JobEnvironment, config: Config): JobData Or Every[ValidationProblem] = {
    val properties = LoadESData.loadProperties()

    // NOTE(review): this builds its own SparkSession rather than wrapping the
    // provided SparkContext `sc`, and the master URL is hard-coded — confirm
    // this is intentional for the target deployment.
    val spark = SparkSession
      .builder()
      .appName("lxdata")
      .master("spark://192.168.5.180:7077")
      .config("es.nodes", properties.getProperty("elasticsearch.host", "192.168.5.180"))
      .config("es.port", properties.getProperty("elasticsearch.port", "9200"))
      .config("es.mapping.date.rich", "false")
      .getOrCreate()
    spark.sparkContext.setLogLevel("ERROR")

    val parameterValidation: JobData Or One[ValidationProblem] =
      Try(config.getConfig("body")) match { // convert the request parameters into a condition entity
        case Success(body) =>
          val jobId = body.getString("jobId")
          val idNumberType = body.getString("idNumberType")
          val idNumber = body.getString("idNumber")
          val beginTime = body.getLong("begintime")
          val endTime = body.getLong("endtime")
          val slice = body.getInt("slice")
          val deviceNumbers = body.getString("deviceNumbers")
          val threshold = body.getInt("threshold")
          val hasliferow = body.getString("hasliferow")
          val fuzzyway = body.getString("fuzzyway")

          // Explicit converters instead of the deprecated implicit JavaConversions.
          import scala.collection.JavaConverters._
          val activityTimes = body.getAnyRefList("activitytimes").asScala.toList.map { jobj =>
            val timeMap = jobj.asInstanceOf[util.Map[String, Object]]
            ActivityTime(String.valueOf(timeMap.get("days")), String.valueOf(timeMap.get("time")))
          }

          // If the end time is not after the begin time, treat the window as
          // open-ended by substituting Long.MaxValue.
          val condition = FollowingSiteConditionEntity(
            idNumberType,
            idNumber,
            beginTime,
            if (endTime <= beginTime) Long.MaxValue else endTime,
            slice,
            deviceNumbers,
            hasliferow,
            fuzzyway,
            activityTimes
          )
          Good((spark, jobId, condition, threshold))

        case Failure(ex) =>
          Bad(One(SingleProblem("body is not exists or json can't parse. error message:" + ex.getMessage)))
      }

    withGood(parameterValidation) { a => a }
  }

}

