package com.central.controller

import com.central.service.{DayNightService, NightCondition}
import com.central.utils.ConfigUtil
import com.typesafe.config.Config
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.scalactic.Accumulation.withGood
import org.scalactic._
import spark.jobserver.api.{JobEnvironment, SingleProblem, SparkJob, ValidationProblem}

import scala.util.{Failure, Success, Try}

object DayNightController extends SparkJob {
  override type JobData = (SparkSession, NightCondition)
  override type JobOutput = Unit

  /** Executes the day/night analysis with the SparkSession and condition produced by [[validate]]. */
  override def runJob(sc: SparkContext, runtime: JobEnvironment, data: JobData): JobOutput = {
    new DayNightService().dayNightAnalysis(data._1, data._2)
  }

  /**
   * Validates the incoming job request.
   *
   * Builds (or reuses) a SparkSession configured for Elasticsearch, then parses
   * the "body" section of the request config into a [[NightCondition]].
   *
   * @return `Good((spark, condition))` when all required fields are present,
   *         otherwise `Bad` with a single problem describing the parse failure.
   */
  override def validate(sc: SparkContext, runtime: JobEnvironment, config: Config): JobData Or Every[ValidationProblem] = {
    val spark = SparkSession
      .builder()
      .appName("lingerAnalysis")
      .master("spark://192.168.5.180:7077") // NOTE(review): hard-coded master URL — consider externalizing to configuration
      .config("es.nodes", ConfigUtil.properties.getProperty("elasticsearch.host", "192.168.5.180"))
      .config("es.port", ConfigUtil.properties.getProperty("elasticsearch.port", "9200"))
      .config("es.mapping.date.rich", "false")
      .getOrCreate()

    // Convert the incoming parameters into a NightCondition. The ENTIRE extraction
    // (not just getConfig("body")) is wrapped in Try: Typesafe Config's getInt/getString
    // throw ConfigException.Missing/WrongType for absent or mis-typed keys, and those
    // must surface as a validation problem rather than escape as an exception.
    val parameterValidation: (SparkSession, NightCondition) Or One[ValidationProblem] =
      Try {
        val body = config.getConfig("body")
        NightCondition(
          body.getInt("jobid"),
          body.getString("code"),
          body.getString("codetype")
        )
      } match {
        case Success(condition) =>
          Good((spark, condition))
        case Failure(ex) =>
          Bad(One(SingleProblem("body is not exists or json can't parse. error message:" + ex.getMessage)))
      }
    // withGood widens One[ValidationProblem] to Every[ValidationProblem] for the declared return type.
    withGood(parameterValidation) { a => a }
  }
}
