package com.central.controller

import com.central.resident.batch_job.ResidentAnalysis
import com.central.utils.LoadESData
import com.typesafe.config.Config
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.scalactic.Accumulation.withGood
import org.scalactic._
import spark.jobserver.api.{JobEnvironment, SingleProblem, SparkJob, ValidationProblem}

import scala.util.{Failure, Success, Try}

object ResidentController extends SparkJob {
  /** Input handed from [[validate]] to [[runJob]]: the SparkSession plus the jobId string. */
  override type JobData = (SparkSession, String)
  override type JobOutput = Unit

  /**
   * Runs the resident-analysis batch job.
   *
   * @param sc      the SparkContext provided by the job server (unused; the SparkSession
   *                built in [[validate]] is used instead)
   * @param runtime the job-server environment (unused)
   * @param data    the (SparkSession, jobId) pair produced by [[validate]]
   */
  override def runJob(sc: SparkContext, runtime: JobEnvironment, data: JobData): JobOutput = {
    val (spark, jobId) = data
    ResidentAnalysis.getResidentResult(jobId, spark)
  }

  /**
   * Validates the incoming job configuration and builds the [[JobData]] for [[runJob]].
   *
   * NOTE(review): constructing a SparkSession with a hard-coded master URL inside
   * `validate` is a side effect; consider moving the master URL into configuration.
   *
   * @param sc      the SparkContext provided by the job server (unused)
   * @param runtime the job-server environment (unused)
   * @param config  the job configuration; must contain a "body" section whose optional
   *                "jobId" entry is forwarded to the analysis (defaults to "" when absent,
   *                matching the previous hard-coded behavior)
   * @return Good((spark, jobId)) on success, or a Bad validation problem describing why
   *         the "body" section could not be read
   */
  override def validate(sc: SparkContext, runtime: JobEnvironment, config: Config): JobData Or Every[ValidationProblem] = {
    val properties = LoadESData.loadProperties()
    val spark = SparkSession
      .builder()
      .master("spark://192.168.5.180:7077") // TODO(review): externalize the master URL
      .config("es.nodes", properties.getProperty("elasticsearch.host", "192.168.5.180"))
      .config("es.port", properties.getProperty("elasticsearch.port", "9200"))
      .config("es.mapping.date.rich", "false")
      .getOrCreate()
    val parameterValidation: JobData Or One[ValidationProblem] =
      Try {
        val content = config.getConfig("body")
        // Read the optional jobId; fall back to "" (the previous hard-coded value)
        // so callers that never supplied one keep working.
        if (content.hasPath("jobId")) content.getString("jobId") else ""
      } match {
        case Success(jobId) =>
          Good((spark, jobId))
        case Failure(ex) =>
          Bad(One(SingleProblem("body does not exist or json can't be parsed. error message:" + ex.getMessage)))
      }
    withGood(parameterValidation)(identity)
  }
}
