package dataProcess

import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.LoggerFactory
import util._
/**
  * Created by THINKPAD on 2017/12/4.
  * Tongzhou police-service dataset:
  * min-max standardization of facility indicator columns,
  * followed by computation of a total score per record.
  */
object POI_score {
  val log = LoggerFactory.getLogger(POI_score.getClass)

  /**
    * Reads a CSV whose first field is a record key and whose remaining fields
    * are numeric indicators. Each indicator column is min-max normalized over
    * its NON-zero values (zero is treated as "missing" and stays 0), the
    * normalized values are joined back per key, a total score (their sum) is
    * appended, and that total is itself min-max scaled to 0-100. The result is
    * written tab-separated to a single output file.
    *
    * args(0) = input file path (CSV)
    * args(1) = output directory; its last path segment names the Spark app
    */
  def main(args: Array[String]) {
    if (args.length < 2) {
      System.err.println("You must pass the arguments:<inputfilepath,  dest>")
      System.exit(1)
    }

    val (textData, dest) = (args(0), args(1))
    val destOutName = dest.split('/').last

    // NOTE(review): hard-coded local master and Windows jar path — fine for a
    // dev run, but these should come from spark-submit / config in production.
    val conf = new SparkConf().setAppName(s"POI-score-" + destOutName)
      .setMaster("local[8]")
      .setJars(List("C:\\Codes\\IdeaProjects\\MachineLearning\\JiQiXueXi\\outJar\\JiQiXueXi.jar"))
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf)

    try {
      log.info("-----------------------Start Spark Context-----------------------" + destOutName)

      // Fix: parse each line exactly once and cache it. The original re-split
      // every line once per column per pass over the data.
      val parsed = sc.textFile(textData).map(_.split(",")).cache()
      val colNum = parsed.first().length

      // Running accumulator of (key, normalized-columns-so-far); seeded with a
      // single 0.0 so the first join produces Array(0.0, col1, col2, ...),
      // matching the original layout.
      var result = parsed.map(f => (f(0), Array(0.0)))
      for (i <- 1 until colNum) {
        // Min/max over the non-zero values of column i (zero means "missing").
        val nonZero = parsed.map(f => f(i).toDouble).filter(_ != 0.0)
        val (minN, maxN) =
          if (nonZero.isEmpty()) (0.0, 0.0) else (nonZero.min(), nonZero.max())
        val range = maxN - minN

        val normalized = parsed.map { f =>
          val v = f(i).toDouble
          // Fix: the original divided by (maxN - minN) unconditionally, which
          // yields Infinity/NaN when all non-zero values are equal (or when the
          // column is all zeros). A zero range now maps to score 0.0.
          val score =
            if (v == 0.0 || range == 0.0) 0.0
            // util.three presumably rounds to 3 decimal places — project
            // helper, TODO confirm.
            else util.three((v - minN) / range)
          (f(0), Array(score))
        }
        result = result.join(normalized).map(f => (f._1, f._2._1 ++ f._2._2))
      }

      // key \t col-scores... \t total — cached because it is evaluated twice
      // (once for the min/max job, once for the final save).
      val withSum = result
        .map(f => f._1 + "\t" + f._2.mkString("\t") + "\t" + util.three(f._2.sum))
        .cache()

      // Scale the total score to 0-100 over all records.
      val sums = withSum.map(_.split("\t").last.toDouble)
      val (minS, maxS) =
        if (sums.isEmpty()) (0.0, 0.0) else (sums.min(), sums.max())
      val sumRange = maxS - minS

      withSum.map { line =>
        val total = line.split("\t").last.toDouble
        // Same zero-range guard as above for the final 0-100 scaling.
        if (total == 0.0 || sumRange == 0.0) line + "\t" + 0
        else line + "\t" + util.three((total - minS) * 100 / sumRange)
      }.coalesce(1).saveAsTextFile(dest)
    } finally {
      // Fix: the original never stopped the SparkContext (resource leak).
      sc.stop()
    }
  }

}
