package dataProcess

import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.LoggerFactory

/**
  * Created by GS on 2018/1/29.
  * Counts the number of distinct device MACs seen at each probe site per 15-minute window.
  */
object tanzhenSummary {
    val log = LoggerFactory.getLogger(tanzhenSummary.getClass)
    val tab = "\t"
    val colon = ":"

    /**
      * Spark job entry point.
      *
      * Reads tab-separated probe records, keeps well-formed rows, and for each
      * probe site writes the number of distinct device MACs observed in every
      * 15-minute window.
      *
      * args(0) - input text path
      * args(1) - output path prefix (site name is appended per site)
      * args(2) - number of input partitions (parsed as Int)
      */
    def main(args: Array[String]) {
        val (textData, dest, partition) = (args(0), args(1), args(2))
        val destOutName = dest.split('/').last

        val conf = new SparkConf().setAppName(s"tanzhenSummary-" + destOutName)
        conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        val sc = new SparkContext(conf)

        log.info("-----------------------Start Spark Context-----------------------" + destOutName)
        val data = sc.textFile(textData, partition.toInt)

        // Probe AP MACs belonging to each monitored site.
        val macList_urbanxyz = Array("20f41b7db848", "20f41b7db836", "20f41b7db835")
        val macList_shichahai = Array("20f41b7db777", "20f41b7db762", "20f41b7db742", "20f41b7db745", "20f41b7db764", "20f41b7db761",
            "20f41b7db743", "20f41b7db785", "20f41b7db763", "20f41b7db782", "20f41b7db776", "20f41b7db774", "20f41b7db760",
            "20f41b7db778", "20f41b7db781", "20f41b7db779")
        val macList_dashilanr = Array("20f41b7db758", "20f41b7db741", "20f41b7db739", "20f41b7db740", "20f41b7db608", "20f41b7db609")
        val macList_baitasi = Array("20f41b7db787", "20f41b7db766")
        val macList_shijia = Array("20f41b7db768", "20f41b7db748", "20f41b7db754", "20f41b7db746")

        // (device MAC, probe AP MAC, "date-hour:quarter-index", e.g. "2018-01-29T09:00")
        // Expected record layout: 5 tab-separated fields; last field is "dateHour:minute:second".
        val people_ap_time = data.map(_.split(tab))
          // && instead of & : short-circuit so the split is not evaluated for malformed rows
          .filter(f => f.length == 5 && f.last.split(colon).length == 3)
          .map(f => {
              // Split the timestamp once instead of twice per record.
              val parts = f.last.split(colon)
              val time = parts(0)
              // Which 15-minute window of the hour this record falls into (0..3).
              val inte = parts(1).toInt / 15
              val mac_ap = f(1).replace(":", "")
              (f(0), mac_ap, time + colon + inte)
          })

        val macList = Array(macList_baitasi, macList_dashilanr, macList_shichahai, macList_shijia, macList_urbanxyz)
        // Index -> site name; fixed typo "shija" -> "shijia" to match macList_shijia.
        val macMap = Map(0 -> "baitasi", 1 -> "dashilanr", 2 -> "shichahai", 3 -> "shijia", 4 -> "urbanxyz")

        for (ind <- macList.indices) {
            // Keep only records from probes belonging to this site.
            val result = people_ap_time.filter(f => macList(ind).contains(f._2))
              // (window, device MAC); distinct so each device is counted once per window
              .map(f => (f._3, f._1)).distinct()
              // (window, distinct-device count)
              .map(f => (f._1, 1)).reduceByKey(_ + _)
            // Single output file per site, sorted by window.
            result.coalesce(1).sortByKey().map(f => f._1 + tab + f._2).saveAsTextFile(dest + macMap(ind))
        }

        log.info("-----------------------Stopping Spark Context-----------------------" + destOutName)
        sc.stop()
    }
}
