package com.hxk.division

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.LoggerFactory

object DivisionMain {

  // Logger for this job's own log output (as opposed to Spark's internal logging).
  private val logger = LoggerFactory.getLogger("DivisionMain")

  /**
   * Entry point: keys each identity number by its 6-character region prefix,
   * left-outer-joins it against a region-code lookup table, and writes the
   * joined result as text files.
   *
   * Paths are configurable through SparkConf (with local-file defaults):
   *   - spark.regionDivision.identInputPath : one identity number per line
   *   - spark.regionDivision.codeInputPath  : space-separated "code name [extra]" lines
   *   - spark.regionDivision.baseOutputPath : output directory for saveAsTextFile
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
    // FIX: app name previously said "TimeMerge" (copy-paste from another job);
    // every other identifier in this file says DivisionMain.
    conf.setAppName("DivisionMain")
    if (!conf.contains("spark.master")) conf.setMaster("local")

    val sc = new SparkContext(conf)

    try {
      val identInputPath = conf.get("spark.regionDivision.identInputPath", "data/ident.txt")
      val codeInputPath  = conf.get("spark.regionDivision.codeInputPath", "data/code.txt")
      val baseOutputPath = conf.get("spark.regionDivision.baseOutputPath", "data/output")

      // 1. Load the identity-number file (one id per line).
      val identRDD: RDD[String] = sc.textFile(identInputPath)

      // Region-code lookup table: code -> (name, optional extra column).
      // FIX: the missing third column was modeled as `null`, which is fragile in
      // an RDD and prints as the literal string "null" — use Option instead.
      // FIX: lines with fewer than 2 fields used to throw on temp(1); drop them.
      val codeRDD: RDD[(String, (String, Option[String]))] =
        sc.textFile(codeInputPath)
          .map(_.split(" "))
          .filter(_.length >= 2)
          .map(fields => (fields(0), (fields(1), fields.lift(2))))

      // 2. Key each identity by its first 6 characters (the region code).
      // FIX: substring(0, 6) threw StringIndexOutOfBoundsException on blank or
      // short lines — filter those out first.
      // FIX: the value was `null`, discarding the identity number entirely;
      // keep the full line so the join output still carries it.
      val idRDD: RDD[(String, String)] = identRDD
        .filter(_.length >= 6)
        .map(line => line.substring(0, 6) -> line)

      // 3. Left outer join: every identity is retained, paired with
      //    Some((name, extra)) when its region code matched, None otherwise.
      val divisionRDD = idRDD.leftOuterJoin(codeRDD)

      // FIX: saveAsTextFile returns Unit; binding it to `val outRDD` was
      // misleading dead code — it is an action, not a transformation.
      divisionRDD.saveAsTextFile(baseOutputPath)
    } finally {
      // Always release the SparkContext, even when the job fails.
      sc.stop()
    }

    logger.info("End DivisionMain")
  }
}
