package cn.dmp.charts

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Batch job: counts raw log records per (province, city) pair.
 *
 * Expects args(0) to be a path to comma-separated raw data where field 24 is
 * the province name and field 25 is the city name (0-based, after a -1-limit
 * split that keeps trailing empty fields). Rows with fewer than 85 fields are
 * discarded. Results are printed to stdout as "province-city:count".
 */
object ProAndCity_Counts_FromRawV2 {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("ProAndCity_Counts_FromRaw").setMaster("local[4]")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.sql.parquet.compression.codec", "snappy")

    val sc: SparkContext = new SparkContext(conf)
    try {
      val rawRDD: RDD[String] = sc.textFile(args(0))

      // split with limit -1 so trailing empty columns are preserved,
      // keeping the index positions (24/25) stable for sparse rows
      val splitsRDD: RDD[Array[String]] = rawRDD.map(_.split(",", -1))

      // drop malformed/short rows before indexing into the array
      val suitedLengthRDD: RDD[Array[String]] = splitsRDD.filter(_.length >= 85)

      val proAndCityRDD: RDD[((String, String), Int)] = suitedLengthRDD.map { splits =>
        val provincename = splits(24)
        val cityname = splits(25)
        ((provincename, cityname), 1)
      }

      val countProAndCity: RDD[((String, String), Int)] = proAndCityRDD.reduceByKey(_ + _)

      // collect the aggregated (small) result to the driver before printing:
      // println inside RDD.foreach runs on executors and would not reach the
      // driver's stdout on a real cluster
      countProAndCity.collect().foreach { case ((province, city), count) =>
        println(s"$province-$city:$count")
      }
    } finally {
      // always release the SparkContext, even if the job fails
      sc.stop()
    }
  }
}
