package Count

import org.apache.spark.sql.SparkSession

object ProvinceAndCity {
  /**
   * Counts pay-log records per (province, city) pair and prints each
   * `((province, city), count)` tuple to the driver's stdout.
   *
   * Input: a JSON-lines file where each record has at least the string
   * fields "province" and "city".
   *
   * @param args optional; `args(0)` overrides the default input path
   */
  def main(args: Array[String]): Unit = {
    // Allow the input file to be supplied on the command line; fall back to
    // the original hard-coded path so existing invocations keep working.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "D:\\asa\\dianshang\\pay - 副本.log"

    val spark = SparkSession
      .builder()
      .appName(s"${this.getClass.getName}")
      .master("local[*]")
      .getOrCreate()

    try {
      val dataFrame = spark.read.json(inputPath)

      dataFrame.rdd
        .map { row =>
          val province = row.getAs[String]("province")
          val city = row.getAs[String]("city")
          ((province, city), 1)
        }
        .reduceByKey(_ + _)
        // collect() brings the (small, already-aggregated) result set back to
        // the driver before printing. A bare rdd.foreach(println) would print
        // on executor stdout — indistinguishable under local[*], but wrong
        // once the job is submitted to a real cluster.
        .collect()
        .foreach(println)
      // Sample output:
      //((河南省,洛阳市),6)
      //((西藏自治区,那曲地区),15)
      //((黑龙江省,哈尔滨市),16)
      //((河南省,鹤壁市),4)
      //((北京市,北京市),49)
      //((河北省,石家庄市),14)
      //((山西省,长治市),23)
    } finally {
      // Release the SparkSession even if reading or the job itself fails;
      // the original leaked the session on any exception.
      spark.stop()
    }
  }
}
