package com._51doit.spark03

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object ImpAndClick {

  /**
   * Aggregates ad impression/click counts per (id, keyword) pair.
   *
   * Each input line has the shape: `id,kw1|kw2|...,impressions,clicks`.
   * The line is expanded into one record per keyword, counts are summed
   * per (id, keyword) key with `reduceByKey`, the result is sorted by key
   * ascending, flattened to a 4-tuple, and printed.
   *
   * @param args optional; `args(0)` overrides the default input path
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc: SparkContext = new SparkContext(conf)

    try {
      // Generalized: allow the input path on the command line; fall back to
      // the original hard-coded path so existing invocations still work.
      val inputPath: String =
        if (args.nonEmpty) args(0)
        else "D:\\07spark\\spark-day03\\作业\\impclick.txt"

      val data: RDD[String] = sc.textFile(inputPath)

      // One record per (id, keyword) carrying the line's (imp, click) counts.
      // NOTE(review): assumes every line has >= 4 comma-separated fields with
      // numeric imp/click columns — a malformed line will throw at runtime
      // (same behavior as the original); confirm input is always well-formed.
      val perKeyword: RDD[((String, String), (Int, Int))] = data.flatMap { line =>
        val fields: Array[String] = line.split(",")
        val id: String = fields(0)
        val imp: Int = fields(2).toInt
        val click: Int = fields(3).toInt
        // '|' is a regex alternation metacharacter, hence the escape.
        fields(1).split("\\|").map(keyword => ((id, keyword), (imp, click)))
      }

      // Sum impressions and clicks per (id, keyword) key.
      val summed: RDD[((String, String), (Int, Int))] =
        perKeyword.reduceByKey { case ((imp1, click1), (imp2, click2)) =>
          (imp1 + imp2, click1 + click2)
        }

      // Sort by (id, keyword) ascending, then flatten for printing.
      val result: RDD[(String, String, Int, Int)] = summed
        .sortByKey()
        .map { case ((id, keyword), (imp, click)) => (id, keyword, imp, click) }

      result.foreach(println)
    } finally {
      // Defect fixed: the original never released the SparkContext.
      sc.stop()
    }
  }
}
