package task3

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object ClickAndImpose {

  /**
   * Extracts the join key from a raw log line.
   *
   * A line is expected to look like "f0&f1&f2&f3&key=value&...": we take the
   * 5th '&'-separated field and return the text after its '='.
   * NOTE(review): throws ArrayIndexOutOfBoundsException on malformed lines,
   * same as the original inline expression — confirm inputs are well-formed.
   */
  private def extractKey(line: String): String =
    line.split("&")(4).split("=")(1)

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName(s"${this.getClass.getCanonicalName}").setMaster("local[*]")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    // Input paths may be supplied as args(0)/args(1); the defaults preserve
    // the original hard-coded locations for backward compatibility.
    val clickPath = if (args.length > 0) args(0) else "file:///D:\\projects\\spark_homework\\data\\click.log"
    val impPath   = if (args.length > 1) args(1) else "file:///D:\\projects\\spark_homework\\data\\imp.log"

    val click = sc.textFile(clickPath)
    val imp = sc.textFile(impPath)

    // Two-shuffle variant kept for reference: one reduceByKey per source,
    // then a fullOuterJoin (shuffles twice).
//    val clickRDD = click.map(s => (extractKey(s), 1)).reduceByKey(_ + _)
//    val impRDD = imp.map(s => (extractKey(s), 1)).reduceByKey(_ + _)
//
//    val result1 = clickRDD.fullOuterJoin(impRDD)
//    result1.foreach(println)

    // One-shuffle variant: tag each record with a (clickCount, impCount)
    // pair, union both sources, and sum component-wise in a single
    // reduceByKey. (1, 0) / (0, 1) tags make the sums equal to the counts
    // the Option-based version produced, without per-record boxing.
    val clickRDD: RDD[(String, (Int, Int))] = click.map(s => (extractKey(s), (1, 0)))
    val impRDD: RDD[(String, (Int, Int))] = imp.map(s => (extractKey(s), (0, 1)))
    val result: RDD[(String, (Int, Int))] = clickRDD.union(impRDD).reduceByKey {
      case ((c1, i1), (c2, i2)) => (c1 + c2, i1 + i2)
    }
    // Prints (key, clickCount, impCount) tuples, matching the original output.
    result.foreach(x => {
      println((x._1, x._2._1, x._2._2))
    })

    // Keeps the driver (and the Spark web UI) alive for inspection before
    // shutting down — TODO confirm this pause is still wanted.
    Thread.sleep(1000000)

    sc.stop()
  }
}
