package cn.lagou.part03

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * Joins per-user impression counts with per-user click counts and writes the
 * result to HDFS as lines of the form `uid,impressions,clicks`.
 *
 * Input log lines are `&`-separated; the third field is a `key=value` pair
 * whose value is the user id (e.g. `...&...&uid=u123&...`).
 */
object ClickLog {

  /**
   * Parses the user id out of each log line (third `&` field, value after `=`)
   * and counts occurrences per id.
   *
   * Malformed lines (fewer than three `&` fields, or a third field with no
   * value after `=`) are silently skipped instead of crashing the job.
   */
  private def countByUid(lines: RDD[String]): RDD[(String, Int)] =
    lines
      .flatMap { line =>
        val fields = line.split("&")
        if (fields.length > 2) {
          val kv = fields(2).split("=")
          if (kv.length > 1) Some((kv(1), 1)) else None
        } else None
      }
      .reduceByKey(_ + _)

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName(this.getClass.getCanonicalName)
      .master("local[4]")
      .getOrCreate()

    val sc = spark.sparkContext
    sc.setLogLevel("warn")

    // Per-user counts from each source log.
    val clickResult: RDD[(String, Int)] = countByUid(sc.textFile("data/click.log"))
    val impResult: RDD[(String, Int)] = countByUid(sc.textFile("data/imp.log"))

    // Full outer join keeps users present in only one of the two logs;
    // the missing side defaults to 0.
    impResult.fullOuterJoin(clickResult)
      .map { case (uid, (imps, clicks)) =>
        s"$uid,${imps.getOrElse(0)},${clicks.getOrElse(0)}"
      }
      // coalesce(1) merges to a single output file without the full shuffle
      // that repartition(1) would trigger.
      .coalesce(1)
      .saveAsTextFile("hdfs://linux121:9000/data/spark_home_work")

    sc.stop()
  }
}
