package cn.lagou.sparkcore

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object homework3 {
  /**
   * Aggregates per-ad click and impression counts from two log files.
   *
   * Each log line's 4th whitespace-separated field carries the ad id after
   * the last '='. Click records are tagged (1, 0), impression records (0, 1),
   * and a single reduceByKey over the union sums both counters per ad id.
   *
   * Usage: homework3 [clickLogPath] [impLogPath] [outputPath]
   * Missing arguments fall back to the original hard-coded paths, keeping
   * existing invocations working unchanged.
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setAppName(this.getClass.getCanonicalName)
      .setMaster("local[*]")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    // Paths are overridable from the command line; defaults preserve the
    // original local-run behavior.
    val clickPath = if (args.length > 0) args(0) else "file:///F:\\code\\lagoubigdata\\data\\click.log"
    val impPath   = if (args.length > 1) args(1) else "file:///F:\\code\\lagoubigdata\\data\\imp.log"
    // Bug fix: the original wrote to the HDFS root ("hdfs://linux01:9000/").
    // saveAsTextFile requires the output directory NOT to exist, and the root
    // always exists, so the job would die with FileAlreadyExistsException.
    // Write to a dedicated output directory instead.
    val outPath   = if (args.length > 2) args(2) else "hdfs://linux01:9000/output/homework3"

    // Click log: each record contributes (1 click, 0 impressions).
    val clkRDD: RDD[(String, (Int, Int))] =
      sc.textFile(clickPath).map(line => (extractAdId(line), (1, 0)))

    // Impression log: each record contributes (0 clicks, 1 impression).
    val impRDD: RDD[(String, (Int, Int))] =
      sc.textFile(impPath).map(line => (extractAdId(line), (0, 1)))

    // union + reduceByKey acts as a full-outer "join": an ad present in only
    // one log still appears, with 0 in the missing counter. Renamed from the
    // original `val RDD`, which shadowed the imported RDD type.
    val resultRDD: RDD[(String, (Int, Int))] =
      clkRDD.union(impRDD).reduceByKey((x, y) => (x._1 + y._1, x._2 + y._2))

    resultRDD.saveAsTextFile(outPath)
    sc.stop()
  }

  /**
   * Extracts the ad id from a log line: the 4th whitespace-separated field,
   * keeping only the text after the last '='. Matches the original inline
   * logic exactly: if the field has no '=', lastIndexOf returns -1 and the
   * whole field is returned. Assumes the line has at least 4 fields — same
   * implicit precondition as the original code.
   */
  private def extractAdId(line: String): String = {
    val field = line.split("\\s+")(3)
    field.substring(field.lastIndexOf("=") + 1)
  }
}
