package io.sqrtqiezi.spark.lagou

import org.apache.spark.sql.SparkSession

/**
 * Counts clicks and impressions per ad id from two log files, joins the
 * counts with a single cogroup shuffle, saves the result to HDFS and
 * prints it to stdout.
 *
 * Input line format (example):
 *   INFO 2019-09-01 00:31:51 requestURI:/click?app=1&p=1&adid=18005472&industry=469&adid=33
 * The ad id is taken as the trailing run of digits on each line
 * (i.e. the last `adid=` value in the sample format above).
 */
object ClickAnalysis {

  // Compiled once here instead of once per record inside the map closures
  // (scala.util.matching.Regex is Serializable, so it ships to executors).
  private val TrailingDigits = "\\d+$".r

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local")
      .appName("click log analysis")
      .getOrCreate()

    val sc = spark.sparkContext

    // (adid, 1) pairs. Lines with no trailing ad id are dropped via flatMap
    // instead of being silently counted under the "" key.
    val clickRDD = sc.textFile("lagou-data/click.log")
      .flatMap(line => TrailingDigits.findFirstIn(line))
      .map(id => (id, 1))

    val impRDD = sc.textFile("lagou-data/imp.log")
      .flatMap(line => TrailingDigits.findFirstIn(line))
      .map(id => (id, 1))

    // cogroup incurs exactly one shuffle. Cache the result: two actions
    // follow (saveAsTextFile and collect), and without cache() the whole
    // lineage — reading both logs plus the shuffle — is recomputed for the
    // second action.
    val result = clickRDD.cogroup(impRDD)
      .mapValues { case (clicks, imps) => (clicks.sum, imps.sum) }
      .cache()

    // Persist the per-ad (clickCount, impCount) pairs to HDFS.
    result.saveAsTextFile("hdfs://lagou-data/click_imp_count/")

    for ((id, (clickCount, impCount)) <- result.collect()) {
      println(s"adid: $id 点击数: $clickCount  曝光数: $impCount")
    }

    // Side-effecting 0-arity method — call with ().
    spark.stop()
  }
}
