package cn.lagou.sparksql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

object AdidCount {
  /**
   * Counts clicks and impressions per ad id from two "="-delimited log files
   * and prints one (adid, clickCount, impressionCount) triple per ad.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName(this.getClass.getCanonicalName)
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("warn")

    // Raw event logs: one "="-delimited record per line.
    val clickRDD: RDD[String] = sc.textFile("data/click.log")
    val impRDD: RDD[String] = sc.textFile("data/imp.log")

    // Field 5 (after splitting on "=") holds the ad id.
    // Encode each click as (adid, (1, 0)): one click, zero impressions.
    val clicks = clickRDD.map { record =>
      val fields = record.split("=")
      (fields(5), (1, 0))
    }
    // Encode each impression as (adid, (0, 1)): zero clicks, one impression.
    val impressions = impRDD.map { record =>
      val fields = record.split("=")
      (fields(5), (0, 1))
    }

    // Merge both streams, sum the (click, impression) counters per ad id,
    // then flatten to (adid, clickCount, impressionCount) for printing.
    clicks
      .union(impressions)
      .reduceByKey { case ((c1, i1), (c2, i2)) => (c1 + c2, i1 + i2) }
      .map { case (adid, (c, i)) => (adid, c, i) }
      .collect()
      .foreach(println)

    spark.close()
  }

}
