package com.lagoue.spark

import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession

/**
 * @author: yehw
 * @date: 2020/10/23 22:25
 * @description: Spark interview exercise — per-ad click/impression counting
 */
object homework3 {

  /**
   * Counts ad clicks and impressions per ad id and joins the two counts.
   *
   * Expected input line format (whitespace-separated fields):
   *   INFO 2019-09-01 00:29:53 requestURI:/click?app=1&p=1&adid=18005472&industry=469&adid=31
   * The 4th field is the request URI; its 5th '&'-separated parameter
   * (the second "adid") is the ad identifier being counted.
   *
   * Output: data/result.txt containing (adid, (Option[clicks], Option[imps]))
   * records; either side is None when the adid appears in only one log.
   */
  def main(args: Array[String]): Unit = {
    println("测试scala环境配置成功")
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("Spark SQL basic example")
      .config("spark.some.config.option", "some-value")
      .getOrCreate()
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("WARN")

    // Extracts the ad id (the trailing "adid=NN" parameter) from a raw log line.
    def adId(line: String): String =
      line.split("\\s+")(3).split("&")(4).split("=")(1)

    // Count clicks per adid. reduceByKey combines map-side before the
    // shuffle, unlike the original groupByKey().map(_._2.sum), which
    // shuffled every individual record.
    val clickCounts = sc.textFile("data/click.log")
      .map(line => (adId(line), 1))
      .reduceByKey(_ + _)

    val impCounts = sc.textFile("data/imp.log")
      .map(line => (adId(line), 1))
      .reduceByKey(_ + _)

    // BUG FIX: the original keyed clicks as ("click", adid) and impressions
    // as ("imp", adid). Those composite keys are disjoint by construction,
    // so fullOuterJoin could never pair a click count with an impression
    // count. Joining on the bare adid produces the intended pairing.
    val joined = clickCounts.fullOuterJoin(impCounts)
    joined.saveAsTextFile("data/result.txt")

    // Release the local Spark context cleanly.
    spark.stop()
  }

}
