package com.xbb.analysis

import com.xbb.dao.CourseSearchCountDAO
import com.xbb.domain.ClickLog
import com.xbb.utils.DateUtil
import kafka.serializer.StringDecoder
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable.ListBuffer
import scala.util.Try

/**
 * Streaming ETL job: consumes raw access-log lines from Kafka, keeps only
 * course-page clicks (`/class/<id>.html`), aggregates click counts per
 * (day, referer-host, courseId) key in 3-second micro-batches, and persists
 * each batch through [[CourseSearchCountDAO]].
 *
 * Expected tab-separated line layout (as read below):
 *   fields(0) = client ip
 *   fields(1) = timestamp (normalized via DateUtil.parseToMinute)
 *   fields(2) = "METHOD url PROTOCOL"
 *   fields(3) = HTTP status code
 *   fields(4) = referer ("-" when absent)
 *
 * Optional command-line overrides (defaults preserve the original values):
 *   args(0) = Kafka broker list, args(1) = topic, args(2) = consumer group.
 *
 * NOTE(review): no checkpointing or manual offset management is done here,
 * so delivery semantics across restarts are at-most/at-least-once depending
 * on the failure mode — confirm against the downstream store's requirements.
 */
object LogAnalysis {

  /**
   * Parses one raw log line into a [[ClickLog]].
   *
   * Returns None for malformed lines (too few fields, non-numeric status or
   * course id, missing "." in the page name) instead of throwing — a single
   * bad record must not fail the whole micro-batch. Non-course URLs yield
   * courseId 0 and are filtered out here as well.
   */
  private def parseLine(line: String): Option[ClickLog] =
    Try {
      val fields: Array[String] = line.split("\t")
      // fields(2) looks like "GET /class/145.html HTTP/1.1" — take the URL.
      val url: String = fields(2).split(" ")(1)
      // "/class/145.html" -> 145; anything else -> 0 (dropped below).
      val courseId: Int =
        if (url.startsWith("/class")) {
          val page: String = url.split("/")(2)
          page.substring(0, page.lastIndexOf(".")).toInt
        } else 0

      ClickLog(
        fields(0),
        DateUtil.parseToMinute(fields(1)),
        courseId,
        fields(3).toInt,
        fields(4)
      )
    }.toOption.filter(_.courseId != 0)

  /**
   * Extracts the second-level host label from a referer URL, i.e. the text
   * between the first and last dot (e.g. "www.google.com" -> "google").
   *
   * "-" (no referer) and referers with fewer than two dots map to "other";
   * the original substring call threw StringIndexOutOfBoundsException on
   * such inputs.
   */
  private def refererHost(referer: String): String = {
    val first = referer.indexOf(".")
    val last = referer.lastIndexOf(".")
    if ("-" == referer || first < 0 || first == last) "other"
    else referer.substring(first + 1, last)
  }

  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("LogAnalysis")
    val ssc = new StreamingContext(conf, Seconds(3))

    // Connection settings: overridable from the command line, with the
    // original hard-coded values as backward-compatible defaults.
    val brokers = if (args.length > 0) args(0) else "hadoop1:9092"
    val topic = if (args.length > 1) args(1) else "visitlog"
    val group = if (args.length > 2) args(2) else "logger"
    val deserializationClass = "org.apache.kafka.common.serialization.StringDeserializer"

    val kafkaParams: Map[String, String] = Map[String, String](
      ConsumerConfig.GROUP_ID_CONFIG -> group,
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> brokers,
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> deserializationClass,
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> deserializationClass
    )

    // Direct (receiver-less) stream of (key, value) pairs; only the value
    // (the raw log line) is used.
    val logDStream: InputDStream[(String, String)] =
      KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, Set(topic))

    // Per-record fault-tolerant ETL: malformed lines are dropped, not fatal.
    val clickLogs: DStream[ClickLog] = logDStream.map(_._2).flatMap(parseLine)

    clickLogs.map { log =>
      // Row key: yyyyMMdd_day + referer host + course id.
      val day: String = log.time.substring(0, 8)
      (s"${day}_${refererHost(log.referer)}_${log.courseId}", 1)
    }.reduceByKey(_ + _).foreachRDD { rdd =>
      rdd.foreachPartition { records =>
        // Buffer one partition's aggregates and write them as a single batch
        // so the DAO opens at most one connection per partition.
        val batch = new ListBuffer[CourseSearchCount]()
        records.foreach { case (rowKey, count) =>
          batch.append(CourseSearchCount(rowKey, count))
        }
        CourseSearchCountDAO.saveList(batch)
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
