package com.lvmama.rhino.analyze.client

import com.lvmama.rhino.common.entity.{JobConfig, JsError, KafkaTopic, RequestFlow}
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.kafka.{KafkaDStreamSource, KafkaPayloadStringCodec}
import com.lvmama.rhino.common.utils.logging.LazyLogging
import com.lvmama.rhino.common.utils.spark.StreamingApplication
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col
import org.apache.spark.streaming.{StreamingContext, Time}
import org.elasticsearch.spark.sql._

/**
  * Spark Streaming job that consumes page JavaScript-error events from the
  * "monitorPageJs" Kafka topic, decodes and filters them, and indexes the
  * de-duplicated records into a per-day Elasticsearch index ("yyyyMMdd/jsError").
  *
  * Created by Administrator on 2017/12/28.
  *
  * @param config   job configuration (Spark settings, Kafka source settings, checkpoint dir)
  * @param duration streaming micro-batch duration, in the unit expected by StreamingApplication
  */
class MonitorPageJsStreaming (config: JobConfig, duration: Long) extends StreamingApplication
  with Serializable with LazyLogging  {

  override var sparkConfig: Map[String, String] = config.spark
  override var appName: String = "MonitorPageJsStreaming"

  override def streamingBatchDuration: Long = duration

  // NOTE(review): two-argument getOrElse suggests streamingCheckpointDir is a
  // Map keyed by job name — confirm against JobConfig; kept byte-identical here.
  override def streamingCheckpointDir: String = config.streamingCheckpointDir
    .getOrElse("monitorPageJsStreamingCP", "/checkpoint/monitorPageJs_cp")

  // Kafka consumer settings with a dedicated consumer group for this job,
  // built in one expression instead of a var followed by a mutating +=.
  var kafkaConf = config.sourceKafka + ("group.id" -> "sparkStreaming_group_monitorPageJs")

  private val kafkaSourceTopic = KafkaTopic("monitorPageJs")

  /**
    * Builds (or restores from checkpoint) the streaming context and blocks
    * until the stream terminates. Side-effecting, hence the explicit Unit.
    */
  def execute(): Unit = {

    // NOTE(review): "spark.master" -> "local[2]" pins the job to local mode and
    // overrides any cluster master supplied via config — confirm this is
    // intentional outside of development.
    sparkConfig ++= Map(
      "spark.sql.autoBroadcastJoinThreshold" -> "10485760",
      "spark.master" -> "local[2]",
      "spark.app.name" -> appName
    )

    def func = (sc: SparkContext, ssc: StreamingContext) => {
      val streaming = KafkaDStreamSource(kafkaConf).createSource(ssc, kafkaSourceTopic.name)

      // Decode raw Kafka payloads to strings, then parse each line into a
      // 3-tuple whose last element is the collection of JS errors for that line.
      val lines = streaming
        .flatMap(KafkaPayloadStringCodec().decodeValue(_))
        .map(Utils.initJsError)

      // Expand every JS error of a parsed line into its own record.
      val log = lines.flatMap { case (a, b, errors) =>
        errors.map(err => JsError.process(a, b, err))
      }

      // Column order must match the fields produced by JsError.process;
      // reused below for both projection and de-duplication.
      val columns = Seq("url", "pid", "jsName", "errorType", "message", "stack")

      log.foreachRDD((rdd, time: Time) => {
        // getOrCreate reuses the singleton SparkSession across batches —
        // the documented pattern for DataFrame work inside foreachRDD.
        val sqlContext = SparkSession.builder().getOrCreate().sqlContext
        import sqlContext.implicits._

        val wordsDataFrame = rdd.toDF()
        // Drop records with an empty url, or an empty/placeholder ("NONE") pid.
        val result = wordsDataFrame.filter(
          !col("url").equalTo("") && !col("pid").equalTo("") && !col("pid").equalTo("NONE"))
        // Project onto the indexed columns (derived from `columns` instead of
        // repeating the list) and de-duplicate identical errors within the batch.
        val data = result
          .select(columns.map(col): _*)
          .dropDuplicates(columns)

        // Daily index, e.g. "20171228/jsError".
        val today = Utils.getToday("yyyyMMdd")
        data.saveToEs(today + "/jsError")
      })
    }

    // Restore from the checkpoint directory if one exists; otherwise build a
    // fresh context via func. start() then block forever.
    val ssc = StreamingContext.getOrCreate(streamingCheckpointDir, () => withSparkStreamingContext(func))
    ssc.start()
    ssc.awaitTermination()
  }

}

/**
  * Entry-point helper: builds the job from the default JobConfig and runs it
  * with the given micro-batch duration, blocking until the stream terminates.
  */
object MonitorPageJsStreaming {
  def apply(duration: Long): Unit =
    new MonitorPageJsStreaming(JobConfig(), duration).execute()
}
