package com.lvmama.rhino.analyze.client

import com.lvmama.rhino.common.entity.{JobConfig, KafkaTopic, RequestFlow}
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.ImplicitLoad
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.kafka.{KafkaDStreamSource, KafkaPayloadStringCodec}
import com.lvmama.rhino.common.utils.logging.LazyLogging
import com.lvmama.rhino.common.utils.spark.StreamingApplication
import org.apache.spark.SparkContext
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.functions.col
import org.apache.spark.streaming.{StreamingContext, Time}
import org.elasticsearch.spark.sql._

/**
  * Spark Streaming job that consumes front-end page-performance events from
  * Kafka topic "flume_monitorPage_topic", enriches them with threshold rows
  * loaded from a MySQL "threshold" table, and writes two daily Elasticsearch
  * indices: page-load metrics ("yyyyMMdd/monitorPageInfo") and per-resource
  * request timings ("yyyyMMdd/requestFlow").
  *
  * Created by Administrator on 2017/12/22.
  *
  * @param config   job configuration: Spark properties, source-Kafka settings,
  *                 and the streaming checkpoint directory map
  * @param duration streaming batch interval passed to
  *                 [[streamingBatchDuration]] — unit (seconds vs ms) is
  *                 defined by StreamingApplication; TODO confirm
  */
class MonitorPageStreaming (config: JobConfig, duration: Long) extends StreamingApplication
  with Serializable with LazyLogging  {

  // Spark properties from the job config; execute() adds/overrides a few below.
  override var sparkConfig: Map[String, String] = config.spark
  override var appName: String = "MonitorPageStreaming"

  override def streamingBatchDuration: Long = duration

  // Map-style lookup: use the "monitorPageStreamingCP" entry if configured,
  // otherwise fall back to a hard-coded checkpoint path.
  override def streamingCheckpointDir: String = config.streamingCheckpointDir
    .getOrElse("monitorPageStreamingCP", "/checkpoint/monitorPage_cp")

  // Source-Kafka settings with a dedicated consumer group so this job's
  // offsets are isolated from other streaming jobs.
  var kafkaConf = config.sourceKafka
  kafkaConf += ("group.id"->"sparkStreaming_group_monitorPage")

  private val kafkaSourceTopic = KafkaTopic("flume_monitorPage_topic")

  /**
    * Builds (or restores from checkpoint) the StreamingContext, wires up the
    * two output pipelines, then starts the job and blocks until termination.
    */
  def execute() = {

    sparkConfig += ("spark.sql.autoBroadcastJoinThreshold" -> "10485760")
    // NOTE(review): hard-coded local master overrides whatever the config
    // supplies — looks like a leftover from local testing; confirm before
    // deploying to a cluster.
    sparkConfig += ("spark.master" -> "local[2]")
    sparkConfig += ("spark.app.name" -> appName)

    def func = (sc: SparkContext, ssc: StreamingContext) => {
      val streaming = KafkaDStreamSource(kafkaConf).createSource(ssc, kafkaSourceTopic.name)

      // Threshold rows keyed by application_type; reassigned on the driver in
      // each foreachRDD below after reloading from MySQL.
      // NOTE(review): this var is captured by the map() closures at DStream
      // setup time — whether executors ever see the refreshed map depends on
      // closure serialization; verify the updated values actually reach
      // Utils.initMonitorPageInfo / RequestFlow.process on executors.
      var threshold = Map[Any,Row]()
      val lines = streaming
        .flatMap(KafkaPayloadStringCodec().decodeValue(_))
        .map(l => Utils.initMonitorPageInfo(l,threshold))

      // Second pass over the same stream: every Kafka payload is decoded
      // twice (here and above) — a shared decoded DStream (with cache())
      // would avoid the duplicate work.
      val linesRF = streaming
        .flatMap(KafkaPayloadStringCodec().decodeValue(_))
        .map(l => Utils.initRequestFlow(l))

      // Expand each page record's request list (tuple element _5) into one
      // processed RequestFlow row per resource request.
      val log = linesRF.flatMap{
        x =>x._5.map(y =>RequestFlow.process(x._1, x._2, x._3, x._4, y, threshold))
      }

      // Deduplication keys for the page-load output (must match the aliases
      // selected in the foreachRDD below).
      val pageColumns = Seq("url","pid","time","tec","ip","operators","networkType","city","province","latitudeValue","longtitudeValue","mobileNumber","usersId","resolution","deviceName"
        ,"deviceToken","osVersion","ua","whiteScreenTime","firstScreenTime","interactiveTime","completeLoadTime","isWS","isFS","isCL","requestStart"
        ,"responseEnd","responseStart","domainLookupEnd","domainLookupStart","connectStart","connectEnd","secureConnectionStart","loadEventEnd","navigationStart"
        ,"unload","redirectStart","redirectEnd","domLoading","loadEventStart")

      // Deduplication keys for the request-flow output.
      val requestColumns = Seq("url","pid","time","name","resourceType","duration","decodeBodySize","startLoadingTime","responseTime","isSlow")

      // Page-load pipeline: per batch, refresh thresholds, flatten the nested
      // map columns, drop rows with missing url/pid, dedupe, and index into
      // today's Elasticsearch index.
      lines.foreachRDD((rdd, time: Time) => {
        val sqlContext = SparkSession.builder().getOrCreate().sqlContext
        // Driver-side refresh of slow-load thresholds, once per batch.
        threshold = sqlContext
          .loadFromMysqlWolverine("threshold")
          .select("application_type", "slow_load", "slow_first_screen", "slow_white_screen", "slow_element", "abnormal_data")
          .collect()
          .map(r => (r(0), r))
          .toMap
        import sqlContext.implicits._
        val wordsDataFrame = rdd.toDF()
        // Discard records with an empty url or an empty/placeholder pid.
        val result = wordsDataFrame.filter(!col("url").equalTo("") && !col("pid").equalTo("") && !col("pid").equalTo("NONE"))
        // Flatten the nested userInformation/deviceInformation/userTime/
        // threshold/events map columns into the flat schema listed in
        // pageColumns, then drop exact duplicates.
        val data = result.select(col("url"),col("pid"),col("time"),col("tec"),col("userInformation").getItem("ip").as("ip"),col("userInformation").getItem("operators").as("operators")
          ,col("userInformation").getItem("networkType").as("networkType"),col("userInformation").getItem("city").as("city"),col("userInformation").getItem("province").as("province")
          ,col("userInformation").getItem("latitude").as("latitudeValue"),col("userInformation").getItem("longtitude").as("longtitudeValue"),col("userInformation").getItem("mobileNumber").as("mobileNumber")
          ,col("userInformation").getItem("usersId").as("usersId")
          ,col("deviceInformation").getItem("resolution").as("resolution"),col("deviceInformation").getItem("deviceName").as("deviceName"),col("deviceInformation").getItem("deviceToken").as("deviceToken")
          ,col("deviceInformation").getItem("osVersion").as("osVersion"),col("deviceInformation").getItem("ua").as("ua")
          ,col("userTime").getItem("whiteScreenTime").as("whiteScreenTime"),col("userTime").getItem("firstScreenTime").as("firstScreenTime")
          ,col("userTime").getItem("interactiveTime").as("interactiveTime"),col("userTime").getItem("completeLoadTime").as("completeLoadTime")
          ,col("threshold").getItem("isWS").as("isWS"),col("threshold").getItem("isFS").as("isFS"),col("threshold").getItem("isCL").as("isCL")
          ,col("events").getItem("requestStart").as("requestStart"),col("events").getItem("responseEnd").as("responseEnd"),col("events").getItem("responseStart").as("responseStart")
          ,col("events").getItem("domainLookupEnd").as("domainLookupEnd"),col("events").getItem("domainLookupStart").as("domainLookupStart"),col("events").getItem("connectStart").as("connectStart")
          ,col("events").getItem("connectEnd").as("connectEnd"),col("events").getItem("secureConnectionStart").as("secureConnectionStart"),col("events").getItem("loadEventEnd").as("loadEventEnd")
          ,col("events").getItem("navigationStart").as("navigationStart"),col("events").getItem("unload").as("unload"),col("events").getItem("redirectStart").as("redirectStart")
          ,col("events").getItem("redirectEnd").as("redirectEnd"),col("events").getItem("domLoading").as("domLoading"),col("events").getItem("loadEventStart").as("loadEventStart"))
          .dropDuplicates(pageColumns)
//        data.show()

        // One Elasticsearch index per day: "<yyyyMMdd>/monitorPageInfo".
        val today = Utils.getToday("yyyyMMdd")
        data.saveToEs(today+"/monitorPageInfo")
      })

      // Request-flow pipeline: same per-batch threshold refresh and url/pid
      // filtering, projected onto the flat requestColumns schema.
      log.foreachRDD((rdd, time: Time) => {
        val sqlContext = SparkSession.builder().getOrCreate().sqlContext
        // Driver-side refresh of slow-load thresholds, once per batch
        // (duplicated from the page pipeline above).
        threshold = sqlContext
          .loadFromMysqlWolverine("threshold")
          .select("application_type", "slow_load", "slow_first_screen", "slow_white_screen", "slow_element", "abnormal_data")
          .collect()
          .map(r => (r(0), r))
          .toMap
        import sqlContext.implicits._
        val wordsDataFrame = rdd.toDF()
        val result = wordsDataFrame.filter(!col("url").equalTo("") && !col("pid").equalTo("") && !col("pid").equalTo("NONE"))
        val data = result.select(col("url"),col("pid"),col("time"),col("name"),col("resourceType"),col("duration")
          ,col("decodeBodySize"),col("startTime").as("startLoadingTime"),col("responseTime"),col("isSlow"))
          .dropDuplicates(requestColumns)
//        data.show()

        // One Elasticsearch index per day: "<yyyyMMdd>/requestFlow".
        val today = Utils.getToday("yyyyMMdd")
        data.saveToEs(today+"/requestFlow")
      })

    }

    // Restore from the checkpoint directory when one exists; otherwise build
    // a fresh context via func. Blocks on awaitTermination until stopped.
    val ssc = StreamingContext.getOrCreate(streamingCheckpointDir, () => withSparkStreamingContext(func))
    ssc.start()
    ssc.awaitTermination()
  }
}


/**
  * Entry-point helper: builds a default [[JobConfig]] and runs the streaming
  * job with the given batch duration, blocking until the job terminates.
  */
object MonitorPageStreaming {
  def apply(duration: Long): Unit =
    new MonitorPageStreaming(JobConfig(), duration).execute()
}
