package com.lvmama.rhino.analyze.client

import java.text.SimpleDateFormat
import java.util.{Date, Locale}

import com.lvmama.rhino.common.entity._
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.ImplicitLoad
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.kafka.{KafkaDStreamSource, KafkaPayloadStringCodec}
import com.lvmama.rhino.common.utils.logging.LazyLogging
import com.lvmama.rhino.common.utils.methodset.InterfaceInvoke
import com.lvmama.rhino.common.utils.spark.StreamingApplication
import org.apache.spark.SparkContext
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.streaming.{StreamingContext, Time}
import org.elasticsearch.spark.sql._

/**
  * Spark Streaming job that consumes API performance-monitor events from the
  * Kafka topic `flume_monitor_topic`, decodes and normalises each record,
  * de-duplicates rows per micro-batch, and indexes the result into a daily
  * Elasticsearch index (`yyyyMMdd/monitorApi`).
  *
  * @param config   job configuration (Spark settings, Kafka source, checkpoint dir)
  * @param duration streaming batch duration passed to [[StreamingApplication]]
  */
class MonitorApiStreaming(config: JobConfig, duration: Long) extends StreamingApplication
  with Serializable with LazyLogging  {
  override var sparkConfig: Map[String, String] = config.spark
  override var appName: String = "MonitorApiStreaming"

  override def streamingBatchDuration: Long = duration

  override def streamingCheckpointDir: String = config.streamingCheckpointDir
    .getOrElse("monitorApiStreamingCP", "/checkpoint/monitorApi_cp")

  // Dedicated consumer group for this job; overrides any group.id in the source config.
  var kafkaConf = config.sourceKafka
  kafkaConf += ("group.id"->"sparkStreaming_group")

  private val kafkaSourceTopic = KafkaTopic("flume_monitor_topic")

  /**
    * Builds (or recovers from checkpoint) the StreamingContext, wires the
    * Kafka -> transform -> Elasticsearch pipeline, then blocks until the
    * streaming application terminates.
    */
  def execute(): Unit = {

    sparkConfig += ("spark.sql.autoBroadcastJoinThreshold" -> "10485760")
    // BUG FIX: the original unconditionally forced "local[2]", silently
    // clobbering any master supplied via config or spark-submit on a cluster.
    // Only default to local[2] when no master has been configured.
    if (!sparkConfig.contains("spark.master")) {
      sparkConfig += ("spark.master" -> "local[2]")
    }
    sparkConfig += ("spark.app.name" -> appName)

    def func = (sc: SparkContext, ssc: StreamingContext) => {
      val streaming = KafkaDStreamSource(kafkaConf).createSource(ssc, kafkaSourceTopic.name)
      // Captured once at job start; NOTE(review): on a long-running stream this
      // value goes stale after midnight — confirm that is intended downstream.
      val yesterday = Utils.getYesterdayTimeInMillis()
      val lines = streaming
        .flatMap(KafkaPayloadStringCodec().decodeValue(_))
        .map(l => PerformanceMonitor.prePerformanceMonitor(l))

      // Field _15 holds a collection of per-API entries; fan each record out
      // to one fully-processed log row per entry.
      val log =  lines.flatMap{
        x =>x._15.map(y =>PerformaceMonitorLog.fullProcess(x._1, x._2, x._3, x._4, x._5, x._6, x._7, x._8, x._9, x._10, x._11, x._12, x._13, x._14, y, yesterday))
      }

      // Column set used as the de-duplication key for each micro-batch.
      val columns = Seq("operators","networkType","deviceToken","tec","version","province","city","mobileNumber","usersId","networkSpeed","osVersion",
        "browser","deviceName","interfaceUniqueFlag","pageId","networkTime","apiTime","apiName","apiVersion","requestType","time","errorCode")

      log.foreachRDD((rdd, time: Time) => {
        val sqlContext = SparkSession.builder().getOrCreate().sqlContext
        import sqlContext.implicits._
        val wordsDataFrame = rdd.toDF()
        // Drop rows with no API name before projecting/renaming columns.
        val result = wordsDataFrame.filter(!col("apiName").equalTo(""))
        val data = result.select(col("operators"),col("networkType"),col("deviceToken"),col("tec"),col("version"),col("province"),col("city"),
          col("mobileNumber"),col("userId").as("usersId"),col("networkSpeed"),col("osVersion"),col("browser"),col("deviceName"),col("interfaceUniqueFlag"),col("pageId"),
          col("networkTime"),col("apiTime"),col("apiName"),col("apiVersion"),col("requestType"),col("time"),col("errorCode"))
          .dropDuplicates(columns)

        // PERF FIX: count -> saveToEs -> count triggers three actions; without
        // caching, the whole Kafka-decode lineage is recomputed each time.
        data.persist()
        try {
          logger.info(s"monitorApi count:${data.count()}")
          val today = Utils.getToday("yyyyMMdd")
          logger.info(s"monitorApi index:$today")
          // interfaceUniqueFlag doubles as the ES document id, so replays upsert
          // rather than duplicate documents.
          data.saveToEs(today+"/monitorApi",Map("es.mapping.id" -> "interfaceUniqueFlag"))
          logger.info(s"monitorApi after count:${data.count()}")
        } finally {
          data.unpersist()
        }
      })

    }

    // Recover from checkpoint if present, otherwise build a fresh context.
    val ssc = StreamingContext.getOrCreate(streamingCheckpointDir, () => withSparkStreamingContext(func))
    ssc.start()
    ssc.awaitTermination()
  }
}

object MonitorApiStreaming {
  /**
    * Convenience entry point: loads the default [[JobConfig]], constructs the
    * streaming job with the given batch duration, and runs it to completion
    * (blocks until the streaming context terminates).
    *
    * @param duration streaming batch duration forwarded to the job
    */
  def apply(duration: Long): Unit = {
    val jobConfig = JobConfig()
    val job = new MonitorApiStreaming(jobConfig, duration)
    job.execute()
  }
}
