package main.scala.demo.kafka

import kafka.serializer.StringDecoder
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}


/**
  * MyDemoKafka
  *
  * @author zhangyimin
  * @date 2019-02-22 17:38
  * @version 1.0
  */
object MyDemoKafka_str_plus {

  /**
    * Entry point: consumes syslog messages from Kafka via a direct stream and
    * computes a per-IP PV (page-view) count for every 3-second micro-batch.
    */
  def main(args: Array[String]): Unit = {
    // Windows-only local development setup (hadoop.home.dir, log levels).
    local4WindowDev
    // local[2] means two local cores; streaming needs at least one free core
    // besides the driver to process batches.
    val sparkConf = new SparkConf().setAppName("MyNetworkWordCount").setMaster("local[2]")
    // On Windows, raise Spark's test-memory ceiling (alternatively pass
    // -Xms256m -Xmx1024m or -Dspark.testing.memory=1073741824 to the JVM).
    sparkConf.set("spark.testing.memory", "2147480000")
    val streamingContext = new StreamingContext(sparkConf, Seconds(3))

    // Kafka topic(s) to subscribe to.
    val topic = Set("topc_syslog_test")
    // Kafka connection properties: broker list (ip:port), consumer group, etc.
    val kafkaProps = Map[String, String](
      "metadata.broker.list" -> "172.16.1.239:9092,172.16.1.240:9092,172.16.1.241:9092",
      "group.id" -> "CID_PV_test",
      "serializer.class" -> "kafka.serializer.StringEncoder",
      "auto.offset.reset" -> "largest"
    )
    val kafkaStream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
      streamingContext,
      kafkaProps,
      topic)

    // Process each received micro-batch. Expected message format:
    // OPERATER: , IP: 192.168.8.36, TIME: 2019-02-27 01:34:11, METHOD: GET,
    // URL: http://..., QUERYSTR: workunitid=1 ,SERVER:huaxia-demo
    kafkaStream.foreachRDD(rdd => {
      // getOrCreate() returns the already-built session after the first
      // batch, so this does NOT construct a new SparkSession per batch.
      val spark = SparkSession
        .builder()
        .master("local[2]")
        .appName("test_sql_spark")
        .config("spark.sql.warehouse.dir", "file:///e:/tmp/spark-warehouse")
        .getOrCreate()
      getIP4PV(rdd, spark)
    })
    streamingContext.start()
    streamingContext.awaitTermination()
  }

  /**
    * Computes the PV (page views) per IP for one batch and prints the result.
    *
    * @param rdd   the (key, message) pairs of the current batch
    * @param spark the shared SparkSession used to run the SQL query
    */
  def getIP4PV(rdd: RDD[(String, String)], spark: SparkSession): Unit = {
    import spark.implicits._
    // BUG FIX: the original called .toDF() on an RDD[Some[MyMsg]] that could
    // contain null elements, which breaks the product encoder and yields a
    // wrong schema for the "select IP" query. Drop unparseable records and
    // unwrap the Option before building the DataFrame.
    val messages = createDataFrame(rdd)
      .filter(opt => opt != null && opt.isDefined)
      .map(_.get)
    val sqlDF = messages.toDF()
    sqlDF.show()
    sqlDF.createOrReplaceTempView("fm_loan_pv")
    val result = spark.sql("select IP,count(*) total from fm_loan_pv group by IP")

    result.show()
  }

  /**
    * Local development configuration for Windows: points Hadoop at the local
    * winutils installation and silences noisy Spark/Jetty logging.
    */
  def local4WindowDev: Unit = {
    System.setProperty("hadoop.home.dir", "C:\\hadoop2.6.0")
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)
  }

  /**
    * Parses raw Kafka messages into MyMsg records.
    *
    * Each message is a comma-separated list of "KEY: value" pairs; a key with
    * no value is stored with a null value. Lines that produce no key/value
    * pairs at all yield a null element, which the caller must filter out.
    *
    * @param rdd the (key, message) pairs read from Kafka
    * @return one Some(MyMsg) per parseable message, null otherwise
    */
  def createDataFrame(rdd: RDD[(String, String)]): RDD[Some[MyMsg]] = {
    rdd.map(x => {
      val line = x._2
      println(line)
      // Normalize "SERVER:host" to "SERVER: host" so the ": " split below works.
      val fields = line.replace("SERVER:", "SERVER: ").split(",")
      // val, not var: the map reference itself is never reassigned.
      val map = scala.collection.mutable.HashMap[String, String]()
      // (Iterating an empty array is a no-op, so no length guard is needed.)
      for (field <- fields) {
        val kvs = field.split(": ")
        if (kvs.length > 1) {
          map += (kvs(0).trim -> kvs(1).trim)
        } else if (kvs.length == 1) {
          // Key present but no value (e.g. "OPERATER: " with empty operator).
          map += (kvs(0) -> null)
        }
      }
      if (map.nonEmpty) {
        println(map)
        // BUG FIX: map.get(...) returns Option[String]; calling toString on it
        // produced strings like "Some(192.168.8.36)" / "None" instead of the
        // raw field value. Use orNull to unwrap (null when the key is absent).
        val obj = MyMsg(
          map.get("OPERATER").orNull,
          map.get("IP").orNull,
          map.get("TIME").orNull,
          map.get("METHOD").orNull,
          map.get("URL").orNull,
          map.get("QUERYSTR").orNull,
          map.get("SERVER").orNull
        )
        println(obj)
        Some(obj)
      } else {
        null
      }
    })
  }

  /**
    * Sample case class representing one parsed access-log message.
    *
    * @param OPERATER operator name (null when the field had no value)
    * @param IP       client IP address
    * @param TIME     request timestamp string
    * @param METHOD   HTTP method
    * @param URL      request URL
    * @param QUERYSTR query string
    * @param SERVER   server name
    */
  case class MyMsg(OPERATER: String, IP: String, TIME: String, METHOD: String, URL: String, QUERYSTR: String, SERVER: String)

}
