package main.scala.demo.kafka

import kafka.serializer.StringDecoder
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka.KafkaUtils
import org.json4s._
import org.json4s.jackson.JsonMethods._

/**
  * MyDemoKafka_json — Spark Streaming demo that consumes JSON audit-log
  * messages from Kafka (direct stream, 0.8 API), parses each record into
  * [[MyDemoKafka_json.MyMsg]] with json4s, and aggregates page views per
  * client IP with Spark SQL on every 3-second micro-batch.
  *
  * @author zhangyimin
  * @date 2019-02-22 17:38
  * @version 1.0
  */
object MyDemoKafka_json {

  /**
    * Entry point: wires Kafka -> Spark Streaming -> Spark SQL.
    *
    * Per 3-second batch it prints each raw message, the parsed [[MyMsg]],
    * the full DataFrame, and a page-view count grouped by IP.
    *
    * @param args unused
    */
  def main(args: Array[String]): Unit = {
    // Spark on Windows needs a local Hadoop home for winutils.
    System.setProperty("hadoop.home.dir", "C:\\hadoop2.6.0")
    // Silence noisy framework logging so the batch output stays readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    // local[2]: at least two threads so receiving and processing can overlap.
    val sparkConf = new SparkConf().setAppName("MyNetworkWordCount").setMaster("local[2]")
    // Raise Spark's testing-memory floor on Windows.
    // Alternatives: -Xms256m -Xmx1024m, or -Dspark.testing.memory=1073741824.
    sparkConf.set("spark.testing.memory", "2147480000")
    // Micro-batch interval of 3 seconds.
    val streamingContext = new StreamingContext(sparkConf, Seconds(3))

    // Kafka topic(s) to consume.
    val topic = Set("topc_syslog_test")
    // Kafka consumer properties: broker list, consumer group, and
    // "largest" = start from the latest offset when none is committed.
    val kafkaProps = Map[String, String](
      "metadata.broker.list" -> "172.16.1.239:9092,172.16.1.240:9092,172.16.1.241:9092",
      "group.id" -> "CID_PV_test",
      "auto.offset.reset" -> "largest"
    )

    // Direct (receiver-less) stream of (key, value) string pairs.
    val kafkaStream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
      streamingContext,
      kafkaProps,
      topic)

    // Process each micro-batch RDD.
    kafkaStream.foreachRDD(rdd => {
      // getOrCreate returns the already-built session after the first batch.
      val spark = SparkSession
        .builder()
        .master("local[2]")
        .appName("test_sql_spark")
        .config("spark.sql.warehouse.dir", "file:///e:/tmp/spark-warehouse")
        .getOrCreate()

      // Example payload (one JSON object per Kafka message value):
      // OPERATER: ..., IP: 192.168.14.46, TIME: 2019-02-25 15:06:53, METHOD: POST,
      // URL: http://.../getProductDetail, QUERYSTR: {"investProductCode":["64"]}
      val messages = rdd.map(record => {
        println(record._2.toString)
        // json4s needs implicit Formats for extract; declared inside the
        // closure so it is created on the executor rather than serialized.
        implicit val formats = DefaultFormats
        val msg: MyMsg = parse(record._2.toString).extract[MyMsg]
        println(msg.toString)
        msg
      })

      import spark.sqlContext.implicits._
      // Build the DataFrame once and reuse it; the original materialized a
      // second identical DataFrame just to call show().
      val sqlDF = messages.toDF()
      sqlDF.show()
      sqlDF.createOrReplaceTempView("fm_loan_pv")
      // Page views per client IP for this batch.
      val result = spark.sql("select IP,count(*) total from fm_loan_pv group by IP")
      result.show()
    })

    streamingContext.start()
    streamingContext.awaitTermination()
  }

  /** One parsed syslog/audit message; field names match the JSON keys. */
  case class MyMsg(OPERATER: String, IP: String, TIME: String, METHOD: String, URL: String, QUERYSTR: String)

}
