package main.scala.demo.kafka

import kafka.serializer.StringDecoder
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * MyDemoKafka
  *
  * @author zhangyimin
  * @date 2019-02-22 17:38
  * @version 1.0
  */
object MyDemoKafka {

  /**
    * Entry point: consumes raw access-log lines from Kafka via Spark Streaming
    * (3-second micro-batches), converts each comma-separated record into a
    * JSON-like string and prints it.
    *
    * Incoming record format (one line per access-log event), for example:
    *   OPERATER:"zym",IP: 192.168.13.163,TIME:2019-02-22 01:52:13,METHOD: GET,URL: http://192.168.12.241:20000/demo/list/log,QUERYSTR: workunitid=1
    */
  def main(args: Array[String]): Unit = {
    // Windows-only Hadoop workaround (winutils location); harmless elsewhere.
    System.setProperty("hadoop.home.dir", "C:\\hadoop2.6.0")
    // Silence noisy framework logging so the printed records are readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    // local[2]: at least two threads are required for streaming
    // (one to receive data, one to process it).
    val sparkConf = new SparkConf().setAppName("MyNetworkWordCount").setMaster("local[2]")
    // On Windows, if the JVM heap is too small, set -Xms256m -Xmx1024m
    // or -Dspark.testing.memory=1073741824 (or sparkConf.set("spark.testing.memory", ...)).
    val streamingContext = new StreamingContext(sparkConf, Seconds(3))

    // Kafka topic(s) to subscribe to.
    val topic = Set("topc_syslog_test")
    // Kafka consumer properties: broker ip:port list, consumer group, offset policy.
    val kafkaProps = Map[String, String](
      "metadata.broker.list" -> "172.16.1.239:9092,172.16.1.240:9092,172.16.1.241:9092",
      "group.id" -> "CID_PV_test",
      // "largest": start from the latest offset when the group has no committed offset.
      "auto.offset.reset" -> "largest"
    )
    val kafkaStream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
      streamingContext,
      kafkaProps,
      topic)

    // Process every micro-batch.
    // BUGFIX: the original code built the JSON string inside rdd.map(...) but
    // never applied an action, so the transformation was never executed (RDD
    // transformations are lazy), and it then printed the Array reference
    // instead of the joined string. rdd.foreach is an action and forces
    // evaluation; the joined string is what gets printed.
    kafkaStream.foreachRDD(rdd => {
      // record is (key, value); the value carries the log line.
      rdd.foreach(record => println(toJsonLine(record._2)))
    })

    streamingContext.start()
    streamingContext.awaitTermination()
  }

  /**
    * Converts one comma-separated "KEY: value" log line into a JSON-like string.
    *
    * Each comma-separated segment is split on ':' and re-joined with the whole
    * segment wrapped in double quotes; all segments are then joined with commas
    * inside braces. Values containing ':' (e.g. timestamps, URLs) survive the
    * split because the parts are re-joined with the same ':' separator.
    *
    * BUGFIX: the original used an empty separator in the outer mkString
    * ("{", "", "}"), fusing all fields together; "," matches the JSON-like intent.
    *
    * @param line one raw access-log record
    * @return the braces-wrapped, comma-joined representation of the record
    */
  private[kafka] def toJsonLine(line: String): String =
    line.split(",")
      .map(field => field.split(":").mkString("\"", ":", "\""))
      .mkString("{", ",", "}")

  // Typed view of one access-log record. Kept for the (currently disabled)
  // json4s + Spark SQL aggregation path, e.g.:
  //   parse(record._2).extract[MyMsg], register a temp view "fm_loan_pv",
  //   then: select IP, count(*) total from fm_loan_pv group by IP
  case class MyMsg(OPERATER: String, IP: String, TIME: String, METHOD: String, URL: String, QUERYSTR: String)

}
