package com.lianzt

import java.util

import net.minidev.json.{JSONObject, JSONValue}
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Encoders, SparkSession}
import org.apache.spark.sql.functions._

/**
  * Experimental Spark Structured Streaming jobs that read log lines from a
  * local Kafka broker and either echo them to the console, re-emit a derived
  * JSON document to another Kafka topic, or parse the payload with an explicit
  * schema. Entry point is [[main]]; the other jobs are kept for manual runs.
  */
object Kafka {

  /** Result wrapper: whether a log message contained the checkpoint marker. */
  case class State(state: Boolean)

  // Connection settings shared by all three jobs, extracted so they cannot drift apart.
  private val KafkaServers = "localhost:9092"
  private val InputTopic = "test"
  private val OutputTopic = "spark"
  private val CheckpointDir = "/home/lzt/tmp/spark"

  def main(args: Array[String]): Unit = {
    val spark =
      SparkSession.builder
        .appName("kafka2Spark2Cassandra")
        .getOrCreate()

    try {
      // Alternative jobs, kept for manual experimentation:
      //   kafkaJson(spark)
      //   jsonData(spark)
      Ping.analyze(spark)
    } finally {
      // Release driver resources even if the job throws before blocking.
      spark.stop()
    }
  }

  /**
    * Opens a streaming DataFrame subscribed to [[InputTopic]] and logs its schema.
    *
    * @param spark active session used to build the reader
    * @return the raw Kafka source DataFrame (binary key/value plus metadata columns)
    */
  private def readKafkaStream(spark: SparkSession) = {
    val lines = spark
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", KafkaServers)
      .option("subscribe", InputTopic)
      // .option("startingOffsets", "earliest") // uncomment to replay the whole topic
      .load()

    println("connect kafka success")
    lines.printSchema()
    lines
  }

  /**
    * Echoes every (topic, value) pair from Kafka to the console. Blocks forever.
    *
    * @param spark active session
    */
  def outputConsole(spark: SparkSession): Unit = {
    val lines = readKafkaStream(spark)

    // Kafka delivers key/value as binary; cast to strings before processing.
    implicit val e = Encoders.tuple(Encoders.STRING, Encoders.STRING)
    val df = lines.selectExpr("CAST(topic AS STRING)", "CAST(value AS STRING)").as[(String, String)]
    df.printSchema()

    val query = df.writeStream
      .outputMode("append")
      .format("console")
      .start()
    query.awaitTermination()
  }

  /**
    * Reads JSON log lines, derives a one-field JSON document
    * ({"state": "true"/"false"} — whether the "m" field mentions the
    * checkpoint marker) and writes it to [[OutputTopic]]. Blocks forever.
    *
    * @param spark active session
    */
  def kafkaJson(spark: SparkSession): Unit = {
    val lines = readKafkaStream(spark)

    implicit val e = Encoders.tuple(Encoders.STRING, Encoders.STRING)
    val df = lines.selectExpr("CAST(topic AS STRING)", "CAST(value AS STRING)").as[(String, String)]
    df.printSchema()

    import spark.implicits._

    // Per-record transformation: parse the payload, probe the "m" field for the
    // checkpoint marker, fall back to "false" on any malformed record.
    val json = df.map { x =>
      println(s"recv line : $x")
      val map = new util.HashMap[String, Object]()
      try {
        val m = JSONValue.parse(x._2).asInstanceOf[JSONObject].get("m").asInstanceOf[String]
        map.put("state", (m.indexOf("检查点") != -1).toString)
      } catch {
        case e: Exception =>
          // Bad JSON / missing field: log it and emit a negative state rather than
          // failing the whole streaming query.
          e.printStackTrace()
          map.put("state", "false")
      }
      JSONObject.toJSONString(map)
    }

    println("print streaming : ")

    // The mapped Dataset[String] surfaces as a "value" column, which is exactly
    // what the Kafka sink expects.
    val query = json
      .selectExpr("CAST(value AS STRING)")
      .writeStream
      .outputMode("append")
      .format("kafka")
      .option("kafka.bootstrap.servers", KafkaServers)
      .option("topic", OutputTopic)
      .option("checkpointLocation", CheckpointDir)
      .start()
    query.awaitTermination()
  }

  /**
    * Parses the JSON payload with an explicit schema and prints each parsed row
    * to the console. Blocks forever.
    *
    * @param spark active session
    */
  def jsonData(spark: SparkSession): Unit = {
    val lines = readKafkaStream(spark)

    // Expected payload shape; all fields nullable so partial records still parse.
    val schema = StructType(Seq(
      StructField("a", StringType, true),
      StructField("t", LongType, true),
      StructField("p", StringType, true),
      StructField("g", StringType, true),
      StructField("m", StringType, true)
    ))

    val df = lines.selectExpr("CAST(value AS STRING) as json")
    val json = df.select(from_json(df.col("json"), schema = schema).as("data"))
    json.printSchema()

    import spark.implicits._

    val query = json.map { x =>
      println(s"json ------> $x")
      x.toSeq.toString
    }
      // .where("data.p='WARN'") // optional severity filter (apply on `json`, before the map)
      .writeStream
      .outputMode("append")
      .format("console")
      // To write back to Kafka instead of the console:
      // .format("kafka")
      // .option("kafka.bootstrap.servers", KafkaServers)
      // .option("topic", OutputTopic)
      // .option("checkpointLocation", CheckpointDir)
      .start()
    query.awaitTermination()
  }
}
