package com.lianzt

import java.util
import java.util.Date
import java.util.Map.Entry

import net.minidev.json.{JSONObject, JSONValue}
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.{Connection, ConnectionFactory, Put, Table}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.sql.{Encoders, ForeachWriter, Row, SparkSession}

import scala.collection.JavaConversions

object Ping {
  // Shared HBase configuration; ZooKeeper quorum host is hard-coded for the target cluster.
  val config = HBaseConfiguration.create()
  config.set("hbase.zookeeper.quorum", "FatServer1")

  /**
    * Streams raw log lines from the Kafka topic "test", extracts the JSON payload of
    * "analyze-log" messages, stamps each record with a formatted event time, and fans
    * the resulting JSON strings out to two sinks: the Kafka topic "spark" and the
    * HBase table "tccgl_ping".
    *
    * Blocks until the streaming queries terminate.
    *
    * @param spark active SparkSession used to build the streaming queries
    */
  def analyze(spark: SparkSession): Unit = {

    val lines = spark
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "localhost:9092")
      .option("subscribe", "test")
      //      .option("startingOffsets", "earliest")    // earliest offset
      .option("startingOffsets", "latest") // latest offset
      .option("failOnDataLoss", false)
      .load()

    println("connect kafka success")


    lines.printSchema()

    import spark.implicits._


    val df = lines.selectExpr("CAST(value AS STRING)").as[String]

    val log = df
      // Keep only analyze-log lines. `> 0` (not `>= 0`) also drops a marker at
      // position 0 — presumably lines always carry a prefix before the marker;
      // kept as-is to preserve existing behavior.
      .filter(x => x.indexOf("[analyze-log-") > 0)
      .map { x =>
        val record = JSONValue.parse(x).asInstanceOf[JSONObject]
        // The payload follows the "-->" separator inside field "m".
        var m = record.get("m").asInstanceOf[String]
        m = m.substring(m.indexOf("-->") + 3)
        // Field "t" is an epoch-millis Long; format it for the output record.
        val time = Util.dateFormat(new Date(record.get("t").asInstanceOf[Long]))
        // BUG FIX: was `$"..."` — with spark.implicits._ in scope that is the
        // ColumnName interpolator, so a Column was printed instead of the
        // message. Use the plain `s` string interpolator. Note this println
        // executes on the executors, not the driver.
        println(s"recving message : $m")
        val logJson = JSONValue.parse(m).asInstanceOf[JSONObject]
        logJson.put("time", time)
        logJson.toJSONString
      }

    df.printSchema()

    println("print streaming : ")


    // Sink 1: forward the enriched JSON to the Kafka topic "spark".
    val kafka = log
      .selectExpr("CAST(value AS STRING)")
      .writeStream
      .outputMode("append")
      .format("kafka")
      .option("kafka.bootstrap.servers", "localhost:9092")
      .option("topic", "spark")
      .option("checkpointLocation", "/home/lzt/tmp/spark")
      .start()

    // Sink 2: persist each record into HBase via a per-partition ForeachWriter.
    val hbase = log
      .selectExpr("CAST(value AS STRING)")
      .writeStream
      .outputMode("append")
      .foreach(new WriteHbase())
      .start()

    kafka.awaitTermination()
    hbase.awaitTermination()
  }

  /**
    * ForeachWriter sink that writes each JSON record as one row into the HBase
    * table "tccgl_ping". The row key is "&lt;time&gt;-&lt;id&gt;" and every JSON field
    * becomes a column in family "cf".
    */
  class WriteHbase extends ForeachWriter[Row] {
    var con: Connection = null
    var table: Table = null

    // Opens one HBase connection/table per partition-epoch; always reports ready.
    override def open(partitionId: Long, version: Long): Boolean = {
      con = ConnectionFactory.createConnection(config)
      table = con.getTable(TableName.valueOf("tccgl_ping"))
      true
    }

    override def close(errorOrNull: Throwable): Unit = {
      // BUG FIX: also release the Table handle, and guard against a failed
      // open() in which `table`/`con` may still be null.
      if (table != null) table.close()
      if (con != null) con.close()
    }

    override def process(x: Row): Unit = {
      val logJson = JSONValue.parse(x.getAs[String](0)).asInstanceOf[JSONObject]
      // Row key: "<formatted time>-<id>".
      val put = new Put(Bytes.toBytes(logJson.get("time").asInstanceOf[String] + "-" + logJson.get("id").asInstanceOf[String]))
      // JSONObject extends java.util.HashMap, so iterate the typed entry set
      // directly instead of copying to Array[AnyRef] and downcasting each element.
      val it = logJson.entrySet().iterator()
      while (it.hasNext) {
        val element = it.next()
        put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes(element.getKey), Bytes.toBytes(element.getValue.toString))
      }
      table.put(put)
    }
  }

}
