package com.taoqi.ss

import com.alibaba.fastjson.JSON
import org.apache.spark.SparkConf
import org.apache.spark.sql.{ForeachWriter, Row, SparkSession}

/**
  * Created by TQ on 2017/9/21.
  */
object SSTest5 {


  /**
    * Streams GPS JSON records from the Kafka topic "gps-data", parses out
    * imei/lng/lat, and prints an unbounded running count per device (imei)
    * to the console.
    *
    * Blocks forever in `awaitTermination()`; stop with Ctrl-C or by killing
    * the process.
    */
  def main(args: Array[String]): Unit = {

    // Local two-core session for experimentation; change `master` for a cluster.
    val spark = SparkSession.builder().master("local[2]").appName("kafka").getOrCreate()

    val dsReader = spark
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "10.0.25.57:9092,10.0.25.48:9092,10.0.25.39:9092")
      .option("subscribe", "gps-data")
      .option("includeTimestamp", true)
      .load()
    import spark.implicits._

    // Kafka delivers key/value as binary columns; cast both to UTF-8 strings.
    val df = dsReader.selectExpr("CAST(key as STRING)", "CAST(value as STRING)").as[(String, String)]

    println(dsReader.isStreaming)
    dsReader.printSchema()

    // Example: aggregation without a window — read JSON payloads and map them
    // into typed Gps records. A payload that is not valid JSON degrades to an
    // empty Gps instead of failing the whole stream.
    val words = df.map { record =>
      val payload = record._2
      println(payload) // debug: echo raw payload on the executor
      // Explicit call; previously this was `if (record._2)`, which silently
      // fired the implicit String => Boolean conversion defined below.
      if (isJson(payload)) {
        val json = JSON.parseObject(payload)
        Gps(json.getString("imei"), json.getString("lng"), json.getString("lat"))
      } else {
        Gps("", "", "")
      }
    }

    words.printSchema()

    // Unbounded running count of records per device id (no window).
    val windowCounts = words.groupBy($"imei").count()

    // NOTE: a second writeStream.foreach(new ForeachWriter…) used to be built
    // here but was never start()ed (dead code), and its open/close were `???`,
    // which would have thrown NotImplementedError on every task — removed.

    // "complete" output mode re-emits the whole aggregate table each trigger;
    // it is required for non-windowed aggregations like this one.
    windowCounts.writeStream
      .outputMode("complete")
      .format("console")
      .start()
      .awaitTermination()
  }

  /** Minimal GPS record: device IMEI plus longitude/latitude kept as raw strings. */
  final case class Gps(imei: String, lng: String, lat: String)

  /**
    * Creates (or reuses) a SparkSession configured from `conf`.
    *
    * The default session is cleared before and after `getOrCreate` so that
    * `conf` is applied fresh rather than inherited from a prior thread-default
    * session. Synchronized on the SparkSession companion to serialize
    * concurrent callers.
    *
    * @param conf       Spark configuration to apply
    * @param isNeedHive when true, enables Hive support on the session
    * @return the created or cached SparkSession
    */
  def getSparkSession(conf: SparkConf, isNeedHive: Boolean = true): SparkSession =
    SparkSession.synchronized {
      SparkSession.clearDefaultSession()
      val builder = SparkSession.builder().config(conf)
      // val expression instead of the old `@transient var session = null`
      // (@transient is meaningless on a local, and no var/null is needed).
      val session =
        if (isNeedHive) builder.enableHiveSupport().getOrCreate()
        else builder.getOrCreate()
      SparkSession.clearDefaultSession()
      session
    }

  /**
    * Returns true when `str` is parseable as JSON, using the JVM's JavaScript
    * engine's `JSON.parse`.
    *
    * Kept `implicit` for source compatibility: existing call sites rely on the
    * String => Boolean conversion (prefer calling it explicitly in new code).
    *
    * Returns false when no "js" engine is available (Nashorn was removed in
    * JDK 15), and false for any input that fails to parse.
    */
  implicit def isJson(str: String): Boolean = {
    import javax.script.ScriptEngineManager
    import scala.util.control.NonFatal
    val engine = new ScriptEngineManager().getEngineByName("js")
    if (engine == null) {
      false // no JavaScript engine on this JVM
    } else {
      try {
        // Bind the input as a variable instead of concatenating it into the
        // script: the original `eval("(" + str + ")")` executed the untrusted
        // Kafka payload as arbitrary JavaScript (code injection) and accepted
        // any JS expression — not just JSON.
        val bindings = engine.createBindings()
        bindings.put("input", str)
        engine.eval("JSON.parse(input)", bindings)
        true
      } catch {
        case NonFatal(_) => false
      }
    }
  }
}                                                                                        