

import java.util.concurrent.TimeUnit

import com.typesafe.scalalogging.Logger
import org.apache.spark.sql.functions._
import org.apache.spark.sql.streaming.{StreamingQuery, Trigger}
import org.apache.spark.sql.types._
import org.apache.spark.sql.{SparkSession, types}

import scala.collection._

/**
  * Reads JSON events from Kafka via Spark Structured Streaming, registers the
  * parsed records as a temp view, and streams SQL query results to the console.
  *
  * @author Abyss
  * @date 2019/9/01
  */
object KafkaToStructuredStreaming {

  private val logger = Logger(this.getClass)

  /**
    * Entry point: builds the session, wires up the Kafka-backed temp view and
    * its streaming queries, then blocks until any query terminates.
    */
  def main(args: Array[String]): Unit = {
    // Needed so the local driver authenticates to HDFS as the "hadoop" user.
    System.setProperty("HADOOP_USER_NAME", "hadoop")
    val spark = getSparkSession
    sparkReadKafka(spark)
    // Block the driver until any of the started streaming queries terminates.
    spark.streams.awaitAnyTermination()
  }

  /**
    * Builds the SparkSession for this job.
    *
    * NOTE(review): master is hard-coded to local[*] — fine for development,
    * but should come from spark-submit configuration in production.
    *
    * @return an active (possibly pre-existing) SparkSession
    */
  private def getSparkSession: SparkSession =
    SparkSession.builder()
      .appName(getClass.getName)
      .master("local[*]")
      .getOrCreate()

  /**
    * Registers the Kafka topic as a Spark temp view and starts one writeStream
    * per SQL statement. Each SQL statement becomes its own StreamingQuery; the
    * caller waits on all of them via spark.streams.awaitAnyTermination().
    *
    * @param spark active session
    */
  def sparkReadKafka(spark: SparkSession): Unit = {
    createOrReplaceTempView(spark, "mq_cj_data_1", "ab_test")
    val sqls = Array("select * from ab_test")
    // Started queries are tracked by spark.streams, so there is no need to
    // collect the handles locally (the original ListBuffer was discarded).
    for (sql <- sqls) {
      logger.info(s"starting streaming query for: $sql")
      sqlWriteStream(spark, sql)
    }
  }

  /**
    * Reads JSON records from the given Kafka topic, parses them against a fixed
    * schema, and exposes the flattened result as a temp view named `sourceName`.
    *
    * @param spark      active session
    * @param kafkaTopic Kafka topic to subscribe to
    * @param sourceName name of the temp view to create or replace
    */
  def createOrReplaceTempView(spark: SparkSession, kafkaTopic: String, sourceName: String): Unit = {
    // TODO(review): broker list is hard-coded; externalize to configuration.
    val df = spark
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "192.168.0.185:9092")
      .option("subscribe", kafkaTopic)
      .option("startingOffsets", "latest")
      .load()

    // Fixed schema for the incoming JSON payload. Originally intended to be
    // loaded per source from a database (see the commented-out
    // SocSchemaCollection.getSchemaBySourceName call in history).
    val schema = StructType(
      Seq(
        StructField("dataPointId", IntegerType, nullable = true),
        StructField("dataSource", IntegerType, nullable = true),
        StructField("dataType", IntegerType, nullable = true),
        StructField("dataTime", LongType, nullable = true),
        StructField("data", types.MapType(StringType, StringType))
      )
    )

    // Kafka delivers the payload as binary in the `value` column: cast it to
    // string, parse it as JSON with the schema above, then flatten the struct
    // into top-level columns. The original `schema != null` guard was dead
    // code — `schema` is a locally constructed literal and can never be null.
    val jsonDf = df.select(from_json(col("value").cast("string"), schema).alias("result"))
    jsonDf.select("result.*").createOrReplaceTempView(sourceName)
  }

  /**
    * Runs the given SQL against the registered temp views and streams the
    * result to the console every 5 seconds in append mode.
    *
    * @param spark active session
    * @param sql   SQL statement to execute against the registered temp views
    * @return the started StreamingQuery handle
    */
  def sqlWriteStream(spark: SparkSession, sql: String): StreamingQuery =
    spark.sql(sql).writeStream
      .outputMode("append")
      .format("console")
      .trigger(Trigger.ProcessingTime(5, TimeUnit.SECONDS))
      .start()

}

