package com.hiscene.structurestream

/**
 * <dependencies>
 *
 *
 * <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-core -->
 * <dependency>
 * <groupId>org.apache.spark</groupId>
 * <artifactId>spark-core_2.11</artifactId>
 * <version>2.3.0.2.6.5.0-292</version>
 * </dependency>
 *
 *
 * <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-sql -->
 * <dependency>
 * <groupId>org.apache.spark</groupId>
 * <artifactId>spark-sql_2.11</artifactId>
 * <version>2.3.0.2.6.5.0-292</version>
 * </dependency>
 *
 * <dependency>
 * <groupId>org.apache.spark</groupId>
 * <artifactId>spark-sql-kafka-0-10_2.11</artifactId>
 * <version>2.3.0.2.6.5.0-292</version>
 * </dependency>
 *
 * <!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-client -->
 * <dependency>
 * <groupId>org.apache.hbase</groupId>
 * <artifactId>hbase-client</artifactId>
 * <version>1.1.2.2.6.5.0-292</version>
 * </dependency>
 * <dependency>
 * <groupId>org.json4s</groupId>
 * <artifactId>json4s-jackson_2.11</artifactId>
 * <version>3.2.11</version>
 * </dependency>
 *
 * <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-hive -->
 * <!--
 * <dependency>
 * <groupId>org.apache.spark</groupId>
 * <artifactId>spark-hive_2.11</artifactId>
 * <version>2.3.0.2.6.5.0-292</version>
 * </dependency>
 * -->
 * <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-exec -->
 * <!--
 * <dependency>
 * <groupId>org.apache.hive</groupId>
 * <artifactId>hive-exec</artifactId>
 * <version>1.2.1000.2.6.5.0-292</version>
 * </dependency>
 * -->
 *
 * </dependencies>
 *
 * <repositories>
 * <repository>
 * <id>repository.hortonworks</id>
 * <name>Hortonworks Repository</name>
 * <url>http://repo.hortonworks.com/content/repositories/releases/</url>
 * </repository>
 * </repositories>
 */

object KafkaStream {

  /**
   * Structured Streaming job: reads JSON device events from Kafka topic "hi",
   * parses each record into a [[DeviceData]], and writes the stream to HBase
   * via [[HbaseForeachWriter]]. Shutdown is driven by a marker-file check in
   * `StreamingUtils.stopStructuredStreaming` rather than `awaitTermination()`.
   *
   * @param args unused; all configuration is hard-coded below
   */
  def main(args: Array[String]): Unit = {
      import org.apache.spark.sql.{Dataset, SparkSession}
      val spark = SparkSession
        .builder
        //.master("local[*]")
        .appName("hi")
        .getOrCreate()
      spark.sparkContext.setLogLevel("ERROR")

      // Kafka source: `value` arrives as binary and is cast to STRING below.
      val df = spark
        .readStream
        .format("kafka")
        .option("kafka.bootstrap.servers", "192.168.1.16:6667,192.168.1.17:6667,192.168.1.18:6667")
        .option("subscribe", "hi")
        .load()

      import spark.implicits._
      import org.apache.spark.sql.streaming.OutputMode
      import org.json4s._
      import org.json4s.jackson.JsonMethods._
      import com.hiscene.utils.StreamingUtils

      // Parse each non-empty Kafka value as a JSON DeviceData record.
      val iotDataset: Dataset[DeviceData] = df.selectExpr("CAST(value AS STRING)")
        .as[String]
        .filter(line => null != line && line.trim.nonEmpty)
        .map { line =>
            // Declared inside the closure so each executor creates its own
            // instance instead of capturing one serialized from the driver.
            implicit val formats: Formats = DefaultFormats
            parse(line).extract[DeviceData]
        }

    // NOTE(review): malformed JSON will throw inside the map and fail the
    // query; consider a tolerant parse (Try/extractOpt) if bad records occur.

    val query = iotDataset.writeStream
      .outputMode(OutputMode.Update())
      //.option("truncate", false)
      .option("checkpointLocation", "/data/checkpoint")
      .foreach(new HbaseForeachWriter)
      .queryName("hi test")
      //.format("console")
      .start()

    // Graceful shutdown: poll the stop-marker path instead of blocking forever.
    //query.awaitTermination()
    StreamingUtils.stopStructuredStreaming(query, "/spark/check-hi/stop/hi-stop")
  }
}

/**
 * One device event parsed from a Kafka JSON record.
 *
 * NOTE: `diviceId` (sic) is intentionally left misspelled — json4s extraction
 * matches JSON keys to field names, so renaming it would break parsing of the
 * incoming payloads. `final` because case classes should not be subclassed.
 *
 * @param eid       event id
 * @param diviceId  device id (field name mirrors the JSON key)
 * @param eventInfo event payload/details
 */
final case class DeviceData(eid: String, diviceId: String, eventInfo: String)
