package com.teradata.bigdata.sink

import com.teradata.bigdata.sink.app.ForeachWriterHBase
import com.teradata.bigdata.util.kafka.KafkaProperties
import com.teradata.bigdata.util.spark.SparkConfig
import org.apache.spark.sql._
/**
  * @Project:
  * @Description:
  * @Version 1.0.0
  * @Throws SystemException:
  * @Author: <li>2019/11/6/006 Administrator Create 1.0
  * @Copyright ©2018-2019 al.github
  * @Modified By:
  */
/**
  * Streaming driver: consumes raw DPI LTE S1U HTTP records from Kafka and
  * hands each row to [[ForeachWriterHBase]] for persistence into HBase.
  */
object StreamingSinkHBase {

  def main(args: Array[String]): Unit = {
    val appName = "StreamingSinkHBase"

    // Broker list comes from the shared Kafka configuration helper.
    val brokerList = new KafkaProperties().kafkaBrokers.mkString(",")
    // Kafka topic carrying the source records.
    val topic = "O_DPI_LTE_S1U_HTTP"

    // NOTE: appName is applied after SparkConfig's conf so it takes precedence
    // if the shared conf also sets an application name.
    val spark: SparkSession = SparkSession
      .builder()
      .config((new SparkConfig).getConf)
      .config("spark.sql.streaming.checkpointLocation", "/user/b_yz_app_td/checkpoint/StreamingSinkHBase")
      .appName(appName)
      .getOrCreate()

    // Read the topic as an unbounded DataFrame; keep only the message payload,
    // cast from bytes to a single string column named `value`.
    val records: DataFrame = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", brokerList)
      .option("subscribe", topic)
      .option("startingOffsets", "latest")
      .option("kafkaConsumer.pollTimeoutMs", "5000")
      // Do not fail the query when Kafka data has been aged out/lost.
      .option("failOnDataLoss", "false")
      .load()
      .selectExpr("CAST(value AS STRING)")

    // Route every row of each micro-batch through the HBase foreach sink.
    val query = records.writeStream
      .foreach(new ForeachWriterHBase())
      .outputMode("update")
      .start()

    // Block the driver until the streaming query terminates.
    query.awaitTermination()
  }

}
