package lhb.spark.sparkstreaming

import java.sql.Timestamp

import lhb.spark.sparkreduce.alarm
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}


/**
 * One parsed alarm record from the Kafka feed.
 *
 * All fields are kept as raw strings exactly as they arrive; parsing/casting
 * happens downstream in the streaming job. The lowercase class name is kept
 * because the DataFrame schema is derived from it via `toDF()`.
 *
 * @param timein    alarm start time (string form, e.g. "yyyy-MM-dd HH:mm:ss")
 * @param timeout   alarm end time (string form)
 * @param tagname   tag identifier of the alarm source
 * @param almvalue  alarm value at trigger time
 * @param almstatus alarm status code
 */
case class alarm(
    timein: String,
    timeout: String,
    tagname: String,
    almvalue: String,
    almstatus: String)
object streamingKafka {

  /**
   * Entry point. Consumes semicolon-delimited alarm records from the Kafka
   * topic "sstream", parses them into [[alarm]] rows, filters to a fixed
   * time window, aggregates min/max alarm values per (tagname, almstatus),
   * and writes each micro-batch's result to SQL Server over JDBC.
   *
   * @param args unused
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      .setMaster("local[*]")
    // 2-second micro-batches.
    val ssc = new StreamingContext(conf, Seconds(2))

    // Reuses the SparkContext that the StreamingContext created above.
    val spark: SparkSession = SparkSession.builder().appName("StreamOnKafka").getOrCreate()

    import spark.implicits._

    val groupID = "testS"
    // Kafka consumer parameters.
    // NOTE(review): 192.168.0.202 appears twice in bootstrap.servers —
    // was 192.168.0.203 intended? Confirm with the cluster topology.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "192.168.0.201:9092,192.168.0.202:9092,192.168.0.202:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> groupID,
      "auto.offset.reset" -> "earliest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val topics = Array("sstream")
    val dstream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
      ssc,
      // Distribute partitions evenly across executors.
      LocationStrategies.PreferConsistent,
      // Subscribe to the configured topics.
      ConsumerStrategies.Subscribe(topics, kafkaParams))

    // Fixed time window for the filter below.
    val startTime = "2018-12-22 00:00:00"
    val stopTime = "2018-12-23 23:59:59"

    val inputValue: DStream[String] = dstream.map(tp => tp.value())

    inputValue.foreachRDD(rdd => {

      // Skip empty micro-batches entirely — avoids pointless JDBC writes.
      if (!rdd.isEmpty()) {

        val frame: DataFrame = rdd
          .map(_.split(";"))
          // Drop malformed records that are too short to index safely;
          // previously a short line threw ArrayIndexOutOfBoundsException
          // and killed the whole batch.
          .filter(_.length >= 10)
          .map(attr => alarm(attr(0).trim, attr(1).trim,
            attr(3).trim, attr(5).trim, attr(9).trim))
          .toDF()

        val nonull = frame
          .where("tagname <> ''")
          // timein/timeout are string columns; Spark casts them against the
          // Timestamp literals for the comparison.
          .filter($"timein" > Timestamp.valueOf(startTime) && $"timeout" < Timestamp.valueOf(stopTime)
            && frame.col("tagname").isNotNull
          )

        val result = nonull
          // Aggregate min/max alarm value per tag and status.
          // (Fixed typo: was "tagtagname", which is not a column and threw
          // AnalysisException at runtime.)
          .groupBy("tagname", "almstatus")
          .agg(("almvalue", "min"), ("almvalue", "max"))

        // Write once from the driver. The previous code wrapped this in
        // result.foreachPartition, which tried to use the driver-side
        // DataFrame/writer inside an executor closure — that is not
        // serializable and would have issued one full write per partition.
        // overwrite + truncate=true replaces the table contents each batch.
        result.write.format("jdbc")
          .option("url", "jdbc:sqlserver://192.168.0.200:1433;DatabaseName=sjk")
          .option("driver", "com.microsoft.sqlserver.jdbc.SQLServerDriver")
          .option("dbtable", "dbo.SPARKSTREAM")
          .option("user", "sa")
          .option("password", "123456")
          .option("truncate", "True")
          .mode("overwrite")
          .save()
      }
    })

    ssc.start()
    // Block until the streaming context is terminated.
    ssc.awaitTermination()
  }
}
