package com.dongline.te
import javafx.application.Application
import javafx.stage.Stage
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods

import scala.util.parsing.json.JSON

/** Immutable Kafka message payload (author, content, id, epoch-millis time, title).
  *
  * Promoted from a hand-rolled class to a `case class`: constructor calls with
  * `new` and all field accessors are unchanged, but we now get structural
  * `equals`/`hashCode`, a readable `toString`, `copy`, and a companion `apply`
  * for free — useful when these values flow through Spark transformations.
  */
final case class MyMessage(author: String, content: String, id: String, time: Long, title: String)
/** JavaFX application that runs a Spark Streaming job: it consumes records from
  * a Kafka topic, extracts the third space-separated token of every message
  * value, and maintains a running (stateful) count of the comma-separated words
  * inside that token, printing both streams every 20-second batch.
  */
class KafkaStreaming extends Application {
  override def start(primaryStage: Stage): Unit = {
    // Local-mode Spark; the memory override works around the local-mode
    // minimum-memory check on small test machines.
    val conf: SparkConf = new SparkConf().setAppName("sql").setMaster("local")
    conf.set("spark.testing.memory", "471859200")
    val ssc = new StreamingContext(conf, Seconds(20))

    // Checkpointing is mandatory for updateStateByKey (state recovery).
    ssc.checkpoint("D:/aa")
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "192.168.2.31:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "use_a_separate_group_id_for_each_stream1",
      "auto.offset.reset" -> "latest",
      // Offsets are managed by the streaming job, not auto-committed by Kafka.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    val topics = Array("test_topic")

    val recordDStream: InputDStream[ConsumerRecord[String, String]] =
      KafkaUtils.createDirectStream[String, String](
        ssc,
        // Location strategy: distribute Kafka partitions evenly over executors.
        LocationStrategies.PreferConsistent,
        ConsumerStrategies.Subscribe[String, String](topics, kafkaParams))

    // Third space-separated token of each message value.
    // NOTE(review): assumes every message has at least 3 tokens; a shorter
    // message throws ArrayIndexOutOfBoundsException — confirm message format.
    val thirdField = recordDStream.map(_.value().split(" ")).map(_(2))
    thirdField.print()

    // Running word count: split the field on commas, pair each word with 1,
    // and fold each batch's counts into the accumulated per-word state.
    val runningCounts = thirdField
      .flatMap(_.split(","))
      .map((_, 1))
      .updateStateByKey((batchCounts: Seq[Int], state: Option[Int]) =>
        // Idiomatic replacement of the original var + iterator loop: the new
        // total is this batch's sum plus the previous state (0 if absent).
        // An Int sum is never null, so Some is used rather than Option(...).
        Some(batchCounts.sum + state.getOrElse(0)))

    runningCounts.print()

    ssc.start()
    ssc.awaitTermination()
  }
}
