package streaming.demo5

import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.flume.{FlumeUtils, SparkFlumeEvent}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkFlumePush {

  /**
    * State-update function for `updateStateByKey`: folds the counts observed
    * for a key in the current batch into that key's running total.
    *
    * @param inputData counts for this key collected in the current batch
    * @param sumData   running total carried over from previous batches, if any
    * @return the new running total (always `Some`, so keys are never evicted)
    */
  def updateFunc(inputData: Seq[Int], sumData: Option[Int]): Option[Int] =
    Some(inputData.sum + sumData.getOrElse(0))

  def main(args: Array[String]): Unit = {
    // Optional CLI overrides; defaults preserve the original hard-coded values:
    //   args(0) = hostname the Flume Avro sink pushes to, args(1) = its port
    val host: String = if (args.length > 0) args(0) else "192.168.10.101"
    val port: Int = if (args.length > 1) args(1).toInt else 8888

    val sparkConf: SparkConf = new SparkConf().setMaster("local[4]").setAppName("SparkFlumePush")
    val sc: SparkContext = new SparkContext(sparkConf)
    sc.setLogLevel("WARN")
    val streamingContext: StreamingContext = new StreamingContext(sc, Seconds(5))
    // Checkpoint directory is mandatory for updateStateByKey: it persists the
    // per-key running state between batches.
    streamingContext.checkpoint("./sparkcheckpush")

    // Push-based integration: Flume's Avro sink pushes events to this receiver,
    // so host:port here must match the sink configuration on the Flume side.
    val flumeStream: ReceiverInputDStream[SparkFlumeEvent] =
      FlumeUtils.createStream(streamingContext, host, port, StorageLevel.MEMORY_AND_DISK_2)

    // Decode each event body to a String.
    // NOTE(review): uses the JVM default charset — confirm the Flume source
    // emits text in that encoding, otherwise pass an explicit charset.
    val lines: DStream[String] = flumeStream.map(event => new String(event.event.getBody.array()))

    // Per-batch word count: split on single spaces and pair each word with 1.
    val wordAndOne: DStream[(String, Int)] = lines.flatMap(_.split(" ")).map((_, 1))

    // Running total per word across all batches, backed by the checkpoint.
    val finalResult: DStream[(String, Int)] = wordAndOne.updateStateByKey(updateFunc)
    finalResult.print()
    lines.print()

    // Start the streaming computation and block until termination.
    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
