package com.dongline.te
import javafx.application.Application
import javafx.stage.Stage
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.flume.{FlumeUtils, SparkFlumeEvent}
import org.apache.spark.streaming.{Seconds, StreamingContext}
class FlumeStreamingtPush extends Application {

  /**
   * Demo of the Flume "push" integration: a Flume Avro sink pushes events to a
   * Spark Streaming receiver started here, and each micro-batch runs a word count
   * over the event bodies. Launched via JavaFX, so the pipeline lives in `start`.
   *
   * @param primaryStage the JavaFX stage (unused; required by `Application.start`)
   */
  override def start(primaryStage: Stage): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("NetworkWordCount")
    // Explicit test-memory setting so the local driver can start with limited heap.
    sparkConf.set("spark.testing.memory", "471859200")

    // One-second micro-batch interval.
    val streamingContext = new StreamingContext(sparkConf, Seconds(1))

    // Receiver bound to this machine's IP/port; Flume's Avro sink pushes events here.
    val events: ReceiverInputDStream[SparkFlumeEvent] =
      FlumeUtils.createStream(streamingContext, "192.168.0.109", 9999)

    // Decode each event body to text, split on spaces, count words per batch.
    events
      .map(ev => new String(ev.event.getBody.array()).trim)
      .flatMap(line => line.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
      .print()

    streamingContext.start()
    // Block forever; the receiver keeps consuming until the process is killed.
    streamingContext.awaitTermination()
  }
}
