package com.dongline.te
import javafx.application.Application
import javafx.stage.Stage
import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.flume.{FlumeUtils, SparkFlumeEvent}

import java.net.InetSocketAddress
class FlumeStreamingPoll extends Application{
  /** JavaFX entry point; builds and runs a Spark Streaming word count fed by Flume.
    * Blocks forever in awaitTermination once started.
    */
  override def start(primaryStage: Stage): Unit = {
    // Run Spark locally with two threads (one receiver + one processor).
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount")
    // Override the memory Spark assumes is available when testing locally (bytes).
    sparkConf.set("spark.testing.memory", "471859200")

    // Micro-batch interval of one second.
    val streamingContext = new StreamingContext(sparkConf, Seconds(1))

    // Addresses of the Flume agents exposing an
    // org.apache.spark.streaming.flume.sink.SparkSink; the poll-based
    // receiver can pull from several agents at once.
    val sinkAddresses = Array(
      new InetSocketAddress("192.168.0.31", 9999),
      new InetSocketAddress("192.168.216.121", 9999)
    )

    // Pull events from the Flume sinks, storing serialized blocks replicated
    // across memory and disk.
    val events: ReceiverInputDStream[SparkFlumeEvent] =
      FlumeUtils.createPollingStream(streamingContext, sinkAddresses, StorageLevel.MEMORY_AND_DISK_SER_2)

    // Word count per batch: decode each event body to text, split on spaces,
    // tally occurrences, and print the counts to stdout.
    events
      .map(event => new String(event.event.getBody.array()).trim)
      .flatMap(line => line.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
      .print()

    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
