package com.streaming.demo

import java.net.InetSocketAddress

import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.flume.FlumeUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * @author td
  * @date 2018/4/3
  */
object SparkStreamingFlumeDemo {

  def main(args: Array[String]): Unit = {

    // local[2]: a streaming receiver occupies one core, so at least two local
    // threads are required — one to receive, one to process.
    val conf = new SparkConf().setMaster("local[2]").setAppName("flumeDemo")

    val ssc = new StreamingContext(conf, Seconds(8))

    // flumeStreamingDataPush(ssc)
    flumeStreamingDataPoll(ssc)
  }

  /**
    * Push-based mode: Flume pushes events into Spark. The Flume Avro sink must
    * be configured with the host and port of a Spark worker node.
    *
    * @param streamingContext active streaming context used to create the stream
    */
  def flumeStreamingDataPush(streamingContext: StreamingContext): Unit = {

    val flumeStream = FlumeUtils.createStream(streamingContext, "172.31.61.85", 9999)

    flumeStream.count().map(cnt => s"received $cnt flume events.").print()

    // map (NOT flatMap): decode each event body as one UTF-8 string per record.
    // flatMap over a String would flatten it into individual characters.
    flumeStream.map(event => new String(event.event.getBody.array(), "UTF-8")).print()

    streamingContext.start()
    streamingContext.awaitTermination()
  }

  /**
    * Pull-based mode: Spark polls events from a Flume SparkSink. The Flume agent
    * must be configured with
    * sinks.`type` = org.apache.spark.streaming.flume.sink.SparkSink
    *
    * @param streamingContext active streaming context used to create the stream
    */
  def flumeStreamingDataPoll(streamingContext: StreamingContext): Unit = {

    // All SparkSink addresses to poll; append further InetSocketAddress entries
    // here when multiple sinks are configured.
    val flumeSinkAddresses = Seq(new InetSocketAddress("172.16.88.179", 9999))

    val flumeStream = FlumeUtils.createPollingStream(
      streamingContext, flumeSinkAddresses, StorageLevel.MEMORY_ONLY_SER_2)

    // map (NOT flatMap): flatMap over a String would print character-by-character.
    flumeStream.map(event => new String(event.event.getBody.array(), "UTF-8")).print()

    streamingContext.start()
    streamingContext.awaitTermination()
  }

}
