package com.learn.lb.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.flume.FlumeUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

import java.nio.charset.StandardCharsets

import scala.util.Try

/**
 * Flume integration using the push approach: a Flume agent's Avro sink
 * pushes events to a Spark Streaming receiver listening on &lt;host&gt;:&lt;port&gt;,
 * and each 5-second batch is word-counted and printed.
 *
 * Flume ===&gt; push ===&gt; SparkStreaming
 *
 * Usage: WordCountWithFlumePush &lt;host&gt; &lt;port&gt;
 *
 * @author laibo
 * @since 2019/9/10 21:21
 */
object WordCountWithFlumePush {


  def main(args: Array[String]): Unit = {
    if (args.length < 2) {
      throw new IllegalArgumentException("usage: WordCountWithFlumePush <host> <port>")
    }
    val host = args(0)
    // Fail fast with a clear usage error instead of an opaque
    // NumberFormatException when the port argument is not a valid integer.
    val port = Try(args(1).toInt).getOrElse(
      throw new IllegalArgumentException(s"port must be an integer, got: ${args(1)}")
    )

    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("WordCountWithFlumePush")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    // 1. Integrate with Flume: create a push-based receiver; the Flume agent's
    //    Avro sink must be configured to send to host:port.
    val flumePushStream = FlumeUtils.createStream(ssc, host, port)

    // Event bodies are raw bytes; decode with an explicit UTF-8 charset rather
    // than the platform default, so results are consistent across JVMs.
    flumePushStream.map(x => new String(x.event.getBody.array(), StandardCharsets.UTF_8).trim)
        .flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)
        .print()

    ssc.start()
    // Block the driver thread until the streaming job is stopped or fails.
    ssc.awaitTermination()
  }

}
