package com.dtkavin.sparkstreaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.flume.FlumeUtils
import org.apache.spark.streaming.{Milliseconds, StreamingContext}

/**
  * Created by IntelliJ IDEA.
  * Programmer : John Zn
  * Date : 2016/4/19 0019
  * Time : 22:52
  * Description : push data from Flume into Spark Streaming
  *
  *
  */
// NOTE(review): empty companion class for the FlumPush object below — it has no
// members and is never instantiated in this file; presumably kept out of habit
// or for framework conventions. Confirm it is needed before removing.
class FlumPush {

}

object FlumPush {

  /**
    * Entry point: runs a local Spark Streaming job that receives events
    * pushed by a Flume agent and prints each tab-separated field.
    */
  def main(args: Array[String]) {
    // Local mode with 3 threads: one is consumed by the Flume receiver,
    // the remainder process the micro-batches.
    val sparkConf = new SparkConf().setAppName("FlumPush").setMaster("local[3]")
    // Micro-batch interval of 5000 ms.
    val streamingContext = new StreamingContext(sparkConf, Milliseconds(5000))

    // Bind a push-based Flume receiver to host "net8", port 8888.
    val eventStream = FlumeUtils.createStream(streamingContext, "net8", 8888)

    // The payload of a Flume event lives in event.getBody(); decode it to a
    // String and emit every tab-separated field as its own record.
    val fields = eventStream.flatMap { flumeEvent =>
      val body = new String(flumeEvent.event.getBody().array())
      body.split("\\t")
    }
//      .map(x => (x(0), x(1), x(2), x(4)))

    fields.print()

    streamingContext.start()
    streamingContext.awaitTermination()
  }
}