package com.bigdata.exam

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

object test {
  /**
   * Spark Streaming job: reads whitespace-separated records from a TCP socket
   * and prints, once per batch, the sum of each record's third field.
   *
   * Expected record format: "<key1> <key2> <amount>", e.g. "user1 item3 12.5".
   * A malformed record (fewer than 3 fields, or a non-numeric third field)
   * fails the batch, matching the original behavior.
   *
   * @param args optional overrides: args(0) = source host (default "master"),
   *             args(1) = source port (default 8888). Defaults preserve the
   *             previously hard-coded values, so existing invocations still work.
   */
  def main(args: Array[String]): Unit = {
    // Generalized: allow host/port from the command line; lift avoids
    // ArrayIndexOutOfBoundsException when args are omitted.
    val host = args.lift(0).getOrElse("master")
    val port = args.lift(1).map(_.toInt).getOrElse(8888)

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("Streaming")
    // Second argument is the batch interval (how often collected data is processed).
    val ssc = new StreamingContext(sparkConf, Seconds(15))

    val input: ReceiverInputDStream[String] = ssc.socketTextStream(host, port)

    // Only the numeric third field contributes to the per-batch sum, so extract
    // it directly. (The original built an intermediate ((key1, key2), amount)
    // pair that the very next .map(_._2) discarded — dead work per record.)
    val sum = input
      .map(_.split(" ")(2).toDouble)
      .reduce(_ + _)

    // NOTE(review): DStream.reduce yields an empty result RDD for an empty
    // batch, so print() simply shows nothing for idle intervals.
    sum.print()
    ssc.start()
    ssc.awaitTermination()
  }

}
