package com.peng.sparktest.sparkstreaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Spark Streaming demo: ingesting data through a custom socket receiver.
 *
 * Wires a user-defined `MySocketReceiver` (reading from localhost:8888) into a
 * StreamingContext with a 5-second batch interval and prints each micro-batch.
 */
object StreamTest01_Receiver02_CustomReceiver {

  def main(args: Array[String]): Unit = {
    // "local[2]": the number after "local" is the thread count for local mode.
    // Streaming needs at least 2 threads — one is permanently occupied by the
    // receiver task that pulls data in, and at least one more is needed to run
    // the batch-processing jobs. If a batch is large and split into several
    // parallel tasks, raise this number; with too few threads those tasks
    // would degrade to serial execution.
    val sparkConf: SparkConf = new SparkConf()
      .setAppName("stream_receiver")
      .setMaster("local[2]")

    // The batch interval (5s) is how often a new micro-batch job is triggered.
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    ssc.sparkContext.setLogLevel("ERROR")

    // Plug in the custom receiver; each received record becomes a String element.
    val lines: ReceiverInputDStream[String] =
      ssc.receiverStream(new MySocketReceiver("localhost", 8888))

    lines.print()

    // Kick off the streaming scheduler, then block the main thread until the
    // application is terminated.
    ssc.start()
    ssc.awaitTermination()
  }

}
