package day04

import java.io.{BufferedReader, InputStream, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver

/**
  * Custom Spark Streaming receiver that reads UTF-8 text lines from a TCP socket.
  *
  * Steps to implement a custom receiver:
  * 1. Extend the Receiver base class, fixing the element type (String)
  *    and the storage level (StorageLevel.MEMORY_AND_DISK_2).
  * 2. Override the onStart and onStop lifecycle methods.
  * 3. Bind the receiver to a StreamingContext via receiverStream.
  *
  * @param host hostname of the TCP server to connect to
  * @param port port of the TCP server
  */
class UserDefineReceiver(host: String, port: Int) extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) {

  /**
    * Launch a background thread that pulls data from the socket.
    *
    * NOTE: the work must go in an overridden run() method. Calling
    * receive() directly in the anonymous-class body would execute it in
    * the constructor, synchronously on the caller's thread, so onStart
    * would block forever and no background thread would do any work.
    */
  override def onStart(): Unit = {
    new Thread("UserDefineReceiver") {
      override def run(): Unit = receive()
    }.start()
  }

  /** Connect to host:port and store every received line until end of stream. */
  def receive(): Unit = {
    import scala.util.control.NonFatal
    var socket: Socket = null
    try {
      socket = new Socket(host, port)
      // Decode the socket's byte stream as UTF-8 text.
      val bufferedReader: BufferedReader =
        new BufferedReader(new InputStreamReader(socket.getInputStream, StandardCharsets.UTF_8))
      // readLine() returns null at end of stream. Read-then-test is used
      // deliberately: the Java idiom `while ((line = readLine()) != null)`
      // is broken in Scala because an assignment evaluates to Unit, and
      // `() != null` is always true — the loop would never terminate and
      // would eventually store(null).
      var line: String = bufferedReader.readLine()
      while (line != null) {
        // Hand each record to Spark for storage/replication.
        store(line)
        line = bufferedReader.readLine()
      }
      // Server closed the connection; ask Spark to restart the receiver.
      restart(s"Socket stream from $host:$port ended, reconnecting")
    } catch {
      // Let fatal errors (OOM, interrupts) propagate; restart on the rest.
      case NonFatal(e) => restart(s"Error receiving data from $host:$port", e)
    } finally {
      // Always release the socket (closing it also closes its streams).
      if (socket != null) socket.close()
    }
  }

  // Nothing to clean up here: the receiving thread exits on its own when
  // the socket is closed or the read loop terminates.
  override def onStop(): Unit = {

  }
}
object UserDefineReceiver {

  /** Driver entry point: wires the custom receiver into a 5-second-batch streaming job. */
  def main(args: Array[String]): Unit = {
    // Local mode with two threads: one runs the receiver, one processes batches.
    val conf: SparkConf = new SparkConf().setAppName("AcceptScoketData").setMaster("local[2]")
    val sparkContext: SparkContext = new SparkContext(conf)
    sparkContext.setLogLevel("WARN")

    val streamingContext: StreamingContext = new StreamingContext(sparkContext, Seconds(5))
    // Bind the custom receiver to the streaming context.
    val lines: ReceiverInputDStream[String] =
      streamingContext.receiverStream(new UserDefineReceiver("node01", 9999))
    lines.print()

    // Start the streaming computation and block until it is terminated.
    streamingContext.start()
    streamingContext.awaitTermination()
  }
}