package com.atguigu.bigdata.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}
// Two-stream join: shown here with Spark Streaming; Flink provides an equivalent operation.
object SprakStreaming08_Join {
  def main(args: Array[String]): Unit = {

    // Run locally on all available cores with a 6-second micro-batch interval.
    val sparkConf: SparkConf = new SparkConf().setAppName("join").setMaster("local[*]")
    val ssc = new StreamingContext(sparkConf, Seconds(6))

    // Two receiver streams, each reading lines of text from a local TCP socket.
    // Note: valid port numbers go up to 65535; ports below 1024 are reserved
    // for the system and generally should not be used.
    val leftStream: ReceiverInputDStream[String] = ssc.socketTextStream("localhost", 9999)
    val rightStream: ReceiverInputDStream[String] = ssc.socketTextStream("localhost", 8888)

    // join pairs elements by key (the underlying implementation is the RDD join),
    // so each line is first lifted into a (line, 1) key-value pair.
    val leftPairs: DStream[(String, Int)] = leftStream.map(line => (line, 1))
    val rightPairs: DStream[(String, Int)] = rightStream.map(line => (line, 1))

    // A matching key "a" on both streams yields (a, (1, 1)).
    val joined: DStream[(String, (Int, Int))] = leftPairs.join(rightPairs)
    joined.print()

    ssc.start()
    ssc.awaitTermination()

  }

}
