package com.niit.streaming

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

object Spark_stream_Join {

  /**
    * Demo: joins two socket-based text DStreams on their line content.
    *
    * Reads newline-delimited text from localhost:9999 and localhost:8848
    * (e.g. fed via `nc -lk 9999` / `nc -lk 8848`), tags each line with the
    * port it came from, and inner-joins the two streams per micro-batch.
    * Lines whose text appears on only one port are dropped (inner-join
    * semantics). Runs until externally terminated.
    */
  def main(args: Array[String]): Unit = {

    // local[*] uses all cores; streaming needs at least 2 (receiver + processing).
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("sparkStream")
    // 5-second micro-batch interval.
    val ssc = new StreamingContext(sparkConf, Seconds(5))
    ssc.sparkContext.setLogLevel("ERROR")

    // Listen on the two ports for incoming text data.
    // These references are never reassigned, so they are vals (was: var).
    val lines9999 = ssc.socketTextStream("localhost", 9999) // <--- a  b
    val lines8848 = ssc.socketTextStream("localhost", 8848) // <--- a  c

    // Key each line by its text and tag with the source port:
    //   9999 stream: (a,"9999") (b,"9999");  8848 stream: (a,"8848") (c,"8848")
    val map9999: DStream[(String, String)] = lines9999.map((_, "9999"))
    val map8848: DStream[(String, String)] = lines8848.map((_, "8848"))

    // Inner join per batch on the line text --> (a,("9999","8848")).
    // (b,"9999") and (c,"8848") cannot be merged because their keys differ,
    // so they are dropped from the joined stream.
    val joinDS = map9999.join(map8848)

    joinDS.print()

    ssc.start()
    ssc.awaitTermination()

  }

}
