package com.atbeijing.bigdata.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Demonstrates joining two DStreams: lines read from two local sockets are
 * keyed by their content and inner-joined per batch interval.
 */
object SparkStreaming07_Join {

    def main(args: Array[String]): Unit = {

        // Build the streaming environment: local master, 3-second batch interval.
        val conf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming")
        val streamingContext = new StreamingContext(conf, Seconds(3))

        // Each socket stream delivers data as lines of text.
        val lines1: ReceiverInputDStream[String] = streamingContext.socketTextStream("localhost", 9999)
        val lines2: ReceiverInputDStream[String] = streamingContext.socketTextStream("localhost", 8888)

        // Key each line by its content so the two streams can be joined.
        val keyed1: DStream[(String, Int)] = lines1.map(line => (line, 99))
        val keyed2: DStream[(String, Int)] = lines2.map(line => (line, 88))

        // Inner join per batch: only keys present in both streams during the
        // same batch interval produce output.
        val joined: DStream[(String, (Int, Int))] = keyed1.join(keyed2)

        joined.print()

        streamingContext.start()
        streamingContext.awaitTermination()
    }
}
