package com.kafka

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SocketDstreamTest {

  /** Streaming word-count demo: reads comma-separated text from a TCP socket
    * and prints per-batch word counts every 5 seconds.
    *
    * @param args optional overrides: args(0) = host (default "127.0.0.1"),
    *             args(1) = port (default 9995). Invoking with no args keeps
    *             the original behavior.
    */
  def main(args: Array[String]): Unit = {

    // Windows-only workaround: point Hadoop at a local winutils installation.
    System.setProperty("hadoop.home.dir", "E:\\hadoop-common-2.7.3-bin-master")

    // Allow host/port on the command line; fall back to the original
    // hard-coded values so existing invocations keep working.
    val host = if (args.length > 0) args(0) else "127.0.0.1"
    val port = if (args.length > 1) args(1).toInt else 9995

    // local[3]: the socket receiver permanently occupies one core, so at
    // least two cores are needed for batches to actually be processed.
    val sparkConf = new SparkConf().setAppName("SocketTest").setMaster("local[3]")

    // NOTE(review): enableHiveSupport() was removed — this demo never touches
    // Hive, and the call fails at startup when Hive classes/config are absent.
    val spark = SparkSession
      .builder()
      .config(sparkConf)
      .getOrCreate()

    // StreamingContext with a 5-second batch interval.
    val ssc = new StreamingContext(spark.sparkContext, Seconds(5))

    val textStream: ReceiverInputDStream[String] = ssc.socketTextStream(host, port)

    // Split each line on commas, count occurrences within the batch, and
    // print the leading results of every batch to stdout.
    textStream
      .flatMap(_.split(","))
      .map((_, 1))
      .reduceByKey(_ + _)
      .print()

    ssc.start()
    // Blocks until the context is stopped externally or fails; the original
    // trailing ssc.stop() after this call was dead code and has been removed.
    ssc.awaitTermination()
  }

}
