package org.huangrui.spark.scala.streaming

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * @Author hr
 * @Create 2024-10-21 21:36 
 */
object SparkStreaming07_Method {

  /**
   * Demonstrates `DStream.transform` versus `DStream.map`, and where each
   * piece of code executes (Driver vs. Executor).
   */
  def main(args: Array[String]): Unit = {
    // Driver-side setup: local streaming context with a 3-second batch interval.
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming")
    val ssc: StreamingContext = new StreamingContext(conf, Seconds(3))
    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("localhost", 9999)

    // `transform` exposes the underlying RDD of every batch so it can be
    // operated on directly. It is useful when:
    // 1. the DStream API lacks the operation you need, or
    // 2. some code must run periodically, once per batch interval.

    // Code at this level => DStream primitive.
    // e.g. `int i = 10` — runs once, on the Driver.
    val identityViaTransform: DStream[String] = lines.transform { batchRdd: RDD[String] =>
      // Code at this level => RDD operator.
      // e.g. `int j = 20` — runs on the Driver, re-executed every batch interval.
      batchRdd.map { record: String =>
        // Code at this level => element logic.
        // e.g. `int k = 30` — runs on the Executors.
        record
      }
    }

    // For comparison: a plain `map`. Code outside the closure runs on the
    // Driver; the closure body runs on the Executors.
    val identityViaMap: DStream[String] = lines.map { record: String =>
      // Executor-side element logic.
      record
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
