package cn.wangjie.spark.yangliang

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.io.Source


object YangLiang {

  def main(args: Array[String]): Unit = {

    // 1. Create the StreamingContext with a 5-second batch interval.
    val ssc: StreamingContext = {
      // 1.a SparkConf: app name derived from this object's name, local mode
      //     with 3 threads (receiver + processing need more than one).
      val sparkConf: SparkConf = new SparkConf()
        .setAppName(this.getClass.getSimpleName.stripSuffix("$"))
        .setMaster("local[3]")
      // 1.b Build the streaming context from the conf and batch interval.
      new StreamingContext(sparkConf, Seconds(5))
    }

    // 2. Read the space-separated schema file. The original leaked the file
    //    handle; close it explicitly once the content is consumed.
    val source = Source.fromFile("data/schema.txt")
    val schema: Array[String] =
      try source.mkString.split(" ")
      finally source.close()

    // Indices of schema columns whose name contains "time".
    // Replaces the original's three mutable counters and a side-effecting
    // `map` with one functional pass; the resulting indices are identical.
    val slot: Array[Int] = schema.zipWithIndex.collect {
      case (name, idx) if name.contains("time") => idx
    }

    // 3. Ingest records. NOTE(review): textFileStream monitors a *directory*
    //    for newly created files — "data/data.txt" looks like a single file;
    //    confirm this should not be a directory path (or socketTextStream,
    //    which the commented-out original line used).
    //val inputDStream: DStream[String] = ssc.socketTextStream("node01", 9999)
    val inputDStream: DStream[String] = ssc.textFileStream("data/data.txt")

    // 4. For every batch, project each row down to its "time" columns.
    val timeColumns: DStream[String] = inputDStream.transform { rdd =>
      rdd
        .map(line => line.trim.split("\\s+"))             // split row on whitespace
        .map(fields => slot.map(fields(_)).mkString(" ")) // keep only slotted columns
    }

    // BUG FIX: the original registered no output operation on the transformed
    // stream, so ssc.start() would fail with "No output operations registered,
    // so nothing to execute". print() both registers an action and shows the
    // extracted columns on the driver (the original println ran on executors).
    timeColumns.print()

    // 5. Start the streaming application; it runs until stopped externally
    //    or terminated by an error.
    ssc.start()
    ssc.awaitTermination()
    // Shut down cleanly once awaitTermination returns.
    ssc.stop(stopSparkContext = true, stopGracefully = true)
  }

}
