package org.cancer.app

import org.apache.spark.streaming.dstream.DStream
import org.cancer.bean.CancersData
import org.cancer.common.TApp_Laurel
import org.cancer.controller.CancersHighAreaController
import org.cancer.handler.DataHandler_Laurel
import org.cancer.util.SparkUtil

/**
 * Streaming entry point for the cancer high-incidence-area job.
 *
 * Wires the pipeline together via the `start` template inherited from
 * [[TApp_Laurel]]: obtain the StreamingContext, enable checkpointing,
 * read/parse the Kafka source, dispatch to the controller, then block
 * until termination.
 */
object CancersHighAreaApp$Laurel extends TApp_Laurel {

  val cancersHighAreaController = new CancersHighAreaController()

  start("local[*]", "CancersHighAreaApp") {

    // Obtain the StreamingContext managed by the TApp template.
    // Declared implicit so downstream helpers can resolve it without
    // explicit plumbing.
    implicit val streamingContext = SparkUtil.takeSSC()

    // 1. Enable checkpointing (local Windows directory on drive D).
    streamingContext.checkpoint("file:///D:/spark_checkpoint")

    // Consume the Kafka topic and split/parse each record into a
    // CancersData bean via the shared DataHandler.
    val cancerStream: DStream[CancersData] =
      DataHandler_Laurel.KafkaDataHandler("BD2", "CancersHighArea")

    // Hand the parsed stream to the controller layer for processing.
    cancersHighAreaController.dispatch(cancerStream)

    // start() and awaitTermination() are wrapped together here; this
    // call blocks until the streaming job terminates.
    DataHandler_Laurel.startAndAwait()

  }

}
