package demo.spark.hurypoint.parser

import demo.spark.hurypoint.filedealer.ParquetDealer.getTimeSequences
import demo.spark.utils.{SparkCore, SparkSql}
import org.apache.spark.SparkContext
import org.apache.spark.sql.types.{LongType, StringType}
import org.apache.spark.sql.{DataFrame, SQLContext, SparkSession}
import org.apache.spark.storage.StorageLevel

object AppFileRunner {

  // Scala's non-local break mechanism, used to allow early exit from the day loop.
  import scala.util.control.Breaks

  /**
   * Entry point: reads one parquet directory per day in the configured date
   * range, feeds each day's DataFrame through the VIN and click-event
   * aggregators, tracks the total number of processed log lines, then emits
   * both reports and shuts the session down.
   */
  def main(args: Array[String]): Unit = {
    // Spark environment.
    // NOTE(review): sparkContext is never referenced below; kept because
    // SparkCore.getContext may have a required side effect (creating the
    // underlying context the session reuses) — confirm before removing.
    val sparkContext: SparkContext = SparkCore.getContext("AppRunner")(master = "local[*]")
    val sparkSession: SparkSession = SparkSql.getSession("AppRunner")(master = "local[*]")

    // Per-day parquet directory template; %s is the date partition key.
    val parquetHomeDirTemplate: String = "/Users/icasue/Desktop/bury_point/parquet_json_clear/%s.parquet"

    // Running total of log lines processed across all days.
    var totalCount: Long = 0L

    // VIN aggregator: output schema (vin: String non-null, count: Long non-null).
    val vinParser: VinParser = new VinParser(
      sparkSession,
      ("vin", StringType, false),
      ("count", LongType, false)
    )
    vinParser.prepare(null, null)

    // Click-event aggregator: output schema
    // (page_name/ctrl_name/arg: nullable String, count: Long non-null).
    val clickEventParser: ClickEventParser = new ClickEventParser(
      sparkSession,
      ("page_name", StringType, true),
      ("ctrl_name", StringType, true),
      ("arg", StringType, true),
      ("count", LongType, false)
    )
    clickEventParser.prepare(null, null)

    // Loop code block control.
    val breaks: Breaks = new Breaks

    breaks.breakable {
      // One parquet directory per day in the inclusive date range.
      val timeSeqs = getTimeSequences("2021-06-01", "2021-07-15")
      for (timeZone: String <- timeSeqs) {

        // Read the day's DataFrame and cache it: it is scanned by both
        // aggregators plus count(), so caching avoids three parquet reads.
        val timePartDF: DataFrame = sparkSession.read
          .format("parquet")
          .option("pathGlobFilter", "*.parquet")
          .load(parquetHomeDirTemplate.format(timeZone))
          .persist(StorageLevel.MEMORY_ONLY)

        // Aggregate VIN groups.
        vinParser.parse(timePartDF)

        // Aggregate page-jump / click events.
        clickEventParser.parse(timePartDF)

        // Sum processed lines and report progress for this day.
        val dayLogCount = timePartDF.count()
        totalCount += dayLogCount
        println(s"\t\t##### deal [${timeZone}] log lines: ${dayLogCount}, total log lines: ${totalCount}")

        // FIX: release this day's cached partitions before the next iteration.
        // The original persisted every day's DataFrame and never unpersisted,
        // leaking executor cache memory across the whole loop.
        timePartDF.unpersist()

        // NOTE(review): this unconditional break stops the loop after the
        // FIRST day, so only one date in the range is ever processed. It looks
        // like debug leftover; delete it to process the full range. Kept here
        // to preserve the program's current observable behavior.
        breaks.break()
      }
    }

    // Emit the VIN report (threshold: 100k lines).
    vinParser.report(100 * 1000)
    // Emit the click-event report (threshold: 100k lines).
    clickEventParser.report(100 * 1000)

    // Close the Spark session.
    sparkSession.close()
  }

}
