package com.brsface

import java.time.LocalDate
import java.time.format.DateTimeFormatter

import org.apache.spark.sql.SparkSession

object PassinfoMetric_01 {

  // Field names and types for pass-info rows.
  // NOTE(review): appears unused in this file — kept for source compatibility;
  // confirm no external caller references it before removing.
  case class Coltest(id: Int, name: String, language: String) extends Serializable

  /**
   * Daily pass-info aggregation job.
   *
   * For every day in the inclusive range [args(0), args(1)] (both `yyMMdd`),
   * counts passes per camera from `t_person_passinfo` (partition dt=yyMMdd)
   * and overwrites partition dt=yyyy-MM-dd of `metric_day_passinfo_t`
   * with (camera_id, total, create_time) rows.
   *
   * @param args args(0) = start date (yyMMdd), args(1) = end date (yyMMdd)
   */
  def main(args: Array[String]): Unit = {
    require(args.length >= 2, "usage: PassinfoMetric_01 <start yyMMdd> <end yyMMdd>")

    val spark = SparkSession
      .builder()
      .appName("PassinfoMetric_01")
      // .master("local[2]") // enable for local debugging
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use brsface")

    val sourceFmt = DateTimeFormatter.ofPattern("yyMMdd")     // source partition key format
    val targetFmt = DateTimeFormatter.ofPattern("yyyy-MM-dd") // target partition key format
    val startDate = LocalDate.parse(args(0), sourceFmt)
    val endDate   = LocalDate.parse(args(1), sourceFmt)

    // Create the target table once, before the loop — the DDL is loop-invariant,
    // so running it per-day (as before) just wasted a metastore round-trip each iteration.
    val CREATE_TABLE_SQL =
      s"""
         |CREATE TABLE IF NOT EXISTS metric_day_passinfo_t (camera_id STRING, total INT, create_time STRING)
         |PARTITIONED BY (dt string)
         |row format delimited fields terminated by '\\001'
         |stored as textfile
      """.stripMargin
    spark.sql(CREATE_TABLE_SQL)

    // Walk every date in [startDate, endDate] inclusive without a mutable var.
    Iterator
      .iterate(startDate)(_.plusDays(1))
      .takeWhile(!_.isAfter(endDate))
      .foreach { day =>
        val dt1 = sourceFmt.format(day) // yyMMdd     — partition key of the source table
        val dt2 = targetFmt.format(day) // yyyy-MM-dd — partition key of the metric table

        // Per-camera pass counts plus the earliest create_time for the day.
        val SELECT_SQL =
          s"""
             |select to_date(min(create_time)) as create_time, camera_id, count(pass_id) as total
             |from t_person_passinfo
             |where dt='${dt1}'
             |group by camera_id
          """.stripMargin
        val resultDF = spark.sql(SELECT_SQL)

        resultDF.createOrReplaceTempView("temp_table1")

        // Overwrite the day's partition so re-running the job is idempotent.
        val INSERT_SQL =
          s"""
             |insert overwrite table metric_day_passinfo_t
             |partition(dt='${dt2}')
             |select camera_id,total,create_time
             |from temp_table1
          """.stripMargin
        spark.sql(INSERT_SQL)
      }

    spark.stop()
  }
}
