package org.jxkj.app

import java.text.SimpleDateFormat
import java.util.{Date, UUID}
import org.apache.spark.SparkContext
import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.{SparkSession, types}
import org.apache.spark.sql.types.StructType
import org.jxkj.data.IDaoProvider
import org.jxkj.util.DftoMap

object IntelligentMeter {


  /**
   * Compute the daily statistics for the day given in `time(1)`.
   *
   * Pipeline:
   *   1. load the point-code mapping CSV into temp view `point_code_cal`;
   *   2. fetch each point's single reading for the target day and for the next day;
   *   3. diff the two readings; meter replacement (next-day value smaller than
   *      the previous day's) is patched by adding back the day's maximum value;
   *   4. aggregate per day-level point code and append the rows to MySQL table
   *      `hms_stat_caldata` (old rows for the same date/type are deleted first);
   *   5. trigger the formula-based and transformer-loss follow-up calculations.
   *
   * @param ss     active Spark session
   * @param sc     Spark context, passed through to the data-access helpers
   * @param hTable DAO used to read the raw point data
   * @param time   two-element array; only time(1) (a "yyyy/MM/dd" date) is used
   * @param step   sampling step, forwarded to the fetch and follow-up calls
   */
  def calculateData_day(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step: Int): Unit = {
    import ss.implicits._
    val lastDate = time(1)
    val sdf = new SimpleDateFormat("yyyy/MM/dd")
    val endTimeStamp = sdf.parse(lastDate).getTime()
    // Day after lastDate. NOTE(review): adding 24h of millis ignores DST — assumes a fixed-offset timezone; confirm.
    val endTime = sdf.format(new Date(endTimeStamp + 3600 * 24 * 1000))

    val startTime2 = lastDate + " 00:00:00"
    val endTime2 = endTime + " 00:00:00"
    println("startTime2" + startTime2)
    println("endTime2" + endTime2)
    // The fetch helpers expect a [start, end] pair; here each window is a single instant.
    val time1 = Array(startTime2, startTime2)
    val time2 = Array(endTime2, endTime2)
    println("begin day")

    // Read the config file to get the points to calculate.
    val schema = new StructType()
      .add("point_code", types.StringType)
      .add("point_code_day", types.StringType)
    val point_code_cal_DF = ss.read.schema(schema).csv(MysqlDml.fileprefix + "/user/hive/point_code_cal.csv")
    point_code_cal_DF.createOrReplaceTempView("point_code_cal")
    // Point codes to fetch.
    val point_code: Array[String] = point_code_cal_DF.map(x => x.getString(0)).collect()

    // Points whose code contains 'P2R0' count negatively (reverse-direction meters — TODO confirm semantics).
    ss.sql(
      s"""
         | select
         |   point_code,
         |   point_code_day,
         |   case when instr(point_code, 'P2R0') > 0 then -1 else 1 end as is_positive
         | from
         |   point_code_cal
         |""".stripMargin)
      .createOrReplaceTempView("hms_eo_trfloss_config2")


    // Fetch point data for the target day (each point has exactly one row for this day),
    // registered by the helper as temp view t_ods_hbase_edos_tmp; snapshot it as t_ods.
    GetData.getByWebApiDay(ss, sc, hTable, point_code, time1, step, 1)
    ss.sql("select * from t_ods_hbase_edos_tmp")
      .createOrReplaceTempView("t_ods")

    // Fetch the next day's data (one row per point); this re-registers t_ods_hbase_edos_tmp.
    GetData.getByWebApiDay(ss, sc, hTable, point_code, time2, step, 1)

    // Handle meter replacement: find points whose next-day value is smaller than the previous day's.
    val tempDF = ss.sql(
      s"""
         | select
         |   a.checkPoint as point_code
         | from
         |   t_ods_hbase_edos_tmp a join t_ods b on
         |     a.checkPoint = b.checkPoint
         | where
         |   a.pvalue < b.pvalue
         |""".stripMargin)
    // Affected point codes.
    val pointCodes = tempDF.map(x => x.getString(0)).collect();
    // Look up those points' maximum value within the first day (registers t_ods_hbase_edos_max_value).
    GetData.getMaxValueOneDay(ss, sc, pointCodes, time1(0), time2(0))


    // Compute the pvalue delta between the two days for each point;
    // the date_add predicate keeps only pairs exactly one day apart.
    val etlDF = ss.sql(
      s"""
         | select
         |   a.checkPoint,
         |   b.createTime,
         |   a.createTime as end_date,
         |   a.pvalue-b.pvalue as pvalue
         | from
         |   t_ods_hbase_edos_tmp a join t_ods b on
         |     a.checkPoint = b.checkPoint
         | where
         |   to_date(regexp_replace(a.createTime, '/', '-')) = date_add(to_date(regexp_replace(b.createTime, '/', '-')), 1)
         |""".stripMargin)
    etlDF.createOrReplaceTempView("t_ods_hbase_edos2")

    // Patch replaced meters: a negative delta means the meter was swapped, so add
    // back the previous day's maximum reading. Re-registers t_ods_hbase_edos2 on purpose.
    val etlDF2 = ss.sql(
      s"""
         | select
         |   a.checkPoint,
         |   a.createTime,
         |   a.end_date,
         |   (case when (a.pvalue < 0 and b.pvalue is not null) then (a.pvalue + b.pvalue) else a.pvalue end) as pvalue
         | from
         |   t_ods_hbase_edos2 a left join t_ods_hbase_edos_max_value b on
         |     a.checkPoint = b.checkPoint
         |""".stripMargin)
    etlDF2.createOrReplaceTempView("t_ods_hbase_edos2")

    // Aggregate deltas per day-level point code, applying the sign from the config view.
    val tmp1resultDf = ss.sql(
      s"""
         | select
         |   b.point_code_day as point_code,
         |   'day' as stat_type,
         |   createTime as stat_date,
         |   end_date,
         |   sum(pvalue*is_positive) as calvalue
         | from
         |   t_ods_hbase_edos2 a join hms_eo_trfloss_config2 b on
         |     a.checkPoint = b.point_code
         | group by
         |   b.point_code_day,
         |   createTime,
         |   end_date
         |""".stripMargin).cache()
    tmp1resultDf.createOrReplaceTempView("tmp1resultDf")


    // Register the UUID-generator UDF with Spark SQL (used as the primary key below).
    val generateUUID = () => UUID.randomUUID().toString.replace("-", "")
    ss.udf.register("uuID", generateUUID)


    // Final one-day result rows, decorated with fixed org/project columns and audit timestamps.
    val resultDf = ss.sql(
      s"""
         | select
         |   uuID() as data_id,
         |   a.*,
         |   'GZB' as project_id,
         |   '10000' as org_id,
         |   'zncb' as sub_org_id,
         |   '' as create_person,
         |   now() as create_time,
         |   '' as modify_person,
         |   now() as modify_time
         | from
         |   tmp1resultDf a
         |""".stripMargin)


    // Delete old rows for this date before appending, so a re-run does not duplicate data.
    val tablename = "hms_stat_caldata"
    val delString = s"delete from $tablename where stat_date ='$startTime2' and stat_type='day' and sub_org_id='zncb'"
    MysqlDml.delete2(delString)
    // Append the result rows to MySQL.
    resultDf.write
      .format("jdbc")
      .mode("append")
      .option("url", MysqlDml.url2)
      .option("dbtable", tablename)
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .save()

    // Formula-derived points: compute and persist their results as well.
    DftoMap.insertFormula2Mysql(ss, sc, Array(startTime2, startTime2), "day", 3600 * 24)

    // Compute transformer loss rate and station service power for the same day.
    TrfLoss.transformData(ss, sc, hTable, Array(startTime2, startTime2), step)
  }

  /**
   * Compute the monthly statistics.
   *
   * Runs only on a month boundary: when the day after `time(1)` is the 1st of a
   * month (i.e. `time(1)` was the last day of its month); otherwise it is a no-op.
   *
   * @param ss     active Spark session
   * @param sc     Spark context, passed through to the calculation helpers
   * @param hTable DAO used to read the raw point data
   * @param time   two-element array; only time(1) (a "yyyy/MM/dd" date) is used
   * @param step   sampling step, forwarded to TrfLoss.transformData
   */
  def calculateData_mth(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step: Int): Unit = {
    val dayFormat = new SimpleDateFormat("yyyy/MM/dd")
    val lastDate = time(1)
    // Day after lastDate, formatted back to yyyy/MM/dd.
    val nextDayMillis = dayFormat.parse(lastDate).getTime() + 3600 * 24 * 1000
    val endTime = dayFormat.format(new Date(nextDayMillis))
    println(endTime)
    val dayOfMonth = endTime.substring(8, 10)
    println(dayOfMonth)
    // Window: first day of lastDate's month up to (exclusive) the day after lastDate.
    val startTime2 = lastDate.substring(0, 8) + "01 00:00:00"
    val endTime2 = endTime + " 00:00:00"
    println("startTime " + startTime2)
    println("endTime " + endTime2)

    // endTime falls on the 1st exactly when lastDate closed a month.
    if (dayOfMonth == "01") {
      calculateData(ss, sc, hTable, startTime2, endTime2, 1, "month")
      DftoMap.insertFormula2Mysql(ss, sc, Array(startTime2, startTime2), "month", 3600 * 24)
      TrfLoss.transformData(ss, sc, hTable, Array(startTime2, startTime2), step)
    }
  }

  /**
   * Compute the quarterly statistics.
   *
   * Runs only on a quarter boundary: when the day after `time(1)` is
   * Jan/Apr/Jul/Oct 1st (i.e. `time(1)` was the last day of a quarter);
   * otherwise it is a no-op.
   *
   * @param ss     active Spark session
   * @param sc     Spark context, passed through to the calculation helpers
   * @param hTable DAO used to read the raw point data
   * @param time   two-element array; only time(1) (a "yyyy/MM/dd" date) is used
   * @param step   sampling step, forwarded to TrfLoss.transformData
   */
  def calculateData_quarter(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step: Int): Unit = {
    val lastDate = time(1)
    val sdf = new SimpleDateFormat("yyyy/MM/dd")
    // Day after lastDate; its "MM/dd" part tells us whether a quarter just ended.
    val endTime = sdf.format(new Date(sdf.parse(lastDate).getTime() + 3600 * 24 * 1000))
    val q = endTime.substring(5, 10)
    // Map each quarter boundary to the first day of the quarter that just closed.
    // (The original fell back to lastDate for non-boundaries, but that value was
    // never used because the same four boundaries were re-checked before running.)
    val quarterStart: Option[String] = q match {
      case "04/01" => Some(lastDate.substring(0, 5) + "01/01 00:00:00")
      case "07/01" => Some(lastDate.substring(0, 5) + "04/01 00:00:00")
      case "10/01" => Some(lastDate.substring(0, 5) + "07/01 00:00:00")
      case "01/01" => Some(lastDate.substring(0, 5) + "10/01 00:00:00")
      case _       => None // not a quarter boundary: nothing to do
    }

    quarterStart.foreach { startTime2 =>
      val endTime2 = endTime + " 00:00:00"
      calculateData(ss, sc, hTable, startTime2, endTime2, 3, "quarter")
      DftoMap.insertFormula2Mysql(ss, sc, Array(startTime2, startTime2), "quarter", 3600 * 24)
      TrfLoss.transformData(ss, sc, hTable, Array(startTime2, startTime2), step)
    }
  }

  /**
   * Compute the year-to-date statistics.
   *
   * The window runs from January 1st of `time(1)`'s year up to the current
   * wall-clock time. Only executes when step == 1.
   *
   * @param ss     active Spark session
   * @param sc     Spark context, passed through to the calculation helpers
   * @param hTable DAO used to read the raw point data
   * @param time   two-element array; only time(1) (a "yyyy/MM/dd" date) is used
   * @param step   sampling step; the calculation is gated on step == 1 and the
   *               value is forwarded to TrfLoss.transformData
   */
  def calculateData_year(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step: Int): Unit = {
    val lastDate = time(1)
    val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")
    // Never reassigned, so val (the originals were vars).
    val startTime2 = lastDate.substring(0, 5) + "01/01 00:00:00"
    // End of window is "now": this is a year-to-date figure, not a closed year.
    val endTime3 = sdfs.format(new Date)
    println(startTime2 + "--" + endTime3)

    if (step == 1) {
      calculateData(ss, sc, hTable, startTime2, endTime3, 12, "year")
      DftoMap.insertFormula2Mysql(ss, sc, Array(startTime2, startTime2), "year", 3600 * 24)
      TrfLoss.transformData(ss, sc, hTable, Array(startTime2, startTime2), step)
    }
  }

  /**
   * Core statistics calculation shared by the month/quarter/year entry points.
   *
   * Same pipeline as calculateData_day but parameterized on the window and the
   * statistic type: fetch each point's reading at the window start and end,
   * diff them (patching meter replacements with the window's maximum value),
   * aggregate per day-level point code, and append the rows to MySQL table
   * `hms_stat_caldata` after deleting old rows for the same window and type.
   *
   * @param ss         active Spark session
   * @param sc         Spark context, passed through to the data-access helpers
   * @param hTable     DAO used to read the raw point data
   * @param startTime2 window start, "yyyy/MM/dd HH:mm:ss"
   * @param endTime2   window end, "yyyy/MM/dd HH:mm:ss"
   * @param mthCnt     number of months in the window (NOTE(review): currently unused in this body — confirm intent)
   * @param statType   statistic type label written to stat_type ("month", "quarter", "year")
   */
  def calculateData(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, startTime2: String, endTime2: String, mthCnt: Int, statType: String): Unit = {
    import ss.implicits._
    println("startTime2" + startTime2)
    println("endTime2" + endTime2)

    // The fetch helpers expect a [start, end] pair; here each window is a single instant.
    val time1 = Array(startTime2, startTime2)
    val time2 = Array(endTime2, endTime2)

    // Read the config file to get the points to calculate.
    val schema = new StructType()
      .add("point_code", types.StringType)
      .add("point_code_day", types.StringType)
    val point_code_cal_DF = ss.read.schema(schema).csv(MysqlDml.fileprefix + "/user/hive/point_code_cal.csv")
    point_code_cal_DF.createOrReplaceTempView("point_code_cal")

    // Points whose code contains 'P2R0' count negatively (reverse-direction meters — TODO confirm semantics).
    ss.sql(
      s"""
         | select
         |   point_code,
         |   point_code_day,
         |   (case when instr(point_code, 'P2R0') > 0 then -1 else 1 end) as is_positive
         | from
         |   point_code_cal
         |""".stripMargin)
      .createOrReplaceTempView("hms_eo_trfloss_config2")

    // Point codes to fetch.
    val point_code: Array[String] = point_code_cal_DF.map(x => x.getString(0)).collect()

    // Fetch the readings at the window start (registers temp view t_ods_hbase_edos_tmp).
    GetData.getByWebApiDay(ss, sc, hTable, point_code, time1, 1, 1)

    // Snapshot the start-of-window data as t_ods before the next fetch overwrites the view.
    ss.sql(
      s"""
         | select * from t_ods_hbase_edos_tmp
         |""".stripMargin)
      .createOrReplaceTempView("t_ods")

    // Fetch the readings at the window end (re-registers t_ods_hbase_edos_tmp).
    GetData.getByWebApiDay(ss, sc, hTable, point_code, time2, 1, 1)

    // Handle meter replacement: find points whose end value is smaller than the start value.
    val tempDF = ss.sql(
      s"""
         | select
         |   a.checkPoint as point_code
         | from
         |   t_ods_hbase_edos_tmp a join t_ods b on
         |     a.checkPoint = b.checkPoint
         | where
         |   a.pvalue < b.pvalue
         |""".stripMargin)
    // Affected point codes.
    val pointCodes = tempDF.map(x => x.getString(0)).collect();
    // Look up those points' maximum value within the window (registers t_ods_hbase_edos_max_value).
    GetData.getMaxValueOneDay(ss, sc, pointCodes, time1(0), time2(0))

    // Compute the pvalue delta between window end and window start for each point.
    // NOTE(review): unlike calculateData_day, there is no one-day-apart date filter here — confirm intended.
    val etlDF = ss.sql(
      s"""
         | select
         |   a.checkPoint,
         |   b.createTime,
         |   a.createTime as end_date,
         |   a.pvalue-b.pvalue as pvalue
         | from
         |   t_ods_hbase_edos_tmp a join t_ods b on
         |     a.checkPoint = b.checkPoint
         |""".stripMargin)
    etlDF.createOrReplaceTempView("t_ods_hbase_edos2")

    // Patch replaced meters: a negative delta means the meter was swapped, so add
    // back the window's maximum reading. Re-registers t_ods_hbase_edos2 on purpose.
    val etlDF2 = ss.sql(
      s"""
         | select
         |   a.checkPoint,
         |   a.createTime,
         |   a.end_date,
         |   (case when (a.pvalue < 0 and b.pvalue is not null) then (a.pvalue + b.pvalue) else a.pvalue end) as pvalue
         | from
         |   t_ods_hbase_edos2 a left join t_ods_hbase_edos_max_value b on
         |     a.checkPoint = b.checkPoint
         |""".stripMargin)
    etlDF2.createOrReplaceTempView("t_ods_hbase_edos2")


    // Aggregate deltas per day-level point code, applying the sign from the config view.
    val tmp1resultDf = ss.sql(
      s"""
         | select
         |   b.point_code_day as point_code,
         |   '$statType' as stat_type,
         |   createTime as stat_date,
         |   end_date,
         |   sum(pvalue*is_positive) as calvalue
         | from
         |   t_ods_hbase_edos2 a join hms_eo_trfloss_config2 b on
         |     a.checkPoint = b.point_code
         | group by
         |   b.point_code_day,
         |   createTime,
         |   end_date
         |""".stripMargin).cache()
    tmp1resultDf.createOrReplaceTempView("tmp1resultDf")

    // Register the UUID-generator UDF with Spark SQL (used as the primary key below).
    val generateUUID = () => UUID.randomUUID().toString.replace("-", "")
    ss.udf.register("uuID", generateUUID)

    // Final result rows, decorated with fixed org/project columns and audit timestamps.
    val resultDf = ss.sql(
      s"""
         | select
         |   uuID() as data_id,
         |   a.*,
         |   'GZB' as project_id,
         |   '10000' as org_id,
         |   'zncb' as sub_org_id,
         |   '' as create_person,
         |   now() as create_time,
         |   '' as modify_person,
         |   now() as modify_time
         | from
         |   tmp1resultDf a
         |""".stripMargin)

    // Delete old rows for this window/type before appending, so a re-run does not duplicate data.
    val tablename = "hms_stat_caldata"
    val delString = s"delete from $tablename where stat_date ='$startTime2' and stat_type='$statType'  and sub_org_id='zncb'"
    MysqlDml.delete2(delString)

    // Append the result rows to MySQL.
    resultDf.write
      .format("jdbc")
      .mode("append")
      .option("url", MysqlDml.url2)
      .option("dbtable", tablename)
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .save()
  }
}
