package org.jxkj.app

import org.apache.spark.SparkContext
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{SparkSession, types}
import org.jxkj.data.IDaoProvider
import org.jxkj.util.DftoMap.insertDF2H

import java.text.SimpleDateFormat
import java.util.{Date, UUID}

/**
 * 1. Converted oil level and shared accident oil tank calculation program
 * 2. Daily average / maximum oil leakage
 * 3. Real-time oil leakage
 */
object OilLevelConvert {
  /**
   * Converted oil level and shared accident oil tank calculation (task "换算油位和共事故油罐-5").
   *
   * Loads point configuration (t_oil_level_convert_point_conf) and asset metadata
   * (hms_cm_asset) from MySQL, pulls per-second readings for all configured points
   * from HBase into the temp view t_ods_hbase_edos_tmp, converts raw tank / trap /
   * pit-leak / JG-leak readings into a combined oil-trap level using fixed geometric
   * formulas, then replaces the overlapping time window in hms_oil_level_convert
   * (delete + JDBC append).
   *
   * @param ss     active SparkSession used for SQL and JDBC I/O
   * @param sc     SparkContext handed through to GetData for the HBase scan
   * @param hTable DAO provider for the HBase source table
   * @param time   two-element window: time(0) = start, time(1) = end
   *               (appears to be 'yyyy/MM/dd HH:mm:ss' — TODO confirm against callers)
   * @param step   sampling step forwarded to GetData.getBySecond
   */
  def calcOilLevelConvert(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step: Int): Unit = {
    import ss.implicits._

    // Point-configuration table: one row per unit with the five point codes used below.
    val jdbc2DF = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url)
      .option("dbtable", "t_oil_level_convert_point_conf")
      .option("user", MysqlDml.user)
      .option("password", MysqlDml.password)
      .option("driver", MysqlDml.driver2)
      .load()
    jdbc2DF.createOrReplaceTempView("t_oil_level_convert_point_conf")

    // Asset metadata from the second MySQL endpoint; used later to map unit name -> assetid.
    val hms_cm_asset = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_asset")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()
    hms_cm_asset.createOrReplaceTempView("hms_cm_asset")

    // Flatten the five point-code columns into (unit_name, point_code) rows,
    // one UNION ALL branch per point type.
    val unitDF = ss.sql(
      s"""
         | (
         |  select
         |    unit_name,
         |    point_code_oil_tank
         |  from
         |    t_oil_level_convert_point_conf
         | )
         | union all
         | (
         |   select
         |     unit_name,
         |     point_code_oil_trap
         |   from
         |     t_oil_level_convert_point_conf
         | )
         | union all
         | (
         |   select
         |     unit_name,
         |     point_code_pit_oil_leak
         |   from
         |     t_oil_level_convert_point_conf
         | )
         | union all
         | (
         |   select
         |     unit_name,
         |     point_code_jg_oil_leak
         |   from
         |     t_oil_level_convert_point_conf
         | )
         | union all
         | (
         |   select
         |     unit_name,
         |     point_code_accident_oil_tank
         |   from
         |     t_oil_level_convert_point_conf
         | )
         |""".stripMargin)

    // Column 1 is the point code; de-duplicate before scanning HBase.
    val arrayString: Array[String] = unitDF.map { x => x.getString(1) }.dropDuplicates().collect()

    // Fetch per-second readings for all points; GetData registers them as
    // temp view t_ods_hbase_edos_tmp, cached because it is joined five times below.
    GetData.getBySecond(ss, sc, hTable, arrayString, time, step)
    ss.catalog.cacheTable("t_ods_hbase_edos_tmp")

    // Pivot the readings: one row per (unit, createTime) with each point type's value
    // in its own column, aligned on the oil-tank reading's timestamp.
    val resultDf = ss.sql(
      s"""
         | select
         |   a.unit_name,
         |   b.createTime,
         |   round(b.pvalue, 2) as oil_tank,
         |   round(c.pvalue, 2) as oil_trap,
         |   round(d.pvalue, 2) as pit_oil_leak,
         |   round(f.pvalue, 2) as jg_oil_leak,
         |   round(g.pvalue, 2) as accident_oil_tank,
         |   a.is_common
         | from
         |   t_oil_level_convert_point_conf a
         | left join
         |   t_ods_hbase_edos_tmp b on
         |     a.point_code_oil_tank = b.checkPoint
         | left join
         |   t_ods_hbase_edos_tmp c on
         |     a.point_code_oil_trap = c.checkPoint and b.createTime = c.createTime
         | left join
         |   t_ods_hbase_edos_tmp d on
         |     a.point_code_pit_oil_leak = d.checkPoint and b.createTime = d.createTime
         | left join
         |   t_ods_hbase_edos_tmp f on
         |     a.point_code_jg_oil_leak = f.checkPoint and b.createTime = f.createTime
         | left join
         |   t_ods_hbase_edos_tmp g on
         |     a.point_code_accident_oil_tank = g.checkPoint and b.createTime = g.createTime
         |
         |""".stripMargin)
    resultDf.createOrReplaceTempView("t_etl_oil_level_convert_tmp1")


    // assetid = locationsid: resolve each unit's asset id by matching
    // unit_name + "机组" (generating-unit suffix) against the asset description.
    // NOTE(review): the oil_trap_level formula below uses hard-coded vessel
    // geometry constants (4640*3440 base area, radii/offsets) — presumably
    // site-specific dimensions; confirm with the plant documentation.
    val result2Df = ss.sql(
      s"""
         | select
         |   a.createTime as stat_time,
         |   b.assetid as asset_pid,
         |   accident_oil_tank,
         |   (oil_tank-1400)/(4640*3440/(3.14*1200*1200))+oil_trap+(pit_oil_leak-100)/(4640*3440/(900*700))+(nvl(jg_oil_leak,100)-100)/(4640*3440/(1200*850)) as oil_trap_level,
         |   is_common
         | from
         |   t_etl_oil_level_convert_tmp1 a left join hms_cm_asset b on
         |     concat(a.unit_name, '机组') = b.description where a.createTime is not null
         |""".stripMargin)
    result2Df.createOrReplaceTempView("t_etl_oil_level_convert_tmp2")

    // Shared accident-tank level: window-sum positive trap levels over units that
    // share an accident tank (partition by stat_time, is_common), plus the
    // accident tank's own converted level; negative trap levels are nulled out.
    val result3Df = ss.sql(
      s"""
         | select
         |   stat_time,
         |   asset_pid,
         |   (case when oil_trap_level > 0 then oil_trap_level else null end) as oil_trap_level,
         |   sum(case when oil_trap_level > 0 then oil_trap_level else null end) over(partition by stat_time, is_common) + (accident_oil_tank-700)/(4640*3440/(3.14*1200*1200)) as common_accident_tank_level,
         |   'GZB' as project_id,
         |   '10000' as org_id,
         |   '' as sub_org_id,
         |   '' as create_person,
         |   now() as create_time,
         |   '' as modify_person,
         |   now() as modify_time
         | from
         |   t_etl_oil_level_convert_tmp2 a
         |""".stripMargin)


    val startTime = time(0)
    val endTime = time(1)

    // Delete previously-computed rows for this window so the append below is idempotent.
    // NOTE(review): the delete statement is built by string interpolation; time values
    // come from the scheduler, but parameterized SQL would be safer.
    val tableName = "hms_oil_level_convert"
    MysqlDml.delete2(s" delete from $tableName where stat_time between '$startTime' and '$endTime' ")

    // Append the recomputed window via JDBC.
    result3Df.write
      .format("jdbc")
      .mode("append")
      .option("url", MysqlDml.url2)
      .option("dbtable", tableName)
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .save()

    // Release the cached HBase snapshot taken at the top of this method.
    ss.catalog.uncacheTable("t_ods_hbase_edos_tmp")
  }


  /**
   * Daily average / maximum leakage (task "日漏油量平均最大-12").
   *
   * Converts oil levels read from HBase into 8-hour and 24-hour leakage deltas,
   * aggregates hourly and daily max/min/avg statistics, and stores them in the
   * monthly MySQL table hms_stat_rptdata_&lt;yyyyMM&gt; (delete + append).
   *
   * @param ss     active SparkSession
   * @param sc     SparkContext forwarded to GetData for the HBase scan
   * @param hTable DAO provider for the HBase source table
   * @param time   window: time(0) = start, time(1) = end, 'yyyy/MM/dd HH:mm:ss'
   * @param step   sampling step forwarded to GetData.getByDay
   */
  def calcOilStat(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step: Int): Unit = {
    import ss.implicits._
    val startTime = time(0)
    val endTime = time(1)
    val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")
    val startTimeStamp = sdfs.parse(startTime).getTime()
    // One day before the window start, so 24-hour deltas at the window start have a baseline.
    val lastDate = sdfs.format(new Date(startTimeStamp - 3600 * 24 * 1000))
    val yyyymm = new SimpleDateFormat("yyyyMM")
    // Month key used to pick the monthly stats table (hms_stat_rptdata_<yyyyMM>).
    val startmth = yyyymm.format(sdfs.parse(startTime))

    // Register a UUID-generator UDF (dashes stripped) for the data_id primary key.
    val generateUUID = () => UUID.randomUUID().toString.replace("-", "")
    ss.udf.register("uuID", generateUUID)

    val schema = new StructType()
      .add("point_code", types.StringType)
      .add("point_code_8", types.StringType)
      .add("point_code_24", types.StringType)

    // Point mapping from a CSV config file: raw point code -> the derived
    // 8-hour and 24-hour leakage point codes.
    val point_code_cal_DF = ss.read.schema(schema).csv(MysqlDml.fileprefix + "/user/hive/8h24hlyl.csv")
    point_code_cal_DF.createOrReplaceTempView("lyl")
    val point_code: Array[String] = point_code_cal_DF.map(x => x.getString(0)).collect()

    // Fetch readings day-by-day, starting one day early (lastDate) so the
    // self-joins below can look back up to 24 hours.
    GetData.getByDay(ss, sc, hTable, point_code, Array(lastDate.substring(0, 10), endTime.substring(0, 10)), step)

    // Attach the 8h/24h target point codes to each raw reading; cached because
    // it is self-joined twice below.
    val etlDF = ss.sql(
      s"""
         | select
         |   point_code_24,
         |   point_code_8,
         |   point_code,
         |   createtime as stat_time,
         |   pvalue as oil_trap_level
         | from
         |   lyl a left join t_ods_hbase_edos_tmp b on
         |     a.point_code = b.checkpoint
         |""".stripMargin)
      .cache()
    etlDF.createOrReplaceTempView("t_etlDF")

    // 8-hour leakage: self-join readings of the same point exactly 8 hours apart.
    val resultDf3 = ss.sql(
      s"""
         | select
         |   a.point_code_8 as point_code,
         |   a.stat_time,
         |   a.oil_trap_level,
         |   round((a.oil_trap_level - b.oil_trap_level), 2) as leakage_oil,
         |   b.stat_time as end_date
         | from
         |   t_etlDF a join t_etlDF b on
         |     a.point_code = b.point_code and unix_timestamp(a.stat_time, 'yyyy/MM/dd HH:mm:ss') = unix_timestamp(b.stat_time, 'yyyy/MM/dd HH:mm:ss')+3600*8
         | where
         |   a.stat_time >= '$startTime' and a.stat_time <= '$endTime'
         |""".stripMargin)
      .cache()
    resultDf3.createOrReplaceTempView("tmp1resultDf3")

    // Per-row 8-hour leakage records shaped for persistence.
    // NOTE(review): resultDf4 is computed but never written or referenced below —
    // confirm whether these 8-hour rows were meant to be saved, or whether this
    // is dead code left from a refactor.
    val resultDf4 = ss.sql(
      s"""
         | select
         |   uuID() as data_id,
         |   point_code,
         |   '8hour' as stat_type,
         |   stat_time as stat_date,
         |   end_date,
         |   leakage_oil as calvalue,
         |   'GZB' as project_id,
         |   '10000' as org_id,
         |   'lyl' as sub_org_id,
         |   '' as create_person,
         |   now() as create_time,
         |   '' as modify_person,
         |   now() as modify_time
         | from
         |   tmp1resultDf3
         |""".stripMargin)

    // 24-hour leakage: same self-join pattern, 24 hours apart.
    val resultDf5 = ss.sql(
      s"""
         | select
         |   a.point_code_24 as point_code,
         |   a.stat_time,
         |   a.oil_trap_level,
         |   round((a.oil_trap_level - b.oil_trap_level), 2) as leakage_oil,
         |   b.stat_time as end_date
         | from
         |   t_etlDF a join t_etlDF b on
         |     a.point_code = b.point_code
         |   and
         |     unix_timestamp(a.stat_time, 'yyyy/MM/dd HH:mm:ss') = unix_timestamp(b.stat_time, 'yyyy/MM/dd HH:mm:ss')+3600*24
         | where
         |   a.stat_time >= '$startTime' and a.stat_time <= '$endTime'
         |""".stripMargin)
      .cache()
    resultDf5.createOrReplaceTempView("tmp1resultDf5")


    // 2021-01-23: the job interval changed from daily to hourly, so the hours of the
    // same day that fall outside [startTime, endTime] must be read back from the
    // already-stored hourly stats and merged in before the daily rollup:
    // [00:00, start hour) and (end hour, 23:59:59].
    val tabname7_2 = "hms_stat_rptdata_" + startmth
    val resultDf7Before = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url)
      .option("dbtable", tabname7_2)
      .option("user", MysqlDml.user)
      .option("password", MysqlDml.password)
      .option("driver", MysqlDml.driver)
      .load()

    // Previously-stored hourly rows from midnight up to the start hour.
    val temp1 = resultDf7Before
      .filter("stat_type = 'hour'")
      .filter("stat_date = '" + startTime.substring(0, 10).replace("/", "") + "'")
      .filter("hour >= '0'")
      .filter("hour < '" + startTime.substring(11, 13).toInt + "'") // 00:00 up to the start hour
    // NOTE(review): hour comparisons are string comparisons against an Int rendered
    // as a string — confirm the `hour` column type makes this ordering correct.

    // Previously-stored hourly rows after the end hour through 23:00.
    // NOTE(review): this filters on startTime's date but endTime's hour — correct
    // only if start and end fall on the same day; confirm with the scheduler.
    val temp2 = resultDf7Before
      .filter("stat_type = 'hour'")
      .filter("stat_date = '" + startTime.substring(0, 10).replace("/", "") + "'")
      .filter("hour > '" + endTime.substring(11, 13).toInt + "'")
      .filter("hour<='23'") // end hour through 24:00

    // All same-day hours outside the current window.
    val dayTemp = temp1.union(temp2)
    dayTemp.createOrReplaceTempView("t_et7Before")

    // Keep only the 8h/24h leakage points (both prefixes are exactly 15 characters).
    ss.sql(
      s"""
         | select
         |   point_code,
         |   stat_type,
         |   stat_date,
         |   hour,
         |   maxtime,
         |   maxval,
         |   mintime,
         |   minval,
         |   avgval,
         |   create_time
         | from
         |   t_et7Before
         | where
         |   substr(point_code, 1, 15) in ('HMS_QSFX_8HLYL_', 'HMS_QSFX_24HLYL')
         |""".stripMargin)
    .createOrReplaceTempView("t_et7Before_8_24")

    // Hourly max/min/avg of the 8h and 24h leakage deltas, unioned with the
    // previously-stored hourly rows; destined for hms_stat_rptdata_<yyyyMM>.
    // maxtime/mintime are placeholders (epoch in UTC+8), not actual extremum times.
    val resultDf7 = ss.sql(
      s"""
         | select
         |   point_code,
         |   'hour' as stat_type,
         |   regexp_replace(substr(stat_time, 1, 10), '/', '') as stat_date,
         |   cast(substr(stat_time, 12, 2) as int) as hour,
         |   '1970-01-01 08:00:00' as maxtime,
         |   max(leakage_oil) as maxval,
         |   '1970-01-01 08:00:00' as mintime,
         |   min(leakage_oil) as minval,
         |   avg(leakage_oil) as avgval,
         |   now() as create_time
         | from
         |   (select * from tmp1resultDf5 union select * from tmp1resultDf3)
         | group by
         |   point_code,
         |   regexp_replace(substr(stat_time, 1, 10), '/', ''),
         |   cast(substr(stat_time, 12, 2) as int)
         | union
         |   (select * from t_et7Before_8_24)
         |""".stripMargin)
    resultDf7.createOrReplaceTempView("tle_7_hour")

    // Debug output — consider removing or guarding in production.
    ss.sql("select * from tmp1resultDf5 union select * from tmp1resultDf3").show()

    // Daily rollup of the hourly stats (max of maxes, min of mins, avg of hourly avgs).
    val resultDf7_2 = ss.sql(
      s"""
         | select
         |   point_code,
         |   'day' as stat_type,
         |   stat_date,
         |   0 as hour,
         |   '1970-01-01 08:00:00' as maxtime,
         |   max(maxval) as maxval,
         |   '1970-01-01 08:00:00' as mintime,
         |   min(minval) as minval,
         |   avg(avgval) as avgval,
         |   now() as create_time
         | from
         |   tle_7_hour
         | group by
         |   point_code,
         |   stat_date
         |""".stripMargin)

    // Hourly and daily rows go into the same monthly table.
    resultDf7.union(resultDf7_2)
      .createOrReplaceTempView("tmp7resultDf")

    // Attach a generated UUID primary key to every row.
    val resultDf8 = ss.sql(
      s"""
         | select
         |   uuID() as data_id,
         |   a.*
         | from
         |   tmp7resultDf a
         |""".stripMargin)
    resultDf8.createOrReplaceTempView("r8")

    // Debug output for one sample point — consider removing in production.
    ss.sql("select * from r8 where point_code = 'HMS_QSFX_24HLYL_01F'").show()


    // Delete the old stats for this date range so the append is idempotent.
    // NOTE(review): string-interpolated SQL; time values come from the scheduler,
    // but parameterized SQL would be safer.
    val tablename2 = "hms_stat_rptdata_" + startmth
    val delString3 = s"""
       | delete from $tablename2
       | where
       |   substr(point_code, 1, 15) in ('HMS_QSFX_8HLYL_', 'HMS_QSFX_24HLYL')
       | and
       |   stat_date >= replace(substr('$startTime', 1, 10), '/', '')
       | and
       |   stat_date <= replace(substr('$endTime', 1, 10), '/', '')
       |""".stripMargin
    MysqlDml.delete(delString3)


    // Persist the recomputed stats.
    resultDf8.write
      .format("jdbc")
      .mode("append")
      .option("url", MysqlDml.url)
      .option("dbtable", tablename2)
      .option("user", MysqlDml.user)
      .option("password", MysqlDml.password)
      .option("driver", MysqlDml.driver)
      .save()
  }


  /**
   * Real-time leakage (8-hour and 24-hour deltas).
   *
   * Per-interval values are pushed to the Node.js service via insertDF2H and are
   * NOT stored in the database.
   *
   * @param ss     active SparkSession
   * @param sc     SparkContext forwarded to GetData for the HBase scan
   * @param hTable DAO provider for the HBase source table
   * @param time   window: time(0) = start, time(1) = end, 'yyyy/MM/dd HH:mm:ss'
   * @param step   sampling step forwarded to GetData.get
   */
  def calcOilReal(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step: Int): Unit = {
    import ss.implicits._
    val startTime = time(0)
    val endTime = time(1)
    val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")
    val startTimeStamp = sdfs.parse(startTime).getTime()
    // One day before the window start, so 24-hour deltas at the window start have a baseline.
    val lastDate = sdfs.format(new Date(startTimeStamp - 3600 * 24 * 1000))

    // Register a UUID-generator UDF (dashes stripped).
    val generateUUID = () => UUID.randomUUID().toString.replace("-", "")
    ss.udf.register("uuID", generateUUID)

    val schema = new StructType()
      .add("point_code", types.StringType)
      .add("point_code_8", types.StringType)
      .add("point_code_24", types.StringType)

    // Point mapping from the CSV config file: raw point -> 8h/24h leakage point codes.
    val point_code_cal_DF = ss.read.schema(schema).csv(MysqlDml.fileprefix + "/user/hive/8h24hlyl.csv")
    point_code_cal_DF.createOrReplaceTempView("lyl")
    val point_code: Array[String] = point_code_cal_DF.map(x => x.getString(0)).collect()

    // Fetch raw readings covering [one day before start, end].
    GetData.get(ss, sc, hTable, point_code, Array(lastDate, endTime), step)

    // Attach target point codes to readings, downsampled to one sample every
    // 300 seconds (timestamps on exact 5-minute boundaries).
    val etlDF = ss.sql(
      s"""
         | select
         |   a.point_code_24,
         |   a.point_code_8,
         |   a.point_code,
         |   b.createTime as stat_time,
         |   b.pvalue as oil_trap_level
         | from
         |   lyl a left join t_ods_hbase_edos_tmp b on
         |     a.point_code = b.checkPoint and unix_timestamp(b.createTime, 'yyyy/MM/dd HH:mm:ss')%300 = 0
         |""".stripMargin)
      .cache()
    etlDF.createOrReplaceTempView("t_etlDF")

    // 8-hour leakage delta: self-join readings of the same point 8 hours apart.
    val resultDf3 = ss.sql(
      s"""
         | select
         |   a.point_code_8 as point_code,
         |   a.stat_time,
         |   a.oil_trap_level,
         |   round((a.oil_trap_level - b.oil_trap_level), 2) as leakage_oil,
         |   b.stat_time as end_date
         | from
         |   t_etlDF a join t_etlDF b on
         |     a.point_code = b.point_code and unix_timestamp(a.stat_time, 'yyyy/MM/dd HH:mm:ss') = unix_timestamp(b.stat_time, 'yyyy/MM/dd HH:mm:ss')+3600*8
         | where
         |   a.stat_time >= '$startTime' and a.stat_time <= '$endTime'
         |""".stripMargin)

    // 24-hour leakage delta: same pattern, 24 hours apart.
    val resultDf5 = ss.sql(
      s"""
         | select
         |   a.point_code_24 as point_code,
         |   a.stat_time,
         |   a.oil_trap_level,
         |   round((a.oil_trap_level - b.oil_trap_level), 2) as leakage_oil,
         |   b.stat_time as end_date
         | from
         |   t_etlDF a join t_etlDF b on
         |     a.point_code = b.point_code and unix_timestamp(a.stat_time, 'yyyy/MM/dd HH:mm:ss') = unix_timestamp(b.stat_time, 'yyyy/MM/dd HH:mm:ss')+3600*24
         | where
         |   a.stat_time >= '$startTime' and a.stat_time <= '$endTime'
         |""".stripMargin)

    // Merge the 24-hour and 8-hour results into one view (they are distinguished
    // by their point_code prefix).
    resultDf5.union(resultDf3)
      .createOrReplaceTempView("tmp1resultDf5")

    // Join with hms_cm_point to pick up each point's hardware id (point_hid);
    // stat_date becomes an epoch-seconds integer.
    // NOTE(review): hms_cm_point is not registered as a temp view in this method —
    // it presumably exists in the session catalog (registered by an earlier job step
    // or as a permanent table); confirm the execution order.
    val df = ss.sql(
      s"""
         | select
         |   a.point_code,
         |   cast(unix_timestamp(a.stat_time, 'yyyy/MM/dd HH:mm:ss') as int) as stat_date,
         |   cast(a.leakage_oil as string) as calvalue,
         |   b.point_hid
         | from
         |   tmp1resultDf5 a, hms_cm_point b
         | where
         |   a.point_code = b.point_code
         |""".stripMargin)

    // Push the result rows to the Node.js service (no database write).
    insertDF2H(ss, sc, time, df)
  }
}

