package org.jxkj.app

import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.jxkj.data.IDaoProvider
import org.jxkj.util.JzCondition

import java.util.UUID

/**
 * 协联定制曲线及机组开限及最大出力计算程序
 */
/**
 * Cam (coordination) curve check, unit electric-opening-limit and max-output
 * calculation job.
 *
 * Reads point configuration and limit curves from MySQL, pulls time-series
 * point data via `GetData` into the temp view `t_ods_hbase_edos_tmp`, performs
 * piecewise linear interpolation in Spark SQL, and writes the stat results
 * back to MySQL.
 */
object UnitPower {

  // Piecewise linear interpolation used throughout the SQL below:
  //b3=b1+(a3-a1)*(b2-b1)/(a2-a1)
  //b3=b2+(a3-a2)*(b2-b1)/(a2-a1)

  /**
   * Cam (coordination) curve check.
   * 2021-01-14: wheel blade (轮叶) is the intermediate axis, guide blade (导叶) the primary axis.
   *
   * For each unit and timestamp, interpolates the expected wheel-blade value
   * from the `hms_eo_speed_setting` base curves (bracketing the measured water
   * head and guide-blade opening), compares it against the measured wheel-blade
   * value, and writes deviation/alarm rows to `hms_eo_speed_setting_stat`.
   *
   * @param ss     active SparkSession
   * @param sc     SparkContext (forwarded to GetData)
   * @param hTable time-series data provider (forwarded to GetData)
   * @param time   two-element array: [startTime, endTime]
   * @param step   sampling step (forwarded to GetData)
   */
  def calPowerCurve(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step: Int): Unit = {

    // Read configuration tables from MySQL.
    val jdbc1DF = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_asset")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()
    jdbc1DF.createOrReplaceTempView("hms_cm_asset")

    // NOTE(review): connection set 1 (url/user/password) but driver2 —
    // presumably both databases use the same JDBC driver; confirm.
    val jdbc2DF = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url)
      .option("dbtable", "t_unit_electric_point_conf")
      .option("user", MysqlDml.user)
      .option("password", MysqlDml.password)
      .option("driver", MysqlDml.driver2)
      .load()
    jdbc2DF.createOrReplaceTempView("t_unit_electric_point_conf")

    // Base cam-curve settings: (locationsid, water_head, guide/wheel base values, threshold).
    val jdbc3DF = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_eo_speed_setting")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()
    jdbc3DF.createOrReplaceTempView("hms_eo_speed_setting")

    // Register a UUID generator usable from Spark SQL (uuID()) for the stat_id column.
    val generateUUID = () => UUID.randomUUID().toString.replace("-", "")
    ss.udf.register("uuID", generateUUID)

    import ss.implicits._
    // Hard-coded mapping from the per-unit operating-condition point code to the unit name.
    val jzgkDF = Seq(
      ("JKXT_EJ_AI00440", "00F"),
      ("JKXT_EJ_AI00441", "01F"),
      ("JKXT_EJ_AI00442", "02F"),
      ("JKXT_EJ_AI00443", "03F"),
      ("JKXT_EJ_AI00444", "04F"),
      ("JKXT_EJ_AI00445", "05F"),
      ("JKXT_EJ_AI00446", "06F"),
      ("JKXT_EJ_AI00447", "07F"),
      ("JKXT_EJ_AI00448", "08F"),
      ("JKXT_EJ_AI00449", "09F"),
      ("JKXT_EJ_AI00450", "10F"),
      ("JKXT_EJ_AI00451", "11F"),
      ("JKXT_EJ_AI00452", "12F"),
      ("JKXT_EJ_AI00453", "13F"),
      ("JKXT_EJ_AI00454", "14F"),
      ("JKXT_EJ_AI00455", "15F"),
      ("JKXT_EJ_AI00456", "16F"),
      ("JKXT_EJ_AI00457", "17F"),
      ("JKXT_EJ_AI00458", "18F"),
      ("JKXT_EJ_AI00459", "19F"),
      ("JKXT_EJ_AI00460", "20F"),
      ("JKXT_EJ_AI00461", "21F")
    ).toDF("point_code", "unit_name")
    jzgkDF.createOrReplaceTempView("t_jzgk")

    // Collect every point code needed (water head, wheel blade, guide blade, condition point).
    val unitDF = ss.sql("select unit_name,point_code_st from t_unit_electric_point_conf " +
      "union all select unit_name,point_code_ly from t_unit_electric_point_conf " +
      "union all select unit_name,point_code_dy from t_unit_electric_point_conf " +
      " union all select unit_name,point_code from t_jzgk ")

    val arrayString: Array[String] = unitDF.map { x => x.getString(1) }.collect()
    //.filter($"unit_name"==="07F")
    println(arrayString.mkString("---"))

    // Fetch time-series values for all points; populates temp view t_ods_hbase_edos_tmp.
    //GetData.getBySecond(ss,sc,hTable,arrayString,time,step)
    GetData.getByWebApi(ss, sc, hTable, arrayString, time, step, 300)
    ss.catalog.cacheTable("t_ods_hbase_edos_tmp")

    // Pivot the point data into one row per (unit, timestamp) with st/ly/dy values,
    // keeping only rows whose condition point is 1 (or missing, defaulted to 1).
    val etlDF = ss.sql("select e.assetid,b.createTime,round(b.pvalue,2) pvalue_st,round(c.pvalue,2) pvalue_ly,round(d.pvalue,2) pvalue_dy,nvl(g.pvalue,1) condition_id" +
      " from t_unit_electric_point_conf a" +
      "  left join t_ods_hbase_edos_tmp b on a.point_code_st=b.checkPoint" +
      "  left join t_ods_hbase_edos_tmp c on a.point_code_ly=c.checkPoint and b.createTime=c.createTime" +
      "  left join t_ods_hbase_edos_tmp d on a.point_code_dy=d.checkPoint and b.createTime=d.createTime" +
      "  left join t_jzgk f on a.unit_name=f.unit_name" +
      "  left join t_ods_hbase_edos_tmp g on f.point_code=g.checkPoint and b.createTime=g.createTime" +
      "  left join hms_cm_asset e on concat(a.unit_name,'机组')=e.description" +
      " where nvl(g.pvalue,1)=1 ").cache()
    // etlDF.show(500)
    etlDF.createOrReplaceTempView("t_etl")

    // Pair each base-curve row with the next one (by guide/wheel base value)
    // so every row describes an interpolation segment [b1,b2] / [c1,c2].
    ss.sql("select locationsid,water_head,guide_blade_base_value b1,wheel_blade_base_value c1,wheel_alarm_threshold" +
      ",lead(guide_blade_base_value,1) over(partition by locationsid,water_head order by guide_blade_base_value) b2" +
      ",lead(wheel_blade_base_value,1) over(partition by locationsid,water_head order by wheel_blade_base_value) c2" +
      " from hms_eo_speed_setting").createOrReplaceTempView("t_lower_upper")

    // Lower bracket: the curve at the highest water head <= measured head (rn=1).
    ss.sql("select assetid,createTime,pvalue_st,pvalue_dy,pvalue_ly" +
      ",b.water_head a1,b1,nvl(b2,b1) b2,c1,nvl(c2,c1) c2" +
      ",row_number() over(partition by assetid,createTime,pvalue_st,pvalue_dy,pvalue_ly order by water_head desc) rn" +
      " from t_etl a left join t_lower_upper b on a.assetid=b.locationsid and pvalue_st>=b.water_head and pvalue_dy>=b1 and pvalue_dy<nvl(b2,b1)"
    ).filter("rn=1").createOrReplaceTempView("t_etl2")

    // Upper bracket: the curve at the lowest water head > measured head (rn2=1).
    ss.sql("select assetid,createTime,pvalue_st,pvalue_dy,pvalue_ly" +
      ",a1,a.b1,a.b2,a.c1,a.c2" +
      ",b.water_head a2,b.b1 b3,nvl(b.b2,b.b1) b4,b.c1 c3,nvl(b.c2,b.c1) c4" +
      ",row_number() over(partition by assetid,createTime,pvalue_st,pvalue_dy,pvalue_ly order by water_head) rn2" +
      " from t_etl2 a left join t_lower_upper b on a.assetid=b.locationsid and pvalue_st<b.water_head and pvalue_dy>=b.b1 and pvalue_dy<nvl(b.b2,b.b1)"
    ).filter("rn2=1").createOrReplaceTempView("t_etl3")

    // Interpolate the wheel-blade value on each bracketing curve:
    //(pvalue_dy-b1)/(b2-b1)=(x1-c1)/(c2-c1)
    //x1=c1+(c2-c1)*(pvalue_dy-b1)/(b2-b1)
    //x2=c3+(c4-c3)*(pvalue_dy-b3)/(b4-b3)
    ss.sql("select assetid,createTime,pvalue_st,pvalue_dy,pvalue_ly" +
      ",a1,b1,b2,c1,c2" +
      ",a2,b3,b4,c3,c4" +
      ",c1+(c2-c1)*(pvalue_dy-b1)/(b2-b1) d1" +
      ",c3+(c4-c3)*(pvalue_dy-b3)/(b4-b3) d2" +
      " from t_etl3").createOrReplaceTempView("t_etl4")

    // Final interpolation across water head, deviation ratio and alarm flag (>2% deviates).
    val resultDf = ss.sql("select uuID() stat_id,'' id,assetid locationsid,createTime stat_time,'机组稳定运行态' condition_id,pvalue_st water_head,pvalue_dy guide_blade_value,pvalue_ly wheel_blade_value" +
      ",round(d1+(d2-d1)*(pvalue_st-a1)/(a2-a1),2) wheel_blade_cacl_value" +
      ",round(abs(d1+(d2-d1)*(pvalue_st-a1)/(a2-a1)-pvalue_ly)/pvalue_ly*100,2) wheel_blade_radio" +
      ",case when abs(d1+(d2-d1)*(pvalue_st-a1)/(a2-a1)-pvalue_ly)/pvalue_ly*100>2 then 1 else 0 end wheel_blade_alarm" +
      ", 'GZB' project_id,'10000' org_id,'' sub_org_id,'' create_person,now() create_time,'' modify_person,now() modify_time " +
      " from t_etl4")

    // Delete-then-append to make the write idempotent for the processed window.
    val startTime = time(0)
    val endTime = time(1)
    val tableName = "hms_eo_speed_setting_stat"
    MysqlDml.delete2(s"delete from $tableName where stat_time between  '$startTime'  and  '$endTime'")

    // Write results back to MySQL.
    resultDf.write.mode("append")
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", tableName)
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .save()

    // Release cached data now that the write has materialized everything.
    ss.catalog.uncacheTable("t_ods_hbase_edos_tmp")
    etlDF.unpersist()
  }


  /**
   * Unit electric opening-limit and max-output calculation.
   * Condition source is configurable; tolerances: electric limit = guide blade +/-0.5,
   * unit output = power +/-2; cam curve compares wheel blade.
   * 2021-01-14: wheel blade intermediate axis; `point_code_dzy` replaces `point_code_dy`.
   *
   * Interpolates per-head limits from `hms_eo_electric_limit` /
   * `hms_eo_waterhead_power`, evaluates each sample against them, and writes
   * `hms_eo_electric_limit_stat` and `hms_eo_waterhead_power_stat`.
   *
   * @param ss     active SparkSession
   * @param sc     SparkContext (forwarded to helpers)
   * @param hTable time-series data provider (forwarded to helpers)
   * @param time   two-element array: [startTime, endTime]
   * @param step   sampling step (forwarded to GetData)
   */
  def calcUnitPower(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step: Int): Unit = {
    // Builds the unit operating-condition view (t_jzgk_statu) used below.
    JzCondition.calData(ss, sc, hTable, time, step)

    // Point configuration table.
    // NOTE(review): connection set 1 (url/user/password) but driver2 — confirm
    // both databases really share the same JDBC driver.
    val jdbc2DF = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url)
      .option("dbtable", "t_unit_electric_point_conf")
      .option("user", MysqlDml.user)
      .option("password", MysqlDml.password)
      .option("driver", MysqlDml.driver2)
      .load()
    jdbc2DF.createOrReplaceTempView("t_unit_electric_point_conf")

    // Asset descriptions (joined twice below; cached to avoid re-reading over JDBC).
    val hms_cm_asset = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_asset")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load().cache()
    hms_cm_asset.createOrReplaceTempView("hms_cm_asset")

    // Unit electric-limit base data.
    // NOTE(review): url2 paired with user/password (connection set 1) instead of
    // user2/password2 — looks inconsistent with the other url2 reads; verify credentials.
    val jdbc3DF = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_eo_electric_limit")
      .option("user", MysqlDml.user)
      .option("password", MysqlDml.password)
      .option("driver", MysqlDml.driver2)
      .load()
    jdbc3DF.createOrReplaceTempView("hms_eo_electric_limit")

    // Water-head-limited output base data.
    // NOTE(review): same url2 + user/password mix as above — verify credentials.
    val jdbc4DF = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_eo_waterhead_power")
      .option("user", MysqlDml.user)
      .option("password", MysqlDml.password)
      .option("driver", MysqlDml.driver2)
      .load()
    jdbc4DF.createOrReplaceTempView("hms_eo_waterhead_power")

    // Build interpolation-segment config tables: each row carries the current
    // point (…2 columns) and the previous point (…1 columns, via lag) per unit.
    ss.sql("select substr(b.description,1,3) unit_name,water_head a2,power_max b2,guide_blade_max c2,wheel_blade_max d2,idle_load_max e2,guide_blade_min f2" +
      ",lag(water_head,1) over(partition by a.locationsid order by water_head) a1" +
      ",lag(power_max,1) over(partition by a.locationsid order by water_head) b1" +
      ",lag(guide_blade_max,1) over(partition by a.locationsid order by water_head) c1" +
      ",lag(wheel_blade_max,1) over(partition by a.locationsid order by water_head) d1" +
      ",lag(idle_load_max,1) over(partition by a.locationsid order by water_head) e1" +
      ",lag(guide_blade_min,1) over(partition by a.locationsid order by water_head) f1" +
      " from hms_eo_electric_limit a left join hms_cm_asset b on a.locationsid=b.assetid")
      .createOrReplaceTempView("t_unit_electric_limit_conf_tmp")

    // First segment has no predecessor: default the lower bound to (0, same-as-upper).
    ss.sql("select unit_name,a2,b2,c2,d2,e2,f2,nvl(a1,0) as a1,nvl(b1,b2) as b1,nvl(c1,c2) as c1,nvl(d1,d2) as d1,nvl(e1,e2) as e1,nvl(f1,f2) as f1" +
      " from t_unit_electric_limit_conf_tmp").createOrReplaceTempView("t_unit_electric_limit_conf")

    ss.sql("select substr(b.description,1,3) unit_name,water_head a2,power_max b2,power_min c2" +
      ",lag(water_head,1) over(partition by a.locationsid order by water_head) a1" +
      ",lag(power_max,1) over(partition by a.locationsid order by water_head) b1" +
      ",lag(power_min,1) over(partition by a.locationsid order by water_head) c1" +
      " from hms_eo_waterhead_power a left join hms_cm_asset b on a.locationsid=b.assetid")
      .createOrReplaceTempView("t_unit_power_limit_conf_tmp")

    ss.sql("select unit_name,a2,b2,c2,nvl(a1,0) as a1,nvl(b1,b2) as b1,nvl(c1,c2) as c1" +
      " from t_unit_power_limit_conf_tmp").createOrReplaceTempView("t_unit_power_limit_conf")

    //ss.sql("select *from t_unit_electric_limit_conf order by unit_name,a2").show()
    //ss.sql("select *from t_unit_power_limit_conf order by unit_name,a2").show()

    // All point codes to fetch (water head, wheel blade, guide-blade mid axis, power).
    val unitDF = ss.sql("select unit_name,point_code_st from t_unit_electric_point_conf " +
      "union all select unit_name,point_code_ly from t_unit_electric_point_conf " +
      //      "union all select unit_name,point_code_dy from t_unit_electric_point_conf " +20210114
      "union all select unit_name,point_code_dzy from t_unit_electric_point_conf " +
      "union all select unit_name,point_code_power from t_unit_electric_point_conf ")
    import ss.implicits._
    val arrayString: Array[String] = unitDF.map { x => x.getString(1) }.collect()

    //GetData.getBySecond(ss,sc,hTable,arrayString,time,step)
    GetData.getByWebApi(ss, sc, hTable, arrayString, time, step, 300)

    // One row per (unit, timestamp) with st/ly/dy/power readings and operating condition.
    // 2021-01-15: rows with active power < 60 are excluded — the condition point is
    // unreliable, so generation state cannot be judged below that threshold.
    val resultDf = ss.sql("select a.unit_name,b.createTime,round(b.pvalue,2) pvalue_st,round(c.pvalue,2) pvalue_ly,round(d.pvalue,2) pvalue_dy," +
      "round(f.pvalue,2) pvalue_power,nvl(working_name,'机组稳定运行态') working_name from t_unit_electric_point_conf a" +
      "  left join t_ods_hbase_edos_tmp b on a.point_code_st=b.checkPoint" +
      "  left join t_ods_hbase_edos_tmp c on a.point_code_ly=c.checkPoint and b.createTime=c.createTime" +
      "  left join t_ods_hbase_edos_tmp d on a.point_code_dzy=d.checkPoint and b.createTime=d.createTime" +
      "  left join t_ods_hbase_edos_tmp f on a.point_code_power=f.checkPoint and b.createTime=f.createTime" +
      "  left join t_jzgk_statu e on a.unit_name=e.crew_code and b.createTime=e.start_time where round(f.pvalue,2)>60").cache()

    resultDf.createOrReplaceTempView("t_unit_electric_st_ly_dy_power")
    // Debug output (triggers evaluation; keep or drop deliberately).
    resultDf.show()
    resultDf.filter("pvalue_power<61").show()

    // Unit electric opening limits: interpolate each limit between the bracketing
    // water-head segment (a1, a2] of the config table.
    val result2Df = ss.sql("select a.unit_name,createTime,pvalue_st,pvalue_ly,pvalue_dy,pvalue_power,working_name, " +
      "(b1+(pvalue_st-a1)*(b2-b1)/(a2-a1)) as power_max," +
      "(c1+(pvalue_st-a1)*(c2-c1)/(a2-a1)) as dy_max," +
      "(d1+(pvalue_st-a1)*(d2-d1)/(a2-a1)) as ly_max," +
      "(e1+(pvalue_st-a1)*(e2-e1)/(a2-a1)) as noload_max," +
      "(f1+(pvalue_st-a1)*(f2-f1)/(a2-a1)) as dy_min" +
      " from t_unit_electric_st_ly_dy_power a" +
      " left join t_unit_electric_limit_conf b on a.pvalue_st >b.a1 and a.pvalue_st<=b.a2" +
      " where instr(b.unit_name,a.unit_name)>0")
    //    result2Df.filter("unit_name='01F'").show()
    result2Df.createOrReplaceTempView("t_unit_st_ly_dy_power_range")

    /* Electric-limit output columns:
     * water_head / power_value / guide_blade_value / wheel_blade_value —
     * measured values; power_max_value / guide_blade_max / guide_blade_min /
     * wheel_blade_max — interpolated limits; ege_status — 1 = within limits
     * (guide blade within [dy_min-0.5, dy_max+0.5] when stable, or <= noload_max
     * during start-to-idle), 0 = outside, null for other conditions.
     */
    val result3Df = ss.sql("select working_name id,b.assetid locationsid,working_name as condition_id,createTime stat_time,pvalue_st water_head,pvalue_power power_value,pvalue_dy guide_blade_value,pvalue_ly wheel_blade_value," +
      " power_max power_max_value,round(dy_max,2) as guide_blade_max,dy_min guide_blade_min,round(ly_max,2) as wheel_blade_max" +
      ",case when working_name='机组稳定运行态' and pvalue_dy between dy_min-0.5 and dy_max+0.5 then 1" +
      "      when working_name='开机至空转过程' and pvalue_dy<=noload_max then 1 " +
      "      when working_name not in ('机组稳定运行态','开机至空转过程') then null else 0 end as ege_status," +
      " case when working_name='机组稳定运行态' and (pvalue_dy<dy_min-0.5 or pvalue_dy>dy_max+0.5) then 1 end ege_type," +
      " 'GZB' project_id,'10000' org_id,'' sub_org_id,'' create_person,now() create_time,'' modify_person,now() modify_time from t_unit_st_ly_dy_power_range a" +
      " left join hms_cm_asset b on concat(a.unit_name,'机组')=b.description where working_name in ('机组稳定运行态','开机至空转过程') ")
    //    result3Df.filter("unit_name='01F'").show()

    /* Unit output limits: condition_id, water_head, generator_power (measured
     * output), power_max / power_min (interpolated bounds, +/-2 tolerance on
     * ege_status). Only the stable-running condition is evaluated.
     */
    val result4Df = ss.sql("select working_name id,c.assetid as locationsid,createTime as stat_time,working_name condition_id,pvalue_st water_head,pvalue_power generator_power, " +
      "case when pvalue_power between (c1+(pvalue_st-a1)*(c2-c1)/(a2-a1))-2 and (b1+(pvalue_st-a1)*(b2-b1)/(a2-a1))+2 then 1 else 0 end ege_status," +
      "(b1+(pvalue_st-a1)*(b2-b1)/(a2-a1)) as power_max," +
      "(c1+(pvalue_st-a1)*(c2-c1)/(a2-a1)) as power_min," +
      "'GZB' project_id,'10000' org_id,'' sub_org_id,'' create_person,now() create_time,'' modify_person,now() modify_time from t_unit_electric_st_ly_dy_power a" +
      " left join t_unit_power_limit_conf b on a.pvalue_st >b.a1 and a.pvalue_st<=b.a2" +
      "  left join hms_cm_asset c on concat(a.unit_name,'机组')=c.description" +
      " where working_name='机组稳定运行态' and instr(b.unit_name,a.unit_name)>0  ")
    // Debug output.
    result4Df.show()
    result4Df.orderBy("generator_power").filter("generator_power<61").show()

    // Delete-then-append makes both writes idempotent for the processed window.
    val startTime = time(0)
    val endTime = time(1)
    val tableName = "hms_eo_electric_limit_stat"
    MysqlDml.delete2(s"delete from $tableName where stat_time between  '$startTime'  and  '$endTime'")

    result3Df.write
      .format("jdbc")
      .mode("append")
      .option("url", MysqlDml.url2)
      .option("dbtable", tableName)
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .save()

    val tableName2 = "hms_eo_waterhead_power_stat"
    MysqlDml.delete2(s"delete from $tableName2 where stat_time between  '$startTime'  and  '$endTime'")

    result4Df.write
      .format("jdbc")
      .mode("append")
      .option("url", MysqlDml.url2)
      .option("dbtable", tableName2)
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .save()

    // Release cached data once both writes have consumed it.
    resultDf.unpersist()
    hms_cm_asset.unpersist()
  }
}
