package org.jxkj.app

import java.text.SimpleDateFormat
import java.util.Date
import org.apache.spark.SparkContext
import org.apache.spark.sql.types.{StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession, types}
import org.jxkj.data.IDaoProvider

/**
 * 1. Health-evaluation scoring jobs: oil temperature vs. winding temperature,
 *    carbon-brush sparking, combined-point thresholds, and infrared measurement.
 *    (Original header: 油温绕组碳刷打火评分)
 *
 * Each method reads point/scene metadata from MySQL, pulls raw readings
 * (via GetData into the temp view t_ods_hbase_edos_tmp) or pre-aggregated
 * stats, computes per-day/per-hour indicators in Spark SQL, then performs a
 * delete-then-append write into the monthly table hms_health_evalut_mech_<yyyyMM>.
 *
 * NOTE(review): MysqlDml presumably holds JDBC connection settings
 * (url/url2, user/user2, ...) and the delete helper; GetData loads HBase
 * readings — both are defined outside this file, confirm their contracts there.
 */
object GroupPointMax {
  // Invocation of the combined-point max/average statistic (sub_org_id='1').
  /**
   * Scores threshold violations for a fixed set of scene items of one parent asset.
   *
   * @param ss     active SparkSession
   * @param sc     SparkContext used to build the (unused) empty accumulator DataFrame
   * @param hTable DAO used by GetData to fetch raw readings
   * @param time   two-element array: time(0)=start, time(1)=end, "yyyy/MM/dd" format
   * @param step   sampling step forwarded to GetData.getBySecond
   * @param item   parent_assetid filter applied to hms_cm_scene_point
   */
  def calCompPointStat(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step : Int, item: String): Unit = {
    import ss.implicits._
    val startTime = time(0)
    val endTime = time(1)
    // Input dates are "yyyy/MM/dd"; startmth ("yyyyMM") selects the monthly result table.
    val sdf = new SimpleDateFormat("yyyy/MM/dd")
    val yyyymm = new SimpleDateFormat("yyyyMM")
    var startmth = yyyymm.format(sdf.parse(startTime))
    // Schema for the per-asset accumulator DataFrame below.
    val schema = StructType(
      Seq(
        StructField("scene_id", types.StringType, true),
        StructField("item_id", types.StringType, true),
        StructField("assetid", types.StringType, true),
        StructField("stat_time", types.StringType, true),
        StructField("scope_h", types.IntegerType, true),
        StructField("maxval", types.FloatType, true),
        StructField("avgval", types.FloatType, true)))
    // NOTE(review): emptyDf is never used — the per-asset union loop that consumed it
    // is commented out below. Candidate for removal.
    var emptyDf = ss.createDataFrame(sc.emptyRDD[Row], schema)
    // Read metadata tables from MySQL and expose them as temp views for the SQL below.
    val jdbcDF = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_scene_point")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()

    jdbcDF.createOrReplaceTempView("hms_cm_scene_point")

    val jdbcDF2 = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_scene_item")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()

    jdbcDF2.createOrReplaceTempView("hms_cm_scene_item")

    val jdbcDF3 = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_point")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()

    jdbcDF3.createOrReplaceTempView("hms_cm_point")

    // NOTE(review): maxSet is never used; the same item numbers are hard-coded
    // inside the SQL below. Candidate for removal or for driving the SQL.
    val maxSet = Array("B.1-2.3.5.1-1",
      "B.1-1.4.4.1-1",
      "B.1-1.3.5.1-1"
    )

    // NOTE(review): duplicate of the import at the top of this method.
    import ss.implicits._
    // Join scene points with scene items (restricted to a fixed item_no list) and
    // point metadata, filtered to the given parent asset. scope_h2 is a hard-coded
    // per-item threshold; flag=1 marks the three items scored by max value, all
    // others (flag=2) are scored by average.
    // NOTE(review): `==` on some lines below is accepted by Spark SQL as equality
    // but is inconsistent with the `=` used above — confirm this is intentional.
    val pointDF = ss.sql("select a.scene_id,a.item_id,a.parent_assetid assetid,a.point_code,c.scope_l,c.scope_h" +
      ",case when b.item_no ='B.1-2.3.5.1-1' then 56 " +
      "      when b.item_no ='B.1-1.4.4.1-1' then 50 " +
      "      when b.item_no ='B.1-1.3.5.1-1' then 55 " +
      "      when b.item_no ='B.1-1.5.2.1-1' then 30 " +
      "      when b.item_no ='B.1-1.3.4.1-1' then 240 " +
      "      when b.item_no ='B.1-2.3.4.1-1' then 435 " +
      "      when b.item_no =='B.1-1.5.2.2-1' then 80 when b.item_no =='B.1-1.5.2.3-1' then 80 " +
      "      when b.item_no =='B.1-1.7.2.1-1' then 110 when b.item_no =='B.1-1.7.2.2-1' then 80 " +
      "      when b.item_no =='B.1-2.6.2.1-1' then 90 when b.item_no =='B.1-2.6.2.2-1' then 110 " +
      "else 30 end scope_h2 " +
      ",case when b.item_no in ('B.1-2.3.5.1-1','B.1-1.4.4.1-1','B.1-1.3.5.1-1') then 1 else 2 end flag"+
      " from hms_cm_scene_point a join hms_cm_scene_item b " +
      " on a.item_id=b.item_id and a.scene_id=b.scene_id " +
      "and b.item_no in ('B.1-2.3.5.1-1','B.1-1.4.4.1-1','B.1-1.3.5.1-1','B.1-1.5.2.1-1','B.1-1.3.4.1-1','B.1-2.3.4.1-1'" +
      ",'B.1-1.5.2.2-1','B.1-1.5.2.3-1','B.1-1.7.2.1-1','B.1-1.7.2.2-1','B.1-2.6.2.1-1','B.1-2.6.2.2-1') " +
      s" left join hms_cm_point c on a.point_code=c.point_code where a.parent_assetid='$item'").cache()
    pointDF.createOrReplaceTempView("t_point")
/*
    Item-number legend (translated from the original Chinese):
    B.1-2.3.5.1-1  Turbine - water-guide bearing pad temperature
    B.1-1.4.4.1-1  Turbine - thrust bearing temperature
    B.1-1.3.5.1-1  Turbine - upper-guide bearing temperature

    B.1-1.5.2.1-1  Turbine - stator core vibration

    B.1-1.3.4.1-1  Turbine - upper-guide bearing runout
    B.1-2.3.4.1-1  Turbine - water-guide bearing runout
    val x = id match
      {case "B.1-2.3.5.1-1" =>56
        case "B.1-1.4.4.1-1" =>50
        case "B.1-1.3.5.1-1" =>55
        case "B.1-1.5.2.1-1" =>30
        case "B.1-1.3.4.1-1" =>240
        case "B.1-2.3.4.1-1" =>435
        case _ =>30
      }
    */
    //val assetid : Array[String] =pointDF.map(x=>x.getString(2)).dropDuplicates().collect()
    //val assetid : Array[String] =pointDF.map(x=>x.getString(1)).dropDuplicates().collect()

    //for(id <- assetid){
    // Collect every point_code (column index 3 of pointDF) for this asset.
    val point_code: Array[String] = pointDF.map(x=>x.getString(3)).collect()
      //val point_code: Array[String] = pointDF.filter($"item_id"===id).map(x=>x.getString(3)).collect()

      // Debug: print the point codes being processed.
      for(i <- 0 until  point_code.length){println(point_code(i))}
    // Loads raw readings into the temp view t_ods_hbase_edos_tmp (consumed below).
    // NOTE(review): meaning of the trailing `1` argument is defined in GetData — confirm.
    GetData.getBySecond(ss,sc,hTable,point_code,time,step,1)

    // Per-timestamp max/avg of in-range readings (value within [scope_l, scope_h],
    // or no lower bound defined); keep only groups whose max (flag=1) or avg
    // (flag=2) exceeds the hard-coded threshold scope_h2.
    val etlDF= ss.sql("select scene_id,item_id,assetid,createTime as stat_time,scope_h2,flag,max(pvalue) maxval,avg(pvalue) avgval" +
      " from t_point a join t_ods_hbase_edos_tmp b on a.point_code=b.checkPoint" +
      s" where ((pvalue between scope_l and scope_h) or scope_l is null)" +
      s" group by scene_id,item_id,assetid,createTime,scope_h2,flag having (case when flag=1 then maxval else avgval end)>scope_h2")
      //emptyDf = emptyDf.union(etlDF)
    //}
    //emptyDf.createOrReplaceTempView("t_etl")
      etlDF.createOrReplaceTempView("t_etl")
    // Attach the fixed bookkeeping columns expected by the target table.
    val resultDf =ss.sql("select scene_id,item_id,assetid,stat_time,maxval,avgval" +
      ",'GZB' project_id,'10000' org_id,'1' sub_org_id,'' create_person,now() create_time,'' modify_person,now() modify_time"+
      " from t_etl a")
    //resultDf.show()
    // Monthly target table, suffixed with yyyyMM of the start date.
    val tablename="hms_health_evalut_mech"+"_"+startmth
    // Idempotent re-run: remove previously written rows for this asset/date range
    // before appending fresh results.
    val delString=s"delete from $tablename where sub_org_id='1' and assetid='$item' and stat_time between '$startTime' and '$endTime'"
    MysqlDml.delete2(delString)
      //assetid='$id' and
    // Write results to MySQL (append after the targeted delete above).
    resultDf.write
      .format("jdbc")
      .mode("append")
      .option("url",MysqlDml.url2)
      .option("dbtable",tablename)
      .option("user",MysqlDml.user2)
      .option("password",MysqlDml.password2)
      .option("driver",MysqlDml.driver2)
      .save()
    }
  //}

  // Carbon-brush calculation (sub_org_id='2').
  /**
   * Detects carbon-brush "spark" events for item B.2-1.3.1.1-1 and writes a
   * per-day maximum of the per-hour event count.
   *
   * Event definition (from the SQL below): a reading above 130 whose previous
   * reading was not above 130, paired with a not-above-130 reading taken within
   * the preceding 300 seconds whose value is more than 50 lower. Events are then
   * grouped into 5-minute buckets per point and hour, counted once per bucket,
   * summed per hour, and reduced to the daily maximum.
   *
   * @param ss     active SparkSession
   * @param sc     SparkContext (forwarded to GetData)
   * @param hTable DAO used by GetData to fetch raw readings
   * @param time   two-element array: time(0)=start, time(1)=end, "yyyy/MM/dd" format
   * @param step   sampling step forwarded to GetData.getNoLoop
   */
  def calCarbonBrush(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step : Int): Unit = {
    import ss.implicits._
    val startTime = time(0)
    val endTime = time(1)
    // Input dates are "yyyy/MM/dd"; startmth ("yyyyMM") selects the monthly result table.
    val sdf = new SimpleDateFormat("yyyy/MM/dd")
    val yyyymm = new SimpleDateFormat("yyyyMM")
    var startmth = yyyymm.format(sdf.parse(startTime))
    // Read metadata tables from MySQL and expose them as temp views.
    val jdbcDF = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_scene_point")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()

    jdbcDF.createOrReplaceTempView("hms_cm_scene_point")

    val jdbcDF2 = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_scene_item")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()

    jdbcDF2.createOrReplaceTempView("hms_cm_scene_item")

    val jdbcDF3 = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_point")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()

    jdbcDF3.createOrReplaceTempView("hms_cm_point")

    // NOTE(review): duplicate of the import at the top of this method.
    import ss.implicits._
    // Points belonging to the single carbon-brush item B.2-1.3.1.1-1 (no asset filter here).
    val pointDF = ss.sql("select a.scene_id,a.item_id,a.parent_assetid assetid,a.point_code,c.scope_l,c.scope_h" +
      " from hms_cm_scene_point a join hms_cm_scene_item b " +
      " on a.item_id=b.item_id and a.scene_id=b.scene_id " +
      "and b.item_no in ('B.2-1.3.1.1-1') " +
      s" left join hms_cm_point c on a.point_code=c.point_code").cache()
    pointDF.createOrReplaceTempView("t_point")

    //val assetid : Array[String] =pointDF.map(x=>x.getString(2)).dropDuplicates().collect()
    //val assetid : Array[String] =pointDF.map(x=>x.getString(1)).dropDuplicates().collect()

    //for(id <- assetid){
    // Collect every point_code (column index 3 of pointDF).
    val point_code: Array[String] = pointDF.map(x=>x.getString(3)).collect()
    //val point_code: Array[String] = pointDF.filter($"item_id"===id).map(x=>x.getString(3)).collect()
     //.filter($"point_code"==="QX_EJ_7F_FDJDQ_AI00065")
    // Debug: print the point codes being processed.
    for(i <- 0 until  point_code.length){println(point_code(i))}
    //GetData.getBySecond(ss,sc,hTable,point_code,time,step,1)
    // Loads raw readings into the temp view t_ods_hbase_edos_tmp (consumed below).
    GetData.getNoLoop(ss,sc,hTable,point_code,time,step)

    // Per reading: hour bucket (substr(createtime,1,13)), exceed flag
    // (0 = value above 130, 1 = not above), and the previous reading's flag
    // via lag() per point ordered by time. Out-of-range values are filtered
    // unless no lower bound is defined.
    val etlDF= ss.sql("select scene_id,item_id,assetid,point_code,createtime,substr(createtime,1,13) stat_time,pvalue " +
      ",case when pvalue>130 then 0 else 1 end isexceed_flag"+
      ",lag(case when pvalue>130 then 0 else 1 end,1) over(partition by point_code order by createtime) isexceed_flag_lag "+
      " from t_point a join t_ods_hbase_edos_tmp b on a.point_code=b.checkPoint" +
      s" where ((pvalue between scope_l and scope_h) or scope_l is null)" )

    etlDF.createOrReplaceTempView("t_etl")
    // t_etl2_1: candidate spark events — a just crossed above 130 (flag=0, previous
    // flag=1) and some non-exceeding reading b of the same point within the
    // preceding 300 s is more than 50 lower.
    ss.sql("select a.scene_id,a.item_id,a.assetid,a.point_code,a.createtime,a.stat_time" +
      " from t_etl a join t_etl b " +
      " on a.assetid=b.assetid and a.point_code=b.point_code and a.stat_time=b.stat_time and b.isexceed_flag=1" +
      " and unix_timestamp(a.createtime, 'yyyy/MM/dd HH:mm:ss')< unix_timestamp(b.createtime, 'yyyy/MM/dd HH:mm:ss') + 300" +
      " and a.createtime>b.createtime" +
      " where a.isexceed_flag=0 and a.isexceed_flag_lag=1 and a.pvalue-b.pvalue>50" +
      " group by a.scene_id,a.item_id,a.assetid,a.point_code,a.createtime,a.stat_time").createOrReplaceTempView("t_etl2_1")

    // t_etl2_2: previous event time per point and hour (for gap computation).
    ss.sql("select a.scene_id,a.item_id,a.assetid,a.point_code,a.createtime,a.stat_time" +
      " ,lag(createtime,1) over(partition by point_code,stat_time order by createtime) createtime_lag" +

      " from t_etl2_1 a " ).createOrReplaceTempView("t_etl2_2")

    // t_etl2_3: running sum of inter-event gaps; fiveminute_flag groups events
    // into 300-second buckets within each point/hour.
    ss.sql("select a.scene_id,a.item_id,a.assetid,a.point_code,a.createtime,a.stat_time,createtime_lag" +
      " ,sum(unix_timestamp(createtime, 'yyyy/MM/dd HH:mm:ss')-unix_timestamp(createtime_lag, 'yyyy/MM/dd HH:mm:ss')) over(partition by point_code,stat_time order by createtime) time_interval" +
      " ,floor(sum(unix_timestamp(createtime, 'yyyy/MM/dd HH:mm:ss')-unix_timestamp(nvl(createtime_lag,createtime), 'yyyy/MM/dd HH:mm:ss')) over(partition by point_code,stat_time order by createtime)/300) fiveminute_flag" +
      " from t_etl2_2 a " ).createOrReplaceTempView("t_etl2_3")

    // t_etl2: row_number within each 5-minute bucket so each bucket is counted once (rn=1).
    ss.sql("select a.scene_id,a.item_id,a.assetid,a.point_code,a.createtime,a.stat_time,createtime_lag,time_interval,fiveminute_flag" +
      " ,row_number() over(partition by point_code,stat_time,fiveminute_flag order by createtime) rn" +
      " from t_etl2_3 a " ).createOrReplaceTempView("t_etl2")


      //"2020/08/19 06:25:00" "2020/08/19 06:50:59"   point_code='QX_EJ_7F_FDJDQ_AI00065'
    // Sum the number of point jumps per hour under each unit.
    ss.sql("select a.scene_id,a.item_id,a.assetid,a.stat_time,sum(1) cnt " +
      " from t_etl2 a" +
      " where rn=1 " +
      " group by a.scene_id,a.item_id,a.assetid,a.stat_time").createOrReplaceTempView("t_etl3")
    /*
    Debug inspection of the intermediate views:
    ss.sql("select * from t_etl2_1 order by point_code,createtime" ).show(50)
    ss.sql("select * from t_etl2_2 order by point_code,createtime" ).show(50)
    ss.sql("select * from t_etl2_3 order by point_code,createtime" ).show(50)
    ss.sql("select * from t_etl2 order by point_code,createtime" ).show(100)
    ss.sql("select * from t_etl3 order by  stat_time" ).show(50)
    */
    // t_etl4: daily maximum of the hourly event counts.
    ss.sql("select a.scene_id,a.item_id,a.assetid,substr(a.stat_time,1,10) stat_time,max(cnt) maxval" +
      " from t_etl3 a" +
      " group by a.scene_id,a.item_id,a.assetid,substr(a.stat_time,1,10)").createOrReplaceTempView("t_etl4")

    // Attach the fixed bookkeeping columns expected by the target table.
    val resultDf =ss.sql("select scene_id,item_id,assetid,stat_time,maxval,0 avgval" +
      ",'GZB' project_id,'10000' org_id,'2' sub_org_id,'' create_person,now() create_time,'' modify_person,now() modify_time"+
      " from t_etl4 a")
    // Monthly target table, suffixed with yyyyMM of the start date.
    val tablename="hms_health_evalut_mech"+"_"+startmth

    // Idempotent re-run: remove previously written rows for this date range.
    val delString=s"delete from $tablename where sub_org_id='2' and stat_time between '$startTime' and '$endTime'"
    MysqlDml.delete2(delString)
    //assetid='$id' and
    // Write results to MySQL (append after the targeted delete above).
    resultDf.write
      .format("jdbc")
      .mode("append")
      .option("url",MysqlDml.url2)
      .option("dbtable",tablename)
      .option("user",MysqlDml.user2)
      .option("password",MysqlDml.password2)
      .option("driver",MysqlDml.driver2)
      .save()
  }

  // Oil temperature vs. winding temperature (sub_org_id='3').
  /**
   * For item B.4-1.1.1.3-1, counts per day how many timestamps had the maximum
   * oil-temperature reading ('yw') above the maximum winding-temperature
   * reading ('rz' — point_name ending in the Chinese for "winding temperature"),
   * and writes the daily counts.
   *
   * @param ss     active SparkSession
   * @param sc     SparkContext (forwarded to GetData)
   * @param hTable DAO used by GetData to fetch raw readings
   * @param time   two-element array: time(0)=start, time(1)=end, "yyyy/MM/dd" format
   * @param step   sampling step forwarded to GetData.getBySecond
   */
  def calOilCoilTemperature(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step : Int): Unit = {
    import ss.implicits._
    val startTime = time(0)
    val endTime = time(1)
    // Input dates are "yyyy/MM/dd"; startmth ("yyyyMM") selects the monthly result table.
    val sdf = new SimpleDateFormat("yyyy/MM/dd")
    val yyyymm = new SimpleDateFormat("yyyyMM")
    var startmth = yyyymm.format(sdf.parse(startTime))

    // Read metadata tables from MySQL and expose them as temp views.
    val jdbcDF = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_scene_point")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()

    jdbcDF.createOrReplaceTempView("hms_cm_scene_point")

    val jdbcDF2 = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_scene_item")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()

    jdbcDF2.createOrReplaceTempView("hms_cm_scene_item")

    val jdbcDF3 = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_point")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()

    jdbcDF3.createOrReplaceTempView("hms_cm_point")

    // NOTE(review): duplicate of the import at the top of this method.
    import ss.implicits._
    // Points belonging to the oil/winding-temperature item B.4-1.1.1.3-1.
    val pointDF = ss.sql("select a.scene_id,a.item_id,a.parent_assetid assetid,a.point_code,a.point_name,c.scope_l,c.scope_h" +
      " from hms_cm_scene_point a join hms_cm_scene_item b " +
      " on a.item_id=b.item_id and a.scene_id=b.scene_id " +
      "and b.item_no in ('B.4-1.1.1.3-1') " +
      s" left join hms_cm_point c on a.point_code=c.point_code").cache()
    pointDF.createOrReplaceTempView("t_point")

    //val assetid : Array[String] =pointDF.map(x=>x.getString(2)).dropDuplicates().collect()
    //val assetid : Array[String] =pointDF.map(x=>x.getString(1)).dropDuplicates().collect()

    //for(id <- assetid){
    // Collect every point_code (column index 3 of pointDF).
    val point_code: Array[String] = pointDF.map(x=>x.getString(3)).collect()
    //val point_code: Array[String] = pointDF.filter($"item_id"===id).map(x=>x.getString(3)).collect()

    // Debug: print the point codes being processed.
    for(i <- 0 until  point_code.length){println(point_code(i))}
    // Loads raw readings into the temp view t_ods_hbase_edos_tmp (consumed below).
    GetData.getBySecond(ss,sc,hTable,point_code,time,step,1)

    // Per-timestamp max reading, split into 'rz' (point_name ends with the Chinese
    // for "winding temperature") vs 'yw' (everything else, i.e. oil temperature).
    // The Chinese literal inside the SQL is matched against DB data — do not translate it.
    val etlDF= ss.sql("select scene_id,item_id,assetid,case when substr(point_name,-4)='绕组温度' then 'rz' else 'yw' end flag,createtime stat_time,max(pvalue) pvalue_max " +
      " from t_point a join t_ods_hbase_edos_tmp b on a.point_code=b.checkPoint" +
      s" where ((pvalue between scope_l and scope_h) or scope_l is null)" +
      s" group by scene_id,item_id,assetid,case when substr(point_name,-4)='绕组温度' then 'rz' else 'yw' end,createtime" ).cache()


    etlDF.createOrReplaceTempView("t_etl")
    // Daily count of timestamps where the oil-temperature max exceeds the
    // winding-temperature max for the same asset/item/timestamp.
    ss.sql("select a.scene_id,a.item_id,a.assetid,substr(a.stat_time,1,10) stat_time,sum(1) maxval" +
      " from t_etl a left join t_etl b " +
      " on a.assetid=b.assetid and a.item_id=b.item_id and a.stat_time=b.stat_time and b.flag='rz'" +
      " where a.flag='yw' and a.pvalue_max>b.pvalue_max " +
      " group by a.scene_id,a.item_id,a.assetid,substr(a.stat_time,1,10)").createOrReplaceTempView("t_etl2")

    // Attach the fixed bookkeeping columns expected by the target table.
    val resultDf =ss.sql("select scene_id,item_id,assetid,stat_time, maxval,0 avgval" +
      ",'GZB' project_id,'10000' org_id,'3' sub_org_id,'' create_person,now() create_time,'' modify_person,now() modify_time"+
      " from t_etl2 a")
    //resultDf.show()
    // Monthly target table, suffixed with yyyyMM of the start date.
    val tablename="hms_health_evalut_mech"+"_"+startmth
    // Idempotent re-run: remove previously written rows for this date range.
    val delString=s"delete from $tablename where sub_org_id='3' and stat_time between '$startTime' and '$endTime'"
    MysqlDml.delete2(delString)
    //assetid='$id' and
    // Write results to MySQL (append after the targeted delete above).
    resultDf.write
      .format("jdbc")
      .mode("append")
      .option("url",MysqlDml.url2)
      .option("dbtable",tablename)
      .option("user",MysqlDml.user2)
      .option("password",MysqlDml.password2)
      .option("driver",MysqlDml.driver2)
      .save()
  }

  // Infrared measurement (sub_org_id='4').
  /**
   * For a hard-coded item_id, reads hourly averages from the monthly stats table
   * hms_stat_rptdata_<yyyyMM> and flags hours whose average rose by at least 10%
   * relative to the previous hour of the same day; the relative increase is
   * written as maxval.
   *
   * @param ss     active SparkSession
   * @param sc     SparkContext (unused here apart from the signature)
   * @param hTable DAO (unused — the raw-data load below is commented out)
   * @param time   two-element array: time(0)=start, time(1)=end, "yyyy/MM/dd" format
   * @param step   unused (kept for signature consistency with the other methods)
   */
  def calInfraredTemperature(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, time: Array[String], step : Int): Unit = {
    import ss.implicits._
    val startTime = time(0)
    val endTime = time(1)
    // Input dates are "yyyy/MM/dd"; startmth ("yyyyMM") selects the monthly tables.
    val sdf = new SimpleDateFormat("yyyy/MM/dd")
    val yyyymm = new SimpleDateFormat("yyyyMM")
    val startmth = yyyymm.format(sdf.parse(startTime))
//    val startTimeStamp = sdf.parse(startTime).getTime()
//    val startTime2 = sdf.format(new Date(startTimeStamp - 3600 * 24 * 1000))
    // Read metadata from MySQL and expose it as a temp view.
    val jdbcDF = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url2)
      .option("dbtable", "hms_cm_scene_point")
      .option("user", MysqlDml.user2)
      .option("password", MysqlDml.password2)
      .option("driver", MysqlDml.driver2)
      .load()

    jdbcDF.createOrReplaceTempView("hms_cm_scene_point")

    // Monthly pre-aggregated stats table; note this uses the primary connection
    // (url/user/...) rather than the secondary one used elsewhere in this file.
    val jdbcDF4 = ss.read
      .format("jdbc")
      .option("url", MysqlDml.url)
      .option("dbtable", "hms_stat_rptdata_"+startmth)
      .option("user", MysqlDml.user)
      .option("password", MysqlDml.password)
      .option("driver", MysqlDml.driver)
      .load()

    jdbcDF4.createOrReplaceTempView("hms_stat_rptdata")

    // NOTE(review): duplicate of the import at the top of this method.
    import ss.implicits._
    // Points of one hard-coded scene item.
    // NOTE(review): magic item_id — presumably the infrared-measurement item
    // (cf. the commented-out item_no B.4-1.5.8.2-1 below); confirm and consider
    // promoting to a named constant.
    val pointDF = ss.sql("select a.scene_id,a.item_id,a.parent_assetid assetid,a.point_code,a.point_name" +
      " from hms_cm_scene_point a " +
      "where a.item_id='6545f19f7dce41c9be00d23af0314844'" ).cache()
    //b.item_no in ('B.4-1.5.8.2-1')
    pointDF.createOrReplaceTempView("t_point")
    pointDF.show()
    /*
    val point_code: Array[String] = pointDF.map(x=>x.getString(3)).collect()

    for(i <- 0 until  point_code.length){println(point_code(i))}
    GetData.getBySecond(ss,sc,hTable,point_code,time,step,1)
*/
    // Hourly averages for these points within [startTime, endTime]; stat_date in
    // the stats table is "yyyyMMdd", hence the slash-stripping regexp_replace.
    val etlDF= ss.sql("select scene_id,item_id,assetid,a.point_code,b.stat_date,b.hour,b.avgval " +
      " from t_point a join hms_stat_rptdata b on a.point_code=b.point_code and b.stat_type='hour' " +
      s" and stat_date between regexp_replace('$startTime','/','') and regexp_replace('$endTime','/','')" ).cache()

    etlDF.createOrReplaceTempView("t_etl")
    // Hour-over-hour relative increase within the same day; keep rows with a rise
    // of at least 10%. stat_time is rebuilt as "yyyy/MM/dd HH:00:00"; when the
    // previous hour is missing, nvl makes the ratio 0, so such rows are dropped.
    ss.sql("select a.scene_id,a.item_id,a.assetid,concat(substr(a.stat_date,1,4),'/',substr(a.stat_date,5,2),'/',substr(a.stat_date,7,2),' ',lpad(a.hour,2,'0'),':00:00')  as stat_time,(a.avgval-nvl(b.avgval,a.avgval))/nvl(b.avgval,a.avgval) as maxval" +
      " from t_etl a left join t_etl b " +
      " on a.assetid=b.assetid and a.item_id=b.item_id and a.stat_date=b.stat_date and b.hour+1=a.hour" +
      " where (a.avgval-nvl(b.avgval,a.avgval))/nvl(b.avgval,a.avgval)>=0.1 ").createOrReplaceTempView("t_etl2")

    // Attach the fixed bookkeeping columns expected by the target table.
    val resultDf =ss.sql("select scene_id,item_id,assetid,stat_time, maxval,0 avgval" +
      ",'GZB' project_id,'10000' org_id,'4' sub_org_id,'' create_person,now() create_time,'' modify_person,now() modify_time"+
      " from t_etl2 a")
    resultDf.show()
    // Monthly target table, suffixed with yyyyMM of the start date.
    val tablename="hms_health_evalut_mech"+"_"+startmth
    // Idempotent re-run: remove previously written rows for this date range.
    val delString=s"delete from $tablename where sub_org_id='4' and substr(stat_time,1,10) between '$startTime' and '$endTime'"
    MysqlDml.delete2(delString)
    //assetid='$id' and
    // Write results to MySQL (append after the targeted delete above).
    resultDf.write
      .format("jdbc")
      .mode("append")
      .option("url",MysqlDml.url2)
      .option("dbtable",tablename)
      .option("user",MysqlDml.user2)
      .option("password",MysqlDml.password2)
      .option("driver",MysqlDml.driver2)
      .save()
  }

}
