
package org.jxkj.app

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SparkSession, types}
import org.apache.spark.sql.types.{StructField, StructType}
import org.jxkj.bean.PointData
import org.jxkj.data.IDaoProvider
import org.jxkj.webapi.WebAPI
import java.text.SimpleDateFormat
import java.util
import java.util.{Calendar, Date, GregorianCalendar}

import org.jxkj.client.PmuClient

import scala.collection.JavaConversions._

object GetData {

  /**
   * Fetches raw collected data for each point over [time(0), time(1)], stepping
   * through the range `step` hours at a time, and registers the result as temp
   * views `t_ods_hbase_edos_tmp` and `t_ods_hbase_edos` (the latter with an
   * extra yyyymmdd `day` column derived from createTime).
   *
   * @param ss        active SparkSession
   * @param sc        SparkContext used to parallelize the fetched lists
   * @param hTable    DAO used to read point data (getByRegion)
   * @param pointCode point codes to fetch
   * @param time      two-element array: start and end, "yyyy/MM/dd HH:mm:ss"
   * @param step      window size in hours for each fetch
   */
  def get(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String], step: Int): Unit = {

    import ss.implicits._
    // Times are compared as strings; valid because "yyyy/MM/dd HH:mm:ss"
    // sorts lexicographically in chronological order.
    var startTime = time(0)
    val endTime = time(1)
    val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")

    var emptyRDD = sc.emptyRDD[PointData]
    // Walk the date range with a mutable calendar seeded from the start time.
    val calendar = new GregorianCalendar
    calendar.setTime(sdfs.parse(startTime))

    // Loop over the date range, fetching one `step`-hour window per iteration.
    while (startTime <= endTime) {
      calendar.add(Calendar.HOUR_OF_DAY, step)
      val nextTime = sdfs.format(calendar.getTime())
      println("nexttime " + nextTime)
      // Step back one second for the inclusive end of this window, clamped to endTime.
      calendar.add(Calendar.SECOND, -1)
      val current = sdfs.format(calendar.getTime())
      val midTime = if (current <= endTime) current else endTime
      calendar.add(Calendar.SECOND, 1)
      println(startTime)
      println(midTime)

      for (pointName <- pointCode) {
        // Fetch up to nextTime so the on-the-hour value of the next window is included.
        val ori: util.List[PointData] = hTable.getByRegion(pointName, startTime, nextTime)
        emptyRDD = emptyRDD.union(sc.parallelize(ori, 1))
      }

      println("point loop end")
      startTime = nextTime
    }
    println("date loop end")
    val edosDF = emptyRDD.map(x => ColumnType(x.getName, x.getTime, x.getValue.toFloat)).toDF()

    edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")
    ss.sql("select checkPoint,createTime,pvalue,regexp_replace(substr(createTime,1,10),'/','') as day from t_ods_hbase_edos_tmp").createOrReplaceTempView("t_ods_hbase_edos")

  }


  /**
   * Day-granularity variant of the raw-data fetch (original note: "by day, 2021-01-25").
   * Steps through [time(0), time(1)] in `step`-day windows, reading each point via
   * getByRegionByDay with the window expanded to full days (00:00:00 .. 23:59:59),
   * and registers temp views `t_ods_hbase_edos_tmp` and `t_ods_hbase_edos`.
   *
   * @param ss        active SparkSession
   * @param sc        SparkContext used to parallelize the fetched lists
   * @param hTable    DAO providing getByRegionByDay
   * @param pointCode point codes to fetch
   * @param time      two-element array: start and end, "yyyy/MM/dd"
   * @param step      window size in days
   */
  def getByDay(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String], step: Int): Unit = {

    import ss.implicits._
    // Dates compare correctly as strings: "yyyy/MM/dd" sorts lexicographically.
    var startTime = time(0)
    val endTime = time(1)
    val sdf = new SimpleDateFormat("yyyy/MM/dd")

    var emptyRDD = sc.emptyRDD[PointData]
    // Walk the date range with a mutable calendar seeded from the start time.
    val calendar = new GregorianCalendar
    calendar.setTime(sdf.parse(startTime))

    while (startTime <= endTime) {
      calendar.add(Calendar.DATE, step)
      val nextTime = sdf.format(calendar.getTime())
      println("nexttime " + nextTime)
      // Step back one day for the inclusive end of this window, clamped to endTime.
      calendar.add(Calendar.DATE, -1)
      val current = sdf.format(calendar.getTime())
      val midTime = if (current <= endTime) current else endTime
      calendar.add(Calendar.DATE, 1)
      println(startTime)
      println(midTime)

      for (pointName <- pointCode) {
        // Expand the day window to full-day timestamps for the DAO query.
        val ori: util.List[PointData] = hTable.getByRegionByDay(pointName, startTime.concat(" 00:00:00"), midTime.concat(" 23:59:59"))
        emptyRDD = emptyRDD.union(sc.parallelize(ori, 1))
      }

      println("point loop end")
      startTime = nextTime
    }
    println("date loop end")
    val edosDF = emptyRDD.map(x => ColumnType(x.getName, x.getTime, x.getValue.toFloat)).toDF()

    edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")
    ss.sql("select checkPoint,createTime,pvalue,regexp_replace(substr(createTime,1,10),'/','') as day from t_ods_hbase_edos_tmp").createOrReplaceTempView("t_ods_hbase_edos")

  }

  /**
   * Single-shot variant (no date-window loop); history: getByRegion -> getWebData.
   * Fetches the whole [time(0), time(1)] range per point via the DAO and registers
   * temp views `t_ods_hbase_edos_tmp` and `t_ods_hbase_edos`.
   *
   * @param ss        active SparkSession
   * @param sc        SparkContext used to parallelize the fetched lists
   * @param hTable    DAO providing getWebData
   * @param pointCode point codes to fetch
   * @param time      two-element array: start and end time strings
   * @param step      unused; kept for signature compatibility with the other fetchers
   */
  def getNoLoop(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String], step: Int): Unit = {

    import ss.implicits._
    val startTime = time(0)
    val endTime = time(1)

    var emptyRDD = sc.emptyRDD[PointData]

    println(startTime)
    println(endTime)

    for (pointName <- pointCode) {
      val ori = hTable.getWebData(pointName, startTime, endTime)
      emptyRDD = emptyRDD.union(sc.parallelize(ori, 10))
    }

    val edosDF = emptyRDD.map(x => ColumnType(x.getName, x.getTime, x.getValue.toFloat)).toDF()

    edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")
    ss.sql("select checkPoint,createTime,pvalue,regexp_replace(substr(createTime,1,10),'/','') as day from t_ods_hbase_edos_tmp").createOrReplaceTempView("t_ods_hbase_edos")

  }

  /**
   * Single-shot fetch using getWebDataAndLast (also returns the last value before
   * the range, per the DAO name — confirm against IDaoProvider). Registers temp
   * views `t_ods_hbase_edos_tmp` and `t_ods_hbase_edos`.
   *
   * @param ss        active SparkSession
   * @param sc        SparkContext used to parallelize the fetched lists
   * @param hTable    DAO providing getWebDataAndLast
   * @param pointCode point codes to fetch
   * @param time      two-element array: start and end time strings
   * @param step      unused; kept for signature compatibility with the other fetchers
   */
  def getNoLoopFromWeb(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String], step: Int): Unit = {

    import ss.implicits._
    val startTime = time(0)
    val endTime = time(1)

    var emptyRDD = sc.emptyRDD[PointData]

    println(startTime)
    println(endTime)

    for (pointName <- pointCode) {
      // 2021-01-14: times are passed through as-is (no " 00:00:00"/" 23:59:59" padding).
      val ori = hTable.getWebDataAndLast(pointName, startTime, endTime)
      emptyRDD = emptyRDD.union(sc.parallelize(ori, 1))
    }

    val edosDF = emptyRDD.map(x => ColumnType(x.getName, x.getTime, x.getValue.toFloat)).toDF()

    edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")
    ss.sql("select checkPoint,createTime,pvalue,regexp_replace(substr(createTime,1,10),'/','') as day from t_ods_hbase_edos_tmp").createOrReplaceTempView("t_ods_hbase_edos")

  }

  /**
   * Fetches high-frequency (millisecond-resolution) PMU data for each point over
   * [time(0), time(1)] and registers it as temp view `t_ods_hbase_edos_tmp`.
   *
   * @param ss         active SparkSession
   * @param sc         SparkContext used to parallelize the fetched sequences
   * @param pointCodes point codes to fetch
   * @param time       two-element array: start and end as java.util.Date
   */
  def getByMillisFromPmu(ss: SparkSession, sc: SparkContext, pointCodes: Array[String], time: Array[Date]): Unit = {
    import ss.implicits._
    // Epoch-millisecond bounds for the PMU query.
    val startTs = time(0).getTime
    val endTs = time(1).getTime

    val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss.SSS")

    var emptyRDD = sc.emptyRDD[ColumnType3]

    for (pointCode <- pointCodes) {
      // Log: "calling the API to fetch PMU high-frequency data".
      println("调用接口获取PMU高频数据 -- " + pointCode + " -- " +  time(0).toString + " --" + time(1).toString)
      val dto = PmuClient.getMillisData(pointCode, startTs, endTs)
      // Skip points for which the client returned no DTO.
      if (dto != null) {
        val dataMap = dto.getData
        // Entries appear to be epoch-millis -> value; the key is formatted back to
        // a millisecond timestamp string. NOTE(review): `.map` on dataMap relies on
        // the file's implicit JavaConversions import — confirm dataMap's type.
        val seq = dataMap.map(x => ColumnType3(pointCode, sdfs.format(new Date(x._1)), x._2)).toSeq
        val rdd = sc.parallelize(seq)
        emptyRDD = emptyRDD.union(rdd)
      }
    }
    val edosDF = emptyRDD.toDF()
    // Log: "finished fetching high-frequency data for all points".
    println("所有测点获取高频数据结束")
    edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")
  }

  /**
   * Fetches gap-filled per-second values for each point (via getCompleteData with
   * an explicit sampling interval) and registers temp view `t_ods_hbase_edos_tmp`.
   *
   * @param ss        active SparkSession
   * @param sc        SparkContext used to parallelize the fetched lists
   * @param hTable    DAO providing getCompleteData
   * @param pointCode point codes to fetch
   * @param time      two-element array: start and end, "yyyy/MM/dd HH:mm:ss"
   * @param step      when 1, advance `step` days per window; otherwise one hour
   * @param interval  sampling interval passed through to the DAO
   */
  def getBySecond(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String], step: Int, interval: Int): Unit = {

    import ss.implicits._
    // Times compare correctly as strings: "yyyy/MM/dd HH:mm:ss" sorts lexicographically.
    var startTime = time(0)
    val endTime = time(1)
    val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")

    var emptyRDD = sc.emptyRDD[PointData]
    // Walk the date range with a mutable calendar seeded from the start time.
    val calendar = new GregorianCalendar
    calendar.setTime(sdfs.parse(startTime))

    while (startTime <= endTime) {
      // 2021-03-05: added an hour-based mode alongside the day-based one.
      // NOTE(review): when step == 1 the window is one day; any other step value
      // advances by exactly one hour (step itself is ignored) — confirm intended.
      if (step == 1) {
        calendar.add(Calendar.DATE, step)
      } else {
        calendar.add(Calendar.HOUR_OF_DAY, 1)
      }
      val nextTime = sdfs.format(calendar.getTime())
      println("nexttime " + nextTime)
      // Step back one second for the inclusive end of this window, clamped to endTime.
      calendar.add(Calendar.SECOND, -1)
      val current = sdfs.format(calendar.getTime())
      val midTime = if (current <= endTime) current else endTime
      calendar.add(Calendar.SECOND, 1)
      println("startTime " + startTime)
      println("midtime " + midTime)

      for (pointName <- pointCode) {
        println("beginloop--" + pointName + sdfs.format(new Date))
        val ori: util.ArrayList[PointData] = hTable.getCompleteData(pointName, startTime, midTime, interval)
        emptyRDD = emptyRDD.union(sc.parallelize(ori, 1))
      }

      println("point point loop end" + sdfs.format(new Date))
      startTime = nextTime
    }

    val edosDF = emptyRDD.map(x => ColumnType2(x.getName, x.getTime, x.getValue)).toDF()
    println("date loop end" + sdfs.format(new Date))

    edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")

  }

  /**
   * Overload without an explicit sampling interval: fetches per-second complete
   * data in `step`-day windows via getCompleteData and registers temp view
   * `t_ods_hbase_edos_tmp`.
   *
   * @param ss        active SparkSession
   * @param sc        SparkContext used to parallelize the fetched lists
   * @param hTable    DAO providing getCompleteData
   * @param pointCode point codes to fetch
   * @param time      two-element array: start and end, "yyyy/MM/dd HH:mm:ss"
   * @param step      window size in days
   */
  def getBySecond(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String], step: Int): Unit = {

    import ss.implicits._
    // Times compare correctly as strings: "yyyy/MM/dd HH:mm:ss" sorts lexicographically.
    var startTime = time(0)
    val endTime = time(1)
    val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")

    var emptyRDD = sc.emptyRDD[PointData]
    // Walk the date range with a mutable calendar seeded from the start time.
    val calendar = new GregorianCalendar
    calendar.setTime(sdfs.parse(startTime))

    while (startTime <= endTime) {
      calendar.add(Calendar.DATE, step)
      val nextTime = sdfs.format(calendar.getTime())
      println("nexttime " + nextTime)
      // Step back one second for the inclusive end of this window, clamped to endTime.
      calendar.add(Calendar.SECOND, -1)
      val current = sdfs.format(calendar.getTime())
      val midTime = if (current <= endTime) current else endTime
      calendar.add(Calendar.SECOND, 1)
      println("startTime " + startTime)
      println("midtime " + midTime)

      for (pointName <- pointCode) {
        val ori = hTable.getCompleteData(pointName, startTime, midTime)
        emptyRDD = emptyRDD.union(sc.parallelize(ori, 1))
      }

      println("point point loop end")
      startTime = nextTime
    }
    println("date loop end")
    val edosDF = emptyRDD.map(x => ColumnType2(x.getName, x.getTime, x.getValue)).toDF()

    edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")

  }
  /**
   * Called by DailyGenerationCheck. Fetches snapshot values via the web API
   * (getHistSnap) in `step`-hour windows and registers temp view
   * `t_ods_hbase_edos_tmp`, then dumps it with show(1000) for debugging.
   *
   * @param ss        active SparkSession
   * @param sc        SparkContext used to parallelize the fetched lists
   * @param hTable    unused here (kept for signature compatibility); data comes from WebAPI
   * @param pointCode point codes to fetch
   * @param time      two-element array: start and end, "yyyy/MM/dd HH:mm:ss"
   * @param step      window size in hours
   * @param interval  snapshot interval in seconds
   */
  def getByWebApiTo(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String], step: Int, interval: Int): Unit = {

    import ss.implicits._
    val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")
    var startTime = time(0)
    // 2021-01-14: interval-based calculations (transformer loss / station power)
    // need one extra row, so the end time is extended by `interval` seconds.
    // 1000L avoids Int overflow for very large intervals.
    var endTime = sdfs.format(sdfs.parse(time(1)).getTime + 1000L * interval)

    var emptyRDD = sc.emptyRDD[PointData]
    // Walk the date range with a mutable calendar seeded from the start time.
    val calendar = new GregorianCalendar
    calendar.setTime(sdfs.parse(startTime))

    while (startTime <= endTime) {
      calendar.add(Calendar.HOUR_OF_DAY, step)
      val nextTime = sdfs.format(calendar.getTime())
      println("nexttime " + nextTime)
      // Step back one second for the inclusive end of this window, clamped to endTime.
      calendar.add(Calendar.SECOND, -1)
      val current = sdfs.format(calendar.getTime())
      val midTime = if (current <= endTime) current else endTime
      calendar.add(Calendar.SECOND, 1)
      println("startTime " + startTime)
      println("midtime " + midTime)

      for (pointName <- pointCode) {
        println("beginloop--" + pointName + sdfs.format(new Date))
        val web = new WebAPI()
        val ori = web.getHistSnap(pointName, startTime, midTime, interval.toString)
        emptyRDD = emptyRDD.union(sc.parallelize(ori, 1))
      }

      println("point point loop end" + sdfs.format(new Date))
      startTime = nextTime
    }

    val edosDF = emptyRDD.map(x => ColumnType2(x.getName, x.getTime, x.getValue)).toDF()
    println("date loop end" + sdfs.format(new Date))

    edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")

    // Debug dump of the fetched data.
    edosDF.show(1000)

  }

  /**
   * Fetches snapshot values via the web API (getSnapVals) in `step`-hour windows
   * and registers temp view `t_ods_hbase_edos_tmp`.
   *
   * @param ss        active SparkSession
   * @param sc        SparkContext used to parallelize the fetched lists
   * @param hTable    unused here (kept for signature compatibility); data comes from WebAPI
   * @param pointCode point codes to fetch
   * @param time      two-element array: start and end, "yyyy/MM/dd HH:mm:ss"
   * @param step      window size in hours
   * @param interval  snapshot interval in seconds
   */
  def getByWebApi(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String], step: Int, interval: Int): Unit = {

    import ss.implicits._
    val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")
    var startTime = time(0)
    // 2021-01-14: interval-based calculations need one extra row, so the end
    // time is extended by `interval` seconds. 1000L avoids Int overflow.
    var endTime = sdfs.format(sdfs.parse(time(1)).getTime + 1000L * interval)

    var emptyRDD = sc.emptyRDD[PointData]
    // Walk the date range with a mutable calendar seeded from the start time.
    val calendar = new GregorianCalendar
    calendar.setTime(sdfs.parse(startTime))

    while (startTime <= endTime) {
      calendar.add(Calendar.HOUR_OF_DAY, step)
      val nextTime = sdfs.format(calendar.getTime())
      println("nexttime " + nextTime)
      // Step back one second for the inclusive end of this window, clamped to endTime.
      calendar.add(Calendar.SECOND, -1)
      val current = sdfs.format(calendar.getTime())
      val midTime = if (current <= endTime) current else endTime
      calendar.add(Calendar.SECOND, 1)
      println("startTime " + startTime)
      println("midtime " + midTime)

      for (pointName <- pointCode) {
        println("beginloop--" + pointName + sdfs.format(new Date))
        val web = new WebAPI()
        val ori = web.getSnapVals(pointName, startTime, midTime, interval.toString)
        emptyRDD = emptyRDD.union(sc.parallelize(ori, 1))
      }

      println("point point loop end" + sdfs.format(new Date))
      startTime = nextTime
    }

    val edosDF = emptyRDD.map(x => ColumnType2(x.getName, x.getTime, x.getValue)).toDF()
    println("date loop end" + sdfs.format(new Date))

    edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")

  }

  /**
   * Fetches the maximum value of each point over [startTime, endTime] via the
   * web API and exposes it as temp view `t_ods_hbase_edos_max_value` with
   * pvalue cast to decimal(14,4).
   */
  def getMaxValueOneDay(ss: SparkSession, sc: SparkContext, pointCodes: Array[String], startTime: String, endTime: String): Unit = {
    import ss.implicits._
    var combined = sc.emptyRDD[PointData]

    pointCodes.foreach { code =>
      val api = new WebAPI()
      // One "max" statistic record per point for the requested range.
      val maxPoint = api.getStatTypeValue(code, startTime, endTime, "max")
      combined = combined.union(sc.parallelize(Seq(maxPoint), 1))
    }

    val maxDf = combined.map(p => ColumnType2(p.getName, p.getTime, p.getValue)).toDF()
    maxDf.createOrReplaceTempView("t_ods_hbase_edos_max_value")
    ss.sql("select checkPoint, createTime, cast(pvalue as decimal(14,4)) from t_ods_hbase_edos_max_value")
      .createOrReplaceTempView("t_ods_hbase_edos_max_value")
  }

  /**
   * Day-stepped web-API fetch, kept separate so the original "16 smart meter
   * reading" daily computation keeps working without copying tables. Fetches
   * snapshot values (getSnapVals) in `step`-day windows and registers temp view
   * `t_ods_hbase_edos_tmp` with pvalue cast to decimal(14,4).
   *
   * @param ss        active SparkSession
   * @param sc        SparkContext used to parallelize the fetched lists
   * @param hTable    unused here (kept for signature compatibility); data comes from WebAPI
   * @param pointCode point codes to fetch
   * @param time      two-element array: start and end, "yyyy/MM/dd HH:mm:ss"
   * @param step      window size in days
   * @param interval  snapshot interval in seconds
   */
  def getByWebApiDay(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String], step: Int, interval: Int): Unit = {

    import ss.implicits._
    var startTime = time(0)
    val endTime = time(1)
    val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")

    var emptyRDD = sc.emptyRDD[PointData]
    // Walk the date range with a mutable calendar seeded from the start time.
    val calendar = new GregorianCalendar
    calendar.setTime(sdfs.parse(startTime))

    while (startTime <= endTime) {
      calendar.add(Calendar.DATE, step)
      val nextTime = sdfs.format(calendar.getTime())
      println("nexttime " + nextTime)
      // Step back one second for the inclusive end of this window, clamped to endTime.
      calendar.add(Calendar.SECOND, -1)
      val current = sdfs.format(calendar.getTime())
      val midTime = if (current <= endTime) current else endTime
      calendar.add(Calendar.SECOND, 1)
      println("startTime " + startTime)
      println("midtime " + midTime)

      for (pointName <- pointCode) {
        val web = new WebAPI()
        val ori = web.getSnapVals(pointName, startTime, midTime, interval.toString)
        emptyRDD = emptyRDD.union(sc.parallelize(ori, 1))
      }

      println("point point loop end" + sdfs.format(new Date))
      startTime = nextTime
    }

    val edosDF = emptyRDD.map(x => ColumnType2(x.getName, x.getTime, x.getValue)).toDF()
    println("date loop end" + sdfs.format(new Date))

    edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")
    ss.sql("select checkPoint, createTime, cast(pvalue as decimal(14,4)) from t_ods_hbase_edos_tmp")
      .createOrReplaceTempView("t_ods_hbase_edos_tmp")
  }

  /**
   * Fetches complete data in `step`-day windows, aggregates per point and hour
   * (max/min/sum/count) in Spark SQL, and accumulates the aggregates into temp
   * view `t_etl_hive_edos`.
   *
   * @param ss        active SparkSession
   * @param sc        SparkContext used to parallelize the fetched lists
   * @param hTable    DAO providing getCompleteData
   * @param pointCode point codes to fetch
   * @param time      two-element array: start and end, "yyyy/MM/dd"
   * @param step      window size in days
   */
  def getGroupBy(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String], step: Int): Unit = {

    import ss.implicits._
    // Dates compare correctly as strings: "yyyy/MM/dd" sorts lexicographically.
    var startTime = time(0)
    val endTime = time(1)
    val sdf = new SimpleDateFormat("yyyy/MM/dd")

    // Schema of the accumulated hourly-aggregate DataFrame.
    val schema = StructType(
      Seq(
        StructField("point_code", types.StringType, true),
        StructField("date_time", types.StringType, true),
        StructField("maximum", types.FloatType, true),
        StructField("minimum", types.FloatType, true),
        StructField("num", types.FloatType, true),
        StructField("cnt", types.IntegerType, true),
        StructField("day", types.StringType, true)
      )
    )
    var emptyDf = ss.createDataFrame(sc.emptyRDD[Row], schema)

    // Walk the date range with a mutable calendar seeded from the start time.
    val calendar = new GregorianCalendar
    calendar.setTime(sdf.parse(startTime))

    while (startTime <= endTime) {
      calendar.add(Calendar.DATE, step)
      val nextTime = sdf.format(calendar.getTime())
      println("nexttime " + nextTime)
      // Step back one day for the inclusive end of this window, clamped to endTime.
      calendar.add(Calendar.DATE, -1)
      val current = sdf.format(calendar.getTime())
      val midTime = if (current <= endTime) current else endTime
      calendar.add(Calendar.DATE, 1)
      println(startTime)
      println(midTime)

      for (pointName <- pointCode) {
        // Expand the day window to full-day timestamps for the DAO query.
        val ori: util.List[PointData] = hTable.getCompleteData(pointName, startTime + " 00:00:00", midTime.concat(" 23:59:59"))
        val rdd: RDD[PointData] = sc.parallelize(ori, 10)

        val edosDF = rdd.map(x => ColumnType(x.getName, x.getTime, x.getValue.toFloat)).toDF()
        edosDF.createOrReplaceTempView("t_ods_hbase_edos")

        // Hourly aggregate: max/min/sum/count grouped by point and hour
        // (substr(createTime,1,13) keeps "yyyy/MM/dd HH").
        val etl2Df = ss.sql("select checkPoint as point_code,substr(createTime,1,13) as date_time" +
          ",max(pvalue) maximum ,min(pvalue) minimum ,sum(pvalue) num ,sum(1) cnt,regexp_replace(substr(createTime,1,10),'/','') as day" +
          " from  t_ods_hbase_edos group by checkPoint,substr(createTime,1,13),regexp_replace(substr(createTime,1,10),'/','')"
        )
        emptyDf = emptyDf.union(etl2Df)
      }
      println("point loop end")
      startTime = nextTime
    }
    println("date loop end")
    emptyDf.createOrReplaceTempView("t_etl_hive_edos")
  }
  //getByRegion

  //  20210114注释
  /*

    def getAndFillToHive2(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String],step: Int,tableName:String): Unit = {

      val schema = StructType(
        Seq(
          StructField("checkPoint", types.StringType, true),
          StructField("createTime", types.StringType, true),
          StructField("pvalue", types.FloatType, true)))


      import ss.implicits._
      var startTime = time(0)
      val firstTime = time(0)
      val endTime = time(1)
      val sdf = new SimpleDateFormat("yyyy/MM/dd")
      val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")
      //val sdfH =new SimpleDateFormat("yyyy/MM/dd HH")
      //字符串 转date类型
      val dayDate = sdf.parse(startTime)
      val calendar = new GregorianCalendar
      calendar.setTime(dayDate)
      ///////日期区间循环取数
      ss.sql("use default")
      //ss.sql("truncate table t_ods_hbase_edos")
      while (startTime <= endTime) {
        //生成补全表 dayDate = new SimpleDateFormat("yyyy/MM/dd").parse("2019/11/08")

        //data时间+1M
        //calendar.add(Calendar.MONTH, 1)
        calendar.add(Calendar.DATE, step)
        val nextTime = sdf.format(calendar.getTime())
        println("nexttime "+nextTime)
        calendar.add(Calendar.DATE,-1)
        //val current: Date = calendar.getTime()
        val current = sdf.format(calendar.getTime())
        val midTime = current match
        {case current if(current<=endTime) =>current
          case _ =>endTime
        }
        calendar.add(Calendar.DATE,1)
        println(startTime)
        println(midTime)
        ///////日期区间循环取数
        //生成补全表 dayDate = new SimpleDateFormat("yyyy/MM/dd").parse("2019/11/08")
        var startTimeStamp = sdf.parse(startTime).getTime()-1000
        val endTimeStamp = sdf.parse(midTime).getTime()
        var startTimeStampToDate = new Date(startTimeStamp)
        var list = new util.ArrayList[String]()
        val lastSecondBeforeDay=sdfs.format(startTimeStampToDate)
        list.add(lastSecondBeforeDay)
        println("lastSecondBeforeDay--"+lastSecondBeforeDay)
        while (startTimeStamp<endTimeStamp+3600*24*1000-1000) {
          startTimeStamp = startTimeStamp + 1000
          startTimeStampToDate = new Date(startTimeStamp)
          list.add(sdfs.format(startTimeStampToDate))
        }
        println(sdfs.format(startTimeStampToDate))
        var timeRdd = sc.parallelize(list)
        var timeDF = {
          timeRdd.map(x => {
            TimeType(x)
          }).toDF()
        }
        //timeDF.orderBy(desc("today")).show()
        timeDF.createOrReplaceTempView("t_ods_time")

        var structDf = ss.sql(s"select a.point_code,b.today,scope_h,scope_l from $tableName a left join t_ods_time b on 1=1")
        structDf.createOrReplaceTempView("t_ods_point_time")

        var emptyDf = ss.createDataFrame(sc.emptyRDD[Row], schema)
        var emptyRDD = sc.emptyRDD[PointData]

        for(pointName <- pointCode){
        val ori: util.List[PointData] = IDaoProvider.getByRegion(pointName, startTime + " 00", midTime.concat(" 24"))
           val rdd: RDD[PointData] = sc.parallelize(ori,10)
          emptyRDD = emptyRDD.union(rdd)
        }

        val edosDF = {
          emptyRDD.map(x => {
            ColumnType(x.getName, x.getTime, x.getValue.toFloat)
          }).toDF()
        }
        //edosDF.filter(_.getString(0)=="JKXT_EJ_AI03076").sort("createTime").show(500)

        edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")

        val etlDf = ss.sql("select a.point_code as checkPoint,a.today as createTime," +
          "last_value(b.pvalue,true) over(partition by a.point_code order by a.today rows between unbounded preceding and current row) as pvalue" +
          s",scope_h,scope_l from t_ods_point_time a left join (select * from t_ods_hbase_edos_tmp union all select checkPoint,createTime,pvalue from t_ods_hbase_edos where day=regexp_replace(substr('$lastSecondBeforeDay',1,10),'/','') and createTime='$lastSecondBeforeDay') b on a.point_code=b.checkPoint and a.today=b.createTime")


        //etlDf.filter($"pvalue"===3276.699951171875).orderBy(col("createTime")).show(500)
         //emptyDf = emptyDf.union(etlDf)
        etlDf.createOrReplaceTempView("t_etl_hbase_edos")
        //ss.sql("set hive.exec.dynamic.partition=true")
        //ss.sql("set hive.exec.dynamic.partition.mode=nonstrict")
        ss.sql("insert overwrite table t_ods_hbase_edos partition(day) " +
          "select checkPoint,createTime,pvalue,regexp_replace(substr(createTime,1,10),'/','') day from t_etl_hbase_edos" +
          s" where substr(createTime,1,10) between '$startTime' and '$midTime' and pvalue between coalesce(scope_l,-1000) and coalesce(scope_h,2000)")

        ///按小时汇总
        /*
        val etl2Df = ss.sql("select checkPoint as point_code,substr(createTime,1,13) as date_time" +
          ",max(pvalue) maximum ,min(pvalue) minimum ,sum(pvalue) num ,sum(1) cnt,regexp_replace(substr(createTime,1,10),'/','') as day" +
          " from  t_etl_hbase_edos group by checkPoint,substr(createTime,1,13),regexp_replace(substr(createTime,1,10),'/','')"
        )
        etl2Df.createOrReplaceTempView("t_gdl_hbase_edos")
        ss.sql("insert overwrite table t_etl_hive_edos partition(day) " +
          "select point_code,date_time,maximum,minimum,num,cnt,day from t_gdl_hbase_edos")
        */
        println("succ")
        startTime = nextTime

      }
      println("loop end")

    }

    /**
     * Windowed gap-fill ETL: walks the date range [time(0), time(1)] in
     * `step`-day windows; for each window it builds a per-second time grid,
     * loads raw point samples via IDaoProvider.getByRegion, forward-fills
     * missing seconds with the last known value, writes the filled rows into
     * Hive table t_ods_hbase_edos_health (partitioned by day), and then
     * aggregates them per hour into t_etl_hive_edos.
     *
     * @param ss        Hive-enabled SparkSession ("use default" is issued below)
     * @param sc        SparkContext used to parallelize driver-side lists
     * @param hTable    DAO handle — NOTE(review): never used; the loop calls the
     *                  IDaoProvider companion object statically instead. Confirm intent.
     * @param pointCode measurement-point codes to load
     * @param time      two-element array: time(0) = start date, time(1) = end date,
     *                  both formatted "yyyy/MM/dd"
     * @param step      window width in days processed per loop iteration
     * @param tableName Hive dimension table holding the point codes (column point_code)
     */
    def getAndFillToHiveGroupBy2(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String],step: Int,tableName:String): Unit = {


      import ss.implicits._
      var startTime = time(0)
      val firstTime = time(0)
      val endTime = time(1)
      val sdf = new SimpleDateFormat("yyyy/MM/dd")
      val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")

      // NOTE(review): schema and emptyDf are built but never used in this method
      // (the emptyDf.union(etl2Df) accumulation below is commented out) — dead code.
      val schema = StructType(
        Seq(
          StructField("point_code", types.StringType, true),
          StructField("date_time", types.StringType, true),
          StructField("maximum", types.FloatType, true),
          StructField("minimum", types.FloatType, true),
          StructField("num", types.FloatType, true),
          StructField("cnt", types.IntegerType, true),
          StructField("day", types.StringType, true)
        )
      )
      var emptyDf = ss.createDataFrame(sc.emptyRDD[Row], schema)

      //val sdfH =new SimpleDateFormat("yyyy/MM/dd HH")
      // convert the start-date string to a Date to seed the window calendar
      val dayDate = sdf.parse(startTime)
      val calendar = new GregorianCalendar
      calendar.setTime(dayDate)
      /////// loop over the date range, fetching data one window at a time
      ss.sql("use default")
      //ss.sql("truncate table t_etl_hive_edos")
      // Lexicographic string comparison is valid here because both sides are
      // zero-padded "yyyy/MM/dd" strings.
      while (startTime <= endTime) {
        // build the gap-filling time grid; e.g. dayDate = new SimpleDateFormat("yyyy/MM/dd").parse("2019/11/08")

        // (earlier variant advanced by one month)
        //calendar.add(Calendar.MONTH, 1)
        calendar.add(Calendar.DATE, step)
        val nextTime = sdf.format(calendar.getTime())
        println("nexttime "+nextTime)
        calendar.add(Calendar.DATE,-1)
        //val current: Date = calendar.getTime()
        // midTime = min(last day of this window, endTime) so the final window
        // is clipped to the requested range.
        val current = sdf.format(calendar.getTime())
        val midTime = current match
        {case current if(current<=endTime) =>current
          case _ =>endTime
        }
        calendar.add(Calendar.DATE,1)
        println(startTime)
        println(midTime)
        /////// build the per-second grid for this window
        // start one second BEFORE the window so the forward-fill can be seeded
        // with the previous window's last persisted value (lastSecondBeforeDay)
        var startTimeStamp = sdf.parse(startTime).getTime()-1000
        val endTimeStamp = sdf.parse(midTime).getTime()
        var startTimeStampToDate = new Date(startTimeStamp)
        var list = new util.ArrayList[String]()
        val lastSecondBeforeDay=sdfs.format(startTimeStampToDate)
        list.add(lastSecondBeforeDay)
        println(lastSecondBeforeDay)
        // one "yyyy/MM/dd HH:mm:ss" string per second of the window —
        // ~86,400 entries per day, built on the DRIVER; watch driver memory
        // for large `step` values.
        while (startTimeStamp<endTimeStamp+3600*24*1000-1000) {
          startTimeStamp = startTimeStamp + 1000
          startTimeStampToDate = new Date(startTimeStamp)
          list.add(sdfs.format(startTimeStampToDate))
        }
        println(sdfs.format(startTimeStampToDate))
        var timeRdd = sc.parallelize(list)
        var timeDF = {
          timeRdd.map(x => {
            TimeType(x)
          }).toDF()
        }

        //timeDF.orderBy(desc("today")).show()
        timeDF.createOrReplaceTempView("t_ods_time")
        // Join with no ON condition: cartesian product point_code x second grid.
        // NOTE(review): presumably relies on spark.sql.crossJoin.enabled — confirm.
        var structDf = ss.sql(s"select a.point_code,b.today from t_ods_time b left join $tableName a")

        println("structDf：" + structDf.rdd.getNumPartitions)

        structDf.createOrReplaceTempView("t_ods_point_time")
        // Fetch the raw samples for every point in this window and union them
        // into a single RDD. " 00"/" 24" are hour suffixes bounding the region
        // scan — TODO confirm how getByRegion interprets hour "24".
        var emptyRDD = sc.emptyRDD[PointData]
        for(pointName <- pointCode) {
          val ori: util.List[PointData] = IDaoProvider.getByRegion(pointName, startTime + " 00", midTime.concat(" 24"))
          //ori.foreach(println)
          val rdd = sc.parallelize(ori,10)
          println("rdd：" + rdd.getNumPartitions)
          emptyRDD = emptyRDD.union(rdd)
        }
          val edosDF = {
            emptyRDD.map(x => {
              ColumnType(x.getName, x.getTime, x.getValue.toFloat)
            }).toDF()
          }
        //edosDF.show()
          //edosDF.filter(_.getString(0)=="JKXT_EJ_AI03076").sort("createTime").show(500)

          edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")

          println("111111111111111")
          // Forward-fill: last_value(pvalue, true) ignores nulls, carrying the
          // most recent reading forward per point over the second grid. The
          // UNION ALL branch pulls the previous window's final second back out
          // of Hive so the fill bridges window boundaries.
          val etlDf = ss.sql("select a.point_code as checkPoint,a.today as createTime," +
            "last_value(b.pvalue,true) over(partition by a.point_code order by a.today rows between unbounded preceding and current row) as pvalue" +
            s" from t_ods_point_time a left join (select * from t_ods_hbase_edos_tmp union all select checkPoint,createTime,pvalue from t_ods_hbase_edos_health where day=regexp_replace(substr('$lastSecondBeforeDay',1,10),'/','') and createTime='$lastSecondBeforeDay') b on a.point_code=b.checkPoint and a.today=b.createTime")
        println("etlDf：" + etlDf.rdd.getNumPartitions)
          //etlDf.orderBy(desc("createTime")).show(100)

          etlDf.createOrReplaceTempView("t_etl_hbase_edos")
        // Cache: t_etl_hbase_edos feeds the per-second insert below, and its
        // output table feeds the hourly rollup — avoid recomputing the window.
        ss.catalog.cacheTable("t_etl_hbase_edos")


         // Persist the filled per-second rows, dropping the seed row(s) outside
         // [startTime, midTime] via the createTime date filter.
         ss.sql("insert overwrite table t_ods_hbase_edos_health partition(day) " +
           "select checkPoint,createTime,pvalue,regexp_replace(substr(createTime,1,10),'/','') day from t_etl_hbase_edos"+
           s" where substr(createTime,1,10) between '$startTime' and '$midTime'"  )

          /// aggregate by hour
        // changed from: substr(createTime,1,10) between '$startTime' and '$midTime'
        // to the partition-pruning form:
        // day between regexp_replace('$startTime','/','') and regexp_replace('$midTime','/','')
          val etl2Df = ss.sql("select checkPoint as point_code,substr(createTime,1,13) as date_time" +
            ",max(pvalue) maximum ,min(pvalue) minimum ,sum(pvalue) num ,sum(1) cnt,regexp_replace(substr(createTime,1,10),'/','') as day" +
            s" from  t_ods_hbase_edos_health where day between regexp_replace('$startTime','/','') and regexp_replace('$midTime','/','') " +
            s" group by checkPoint,substr(createTime,1,13),regexp_replace(substr(createTime,1,10),'/','')"
          )

          //emptyDf=emptyDf.union(etl2Df)

        etl2Df.createOrReplaceTempView("t_gdl_hbase_edos")

          ss.sql("insert overwrite table t_etl_hive_edos partition(day) " +
            "select point_code,date_time,maximum,minimum,num,cnt,day from t_gdl_hbase_edos")

        ss.catalog.uncacheTable("t_etl_hbase_edos")
        println("succ")
        // advance to the next window
        startTime = nextTime

      }
      println("loop end")

    }

    /**
     * Report-scoped variant of the windowed gap-fill ETL. Differences from the
     * non-report variant visible in this file: output tables carry an extra
     * `report` partition (t_ods_hbase_edos_health_new / t_etl_hive_edos_new),
     * filled values are range-checked against per-point bounds scope_l/scope_h
     * before being persisted, and for a hard-coded set of report ids the hourly
     * rollup drops flat-lined hours (maximum == minimum).
     *
     * @param ss        Hive-enabled SparkSession ("use default" is issued below)
     * @param sc        SparkContext used to parallelize driver-side lists
     * @param hTable    DAO handle — NOTE(review): never used; the loop calls the
     *                  IDaoProvider companion object statically instead. Confirm intent.
     * @param pointCode measurement-point codes to load
     * @param time      two-element array: time(0) = start date, time(1) = end date,
     *                  both formatted "yyyy/MM/dd"
     * @param step      window width in days processed per loop iteration
     * @param tableName Hive dimension table with point_code, scope_h, scope_l
     * @param report    report id written into the `report` partition column and
     *                  interpolated (unescaped) into the SQL below
     */
    def getAndFillToHiveGroupBy3(ss: SparkSession, sc: SparkContext, hTable: IDaoProvider, pointCode: Array[String], time: Array[String],step: Int,tableName:String,report:String): Unit = {


      import ss.implicits._
      var startTime = time(0)
      val firstTime = time(0)
      val endTime = time(1)
      val sdf = new SimpleDateFormat("yyyy/MM/dd")
      val sdfs = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")

      // NOTE(review): schema and emptyDf are built but never used in this method
      // (the emptyDf.union(etl2Df) accumulation below is commented out) — dead code.
      val schema = StructType(
        Seq(
          StructField("point_code", types.StringType, true),
          StructField("date_time", types.StringType, true),
          StructField("maximum", types.FloatType, true),
          StructField("minimum", types.FloatType, true),
          StructField("num", types.FloatType, true),
          StructField("cnt", types.IntegerType, true),
          StructField("day", types.StringType, true)
        )
      )
      var emptyDf = ss.createDataFrame(sc.emptyRDD[Row], schema)

      //val sdfH =new SimpleDateFormat("yyyy/MM/dd HH")
      // convert the start-date string to a Date to seed the window calendar
      val dayDate = sdf.parse(startTime)
      val calendar = new GregorianCalendar
      calendar.setTime(dayDate)
      /////// loop over the date range, fetching data one window at a time
      ss.sql("use default")
      //ss.sql("truncate table t_etl_hive_edos")
      // Lexicographic string comparison is valid here because both sides are
      // zero-padded "yyyy/MM/dd" strings.
      while (startTime <= endTime) {
        // build the gap-filling time grid; e.g. dayDate = new SimpleDateFormat("yyyy/MM/dd").parse("2019/11/08")

        // (earlier variant advanced by one month)
        //calendar.add(Calendar.MONTH, 1)
        calendar.add(Calendar.DATE, step)
        val nextTime = sdf.format(calendar.getTime())
        println("nexttime "+nextTime)
        calendar.add(Calendar.DATE,-1)
        //val current: Date = calendar.getTime()
        // midTime = min(last day of this window, endTime) so the final window
        // is clipped to the requested range.
        val current = sdf.format(calendar.getTime())
        val midTime = current match
        {case current if(current<=endTime) =>current
          case _ =>endTime
        }
        calendar.add(Calendar.DATE,1)
        println(startTime)
        println(midTime)
        /////// build the per-second grid for this window
        // start one second BEFORE the window so the forward-fill can be seeded
        // with the previous window's last persisted value (lastSecondBeforeDay)
        var startTimeStamp = sdf.parse(startTime).getTime()-1000
        val endTimeStamp = sdf.parse(midTime).getTime()
        var startTimeStampToDate = new Date(startTimeStamp)
        var list = new util.ArrayList[String]()
        val lastSecondBeforeDay=sdfs.format(startTimeStampToDate)
        list.add(lastSecondBeforeDay)
        println(lastSecondBeforeDay)
        // one "yyyy/MM/dd HH:mm:ss" string per second of the window —
        // ~86,400 entries per day, built on the DRIVER; watch driver memory
        // for large `step` values.
        while (startTimeStamp<endTimeStamp+3600*24*1000-1000) {
          startTimeStamp = startTimeStamp + 1000
          startTimeStampToDate = new Date(startTimeStamp)
          list.add(sdfs.format(startTimeStampToDate))
        }
        println(sdfs.format(startTimeStampToDate))
        var timeRdd = sc.parallelize(list)
        var timeDF = {
          timeRdd.map(x => {
            TimeType(x)
          }).toDF()
        }

        //timeDF.orderBy(desc("today")).show()
        timeDF.createOrReplaceTempView("t_ods_time")
        // Join with no ON condition: cartesian product point_code x second grid,
        // also carrying each point's valid range (scope_h/scope_l).
        // NOTE(review): presumably relies on spark.sql.crossJoin.enabled — confirm.
        var structDf = ss.sql(s"select a.point_code,b.today,scope_h,scope_l from t_ods_time b left join $tableName a")

        //println("structDf：" + structDf.rdd.getNumPartitions)

        structDf.createOrReplaceTempView("t_ods_point_time")
        // Fetch the raw samples for every point in this window and union them
        // into a single RDD. " 00"/" 24" are hour suffixes bounding the region
        // scan — TODO confirm how getByRegion interprets hour "24".
        var emptyRDD = sc.emptyRDD[PointData]
        for(pointName <- pointCode) {
          val ori: util.List[PointData] = IDaoProvider.getByRegion(pointName, startTime + " 00", midTime.concat(" 24"))
          //ori.foreach(println)
          val rdd = sc.parallelize(ori,10)
          emptyRDD = emptyRDD.union(rdd)
        }

        val edosDF = {
          emptyRDD.map(x => {
            ColumnType(x.getName, x.getTime, x.getValue.toFloat)
          }).toDF()
        }
        //edosDF.show()
        //edosDF.filter(_.getString(0)=="JKXT_EJ_AI03076").sort("createTime").show(500)

        edosDF.createOrReplaceTempView("t_ods_hbase_edos_tmp")

        // Forward-fill: last_value(pvalue, true) ignores nulls, carrying the
        // most recent reading forward per point over the second grid. The
        // UNION ALL branch pulls the previous window's final second back out of
        // Hive (scoped to this report) so the fill bridges window boundaries.
        val etlDf = ss.sql("select a.point_code as checkPoint,a.today as createTime," +
          "last_value(b.pvalue,true) over(partition by a.point_code order by a.today rows between unbounded preceding and current row) as pvalue" +
          s",scope_h,scope_l from t_ods_point_time a left join (select * from t_ods_hbase_edos_tmp union all select checkPoint,createTime,pvalue from t_ods_hbase_edos_health_new where report='$report' and day=regexp_replace(substr('$lastSecondBeforeDay',1,10),'/','') and createTime='$lastSecondBeforeDay') b on a.point_code=b.checkPoint and a.today=b.createTime")
        //println("etlDf：" + etlDf.rdd.getNumPartitions)
        //etlDf.orderBy(desc("createTime")).show(100)

        etlDf.createOrReplaceTempView("t_etl_hbase_edos")
        //ss.catalog.cacheTable("t_etl_hbase_edos")

        // Persist the filled per-second rows, keeping only values inside the
        // point's [scope_l, scope_h] range (or all values when no range is
        // configured, i.e. scope_l is null).
        ss.sql("insert overwrite table t_ods_hbase_edos_health_new partition(report,day) " +
          s"select checkPoint,createTime,pvalue,'$report',regexp_replace(substr(createTime,1,10),'/','') day from t_etl_hbase_edos"+
          s" where substr(createTime,1,10) between '$startTime' and '$midTime' and ((pvalue between scope_l and scope_h) or scope_l is null)"  )
        println("insert succ")
        //ss.sql("select checkPoint,createTime,pvalue,'$report',regexp_replace(substr(createTime,1,10),'/','') day from t_etl_hbase_edos"+
        //s" where substr(createTime,1,10) between '$startTime' and '$midTime' and pvalue between scope_l and scope_h"  ).show()

        /// aggregate by hour
        // changed from: substr(createTime,1,10) between '$startTime' and '$midTime'
        // to the partition-pruning form:
        // day between regexp_replace('$startTime','/','') and regexp_replace('$midTime','/','')
        val etl2Df = ss.sql("select checkPoint as point_code,substr(createTime,1,13) as date_time" +
          ",max(pvalue) maximum ,min(pvalue) minimum ,sum(pvalue) num ,sum(1) cnt,report,regexp_replace(substr(createTime,1,10),'/','') as day" +
          s" from  t_ods_hbase_edos_health_new where report='$report' and day between regexp_replace('$startTime','/','') and regexp_replace('$midTime','/','') " +
          s" group by checkPoint,substr(createTime,1,13),report,regexp_replace(substr(createTime,1,10),'/','')"
        )

        //emptyDf=emptyDf.union(etl2Df)

        etl2Df.createOrReplaceTempView("t_gdl_hbase_edos")
        // Report ids whose hourly rollup should drop flat-lined hours
        // (maximum == minimum, i.e. no value change within the hour).
        // NOTE(review): hard-coded ids — presumably better sourced from config.
        val cleanSet = Array("5f4170183b284ce193c932c0bcc57083",
          "e986fc1e5b444fe09c687b6f23d485ce",
          "aa4c4594232448cba25c875619929487",
          "123aecbef8b048158ed8d31ae2be37fc",
          "e696b90495cf4a9aa5d44faf3f0294d5",
          "7a574bcd350d4e5cbc92d81f5e79c1fd",
          "a077a24eb90a447e97250f7d97725ec7",
          "f54d013fb7d24317b08e73b91ffc2c6f"
        )
        if (cleanSet.contains(report)){
        ss.sql("insert overwrite table t_etl_hive_edos_new partition(report,day) " +
          "select point_code,date_time,maximum,minimum,num,cnt,report,day from t_gdl_hbase_edos where  maximum<>minimum")
        }else{
          ss.sql("insert overwrite table t_etl_hive_edos_new partition(report,day) " +
            "select point_code,date_time,maximum,minimum,num,cnt,report,day from t_gdl_hbase_edos")
        }
        //ss.catalog.uncacheTable("t_etl_hbase_edos")
        println("succ")

        // advance to the next window
        startTime = nextTime

      }
      println("loop end")

    }
  */

}



