package com.dtmobile.spark.biz.excepitonsection


import java.util
import java.util.{Collections, _}

import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SparkSession}
import scala.util.control.Breaks._

/**
  * Created by shenkaili on 2017/10/18.
  */
/**
  * Builds, per problem type, a map from grid id to a description of the "road segment"
  * (run of consecutive grid ids) that grid belongs to, and publishes the same data as
  * the temp view "griddatatable" for downstream SQL.
  *
  * Map value layout (fields joined with "="):
  *   - freqswgrid:                      "id1;id2;...=midGridId"
  *   - all other types:                 "id1;id2;...=midGridId=gridCount"
  *
  * @param ANALY_DATE  partition date (dt) to analyse
  * @param ANALY_HOUR  partition hour (h) to analyse
  * @param SDB         source database name (currently unused here; kept for interface compatibility)
  * @param DDB         destination/analysis database name used in the queries
  * @param warhouseDir warehouse directory (currently unused here; kept for interface compatibility)
  */
class GetGridMap(ANALY_DATE: String, ANALY_HOUR: String, SDB: String, DDB: String, warhouseDir: String) {

  /**
    * Publishes `gridMap` as a string-typed DataFrame registered under the temp view
    * "griddatatable".
    *
    * Each map entry is rendered via `Entry.toString` ("key=value"); because the values are
    * themselves "="-joined fields, splitting the rendered entry on "=" recovers the key
    * followed by the value's columns.
    *
    * @param schemaString comma-separated column names; its arity determines how many
    *                     "="-separated fields each row carries.
    */
  private def registerGridTable(sparkSession: SparkSession,
                                gridMap: HashMap[String, String],
                                schemaString: String): Unit = {
    val entries = new ArrayList[String]()
    val entryIt = gridMap.entrySet().iterator()
    while (entryIt.hasNext) {
      entries.add(entryIt.next().toString)
    }
    val lines = new Array[String](entries.size())
    for (idx <- 0 until entries.size()) {
      lines(idx) = entries.get(idx)
    }
    val fieldNames = schemaString.split(",")
    // Captured as a local so the RDD closures do not capture `this` (not serializable).
    val columnCount = fieldNames.length
    val schema = StructType(fieldNames.map(name => StructField(name, StringType, true)))
    val rowRDD = sparkSession.sparkContext
      .parallelize(lines)
      .map(_.split("="))
      .map(parts => Row(parts.take(columnCount): _*))
    val gridFrame = sparkSession.sqlContext.createDataFrame(rowRDD, schema)
    gridFrame.createOrReplaceTempView("griddatatable")
  }

  /**
    * Groups the grid ids of `grids` (iterated in insertion order) into runs of
    * numerically consecutive ids, each rendered as "id1;id2;...".
    *
    * NOTE: a single-element set yields an empty list — this matches the historical
    * behaviour; callers only ever insert ids in pairs or triples, so it never triggers.
    */
  private def groupConsecutive(grids: LinkedHashSet[String]): util.ArrayList[String] = {
    val runs = new util.ArrayList[String]()
    val current = new StringBuilder
    var expected = 0L // the id that would extend the current run
    var index = 0
    val gridIt = grids.iterator()
    while (gridIt.hasNext) {
      val gridid = gridIt.next().toLong
      if (index == 0) {
        current.append(gridid + ";")
      } else {
        if (gridid == expected) {
          current.append(gridid + ";")
        } else {
          // Run broken: flush the finished run (drop the trailing ';') and start a new one.
          runs.add(current.substring(0, current.length - 1))
          current.setLength(0)
          current.append(gridid + ";")
        }
        if (index == grids.size - 1) {
          runs.add(current.substring(0, current.length - 1))
        }
      }
      expected = gridid + 1
      index += 1
    }
    runs
  }

  /**
    * Computes the grid map for `questiontype` and registers the temp view "griddatatable".
    *
    * @param questiontype one of (case-insensitive): "dpnetgrid" (off-network segments),
    *                     "highdpmcgrid" (high call-drop), "highdistgrid" (high interference),
    *                     "freqswgrid" (frequent handover), "weakcovergrid" (weak coverage).
    * @return grid id -> segment description; empty when the query yields too few rows
    *         (in that case no temp view is registered, matching the original behaviour).
    */
  def getGridMap(sparkSession: SparkSession, questiontype: String): HashMap[String, String] = {
    import sparkSession.sql
    var te: java.util.List[Row] = null
    val gridMap = new HashMap[String, String]()
    // Off-network (dropped) road segments
    if (questiontype.equalsIgnoreCase("dpnetgrid")) {
      te = sql(
        s"""
           |select gridid from $DDB.exception_analysis where dt=$ANALY_DATE and h=$ANALY_HOUR and gridid is not null group by gridid having sum(case when ETYPE=14 or ETYPE=15 then 1 else 0 end)>2 order by gridid
         """.stripMargin).collectAsList()
    }
    // High call-drop road segments
    else if (questiontype.equalsIgnoreCase("highdpmcgrid")) {
      te = sql(
        s"""
           |select gridid from $DDB.exception_analysis where dt=$ANALY_DATE and h=$ANALY_HOUR and gridid is not null group by gridid having sum(case when ETYPE=5 or ETYPE=7 then 1 else 0 end)>3 order by gridid
         """.stripMargin).collectAsList()
    }
    // High-interference road segments: every qualifying grid forms its own one-grid segment.
    else if (questiontype.equalsIgnoreCase("highdistgrid")) {
      val th: java.util.List[Row] = sql(
        s"""
           |select gridid from $DDB.mr_gt_grid_ana_base60 where dt=$ANALY_DATE and h=$ANALY_HOUR and gridid  is not null group by gridid  having sum(upsigrateavgx)/sum(upsigrateavgy) < 3 order by gridid
   """.stripMargin).collectAsList()
      if (th.size() > 0) {
        for (a <- 0 until th.size()) {
          val gridid = th.get(a).get(0).toString
          // value layout: gridid=midgrid=cnt, where cnt is the 1-based row position
          gridMap.put(gridid, gridid + "=" + gridid + "=" + (a + 1))
        }
        registerGridTable(sparkSession, gridMap, "gridid,value,midgrid,cnt")
        return gridMap
      }
    }
    // Frequent-handover road segments
    else if (questiontype.equalsIgnoreCase("freqswgrid")) {
      val tt: java.util.List[Row] = sql(
        s"""
           |select s.gridid,case when t.num is null then 0 else t.num end etypeNum from  (select gridid ,count(1) num from $DDB.exception_analysis  where etype=10 and dt=$ANALY_DATE and h=$ANALY_HOUR  group by gridid) t right join $DDB.grid s on t.gridid = s.gridid order by s.gridid
       """.stripMargin).collectAsList()
      val gridtempset = new LinkedHashSet[String]()
      if (tt.size > 3) {
        // Any sliding window of 3 grids with >= 5 handovers marks all three grids.
        for (a <- 0 to (tt.size() - 3)) {
          val changeTimes = tt.get(a).get(1).toString.toInt + tt.get(a + 1).get(1).toString.toInt + tt.get(a + 2).get(1).toString.toInt
          if (changeTimes >= 5) {
            gridtempset.add(tt.get(a).get(0).toString)
            gridtempset.add(tt.get(a + 1).get(0).toString)
            gridtempset.add(tt.get(a + 2).get(0).toString)
          }
        }
        val runs = groupConsecutive(gridtempset)
        val runIt = runs.iterator()
        while (runIt.hasNext) {
          val run = runIt.next
          val keys: Array[String] = run.split(";")
          for (key <- keys) {
            gridMap.put(key, run + "=" + keys(keys.length / 2))
          }
        }
        registerGridTable(sparkSession, gridMap, "gridid,value,midgrid")
        return gridMap
      }
    }
    // Weak-coverage road segments
    else if (questiontype.equalsIgnoreCase("weakcovergrid")) {
      // If 2 consecutive grids both have an average RSRP below -110 dBm, those
      // 2 consecutive grids form a weak-coverage segment.
      val tw: java.util.List[Row] = sql(
        s"""
           |select gridid from $DDB.mr_gt_grid_ana_base60 where dt=$ANALY_DATE and h=$ANALY_HOUR
           |and gridid  is not null group by gridid having sum(avgrsrpx)/sum(commy) < -110 order by gridid
   """.stripMargin).collectAsList() // grids whose average RSRP is below -110
      val gridset = new LinkedHashSet[String]()
      if (tw.size() >= 2) {
        for (a <- 0 to tw.size() - 2) {
          val curGridId = tw.get(a).get(0).toString.toInt
          val nextGridId = tw.get(a + 1).get(0).toString.toInt
          if (nextGridId - curGridId == 1) {
            gridset.add(tw.get(a).get(0).toString)
            gridset.add(tw.get(a + 1).get(0).toString)
          }
        }
        val runs = groupConsecutive(gridset)
        val runIt = runs.iterator()
        while (runIt.hasNext) {
          val run = runIt.next
          val keys: Array[String] = run.split(";")
          for (key <- keys) {
            gridMap.put(key, run + "=" + keys(keys.length / 2) + "=" + keys.length)
          }
        }
        registerGridTable(sparkSession, gridMap, "gridid,value,midgrid,cnt")
        return gridMap
      }
    }

    // dpnetgrid / highdpmcgrid fall through to here: build runs of consecutive grids from `te`.
    if (te == null || te.isEmpty || te.size() < 2) {
      return gridMap
    }
    val gridSet = new HashSet[Integer]
    for (a <- 0 to te.size() - 2) {
      if (te.get(a) != null) {
        val curGridId = te.get(a).get(0).toString.toInt
        val nextGridId = te.get(a + 1).get(0).toString.toInt
        if (nextGridId - curGridId == 1) {
          gridSet.add(nextGridId)
          gridSet.add(curGridId)
        }
      }
    }

    val griddata = new ArrayList[Integer](gridSet)
    Collections.sort(griddata)

    // Emits one gridMap entry per grid of the run griddata[start .. start+steps]:
    // "id1;id2;...=midGridId=gridCount".
    def emitRun(start: Int, steps: Int): Unit = {
      val sb = new StringBuffer()
      for (y <- 0 to steps) {
        sb.append(griddata.get(start + y).toString)
        sb.append(";")
      }
      val run = sb.substring(0, sb.length() - 1) // drop the trailing ';'
      val cnt = steps + 1
      val midgrid = run.split(";")(steps / 2)
      for (y <- 0 to steps) {
        gridMap.put(griddata.get(start + y).toString, run + "=" + midgrid + "=" + cnt) // consecutive grid ids for this grid
      }
    }

    var runStart = 0 // index in griddata where the current run begins
    var steps = 0    // number of consecutive steps in the current run (run length - 1)
    for (a <- 0 to (griddata.size() - 2)) {
      if (griddata.get(a + 1) - griddata.get(a) == 1) {
        steps += 1
      } else {
        emitRun(runStart, steps)
        runStart = a + 1
        steps = 0
      }
    }
    // BUGFIX: the original special-cased the last index and flushed the final run WITHOUT
    // its last element, so the closing grid of the last segment was always dropped and the
    // segment's midgrid/count were wrong. Flushing once after the loop includes it.
    if (griddata.size() > 0) {
      emitRun(runStart, steps)
    }

    registerGridTable(sparkSession, gridMap, "gridid,value,midgrid,cnt")
    gridMap
  }
}
