
import org.apache.spark.sql.SparkSession


object traffic_detect {

  object GetSpark {

    /** Builds the camera summary string for one monitor from the standard table.
      *
      * @param tuple (monitorId, rows grouped under that monitor); column 1 of each
      *              row is read as the camera id — assumes monitor_camera_info
      *              schema, TODO confirm against the Hive table definition.
      * @return (monitorId, "cameraIds=&lt;id,id,...&gt;|cameraCount=&lt;n&gt;"),
      *         e.g. ("0001", "cameraIds=00001,00002,00003,00004|cameraCount=4")
      */
    def fun1(tuple: ((Any, Iterable[org.apache.spark.sql.Row]))): (Any, String) = {
      val monitorId = tuple._1
      // Join the camera ids with commas instead of hand-building the string
      // with a mutable StringBuilder + leading-comma/substring(1) trick.
      val cameraIds = tuple._2.map(_.get(1)).mkString(",")
      // Number of cameras registered at this monitor in the standard table.
      val count = tuple._2.size
      (monitorId, s"cameraIds=$cameraIds|cameraCount=$count")
    }
  }



  object GetSpark_2 {

    /** Builds the camera summary string for one monitor from the flow table.
      *
      * @param tuple (monitorId, camera ids observed in the flow data for that monitor)
      * @return (monitorId, "cameraIds=&lt;id,id,...&gt;|cameraCount=&lt;n&gt;"),
      *         e.g. ("0001", "cameraIds=00001,00002,00003,00004|cameraCount=4")
      */
    def fun1(tuple: ((Any, Iterable[Any]))): (Any, String) = {
      val monitorId = tuple._1
      // mkString is total: unlike the StringBuilder + substring(1) approach it
      // also handles an empty group without throwing.
      val cameraIds = tuple._2.mkString(",")
      // Number of cameras seen in the flow data for this monitor.
      val count = tuple._2.size
      (monitorId, s"cameraIds=$cameraIds|cameraCount=$count")
    }
  }

  object GetSpark_3 {

    /** Splits a comma-separated string into a list of trimmed tokens. */
    def strToList_(str: String): List[String] =
      // .toList replaces the deprecated collection.breakOut conversion
      // (removed in Scala 2.13); the result is the same List[String].
      str.split(",").map(_.trim).toList

    /** Extracts the text between '=' and the following '|',
      * e.g. "cameraIds=a,b|cameraCount=2" -> "a,b".
      *
      * @return the captured group, or the literal "not match" when absent
      */
    def reg_(str: String): String = {
      import scala.util.matching.Regex
      val numberPattern: Regex = "\\=(\\S+)\\|".r
      numberPattern.findFirstMatchIn(str) match {
        case Some(m) => m.group(1)
        case None => "not match"
      }
    }

    /** Unwraps an optional string, substituting "?" for None. */
    def show(x: Option[String]): String = x.getOrElse("?")

    /** Elements of leftList that do not appear in rightList. */
    def list_diff(leftList: List[String], rightList: List[String]): List[String] =
      leftList.diff(rightList)

    /** Formats a camera-id list and count as "cameraid=...|count:n". */
    def listToStringJoinCount(a: List[String], count: Int): String =
      "cameraid=" + a.mkString(",") + "|count:" + count

    // Debug helper: prints the part after '|' (here "count:10").
    def split_(): Unit = {
      val str = "cameraid=apple,banana,cherry|count:10".split("\\|", 0)(1)
      println(str)
    }

    /** Compares the standard-table summary with the flow-data summary for one
      * monitor and reports the cameras that are registered but produced no flow.
      *
      * @param tuple (monitorId, (standard-table summary, optional flow summary));
      *              both summaries are in "cameraIds=...|cameraCount=n" form
      * @return (monitorId, result) where result is:
      *         the full standard summary when no flow data exists,
      *         "cameraid=...|count:n" listing the missing cameras when the
      *         camera counts differ, or "" when the counts match.
      */
    def fun1(tuple: ((Any, (String, Option[String])))): (Any, String) = {
      val monitorId = tuple._1
      val leftStr = tuple._2._1
      val rightOpt = tuple._2._2
      val expMonitorsStr = rightOpt match {
        // No flow data at all: every registered camera is unaccounted for.
        case None => leftStr
        // Compare the "cameraCount=n" segments; only diff when they disagree.
        case Some(rightStr) if leftStr.split("\\|", 0)(1) != rightStr.split("\\|", 0)(1) =>
          val expMonitors = list_diff(strToList_(reg_(leftStr)), strToList_(reg_(rightStr)))
          listToStringJoinCount(expMonitors, expMonitors.length)
        // Counts match: keep the original empty marker.
        case _ => ""
      }
      (monitorId, expMonitorsStr)
    }
  }

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("traffic_detection").getOrCreate()
    spark.sql("use traffic")
    val camera_rdd = spark.sql("select * from monitor_camera_info").rdd
    val flow_rdd = spark.sql("select * from monitor_flow_action").rdd
    // Flow side: key each row by monitor id (col 1), value is camera id (col 2),
    // then summarise per monitor. `val` — these RDDs are never reassigned.
    val flow_rdd_right = flow_rdd.map(row => (row.get(1), row.get(2))).groupByKey().map(GetSpark_2.fun1)
    // Standard side: key each row by monitor id (col 0) and summarise.
    val camera_rdd_left = camera_rdd.map(row => (row(0), row)).groupByKey().map(GetSpark.fun1)

    // Left join keeps monitors with no flow data (None on the right);
    // GetSpark_3.fun1 turns each pair into the anomaly report line.
    camera_rdd_left.leftOuterJoin(flow_rdd_right).map(GetSpark_3.fun1).saveAsTextFile("hdfs://nameservice1/tmp/traffic_value.txt")
    // Release cluster resources once the terminal action has completed.
    spark.stop()
  }

}
