import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession

object traffic_detect_topndetail {

  /** Row-shaping helpers for the monitor_flow_action table. */
  object MonitorCarNumSort {

    /**
     * Maps a monitor_flow_action row to `(monitorId, "cameraId-carLicensePlate")`.
     *
     * Column positions used: 1 = monitor id, 2 = camera id, 3 = car license
     * plate (column 0 is presumably a date/partition key — TODO confirm
     * against the Hive table schema).
     *
     * NOTE: string interpolation replaces the original `Any + String`
     * concatenation, which relied on the deprecated `any2stringadd`
     * conversion; the produced string is identical.
     */
    def concatMC(row: org.apache.spark.sql.Row): (Any, String) = {
      val monitorId: Any = row.get(1)
      val cameraId: Any = row.get(2)
      val carLicensePlate: Any = row.get(3)
      (monitorId, s"$cameraId-$carLicensePlate")
    }
  }

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName("WordCount")

    // Context is created explicitly because it is needed for broadcasting;
    // the SparkSession below reuses the same underlying context.
    val sc = new SparkContext(conf)

    val spark = SparkSession.builder().appName("traffic_detection").getOrCreate()
    spark.sql("use traffic")
    val flowRdd = spark.sql("select * from monitor_flow_action").rdd

    // Distinct (monitorId, "camera-plate") pairs so each camera/car
    // combination is counted at most once per monitor.
    val monitorCarPairs = flowRdd.map(MonitorCarNumSort.concatMC).distinct()

    // Rank monitors by distinct camera/car count, descending, and keep only
    // the top N. BUG FIX: the original broadcast *every* monitor (plain
    // collect()), which made the detail filter below a no-op — every row's
    // monitor id was always present in the broadcast list. The commented-out
    // line in the original used take(100), which is the intent restored here.
    val topN = 100
    val topMonitors: Array[(Int, Any)] = monitorCarPairs
      .map(pair => (pair._1, 1))
      .reduceByKey(_ + _)
      .map { case (monitorId, count) => (count, monitorId) }
      .sortByKey(ascending = false)
      .take(topN)

    val bc = sc.broadcast(topMonitors)

    // Defined BEFORE its first use (the original defined it after, relying on
    // a fragile forward reference inside the method block).
    object MonitorCarDetailsFilter {
      // Computed once per JVM from the broadcast value instead of rebuilding
      // a List with O(n) appends on every single row, as the original did.
      lazy val topMonitorIds: Set[Any] = bc.value.map(_._2).toSet

      /** True when the row's monitor id (column 1) is one of the top-N monitors. */
      def Filter(row: org.apache.spark.sql.Row): Boolean =
        topMonitorIds.contains(row.get(1))
    }

    // Print detail records (columns 2..5 — camera, plate, and presumably
    // action time / speed; TODO confirm schema) for cars seen by the
    // top-N monitors.
    flowRdd
      .filter(MonitorCarDetailsFilter.Filter)
      .map(row => (row.get(1), (row.get(2), row.get(3), row.get(4), row.get(5))))
      .take(10)
      .foreach(println)

    // Release cluster resources; stopping the session also stops the
    // underlying SparkContext.
    spark.stop()
  }

}
