package com.edata.bigdata.algorithm.networks

import com.edata.bigdata.annotation.Edata_Algorithm
import com.edata.bigdata.util.{BroadcastWrapper, EDataPregel}
import org.apache.spark.graphx.{EdgeTriplet, Graph, Pregel, VertexId, VertexRDD}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import spire.ClassTag

import scala.collection.immutable.List

/*
* Computes, for every ordered pair source->target drawn from nbunch, the
* connectivity, defined here as the number of reachable paths from source to target.
* TODO To handle large-scale graphs, the cutoff handling and the broadcast
*      variable handling still need further work.
* */
@Edata_Algorithm(target = "APNCONNECTIVITY")
class AllPairsNodeConnectivity extends EDataPregel[Map[VertexId, Set[List[VertexId]]]] {

  /** Spark session, injected by the surrounding framework (see EDataPregel). */
  override var SESSION: SparkSession = _

  /**
   * Broadcast of "source->target" -> maximum attainable connectivity,
   * i.e. min(out-degree(source), in-degree(target), cutoff).
   * NOTE(review): populated in run() but never consumed afterwards — this is
   * part of the cutoff/broadcast TODO in the class header.
   */
  var broadCastWrapper: BroadcastWrapper[Map[String, Int]] = _


  /**
   * Merges two messages by unioning, per target vertex id, their path sets.
   * Paths are kept in a Set, so the ++ union cannot introduce duplicates.
   */
  override def mergeMsgs(msg1: Map[VertexId, Set[List[VertexId]]], msg2: Map[VertexId, Set[List[VertexId]]]): Map[VertexId, Set[List[VertexId]]] = {
    (msg1.keySet ++ msg2.keySet).map {
      k => k -> (msg1.getOrElse(k, Set()) ++ msg2.getOrElse(k, Set()))
    }(collection.breakOut)
  }

  /* For an edge A->B this algorithm propagates the message from B to A
   * (against the edge direction), so each vertex accumulates the paths that
   * start at itself. */
  override def sendMsg(edge: EdgeTriplet[Map[VertexId, Set[List[VertexId]]], _]): Iterator[(VertexId, Map[VertexId, Set[List[VertexId]]])] = {
    /*
     * 1. If B (dst) holds no paths yet, there is nothing to propagate.
     * 2. If everything B would contribute is already known to A (src),
     *    stay silent so Pregel can converge.
     */
    if (edge.dstAttr.isEmpty) return Iterator.empty
    val sId = edge.srcId
    val msg_new = edge.dstAttr.map(
      data => {
        val vid = data._1
        val ps = data._2
        /* Prepend src to every path known at dst; drop any path that now
         * contains a repeated vertex (i.e. would close a cycle).
         * TODO consider whether only vertex-disjoint paths should be kept,
         *      since paths counted towards connectivity must not intersect. */
        val ps_new = ps.map(p => p.::(sId)).filterNot(p => p.distinct.length < p.length)
        vid -> ps_new
      })
    if (edge.srcAttr.isEmpty) return Iterator((edge.srcId, msg_new))
    // Flatten both sides' path sets; only send when the message carries at
    // least one path the source does not already know. (Both reduces are safe:
    // srcAttr was checked non-empty above and msg_new has dstAttr's keys.)
    val ps_1 = edge.srcAttr.values.reduce((x, y) => x ++ y)
    val ps_2 = msg_new.values.reduce((x, y) => x ++ y)
    if ((ps_2 -- ps_1).isEmpty) return Iterator.empty
    Iterator((edge.srcId, msg_new))

  }

  /**
   * Vertex program: merges the incoming message into the vertex attribute and
   * prunes the per-target path sets.
   *
   * @param vId   id of the vertex being updated
   * @param vAttr current vertex attribute (target id -> known paths)
   * @param msg   merged incoming message of the same shape
   * @return the updated attribute
   */
  override def handleMsg(vId: VertexId, vAttr: Map[VertexId, Set[List[VertexId]]], msg: Map[VertexId, Set[List[VertexId]]]): Map[VertexId, Set[List[VertexId]]] = {

    val attr_new = mergeMsgs(vAttr, msg).map(
      data => {
        val vid = data._1
        /* Filter out paths whose interior vertices overlap (possible cycles). */
        val ps = data._2
        /* The List() passed to scan is the seed and takes part in the first
         * comparison. For a previous scan element x and the next path y the
         * interiors (endpoints stripped via slice) are concatenated; y is kept
         * only when that combined interior contains no duplicate vertex.
         * NOTE(review): each path is only compared against its immediate
         * predecessor in the Set's (unspecified) iteration order, not against
         * all previously kept paths — verify this prunes as intended. */
        val ps_ = ps.scan(List())((x, y) => {
          val x_ = x.slice(1, x.length - 1)
          val y_ = y.slice(1, y.length - 1)
          val z_ = x_ ++ y_
          if (z_.distinct.length == z_.length) {
            y
          } else {
            List()
          }
        }).filterNot(data => data.isEmpty)
        vid -> ps_
      })
    attr_new
  }


  /**
   * Runs the all-pairs node-connectivity computation.
   *
   * @param G      input graph
   * @param nbunch vertex ids whose pairwise connectivity is wanted
   * @param cutoff upper bound on the connectivity per pair (currently only
   *               used for the broadcast map — see class-level TODO)
   * @return vertices of the converged Pregel graph restricted to nbunch; each
   *         attribute maps a reachable nbunch vertex to its set of simple paths
   */
  def run[VD: ClassTag, ED: ClassTag](G: Graph[VD, ED], nbunch: List[VertexId], cutoff: Int): VertexRDD[Map[VertexId, Set[List[VertexId]]]] = {

    /* In- and out-degrees ([vid, D_i] and [vid, D_o]) restricted to nbunch.
     * Vertices with degree 0 are absent from the respective RDD. */
    val inDegrees = G.inDegrees.filter(data => nbunch.contains(data._1))
    val outDegrees = G.outDegrees.filter(data => nbunch.contains(data._1))

    /* Full outer join into [vid, D_i, D_o]; a missing in- or out-degree
     * becomes 0. */
    val degrees = inDegrees.fullOuterJoin(outDegrees).map(
      data => {
        val vid = data._1
        val inDegrees = data._2._1.getOrElse(0)
        val outDegrees = data._2._2.getOrElse(0)
        (vid, inDegrees, outDegrees)
      }).collect().toList
    /* Every ordered pair (source, target): the maximum possible connectivity
     * is min(out-degree(source), in-degree(target), cutoff).
     * foldLeft (instead of reduce) keeps this safe when nbunch yields fewer
     * than two vertices with degrees: reduce would throw
     * UnsupportedOperationException on the empty iterator, foldLeft simply
     * yields an empty map. */
    val permutations = degrees.combinations(2).flatMap(
      data => {
        List(List(data(0), data(1)), List(data(1), data(0)))
      }).map(
      data => {
        val source = data(0)
        val target = data(1)
        val max_connectivity = math.min(math.min(source._3, target._2), cutoff)
        List(source._1, target._1, max_connectivity)
      }).map(
      data => {
        val source = data(0).toString
        val target = data(1).toString
        val key = source + "->" + target
        val value = data(2).asInstanceOf[Int]
        Map(key -> value)
      }).foldLeft(Map[String, Int]())((x, y) => x ++ y)

    /*
     * Initial graph: every nbunch vertex starts with the trivial path to
     * itself; every other vertex starts empty.
     */
    val APNCGraph = G.mapVertices {
      (vid, attr) => {
        if (nbunch.contains(vid)) {
          val p = List(vid)
          val ps = Set(p)
          Map(vid -> ps)
        } else {
          Map[VertexId, Set[List[VertexId]]]()
        }
      }
    }
    broadCastWrapper = new BroadcastWrapper[Map[String, Int]]
    broadCastWrapper.getInstance(SESSION.sparkContext, permutations)
    val msg = Map[VertexId, Set[List[VertexId]]]()
    // Pregel runs with the library-default max iterations / active direction;
    // cutoff is not yet enforced inside the iteration (class-level TODO).
    val PGraph = Pregel(APNCGraph, msg)(vprog = handleMsg, sendMsg = sendMsg, mergeMsg = mergeMsgs)
    val vertex = PGraph.vertices.filter(
      data => {
        nbunch.contains(data._1)
      })
    vertex
  }


}
