package com.edata.bigdata.viewmain
import com.edata.bigdata.algorithm.networks.{AllPairsNodeConnectivity, KCores}
import com.edata.bigdata.annotation.Edata_Scan
import com.edata.bigdata.util.EDataManager
import org.apache.spark.graphx.{Edge, Graph}
/**
 * Manual smoke-test driver for the all-pairs node connectivity algorithm.
 *
 * Builds a small in-memory social graph (12 vertices, 12 edges), runs the
 * "APNCONNECTIVITY" algorithm over the vertex set {1, 3, 9}, prints the
 * result, and shuts the Spark session down.
 */
@Edata_Scan(
  bean = "com.edata.bigdata.bean",
  executor = "com.edata.bigdata.executor"
)
object testing {
  def main(args: Array[String]): Unit = {
    val edm = EDataManager(this.getClass)
    edm.APPNAME = "EDATA_Executor_Application"
    edm.MASTER = "local" // run Spark in local mode (single JVM) for this demo
    edm.help("algorithm")

    // The algorithm instance carries the SparkSession used to build the test graph.
    val apnc = edm.createAlgorithm("APNCONNECTIVITY").asInstanceOf[AllPairsNodeConnectivity]

    // Test vertices: (vertexId, name), listed in id order for readability.
    val myVertices = apnc.SESSION.sparkContext.makeRDD(Array(
      (1L, "Alice"),
      (2L, "Bob"),
      (3L, "Charlie"),
      (4L, "Dave"),
      (5L, "Eve"),
      (6L, "Faith"),
      (7L, "George"),
      (8L, "Harvey"),
      (9L, "Ivy"),
      (10L, "Lily"),
      (11L, "Helen"),
      (12L, "Ann")
    ))

    // Test edges: (src, dst, relationship label). Note the graph has three
    // components: {1..9 connected via 1-9/6-7/9-7/8-9}, and {10,11,12}.
    val myEdges = apnc.SESSION.sparkContext.makeRDD(Array(
      Edge(1L, 9L, "friend"),
      Edge(1L, 3L, "sister"),
      Edge(2L, 1L, "friend"),
      Edge(2L, 4L, "brother"),
      Edge(3L, 2L, "boss"),
      Edge(4L, 5L, "client"),
      Edge(6L, 7L, "cousin"),
      Edge(9L, 7L, "coworker"),
      Edge(8L, 9L, "father"),
      Edge(10L, 11L, "colleague"),
      Edge(11L, 12L, "colleague"),
      Edge(10L, 12L, "colleague")
    ))

    val g = Graph(myVertices, myEdges)

    // Third argument 100 is presumably an iteration/step cap — verify against
    // AllPairsNodeConnectivity.run's signature.
    val result = apnc.run(g, List(1L, 3L, 9L), 100)

    // FIX: the original computed `result` and discarded it silently, so the
    // demo produced no observable output. Surface it.
    println(result)

    // FIX: release Spark resources instead of leaking the session on exit.
    // NOTE(review): assumes SESSION is a SparkSession exposing stop() — it
    // exposes sparkContext above, so this should hold; confirm.
    apnc.SESSION.stop()
  }
}