package cn.doitedu.graphx

import cn.doitedu.commons.utils.SparkUtil
import org.apache.spark.graphx.{Edge, Graph, VertexId, VertexRDD}
import org.apache.spark.rdd.RDD

/**
 * Rolling merge of the guid dictionary.
 *
 * Unions yesterday's id-mapping graph with today's log-derived graph, runs
 * the connected-components algorithm, then re-aligns each component's guid
 * with yesterday's mapping so that previously-assigned guids carry forward.
 */
object DemoDay02 {
  def main(args: Array[String]): Unit = {

    val spark = SparkUtil.getSparkSession("guid图计算生成")
    import spark.implicits._

    // Load yesterday's guid dictionary; each line is "id,guid" (both numeric).
    val day01Rdd = spark.read.textFile("testdata/graph_out/day01")

    // Every id on a line becomes a vertex (the String attribute is unused).
    val day01VerticesRdd: RDD[(Long, String)] = day01Rdd.rdd.flatMap(line => {
      val split = line.split(",")
      for (id <- split) yield (id.toLong, "")
    })

    // An edge links each id to its guid from yesterday's dictionary.
    val day01Edges = day01Rdd.rdd.map(line => {
      val split = line.split(",")
      Edge(split(0).toLong, split(1).toLong, null) // edge attribute unused
    })

    // Load today's raw log data.
    val day02Log = spark.read.textFile("testdata/graphx/x2.txt")

    // Fields 1 and 2 of each log line are identifiers; their hashCode serves
    // as the numeric vertex id. NOTE(review): hashCode collisions could merge
    // unrelated ids — acceptable for this demo, not for production.
    val day02VerticesRdd = day02Log.rdd.flatMap(line => {
      val split = line.split(",")
      val points: Array[String] = split.slice(1, 3)
      for (p <- points) yield (p.hashCode.toLong, p)
    })

    // One edge per log line, connecting the two identifiers it mentions.
    val day02EdgesRdd = day02Log.rdd.map(line => {
      val split = line.split(",")
      Edge(split(1).hashCode.toLong, split(2).hashCode.toLong, null)
    })

    // Build the combined graph from both days' vertices and edges.
    val graph = Graph(day01VerticesRdd.union(day02VerticesRdd), day01Edges.union(day02EdgesRdd))

    // Connected components tags every vertex with the smallest vertex id in
    // its component; that id acts as the candidate guid for the group.
    val resultMapping: VertexRDD[VertexId] = graph.connectedComponents().vertices

    // Re-read yesterday's (id -> guid) pairs so today's components can
    // inherit yesterday's guid whenever they overlap.
    val idmp01 = day01Rdd.rdd.map(line => {
      val split = line.split(",")
      (split(0).toLong, split(1).toLong)
    })

    // Yesterday's dictionary is assumed small enough to broadcast.
    val day01Map = idmp01.collectAsMap()
    val bc = spark.sparkContext.broadcast(day01Map)

    // Re-key as (component id / candidate guid, member vertex id).
    val idmp02: RDD[(Long, Long)] = resultMapping.map(tp => (tp._2, tp._1))

    val finalResult: RDD[(Long, Long)] = idmp02.groupByKey()
      .map { case (candidateGuid, ids) =>
        val day01dict = bc.value

        // If any id of this component already existed yesterday, reuse
        // yesterday's guid so identities stay stable across days.
        val commonIdSet = day01dict.keySet.intersect(ids.toSet)
        val guid =
          if (commonIdSet.nonEmpty)
            // `min` makes the pick deterministic (the original grabbed an
            // arbitrary Set element, which could vary between runs).
            day01dict(commonIdSet.min)
          else
            candidateGuid

        (guid, ids)
      }
      .flatMap { case (guid, ids) =>
        for (id <- ids) yield (id, guid)
      }

    // Persist today's (id, guid) mapping.
    finalResult.map(tp => s"${tp._1},${tp._2}").saveAsTextFile("testdata/graph_out/day02")

    spark.close()
  }

}
