package yang

import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ListBuffer

object Homework {

  /**
   * Spark job: computes pairwise similarity between rows of a sparse matrix.
   *
   * Input rows are (rowId, [(docId, weight)]). For every document the job
   * multiplies the weights of each pair of rows that mention it, sums those
   * products per row pair (i.e. the dot product of the two row vectors), and
   * prints the pairs whose similarity exceeds 0.6.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("homework")
    val sc = new SparkContext(conf)
    try {
      val data = sc.parallelize(List(
        (1, List(("d1", 0.802), ("d2", 0.535), ("d5", 0.267))),
        (2, List(("d1", 0.189), ("d2", 0.189), ("d3", 0.189), ("d5", 0.945))),
        (3, List(("d2", 0.333), ("d4", 0.667), ("d5", 0.667)))), 3)

      val similarities = data
        // Invert to (docId, (rowId, weight)) so every weight for one document
        // lands on the same key.
        .flatMap { case (rowId, docWeights) =>
          docWeights.map { case (docId, weight) => (docId, (rowId, weight)) }
        }
        // Gather all (rowId, weight) contributions per document. Prepend (::)
        // instead of append (:+): O(1) per element, and element order is
        // irrelevant once pair keys are canonicalized below.
        .combineByKey(
          (v: (Int, Double)) => List(v),
          (acc: List[(Int, Double)], v: (Int, Double)) => v :: acc,
          (a: List[(Int, Double)], b: List[(Int, Double)]) => a ++ b)
        // Emit one partial product for every unordered pair of rows sharing
        // this document.
        .flatMap { case (_, contributions) =>
          // Indexed view for O(1) positional access in the nested loop
          // (List.apply is O(n)).
          val entries = contributions.toIndexedSeq
          for {
            i <- 0 until entries.length - 1
            j <- (i + 1) until entries.length
          } yield {
            val (rowA, weightA) = entries(i)
            val (rowB, weightB) = entries(j)
            // Canonical (min, max) key: combineByKey's merge order across
            // partitions is nondeterministic, so without this the same pair
            // could appear as both (1,2) and (2,1) and the sums would split.
            val pair = if (rowA <= rowB) (rowA, rowB) else (rowB, rowA)
            (pair, weightA * weightB)
          }
        }
        .reduceByKey(_ + _)   // dot product per row pair
        .filter(_._2 > 0.6)   // keep only sufficiently similar pairs

      println(similarities.collect().toBuffer)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
