package SparkGraphXInAction

import org.apache.spark._
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.spark.graphx._
import org.apache.spark.graphx
import org.apache.spark.graphx.Graph._
import org.apache.spark.rdd.RDD
import org.apache.spark.graphx.util.GraphGenerators

/**
  * Created by Administrator on 2017/4/26 0026.
  */
object TestSVDPlusPlus {
  /**
    * Demo of the GraphX SVD++ collaborative-filtering algorithm on a tiny
    * user/item rating graph. Trains the model and prints the learned
    * per-vertex latent factors, bias and norm terms to stdout.
    */
  def main(args: Array[String]): Unit = {
    // Silence noisy framework logging so the result printout stays readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    // Local single-threaded Spark context for this demo.
    val conf = new SparkConf().setAppName("SimpleGraphX").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Rating edges: src = user id (1..4), dst = item id (11..13),
      // attr = the rating that user gave that item.
      val edges = sc.makeRDD(Array(
        Edge(1L, 11L, 5.0), Edge(1L, 12L, 4.0), Edge(2L, 12L, 5.0),
        Edge(2L, 13L, 5.0), Edge(3L, 11L, 5.0), Edge(3L, 13L, 2.0),
        Edge(4L, 11L, 4.0), Edge(4L, 12L, 4.0)))

      // SVD++ hyper-parameters: rank = 2, maxIters = 10, rating bounds
      // [minVal = 0, maxVal = 5]; gamma1/gamma2 are learning rates,
      // gamma6/gamma7 are regularization factors.
      val svdconf = new lib.SVDPlusPlus.Conf(2, 10, 0, 5, 0.007, 0.007, 0.005, 0.015)

      // Run SVD++. Returns the trained graph (vertex attributes hold the
      // learned model parameters) and the global mean rating of the dataset.
      val (g, mean) = lib.SVDPlusPlus.run(edges, svdconf)

      // Each vertex attribute is a 4-tuple; destructure it instead of using
      // positional accessors (._1 .. ._4) for readability:
      //   factors / vectors = the two learned latent arrays,
      //   bias = per-vertex bias term, norm = normalization term.
      for ((vertexId, (factors, vectors, bias, norm)) <- g.vertices.collect) {
        println("VertexId : " + vertexId)

        print("a:")
        factors.foreach(a => print(", " + a))
        println()

        print("b:")
        vectors.foreach(b => print(", " + b))
        println()

        println("c: " + bias)
        println("d: " + norm)
        println("===========================================")
      }
    } finally {
      // BUG FIX: the original never stopped the SparkContext, leaking the
      // local Spark runtime (threads, UI server) on every run.
      sc.stop()
    }
  }
}
