package org.zjt.spark.book

import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ArrayBuffer

/**
  * DESC    皮尔逊xy相关度
  *
  * @author
  * @create 2017-06-29 上午9:50
  **/
/**
  * Spark driver: computes the Pearson correlation between every pair of
  * columns of a small hard-coded 3x4 matrix.
  *
  * Each row is exploded into ("i:j", "value_i:value_j") pairs for every
  * column pair (i, j) with i < j; grouping by the "i:j" key collects the
  * paired samples for those two columns, which are then handed to
  * [[PearsonTool.Calculation]].
  *
  * NOTE: a plain `main` is used instead of `extends App` — the App trait's
  * delayed initialization interacts badly with Spark closure serialization
  * (fields may be uninitialized on executors).
  */
object PearsonRelation {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("PearsonRelation")
    val sc = new SparkContext(sparkConf)
    try {
      val rdd = sc.parallelize(Seq(
        Array[Double](1, 1, 3, -1),
        Array[Double](2, 2, 1, -2),
        Array[Double](3, 3, 8, -3)
      )).flatMap { line =>
        val length = line.length
        // Emit one keyed pair per unordered column combination; j starts at
        // i + 1, so i != j is guaranteed without an explicit guard.
        for {
          i <- 0 until length
          j <- i + 1 until length
        } yield (s"$i:$j", s"${line(i)}:${line(j)}")
      }.groupByKey().map { case (columnPair, samples) =>
        (columnPair, PearsonTool.Calculation(samples.toArray))
      }

      println(rdd.collect().mkString("\n"))
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}


object PearsonTool {
  /**
    * Computes the Pearson correlation coefficient of a set of paired
    * observations, each encoded as a "first:second" string: the value before
    * the colon is treated as y, the value after it as x. (Pearson correlation
    * is symmetric, so the assignment order does not affect the result.)
    *
    * Returns NaN when the input is empty or when either variable has zero
    * variance — this matches the original 0/0 behavior and is left as-is so
    * callers see the same output.
    *
    * @param array paired samples formatted as "y:x"; an element with no colon
    *              is parsed as the same value for both x and y
    * @return Pearson correlation coefficient in [-1, 1], or NaN
    */
  def Calculation(array: Array[String]): Double = {
    var sumX = 0.0
    var sumY = 0.0
    var sumXX = 0.0
    var sumYY = 0.0
    var sumXY = 0.0
    val n = array.length.toDouble
    for (pair <- array) {
      // Split once per element (the original split each element twice).
      val parts = pair.split(":")
      val y = parts.head.toDouble
      val x = parts.last.toDouble
      sumX += x
      sumY += y
      sumXX += x * x
      sumYY += y * y
      sumXY += x * y
    }
    // Standard computational form: cov(x, y) / (sd(x) * sd(y)).
    (sumXY - (sumX * sumY) / n) /
      math.sqrt((sumXX - math.pow(sumX, 2) / n) * (sumYY - math.pow(sumY, 2) / n))
  }
}