package scala

import org.apache.spark.mllib.stat.Statistics
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Example: compute the correlation between two series of doubles with
  * Spark MLlib's [[org.apache.spark.mllib.stat.Statistics]].
  *
  * Each input file contains whitespace-separated doubles; the two series
  * must have the same number of elements for `Statistics.corr` to succeed.
  *
  * @author lds
  * @since jdk1.8
  */
object TestCorrect {

  /**
    * Entry point.
    *
    * @param args optional overrides: args(0) = path to the X series file,
    *             args(1) = path to the Y series file. When absent, the
    *             original hard-coded paths are used.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      // Fixed: was "TestIndexedRowMatrix", a copy-paste leftover that
      // mislabels this job in the Spark UI.
      .setAppName("TestCorrect")
    val sc = new SparkContext(conf)
    try {
      // Generalization: allow input paths from the command line while
      // keeping the original defaults for backward compatibility.
      val pathX = if (args.length > 0) args(0) else "conf\\testCorrectX.txt"
      val pathY = if (args.length > 1) args(1) else "conf\\testCorrectY.txt"

      // Flatten each file of space-separated numbers into an RDD[Double].
      // NOTE(review): toDouble throws NumberFormatException on malformed
      // tokens — inputs are assumed clean, matching the original behavior.
      val rddX = sc.textFile(pathX).flatMap(_.split(" ").map(_.toDouble))
      val rddY = sc.textFile(pathY).flatMap(_.split(" ").map(_.toDouble))

      // Pearson correlation coefficient (the default method).
      val pearsonCorrelation: Double = Statistics.corr(rddX, rddY)
      println(pearsonCorrelation)

      // Spearman rank correlation coefficient.
      val spearmanCorrelation: Double = Statistics.corr(rddX, rddY, "spearman")
      println(spearmanCorrelation)
    } finally {
      // Fixed: the SparkContext was never stopped, leaking the context
      // (and its UI/executor resources) on every run or failure.
      sc.stop()
    }
  }

}
