package spark.mllib

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession

/**
  * Created by liuwei on 2017/7/13.
  */
/**
  * Minimal Spark ML PCA demo: builds a tiny 3-row DataFrame of 5-dimensional
  * vectors, fits a PCA model with k = 5, and prints the projected features.
  */
object PCATest {

  def main(args: Array[String]): Unit = {
    import org.apache.spark.ml.feature.PCA
    import org.apache.spark.ml.linalg.Vectors

    // Dense vector [0.0, 1.0, 2.0, 3.0, 4.0]; tabulate replaces the
    // original var + index loop (i % 10 is kept for identical values).
    val arr1: Array[Double] = Array.tabulate(5)(i => (i % 10).toDouble)
    val v1 = Vectors.dense(arr1)

    // Two duplicate dense rows plus one sparse row — enough to exercise
    // both vector representations in the same column.
    val data = Array(
      Vectors.sparse(5, Seq((1, 1.0), (3, 7.0))),
      v1,
      v1
    )

    // SparkSession is the single entry point since Spark 2.x; it creates
    // the underlying SparkContext itself, so no explicit SparkConf /
    // SparkContext construction is needed.
    val spark = SparkSession.builder
      .appName("PCATest")
      .master("local[8]")
      .getOrCreate()
    try {
      val df = spark.createDataFrame(data.map(Tuple1.apply)).toDF("features")
      println(df.schema)
      df.columns.foreach(println)

      // Fit PCA keeping all 5 components (k equals the input dimension,
      // so this is a rotation of the feature space, not a reduction).
      val pca = new PCA()
        .setInputCol("features")
        .setOutputCol("pcaFeatures")
        .setK(5)
        .fit(df)

      val result = pca.transform(df).select("pcaFeatures")
      result.show(false) // false = do not truncate the printed vectors
    } finally {
      // Always shut the local Spark runtime down, even if fitting fails.
      spark.stop()
    }
  }

}
