package pxene.test.kmeans
import org.apache.log4j.{ Level, Logger }
import org.apache.spark.{ SparkConf, SparkContext }
import org.apache.spark.mllib.clustering.KMeans
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.clustering.KMeansModel

object KmeansModelUse {
  /**
   * Loads a previously trained KMeans model and prints the predicted cluster
   * for each labeled point in the input file.
   *
   * Input format: one record per line, "<id>\t<space-separated feature values>".
   * Output: (id, clusterIndex) pairs printed to stdout.
   */
  def main(args: Array[String]): Unit = {
    // Suppress noisy framework logging on the terminal.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    // Set up a local Spark execution environment.
    val conf = new SparkConf().setAppName("Kmeans").setMaster("local[1]")
    val sc = new SparkContext(conf)
    try {
      // Load the trained model saved by the training job.
      val model = KMeansModel.load(sc, "file:///home/chenjinghui/kmeans_model")

      // Load the dataset to score.
      val data = sc.textFile("file:///home/chenjinghui/kmeans_data2.txt", 1)

      // Parse each line into (id, featureVector). Split on tab once per line
      // instead of twice as before.
      val parsedData = data.map { s =>
        val fields = s.split('\t')
        (fields(0), Vectors.dense(fields(1).split(' ').map(_.toDouble)))
      }

      // Predict the cluster for every point and print (id, cluster).
      parsedData.map { case (id, vec) => (id, model.predict(vec)) }.foreach(println)
    } finally {
      // Always release the SparkContext, even if loading or prediction fails;
      // the original code leaked it.
      sc.stop()
    }
  }
}