package com.etc

import org.apache.spark.mllib.feature.{HashingTF, IDF}
import org.apache.spark.mllib.linalg
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object TF_IDF {

  /** Computes TF-IDF weights for a whitespace-tokenized text file and
    * prints one sparse vector per document.
    *
    * Usage: TF_IDF [inputPath]
    * The input path defaults to "a.txt" to preserve the original behavior.
    */
  def main(args: Array[String]): Unit = {
    // Build the Spark configuration: local execution, app name shown in the UI.
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("TF_IDF")
    val sc = new SparkContext(conf)
    try {
      // Optional first CLI argument selects the input file; the default
      // keeps backward compatibility with the hard-coded path.
      val inputPath = if (args.nonEmpty) args(0) else "a.txt"

      // Each line of the file is one document: a sequence of
      // whitespace-separated terms.
      val documents: RDD[Seq[String]] = sc.textFile(inputPath)
        .map(_.split(" ").toSeq)

      // Term frequencies via the hashing trick. Cached because the RDD is
      // traversed twice: once to fit IDF and once for the final transform.
      val hashingTF = new HashingTF()
      val tf = hashingTF.transform(documents).cache()

      // Fit the inverse-document-frequency model on the TF vectors.
      val idf = new IDF().fit(tf)

      // TF-IDF = TF scaled by the fitted IDF weights.
      val tfIdf: RDD[linalg.Vector] = idf.transform(tf)
      tfIdf.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
