package pxene

import scala.reflect.runtime.universe

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.ml.feature.HashingTF
import org.apache.spark.ml.feature.IDF
import org.apache.spark.ml.feature.Tokenizer
import org.apache.spark.mllib.classification.NaiveBayesModel
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.sql.Row

/**
 * @author zhengyi
 */
object ModelUse4Local {

  /**
   * One input record.
   *
   * @param category the class label (field 0 of the tab-separated input line)
   * @param text     the pre-segmented document, words separated by spaces
   */
  case class RawDataRecord(category: String, text: String)

  /**
   * Classifies a local text file with a previously trained NaiveBayes model.
   *
   * Input lines are expected to be "category<TAB>segmented text". The text is
   * tokenized, hashed to term frequencies, rescaled with IDF, and fed to the
   * loaded model; the output is (predictedLabel, originalCategory) pairs.
   *
   * @param args args(0) = local input file path, args(1) = local output directory
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()

    val sparkContext = new SparkContext(sparkConf)
    val sqlContext = new org.apache.spark.sql.SQLContext(sparkContext)
    import sqlContext.implicits._

    // Map the raw data into a DataFrame: column `category` is the class label,
    // column `text` is the space-separated segmented words.
    val srcDF = sparkContext.textFile("file://" + args(0)).map { line =>
      val fields = line.split("\t")
      // BUG FIX: the original passed the whole line as the category; the label
      // is the first tab-separated field, per the RawDataRecord definition.
      RawDataRecord(fields(0), fields(1))
    }.toDF()

    // Split the segmented text into an array of words.
    val tokenizer = new Tokenizer().setInputCol("text").setOutputCol("words")
    val wordsData = tokenizer.transform(srcDF)
    // NOTE(review): numFeatures must match the value used when the model was
    // trained, otherwise feature indices will not line up — confirm 50000.
    val hashingTF = new HashingTF().setInputCol("words").setOutputCol("rawFeatures").setNumFeatures(50000)
    val featurizedData = hashingTF.transform(wordsData)

    // Compute TF-IDF values.
    val idf = new IDF().setInputCol("rawFeatures").setOutputCol("features")
    val idfModel = idf.fit(featurizedData)
    val rescaledData = idfModel.transform(featurizedData)

    // Convert to the (label, mllib Vector) pairs the model API expects.
    // Renamed from `trainDataRdd`: this RDD holds the data to be classified,
    // not training data.
    val testDataRdd = rescaledData.select($"category", $"features").map {
      case Row(label: String, features: Vector) =>
        (label, Vectors.dense(features.toArray))
    }

    // Load the trained model.
    // NOTE(review): model path is hard-coded; consider passing it via args.
    val model = NaiveBayesModel.load(sparkContext, "/user/chenjinghui/model2")

    // Predict each record; emit (prediction, originalLabel) pairs.
    val testPredictionAndLabel = testDataRdd.map { case (label, features) =>
      (model.predict(features), label)
    }
    testPredictionAndLabel.saveAsTextFile("file://" + args(1))

    sparkContext.stop()
  }
}