package week5.work1

import org.apache.spark.{SparkConf, SparkContext}

object InvertIndex {
  /** Builds an inverted index over a small document file and prints it.
    *
    * Expected input format, one document per line:
    * `<docName>. ... "<indexed text>" ...` — the document name is everything
    * before the first '.', and the words to index are taken from the first
    * double-quoted segment of the remainder.
    *
    * Output: one line per word, `(word, Iterable[(docName, count)])`.
    *
    * @param args optional; args(0) overrides the default input path
    */
  def main(args: Array[String]): Unit = {
    // Allow the input path on the command line; keep the original hard-coded
    // path as the default so existing invocations still work.
    val inputPath = if (args.nonEmpty) args(0) else "D:\\DL\\index.txt"

    val conf = new SparkConf().setAppName("InvertIndex").setMaster("local")
    val context = new SparkContext(conf)
    try {
      // (docName, word) pairs extracted from every line of the input file.
      val wordRDD = context.textFile(inputPath)
        .flatMap { line =>
          // split(sep, 2): at most 2 pieces — doc name before the first '.',
          // and the rest of the line after it.
          val parts = line.split("\\.", 2)
          val textName = parts(0)
          // The indexed text is the first double-quoted segment of the rest.
          parts(1).split("\"")(1).split(" ").map(word => (textName, word))
        }

      // word -> all (docName, occurrenceCount) pairs for that word.
      val invertedIndex = wordRDD
        .map { case (textName, word) => ((word, textName), 1) }
        .reduceByKey(_ + _)
        .map { case ((word, textName), cnt) => (word, (textName, cnt)) }
        .groupByKey()

      // collect() pulls the full result to the driver — fine for small inputs.
      invertedIndex.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job throws.
      context.stop()
    }
  }
}
