/**
  * Created by Alex on 2016/7/26.
  */
import scala.collection.mutable.HashMap

/**
  * Word-count demo: counts token frequencies across a sequence of
  * pre-segmented documents and prints each (word, count) pair.
  */
object TestSeg {
  def main(args: Array[String]): Unit = {
    // Each document is an array of already-segmented tokens.
    val docs = Seq(
      Array("我的", "小苹果", "星星", "香蕉"),
      Array("苹果", "公司", "香蕉")
    )

    // Fold one document's tokens into the running count map.
    // Immutable accumulator: `updated` returns a new map, so the result of
    // the fold must be used (it is, below).
    def merge(counts: Map[String, Int], tokens: Array[String]): Map[String, Int] =
      tokens.foldLeft(counts) { (acc, word) =>
        acc.updated(word, acc.getOrElse(word, 0) + 1)
      }

    // BUG FIX: the original called `docs.aggregate(wordCount)(merge, combo)`
    // and discarded the return value, relying on in-place mutation of a shared
    // mutable HashMap; `aggregate` is also deprecated/removed in Scala 2.13.
    // Use foldLeft and bind its result directly.
    val wordCount = docs.foldLeft(Map.empty[String, Int])(merge)

    for ((k, v) <- wordCount) {
      // BUG FIX: original format "%s,%d" printed all pairs with no separator,
      // producing one run-together line; %n adds a platform line terminator.
      printf("%s,%d%n", k, v)
    }
  }
}
