import org.apache.spark.{SparkConf, SparkContext}

/**
 * Word-count driver: reads `in/words.txt`, splits each line into words on a
 * fixed set of delimiters, counts occurrences per word, and prints one line
 * per word in the form `"word"出现了N次`.
 *
 * Runs Spark in local mode (`local[*]`); the input path is hard-coded.
 */
object WordCount {

  def main(args: Array[String]): Unit = {

    val sc: SparkContext = new SparkContext(new SparkConf().setMaster("local[*]").setAppName("WordCount"))

    // All delimiters collapsed into one regex character class: space, slash
    // (the `+` quantifier also covers the original "//" entry), and the ASCII
    // and full-width Chinese punctuation. A single split on this class is
    // equivalent to the original successive per-delimiter splits, and
    // `filter(_.nonEmpty)` also drops the empty token produced by blank
    // lines — the old loop let "" through and counted it as a word.
    // Kept as a local String so the flatMap closure captures only a
    // serializable value, not the enclosing object.
    val delimiters = "[ /;:，：！？。]+"

    try {
      val wordCount: Array[String] = sc.textFile("in/words.txt")
        .flatMap(_.split(delimiters).filter(_.nonEmpty))
        .map((_, 1))
        .reduceByKey(_ + _)
        .map(tp => "\"" + tp._1 + "\"出现了" + tp._2 + "次") // e.g. "spark"出现了3次
        .collect()

      println(wordCount.mkString("\n"))
    } finally {
      // Was missing: stop the SparkContext so local resources are released
      // even if the job throws.
      sc.stop()
    }
  }

}
