package HadoopWithSpark.homework

import java.io.{FileWriter, PrintWriter}

import scala.io.Source

/**
 * @Author TheKernel
 * @Date 2019/10/5 3:44 PM
 * @Version 1.0
 * @Description Simple word-count: reads a text file, counts word occurrences, writes "word -> count" lines to an output file.
 */
object MyWordCount {

  // Accumulated word frequencies. Kept as an object-level mutable `var` to
  // preserve the original public interface; NOTE: not thread-safe.
  var wordMap: Map[String, Int] = Map.empty[String, Int]

  /**
   * Folds every word of `tuple._1` into `wordMap`, incrementing its count.
   * Empty tokens (e.g. produced by splitting blank input) are skipped.
   * The Int half of the tuple is ignored; it is retained only for
   * interface compatibility with existing callers.
   */
  def putMap(tuple: (Array[String], Int)): Unit = {
    for (word <- tuple._1 if word.nonEmpty) {
      // Direct update via getOrElse replaces the original remove-then-re-add dance.
      wordMap += (word -> (wordMap.getOrElse(word, 0) + 1))
    }
  }

  /**
   * Entry point. Reads the input file line by line, counts words, and writes
   * one "word -> count" line per distinct word to the output file.
   *
   * Paths may be supplied as args(0) (input) and args(1) (output); when absent,
   * the original hard-coded paths are used for backward compatibility.
   */
  def main(args: Array[String]): Unit = {
    val inputPath = if (args.length > 0) args(0) else "E:\\IDEACode\\MyScala\\src\\1.txt"
    val outputPath = if (args.length > 1) args(1) else "E:\\IDEACode\\MyScala\\src\\2.txt"

    val inputFile = Source.fromFile(inputPath)
    try {
      // Split on runs of whitespace ("\\s+") so consecutive spaces or tabs
      // do not yield empty "words" (the original "\\s" split did).
      inputFile.getLines().foreach(line => putMap((line.split("\\s+"), 1)))
    } finally {
      inputFile.close() // the original leaked this Source handle
    }

    val pw = new PrintWriter(new FileWriter(outputPath))
    try {
      // Iterate entries directly instead of looking each key back up.
      for ((word, count) <- wordMap) {
        pw.println(word + " -> " + count)
      }
    } finally {
      pw.close()
    }

    println("My WordCount is over !!!")
  }
}
