package cn.rslee.scala.spark.demos

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import java.io.PrintWriter
import java.io.FileWriter

/**
 * Word-count example modeled after the Spark official demo.
 *
 * Usage: WordCount [master] [inputPath] [outputPath]
 * Falls back to hard-coded local-mode defaults when no arguments are given.
 *
 * @author rslee
 */
object WordCount {

  def main(args: Array[String]): Unit = {
    // 1. Resolve parameters as a single expression: CLI args when supplied,
    //    otherwise local defaults (was a conditionally-mutated var).
    val param =
      if (args.nonEmpty) args
      else Array("local[2]", "D:/logs/doc.txt", "D:/tmp/result.tmp")

    // 2. Build the SparkConf and SparkContext.
    val conf = new SparkConf().setAppName("WordCountApp").setMaster(param(0))
    val sc = new SparkContext(conf)

    try {
      // 3. Load the input text file.
      val textFile = sc.textFile(param(1))

      // 4. Split lines on spaces, strip commas/colons from each word,
      //    and count occurrences per cleaned word.
      val wordCount = textFile
        .flatMap(_.split(" "))
        .map { word =>
          val cleaned = word.replaceAll(",", "").replaceAll(":", "")
          (cleaned, 1)
        }
        .reduceByKey(_ + _)

      // 5./6. Write "word,count" lines to the output file and echo each
      //       (word, count) tuple to stdout. try/finally guarantees the
      //       writer is closed even if a write fails (the original leaked
      //       the file handle on error).
      val writer = new PrintWriter(new FileWriter(param(2), false))
      try {
        wordCount.collect().foreach { case (word, count) =>
          writer.println(word + "," + count)
          println((word, count))
        }
      } finally {
        writer.close()
      }
    } finally {
      // Release the Spark context even on failure (missing in the original).
      sc.stop()
    }
  }
}