import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.{SparkConf, SparkContext}


object WordCount {
  def main(args: Array[String]): Unit = {
    val filePath = "hdfs:///test/res"
    val sparkConf = new SparkConf().setAppName("WordCount")
    val sc = new SparkContext(sparkConf)

    // Delete the output directory if it already exists; saveAsTextFile fails when the path is present.
    val hdfs = FileSystem.get(sc.hadoopConfiguration)
    if (hdfs.exists(new Path(filePath))) hdfs.delete(new Path(filePath), true)
    // Split each line into words, count the occurrences of each word,
    // and write the results back to HDFS as "word,count" lines.
    sc.textFile("hdfs:///test/test")
      .flatMap(line => line.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
      .map { case (word, count) => s"$word,$count" }
      .saveAsTextFile(filePath)

    sc.stop()
  }
}
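
// A minimal usage sketch, assuming the job is packaged into a jar and submitted to YARN
// (the jar name "word-count.jar" and the master setting are assumptions; adjust to your cluster):
//   spark-submit --class WordCount --master yarn word-count.jar
// The results then appear under hdfs:///test/res as "word,count" lines.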