package org.hadoop.spark
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Minimal Spark word-count driver for local experimentation.
 *
 * Reads a local text file, logs its line count, then writes
 * tab-separated "word<TAB>count" lines to an output directory.
 * NOTE(review): input/output paths and `local[2]` master are
 * hard-coded; consider taking them from `args` for real use.
 */
object WordCount {
  def main(args: Array[String]): Unit = {
    // Configure the application; SparkConf setters return `this`, so chain them.
    val conf: SparkConf = new SparkConf()
      .setAppName("WordCount")
      .setMaster("local[2]")

    val sc: SparkContext = new SparkContext(conf)
    try {
      // Read the input as an RDD of lines, forcing at least 2 partitions.
      val rdd: RDD[String] = sc.textFile("file:///D:/a/a.txt", minPartitions = 2)

      // `count` is an action: it triggers a job and returns the line count.
      val lineCount: Long = rdd.count()
      println(s"size is:$lineCount")

      // Classic word count: split on whitespace, pair each word with 1,
      // sum per key, then format as "word<TAB>count" and write out.
      rdd.flatMap(_.split("\\s+"))
        .map(word => (word, 1))
        .reduceByKey(_ + _)
        .map { case (word, count) => word + "\t" + count }
        .saveAsTextFile("file:///D:/a/2")
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}
