package cn.spark.study.core

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._

object LineCounts {

  /**
   * Counts how many times each distinct line appears in a local text file
   * and prints the per-line counts to the console.
   *
   * @param args optional; args(0) may override the input file path
   *             (falls back to the original hard-coded Windows path).
   */
  def main(args: Array[String]): Unit = {
    // Configure Spark to run locally on a single thread.
    val conf = new SparkConf()
      .setAppName("LineCounts")
      .setMaster("local")
    // Create the SparkContext (entry point to the Spark cluster).
    val sc = new SparkContext(conf)
    try {
      // Input path: prefer a command-line argument; keep the original
      // hard-coded path as the backward-compatible default.
      // NOTE(review): the default is machine-specific — confirm it still exists.
      val path =
        if (args.nonEmpty) args(0)
        else "C://Users//e20160504//Desktop//spark.txt"

      // Read the local file into an RDD with a minimum of 5 partitions.
      val lines = sc.textFile(path, 5)

      // Map each line to (line, 1), then sum the 1s per distinct line.
      val lineCounts = lines.map(line => (line, 1)).reduceByKey(_ + _)

      // Print each (line, count) pair. With master "local" the executor
      // runs in this JVM, so the output appears on this console.
      // Fixed: original message read "appred" with no space after the key.
      lineCounts.foreach { case (line, count) =>
        println(s"key $line appeared $count times")
      }
    } finally {
      // Always release Spark resources, even if the job throws.
      sc.stop()
    }
  }
}