package cn.spark.study.core

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf

object ActionOperation {
  /** Entry point: build a local SparkContext and run one of the action demos. */
  def main(args: Array[String]): Unit = {
    val sc = getSC()
    // Uncomment exactly one demo at a time:
    //reduce(sc)
    //take(sc)
    //collect(sc)
    countByKey(sc)
  }
  
  //案例五：countByKey
  /**
   * Demo 5: countByKey.
   * Counts occurrences of each key in a pair RDD and prints "key count"
   * for every entry of the resulting local Map.
   */
  def countByKey(sc: SparkContext): Unit = {
    // Tuple literals instead of the verbose Tuple2(...) constructor.
    val list = Array(
      ("kevin", 20),
      ("Jack", 30),
      ("kevin", 40),
      ("Marray", 50)
    )

    // countByKey is an action: it returns a Map[String, Long] to the driver.
    sc.parallelize(list, 1).countByKey().foreach(pair => println(pair._1 + " " + pair._2))
  }
  //案例四：saveAsTextFile 
  /**
   * Demo 4: saveAsTextFile.
   * Writes the RDD's elements as text files to the given HDFS directory.
   * Fix: the URI scheme was misspelled "hafs://"; HDFS URIs use "hdfs://".
   */
  def saveAsTextFile(sc: SparkContext): Unit = {
    val list = Array("Kevin is a good man", "Jack is a good man", "Marray is a good man")
    sc.parallelize(list, 1).saveAsTextFile("hdfs://spark1:9000/saveAsTextFile")
  }
  //案例三： take
  /**
   * Demo 3: take.
   * Fetches the first two elements of the RDD to the driver and prints them.
   */
  def take(sc: SparkContext): Unit = {
    val sentences = Array("Kevin is a good man", "Jack is a good man", "Marray is a good man")
    val firstTwo = sc.parallelize(sentences, 1).take(2)
    firstTwo.foreach(println)
  }
  //案例二：collect
  /**
   * Demo 2: collect.
   * Doubles each number on the cluster, collects the results to the driver,
   * and prints them one per line.
   */
  def collect(sc: SparkContext): Unit = {
    val numbers = Array(1, 2, 3, 4, 5, 6, 7, 8, 9)
    // collect() is an action: it materializes the RDD as a local array.
    val doubled = sc.parallelize(numbers, 1).map(_ * 2).collect()
    doubled.foreach(println)
  }
  
  //案例一：reduce
  /**
   * Demo 1: reduce.
   * Sums the numbers 1..9 with the reduce action and prints the total.
   */
  def reduce(sc: SparkContext): Unit = {
    val numbers = Array(1, 2, 3, 4, 5, 6, 7, 8, 9)
    // reduce is an action returning a plain Int, not an RDD.
    val total = sc.parallelize(numbers, 1).reduce(_ + _)
    println(total)
  }
  
  // 获得 SparkContext
  /**
   * Builds a SparkContext running in local mode.
   * The last expression is the method's value; the explicit `return`
   * keyword is non-idiomatic in Scala and has been removed.
   */
  def getSC(): SparkContext = {
    val conf = new SparkConf()
      .setAppName("ActionOperation")
      .setMaster("local")

    new SparkContext(conf)
  }
  
}