package spark.core.scala

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkContext, SparkConf}

/**
 * Created by Administrator on 2018/2/10/010.
 */
object ActionOperation {

  /** Entry point: runs the `countByKey` demonstration. */
  def main(args: Array[String]): Unit = {
    countByKey()
  }

  /**
   * Demonstrates the `reduce` action: folds all elements of the RDD
   * into a single value on the driver (here, the sum of 1..9) and prints it.
   */
  def reduce(): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("reduce")
    val sc = new SparkContext(conf)
    try {
      val numbers = sc.parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8, 9))
      // reduce is an action: partition results are combined and returned to the driver.
      val sum: Int = numbers.reduce(_ + _)
      println(sum)
    } finally {
      sc.stop() // always release the SparkContext, even if the job fails
    }
  }

  /**
   * Demonstrates the `collect` action: doubles each element with `map`
   * (a lazy transformation), then pulls the full result array back to
   * the driver and prints each value.
   */
  def collect(): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("collect")
    val sc = new SparkContext(conf)
    try {
      val numbers = sc.parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8, 9))
      val doubled: RDD[Int] = numbers.map(x => x * 2)
      // collect materializes the whole RDD on the driver — fine for small demos,
      // dangerous for large datasets.
      doubled.collect().foreach(println)
    } finally {
      sc.stop() // always release the SparkContext, even if the job fails
    }
  }

  /**
   * Demonstrates the `count` action: doubles each element, then counts
   * the elements of the resulting RDD and prints the total.
   */
  def count(): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("count")
    val sc = new SparkContext(conf)
    try {
      val numbers = sc.parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8, 9))
      val doubled: RDD[Int] = numbers.map(x => x * 2)
      val total: Long = doubled.count()
      println(total)
    } finally {
      sc.stop() // always release the SparkContext, even if the job fails
    }
  }

  /**
   * Demonstrates the `take` action: fetches the first three elements of
   * the RDD to the driver and prints them.
   */
  def take(): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("take")
    val sc = new SparkContext(conf)
    try {
      val numbers = sc.parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8, 9))
      val firstThree: Array[Int] = numbers.take(3)
      firstThree.foreach(println)
    } finally {
      sc.stop() // always release the SparkContext, even if the job fails
    }
  }

  /**
   * Demonstrates the `countByKey` action: counts how many pair elements
   * share each key ("class1" -> 2, "class2" -> 2) and prints the map.
   */
  def countByKey(): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("countByKey")
    val sc = new SparkContext(conf)
    try {
      val students = Array(
        ("class1", "tom"),
        ("class1", "lee"),
        ("class2", "hello"),
        ("class2", "sss"))
      val studentsRDD = sc.parallelize(students)
      // countByKey returns a Map[String, Long] on the driver.
      val countsPerClass = studentsRDD.countByKey()
      println(countsPerClass)
    } finally {
      sc.stop() // always release the SparkContext, even if the job fails
    }
  }

}
