package RDD

import org.apache.spark.{SparkConf, SparkContext}
import org.junit.Test

/**
 * Exercises for the basic RDD transformations `map`, `flatMap` and
 * `reduceByKey`, each run against a local Spark master.
 *
 * NOTE(review): JUnit 4 creates a fresh instance of this class per test
 * method, so every test gets its own SparkContext. Only `ReduceByKeyTest`
 * stops its context explicitly; the other tests rely on JVM shutdown to
 * release theirs — consider moving `sc.stop()` into an `@After` hook.
 */
class Map_flatMap_reduceByKey {
  val conf = new SparkConf()
    .setMaster("local[6]")
    .setAppName("Create_RDD")
  val sc = new SparkContext(conf)

  /** map: multiply every element of the RDD by 10 and print the results. */
  @Test
  def mapTest(): Unit = {
    // Create the source RDD.
    val rdd1 = sc.parallelize(Seq(1, 2, 3))
    // Print each element on the executors. `foreach` returns Unit, so it must
    // not itself be wrapped in println (the original printed a spurious "()").
    rdd1.foreach(println(_))
    // Apply map to scale each element by 10, collect to the driver and print.
    // The chain ends in a side effect (Unit), so binding it to a val as the
    // original did (`val rdd2 = ...`) was misleading and has been dropped.
    rdd1.map(item => item * 10)
      .collect()
      .foreach(println(_))
  }

  /** flatMap: split each sentence on spaces into individual words. */
  @Test
  def flatMapTest(): Unit = {
    // Create the source RDD of sentences.
    val rdd1 = sc.parallelize(Seq("hello world", "hi Jack"))
    // Split on whitespace, flattening the word lists into one RDD, then print.
    rdd1.flatMap(_.split(" "))
      .collect()
      .foreach(println(_))
  }

  /** reduceByKey: classic word count over a few sentences. */
  @Test
  def ReduceByKeyTest(): Unit = {
    // Create the source RDD of sentences.
    val rdd1 = sc.parallelize(Seq("hello world", "hi world", "hello Rom"))
    // Word count: split into words, pair each word with 1, sum per key.
    rdd1.flatMap(_.split(" "))
      .map(item => (item, 1))
      .reduceByKey((curr, agg) => curr + agg)
      .collect()
      .foreach(println(_))
    // Release this test's SparkContext.
    sc.stop()
  }
}
