package cn.rslee.scala.demos

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

/** Demo comparing `map` and `flatMap` on a pair RDD.
  *
  * Key point illustrated: `flatMap` requires the function to return a
  * TraversableOnce; a String qualifies (it is a sequence of Chars), so
  * flat-mapping to a String emits one Char element per character.
  */
object FlatMapTest {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("wordcount")
    val sc = new SparkContext(conf)
    // Ensure the SparkContext is always released, even if a stage fails.
    try {
      val rdd1 = sc.parallelize(Array(("A", 1), ("B", 2), ("C", 3), ("A", 1), ("A", 1)))

      // test1: print each (key, value) tuple as-is.
      println("test1-----")
      rdd1.foreach(println)

      // test2: identity map — output is identical to test1.
      println("test2-----")
      rdd1.map(x => x).foreach(println)

      // test3: flatMap over the key String — each String is flattened
      // into its characters, so this prints one Char per line.
      println("test3-----")
      rdd1.flatMap(x => x._1).foreach(println)

      // test4: map to the Int value of each tuple.
      println("test4-----")
      rdd1.map(x => x._2).foreach(println)

      // test5: "A" + 1 concatenates to the String "A1", which flatMap
      // then flattens to the Chars 'A' and '1'.
      println("test5-----")
      rdd1.flatMap(x => (x._1 + x._2)).foreach(println)

      // test6: toString of a tuple yields e.g. "(A,1)"; flatMap emits
      // each character of that rendering separately.
      println("test6-----")
      rdd1.flatMap(x => x.toString()).foreach(println)

      // test7: intentionally repeats test4 for side-by-side comparison
      // with the flatMap variants above.
      println("test7-----")
      rdd1.map(x => x._2).foreach(println)
    } finally {
      // Release executors and shut down the context cleanly.
      sc.stop()
    }
  }
}