package demo

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates the difference between `map` and `flatMap` on an RDD of text lines:
 *
 *  - `map(line => (line, 1))`       — one `(line, 1)` pair per input line
 *  - `map(line => line.split(" "))` — one `Array[String]` per line; `println`
 *                                     on an array shows only its JVM reference
 *                                     (`[Ljava.lang.String;@...`), not its contents
 *  - `flatMap(line => line.split(" "))` — the per-line arrays are flattened,
 *                                     so each word becomes its own element
 */
object DemoOne02 {

  /**
   * Entry point. Runs the three transformations against a small text file
   * and prints each collected result.
   *
   * @param args optional; `args(0)` overrides the input file path
   *             (defaults to `src/main/scala/data/join1.txt`).
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("testMapAndFlatMap").setMaster("local[*]")
    val context = new SparkContext(conf)

    try {
      // Forward slashes are accepted on every platform (the original hard-coded
      // Windows backslashes); allow overriding the path from the command line.
      val inputPath = if (args.nonEmpty) args(0) else "src/main/scala/data/join1.txt"
      val txt = context.textFile(inputPath)

      // map: one (line, 1) pair per input line.
      txt.map(x => (x, 1)).collect().foreach(println)

      // map over split: each line becomes an Array[String]; println prints the
      // array's toString, not its elements — this is the pedagogical contrast
      // with flatMap below.
      // NOTE(review): the original used split("") which splits into single
      // characters — almost certainly a typo for split(" ") given the flatMap
      // line and the sample output.
      txt.map(x => x.split(" ")).collect().foreach(println)

      // flatMap: the per-line word arrays are flattened into individual words.
      txt.flatMap(x => x.split(" ")).collect().foreach(println)

      /**
       * Sample output:
       *
       * (张三 100,1)
       * (李四 90,1)
       * (王五 95,1)
       * (大毛 80,1)
       * (李建 85,1)
       *
       * [Ljava.lang.String;@4c2af006
       * [Ljava.lang.String;@44032fde
       * [Ljava.lang.String;@7b676112
       * [Ljava.lang.String;@5578be42
       * [Ljava.lang.String;@4e49ce2b
       *
       * 张三
       * 100
       * 李四
       * 90
       * 王五
       * 95
       * 大毛
       * 80
       * 李建
       * 85
       */
    } finally {
      // Always release the SparkContext; the original leaked it.
      context.stop()
    }

  }

}
