import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by jenrey on 2018/5/22 9:18
  */
object t {
  /**
    * Small Spark demo: parallelizes a single immutable Map into an RDD,
    * collects it back to the driver, flattens it into key/value pairs,
    * and shows safe Option access.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("t")
    val sc = new SparkContext(conf)

    // Build the map immutably in one expression instead of a `var`
    // mutated with repeated `+=`.
    val map1: Map[String, Int] = Map("qwe" -> 1, "asd" -> 2, "zxc" -> 3)

    val rdd: RDD[Map[String, Int]] = sc.parallelize(Seq(map1))

    // collect() brings every element of the RDD back to the driver.
    // Prints: Map(qwe -> 1, asd -> 2, zxc -> 3)
    val stringToInts: Array[Map[String, Int]] = rdd.collect()
    stringToInts.foreach(println)

    // foreach on the RDD runs on the executors; with master "local"
    // the output still appears on this console.
    // Prints: Map(qwe -> 1, asd -> 2, zxc -> 3)
    rdd.foreach(println)

    /**
      * flatten collapses the two-dimensional structure (an Array of Maps)
      * into a flat sequence of key/value pairs:
      * (qwe,1)
      * (asd,2)
      * (zxc,3)
      */
    stringToInts.flatten.foreach(println)

    val maybeInt: Option[Int] = map1.get("zxc")
    // Some(3)
    println(maybeInt)
    // 3 — use getOrElse instead of Option.get: .get throws
    // NoSuchElementException on None, while getOrElse is safe and
    // behaves identically here because the key is present.
    println(maybeInt.getOrElse(0))

    // Release Spark resources before the JVM exits; the original
    // leaked the SparkContext.
    sc.stop()
  }
}
