package com.lenxia.spark.scala.learn.advance

/**
  * Author : Lenxia
  * Created: 2017/5/3
  * Updated: 2017/5/3
  * Version: 0.0.0
  * Contact: 2219708253@qq.com
  */
object WordCountDemo {

  /**
    * Counts occurrences of each whitespace-separated word across the input lines.
    *
    * @param lines input lines, each possibly containing several space-separated words
    * @return map from word to its number of occurrences; empty input yields an empty map
    */
  def countWords(lines: Array[String]): Map[String, Int] =
    lines
      .flatMap(_.split(" "))
      .groupBy(identity)
      .map { case (word, occurrences) => (word, occurrences.length) }

  def main(args: Array[String]): Unit = {
    val arr = Array("hadoop java", "scala spark", "python dj")
    // NOTE: the original attempt `words.map(_=>(_,1))` does not mean `w => (w, 1)`:
    // each underscore is a *separate* placeholder, so it builds a function that
    // returns another function. The correct pairing form is `word => (word, 1)`
    // (or `.map((_, 1))`); here we count via the helper instead.
    countWords(arr).foreach { case (word, count) => println(s"$word: $count") }
  }
}
