package Demo1

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by lenovo on 2017/8/25.
  */
/**
  * Small Spark demo: reads a local text file, splits lines into words,
  * decorates them, and prints results both from the RDD and after collect().
  *
  * Run with a local master; requires D:\test.txt to exist.
  */
object test4 {
  def main(args: Array[String]): Unit = {
    // Windows-only workaround so Hadoop can locate winutils.exe.
    System.setProperty("hadoop.home.dir", "E://hadoop-liyadong//hadoop-2.7.1")
    val sparkConf = new SparkConf()
      .setAppName("test4")
      .setMaster("local[2]")
      .set("spark.testing.memory", "2147480000")
    val sc = new SparkContext(sparkConf)
    try {
      val line = sc.textFile("D:\\test.txt")

      // Each word gets a trailing "*" appended.
      val a = line.flatMap(_.split(" ")).map(_ + "*")

      // NOTE: foreach on an RDD prints on the executors; fine for local[2].
      // Each element already ends in "*", so this prints "word**".
      a.foreach(str => println(str + "*"))

      // BUG FIX: the original called .foreach on the reduced String, which
      // iterated over its *characters* (one println per char). Print the
      // concatenated result once instead.
      if (!a.isEmpty()) {
        val c = a.reduce(_ + _)
        println(c)
      }

      // BUG FIX: the original did `b1 + "&"` where b1 is Array[String],
      // which concatenates the array's identity toString
      // ("[Ljava.lang.String;@...") with "&". Join the words first, then
      // append the marker to each line.
      val b = line.map { str =>
        val words = str.split(" ")
        words.mkString(" ") + "&"
      }.collect()

      // BUG FIX: Array.toString prints the JVM identity string; use
      // mkString to show the actual contents.
      println(b.mkString(", "))
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

}
