package com.tcy

import org.apache.spark.{SparkConf, SparkContext}

object App {

  /** Concatenates all strings in `x` into a single string (no separator).
    *
    * @param x the strings to join
    * @return the concatenation of all elements, "" for an empty array
    */
  def foo(x: Array[String]): String = x.mkString

  /** Entry point: runs a word count over `/demo/README.md` and prints each
    * (word, count) pair, then demonstrates `parallelize` on a local array.
    *
    * Master URL and app name are expected to be supplied externally
    * (e.g. via `spark-submit`) since `SparkConf` is left unconfigured.
    */
  def main(args: Array[String]): Unit = {

    // Hadoop configuration — without this, local mode may fail in some setups.
    // Kept for reference:
    //    val hadoopConf = new Configuration()
    //    hadoopConf.setBoolean("fs.hdfs.impl.disable.cache", true)
    //    val fileSystem = FileSystem.get(hadoopConf)

    // Spark configuration.
    // To run on a YARN cluster instead, set the master explicitly:
    //   val conf = new SparkConf().setAppName("wordCount").setMaster("yarn-cluster")
    val conf = new SparkConf() // local mode

    val sc = new SparkContext(conf)
    try {
      // Classic word count: split on spaces, pair each word with 1,
      // sum the counts per word, and print the collected results.
      sc.textFile("/demo/README.md", 1)
        .flatMap(_.split(" "))
        .map(word => (word, 1))
        .reduceByKey(_ + _)
        .collect()
        .foreach(println)

      val data2 = Seq(1, 2, 3)
      println(data2)

      // Distribute a local array across the cluster as an RDD.
      val data = Array(1, 2, 3, 4, 5)
      val distData = sc.parallelize(data)
      //    distData.collect().foreach(println)
      //    distData.saveAsTextFile("/demo/spark-demo")
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}