package com.alison.scala

import org.apache.spark.{SparkConf, SparkContext}

object Apps {

  // Two ways to choose the Spark master:
  //  1. VM options: -Dspark.master=local
  //  2. In main: new SparkConf().setAppName("xx").setMaster("local[*]")
  // local = one local thread, local[x] = x local threads,
  // local[*] = as many threads as available cores.

  /** Word-count entry point: reads a text file, splits each line on spaces,
   *  counts occurrences of every word, and either saves or prints the result.
   *
   *  @param args optional: args(0) = input path, args(1) = output directory.
   *              When absent, falls back to the hard-coded dev input file and
   *              prints the counts to the console (original behavior).
   */
  def main(args: Array[String]): Unit = {
    // Build the Spark configuration (single-threaded local master for dev runs)
    val conf = new SparkConf().setAppName("wc").setMaster("local")

    // Create the SparkContext
    val sc = new SparkContext(conf)

    try {
      // Input path: prefer the command-line argument, fall back to the dev file
      val filePath =
        if (args.nonEmpty) args(0)
        else "D:\\workspace\\lab\\learnbigdata\\learnspark\\sparkcore\\src\\main\\resources\\word.txt"

      // Read the input as an RDD of lines
      val lines = sc.textFile(filePath)

      // Flatten each line into individual words
      val words = lines.flatMap(_.split(" "))

      // Pair every word with an initial count of 1
      val k2v = words.map((_, 1))

      // Sum the counts per word
      val result = k2v.reduceByKey(_ + _)

      // Save to args(1) when given; otherwise print to the driver console.
      // collect() pulls results to the driver first — a bare rdd.foreach(println)
      // would print on the executors and be invisible outside local mode.
      if (args.length > 1) result.saveAsTextFile(args(1))
      else result.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails
      sc.stop()
    }
  }

}
