package action.RDD创建操作

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Example: load data into an RDD with `SparkContext.textFile`, either from a
  * local classpath resource or (commented out) from HDFS.
  *
  * @author wdmcode@aliyun.com
  * @version 1.0.0
  * @date 2018/11/8
  */
object SparkTextFile {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    // Fixed: was "SparkParallelize", copy-pasted from a sibling example.
    conf.setAppName("SparkTextFile")
    // Local mode with 2 worker threads.
    conf.setMaster("local[2]")

    val spark = new SparkContext(conf)
    try {
      // Resolve hello.txt relative to the classpath root (e.g. src/main/resources).
      val filePath = SparkTextFile.getClass.getResource("/") + "hello.txt"
      val rdd = spark.textFile(filePath)

      // Word count with key-value pairs: split lines on whitespace, map each
      // word to (word, 1), then sum counts per key. Kept commented out, as in
      // the original example.
      //    val rddSplit = rdd.flatMap(line => line.split("\\s+"))
      //
      //    val map = rddSplit.map(s => (s, 1))
      //    val count = map.reduceByKey((a, b) => a + b)
      //    count.foreach(s => println(s))

      // Loading the same file from HDFS instead:
      //    val rdd2 = spark.textFile("hdfs:///hello.txt")
    } finally {
      // Release the SparkContext even if the job above throws.
      spark.stop()
    }
  }

}
