package cn.wangjie.spark.source

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Minimal demo of reading a text file from the file system with Spark Core.
 *
 * Note: `textFile` is a lazy transformation; since no action is ever invoked
 * on `inputRDD`, this application never actually reads the file before the
 * context is stopped.
 */
object SparkFileSystemTest {

  def main(args: Array[String]): Unit = {

    // 1. Build the SparkContext for this application.
    //    App name is derived from the object name (drop the trailing "$"
    //    that the Scala compiler appends to singleton objects).
    val sparkConf = new SparkConf()
      .setAppName(this.getClass.getSimpleName.stripSuffix("$"))
      .setMaster("local[2]")
    val sc: SparkContext = SparkContext.getOrCreate(sparkConf)

    /*
      Signature for reference:
      def textFile(
          path: String,
          minPartitions: Int = defaultMinPartitions
      ): RDD[String]
     */
    // 2. Declare the input RDD with an explicit minimum partition count.
    //    Declared but never consumed in this demo, so no job is triggered.
    val inputRDD: RDD[String] = sc.textFile("/datas/wordcount.data", minPartitions = 2)

    // 3. Application finished: release Spark resources.
    sc.stop()
  }

}
