package cn.wangjie.spark.source

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 * Reads many small files from a directory and loads them into an RDD
 * as (filePath, fileContent) pairs.
 */
object SparkWholeTextFileTest {

  /**
   * Entry point: reads every small file under an input directory and loads
   * each one as a single (filePath, fileContent) record in an RDD.
   *
   * @param args optional; args(0) overrides the input directory
   *             (defaults to "/datas/xx/" for backward compatibility)
   */
  def main(args: Array[String]): Unit = {
    // 1. Build the SparkContext instance
    val sc: SparkContext = {
      // 1.a Create the SparkConf and set application properties
      val sparkConf = new SparkConf()
        .setAppName(this.getClass.getSimpleName.stripSuffix("$"))
        .setMaster("local[2]")
      // 1.b getOrCreate reuses an already-running context if one exists
      SparkContext.getOrCreate(sparkConf)
    }

    try {
      // 2. Input directory: allow an override from the command line,
      //    falling back to the original hard-coded path.
      val inputPath: String = args.headOption.getOrElse("/datas/xx/")

      /*
        def wholeTextFiles(
            path: String,
            minPartitions: Int = defaultMinPartitions
        ): RDD[(String, String)]
       */
      // 3. wholeTextFiles pairs each file's path with its entire content,
      //    which avoids the one-partition-per-file overhead that textFile
      //    incurs on directories of many small files.
      val inputRDD: RDD[(String, String)] = sc.wholeTextFiles(inputPath, minPartitions = 3)
    } finally {
      // 4. Always release cluster resources, even if reading fails
      sc.stop()
    }
  }

}
