package org.ykx.mllib

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object Test {

  /**
   * Driver entry point: reads every file under an HDFS directory as
   * (path, content) pairs via `wholeTextFiles` and prints each pair.
   *
   * @param args optional; args(0) overrides the default input path.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("SparkMllib")
    val sc = new SparkContext(conf)
    try {
      // Allow the input directory to be supplied on the command line;
      // fall back to the original hard-coded HDFS location.
      val path = args.headOption.getOrElse(
        "hdfs://10.10.61.192:8020/apps/hive/warehouse/test123")

      // 3 = suggested minimum number of partitions for the input.
      val rdd = sc.wholeTextFiles(path, 3)

      // NOTE(review): foreach(println) prints on the executors; that is
      // visible here only because the master is local[*].
      rdd.foreach(println)

      // val file = rdd.map {
      //   case (fileName, content) =>
      //     println(fileName)
      //     fileName.replace("file:", "")
      // }
      // println(">>>>>>>>>>>>>>>>>>>>[file count]: " + file.count())
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}