//package spark
//
//import java.io.File
//
//import org.apache.hadoop.conf.Configuration
//import org.apache.hadoop.fs.{FileSystem, Path}
//
///**
//  * Implicit conversion enriching a path String with [[Util]], whose
//  * `delete` removes an existing HDFS (and intended local) output
//  * directory before a job writes to it.
//  */
//object MyPredef {
//
//	implicit def delete(path:String) = new Util(path)
//
//}
//
//
//class Util(path: String) {
//	val configuration: Configuration = new Configuration()
//	val fs: FileSystem = FileSystem.get(configuration)
//	val outPath: Path = new Path(path)
//
//	val file = new File(path)
//
//	def delete() = {
//		if (fs.exists(outPath)) {
//			fs.delete(outPath, true)
//			println("HDFS输出目录已经删除:" + outPath.toString)
//		}
//
//		// NOTE(review): this branch checks the *local* file (`file.exists()`)
//		// but still calls fs.delete(outPath, true) — the HDFS delete — instead
//		// of file.delete(). Presumably a copy-paste bug; fix before uncommenting.
//		if(file.exists()){
//			fs.delete(outPath, true)
//			println("本地输出目录已经删除:" + outPath.toString)
//		}
//	}
//
//	implicit def client(path: String) =  new Util(path)
//}
////object Util{
////	implicit def delete(path: String) = {
////		val configuration: Configuration = new Configuration()
////		val fs: FileSystem = FileSystem.get(configuration)
////		val outPath: Path = new Path(path)
////		if (fs.exists(outPath)) {
////			fs.delete(outPath, true)
////			println("输出目录已经删除:" + outPath.toString)
////		}
////	}
////
////}
//
