package com.shujia.spark.util

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

object HdfsUtil {
  /**
   * Recursively deletes the given path from HDFS if it exists.
   *
   * If the path does not exist this is a no-op (Hadoop's
   * `FileSystem.delete` simply returns `false` in that case), so no
   * separate existence check is needed — the check-then-delete pattern
   * would cost an extra NameNode round-trip and introduce a TOCTOU race.
   *
   * @param path the HDFS path to delete
   */
  def delete(path: String): Unit = {
    val configuration = new Configuration()
    // NOTE(review): FileSystem.get returns a process-wide cached instance by
    // default, so we deliberately do not close it here — closing would break
    // other users of the same cached FileSystem.
    val fileSystem: FileSystem = FileSystem.get(configuration)
    // Build the Path once instead of constructing it twice.
    val target = new Path(path)
    // recursive = true: remove directories and their contents as well.
    fileSystem.delete(target, true)
  }

}
