import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}

import scala.collection.mutable
import scala.util.Try
import scala.util.control.NonFatal

// upstream: git@gitee.com:yuzhijian28/HdfsPath.git
object HdfsTempPath {

  /** Derives a target partition path from an HDFS file path.
    *
    * Expects a "/"-separated path whose 8th segment (index 7) is the marker
    * file name and whose 5th segment (index 4) is a `key=value` partition
    * component. Returns the rebuilt `/spark/emp/data/d=<day>/h=<h>/<n>`
    * path, or `null` when the path does not match or any piece fails to
    * parse (null return kept for source compatibility; prefer [[subStrOpt]]).
    */
  def subStr(str: String): String = subStrOpt(str).orNull

  /** Option-returning variant of [[subStr]]: `None` instead of `null` when
    * the path does not have the expected layout.
    */
  def subStrOpt(str: String): Option[String] = {
    val parts = str.split("/")
    // NOTE(review): the guard compares segment 7 to the literal "_SUCCESS."
    // (trailing dot), yet the body splits that SAME segment on "-" and reads
    // piece index 2 — impossible for the literal "_SUCCESS.", so the original
    // code threw ArrayIndexOutOfBoundsException whenever the guard passed.
    // The dubious logic is preserved verbatim but made non-fatal via Try;
    // confirm the real marker-file naming against production paths.
    if (parts.length > 7 && "_SUCCESS.".equals(parts(7))) {
      Try {
        val day = parts(4).split("=")(1).substring(1) // drop leading char of the value
        val h   = parts(4).substring(8, 10)           // hour: chars 8-9 of the whole segment
        val n   = parts(7).split("-")(2).substring(3).toInt + 1
        s"/spark/emp/data/d=$day/h=$h/$n"
      }.toOption
    } else {
      None
    }
  }

  /** Entry point: globs `args(0)` on HDFS and prints the distinct partition
    * paths derived from every matched file.
    */
  def main(args: Array[String]): Unit = {
    val Array(path) = args // exactly one argument: an HDFS glob pattern
    val conf = new Configuration()
    // "fs.default.name" is deprecated since Hadoop 2; "fs.defaultFS" is the
    // canonical key (the old name is still honoured via deprecation mapping).
    conf.set("fs.defaultFS", "hdfs://nameservice1:8020")
    val fs = FileSystem.get(conf)
    try {
      // globStatus returns null (not an empty array) when a non-glob path
      // does not exist — guard with Option to avoid an NPE.
      val statuses: Array[FileStatus] =
        Option(fs.globStatus(new Path(path))).getOrElse(Array.empty)
      // flatMap over subStrOpt so non-matching paths are dropped instead of
      // inserting null into the set (the original added nulls to a HashSet).
      val partitions: Set[String] =
        statuses.iterator.flatMap(s => subStrOpt(s.getPath.toString)).toSet
      // The original built the set and discarded it; emit the result.
      partitions.foreach(println)
    } catch {
      // Original was `case e: Exception => e.getMessage()` — the message was
      // computed and thrown away, silently swallowing every failure. Report
      // it, and use NonFatal so OOM/InterruptedException still propagate.
      case NonFatal(e) =>
        System.err.println(s"HdfsTempPath failed for pattern '$path': ${e.getMessage}")
    } finally {
      fs.close()
    }
  }
}
