package com.wangwg.sparkTest.hdfs

import java.io.File

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

object UploadLocalFile {

  /** Uploads every regular file from a fixed local directory into an HDFS directory.
    *
    * Connects to the HDFS namenode at `hdfs://geomesa1.com:9000`, ensures the
    * target directory `/data/landuse` exists, then copies each file from
    * `D:\work\bigdata\local-1597831584433` into it.
    *
    * Fixes over the previous version:
    *   - the [[FileSystem]] handle is closed in a `finally` block (resource leak);
    *   - `File.listFiles()` returning `null` (missing/unreadable dir) no longer NPEs;
    *   - sub-directories are skipped instead of being handed to `copyFromLocalFile`.
    */
  def main(args: Array[String]): Unit = {
    val hdfsUrl = "hdfs://geomesa1.com:9000"
    val hdfsConf: Configuration = new Configuration()
    hdfsConf.set("fs.defaultFS", hdfsUrl)

    val hdfs = FileSystem.get(hdfsConf)
    try {
      val shpName = "landuse"
      val targetDir = new Path(s"/data/$shpName")
      // mkdirs is idempotent: returns true whether the directory was created or already exists
      hdfs.mkdirs(targetDir)

      val localDir = new File("D:\\work\\bigdata\\local-1597831584433")
      // listFiles() returns null when the path does not exist or is not a directory
      val localFiles = Option(localDir.listFiles()).getOrElse(Array.empty[File])
      localFiles.filter(_.isFile).foreach { file =>
        // Path(parent, child) builds the destination path without manual string concatenation
        hdfs.copyFromLocalFile(new Path(file.getPath), new Path(targetDir, file.getName))
      }
    } finally {
      // release the connection to the namenode even if an upload fails
      hdfs.close()
    }
  }
}
