package DataClear;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.File;
import java.io.IOException;

// Project requirement 6: upload the files in the output directory to HDFS
public class HDFS {
    /**
     * Uploads every file in the local output directory to the Hive warehouse
     * directory on HDFS ({@code /user/hive/warehouse/air.db/}).
     *
     * @param args optional; {@code args[0]} overrides the source directory.
     *             With no arguments the original hard-coded path is used.
     */
    public static void main(String[] args) {
        // Connect to HDFS (NameNode assumed at nd11:9000).
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://nd11:9000");

        // Source directory containing the files to upload; CLI arg takes precedence.
        String path = args.length > 0
                ? args[0]
                : "C:/Users/24350/IdeaProjects/Meteorological_BigData/output";

        // File.list() returns null (not an empty array) when the directory does
        // not exist or is unreadable — guard against the resulting NPE.
        String[] filelist = new File(path).list();
        if (filelist == null) {
            System.out.println("上传失败，原因如下：");
            System.out.println("源目录不存在或不可读：" + path);
            return;
        }

        // try-with-resources guarantees the FileSystem handle is closed even if
        // an upload fails partway through (the original leaked it).
        try (FileSystem fs = FileSystem.get(conf)) {
            // Upload each file into the air.db warehouse directory on HDFS.
            for (String file : filelist) {
                fs.copyFromLocalFile(new Path(path + "/" + file), new Path("/user/hive/warehouse/air.db/" + file));
            }
            System.out.println("上传完成！！");
        } catch (IOException e) {
            System.out.println("上传失败，原因如下：");
            e.printStackTrace();
        }
    }
}