package HDFS;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * Minimal HDFS client that uploads local files to a fixed HDFS cluster.
 *
 * <p>Connection target and user are currently hard-coded constants; see
 * {@link #DEFAULT_HDFS_URI} and {@link #DEFAULT_HDFS_USER}.
 */
public class HdfsClient {

    /** NameNode URI of the target cluster. */
    private static final String DEFAULT_HDFS_URI = "hdfs://192.168.0.155:9000";
    /** User the client connects as. */
    private static final String DEFAULT_HDFS_USER = "root";

    /**
     * Uploads a single local file to the cluster.
     *
     * <p>Errors are reported to stderr rather than propagated; an
     * interrupted connection attempt restores the thread's interrupt flag.
     *
     * @param localpath  path of the local file holding the data to upload
     * @param uploadPath destination path on the cluster
     */
    public void upload(String localpath, String uploadPath) {
        Configuration conf = new Configuration();
        // try-with-resources closes in reverse declaration order, so the
        // FileSystem (declared first) is closed after both streams — and all
        // three are released even if create/copy throws, fixing the leak in
        // the original success-path-only close calls.
        try (FileSystem fs = FileSystem.get(new URI(DEFAULT_HDFS_URI), conf, DEFAULT_HDFS_USER);
             FileInputStream inputStream = new FileInputStream(localpath);
             FSDataOutputStream outputStream = fs.create(new Path(uploadPath))) {
            // Stream the local bytes into the HDFS file.
            IOUtils.copyBytes(inputStream, outputStream, conf);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can still observe it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (IOException | URISyntaxException e) {
            e.printStackTrace();
        }
    }

    /**
     * Uploads every regular file under {@code src/java/data} to {@code /data/}
     * on the cluster, keeping the original file names.
     */
    public static void main(String[] args) {
        HdfsClient hdfsDB = new HdfsClient();
        File file = new File("src/java/data");
        if (file.exists()) {
            File[] files = file.listFiles();
            // listFiles() returns null on I/O error or if the path is not a
            // directory — guard against the NPE the original would throw.
            if (files != null) {
                for (File file2 : files) {
                    if (!file2.isDirectory()) {
                        hdfsDB.upload(file2.getAbsolutePath(), "/data/" + file2.getName());
                    }
                }
            }
        } else {
            System.out.println(file.getName() + "目录不存在");
        }
    }
}
