package com.galeno.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

/**
 * @author galeno
 * @Title:
 * @Description: 在Hdfs中复制文件,从把根目录的wc.TXT复制到/myself/wc.txt
 * @date 2021/7/22 21:15
 */
public class HdfsCopyFile {

    /**
     * Copies {@code /wc.txt} from the HDFS root to {@code /myself/wc.txt}.
     *
     * @param args unused
     * @throws IOException if the source cannot be read or the target cannot be written
     */
    public static void main(String[] args) throws IOException {
        // Impersonate "root" so HDFS permission checks allow the write.
        System.setProperty("HADOOP_USER_NAME", "root");
        // try-with-resources guarantees all three resources are closed in
        // reverse order even on failure. Closing the output stream is what
        // flushes and commits the copied data to HDFS — the original code
        // never closed it, so the target file could be left incomplete.
        try (FileSystem fs = getFileSystem();
             FSDataInputStream in = fs.open(new Path("/wc.txt"));
             FSDataOutputStream out = fs.create(new Path("/myself/wc.txt"))) {
            byte[] buffer = new byte[4096];
            int len;
            while ((len = in.read(buffer)) != -1) {
                out.write(buffer, 0, len);
            }
        }
        // Only reached after the copy completed and the streams closed cleanly.
        System.out.println("文件复制成功");
    }

    /**
     * Builds a {@link FileSystem} from the default Hadoop {@link Configuration}
     * (i.e. whatever core-site.xml/hdfs-site.xml are on the classpath).
     *
     * @return the configured file system handle; caller is responsible for closing it
     * @throws IOException if the file system cannot be instantiated
     */
    public static FileSystem getFileSystem() throws IOException {
        return FileSystem.get(new Configuration());
    }
}
