import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.BasicConfigurator;

import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;

public class HadoopTest {
    // Configuration picks up core-site.xml / hdfs-site.xml from the classpath
    static Configuration conf = new Configuration();
    static FileSystem file;

    public static void main(String[] args) throws Exception{
        // set up basic log4j console logging
        BasicConfigurator.configure();
        // xiazai();
        shangchuan();
    }

    // Download: copy /wwt.txt from HDFS to the local directory f:/hadoop
    public static void xiazai() throws Exception{
        file = FileSystem.get(conf);
        // delSrc = false keeps the source file on HDFS;
        // useRawLocalFileSystem = true avoids writing a local .crc checksum file
        file.copyToLocalFile(false, new Path("/wwt.txt"), new Path("f:/hadoop"), true);
        file.close();
    }
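
    /*
     * Alternative download sketch: stream the HDFS file to a local file instead
     * of using copyToLocalFile. The method name and paths are placeholders,
     * not part of the original example; adjust them to your own files.
     */
    public static void xiazaiStream() throws Exception {
        file = FileSystem.get(conf);
        // open the HDFS file as an input stream
        InputStream in = file.open(new Path("/wwt.txt"));
        // write to a local file
        OutputStream out = new java.io.FileOutputStream("f:\\hadoop\\wwt.txt");
        // copy with a 4 KB buffer and close both streams when done
        org.apache.hadoop.io.IOUtils.copyBytes(in, out, 4096, true);
        file.close();
    }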

    // Upload: stream a local file to /abc.txt on HDFS
    public static void shangchuan() throws Exception{
        file = FileSystem.get(conf);
        // open an input stream (byte stream) for the local file
        InputStream in = new FileInputStream("f:\\hadoop\\abc.txt");

        // create an output stream to HDFS; the target file name must be
        // specified explicitly, it is not derived from the local file name
        OutputStream out = file.create(new Path("/abc.txt"));

        // transfer buffer
        byte[] buffer = new byte[1024];
        // number of bytes returned by the last read() call
        int len = 0;
        while ((len = in.read(buffer)) != -1) {
            // write the bytes just read to the output stream
            out.write(buffer, 0, len);
        }
        out.flush();
        // close the streams
        in.close();
        out.close();
    }
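
    /*
     * Alternative upload sketch: use FileSystem.copyFromLocalFile instead of a
     * manual byte-copy loop. The method name and paths are placeholders, not
     * part of the original example; adjust them as needed.
     */
    public static void shangchuanCopy() throws Exception {
        file = FileSystem.get(conf);
        // copies the local file to HDFS in one call; the local source is kept
        file.copyFromLocalFile(new Path("f:/hadoop/abc.txt"), new Path("/abc.txt"));
        file.close();
    }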
}
