import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;
import java.nio.charset.StandardCharsets;

public class Hadoop_Test {
    /** Shared Hadoop configuration (reads core-site.xml / hdfs-site.xml from the classpath). */
    static Configuration conf = new Configuration();
    /** FileSystem handle opened by the helper methods; kept as a field for compatibility. */
    static FileSystem fs;

    public static void main(String[] args) throws Exception {
        //Download();
        //create();
        //check();
        //Getlist();
        upload();
    }

    /**
     * Lists the status of every entry directly under the HDFS root directory
     * and prints one entry per line.
     *
     * @throws Exception if the filesystem cannot be reached
     */
    public static void Getlist() throws Exception {
        fs = FileSystem.get(conf);
        try {
            FileStatus[] list = fs.listStatus(new Path("/"));
            for (FileStatus f : list) {
                // println (was print): without a separator all entries ran together
                System.out.println(f);
            }
        } finally {
            // NOTE(review): FileSystem.get(conf) may return a cached shared instance;
            // closing it here matches the original Download()/upload() behavior.
            fs.close();
        }
    }

    /**
     * Downloads /a.txt from HDFS into the local D:/ directory.
     *
     * @throws Exception if the copy fails
     */
    public static void Download() throws Exception {
        fs = FileSystem.get(conf);
        try {
            // delSrc=false keeps the HDFS copy; useRawLocalFileSystem=true skips the local .crc file
            fs.copyToLocalFile(false, new Path("/a.txt"), new Path("D:/"), true);
        } finally {
            fs.close(); // was only closed on success; finally guarantees cleanup on failure too
        }
    }

    /**
     * Creates the /bb directory and an empty /bb/test1.txt file on HDFS,
     * connecting as user "hadoop".
     *
     * @throws Exception if the directory or file cannot be created
     */
    public static void create() throws Exception {
        fs = FileSystem.get(new URI("hdfs://hadoop1:9000"), conf, "hadoop");
        try {
            Path path = new Path("/bb/test1.txt");
            // BUG FIX: the original called mkdirs(path), which created a DIRECTORY
            // named test1.txt instead of the file promised by this method's contract.
            fs.mkdirs(path.getParent());
            fs.create(path).close(); // create the empty file and release the stream
            System.out.println("*********************************");
            System.out.println("创建成功");
        } finally {
            fs.close();
        }
    }

    /**
     * Appends the text "Hello HDFS" (UTF-8) to /test.txt on HDFS,
     * connecting as user "hadoop". Requires dfs.support.append to be enabled.
     *
     * @throws Exception if the append fails
     */
    public static void check() throws Exception {
        fs = FileSystem.get(new URI("hdfs://hadoop:9000"), conf, "hadoop");
        try {
            // explicit charset: bare getBytes() used the platform default encoding
            byte[] bytes = "Hello HDFS".getBytes(StandardCharsets.UTF_8);
            // try-with-resources: the original leaked the stream, so the appended
            // bytes could remain unflushed and the lease was never released
            try (FSDataOutputStream put = fs.append(new Path("/test.txt"))) {
                put.write(bytes);
            }
        } finally {
            fs.close();
        }
    }

    /**
     * Uploads the local file D:/b.txt to the HDFS root directory.
     *
     * @throws Exception if the copy fails
     */
    public static void upload() throws Exception {
        fs = FileSystem.get(conf);
        try {
            // delSrc=false: keep the local source file after the copy
            fs.copyFromLocalFile(false, new Path("D:/b.txt"), new Path("/"));
        } finally {
            fs.close();
        }
        System.out.println("上传成功");
    }

}
