package com.etc.test;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;




public class Hadoopcao {
    Configuration configuration;
    FileSystem fileSystem;

    /**
     * Runs before each test: builds a Hadoop configuration pointing at the
     * HDFS NameNode and obtains a {@link FileSystem} handle from it.
     *
     * @throws IOException if the FileSystem cannot be created
     */
    @Before
    public void init() throws IOException {
        // Create a Hadoop configuration object.
        configuration = new Configuration();
        // Point the default filesystem at the HDFS NameNode.
        configuration.set("fs.defaultFS", "hdfs://192.168.66.101:9000");
        // Obtain a FileSystem handle for that configuration.
        fileSystem = FileSystem.get(configuration);
    }

    /**
     * Runs after each test: closes the FileSystem handle so no test leaks it.
     * Previously only testUpload closed the handle, which was inconsistent —
     * every test now gets a fresh handle in init() and releases it here.
     *
     * @throws IOException if closing the FileSystem fails
     */
    @After
    public void tearDown() throws IOException {
        if (fileSystem != null) {
            fileSystem.close();
        }
    }

    /**
     * Uploads a local file to the HDFS root directory.
     */
    @Test
    public void testUpload() throws IOException {
        fileSystem.copyFromLocalFile(new Path("e:\\c.txt"), new Path("/"));
        // close() moved to tearDown() so every test releases the handle.
    }

    /**
     * Creates a directory in HDFS and prints whether it succeeded.
     */
    @Test
    public void testMkdir() throws IOException {
        boolean mkdirs = fileSystem.mkdirs(new Path("/bb"));
        System.out.println(mkdirs);
    }

    /**
     * Deletes a file from HDFS immediately and prints whether it succeeded.
     */
    @Test
    public void testdelete() throws IOException {
        // Bug fix: deleteOnExit() only *schedules* deletion for when the
        // FileSystem is closed (and always returns true), so the original
        // code never deleted anything during the test. delete(path, true)
        // removes the path right away; the boolean enables recursion.
        boolean b = fileSystem.delete(new Path("/c.txt"), true);
        System.out.println(b);
    }

    /**
     * Recursively lists all files under the HDFS root directory, printing
     * name, full path, block size and owner for each entry.
     */
    @Test
    public void testLs() throws IOException {
        // Second argument `true` makes the listing recursive.
        RemoteIterator<LocatedFileStatus> listFiles = fileSystem.listFiles(new Path("/"), true);
        while (listFiles.hasNext()) {
            LocatedFileStatus next = listFiles.next();
            System.out.println("name:" + next.getPath().getName());
            System.out.println("path:" + next.getPath());
            System.out.println("blcSize: " + next.getBlockSize());
            System.out.println("owner: " + next.getOwner());
            System.out.println("----------------");
        }
    }
}
