package com.atguigu.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * Demonstrates common HDFS client operations — upload, download, delete,
 * rename, file listing, and stream-based copy — against a remote cluster.
 *
 * <p>NOTE(review): every test hard-codes the cluster address
 * {@code hdfs://hadoop102:9820}, the user {@code atguigu}, and local Windows
 * paths under {@code D:\io} — confirm these match the target environment
 * before running.
 */
public class HDFSDemo {
    /** Client handle to the HDFS cluster; created in before(), released in after(). */
    private FileSystem fs = null;

    /**
     * Creates the HDFS FileSystem object before each test.
     *
     * @throws Exception if the URI is malformed or the connection/login fails
     */
    @Before
    public void before() throws Exception {
        // FileSystem.get(URI uri, Configuration conf, String user)
        Configuration conf = new Configuration();
        URI uri = new URI("hdfs://hadoop102:9820");
        fs = FileSystem.get(uri, conf, "atguigu");
    }

    /**
     * Closes the FileSystem after each test. The IOException is logged rather
     * than propagated so a teardown failure cannot mask the test's own result.
     */
    @After
    public void after() {
        try {
            if (fs != null) {
                fs.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Upload via copyFromLocalFile(delSrc, overwrite, src, dst):
     * delSrc=false keeps the local source file; overwrite=true replaces any
     * existing file at the destination.
     */
    @Test
    public void test() throws IOException {
        fs.copyFromLocalFile(false, true, new Path("D:\\io\\2.txt"),
                new Path("/hdfs/"));
    }

    /**
     * Download: copies /hdfs to the local D:\io directory.
     * First argument true deletes the HDFS source afterwards (i.e. a move).
     */
    @Test
    public void test1() throws IOException {
        fs.copyToLocalFile(true, new Path("/hdfs"), new Path("D:\\io"));
    }

    /**
     * Delete: removes the given path. recursive=false, so this fails if the
     * path is a non-empty directory.
     */
    @Test
    public void test2() throws IOException {
        fs.delete(new Path("/jdk-8u212-linux-x64.tar.gz"), false);
    }

    /**
     * Rename: moves /hdfs/2.txt to /1.txt (rename also relocates across dirs).
     */
    @Test
    public void test3() throws IOException {
        fs.rename(new Path("/hdfs/2.txt"), new Path("/1.txt"));
    }

    /**
     * File details: recursively lists every file under the root and prints its
     * name, owner, replication factor, and block locations.
     */
    @Test
    public void test4() throws IOException {
        RemoteIterator<LocatedFileStatus> remoteIterator = fs.listFiles(new Path("/"), true);
        while (remoteIterator.hasNext()) {
            LocatedFileStatus fileStatus = remoteIterator.next();
            String name = fileStatus.getPath().getName();
            String owner = fileStatus.getOwner();

            short replication = fileStatus.getReplication();
            System.out.println("文件名:" + name);
            System.out.println("文件主:" + owner);
            System.out.println("文件副本数" + replication);
            BlockLocation[] blockLocations = fileStatus.getBlockLocations();
            for (BlockLocation blockLocation : blockLocations) {
                System.out.println(blockLocation);
            }
        }
    }

    /**
     * Distinguishes files from directories directly under the root
     * (listStatus is non-recursive, unlike listFiles above).
     */
    @Test
    public void test5() throws IOException {
        FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
        for (FileStatus status : fileStatuses) {
            if (status.isDirectory()) {
                System.out.println(status.getPath().getName() + "是一个目录");
            } else if (status.isFile()) {
                System.out.println(status.getPath().getName() + "是一个文件");
            }
        }
    }

    /**
     * Stream-based upload: reads a local file and writes it into HDFS.
     *
     * <p>FIX: the original closed both streams only after copyBytes returned,
     * leaking them if the copy threw; try-with-resources guarantees both are
     * closed on every path.
     */
    @Test
    public void test6() throws Exception {
        try (FileInputStream fis = new FileInputStream("D:\\io\\2.txt");
             FSDataOutputStream fos = fs.create(new Path("/hdfs/2.txt"))) {
            // close=false: try-with-resources already owns stream lifetime
            IOUtils.copyBytes(fis, fos, 1024, false);
        }
    }

    /**
     * Stream-based download: reads an HDFS file and writes it to local disk.
     *
     * <p>FIX: the original leaked fis when the FileOutputStream constructor
     * threw, and relied on copyBytes(close=true) which never runs on early
     * failure; try-with-resources closes both streams deterministically.
     */
    @Test
    public void test7() throws IOException {
        try (FSDataInputStream fis = fs.open(new Path("/hdfs/2.txt"));
             FileOutputStream fos = new FileOutputStream("D:\\io\\1.txt")) {
            // close=false: try-with-resources already owns stream lifetime
            IOUtils.copyBytes(fis, fos, 1024, false);
        }
    }
}
