package com.cc.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;


/**
 * JUnit 4 exercises for common HDFS client operations: mkdir, upload, download,
 * delete, rename, listing file details, and reading a file as text.
 *
 * <p>Each test runs against a live cluster at {@code hdfs://hadoop101:8020};
 * a fresh {@link FileSystem} client is opened before and closed after every test.
 */
public class HdfsClient {

    // Shared HDFS client handle; created in init(), released in close().
    private FileSystem fs;

    /**
     * Connects to the cluster NameNode (the same address as the core-site.xml
     * fs.defaultFS setting) and obtains a client acting as user "cwc".
     */
    @Before
    public void init() throws URISyntaxException, IOException, InterruptedException {
        URI uri = new URI("hdfs://hadoop101:8020");
        Configuration configuration = new Configuration();
        // Ask for 2 replicas on files written through this client
        // (overrides the cluster/site default for this session only).
        configuration.set("dfs.replication", "2");
        String user = "cwc";
        // Obtain the client, authenticating as the given HDFS user.
        fs = FileSystem.get(uri, configuration, user);
    }

    /**
     * Closes the client connection. Guarded with a null check so that a
     * failure inside init() does not trigger a secondary NPE here.
     */
    @After
    public void close() throws IOException {
        if (fs != null) {
            fs.close();
        }
    }

    /** Creates a directory (and any missing parents) on HDFS. */
    @Test
    public void testMkdir() throws IOException {
        fs.mkdirs(new Path("/chen/wc123"));
    }

    /**
     * Uploads a local file to the HDFS root.
     * Args: delSrc=false (keep the local source), overwrite=false.
     */
    @Test
    public void testPut() throws IOException {
        fs.copyFromLocalFile(false, false,
                new Path("F:\\test\\cwc_write\\testhdfs_write.txt"),
                new Path("hdfs://hadoop101:8020/"));
    }

    /**
     * Downloads a file from HDFS to the local filesystem.
     * Args: delSrc=false (keep the HDFS copy), useRawLocalFileSystem=false
     * (so a local .crc checksum file is also written).
     */
    @Test
    public void testGet() throws IOException {
        fs.copyToLocalFile(false,
                new Path("hdfs://hadoop101:8020/user_cwc/tmp/test_read.txt"),
                new Path("F:\\test\\test_rea.txt"), false);
    }

    /** Deletes a path; recursive=true so non-empty directories are removed too. */
    @Test
    public void testRm() throws IOException {
        fs.delete(new Path("/eclipse"), true);
    }

    /**
     * Moves or renames a path. If /input already exists at the root this moves
     * /output underneath it; otherwise it simply renames /output to /input.
     */
    @Test
    public void testMV() throws IOException {
        fs.rename(new Path("/output"), new Path("/input"));
    }

    /**
     * Recursively lists every file under the root and prints its metadata
     * (permissions, owner, group, length, mtime, replication, block size,
     * name) plus the block locations.
     */
    @Test
    public void fileDetails() throws IOException {
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        // Iterate over every file (listFiles only returns files, not directories).
        while (listFiles.hasNext()) {
            LocatedFileStatus fileStatus = listFiles.next();
            // Full path banner, then one metadata field per line.
            System.out.println("=========" + fileStatus.getPath() + "==========");
            System.out.println(fileStatus.getPermission());
            System.out.println(fileStatus.getOwner());
            System.out.println(fileStatus.getGroup());
            System.out.println(fileStatus.getLen());
            System.out.println(fileStatus.getModificationTime());
            System.out.println(fileStatus.getReplication());
            System.out.println(fileStatus.getBlockSize());
            System.out.println(fileStatus.getPath().getName());
            // Block location info (which datanodes hold each block).
            BlockLocation[] blockLocations = fileStatus.getBlockLocations();
            System.out.println(Arrays.toString(blockLocations));
        }
    }

    /** Lists the root's direct children and reports whether each is a file or a directory. */
    @Test
    public void testFile() throws IOException {
        FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
        for (FileStatus fStatus : fileStatuses) {
            if (fStatus.isFile()) {
                System.out.println("文件：" + fStatus.getPath().getName());
            } else {
                System.out.println("目录：" + fStatus.getPath().getName());
            }
        }
    }

    /** Opens a text file on HDFS and prints it line by line. */
    @Test
    public void redContent() throws IOException {
        // try-with-resources guarantees both the HDFS stream and the reader are
        // closed even if reading fails part-way (the original leaked them), and
        // an explicit UTF-8 charset avoids platform-default decoding surprises.
        try (FSDataInputStream fsDataInputStream =
                     fs.open(new Path("hdfs://hadoop101:8020/user_cwc/tmp/test_read.txt"));
             BufferedReader r = new BufferedReader(
                     new InputStreamReader(fsDataInputStream, StandardCharsets.UTF_8))) {
            String line;
            while ((line = r.readLine()) != null) {
                System.out.println(line);
            }
        }
    }

}
