package cn.eud360.hadoop;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

/**
 * JUnit 4 tests demonstrating basic HDFS client operations: upload, download,
 * delete, rename, mkdir, listing, and stream read/write.
 *
 * <p>Requires a reachable NameNode at {@code hdfs://Linux00:9000/}; these are
 * integration tests, not unit tests.
 */
public class hdfsclient {
    // HDFS client handle shared by all tests; created in init(), released in tearDown().
    FileSystem fs = null;

    @Before
    public void init() throws Exception {
        // Act as the "root" HDFS user (no Kerberos in this demo cluster).
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        // Default is the local file system; point the client at the HDFS NameNode.
        conf.set("fs.defaultFS", "hdfs://Linux00:9000/");
        // Block size for files created by this client (HDFS default is 128 MB).
        conf.set("dfs.blocksize", "128m");
        // Replication factor for files created by this client.
        conf.set("dfs.replication", "3");
        fs = FileSystem.get(conf);
    }

    /**
     * Close the shared FileSystem after every test. Centralized here so no
     * individual test closes {@code fs} mid-fixture (a closed handle would
     * break other tests sharing the field).
     */
    @After
    public void tearDown() throws IOException {
        if (fs != null) {
            fs.close();
        }
    }

    /** Upload a local file into HDFS. */
    @Test
    public void Upload() throws Exception {
        // HDFS paths always use forward slashes; the original "\\hadoop\\"
        // backslash path is not a valid absolute HDFS destination.
        fs.copyFromLocalFile(new Path("E:\\projects\\hadoop-hdfs\\pom.xml"), new Path("/hadoop/"));
    }

    /** Download an HDFS file to the local (Windows) file system. */
    @Test
    public void Download() throws Exception {
        // NOTE(review): on Windows clients without winutils, the 4-arg overload
        // copyToLocalFile(false, src, dst, true) avoids the native checksum path.
        fs.copyToLocalFile(new Path("/app/wc.txt"), new Path("C:\\Users\\Rockefeller\\Desktop\\wc1.txt"));
    }

    /** Recursively delete a directory tree in HDFS. */
    @Test
    public void Delete() throws Exception {
        fs.delete(new Path("/dashuju2/"), true); // true = recursive
    }

    /** Move/rename a file within HDFS. */
    @Test
    public void Move() throws Exception {
        fs.rename(new Path("/a/wc.txt"), new Path("/app/wc1.txt"));
    }

    /** Create a nested directory path (parents created as needed). */
    @Test
    public void Mkdirs() throws Exception {
        fs.mkdirs(new Path("/x/y/z/"));
    }

    /** Recursively list all files under "/" with their block locations. */
    @Test
    public void Testls() throws IOException {
        RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path("/"), true);
        while (it.hasNext()) {
            LocatedFileStatus status = it.next();
            System.out.println("file path=" + status.getPath());
            System.out.println("file length=" + status.getLen());
            System.out.println("replication=" + status.getReplication());
            System.out.println("block size=" + status.getBlockSize());
            for (BlockLocation block : status.getBlockLocations()) {
                System.out.println("block length=" + block.getLength());
                System.out.println("block offset=" + block.getOffset());
                System.out.println("block hosts=" + Arrays.toString(block.getHosts()));
            }
            System.out.println("-------------------------------------------");
        }
        // fs is closed in tearDown(), not here, so other tests keep a live handle.
    }

    /** Non-recursive listing of "/": files and directories. */
    @Test
    public void Testls2() throws IOException {
        FileStatus[] statuses = fs.listStatus(new Path("/"));
        for (FileStatus status : statuses) {
            System.out.println("path=" + status.getPath());
            System.out.println("is directory=" + status.isDirectory());
        }
    }

    /** Read an HDFS file line by line, starting from byte offset 3. */
    @Test
    public void Readfile() throws IOException {
        // try-with-resources guarantees both streams are closed even on error;
        // closing the reader also closes the underlying FSDataInputStream.
        try (FSDataInputStream in = fs.open(new Path("/app/wc.txt"));
             BufferedReader reader =
                     new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
            // Skip the first 3 bytes to demonstrate random-access reads.
            in.seek(3);
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
    }

    /** Create a file in HDFS and write a few lines to it. */
    @Test
    public void WriteFile() throws Exception {
        // try-with-resources closes (and thereby flushes) the stream — the
        // original never closed it, so the data might never reach HDFS.
        try (FSDataOutputStream out = fs.create(new Path("/hadoop/hadoop.txt"))) {
            // Explicit UTF-8: getBytes() without a charset is platform-dependent.
            out.write("大数据高薪就业\n".getBytes(StandardCharsets.UTF_8));
            out.write("一定就业\n".getBytes(StandardCharsets.UTF_8));
            // NOTE: writeUTF emits a 2-byte length prefix + modified UTF-8,
            // not plain text — kept here as an API demonstration.
            out.writeUTF("大数据我就要哪个就业\n");
        }
    }

}