package com.csw.hadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

/**
 * Demo/integration tests for the HDFS {@link FileSystem} API.
 *
 * <p>Each test connects to the cluster at {@code hdfs://master:9000} in
 * {@link #init()} and releases the connection in {@link #cleanup()}. These are
 * not unit tests: they require a reachable HDFS NameNode.
 */
public class HdfsApi {
    FileSystem fs;

    /** Opens the HDFS connection before each test. */
    @Before
    public void init() throws Exception {
        // Load the (default) Hadoop configuration.
        Configuration conf = new Configuration();
        // One replica is enough for a single-node test cluster.
        conf.set("dfs.replication", "1");
        // NameNode address of the target cluster.
        URI uri = new URI("hdfs://master:9000");
        // FileSystem is the entry point for all HDFS operations.
        fs = FileSystem.get(uri, conf);
    }

    /** Closes the HDFS connection after each test so sockets are not leaked. */
    @After
    public void cleanup() throws Exception {
        if (fs != null) {
            fs.close();
        }
    }

    /** Creates the directory {@code /csw} and prints whether the call succeeded. */
    @Test
    public void mk() throws Exception {
        Path path = new Path("/csw");
        boolean mkdirs = fs.mkdirs(path);
        System.out.println(mkdirs);
    }

    /** Recursively deletes {@code /csw} and prints whether the call succeeded. */
    @Test
    public void del() throws Exception {
        // delete(Path) is deprecated; the boolean flag requests recursive
        // deletion, which is required for non-empty directories.
        boolean deleted = fs.delete(new Path("/csw"), true);
        System.out.println(deleted);
    }

    /** Lists every entry under {@code /data}: path, length, name and block size. */
    @Test
    public void list() throws Exception {
        FileStatus[] fileStatuses = fs.listStatus(new Path("/data"));
        for (FileStatus fileStatus : fileStatuses) {
            Path path = fileStatus.getPath();
            long len = fileStatus.getLen();
            String name = path.getName();
            long blockSize = fileStatus.getBlockSize();
            System.out.println(path + "  " + len + "  " + name + "   " + blockSize);
        }
    }

    /** Prints path, length, name and block size of {@code /data/student.txt}. */
    @Test
    public void getFile() throws Exception {
        FileStatus fileStatus = fs.getFileStatus(new Path("/data/student.txt"));
        Path path = fileStatus.getPath();
        String name = path.getName();
        long len = fileStatus.getLen();
        long blockSize = fileStatus.getBlockSize();
        System.out.println(path + "  " + len + "  " + name + "   " + blockSize);
    }

    /** Streams {@code /data/test.txt} to stdout line by line. */
    @Test
    public void read() throws Exception {
        // Reading fixed-size byte chunks and decoding each chunk separately can
        // split a multi-byte character at a chunk boundary and produce mojibake;
        // decoding through a Reader avoids that. The explicit charset keeps the
        // result independent of the platform default encoding.
        try (FSDataInputStream in = fs.open(new Path("/data/test.txt"));
             BufferedReader br = new BufferedReader(
                     new InputStreamReader(in, StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                System.out.println(line);
            }
        }
    }

    /** Overwrites {@code /a.txt} with four tokens written via {@code writeUTF}. */
    @Test
    public void write() throws Exception {
        // NOTE(review): writeUTF prefixes every string with a 2-byte length, so
        // the file is NOT plain text — it only round-trips via DataInput.readUTF.
        // If plain text is intended, use out.write(s.getBytes(...)) instead.
        try (FSDataOutputStream out = fs.create(new Path("/a.txt"), true)) {
            out.writeUTF("csw");
            out.writeUTF("is");
            out.writeUTF("a");
            out.writeUTF("boy");
        }
    }

    /** Uploads a local file into the HDFS directory {@code /data/}. */
    @Test
    public void put() throws Exception {
        // local = source file on the local disk; hdfs = destination directory.
        Path local = new Path("E:\\IdeaFile\\Hadoop\\data\\cource.txt");
        Path hdfs = new Path("/data/");
        fs.copyFromLocalFile(local, hdfs);
    }

    /** Downloads {@code /data/test.txt} from HDFS to the local working dir. */
    @Test
    public void get() throws Exception {
        // local = destination on the local disk; hdfs = source file.
        // delSrc=false keeps the HDFS copy; useRawLocalFileSystem=true skips
        // writing the local .crc checksum side file.
        Path local = new Path("data/test.txt");
        Path hdfs = new Path("/data/test.txt");
        fs.copyToLocalFile(false, hdfs, local, true);
    }
}
