package com.shujia.hdfs;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.mapred.MapTask;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;

public class HDFSDemo1 {
    /**
     * Demonstrates basic HDFS client operations: connecting to the NameNode,
     * creating a directory, inspecting file metadata, and stream-based
     * upload/download. The commented-out snippets show alternative API calls
     * kept for reference in this teaching demo.
     */
    public static void main(String[] args) throws Exception {
        // Step 1: create the Hadoop configuration object.
        Configuration conf = new Configuration();
        // Step 2: point the client at the HDFS NameNode (the cluster's master process).
        conf.set("fs.defaultFS", "hdfs://master:9000");

        // Step 3: obtain the FileSystem handle from the configuration.
        // try-with-resources guarantees the connection is released even if
        // any of the operations below throws (the original leaked it).
        try (FileSystem fs = FileSystem.get(conf)) {
            System.out.println("成功获取hdfs文件系统：" + fs);

            // Create a directory on HDFS (mkdirs also creates missing parents).
            boolean b = fs.mkdirs(new Path("/bigdata27/input2"));
            System.out.println(b ? "文件夹创建成功！" : "文件夹创建失败！");

            // Upload a local file to HDFS:
//            fs.copyFromLocalFile(new Path("hadoop/data/words.txt"), new Path("/bigdata27/input1/"));
            // Change the replication factor of an existing file:
//            fs.setReplication(new Path("/bigdata27/input1/words.txt"), (short) 1);

            // Download a file (on Windows this path requires the local Hadoop
            // native environment to be configured):
//            fs.copyToLocalFile(new Path("/bigdata27/input1/words.txt"), new Path("hadoop/data/out1/"));

            // Inspect basic file metadata.
            // NOTE(review): assumes /bigdata27/input1/words.txt already exists
            // on the cluster — getFileStatus throws FileNotFoundException otherwise.
            FileStatus fileStatus = fs.getFileStatus(new Path("/bigdata27/input1/words.txt"));
//            String owner = fileStatus.getOwner();             // owning user
//            String group = fileStatus.getGroup();             // owning group
//            FsPermission permission = fileStatus.getPermission();
//            String s = permission.toString();
//            long blockSize = fileStatus.getBlockSize();
//            System.out.println(owner + "-" + group + "-" + s + "-" + blockSize);

            // Inspect the file's block locations:
//            BlockLocation[] blockLocations = fs.getFileBlockLocations(fileStatus, 0, 1024);
//            for (BlockLocation blockLocation : blockLocations) {
//                System.out.println(Arrays.toString(blockLocation.getNames()));
//                System.out.println(Arrays.toString(blockLocation.getHosts()));
//                System.out.println("------------------------------");
//            }

            System.out.println("-----------------------------------------------------------------");
            // Stream-based upload: wrap the local file as a reader and copy it
            // into an HDFS output stream.
            // NOTE(review): FileReader decodes with the platform charset; prefer
            // an explicit UTF-8 InputStreamReader when enabling this snippet.
//            BufferedReader br = new BufferedReader(new FileReader("hadoop/data/words.txt"));
//            FSDataOutputStream fsDataOutputStream = fs.create(new Path("/bigdata27/input2/words.txt"));
//            IOUtils.copy(br, fsDataOutputStream, "UTF-8");

            // Stream-based download.
            // Fixes vs. the original:
            //  - create the local target directory first (FileWriter would throw
            //    FileNotFoundException if hadoop/data/out2/ did not exist);
            //  - write with an explicit UTF-8 writer instead of FileWriter, which
            //    uses the platform charset and corrupts non-ASCII text on
            //    GBK/Windows machines while the stream is decoded as UTF-8;
            //  - close both streams via try-with-resources so they are released
            //    even when the copy fails mid-way.
            Files.createDirectories(Paths.get("hadoop/data/out2"));
            try (FSDataInputStream in = fs.open(new Path("/bigdata27/input2/words.txt"));
                 BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
                         new FileOutputStream("hadoop/data/out2/words.txt"), StandardCharsets.UTF_8))) {
                IOUtils.copy(in, out, StandardCharsets.UTF_8);
            }
        }
    }
}
