package mamingliang.xbmu.lz;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import java.io.*;
import java.net.URI;
import java.util.Properties;
import java.util.Scanner;

/**
 * Interactive console utility for common HDFS file operations: listing a
 * directory, uploading local files, deleting remote paths, merging the files
 * of a directory into one file, and printing a file's content.
 *
 * <p>Connection settings (HADOOP_USER_NAME, HdfsPath, HdfsUrl) are loaded once
 * from the classpath resource {@code /myserver.properties} at class-load time.
 * Each operation loops on console input until the user types {@code exit}.
 */
public class FileOperation {

    private static String HADOOP_USER_NAME;
    private static String HdfsPath;
    private static String HdfsUrl;
    private final Scanner scanner = new Scanner(System.in);

    static {
        // Load the HDFS connection settings once, at class load time.
        // try-with-resources guarantees the stream is closed (the original leaked it).
        try (InputStream inputStream =
                     FileOperation.class.getResourceAsStream("/myserver.properties")) {
            if (inputStream == null) {
                // getResourceAsStream returns null (not an exception) when the
                // resource is absent; surface that as an IOException so the
                // catch below reports it instead of an NPE from load().
                throw new FileNotFoundException("classpath resource /myserver.properties not found");
            }
            Properties properties = new Properties();
            properties.load(inputStream);
            HADOOP_USER_NAME = properties.getProperty("HADOOP_USER_NAME");
            HdfsPath = properties.getProperty("HdfsPath");
            HdfsUrl = properties.getProperty("HdfsUrl");
            // Hadoop picks up the acting user from this system property.
            System.setProperty("HADOOP_USER_NAME", HADOOP_USER_NAME);
        } catch (IOException e) {
            e.printStackTrace();
            System.err.println("读取hdfs配置文件失败！");
        }
    }

    /**
     * Builds a fresh Hadoop configuration pointing at the configured cluster.
     * Extracted because the exact same three lines were duplicated in all five
     * public methods.
     *
     * @return a Configuration with fs.defaultFS and the HDFS implementation set
     */
    private static Configuration newConfiguration() {
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", HdfsUrl);
        configuration.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        return configuration;
    }

    /**
     * Lists the files of an HDFS directory entered on the console, printing
     * path, length and permission for each entry. Loops until the user types
     * {@code exit}.
     */
    public void showFiles() {
        while (true) {
            System.out.println("请输入要查看的路径 （字符),输入 exit 返回主菜单：");
            String choose = scanner.next();
            if (choose.equalsIgnoreCase("exit")) {
                break;
            }
            // try-with-resources closes the FileSystem handle (originally leaked).
            try (FileSystem fileSystem = FileSystem.get(URI.create(HdfsPath), newConfiguration())) {
                FileStatus[] fileStatuses = fileSystem.listStatus(new Path(choose));
                for (FileStatus fileStatus : fileStatuses) {
                    System.out.println(fileStatus.getPath() + "   " + fileStatus.getLen() +
                            "   " + fileStatus.getPermission());
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Uploads a local file (or directory) to an HDFS path, both read from the
     * console. Loops until the user types {@code exit} at either prompt.
     */
    public void upload() {
        while (true) {
            System.out.println("请输入要上传的本地文件路径 （字符),输入 exit 返回主菜单：");
            String choose = scanner.next();
            if (choose.equalsIgnoreCase("exit")) {
                break;
            }
            System.out.println("请输入要上传到的路径 （字符),输入 exit 返回主菜单：");
            String remote = scanner.next();
            if (remote.equalsIgnoreCase("exit")) {
                break;
            }
            try (FileSystem fileSystem = FileSystem.get(URI.create(HdfsPath), newConfiguration())) {
                // copyFromLocalFile handles directories recursively on its own.
                fileSystem.copyFromLocalFile(new Path(choose), new Path(remote));
                File file = new File(choose);
                if (file.isDirectory()) {
                    File[] files = file.listFiles();
                    // listFiles() can return null on I/O error even for a directory.
                    if (files != null) {
                        for (File child : files) {
                            System.out.println(child.getName() + "上传成功...");
                        }
                    }
                } else {
                    System.out.println(file.getName() + "上传成功...");
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Recursively deletes an HDFS path entered on the console. Loops until the
     * user types {@code exit}.
     */
    public void delete() {
        while (true) {
            System.out.println("请输入要删除的文件路径 （字符),输入 exit 返回主菜单：");
            String choose = scanner.next();
            if (choose.equalsIgnoreCase("exit")) {
                break;
            }
            try (FileSystem fileSystem = FileSystem.get(URI.create(HdfsPath), newConfiguration())) {
                // true = recursive, so directories are removed as well.
                fileSystem.delete(new Path(choose), true);
                System.out.println("删除成功！");
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Concatenates every file of an HDFS directory into a single new HDFS
     * file; both paths are read from the console. Loops until the user types
     * {@code exit} at either prompt.
     */
    public void merge() {
        while (true) {
            System.out.println("请输入要合并的文件路径 （字符),输入 exit 返回主菜单：");
            String choose = scanner.next();
            if (choose.equalsIgnoreCase("exit")) {
                break;
            }
            System.out.println("请输入合并后生成文件的路径及文件名 （字符),输入 exit 返回主菜单：");
            String newFile = scanner.next();
            if (newFile.equalsIgnoreCase("exit")) {
                break;
            }
            Configuration configuration = newConfiguration();
            // All three resources are closed on every path, including errors.
            // (The original closed only the LAST input stream, leaking one per
            // file, and threw NPE on close when the source directory was empty.)
            try (FileSystem fileSystemInput = FileSystem.get(URI.create(choose), configuration);
                 FileSystem fileSystemOutput = FileSystem.get(URI.create(newFile), configuration);
                 FSDataOutputStream fsDataOutputStream =
                         fileSystemOutput.create(new Path(newFile))) {
                for (FileStatus fileStatus : fileSystemInput.listStatus(new Path(choose))) {
                    System.out.println(fileStatus.getPath() + "   " + fileStatus.getLen() +
                            "   " + fileStatus.getPermission());
                    // Per-file stream closed immediately after the copy.
                    try (FSDataInputStream fsDataInputStream =
                                 fileSystemInput.open(fileStatus.getPath())) {
                        byte[] buffer = new byte[1024 * 1024];
                        int read;
                        while ((read = fsDataInputStream.read(buffer)) > 0) {
                            fsDataOutputStream.write(buffer, 0, read);
                        }
                    }
                }
                System.out.println("文件合并成功！");
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Prints the text content of an HDFS file entered on the console, line by
     * line. Loops until the user types {@code exit}.
     */
    public void showFileContent() {
        while (true) {
            System.out.println("请输入要查看文件内容的路径 （字符),输入 exit 返回主菜单：");
            String choose = scanner.next();
            if (choose.equalsIgnoreCase("exit")) {
                break;
            }
            // try-with-resources closes FileSystem, FSDataInputStream and the
            // reader (the original left the reader and the FileSystem open).
            try (FileSystem fileSystem = FileSystem.get(URI.create(choose), newConfiguration());
                 FSDataInputStream fsDataInputStream = fileSystem.open(new Path(choose));
                 BufferedReader bufferedReader = new BufferedReader(
                         new InputStreamReader(fsDataInputStream))) {
                String content;
                while ((content = bufferedReader.readLine()) != null) {
                    System.out.println(content);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

}
