package com.zlh.hdfsfilesys.controller;

import com.zlh.hdfsfilesys.kit.HadoopController;
import com.zlh.hdfsfilesys.kit.KitClass;
import com.zlh.hdfsfilesys.entity.HdFile;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;

import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;

/**
 * Controller backing the main page (hdfslist.html).
 * Handles listing, directory creation, download, deletion and upload
 * requests against the HDFS cluster.
 */
@Controller
public class hdfslistController {

    /**
     * Namenode URL of the target cluster. Also stripped from absolute
     * HDFS paths when building the file listing shown to the user.
     * NOTE(review): hard-coded address — consider externalizing to configuration.
     */
    private static final String HDFS_URL = "hdfs://192.168.48.128:9000/";

    /**
     * Normalizes a user-supplied HDFS path parameter.
     * Rejects missing parameters, empty strings and the literal "null"
     * placeholder the front end sends, and guarantees a leading '/'.
     *
     * @param dir raw request parameter; may be {@code null}
     * @return an absolute HDFS path, or {@code null} when the input is unusable
     */
    private static String normalizePath(String dir) {
        if (dir == null || dir.isEmpty() || dir.contains("null")) {
            return null;
        }
        return dir.charAt(0) == '/' ? dir : '/' + dir;
    }

    /**
     * Lists the contents of an HDFS directory for the main page.
     *
     * @param dir   directory to list, taken from the HTTP request; defaults to "/"
     * @param model receives "hdfiles" (the file list) and "cdir" (current dir,
     *              without its leading '/')
     * @return the "hdfslist" view
     */
    @RequestMapping("/hdfslist")
    public String hdfslist(String dir, Model model) throws IOException, InterruptedException {
        // Local instance: SimpleDateFormat is not thread-safe, so it must not be shared.
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm");
        if (dir == null || dir.isEmpty()) {
            dir = "/";
        } else if (dir.charAt(0) != '/') {
            dir = '/' + dir;
        }

        HadoopController hadoopController = new HadoopController();
        hadoopController.getFileSystem();
        FileStatus[] fileStatuses = hadoopController.hdls(dir);
        List<HdFile> hdFileList = new ArrayList<HdFile>();
        if (fileStatuses != null) {  // hdls may return null for a missing dir — TODO confirm
            for (FileStatus fileStatus : fileStatuses) {
                // Strip the scheme/authority prefix so the UI shows cluster-relative paths.
                hdFileList.add(new HdFile(fileStatus.getPath().toString().replace(HDFS_URL, ""),
                        KitClass.formatFileSize(fileStatus.getLen()), fileStatus.isDirectory(),
                        fileStatus.getOwner(), format.format(fileStatus.getModificationTime())));
            }
        }
        model.addAttribute("hdfiles", hdFileList);
        // The view expects the current dir without its leading '/'.
        dir = dir.replaceFirst("/", "");
        model.addAttribute("cdir", dir);

        return "hdfslist";
    }

    /**
     * Creates a directory on HDFS.
     *
     * @param newdir absolute path of the new directory, from the HTTP request
     * @return the "wait" view
     */
    @RequestMapping("/mkdir")
    public String mkdir(String newdir, Model model) throws IOException, InterruptedException {
        String target = normalizePath(newdir);
        if (target == null) {
            return "wait";
        }
        HadoopController hadoopController = new HadoopController();
        hadoopController.getFileSystem();
        hadoopController.hdmkdir(target);
        return "wait";
    }

    /**
     * Downloads a file from HDFS to the local disk.
     *
     * @param fdir absolute HDFS path of the file, from the HTTP request
     * @return the "wait" view
     */
    @RequestMapping("/getdir")
    public String getfile(String fdir, Model model) throws IOException, InterruptedException {
        String target = normalizePath(fdir);
        if (target == null) {
            return "wait";
        }
        HadoopController hadoopController = new HadoopController();
        hadoopController.getFileSystem();
        // NOTE(review): download destination is hard-coded to E:/ — server-local,
        // Windows-only; consider making it configurable.
        hadoopController.hdget(target, "E:/");
        return "wait";
    }

    /**
     * Deletes a file or directory on HDFS.
     *
     * @param deldir absolute HDFS path to delete, from the HTTP request
     * @return the "wait" view
     */
    @RequestMapping("/delf")
    public String delfile(String deldir, Model model) throws IOException, InterruptedException {
        String target = normalizePath(deldir);
        if (target == null) {
            return "wait";
        }
        HadoopController hadoopController = new HadoopController();
        hadoopController.getFileSystem();
        hadoopController.hddel(target);
        return "wait";
    }

    /**
     * Uploads a file received from the browser (layui) into HDFS.
     *
     * @param file uploaded multipart file
     * @param path HDFS directory the user is currently browsing
     * @return "true" on success, "false" on I/O failure
     */
    @ResponseBody
    @RequestMapping("/hdputTest")
    public String uploadFile(MultipartFile file, String path) throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        // try-with-resources guarantees the FileSystem and both streams are
        // closed even when the copy fails (the original leaked all three on error).
        try (FileSystem hdfs = FileSystem.get(URI.create(HDFS_URL), conf, "root")) {
            Path dst = new Path(path, file.getOriginalFilename());
            try (FSDataOutputStream out = hdfs.create(dst);
                 InputStream in = file.getInputStream()) {
                byte[] buffer = new byte[8192];
                int len;
                // Write through the FSDataOutputStream itself rather than
                // getWrappedStream(), so Hadoop's position/checksum bookkeeping applies.
                while ((len = in.read(buffer)) != -1) {
                    out.write(buffer, 0, len);
                }
                out.flush();
                return "true";
            }
        } catch (IOException e) {
            // Best-effort: report failure to the caller instead of a 500.
            e.printStackTrace();
            return "false";
        }
    }
}
