package com.yunfei.commonbase.hdutil.conreoller;

import com.alibaba.fastjson.JSONObject;
import com.yunfei.commonbase.hdutil.HDFSUtil;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;

import javax.servlet.http.HttpServletResponse;
import java.io.FileInputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;

@RestController
@RequestMapping(value = "/hdfs")
public class HdfsController {

    @ApiOperation(value = "hdfs文件上传",notes = "<pre>\n" +
            "入参说明\n" +
            "目前权限一样  前6个参数不用动\n" +
            "targetHdfsPath：需要上传文件到hdfs的路径\n" +
            "示例：\n" +
            "{\n" +
            "\t\"hdfsKey\": \"oss\",\n" +
            "\t\"hdfsKeytabPath\": \"/mnt/cephfs/hdfsFile/oss.keytab\",\n" +
            "\t\"hdfsConfPath\": \"/mnt/cephfs/hdfsFile/krb5.conf\",\n" +
            "\t\"fsDefaultFS\": \"hdfs://pk-dsp-150:8020\",\n" +
            "\t\"namenode\": \"hdfs/_HOST@OSSOAS.COM\",\n" +
            "\t\"datanode\": \"hdfs/_HOST@OSSOAS.COM\",\n" +
            "\t\"targetHdfsPath\": \"/user/oss/test/yf/\"\n" +
            "}\n" +
            "</pre>")
    @PostMapping(value = "/upload")
    public String upload(String requestJson, MultipartFile file) {
        /*
         * Uploads a multipart file to the HDFS path given in requestJson.
         * Returns "success" on success, "error" on any failure (behavior
         * callers already depend on, so kept as-is).
         */
        try {
            JSONObject obj = JSONObject.parseObject(requestJson);
            String hdfsKey = obj.getString("hdfsKey");
            String hdfsKeytabPath = obj.getString("hdfsKeytabPath");
            String hdfsConfPath = obj.getString("hdfsConfPath");
            String fsDefaultFS = obj.getString("fsDefaultFS");
            String namenode = obj.getString("namenode");
            String datanode = obj.getString("datanode");
            // Target path = requested directory + original client file name.
            String targetHdfsPath = obj.getString("targetHdfsPath") + file.getOriginalFilename();
            // NOTE(review): the cast assumes the multipart implementation hands
            // back a FileInputStream; if HDFSUtil.fileToHdfs can accept a plain
            // InputStream, drop this cast — it can throw ClassCastException with
            // in-memory multipart resolvers. TODO confirm HDFSUtil's signature.
            // try-with-resources fixes the stream leak in the original code.
            try (FileInputStream fis = (FileInputStream) file.getInputStream()) {
                HDFSUtil.fileToHdfs(fis, targetHdfsPath, hdfsConfPath, fsDefaultFS, namenode, datanode, hdfsKey, hdfsKeytabPath);
            } catch (Exception e) {
                // Preserve the cause instead of discarding it (original dropped e).
                throw new RuntimeException("上传失败", e);
            }
            return "success";
        } catch (Exception e) {
            // TODO(review): replace printStackTrace with a proper logger (SLF4J)
            // once the project's logging facade is confirmed.
            e.printStackTrace();
            return "error";
        }
    }


    @ApiOperation(value = "hdfs文件下载",notes = "<pre>\n" +
            "入参说明\n" +
            "目前权限一样  前6个参数不用动\n" +
            "downloadPath：需要下载的文件路径指定到文件名\n" +
            "示例：\n" +
            "{\n" +
            "\t\"hdfsKey\": \"oss\",\n" +
            "\t\"hdfsKeytabPath\": \"/mnt/cephfs/hdfsFile/oss.keytab\",\n" +
            "\t\"hdfsConfPath\": \"/mnt/cephfs/hdfsFile/krb5.conf\",\n" +
            "\t\"fsDefaultFS\": \"hdfs://pk-dsp-150:8020\",\n" +
            "\t\"namenode\": \"hdfs/_HOST@OSSOAS.COM\",\n" +
            "\t\"datanode\": \"hdfs/_HOST@OSSOAS.COM\",\n" +
            "\t\"downloadPath\": \"/user/oss/test/yf/aa.txt\"\n" +
            "}\n" +
            "</pre>")
    @RequestMapping(value = "/download", method = RequestMethod.POST)
    public void download(@RequestBody String requestJson, HttpServletResponse response) {
        /*
         * Streams an HDFS file back to the client as an attachment.
         * Throws RuntimeException (with cause) on failure so the framework's
         * error handling kicks in, matching the original contract.
         */
        try {
            JSONObject obj = JSONObject.parseObject(requestJson);
            String hdfsKey = obj.getString("hdfsKey");
            String hdfsKeytabPath = obj.getString("hdfsKeytabPath");
            String hdfsConfPath = obj.getString("hdfsConfPath");
            String fsDefaultFS = obj.getString("fsDefaultFS");
            String namenode = obj.getString("namenode");
            String datanode = obj.getString("datanode");
            String downloadPath = obj.getString("downloadPath");

            // Attachment file name = last path segment of the HDFS path.
            String[] paths = downloadPath.split("/");
            String filename = paths[paths.length - 1];

            response.setContentType("application/octet-stream");
            // URLEncoder encodes spaces as '+', which is wrong inside a
            // Content-Disposition filename (RFC 6266) — map them to %20.
            String encodedName = java.net.URLEncoder.encode(filename, "UTF-8").replace("+", "%20");
            response.setHeader("Content-Disposition", "attachment;fileName=" + encodedName);// 设置文件名
            OutputStream os = response.getOutputStream();
            HDFSUtil.download(os, downloadPath, hdfsConfPath, fsDefaultFS, namenode, datanode, hdfsKey, hdfsKeytabPath);
            // Push any buffered bytes to the client; do NOT close os — the
            // servlet container owns the response stream's lifecycle.
            os.flush();
        } catch (Exception e) {
            e.printStackTrace();
            // Preserve the cause instead of discarding it (original dropped e).
            throw new RuntimeException("下载失败", e);
        }
    }


    @ApiOperation(value = "HDFS文件删除",notes = "<pre>\n" +
            "入参说明\n" +
            "目前权限一样  前6个参数不用动\n" +
            "delPath：需要删除的文件的路径或指定到文件\n" +
            "示例：\n" +
            "{\n" +
            "\t\"hdfsKey\": \"oss\",\n" +
            "\t\"hdfsKeytabPath\": \"/mnt/cephfs/hdfsFile/oss.keytab\",\n" +
            "\t\"hdfsConfPath\": \"/mnt/cephfs/hdfsFile/krb5.conf\",\n" +
            "\t\"fsDefaultFS\": \"hdfs://pk-dsp-150:8020\",\n" +
            "\t\"namenode\": \"hdfs/_HOST@OSSOAS.COM\",\n" +
            "\t\"datanode\": \"hdfs/_HOST@OSSOAS.COM\",\n" +
            "\t\"delPath\": \"/user/oss/test/yf/aa.txt\"\n" +
            "}\n" +
            "</pre>")
    @PostMapping(value = "/delete")
    @ApiImplicitParam(name = "requestJson", value = "requestJson", required = true, dataType = "string", paramType = "body")
    public String delete(@RequestBody String requestJson) {
        /*
         * Deletes the given HDFS path (file or directory).
         * Returns "SUCCESS" on success, otherwise the failure message.
         */
        try {
            JSONObject obj = JSONObject.parseObject(requestJson);
            String hdfsKey = obj.getString("hdfsKey");
            String hdfsKeytabPath = obj.getString("hdfsKeytabPath");
            String hdfsConfPath = obj.getString("hdfsConfPath");
            String fsDefaultFS = obj.getString("fsDefaultFS");
            String namenode = obj.getString("namenode");
            String datanode = obj.getString("datanode");
            String delPath = obj.getString("delPath");
            HDFSUtil.delete(delPath, hdfsConfPath, fsDefaultFS, namenode, datanode, hdfsKey, hdfsKeytabPath);
        } catch (Exception e) {
            // getMessage() can be null (e.g. bare NPE); fall back to the
            // exception class name so the client never gets an empty body.
            return e.getMessage() != null ? e.getMessage() : e.getClass().getSimpleName();
        }
        return "SUCCESS";
    }



    @ApiOperation(value = "HDFS目录下文件查询",notes = "<pre>\n" +
            "入参说明\n" +
            "目前权限一样  前6个参数不用动\n" +
            "path：需要查询的目录  会把之文件的内容也递归查询出来\n" +
            "示例：\n" +
            "{\n" +
            "\t\"hdfsKey\": \"oss\",\n" +
            "\t\"hdfsKeytabPath\": \"/mnt/cephfs/hdfsFile/oss.keytab\",\n" +
            "\t\"hdfsConfPath\": \"/mnt/cephfs/hdfsFile/krb5.conf\",\n" +
            "\t\"fsDefaultFS\": \"hdfs://pk-dsp-150:8020\",\n" +
            "\t\"namenode\": \"hdfs/_HOST@OSSOAS.COM\",\n" +
            "\t\"datanode\": \"hdfs/_HOST@OSSOAS.COM\",\n" +
            "\t\"path\": \"/user/oss/test/yf/\"\n" +
            "}\n" +
            "</pre>")
    @PostMapping(value = "/getFileList")
    @ApiImplicitParam(name = "requestJson", value = "requestJson", required = true, dataType = "string", paramType = "body")
    public Object getFileList(@RequestBody String requestJson) {
        /*
         * Recursively lists files under the given HDFS directory.
         * Returns a List<String> of paths on success, or the failure message
         * (String) on error — mixed return type kept for caller compatibility.
         */
        List<String> fileList = new ArrayList<>();
        try {
            JSONObject obj = JSONObject.parseObject(requestJson);
            String hdfsKey = obj.getString("hdfsKey");
            String hdfsKeytabPath = obj.getString("hdfsKeytabPath");
            String hdfsConfPath = obj.getString("hdfsConfPath");
            String fsDefaultFS = obj.getString("fsDefaultFS");
            String namenode = obj.getString("namenode");
            String datanode = obj.getString("datanode");
            String path = obj.getString("path");
            fileList = HDFSUtil.autoList(path, hdfsConfPath, fsDefaultFS, namenode, datanode, hdfsKey, hdfsKeytabPath);
        } catch (Exception e) {
            e.printStackTrace();
            return e.getMessage() != null ? e.getMessage() : e.getClass().getSimpleName();
        }
        return fileList;
    }


    // TODO(review): leftover manual smoke test — move into a unit test and
    // delete from this controller. Kept only to avoid changing the public
    // surface in this pass.
    public static void main(String[] args) {
        String path = "/home/vet/data/aa.txt";
        String[] paths = path.split("/");
        String filename = paths[paths.length - 1];
        System.out.println(filename);
    }

}
